re PR c++/66270 (ICE: canonical types differ for identical types)
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
  24     tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
  28     call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "attribs.h"
49 #include "varasm.h"
50 #include "tm_p.h"
51 #include "hashtab.h"
52 #include "hard-reg-set.h"
53 #include "function.h"
54 #include "obstack.h"
55 #include "toplev.h" /* get_random_seed */
56 #include "filenames.h"
57 #include "output.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "langhooks.h"
61 #include "tree-inline.h"
62 #include "tree-iterator.h"
63 #include "predict.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "gimple-expr.h"
71 #include "is-a.h"
72 #include "gimple.h"
73 #include "gimple-iterator.h"
74 #include "gimplify.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-phinodes.h"
81 #include "stringpool.h"
82 #include "tree-ssanames.h"
83 #include "rtl.h"
84 #include "statistics.h"
85 #include "real.h"
86 #include "fixed-value.h"
87 #include "insn-config.h"
88 #include "expmed.h"
89 #include "dojump.h"
90 #include "explow.h"
91 #include "emit-rtl.h"
92 #include "stmt.h"
93 #include "expr.h"
94 #include "tree-dfa.h"
95 #include "params.h"
96 #include "tree-pass.h"
97 #include "langhooks-def.h"
98 #include "diagnostic.h"
99 #include "tree-diagnostic.h"
100 #include "tree-pretty-print.h"
101 #include "except.h"
102 #include "debug.h"
103 #include "intl.h"
104 #include "builtins.h"
105 #include "print-tree.h"
106 #include "ipa-utils.h"
107
108 /* Tree code classes. */
109
110 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
111 #define END_OF_BASE_TREE_CODES tcc_exceptional,
112
113 const enum tree_code_class tree_code_type[] = {
114 #include "all-tree.def"
115 };
116
117 #undef DEFTREECODE
118 #undef END_OF_BASE_TREE_CODES
119
120 /* Table indexed by tree code giving number of expression
121 operands beyond the fixed part of the node structure.
122 Not used for types or decls. */
123
124 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
125 #define END_OF_BASE_TREE_CODES 0,
126
127 const unsigned char tree_code_length[] = {
128 #include "all-tree.def"
129 };
130
131 #undef DEFTREECODE
132 #undef END_OF_BASE_TREE_CODES
133
134 /* Names of tree components.
135 Used for printing out the tree and error messages. */
136 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
137 #define END_OF_BASE_TREE_CODES "@dummy",
138
139 static const char *const tree_code_name[] = {
140 #include "all-tree.def"
141 };
142
143 #undef DEFTREECODE
144 #undef END_OF_BASE_TREE_CODES
145
146 /* Each tree code class has an associated string representation.
147 These must correspond to the tree_code_class entries. */
148
149 const char *const tree_code_class_strings[] =
150 {
151 "exceptional",
152 "constant",
153 "type",
154 "declaration",
155 "reference",
156 "comparison",
157 "unary",
158 "binary",
159 "statement",
160 "vl_exp",
161 "expression"
162 };
163
164 /* obstack.[ch] explicitly declined to prototype this. */
165 extern int _obstack_allocated_p (struct obstack *h, void *obj);
166
167 /* Statistics-gathering stuff. */
168
169 static int tree_code_counts[MAX_TREE_CODES];
170 int tree_node_counts[(int) all_kinds];
171 int tree_node_sizes[(int) all_kinds];
172
173 /* Keep in sync with tree.h:enum tree_node_kind. */
174 static const char * const tree_node_kind_names[] = {
175 "decls",
176 "types",
177 "blocks",
178 "stmts",
179 "refs",
180 "exprs",
181 "constants",
182 "identifiers",
183 "vecs",
184 "binfos",
185 "ssa names",
186 "constructors",
187 "random kinds",
188 "lang_decl kinds",
189 "lang_type kinds",
190 "omp clauses",
191 };
192
193 /* Unique id for next decl created. */
194 static GTY(()) int next_decl_uid;
195 /* Unique id for next type created. */
196 static GTY(()) int next_type_uid = 1;
197 /* Unique id for next debug decl created. Use negative numbers,
198 to catch erroneous uses. */
199 static GTY(()) int next_debug_decl_uid;
200
201 /* Since we cannot rehash a type after it is in the table, we have to
202 keep the hash code. */
203
204 struct GTY((for_user)) type_hash {
205 unsigned long hash;
206 tree type;
207 };
208
209 /* Initial size of the hash table (rounded to next prime). */
210 #define TYPE_HASH_INITIAL_SIZE 1000
211
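/* Hasher for the table of interned types.  Cache entries whose type is
   no longer marked by the garbage collector are dropped when the cache
   is scanned (see handle_cache_entry below).  */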
212 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
213 {
214 static hashval_t hash (type_hash *t) { return t->hash; }
215 static bool equal (type_hash *a, type_hash *b);
216
217 static void
218 handle_cache_entry (type_hash *&t)
219 {
220 extern void gt_ggc_mx (type_hash *&);
221 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
222 return;
223 else if (ggc_marked_p (t->type))
224 gt_ggc_mx (t);
225 else
226 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
227 }
228 };
229
230 /* Now here is the hash table. When recording a type, it is added to
231 the slot whose index is the hash code. Note that the hash table is
232 used for several kinds of types (function types, array types and
233 array index range types, for now). While all these live in the
234 same table, they are completely independent, and the hash code is
235 computed differently for each of these. */
236
237 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
238
239 /* Hash table and temporary node for larger integer const values. */
240 static GTY (()) tree int_cst_node;
241
242 struct int_cst_hasher : ggc_cache_hasher<tree>
243 {
244 static hashval_t hash (tree t);
245 static bool equal (tree x, tree y);
246 };
247
248 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
249
250 /* Hash table for optimization flags and target option flags. Use the same
251 hash table for both sets of options. Nodes for building the current
  252     optimization and target option nodes.  The assumption is that most of
  253     the time the options created will already be in the hash table, so we
  254     avoid allocating and freeing up a node repeatedly.  */
255 static GTY (()) tree cl_optimization_node;
256 static GTY (()) tree cl_target_option_node;
257
258 struct cl_option_hasher : ggc_cache_hasher<tree>
259 {
260 static hashval_t hash (tree t);
261 static bool equal (tree x, tree y);
262 };
263
264 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
265
266 /* General tree->tree mapping structure for use in hash tables. */
267
268
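/* Map from a decl to its DECL_DEBUG_EXPR, and from a decl to its
   DECL_VALUE_EXPR, respectively.  */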
269 static GTY ((cache))
270 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
271
272 static GTY ((cache))
273 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
274
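/* Hasher for tree_vec_map entries, keyed by the DECL_UID of the decl
   they map from; entries whose key is no longer GC-marked are dropped
   at collection time.  */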
275 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
276 {
277 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
278
279 static bool
280 equal (tree_vec_map *a, tree_vec_map *b)
281 {
282 return a->base.from == b->base.from;
283 }
284
285 static void
286 handle_cache_entry (tree_vec_map *&m)
287 {
288 extern void gt_ggc_mx (tree_vec_map *&);
289 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
290 return;
291 else if (ggc_marked_p (m->base.from))
292 gt_ggc_mx (m);
293 else
294 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
295 }
296 };
297
298 static GTY ((cache))
299 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
300
301 static void set_type_quals (tree, int);
302 static void print_type_hash_statistics (void);
303 static void print_debug_expr_statistics (void);
304 static void print_value_expr_statistics (void);
305 static void type_hash_list (const_tree, inchash::hash &);
306 static void attribute_hash_list (const_tree, inchash::hash &);
307
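/* Commonly used tree nodes, indexed by enum tree_index, and the
   standard C integer types, indexed by enum integer_type_kind.  */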
308 tree global_trees[TI_MAX];
309 tree integer_types[itk_none];
310
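/* For each __intN width supported by the target: whether it is enabled,
   and its signed and unsigned type nodes.  */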
311 bool int_n_enabled_p[NUM_INT_N_ENTS];
312 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
313
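/* tree_contains_struct[CODE][STRUCT] is nonzero if nodes with tree code
   CODE contain tree structure STRUCT.  Filled in by
   initialize_tree_contains_struct below.  */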
314 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
315
316 /* Number of operands for each OpenMP clause. */
317 unsigned const char omp_clause_num_ops[] =
318 {
319 0, /* OMP_CLAUSE_ERROR */
320 1, /* OMP_CLAUSE_PRIVATE */
321 1, /* OMP_CLAUSE_SHARED */
322 1, /* OMP_CLAUSE_FIRSTPRIVATE */
323 2, /* OMP_CLAUSE_LASTPRIVATE */
324 4, /* OMP_CLAUSE_REDUCTION */
325 1, /* OMP_CLAUSE_COPYIN */
326 1, /* OMP_CLAUSE_COPYPRIVATE */
327 3, /* OMP_CLAUSE_LINEAR */
328 2, /* OMP_CLAUSE_ALIGNED */
329 1, /* OMP_CLAUSE_DEPEND */
330 1, /* OMP_CLAUSE_UNIFORM */
331 2, /* OMP_CLAUSE_FROM */
332 2, /* OMP_CLAUSE_TO */
333 2, /* OMP_CLAUSE_MAP */
334 2, /* OMP_CLAUSE__CACHE_ */
335 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
336 1, /* OMP_CLAUSE_USE_DEVICE */
337 2, /* OMP_CLAUSE_GANG */
338 1, /* OMP_CLAUSE_ASYNC */
339 1, /* OMP_CLAUSE_WAIT */
340 0, /* OMP_CLAUSE_AUTO */
341 0, /* OMP_CLAUSE_SEQ */
342 1, /* OMP_CLAUSE__LOOPTEMP_ */
343 1, /* OMP_CLAUSE_IF */
344 1, /* OMP_CLAUSE_NUM_THREADS */
345 1, /* OMP_CLAUSE_SCHEDULE */
346 0, /* OMP_CLAUSE_NOWAIT */
347 0, /* OMP_CLAUSE_ORDERED */
348 0, /* OMP_CLAUSE_DEFAULT */
349 3, /* OMP_CLAUSE_COLLAPSE */
350 0, /* OMP_CLAUSE_UNTIED */
351 1, /* OMP_CLAUSE_FINAL */
352 0, /* OMP_CLAUSE_MERGEABLE */
353 1, /* OMP_CLAUSE_DEVICE */
354 1, /* OMP_CLAUSE_DIST_SCHEDULE */
355 0, /* OMP_CLAUSE_INBRANCH */
356 0, /* OMP_CLAUSE_NOTINBRANCH */
357 1, /* OMP_CLAUSE_NUM_TEAMS */
358 1, /* OMP_CLAUSE_THREAD_LIMIT */
359 0, /* OMP_CLAUSE_PROC_BIND */
360 1, /* OMP_CLAUSE_SAFELEN */
361 1, /* OMP_CLAUSE_SIMDLEN */
362 0, /* OMP_CLAUSE_FOR */
363 0, /* OMP_CLAUSE_PARALLEL */
364 0, /* OMP_CLAUSE_SECTIONS */
365 0, /* OMP_CLAUSE_TASKGROUP */
366 1, /* OMP_CLAUSE__SIMDUID_ */
367 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
368 0, /* OMP_CLAUSE_INDEPENDENT */
369 1, /* OMP_CLAUSE_WORKER */
370 1, /* OMP_CLAUSE_VECTOR */
371 1, /* OMP_CLAUSE_NUM_GANGS */
372 1, /* OMP_CLAUSE_NUM_WORKERS */
373 1, /* OMP_CLAUSE_VECTOR_LENGTH */
374 };
375
376 const char * const omp_clause_code_name[] =
377 {
378 "error_clause",
379 "private",
380 "shared",
381 "firstprivate",
382 "lastprivate",
383 "reduction",
384 "copyin",
385 "copyprivate",
386 "linear",
387 "aligned",
388 "depend",
389 "uniform",
390 "from",
391 "to",
392 "map",
393 "_cache_",
394 "device_resident",
395 "use_device",
396 "gang",
397 "async",
398 "wait",
399 "auto",
400 "seq",
401 "_looptemp_",
402 "if",
403 "num_threads",
404 "schedule",
405 "nowait",
406 "ordered",
407 "default",
408 "collapse",
409 "untied",
410 "final",
411 "mergeable",
412 "device",
413 "dist_schedule",
414 "inbranch",
415 "notinbranch",
416 "num_teams",
417 "thread_limit",
418 "proc_bind",
419 "safelen",
420 "simdlen",
421 "for",
422 "parallel",
423 "sections",
424 "taskgroup",
425 "_simduid_",
426 "_Cilk_for_count_",
427 "independent",
428 "worker",
429 "vector",
430 "num_gangs",
431 "num_workers",
432 "vector_length"
433 };
434
435
436 /* Return the tree node structure used by tree code CODE. */
437
438 static inline enum tree_node_structure_enum
439 tree_node_structure_for_code (enum tree_code code)
440 {
441 switch (TREE_CODE_CLASS (code))
442 {
443 case tcc_declaration:
444 {
445 switch (code)
446 {
447 case FIELD_DECL:
448 return TS_FIELD_DECL;
449 case PARM_DECL:
450 return TS_PARM_DECL;
451 case VAR_DECL:
452 return TS_VAR_DECL;
453 case LABEL_DECL:
454 return TS_LABEL_DECL;
455 case RESULT_DECL:
456 return TS_RESULT_DECL;
457 case DEBUG_EXPR_DECL:
458 return TS_DECL_WRTL;
459 case CONST_DECL:
460 return TS_CONST_DECL;
461 case TYPE_DECL:
462 return TS_TYPE_DECL;
463 case FUNCTION_DECL:
464 return TS_FUNCTION_DECL;
465 case TRANSLATION_UNIT_DECL:
466 return TS_TRANSLATION_UNIT_DECL;
467 default:
468 return TS_DECL_NON_COMMON;
469 }
470 }
471 case tcc_type:
472 return TS_TYPE_NON_COMMON;
473 case tcc_reference:
474 case tcc_comparison:
475 case tcc_unary:
476 case tcc_binary:
477 case tcc_expression:
478 case tcc_statement:
479 case tcc_vl_exp:
480 return TS_EXP;
481 default: /* tcc_constant and tcc_exceptional */
482 break;
483 }
484 switch (code)
485 {
486 /* tcc_constant cases. */
487 case VOID_CST: return TS_TYPED;
488 case INTEGER_CST: return TS_INT_CST;
489 case REAL_CST: return TS_REAL_CST;
490 case FIXED_CST: return TS_FIXED_CST;
491 case COMPLEX_CST: return TS_COMPLEX;
492 case VECTOR_CST: return TS_VECTOR;
493 case STRING_CST: return TS_STRING;
494 /* tcc_exceptional cases. */
495 case ERROR_MARK: return TS_COMMON;
496 case IDENTIFIER_NODE: return TS_IDENTIFIER;
497 case TREE_LIST: return TS_LIST;
498 case TREE_VEC: return TS_VEC;
499 case SSA_NAME: return TS_SSA_NAME;
500 case PLACEHOLDER_EXPR: return TS_COMMON;
501 case STATEMENT_LIST: return TS_STATEMENT_LIST;
502 case BLOCK: return TS_BLOCK;
503 case CONSTRUCTOR: return TS_CONSTRUCTOR;
504 case TREE_BINFO: return TS_BINFO;
505 case OMP_CLAUSE: return TS_OMP_CLAUSE;
506 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
507 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
508
509 default:
510 gcc_unreachable ();
511 }
512 }
513
514
515 /* Initialize tree_contains_struct to describe the hierarchy of tree
516 nodes. */
517
518 static void
519 initialize_tree_contains_struct (void)
520 {
521 unsigned i;
522
523 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
524 {
525 enum tree_code code;
526 enum tree_node_structure_enum ts_code;
527
528 code = (enum tree_code) i;
529 ts_code = tree_node_structure_for_code (code);
530
531 /* Mark the TS structure itself. */
532 tree_contains_struct[code][ts_code] = 1;
533
534 /* Mark all the structures that TS is derived from. */
535 switch (ts_code)
536 {
537 case TS_TYPED:
538 case TS_BLOCK:
539 MARK_TS_BASE (code);
540 break;
541
542 case TS_COMMON:
543 case TS_INT_CST:
544 case TS_REAL_CST:
545 case TS_FIXED_CST:
546 case TS_VECTOR:
547 case TS_STRING:
548 case TS_COMPLEX:
549 case TS_SSA_NAME:
550 case TS_CONSTRUCTOR:
551 case TS_EXP:
552 case TS_STATEMENT_LIST:
553 MARK_TS_TYPED (code);
554 break;
555
556 case TS_IDENTIFIER:
557 case TS_DECL_MINIMAL:
558 case TS_TYPE_COMMON:
559 case TS_LIST:
560 case TS_VEC:
561 case TS_BINFO:
562 case TS_OMP_CLAUSE:
563 case TS_OPTIMIZATION:
564 case TS_TARGET_OPTION:
565 MARK_TS_COMMON (code);
566 break;
567
568 case TS_TYPE_WITH_LANG_SPECIFIC:
569 MARK_TS_TYPE_COMMON (code);
570 break;
571
572 case TS_TYPE_NON_COMMON:
573 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
574 break;
575
576 case TS_DECL_COMMON:
577 MARK_TS_DECL_MINIMAL (code);
578 break;
579
580 case TS_DECL_WRTL:
581 case TS_CONST_DECL:
582 MARK_TS_DECL_COMMON (code);
583 break;
584
585 case TS_DECL_NON_COMMON:
586 MARK_TS_DECL_WITH_VIS (code);
587 break;
588
589 case TS_DECL_WITH_VIS:
590 case TS_PARM_DECL:
591 case TS_LABEL_DECL:
592 case TS_RESULT_DECL:
593 MARK_TS_DECL_WRTL (code);
594 break;
595
596 case TS_FIELD_DECL:
597 MARK_TS_DECL_COMMON (code);
598 break;
599
600 case TS_VAR_DECL:
601 MARK_TS_DECL_WITH_VIS (code);
602 break;
603
604 case TS_TYPE_DECL:
605 case TS_FUNCTION_DECL:
606 MARK_TS_DECL_NON_COMMON (code);
607 break;
608
609 case TS_TRANSLATION_UNIT_DECL:
610 MARK_TS_DECL_COMMON (code);
611 break;
612
613 default:
614 gcc_unreachable ();
615 }
616 }
617
618 /* Basic consistency checks for attributes used in fold. */
619 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
620 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
621 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
622 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
623 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
624 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
625 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
626 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
629 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
631 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
632 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
634 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
645 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
646 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
647 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
648 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
649 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
650 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
651 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
652 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
653 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
654 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
655 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
657 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
659 }
660
661
662 /* Init tree.c. */
663
664 void
665 init_ttree (void)
666 {
667 /* Initialize the hash table of types. */
668 type_hash_table
669 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
670
671 debug_expr_for_decl
672 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
673
674 value_expr_for_decl
675 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
676
677 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
678
679 int_cst_node = make_int_cst (1, 1);
680
681 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
682
683 cl_optimization_node = make_node (OPTIMIZATION_NODE);
684 cl_target_option_node = make_node (TARGET_OPTION_NODE);
685
686 /* Initialize the tree_contains_struct array. */
687 initialize_tree_contains_struct ();
688 lang_hooks.init_ts ();
689 }
690
691 \f
692 /* The name of the object as the assembler will see it (but before any
693 translations made by ASM_OUTPUT_LABELREF). Often this is the same
694 as DECL_NAME. It is an IDENTIFIER_NODE. */
695 tree
696 decl_assembler_name (tree decl)
697 {
698 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
699 lang_hooks.set_decl_assembler_name (decl);
700 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
701 }
702
703 /* When the target supports COMDAT groups, this indicates which group the
704 DECL is associated with. This can be either an IDENTIFIER_NODE or a
705 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
706 tree
707 decl_comdat_group (const_tree node)
708 {
709 struct symtab_node *snode = symtab_node::get (node);
710 if (!snode)
711 return NULL;
712 return snode->get_comdat_group ();
713 }
714
715 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
716 tree
717 decl_comdat_group_id (const_tree node)
718 {
719 struct symtab_node *snode = symtab_node::get (node);
720 if (!snode)
721 return NULL;
722 return snode->get_comdat_group_id ();
723 }
724
  725 /* When the target supports named sections, return the section name of
  726    NODE as a string, or NULL if it is in no section.  */
727 const char *
728 decl_section_name (const_tree node)
729 {
730 struct symtab_node *snode = symtab_node::get (node);
731 if (!snode)
732 return NULL;
733 return snode->get_section ();
734 }
735
  736 /* Set the section name of NODE to the string VALUE, or clear the
  737    section association if VALUE is NULL.  */
738 void
739 set_decl_section_name (tree node, const char *value)
740 {
741 struct symtab_node *snode;
742
743 if (value == NULL)
744 {
745 snode = symtab_node::get (node);
746 if (!snode)
747 return;
748 }
749 else if (TREE_CODE (node) == VAR_DECL)
750 snode = varpool_node::get_create (node);
751 else
752 snode = cgraph_node::get_create (node);
753 snode->set_section (value);
754 }
755
756 /* Return TLS model of a variable NODE. */
757 enum tls_model
758 decl_tls_model (const_tree node)
759 {
760 struct varpool_node *snode = varpool_node::get (node);
761 if (!snode)
762 return TLS_MODEL_NONE;
763 return snode->tls_model;
764 }
765
766 /* Set TLS model of variable NODE to MODEL. */
767 void
768 set_decl_tls_model (tree node, enum tls_model model)
769 {
770 struct varpool_node *vnode;
771
772 if (model == TLS_MODEL_NONE)
773 {
774 vnode = varpool_node::get (node);
775 if (!vnode)
776 return;
777 }
778 else
779 vnode = varpool_node::get_create (node);
780 vnode->tls_model = model;
781 }
782
783 /* Compute the number of bytes occupied by a tree with code CODE.
784 This function cannot be used for nodes that have variable sizes,
785 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
786 size_t
787 tree_code_size (enum tree_code code)
788 {
789 switch (TREE_CODE_CLASS (code))
790 {
791 case tcc_declaration: /* A decl node */
792 {
793 switch (code)
794 {
795 case FIELD_DECL:
796 return sizeof (struct tree_field_decl);
797 case PARM_DECL:
798 return sizeof (struct tree_parm_decl);
799 case VAR_DECL:
800 return sizeof (struct tree_var_decl);
801 case LABEL_DECL:
802 return sizeof (struct tree_label_decl);
803 case RESULT_DECL:
804 return sizeof (struct tree_result_decl);
805 case CONST_DECL:
806 return sizeof (struct tree_const_decl);
807 case TYPE_DECL:
808 return sizeof (struct tree_type_decl);
809 case FUNCTION_DECL:
810 return sizeof (struct tree_function_decl);
811 case DEBUG_EXPR_DECL:
812 return sizeof (struct tree_decl_with_rtl);
813 case TRANSLATION_UNIT_DECL:
814 return sizeof (struct tree_translation_unit_decl);
815 case NAMESPACE_DECL:
816 case IMPORTED_DECL:
817 case NAMELIST_DECL:
818 return sizeof (struct tree_decl_non_common);
819 default:
820 return lang_hooks.tree_size (code);
821 }
822 }
823
824 case tcc_type: /* a type node */
825 return sizeof (struct tree_type_non_common);
826
827 case tcc_reference: /* a reference */
828 case tcc_expression: /* an expression */
829 case tcc_statement: /* an expression with side effects */
830 case tcc_comparison: /* a comparison expression */
831 case tcc_unary: /* a unary arithmetic expression */
832 case tcc_binary: /* a binary arithmetic expression */
833 return (sizeof (struct tree_exp)
834 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
835
836 case tcc_constant: /* a constant */
837 switch (code)
838 {
839 case VOID_CST: return sizeof (struct tree_typed);
840 case INTEGER_CST: gcc_unreachable ();
841 case REAL_CST: return sizeof (struct tree_real_cst);
842 case FIXED_CST: return sizeof (struct tree_fixed_cst);
843 case COMPLEX_CST: return sizeof (struct tree_complex);
844 case VECTOR_CST: return sizeof (struct tree_vector);
845 case STRING_CST: gcc_unreachable ();
846 default:
847 return lang_hooks.tree_size (code);
848 }
849
850 case tcc_exceptional: /* something random, like an identifier. */
851 switch (code)
852 {
853 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
854 case TREE_LIST: return sizeof (struct tree_list);
855
856 case ERROR_MARK:
857 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
858
859 case TREE_VEC:
860 case OMP_CLAUSE: gcc_unreachable ();
861
862 case SSA_NAME: return sizeof (struct tree_ssa_name);
863
864 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
865 case BLOCK: return sizeof (struct tree_block);
866 case CONSTRUCTOR: return sizeof (struct tree_constructor);
867 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
868 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
869
870 default:
871 return lang_hooks.tree_size (code);
872 }
873
874 default:
875 gcc_unreachable ();
876 }
877 }
878
879 /* Compute the number of bytes occupied by NODE. This routine only
880 looks at TREE_CODE, except for those nodes that have variable sizes. */
881 size_t
882 tree_size (const_tree node)
883 {
884 const enum tree_code code = TREE_CODE (node);
885 switch (code)
886 {
887 case INTEGER_CST:
888 return (sizeof (struct tree_int_cst)
889 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
890
891 case TREE_BINFO:
892 return (offsetof (struct tree_binfo, base_binfos)
893 + vec<tree, va_gc>
894 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
895
896 case TREE_VEC:
897 return (sizeof (struct tree_vec)
898 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
899
900 case VECTOR_CST:
901 return (sizeof (struct tree_vector)
902 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
903
904 case STRING_CST:
905 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
906
907 case OMP_CLAUSE:
908 return (sizeof (struct tree_omp_clause)
909 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
910 * sizeof (tree));
911
912 default:
913 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
914 return (sizeof (struct tree_exp)
915 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
916 else
917 return tree_code_size (code);
918 }
919 }
920
921 /* Record interesting allocation statistics for a tree node with CODE
922 and LENGTH. */
923
924 static void
925 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
926 size_t length ATTRIBUTE_UNUSED)
927 {
928 enum tree_code_class type = TREE_CODE_CLASS (code);
929 tree_node_kind kind;
930
931 if (!GATHER_STATISTICS)
932 return;
933
934 switch (type)
935 {
936 case tcc_declaration: /* A decl node */
937 kind = d_kind;
938 break;
939
940 case tcc_type: /* a type node */
941 kind = t_kind;
942 break;
943
944 case tcc_statement: /* an expression with side effects */
945 kind = s_kind;
946 break;
947
948 case tcc_reference: /* a reference */
949 kind = r_kind;
950 break;
951
952 case tcc_expression: /* an expression */
953 case tcc_comparison: /* a comparison expression */
954 case tcc_unary: /* a unary arithmetic expression */
955 case tcc_binary: /* a binary arithmetic expression */
956 kind = e_kind;
957 break;
958
959 case tcc_constant: /* a constant */
960 kind = c_kind;
961 break;
962
963 case tcc_exceptional: /* something random, like an identifier. */
964 switch (code)
965 {
966 case IDENTIFIER_NODE:
967 kind = id_kind;
968 break;
969
970 case TREE_VEC:
971 kind = vec_kind;
972 break;
973
974 case TREE_BINFO:
975 kind = binfo_kind;
976 break;
977
978 case SSA_NAME:
979 kind = ssa_name_kind;
980 break;
981
982 case BLOCK:
983 kind = b_kind;
984 break;
985
986 case CONSTRUCTOR:
987 kind = constr_kind;
988 break;
989
990 case OMP_CLAUSE:
991 kind = omp_clause_kind;
992 break;
993
994 default:
995 kind = x_kind;
996 break;
997 }
998 break;
999
1000 case tcc_vl_exp:
1001 kind = e_kind;
1002 break;
1003
1004 default:
1005 gcc_unreachable ();
1006 }
1007
1008 tree_code_counts[(int) code]++;
1009 tree_node_counts[(int) kind]++;
1010 tree_node_sizes[(int) kind] += length;
1011 }
1012
1013 /* Allocate and return a new UID from the DECL_UID namespace. */
1014
1015 int
1016 allocate_decl_uid (void)
1017 {
1018 return next_decl_uid++;
1019 }
1020
1021 /* Return a newly allocated node of code CODE. For decl and type
1022 nodes, some other fields are initialized. The rest of the node is
1023 initialized to zero. This function cannot be used for TREE_VEC,
1024 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1025 tree_code_size.
1026
1027 Achoo! I got a code in the node. */
1028
1029 tree
1030 make_node_stat (enum tree_code code MEM_STAT_DECL)
1031 {
1032 tree t;
1033 enum tree_code_class type = TREE_CODE_CLASS (code);
1034 size_t length = tree_code_size (code);
1035
1036 record_node_allocation_statistics (code, length);
1037
1038 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1039 TREE_SET_CODE (t, code);
1040
1041 switch (type)
1042 {
1043 case tcc_statement:
1044 TREE_SIDE_EFFECTS (t) = 1;
1045 break;
1046
1047 case tcc_declaration:
1048 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1049 {
1050 if (code == FUNCTION_DECL)
1051 {
1052 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1053 DECL_MODE (t) = FUNCTION_MODE;
1054 }
1055 else
1056 DECL_ALIGN (t) = 1;
1057 }
1058 DECL_SOURCE_LOCATION (t) = input_location;
1059 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1060 DECL_UID (t) = --next_debug_decl_uid;
1061 else
1062 {
1063 DECL_UID (t) = allocate_decl_uid ();
1064 SET_DECL_PT_UID (t, -1);
1065 }
1066 if (TREE_CODE (t) == LABEL_DECL)
1067 LABEL_DECL_UID (t) = -1;
1068
1069 break;
1070
1071 case tcc_type:
1072 TYPE_UID (t) = next_type_uid++;
1073 TYPE_ALIGN (t) = BITS_PER_UNIT;
1074 TYPE_USER_ALIGN (t) = 0;
1075 TYPE_MAIN_VARIANT (t) = t;
1076 TYPE_CANONICAL (t) = t;
1077
1078 /* Default to no attributes for type, but let target change that. */
1079 TYPE_ATTRIBUTES (t) = NULL_TREE;
1080 targetm.set_default_type_attributes (t);
1081
1082 /* We have not yet computed the alias set for this type. */
1083 TYPE_ALIAS_SET (t) = -1;
1084 break;
1085
1086 case tcc_constant:
1087 TREE_CONSTANT (t) = 1;
1088 break;
1089
1090 case tcc_expression:
1091 switch (code)
1092 {
1093 case INIT_EXPR:
1094 case MODIFY_EXPR:
1095 case VA_ARG_EXPR:
1096 case PREDECREMENT_EXPR:
1097 case PREINCREMENT_EXPR:
1098 case POSTDECREMENT_EXPR:
1099 case POSTINCREMENT_EXPR:
1100 /* All of these have side-effects, no matter what their
1101 operands are. */
1102 TREE_SIDE_EFFECTS (t) = 1;
1103 break;
1104
1105 default:
1106 break;
1107 }
1108 break;
1109
1110 default:
1111 /* Other classes need no special treatment. */
1112 break;
1113 }
1114
1115 return t;
1116 }
1117 \f
1118 /* Return a new node with the same contents as NODE except that its
1119 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1120
1121 tree
1122 copy_node_stat (tree node MEM_STAT_DECL)
1123 {
1124 tree t;
1125 enum tree_code code = TREE_CODE (node);
1126 size_t length;
1127
1128 gcc_assert (code != STATEMENT_LIST);
1129
1130 length = tree_size (node);
1131 record_node_allocation_statistics (code, length);
1132 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1133 memcpy (t, node, length);
1134
1135 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1136 TREE_CHAIN (t) = 0;
1137 TREE_ASM_WRITTEN (t) = 0;
1138 TREE_VISITED (t) = 0;
1139
1140 if (TREE_CODE_CLASS (code) == tcc_declaration)
1141 {
1142 if (code == DEBUG_EXPR_DECL)
1143 DECL_UID (t) = --next_debug_decl_uid;
1144 else
1145 {
1146 DECL_UID (t) = allocate_decl_uid ();
1147 if (DECL_PT_UID_SET_P (node))
1148 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1149 }
1150 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1151 && DECL_HAS_VALUE_EXPR_P (node))
1152 {
1153 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1154 DECL_HAS_VALUE_EXPR_P (t) = 1;
1155 }
 1156       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1157 if (TREE_CODE (node) == VAR_DECL)
1158 {
1159 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1160 t->decl_with_vis.symtab_node = NULL;
1161 }
1162 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1163 {
1164 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1165 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1166 }
1167 if (TREE_CODE (node) == FUNCTION_DECL)
1168 {
1169 DECL_STRUCT_FUNCTION (t) = NULL;
1170 t->decl_with_vis.symtab_node = NULL;
1171 }
1172 }
1173 else if (TREE_CODE_CLASS (code) == tcc_type)
1174 {
1175 TYPE_UID (t) = next_type_uid++;
1176 /* The following is so that the debug code for
1177 the copy is different from the original type.
1178 The two statements usually duplicate each other
1179 (because they clear fields of the same union),
1180 but the optimizer should catch that. */
1181 TYPE_SYMTAB_POINTER (t) = 0;
1182 TYPE_SYMTAB_ADDRESS (t) = 0;
1183
1184 /* Do not copy the values cache. */
1185 if (TYPE_CACHED_VALUES_P (t))
1186 {
1187 TYPE_CACHED_VALUES_P (t) = 0;
1188 TYPE_CACHED_VALUES (t) = NULL_TREE;
1189 }
1190 }
1191
1192 return t;
1193 }
1194
1195 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1196 For example, this can copy a list made of TREE_LIST nodes. */
1197
1198 tree
1199 copy_list (tree list)
1200 {
1201 tree head;
1202 tree prev, next;
1203
1204 if (list == 0)
1205 return 0;
1206
1207 head = prev = copy_node (list);
1208 next = TREE_CHAIN (list);
1209 while (next)
1210 {
1211 TREE_CHAIN (prev) = copy_node (next);
1212 prev = TREE_CHAIN (prev);
1213 next = TREE_CHAIN (next);
1214 }
1215 return head;
1216 }
1217
1218 \f
1219 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1220 INTEGER_CST with value CST and type TYPE. */
1221
1222 static unsigned int
1223 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1224 {
1225 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1226 /* We need an extra zero HWI if CST is an unsigned integer with its
1227 upper bit set, and if CST occupies a whole number of HWIs. */
1228 if (TYPE_UNSIGNED (type)
1229 && wi::neg_p (cst)
1230 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1231 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1232 return cst.get_len ();
1233 }
1234
1235 /* Return a new INTEGER_CST with value CST and type TYPE. */
1236
1237 static tree
1238 build_new_int_cst (tree type, const wide_int &cst)
1239 {
1240 unsigned int len = cst.get_len ();
1241 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1242 tree nt = make_int_cst (len, ext_len);
1243
1244 if (len < ext_len)
1245 {
1246 --ext_len;
1247 TREE_INT_CST_ELT (nt, ext_len) = 0;
1248 for (unsigned int i = len; i < ext_len; ++i)
1249 TREE_INT_CST_ELT (nt, i) = -1;
1250 }
1251 else if (TYPE_UNSIGNED (type)
1252 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1253 {
1254 len--;
1255 TREE_INT_CST_ELT (nt, len)
1256 = zext_hwi (cst.elt (len),
1257 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1258 }
1259
1260 for (unsigned int i = 0; i < len; i++)
1261 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1262 TREE_TYPE (nt) = type;
1263 return nt;
1264 }
1265
1266 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1267
1268 tree
1269 build_int_cst (tree type, HOST_WIDE_INT low)
1270 {
1271 /* Support legacy code. */
1272 if (!type)
1273 type = integer_type_node;
1274
1275 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1276 }
1277
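/* Create an INT_CST node of TYPE with value CST, treated as unsigned
   (i.e. zero extended to TYPE).  */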
1278 tree
1279 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1280 {
1281 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1282 }
1283
1284 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1285
1286 tree
1287 build_int_cst_type (tree type, HOST_WIDE_INT low)
1288 {
1289 gcc_assert (type);
1290 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1291 }
1292
 1293 /* Construct a tree of type TYPE with the value given by CST.  The
 1294    signedness of CST is assumed to be the same as the signedness of TYPE.  */
1295
1296 tree
1297 double_int_to_tree (tree type, double_int cst)
1298 {
1299 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1300 }
1301
1302 /* We force the wide_int CST to the range of the type TYPE by sign or
1303 zero extending it. OVERFLOWABLE indicates if we are interested in
1304 overflow of the value, when >0 we are only interested in signed
1305 overflow, for <0 we are interested in any overflow. OVERFLOWED
 1306     indicates whether overflow has already occurred.  We force the
 1307     value to be within the range of TYPE (by setting to 0 or 1 all
 1308     the bits outside the type's range).
 1309     We set TREE_OVERFLOW if
 1310 	OVERFLOWED is nonzero,
 1311 	or OVERFLOWABLE is >0 and signed overflow occurs
 1312 	or OVERFLOWABLE is <0 and any overflow occurs.
 1313     We return a new tree node for the extended wide_int.  The node
 1314     is shared if no overflow flags are set.  */
1315
1316
1317 tree
1318 force_fit_type (tree type, const wide_int_ref &cst,
1319 int overflowable, bool overflowed)
1320 {
1321 signop sign = TYPE_SIGN (type);
1322
1323 /* If we need to set overflow flags, return a new unshared node. */
1324 if (overflowed || !wi::fits_to_tree_p (cst, type))
1325 {
1326 if (overflowed
1327 || overflowable < 0
1328 || (overflowable > 0 && sign == SIGNED))
1329 {
1330 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1331 tree t = build_new_int_cst (type, tmp);
1332 TREE_OVERFLOW (t) = 1;
1333 return t;
1334 }
1335 }
1336
1337 /* Else build a shared node. */
1338 return wide_int_to_tree (type, cst);
1339 }
1340
 1341 /* These are the hash table functions for the hash table of shared
 1342    INTEGER_CST nodes.  */
1343
 1344 /* Return the hash code of X, an INTEGER_CST.  */
1345
1346 hashval_t
1347 int_cst_hasher::hash (tree x)
1348 {
1349 const_tree const t = x;
1350 hashval_t code = TYPE_UID (TREE_TYPE (t));
1351 int i;
1352
1353 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1354 code ^= TREE_INT_CST_ELT (t, i);
1355
1356 return code;
1357 }
1358
 1359 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
 1360    is the same as that given by Y, which is also an INTEGER_CST tree node.  */
1361
1362 bool
1363 int_cst_hasher::equal (tree x, tree y)
1364 {
1365 const_tree const xt = x;
1366 const_tree const yt = y;
1367
1368 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1369 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1370 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1371 return false;
1372
1373 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1374 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1375 return false;
1376
1377 return true;
1378 }
1379
1380 /* Create an INT_CST node of TYPE and value CST.
1381 The returned node is always shared. For small integers we use a
1382 per-type vector cache, for larger ones we use a single hash table.
1383 The value is extended from its precision according to the sign of
1384 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1385 the upper bits and ensures that hashing and value equality based
1386 upon the underlying HOST_WIDE_INTs works without masking. */
1387
1388 tree
1389 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1390 {
1391 tree t;
1392 int ix = -1;
1393 int limit = 0;
1394
1395 gcc_assert (type);
1396 unsigned int prec = TYPE_PRECISION (type);
1397 signop sgn = TYPE_SIGN (type);
1398
1399 /* Verify that everything is canonical. */
1400 int l = pcst.get_len ();
1401 if (l > 1)
1402 {
1403 if (pcst.elt (l - 1) == 0)
1404 gcc_checking_assert (pcst.elt (l - 2) < 0);
1405 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1406 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1407 }
1408
1409 wide_int cst = wide_int::from (pcst, prec, sgn);
1410 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1411
1412 if (ext_len == 1)
1413 {
1414 /* We just need to store a single HOST_WIDE_INT. */
1415 HOST_WIDE_INT hwi;
1416 if (TYPE_UNSIGNED (type))
1417 hwi = cst.to_uhwi ();
1418 else
1419 hwi = cst.to_shwi ();
1420
1421 switch (TREE_CODE (type))
1422 {
1423 case NULLPTR_TYPE:
1424 gcc_assert (hwi == 0);
1425 /* Fallthru. */
1426
1427 case POINTER_TYPE:
1428 case REFERENCE_TYPE:
1429 case POINTER_BOUNDS_TYPE:
1430 /* Cache NULL pointer and zero bounds. */
1431 if (hwi == 0)
1432 {
1433 limit = 1;
1434 ix = 0;
1435 }
1436 break;
1437
1438 case BOOLEAN_TYPE:
1439 /* Cache false or true. */
1440 limit = 2;
1441 if (hwi < 2)
1442 ix = hwi;
1443 break;
1444
1445 case INTEGER_TYPE:
1446 case OFFSET_TYPE:
1447 if (TYPE_SIGN (type) == UNSIGNED)
1448 {
1449 /* Cache [0, N). */
1450 limit = INTEGER_SHARE_LIMIT;
1451 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1452 ix = hwi;
1453 }
1454 else
1455 {
1456 /* Cache [-1, N). */
1457 limit = INTEGER_SHARE_LIMIT + 1;
1458 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1459 ix = hwi + 1;
1460 }
1461 break;
1462
1463 case ENUMERAL_TYPE:
1464 break;
1465
1466 default:
1467 gcc_unreachable ();
1468 }
1469
1470 if (ix >= 0)
1471 {
1472 /* Look for it in the type's vector of small shared ints. */
1473 if (!TYPE_CACHED_VALUES_P (type))
1474 {
1475 TYPE_CACHED_VALUES_P (type) = 1;
1476 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1477 }
1478
1479 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1480 if (t)
1481 /* Make sure no one is clobbering the shared constant. */
1482 gcc_checking_assert (TREE_TYPE (t) == type
1483 && TREE_INT_CST_NUNITS (t) == 1
1484 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1485 && TREE_INT_CST_EXT_NUNITS (t) == 1
1486 && TREE_INT_CST_ELT (t, 0) == hwi);
1487 else
1488 {
1489 /* Create a new shared int. */
1490 t = build_new_int_cst (type, cst);
1491 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1492 }
1493 }
1494 else
1495 {
1496 /* Use the cache of larger shared ints, using int_cst_node as
1497 a temporary. */
1498
1499 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1500 TREE_TYPE (int_cst_node) = type;
1501
1502 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1503 t = *slot;
1504 if (!t)
1505 {
1506 /* Insert this one into the hash table. */
1507 t = int_cst_node;
1508 *slot = t;
1509 /* Make a new node for next time round. */
1510 int_cst_node = make_int_cst (1, 1);
1511 }
1512 }
1513 }
1514 else
1515 {
1516 /* The value either hashes properly or we drop it on the floor
1517 for the gc to take care of. There will not be enough of them
1518 to worry about. */
1519
1520 tree nt = build_new_int_cst (type, cst);
1521 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1522 t = *slot;
1523 if (!t)
1524 {
1525 /* Insert this one into the hash table. */
1526 t = nt;
1527 *slot = t;
1528 }
1529 }
1530
1531 return t;
1532 }
1533
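/* Record the shareable INTEGER_CST T in the constant cache for its type:
   small values go into the type's TYPE_CACHED_VALUES vector, larger ones
   into the global hash table.  T must not have TREE_OVERFLOW set.  */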
1534 void
1535 cache_integer_cst (tree t)
1536 {
1537 tree type = TREE_TYPE (t);
1538 int ix = -1;
1539 int limit = 0;
1540 int prec = TYPE_PRECISION (type);
1541
1542 gcc_assert (!TREE_OVERFLOW (t));
1543
1544 switch (TREE_CODE (type))
1545 {
1546 case NULLPTR_TYPE:
1547 gcc_assert (integer_zerop (t));
1548 /* Fallthru. */
1549
1550 case POINTER_TYPE:
1551 case REFERENCE_TYPE:
1552 /* Cache NULL pointer. */
1553 if (integer_zerop (t))
1554 {
1555 limit = 1;
1556 ix = 0;
1557 }
1558 break;
1559
1560 case BOOLEAN_TYPE:
1561 /* Cache false or true. */
1562 limit = 2;
1563 if (wi::ltu_p (t, 2))
1564 ix = TREE_INT_CST_ELT (t, 0);
1565 break;
1566
1567 case INTEGER_TYPE:
1568 case OFFSET_TYPE:
1569 if (TYPE_UNSIGNED (type))
1570 {
1571 /* Cache 0..N */
1572 limit = INTEGER_SHARE_LIMIT;
1573
 1574 	  /* This is a little hokey, but if the prec is smaller than
1575 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1576 obvious test will not get the correct answer. */
1577 if (prec < HOST_BITS_PER_WIDE_INT)
1578 {
1579 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1580 ix = tree_to_uhwi (t);
1581 }
1582 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1583 ix = tree_to_uhwi (t);
1584 }
1585 else
1586 {
1587 /* Cache -1..N */
1588 limit = INTEGER_SHARE_LIMIT + 1;
1589
1590 if (integer_minus_onep (t))
1591 ix = 0;
1592 else if (!wi::neg_p (t))
1593 {
1594 if (prec < HOST_BITS_PER_WIDE_INT)
1595 {
1596 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1597 ix = tree_to_shwi (t) + 1;
1598 }
1599 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1600 ix = tree_to_shwi (t) + 1;
1601 }
1602 }
1603 break;
1604
1605 case ENUMERAL_TYPE:
1606 break;
1607
1608 default:
1609 gcc_unreachable ();
1610 }
1611
1612 if (ix >= 0)
1613 {
1614 /* Look for it in the type's vector of small shared ints. */
1615 if (!TYPE_CACHED_VALUES_P (type))
1616 {
1617 TYPE_CACHED_VALUES_P (type) = 1;
1618 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1619 }
1620
1621 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1622 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1623 }
1624 else
1625 {
1626 /* Use the cache of larger shared ints. */
1627 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1628 /* If there is already an entry for the number verify it's the
1629 same. */
1630 if (*slot)
1631 gcc_assert (wi::eq_p (tree (*slot), t));
1632 else
1633 /* Otherwise insert this one into the hash table. */
1634 *slot = t;
1635 }
1636 }
1637
1638
 1639 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1640 and the rest are zeros. */
1641
1642 tree
1643 build_low_bits_mask (tree type, unsigned bits)
1644 {
1645 gcc_assert (bits <= TYPE_PRECISION (type));
1646
1647 return wide_int_to_tree (type, wi::mask (bits, false,
1648 TYPE_PRECISION (type)));
1649 }
1650
 1651 /* Return true if X is an integer constant that can be expressed in (unsigned)
 1652    HOST_WIDE_INT without loss of precision.  */
1653
1654 bool
1655 cst_and_fits_in_hwi (const_tree x)
1656 {
1657 if (TREE_CODE (x) != INTEGER_CST)
1658 return false;
1659
1660 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1661 return false;
1662
1663 return TREE_INT_CST_NUNITS (x) == 1;
1664 }
1665
 1666 /* Build a newly constructed VECTOR_CST node of length LEN.  */
1667
1668 tree
1669 make_vector_stat (unsigned len MEM_STAT_DECL)
1670 {
1671 tree t;
1672 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1673
1674 record_node_allocation_statistics (VECTOR_CST, length);
1675
1676 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1677
1678 TREE_SET_CODE (t, VECTOR_CST);
1679 TREE_CONSTANT (t) = 1;
1680
1681 return t;
1682 }
1683
1684 /* Return a new VECTOR_CST node whose type is TYPE and whose values
 1685    are given by the array VALS.  */
1686
1687 tree
1688 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1689 {
1690 int over = 0;
1691 unsigned cnt = 0;
1692 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1693 TREE_TYPE (v) = type;
1694
1695 /* Iterate through elements and check for overflow. */
1696 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1697 {
1698 tree value = vals[cnt];
1699
1700 VECTOR_CST_ELT (v, cnt) = value;
1701
1702 /* Don't crash if we get an address constant. */
1703 if (!CONSTANT_CLASS_P (value))
1704 continue;
1705
1706 over |= TREE_OVERFLOW (value);
1707 }
1708
1709 TREE_OVERFLOW (v) = over;
1710 return v;
1711 }
1712
1713 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1714 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1715
1716 tree
1717 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1718 {
1719 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1720 unsigned HOST_WIDE_INT idx;
1721 tree value;
1722
1723 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1724 vec[idx] = value;
1725 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1726 vec[idx] = build_zero_cst (TREE_TYPE (type));
1727
1728 return build_vector (type, vec);
1729 }
1730
1731 /* Build a vector of type VECTYPE where all the elements are SCs. */
1732 tree
1733 build_vector_from_val (tree vectype, tree sc)
1734 {
1735 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1736
1737 if (sc == error_mark_node)
1738 return sc;
1739
1740 /* Verify that the vector type is suitable for SC. Note that there
1741 is some inconsistency in the type-system with respect to restrict
1742 qualifications of pointers. Vector types always have a main-variant
1743 element type and the qualification is applied to the vector-type.
1744 So TREE_TYPE (vector-type) does not return a properly qualified
1745 vector element-type. */
1746 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1747 TREE_TYPE (vectype)));
1748
1749 if (CONSTANT_CLASS_P (sc))
1750 {
1751 tree *v = XALLOCAVEC (tree, nunits);
1752 for (i = 0; i < nunits; ++i)
1753 v[i] = sc;
1754 return build_vector (vectype, v);
1755 }
1756 else
1757 {
1758 vec<constructor_elt, va_gc> *v;
1759 vec_alloc (v, nunits);
1760 for (i = 0; i < nunits; ++i)
1761 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1762 return build_constructor (vectype, v);
1763 }
1764 }
1765
1766 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1767 are in the vec pointed to by VALS. */
1768 tree
1769 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1770 {
1771 tree c = make_node (CONSTRUCTOR);
1772 unsigned int i;
1773 constructor_elt *elt;
1774 bool constant_p = true;
1775 bool side_effects_p = false;
1776
1777 TREE_TYPE (c) = type;
1778 CONSTRUCTOR_ELTS (c) = vals;
1779
1780 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1781 {
1782 /* Mostly ctors will have elts that don't have side-effects, so
1783 the usual case is to scan all the elements. Hence a single
1784 loop for both const and side effects, rather than one loop
1785 each (with early outs). */
1786 if (!TREE_CONSTANT (elt->value))
1787 constant_p = false;
1788 if (TREE_SIDE_EFFECTS (elt->value))
1789 side_effects_p = true;
1790 }
1791
1792 TREE_SIDE_EFFECTS (c) = side_effects_p;
1793 TREE_CONSTANT (c) = constant_p;
1794
1795 return c;
1796 }
1797
1798 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1799 INDEX and VALUE. */
1800 tree
1801 build_constructor_single (tree type, tree index, tree value)
1802 {
1803 vec<constructor_elt, va_gc> *v;
1804 constructor_elt elt = {index, value};
1805
1806 vec_alloc (v, 1);
1807 v->quick_push (elt);
1808
1809 return build_constructor (type, v);
1810 }
1811
1812
1813 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1814 are in a list pointed to by VALS. */
1815 tree
1816 build_constructor_from_list (tree type, tree vals)
1817 {
1818 tree t;
1819 vec<constructor_elt, va_gc> *v = NULL;
1820
1821 if (vals)
1822 {
1823 vec_alloc (v, list_length (vals));
1824 for (t = vals; t; t = TREE_CHAIN (t))
1825 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1826 }
1827
1828 return build_constructor (type, v);
1829 }
1830
1831 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1832 of elements, provided as index/value pairs. */
1833
1834 tree
1835 build_constructor_va (tree type, int nelts, ...)
1836 {
1837 vec<constructor_elt, va_gc> *v = NULL;
1838 va_list p;
1839
1840 va_start (p, nelts);
1841 vec_alloc (v, nelts);
1842 while (nelts--)
1843 {
1844 tree index = va_arg (p, tree);
1845 tree value = va_arg (p, tree);
1846 CONSTRUCTOR_APPEND_ELT (v, index, value);
1847 }
1848 va_end (p);
1849 return build_constructor (type, v);
1850 }
1851
1852 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1853
1854 tree
1855 build_fixed (tree type, FIXED_VALUE_TYPE f)
1856 {
1857 tree v;
1858 FIXED_VALUE_TYPE *fp;
1859
1860 v = make_node (FIXED_CST);
1861 fp = ggc_alloc<fixed_value> ();
1862 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1863
1864 TREE_TYPE (v) = type;
1865 TREE_FIXED_CST_PTR (v) = fp;
1866 return v;
1867 }
1868
1869 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1870
1871 tree
1872 build_real (tree type, REAL_VALUE_TYPE d)
1873 {
1874 tree v;
1875 REAL_VALUE_TYPE *dp;
1876 int overflow = 0;
1877
1878 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1879 Consider doing it via real_convert now. */
1880
1881 v = make_node (REAL_CST);
1882 dp = ggc_alloc<real_value> ();
1883 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1884
1885 TREE_TYPE (v) = type;
1886 TREE_REAL_CST_PTR (v) = dp;
1887 TREE_OVERFLOW (v) = overflow;
1888 return v;
1889 }
1890
 1891 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
 1892    converted using the mode of TYPE (or VOIDmode if TYPE is null).  */
1893
1894 REAL_VALUE_TYPE
1895 real_value_from_int_cst (const_tree type, const_tree i)
1896 {
1897 REAL_VALUE_TYPE d;
1898
1899 /* Clear all bits of the real value type so that we can later do
1900 bitwise comparisons to see if two values are the same. */
1901 memset (&d, 0, sizeof d);
1902
1903 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1904 TYPE_SIGN (TREE_TYPE (i)));
1905 return d;
1906 }
1907
1908 /* Given a tree representing an integer constant I, return a tree
1909 representing the same value as a floating-point constant of type TYPE. */
1910
1911 tree
1912 build_real_from_int_cst (tree type, const_tree i)
1913 {
1914 tree v;
1915 int overflow = TREE_OVERFLOW (i);
1916
1917 v = build_real (type, real_value_from_int_cst (type, i));
1918
1919 TREE_OVERFLOW (v) |= overflow;
1920 return v;
1921 }
1922
1923 /* Return a newly constructed STRING_CST node whose value is
1924 the LEN characters at STR.
1925 Note that for a C string literal, LEN should include the trailing NUL.
1926 The TREE_TYPE is not initialized. */
1927
1928 tree
1929 build_string (int len, const char *str)
1930 {
1931 tree s;
1932 size_t length;
1933
1934 /* Do not waste bytes provided by padding of struct tree_string. */
1935 length = len + offsetof (struct tree_string, str) + 1;
1936
1937 record_node_allocation_statistics (STRING_CST, length);
1938
1939 s = (tree) ggc_internal_alloc (length);
1940
1941 memset (s, 0, sizeof (struct tree_typed));
1942 TREE_SET_CODE (s, STRING_CST);
1943 TREE_CONSTANT (s) = 1;
1944 TREE_STRING_LENGTH (s) = len;
1945 memcpy (s->string.str, str, len);
1946 s->string.str[len] = '\0';
1947
1948 return s;
1949 }
1950
1951 /* Return a newly constructed COMPLEX_CST node whose value is
1952 specified by the real and imaginary parts REAL and IMAG.
1953 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1954 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1955
1956 tree
1957 build_complex (tree type, tree real, tree imag)
1958 {
1959 tree t = make_node (COMPLEX_CST);
1960
1961 TREE_REALPART (t) = real;
1962 TREE_IMAGPART (t) = imag;
1963 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1964 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1965 return t;
1966 }
1967
1968 /* Return a constant of arithmetic type TYPE which is the
1969 multiplicative identity of the set TYPE. */
1970
1971 tree
1972 build_one_cst (tree type)
1973 {
1974 switch (TREE_CODE (type))
1975 {
1976 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1977 case POINTER_TYPE: case REFERENCE_TYPE:
1978 case OFFSET_TYPE:
1979 return build_int_cst (type, 1);
1980
1981 case REAL_TYPE:
1982 return build_real (type, dconst1);
1983
1984 case FIXED_POINT_TYPE:
1985 /* We can only generate 1 for accum types. */
1986 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1987 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1988
1989 case VECTOR_TYPE:
1990 {
1991 tree scalar = build_one_cst (TREE_TYPE (type));
1992
1993 return build_vector_from_val (type, scalar);
1994 }
1995
1996 case COMPLEX_TYPE:
1997 return build_complex (type,
1998 build_one_cst (TREE_TYPE (type)),
1999 build_zero_cst (TREE_TYPE (type)));
2000
2001 default:
2002 gcc_unreachable ();
2003 }
2004 }
2005
2006 /* Return an integer of type TYPE containing all 1's in as much precision as
2007 it contains, or a complex or vector whose subparts are such integers. */
2008
2009 tree
2010 build_all_ones_cst (tree type)
2011 {
2012 if (TREE_CODE (type) == COMPLEX_TYPE)
2013 {
2014 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2015 return build_complex (type, scalar, scalar);
2016 }
2017 else
2018 return build_minus_one_cst (type);
2019 }
2020
2021 /* Return a constant of arithmetic type TYPE which is the
2022 opposite of the multiplicative identity of the set TYPE. */
2023
2024 tree
2025 build_minus_one_cst (tree type)
2026 {
2027 switch (TREE_CODE (type))
2028 {
2029 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2030 case POINTER_TYPE: case REFERENCE_TYPE:
2031 case OFFSET_TYPE:
2032 return build_int_cst (type, -1);
2033
2034 case REAL_TYPE:
2035 return build_real (type, dconstm1);
2036
2037 case FIXED_POINT_TYPE:
2038 /* We can only generate -1 for accum types. */
2039 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2040 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2041 TYPE_MODE (type)));
2042
2043 case VECTOR_TYPE:
2044 {
2045 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2046
2047 return build_vector_from_val (type, scalar);
2048 }
2049
2050 case COMPLEX_TYPE:
2051 return build_complex (type,
2052 build_minus_one_cst (TREE_TYPE (type)),
2053 build_zero_cst (TREE_TYPE (type)));
2054
2055 default:
2056 gcc_unreachable ();
2057 }
2058 }
2059
2060 /* Build 0 constant of type TYPE. This is used by constructor folding
2061 and thus the constant should be represented in memory by
2062 zero(es). */
2063
2064 tree
2065 build_zero_cst (tree type)
2066 {
2067 switch (TREE_CODE (type))
2068 {
2069 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2070 case POINTER_TYPE: case REFERENCE_TYPE:
2071 case OFFSET_TYPE: case NULLPTR_TYPE:
2072 return build_int_cst (type, 0);
2073
2074 case REAL_TYPE:
2075 return build_real (type, dconst0);
2076
2077 case FIXED_POINT_TYPE:
2078 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2079
2080 case VECTOR_TYPE:
2081 {
2082 tree scalar = build_zero_cst (TREE_TYPE (type));
2083
2084 return build_vector_from_val (type, scalar);
2085 }
2086
2087 case COMPLEX_TYPE:
2088 {
2089 tree zero = build_zero_cst (TREE_TYPE (type));
2090
2091 return build_complex (type, zero, zero);
2092 }
2093
2094 default:
2095 if (!AGGREGATE_TYPE_P (type))
2096 return fold_convert (type, integer_zero_node);
2097 return build_constructor (type, NULL);
2098 }
2099 }
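
/* Editorial sketch, not part of the original sources: how build_zero_cst
   behaves for a few representative types, assuming the usual global type
   nodes are initialized.

     build_zero_cst (integer_type_node);               INTEGER_CST 0
     build_zero_cst (double_type_node);                REAL_CST 0.0
     build_zero_cst (build_vector_type (float_type_node, 4));
                                                       VECTOR_CST of four 0.0s

   A RECORD_TYPE falls through to the default case and yields an empty
   CONSTRUCTOR, i.e. a zero-initialized aggregate.  */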
2100
2101
2102 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2103
2104 tree
2105 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2106 {
2107 tree t;
2108 size_t length = (offsetof (struct tree_binfo, base_binfos)
2109 + vec<tree, va_gc>::embedded_size (base_binfos));
2110
2111 record_node_allocation_statistics (TREE_BINFO, length);
2112
2113 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2114
2115 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2116
2117 TREE_SET_CODE (t, TREE_BINFO);
2118
2119 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2120
2121 return t;
2122 }
2123
2124 /* Create a CASE_LABEL_EXPR tree node and return it. */
2125
2126 tree
2127 build_case_label (tree low_value, tree high_value, tree label_decl)
2128 {
2129 tree t = make_node (CASE_LABEL_EXPR);
2130
2131 TREE_TYPE (t) = void_type_node;
2132 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2133
2134 CASE_LOW (t) = low_value;
2135 CASE_HIGH (t) = high_value;
2136 CASE_LABEL (t) = label_decl;
2137 CASE_CHAIN (t) = NULL_TREE;
2138
2139 return t;
2140 }
2141
2142 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2143 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2144 The latter determines the length of the HOST_WIDE_INT vector. */
2145
2146 tree
2147 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2148 {
2149 tree t;
2150 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2151 + sizeof (struct tree_int_cst));
2152
2153 gcc_assert (len);
2154 record_node_allocation_statistics (INTEGER_CST, length);
2155
2156 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2157
2158 TREE_SET_CODE (t, INTEGER_CST);
2159 TREE_INT_CST_NUNITS (t) = len;
2160 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2161 /* to_offset can only be applied to trees that are offset_int-sized
2162 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2163 must be exactly the precision of offset_int and so LEN is correct. */
2164 if (ext_len <= OFFSET_INT_ELTS)
2165 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2166 else
2167 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2168
2169 TREE_CONSTANT (t) = 1;
2170
2171 return t;
2172 }
2173
2174 /* Build a newly constructed TREE_VEC node of length LEN. */
2175
2176 tree
2177 make_tree_vec_stat (int len MEM_STAT_DECL)
2178 {
2179 tree t;
2180 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2181
2182 record_node_allocation_statistics (TREE_VEC, length);
2183
2184 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2185
2186 TREE_SET_CODE (t, TREE_VEC);
2187 TREE_VEC_LENGTH (t) = len;
2188
2189 return t;
2190 }
2191
2192 /* Grow a TREE_VEC node to new length LEN. */
2193
2194 tree
2195 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2196 {
2197 gcc_assert (TREE_CODE (v) == TREE_VEC);
2198
2199 int oldlen = TREE_VEC_LENGTH (v);
2200 gcc_assert (len > oldlen);
2201
2202 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2203 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2204
2205 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2206
2207 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2208
2209 TREE_VEC_LENGTH (v) = len;
2210
2211 return v;
2212 }
2213 \f
2214 /* Return 1 if EXPR is the integer constant zero or a complex constant
2215 of zero. */
2216
2217 int
2218 integer_zerop (const_tree expr)
2219 {
2220 STRIP_NOPS (expr);
2221
2222 switch (TREE_CODE (expr))
2223 {
2224 case INTEGER_CST:
2225 return wi::eq_p (expr, 0);
2226 case COMPLEX_CST:
2227 return (integer_zerop (TREE_REALPART (expr))
2228 && integer_zerop (TREE_IMAGPART (expr)));
2229 case VECTOR_CST:
2230 {
2231 unsigned i;
2232 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2233 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2234 return false;
2235 return true;
2236 }
2237 default:
2238 return false;
2239 }
2240 }
2241
2242 /* Return 1 if EXPR is the integer constant one or the corresponding
2243 complex constant. */
2244
2245 int
2246 integer_onep (const_tree expr)
2247 {
2248 STRIP_NOPS (expr);
2249
2250 switch (TREE_CODE (expr))
2251 {
2252 case INTEGER_CST:
2253 return wi::eq_p (wi::to_widest (expr), 1);
2254 case COMPLEX_CST:
2255 return (integer_onep (TREE_REALPART (expr))
2256 && integer_zerop (TREE_IMAGPART (expr)));
2257 case VECTOR_CST:
2258 {
2259 unsigned i;
2260 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2261 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2262 return false;
2263 return true;
2264 }
2265 default:
2266 return false;
2267 }
2268 }
2269
2270 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2271 return 1 if every piece is the integer constant one. */
2272
2273 int
2274 integer_each_onep (const_tree expr)
2275 {
2276 STRIP_NOPS (expr);
2277
2278 if (TREE_CODE (expr) == COMPLEX_CST)
2279 return (integer_onep (TREE_REALPART (expr))
2280 && integer_onep (TREE_IMAGPART (expr)));
2281 else
2282 return integer_onep (expr);
2283 }
2284
2285 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2286 it contains, or a complex or vector whose subparts are such integers. */
2287
2288 int
2289 integer_all_onesp (const_tree expr)
2290 {
2291 STRIP_NOPS (expr);
2292
2293 if (TREE_CODE (expr) == COMPLEX_CST
2294 && integer_all_onesp (TREE_REALPART (expr))
2295 && integer_all_onesp (TREE_IMAGPART (expr)))
2296 return 1;
2297
2298 else if (TREE_CODE (expr) == VECTOR_CST)
2299 {
2300 unsigned i;
2301 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2302 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2303 return 0;
2304 return 1;
2305 }
2306
2307 else if (TREE_CODE (expr) != INTEGER_CST)
2308 return 0;
2309
2310 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2311 }
2312
2313 /* Return 1 if EXPR is the integer constant minus one. */
2314
2315 int
2316 integer_minus_onep (const_tree expr)
2317 {
2318 STRIP_NOPS (expr);
2319
2320 if (TREE_CODE (expr) == COMPLEX_CST)
2321 return (integer_all_onesp (TREE_REALPART (expr))
2322 && integer_zerop (TREE_IMAGPART (expr)));
2323 else
2324 return integer_all_onesp (expr);
2325 }
2326
2327 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2328 one bit on). */
2329
2330 int
2331 integer_pow2p (const_tree expr)
2332 {
2333 STRIP_NOPS (expr);
2334
2335 if (TREE_CODE (expr) == COMPLEX_CST
2336 && integer_pow2p (TREE_REALPART (expr))
2337 && integer_zerop (TREE_IMAGPART (expr)))
2338 return 1;
2339
2340 if (TREE_CODE (expr) != INTEGER_CST)
2341 return 0;
2342
2343 return wi::popcount (expr) == 1;
2344 }
2345
2346 /* Return 1 if EXPR is an integer constant other than zero or a
2347 complex constant other than zero. */
2348
2349 int
2350 integer_nonzerop (const_tree expr)
2351 {
2352 STRIP_NOPS (expr);
2353
2354 return ((TREE_CODE (expr) == INTEGER_CST
2355 && !wi::eq_p (expr, 0))
2356 || (TREE_CODE (expr) == COMPLEX_CST
2357 && (integer_nonzerop (TREE_REALPART (expr))
2358 || integer_nonzerop (TREE_IMAGPART (expr)))));
2359 }
2360
2361 /* Return 1 if EXPR is the integer constant one. For vector,
2362 return 1 if every piece is the integer constant minus one
2363 (representing the value TRUE). */
2364
2365 int
2366 integer_truep (const_tree expr)
2367 {
2368 STRIP_NOPS (expr);
2369
2370 if (TREE_CODE (expr) == VECTOR_CST)
2371 return integer_all_onesp (expr);
2372 return integer_onep (expr);
2373 }
2374
2375 /* Return 1 if EXPR is the fixed-point constant zero. */
2376
2377 int
2378 fixed_zerop (const_tree expr)
2379 {
2380 return (TREE_CODE (expr) == FIXED_CST
2381 && TREE_FIXED_CST (expr).data.is_zero ());
2382 }
2383
2384 /* Return the base-2 logarithm of a tree node known to be a
2385 power of two (i.e. the exponent, not the value itself). */
2386
2387 int
2388 tree_log2 (const_tree expr)
2389 {
2390 STRIP_NOPS (expr);
2391
2392 if (TREE_CODE (expr) == COMPLEX_CST)
2393 return tree_log2 (TREE_REALPART (expr));
2394
2395 return wi::exact_log2 (expr);
2396 }
2397
2398 /* Similar, but return the largest integer Y such that 2 ** Y is less
2399 than or equal to EXPR. */
2400
2401 int
2402 tree_floor_log2 (const_tree expr)
2403 {
2404 STRIP_NOPS (expr);
2405
2406 if (TREE_CODE (expr) == COMPLEX_CST)
2407 return tree_log2 (TREE_REALPART (expr));
2408
2409 return wi::floor_log2 (expr);
2410 }
2411
2412 /* Return the number of known trailing zero bits in EXPR, or, if the value of
2413 EXPR is known to be zero, the precision of its type. */
2414
2415 unsigned int
2416 tree_ctz (const_tree expr)
2417 {
2418 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2419 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2420 return 0;
2421
2422 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2423 switch (TREE_CODE (expr))
2424 {
2425 case INTEGER_CST:
2426 ret1 = wi::ctz (expr);
2427 return MIN (ret1, prec);
2428 case SSA_NAME:
2429 ret1 = wi::ctz (get_nonzero_bits (expr));
2430 return MIN (ret1, prec);
2431 case PLUS_EXPR:
2432 case MINUS_EXPR:
2433 case BIT_IOR_EXPR:
2434 case BIT_XOR_EXPR:
2435 case MIN_EXPR:
2436 case MAX_EXPR:
2437 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2438 if (ret1 == 0)
2439 return ret1;
2440 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2441 return MIN (ret1, ret2);
2442 case POINTER_PLUS_EXPR:
2443 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2444 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2445 /* The second operand is sizetype, which could in theory be
2446 wider than the pointer's precision. Make sure we never
2447 return more than prec. */
2448 ret2 = MIN (ret2, prec);
2449 return MIN (ret1, ret2);
2450 case BIT_AND_EXPR:
2451 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2452 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2453 return MAX (ret1, ret2);
2454 case MULT_EXPR:
2455 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2456 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2457 return MIN (ret1 + ret2, prec);
2458 case LSHIFT_EXPR:
2459 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2460 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2461 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2462 {
2463 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2464 return MIN (ret1 + ret2, prec);
2465 }
2466 return ret1;
2467 case RSHIFT_EXPR:
2468 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2469 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2470 {
2471 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2472 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2473 if (ret1 > ret2)
2474 return ret1 - ret2;
2475 }
2476 return 0;
2477 case TRUNC_DIV_EXPR:
2478 case CEIL_DIV_EXPR:
2479 case FLOOR_DIV_EXPR:
2480 case ROUND_DIV_EXPR:
2481 case EXACT_DIV_EXPR:
2482 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2483 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2484 {
2485 int l = tree_log2 (TREE_OPERAND (expr, 1));
2486 if (l >= 0)
2487 {
2488 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2489 ret2 = l;
2490 if (ret1 > ret2)
2491 return ret1 - ret2;
2492 }
2493 }
2494 return 0;
2495 CASE_CONVERT:
2496 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2497 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2498 ret1 = prec;
2499 return MIN (ret1, prec);
2500 case SAVE_EXPR:
2501 return tree_ctz (TREE_OPERAND (expr, 0));
2502 case COND_EXPR:
2503 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2504 if (ret1 == 0)
2505 return 0;
2506 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2507 return MIN (ret1, ret2);
2508 case COMPOUND_EXPR:
2509 return tree_ctz (TREE_OPERAND (expr, 1));
2510 case ADDR_EXPR:
2511 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2512 if (ret1 > BITS_PER_UNIT)
2513 {
2514 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2515 return MIN (ret1, prec);
2516 }
2517 return 0;
2518 default:
2519 return 0;
2520 }
2521 }
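
/* Editorial note, not part of the original sources: a worked example of the
   recursion above.  For x * 8 + 16, where nothing is known about the SSA
   name x (so its own ctz is 0):

     tree_ctz (x * 8)      == MIN (0 + 3, prec) == 3   (MULT_EXPR case)
     tree_ctz (x * 8 + 16) == MIN (3, 4)        == 3   (PLUS_EXPR case)

   so the whole expression is known to be a multiple of 8.  */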
2522
2523 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2524 decimal float constants, so don't return 1 for them. */
2525
2526 int
2527 real_zerop (const_tree expr)
2528 {
2529 STRIP_NOPS (expr);
2530
2531 switch (TREE_CODE (expr))
2532 {
2533 case REAL_CST:
2534 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2535 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2536 case COMPLEX_CST:
2537 return real_zerop (TREE_REALPART (expr))
2538 && real_zerop (TREE_IMAGPART (expr));
2539 case VECTOR_CST:
2540 {
2541 unsigned i;
2542 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2543 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2544 return false;
2545 return true;
2546 }
2547 default:
2548 return false;
2549 }
2550 }
2551
2552 /* Return 1 if EXPR is the real constant one in real or complex form.
2553 Trailing zeroes matter for decimal float constants, so don't return
2554 1 for them. */
2555
2556 int
2557 real_onep (const_tree expr)
2558 {
2559 STRIP_NOPS (expr);
2560
2561 switch (TREE_CODE (expr))
2562 {
2563 case REAL_CST:
2564 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2565 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2566 case COMPLEX_CST:
2567 return real_onep (TREE_REALPART (expr))
2568 && real_zerop (TREE_IMAGPART (expr));
2569 case VECTOR_CST:
2570 {
2571 unsigned i;
2572 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2573 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2574 return false;
2575 return true;
2576 }
2577 default:
2578 return false;
2579 }
2580 }
2581
2582 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2583 matter for decimal float constants, so don't return 1 for them. */
2584
2585 int
2586 real_minus_onep (const_tree expr)
2587 {
2588 STRIP_NOPS (expr);
2589
2590 switch (TREE_CODE (expr))
2591 {
2592 case REAL_CST:
2593 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2594 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2595 case COMPLEX_CST:
2596 return real_minus_onep (TREE_REALPART (expr))
2597 && real_zerop (TREE_IMAGPART (expr));
2598 case VECTOR_CST:
2599 {
2600 unsigned i;
2601 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2602 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2603 return false;
2604 return true;
2605 }
2606 default:
2607 return false;
2608 }
2609 }
2610
2611 /* Nonzero if EXP is a constant or a cast of a constant. */
2612
2613 int
2614 really_constant_p (const_tree exp)
2615 {
2616 /* This is not quite the same as STRIP_NOPS. It does more. */
2617 while (CONVERT_EXPR_P (exp)
2618 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2619 exp = TREE_OPERAND (exp, 0);
2620 return TREE_CONSTANT (exp);
2621 }
2622 \f
2623 /* Return first list element whose TREE_VALUE is ELEM.
2624 Return 0 if ELEM is not in LIST. */
2625
2626 tree
2627 value_member (tree elem, tree list)
2628 {
2629 while (list)
2630 {
2631 if (elem == TREE_VALUE (list))
2632 return list;
2633 list = TREE_CHAIN (list);
2634 }
2635 return NULL_TREE;
2636 }
2637
2638 /* Return first list element whose TREE_PURPOSE is ELEM.
2639 Return 0 if ELEM is not in LIST. */
2640
2641 tree
2642 purpose_member (const_tree elem, tree list)
2643 {
2644 while (list)
2645 {
2646 if (elem == TREE_PURPOSE (list))
2647 return list;
2648 list = TREE_CHAIN (list);
2649 }
2650 return NULL_TREE;
2651 }
2652
2653 /* Return true if ELEM is in V. */
2654
2655 bool
2656 vec_member (const_tree elem, vec<tree, va_gc> *v)
2657 {
2658 unsigned ix;
2659 tree t;
2660 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2661 if (elem == t)
2662 return true;
2663 return false;
2664 }
2665
2666 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2667 NULL_TREE if the chain has fewer than IDX + 1 elements. */
2668
2669 tree
2670 chain_index (int idx, tree chain)
2671 {
2672 for (; chain && idx > 0; --idx)
2673 chain = TREE_CHAIN (chain);
2674 return chain;
2675 }
2676
2677 /* Return nonzero if ELEM is part of the chain CHAIN. */
2678
2679 int
2680 chain_member (const_tree elem, const_tree chain)
2681 {
2682 while (chain)
2683 {
2684 if (elem == chain)
2685 return 1;
2686 chain = DECL_CHAIN (chain);
2687 }
2688
2689 return 0;
2690 }
2691
2692 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2693 We expect a null pointer to mark the end of the chain.
2694 This is the Lisp primitive `length'. */
2695
2696 int
2697 list_length (const_tree t)
2698 {
2699 const_tree p = t;
2700 #ifdef ENABLE_TREE_CHECKING
2701 const_tree q = t;
2702 #endif
2703 int len = 0;
2704
2705 while (p)
2706 {
2707 p = TREE_CHAIN (p);
2708 #ifdef ENABLE_TREE_CHECKING
2709 if (len % 2)
2710 q = TREE_CHAIN (q);
2711 gcc_assert (p != q);
2712 #endif
2713 len++;
2714 }
2715
2716 return len;
2717 }
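
/* Editorial note, not part of the original sources: under
   ENABLE_TREE_CHECKING the loop above runs a second cursor Q at half the
   speed of P (Q only advances on every other iteration).  If the chain is
   circular, the faster cursor eventually catches up with the slower one and
   the gcc_assert fires instead of the loop running forever.  */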
2718
2719 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2720 UNION_TYPE TYPE, or NULL_TREE if none. */
2721
2722 tree
2723 first_field (const_tree type)
2724 {
2725 tree t = TYPE_FIELDS (type);
2726 while (t && TREE_CODE (t) != FIELD_DECL)
2727 t = TREE_CHAIN (t);
2728 return t;
2729 }
2730
2731 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2732 by modifying the last node in chain 1 to point to chain 2.
2733 This is the Lisp primitive `nconc'. */
2734
2735 tree
2736 chainon (tree op1, tree op2)
2737 {
2738 tree t1;
2739
2740 if (!op1)
2741 return op2;
2742 if (!op2)
2743 return op1;
2744
2745 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2746 continue;
2747 TREE_CHAIN (t1) = op2;
2748
2749 #ifdef ENABLE_TREE_CHECKING
2750 {
2751 tree t2;
2752 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2753 gcc_assert (t2 != t1);
2754 }
2755 #endif
2756
2757 return op1;
2758 }
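
/* Editorial sketch, not part of the original sources: a typical use of
   chainon to append one TREE_LIST chain to another, where T1 and T2 stand
   for some existing tree nodes.

     tree a = tree_cons (NULL_TREE, t1, NULL_TREE);    chain: t1
     tree b = tree_cons (NULL_TREE, t2, NULL_TREE);    chain: t2
     tree c = chainon (a, b);                          chain: t1 -> t2, c == a

   chainon modifies A in place (the TREE_CHAIN of its last node now points
   to B) rather than copying anything.  */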
2759
2760 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2761
2762 tree
2763 tree_last (tree chain)
2764 {
2765 tree next;
2766 if (chain)
2767 while ((next = TREE_CHAIN (chain)))
2768 chain = next;
2769 return chain;
2770 }
2771
2772 /* Reverse the order of elements in the chain T,
2773 and return the new head of the chain (old last element). */
2774
2775 tree
2776 nreverse (tree t)
2777 {
2778 tree prev = 0, decl, next;
2779 for (decl = t; decl; decl = next)
2780 {
2781 /* We shouldn't be using this function to reverse BLOCK chains; we
2782 have blocks_nreverse for that. */
2783 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2784 next = TREE_CHAIN (decl);
2785 TREE_CHAIN (decl) = prev;
2786 prev = decl;
2787 }
2788 return prev;
2789 }
2790 \f
2791 /* Return a newly created TREE_LIST node whose
2792 purpose and value fields are PARM and VALUE. */
2793
2794 tree
2795 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2796 {
2797 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2798 TREE_PURPOSE (t) = parm;
2799 TREE_VALUE (t) = value;
2800 return t;
2801 }
2802
2803 /* Build a chain of TREE_LIST nodes from a vector. */
2804
2805 tree
2806 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2807 {
2808 tree ret = NULL_TREE;
2809 tree *pp = &ret;
2810 unsigned int i;
2811 tree t;
2812 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2813 {
2814 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2815 pp = &TREE_CHAIN (*pp);
2816 }
2817 return ret;
2818 }
2819
2820 /* Return a newly created TREE_LIST node whose
2821 purpose and value fields are PURPOSE and VALUE
2822 and whose TREE_CHAIN is CHAIN. */
2823
2824 tree
2825 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2826 {
2827 tree node;
2828
2829 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2830 memset (node, 0, sizeof (struct tree_common));
2831
2832 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2833
2834 TREE_SET_CODE (node, TREE_LIST);
2835 TREE_CHAIN (node) = chain;
2836 TREE_PURPOSE (node) = purpose;
2837 TREE_VALUE (node) = value;
2838 return node;
2839 }
2840
2841 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2842 trees. */
2843
2844 vec<tree, va_gc> *
2845 ctor_to_vec (tree ctor)
2846 {
2847 vec<tree, va_gc> *vec;
2848 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2849 unsigned int ix;
2850 tree val;
2851
2852 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2853 vec->quick_push (val);
2854
2855 return vec;
2856 }
2857 \f
2858 /* Return the size nominally occupied by an object of type TYPE
2859 when it resides in memory. The value is measured in units of bytes,
2860 and its data type is that normally used for type sizes
2861 (which is the first type created by make_signed_type or
2862 make_unsigned_type). */
2863
2864 tree
2865 size_in_bytes (const_tree type)
2866 {
2867 tree t;
2868
2869 if (type == error_mark_node)
2870 return integer_zero_node;
2871
2872 type = TYPE_MAIN_VARIANT (type);
2873 t = TYPE_SIZE_UNIT (type);
2874
2875 if (t == 0)
2876 {
2877 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2878 return size_zero_node;
2879 }
2880
2881 return t;
2882 }
2883
2884 /* Return the size of TYPE (in bytes) as a wide integer
2885 or return -1 if the size can vary or is larger than an integer. */
2886
2887 HOST_WIDE_INT
2888 int_size_in_bytes (const_tree type)
2889 {
2890 tree t;
2891
2892 if (type == error_mark_node)
2893 return 0;
2894
2895 type = TYPE_MAIN_VARIANT (type);
2896 t = TYPE_SIZE_UNIT (type);
2897
2898 if (t && tree_fits_uhwi_p (t))
2899 return TREE_INT_CST_LOW (t);
2900 else
2901 return -1;
2902 }
2903
2904 /* Return the maximum size of TYPE (in bytes) as a wide integer
2905 or return -1 if the size can vary or is larger than an integer. */
2906
2907 HOST_WIDE_INT
2908 max_int_size_in_bytes (const_tree type)
2909 {
2910 HOST_WIDE_INT size = -1;
2911 tree size_tree;
2912
2913 /* If this is an array type, check for a possible MAX_SIZE attached. */
2914
2915 if (TREE_CODE (type) == ARRAY_TYPE)
2916 {
2917 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2918
2919 if (size_tree && tree_fits_uhwi_p (size_tree))
2920 size = tree_to_uhwi (size_tree);
2921 }
2922
2923 /* If we still haven't been able to get a size, see if the language
2924 can compute a maximum size. */
2925
2926 if (size == -1)
2927 {
2928 size_tree = lang_hooks.types.max_size (type);
2929
2930 if (size_tree && tree_fits_uhwi_p (size_tree))
2931 size = tree_to_uhwi (size_tree);
2932 }
2933
2934 return size;
2935 }
2936 \f
2937 /* Return the bit position of FIELD, in bits from the start of the record.
2938 This is a tree of type bitsizetype. */
2939
2940 tree
2941 bit_position (const_tree field)
2942 {
2943 return bit_from_pos (DECL_FIELD_OFFSET (field),
2944 DECL_FIELD_BIT_OFFSET (field));
2945 }
2946 \f
2947 /* Return the byte position of FIELD, in bytes from the start of the record.
2948 This is a tree of type sizetype. */
2949
2950 tree
2951 byte_position (const_tree field)
2952 {
2953 return byte_from_pos (DECL_FIELD_OFFSET (field),
2954 DECL_FIELD_BIT_OFFSET (field));
2955 }
2956
2957 /* Likewise, but return as an integer. It must be representable in
2958 that way (since it could be a signed value, we don't have the
2959 option of returning -1 like int_size_in_bytes can). */
2960
2961 HOST_WIDE_INT
2962 int_byte_position (const_tree field)
2963 {
2964 return tree_to_shwi (byte_position (field));
2965 }
2966 \f
2967 /* Return the strictest alignment, in bits, that T is known to have. */
2968
2969 unsigned int
2970 expr_align (const_tree t)
2971 {
2972 unsigned int align0, align1;
2973
2974 switch (TREE_CODE (t))
2975 {
2976 CASE_CONVERT: case NON_LVALUE_EXPR:
2977 /* If we have conversions, we know that the alignment of the
2978 object must meet each of the alignments of the types. */
2979 align0 = expr_align (TREE_OPERAND (t, 0));
2980 align1 = TYPE_ALIGN (TREE_TYPE (t));
2981 return MAX (align0, align1);
2982
2983 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2984 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2985 case CLEANUP_POINT_EXPR:
2986 /* These don't change the alignment of an object. */
2987 return expr_align (TREE_OPERAND (t, 0));
2988
2989 case COND_EXPR:
2990 /* The best we can do is say that the alignment is the least aligned
2991 of the two arms. */
2992 align0 = expr_align (TREE_OPERAND (t, 1));
2993 align1 = expr_align (TREE_OPERAND (t, 2));
2994 return MIN (align0, align1);
2995
2996 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2997 meaningfully; it's always 1. */
2998 case LABEL_DECL: case CONST_DECL:
2999 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3000 case FUNCTION_DECL:
3001 gcc_assert (DECL_ALIGN (t) != 0);
3002 return DECL_ALIGN (t);
3003
3004 default:
3005 break;
3006 }
3007
3008 /* Otherwise take the alignment from that of the type. */
3009 return TYPE_ALIGN (TREE_TYPE (t));
3010 }
3011 \f
3012 /* Return, as a tree node, the number of elements for TYPE (which is an
3013 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3014
3015 tree
3016 array_type_nelts (const_tree type)
3017 {
3018 tree index_type, min, max;
3019
3020 /* If they did it with unspecified bounds, then we should have already
3021 given an error about it before we got here. */
3022 if (! TYPE_DOMAIN (type))
3023 return error_mark_node;
3024
3025 index_type = TYPE_DOMAIN (type);
3026 min = TYPE_MIN_VALUE (index_type);
3027 max = TYPE_MAX_VALUE (index_type);
3028
3029 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3030 if (!max)
3031 return error_mark_node;
3032
3033 return (integer_zerop (min)
3034 ? max
3035 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3036 }
3037 \f
3038 /* If arg is static -- a reference to an object in static storage -- then
3039 return the object. This is not the same as the C meaning of `static'.
3040 If arg isn't static, return NULL. */
3041
3042 tree
3043 staticp (tree arg)
3044 {
3045 switch (TREE_CODE (arg))
3046 {
3047 case FUNCTION_DECL:
3048 /* Nested functions are static, even though taking their address will
3049 involve a trampoline as we unnest the nested function and create
3050 the trampoline on the tree level. */
3051 return arg;
3052
3053 case VAR_DECL:
3054 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3055 && ! DECL_THREAD_LOCAL_P (arg)
3056 && ! DECL_DLLIMPORT_P (arg)
3057 ? arg : NULL);
3058
3059 case CONST_DECL:
3060 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3061 ? arg : NULL);
3062
3063 case CONSTRUCTOR:
3064 return TREE_STATIC (arg) ? arg : NULL;
3065
3066 case LABEL_DECL:
3067 case STRING_CST:
3068 return arg;
3069
3070 case COMPONENT_REF:
3071 /* If the thing being referenced is not a field, then it is
3072 something language specific. */
3073 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3074
3075 /* If we are referencing a bitfield, we can't evaluate an
3076 ADDR_EXPR at compile time and so it isn't a constant. */
3077 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3078 return NULL;
3079
3080 return staticp (TREE_OPERAND (arg, 0));
3081
3082 case BIT_FIELD_REF:
3083 return NULL;
3084
3085 case INDIRECT_REF:
3086 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3087
3088 case ARRAY_REF:
3089 case ARRAY_RANGE_REF:
3090 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3091 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3092 return staticp (TREE_OPERAND (arg, 0));
3093 else
3094 return NULL;
3095
3096 case COMPOUND_LITERAL_EXPR:
3097 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3098
3099 default:
3100 return NULL;
3101 }
3102 }
3103
3104 \f
3105
3106
3107 /* Return whether OP is a DECL whose address is function-invariant. */
3108
3109 bool
3110 decl_address_invariant_p (const_tree op)
3111 {
3112 /* The conditions below are slightly less strict than the one in
3113 staticp. */
3114
3115 switch (TREE_CODE (op))
3116 {
3117 case PARM_DECL:
3118 case RESULT_DECL:
3119 case LABEL_DECL:
3120 case FUNCTION_DECL:
3121 return true;
3122
3123 case VAR_DECL:
3124 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3125 || DECL_THREAD_LOCAL_P (op)
3126 || DECL_CONTEXT (op) == current_function_decl
3127 || decl_function_context (op) == current_function_decl)
3128 return true;
3129 break;
3130
3131 case CONST_DECL:
3132 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3133 || decl_function_context (op) == current_function_decl)
3134 return true;
3135 break;
3136
3137 default:
3138 break;
3139 }
3140
3141 return false;
3142 }
3143
3144 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3145
3146 bool
3147 decl_address_ip_invariant_p (const_tree op)
3148 {
3149 /* The conditions below are slightly less strict than the one in
3150 staticp. */
3151
3152 switch (TREE_CODE (op))
3153 {
3154 case LABEL_DECL:
3155 case FUNCTION_DECL:
3156 case STRING_CST:
3157 return true;
3158
3159 case VAR_DECL:
3160 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3161 && !DECL_DLLIMPORT_P (op))
3162 || DECL_THREAD_LOCAL_P (op))
3163 return true;
3164 break;
3165
3166 case CONST_DECL:
3167 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3168 return true;
3169 break;
3170
3171 default:
3172 break;
3173 }
3174
3175 return false;
3176 }
3177
3178
3179 /* Return true if T is function-invariant (internal function, does
3180 not handle arithmetic; that's handled in skip_simple_arithmetic and
3181 tree_invariant_p). */
3182
3183 static bool tree_invariant_p (tree t);
3184
3185 static bool
3186 tree_invariant_p_1 (tree t)
3187 {
3188 tree op;
3189
3190 if (TREE_CONSTANT (t)
3191 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3192 return true;
3193
3194 switch (TREE_CODE (t))
3195 {
3196 case SAVE_EXPR:
3197 return true;
3198
3199 case ADDR_EXPR:
3200 op = TREE_OPERAND (t, 0);
3201 while (handled_component_p (op))
3202 {
3203 switch (TREE_CODE (op))
3204 {
3205 case ARRAY_REF:
3206 case ARRAY_RANGE_REF:
3207 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3208 || TREE_OPERAND (op, 2) != NULL_TREE
3209 || TREE_OPERAND (op, 3) != NULL_TREE)
3210 return false;
3211 break;
3212
3213 case COMPONENT_REF:
3214 if (TREE_OPERAND (op, 2) != NULL_TREE)
3215 return false;
3216 break;
3217
3218 default:;
3219 }
3220 op = TREE_OPERAND (op, 0);
3221 }
3222
3223 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3224
3225 default:
3226 break;
3227 }
3228
3229 return false;
3230 }
3231
3232 /* Return true if T is function-invariant. */
3233
3234 static bool
3235 tree_invariant_p (tree t)
3236 {
3237 tree inner = skip_simple_arithmetic (t);
3238 return tree_invariant_p_1 (inner);
3239 }
3240
3241 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3242 Do this to any expression which may be used in more than one place,
3243 but must be evaluated only once.
3244
3245 Normally, expand_expr would reevaluate the expression each time.
3246 Calling save_expr produces something that is evaluated and recorded
3247 the first time expand_expr is called on it. Subsequent calls to
3248 expand_expr just reuse the recorded value.
3249
3250 The call to expand_expr that generates code that actually computes
3251 the value is the first call *at compile time*. Subsequent calls
3252 *at compile time* generate code to use the saved value.
3253 This produces correct result provided that *at run time* control
3254 always flows through the insns made by the first expand_expr
3255 before reaching the other places where the save_expr was evaluated.
3256 You, the caller of save_expr, must make sure this is so.
3257
3258 Constants, and certain read-only nodes, are returned with no
3259 SAVE_EXPR because that is safe. Expressions containing placeholders
3260 are not touched; see tree.def for an explanation of what these
3261 are used for. */
3262
3263 tree
3264 save_expr (tree expr)
3265 {
3266 tree t = fold (expr);
3267 tree inner;
3268
3269 /* If the tree evaluates to a constant, then we don't want to hide that
3270 fact (i.e. this allows further folding, and direct checks for constants).
3271 However, a read-only object that has side effects cannot be bypassed.
3272 Since it is no problem to reevaluate literals, we just return the
3273 literal node. */
3274 inner = skip_simple_arithmetic (t);
3275 if (TREE_CODE (inner) == ERROR_MARK)
3276 return inner;
3277
3278 if (tree_invariant_p_1 (inner))
3279 return t;
3280
3281 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3282 it means that the size or offset of some field of an object depends on
3283 the value within another field.
3284
3285 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3286 and some variable since it would then need to be both evaluated once and
3287 evaluated more than once. Front ends must ensure this case cannot
3288 happen by surrounding any such subexpressions in their own SAVE_EXPR
3289 and forcing evaluation at the proper time. */
3290 if (contains_placeholder_p (inner))
3291 return t;
3292
3293 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3294 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3295
3296 /* This expression might be placed ahead of a jump to ensure that the
3297 value was computed on both sides of the jump. So make sure it isn't
3298 eliminated as dead. */
3299 TREE_SIDE_EFFECTS (t) = 1;
3300 return t;
3301 }
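
/* Editorial sketch, not part of the original sources: the typical pattern a
   front end uses with save_expr so that a subexpression appearing twice in
   a built tree is only evaluated once.  EXP and TYPE stand for an existing
   expression and its type.

     exp = save_expr (exp);
     tree square = build2 (MULT_EXPR, type, exp, exp);

   At expansion time EXP is computed once; the second operand simply reuses
   the recorded value.  */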
3302
3303 /* Look inside EXPR into any simple arithmetic operations. Return the
3304 outermost non-arithmetic or non-invariant node. */
3305
3306 tree
3307 skip_simple_arithmetic (tree expr)
3308 {
3309 /* We don't care about whether this can be used as an lvalue in this
3310 context. */
3311 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3312 expr = TREE_OPERAND (expr, 0);
3313
3314 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3315 a constant, it will be more efficient to not make another SAVE_EXPR since
3316 it will allow better simplification and GCSE will be able to merge the
3317 computations if they actually occur. */
3318 while (true)
3319 {
3320 if (UNARY_CLASS_P (expr))
3321 expr = TREE_OPERAND (expr, 0);
3322 else if (BINARY_CLASS_P (expr))
3323 {
3324 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3325 expr = TREE_OPERAND (expr, 0);
3326 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3327 expr = TREE_OPERAND (expr, 1);
3328 else
3329 break;
3330 }
3331 else
3332 break;
3333 }
3334
3335 return expr;
3336 }
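
/* Editorial note, not part of the original sources: for example, given
   (x + 1) * 2 with x a local VAR_DECL, the loop above first strips the
   invariant operand 2 (leaving x + 1) and then the invariant operand 1,
   returning x itself.  save_expr then tests invariance on x alone; if x is
   already invariant or a SAVE_EXPR, the surrounding arithmetic is left
   unwrapped, since redoing a cheap add and multiply beats introducing
   another temporary.  */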
3337
3338 /* Look inside EXPR into simple arithmetic operations involving constants.
3339 Return the outermost non-arithmetic or non-constant node. */
3340
3341 tree
3342 skip_simple_constant_arithmetic (tree expr)
3343 {
3344 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3345 expr = TREE_OPERAND (expr, 0);
3346
3347 while (true)
3348 {
3349 if (UNARY_CLASS_P (expr))
3350 expr = TREE_OPERAND (expr, 0);
3351 else if (BINARY_CLASS_P (expr))
3352 {
3353 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3354 expr = TREE_OPERAND (expr, 0);
3355 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3356 expr = TREE_OPERAND (expr, 1);
3357 else
3358 break;
3359 }
3360 else
3361 break;
3362 }
3363
3364 return expr;
3365 }
3366
3367 /* Return which tree structure is used by T. */
3368
3369 enum tree_node_structure_enum
3370 tree_node_structure (const_tree t)
3371 {
3372 const enum tree_code code = TREE_CODE (t);
3373 return tree_node_structure_for_code (code);
3374 }
3375
3376 /* Set various status flags when building a CALL_EXPR object T. */
3377
3378 static void
3379 process_call_operands (tree t)
3380 {
3381 bool side_effects = TREE_SIDE_EFFECTS (t);
3382 bool read_only = false;
3383 int i = call_expr_flags (t);
3384
3385 /* Calls have side-effects, except those to const or pure functions. */
3386 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3387 side_effects = true;
3388 /* Propagate TREE_READONLY of arguments for const functions. */
3389 if (i & ECF_CONST)
3390 read_only = true;
3391
3392 if (!side_effects || read_only)
3393 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3394 {
3395 tree op = TREE_OPERAND (t, i);
3396 if (op && TREE_SIDE_EFFECTS (op))
3397 side_effects = true;
3398 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3399 read_only = false;
3400 }
3401
3402 TREE_SIDE_EFFECTS (t) = side_effects;
3403 TREE_READONLY (t) = read_only;
3404 }
3405 \f
3406 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3407 size or offset that depends on a field within a record. */
3408
3409 bool
3410 contains_placeholder_p (const_tree exp)
3411 {
3412 enum tree_code code;
3413
3414 if (!exp)
3415 return 0;
3416
3417 code = TREE_CODE (exp);
3418 if (code == PLACEHOLDER_EXPR)
3419 return 1;
3420
3421 switch (TREE_CODE_CLASS (code))
3422 {
3423 case tcc_reference:
3424 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3425 position computations, since they will be converted into a
3426 WITH_RECORD_EXPR involving the reference, which we assume
3427 here will be valid. */
3428 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3429
3430 case tcc_exceptional:
3431 if (code == TREE_LIST)
3432 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3433 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3434 break;
3435
3436 case tcc_unary:
3437 case tcc_binary:
3438 case tcc_comparison:
3439 case tcc_expression:
3440 switch (code)
3441 {
3442 case COMPOUND_EXPR:
3443 /* Ignoring the first operand isn't quite right, but works best. */
3444 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3445
3446 case COND_EXPR:
3447 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3448 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3449 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3450
3451 case SAVE_EXPR:
3452 /* The save_expr function never wraps anything containing
3453 a PLACEHOLDER_EXPR. */
3454 return 0;
3455
3456 default:
3457 break;
3458 }
3459
3460 switch (TREE_CODE_LENGTH (code))
3461 {
3462 case 1:
3463 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3464 case 2:
3465 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3466 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3467 default:
3468 return 0;
3469 }
3470
3471 case tcc_vl_exp:
3472 switch (code)
3473 {
3474 case CALL_EXPR:
3475 {
3476 const_tree arg;
3477 const_call_expr_arg_iterator iter;
3478 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3479 if (CONTAINS_PLACEHOLDER_P (arg))
3480 return 1;
3481 return 0;
3482 }
3483 default:
3484 return 0;
3485 }
3486
3487 default:
3488 return 0;
3489 }
3490 return 0;
3491 }
3492
3493 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3494 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3495 field positions. */
3496
3497 static bool
3498 type_contains_placeholder_1 (const_tree type)
3499 {
3500 /* If the size contains a placeholder or the parent type (the component
3501 type in the case of arrays) involves a placeholder, this type does too. */
3502 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3503 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3504 || (!POINTER_TYPE_P (type)
3505 && TREE_TYPE (type)
3506 && type_contains_placeholder_p (TREE_TYPE (type))))
3507 return true;
3508
3509 /* Now do type-specific checks. Note that the last part of the check above
3510 greatly limits what we have to do below. */
3511 switch (TREE_CODE (type))
3512 {
3513 case VOID_TYPE:
3514 case POINTER_BOUNDS_TYPE:
3515 case COMPLEX_TYPE:
3516 case ENUMERAL_TYPE:
3517 case BOOLEAN_TYPE:
3518 case POINTER_TYPE:
3519 case OFFSET_TYPE:
3520 case REFERENCE_TYPE:
3521 case METHOD_TYPE:
3522 case FUNCTION_TYPE:
3523 case VECTOR_TYPE:
3524 case NULLPTR_TYPE:
3525 return false;
3526
3527 case INTEGER_TYPE:
3528 case REAL_TYPE:
3529 case FIXED_POINT_TYPE:
3530 /* Here we just check the bounds. */
3531 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3532 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3533
3534 case ARRAY_TYPE:
3535 /* We have already checked the component type above, so just check the
3536 domain type. */
3537 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3538
3539 case RECORD_TYPE:
3540 case UNION_TYPE:
3541 case QUAL_UNION_TYPE:
3542 {
3543 tree field;
3544
3545 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3546 if (TREE_CODE (field) == FIELD_DECL
3547 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3548 || (TREE_CODE (type) == QUAL_UNION_TYPE
3549 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3550 || type_contains_placeholder_p (TREE_TYPE (field))))
3551 return true;
3552
3553 return false;
3554 }
3555
3556 default:
3557 gcc_unreachable ();
3558 }
3559 }
3560
3561 /* Wrapper around above function used to cache its result. */
3562
3563 bool
3564 type_contains_placeholder_p (tree type)
3565 {
3566 bool result;
3567
3568 /* If the contains_placeholder_bits field has been initialized,
3569 then we know the answer. */
3570 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3571 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3572
3573 /* Indicate that we've seen this type node, and the answer is false.
3574 This is what we want to return if we run into recursion via fields. */
3575 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3576
3577 /* Compute the real value. */
3578 result = type_contains_placeholder_1 (type);
3579
3580 /* Store the real value. */
3581 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3582
3583 return result;
3584 }
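
/* Editorial note, not part of the original sources: the cache above uses
   TYPE_CONTAINS_PLACEHOLDER_INTERNAL as a three-valued field: 0 = not yet
   computed, 1 = computed and false, 2 = computed and true.  Seeding it with
   1 before the real computation means that if type_contains_placeholder_1
   recurses back into the same type through a field, the recursive query
   terminates and answers false.  */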
3585 \f
3586 /* Push tree EXP onto vector QUEUE if it is not already present. */
3587
3588 static void
3589 push_without_duplicates (tree exp, vec<tree> *queue)
3590 {
3591 unsigned int i;
3592 tree iter;
3593
3594 FOR_EACH_VEC_ELT (*queue, i, iter)
3595 if (simple_cst_equal (iter, exp) == 1)
3596 break;
3597
3598 if (!iter)
3599 queue->safe_push (exp);
3600 }
3601
3602 /* Given a tree EXP, find all occurrences of references to fields
3603 in a PLACEHOLDER_EXPR and place them in vector REFS without
3604 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3605 we assume here that EXP contains only arithmetic expressions
3606 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3607 argument list. */
3608
3609 void
3610 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3611 {
3612 enum tree_code code = TREE_CODE (exp);
3613 tree inner;
3614 int i;
3615
3616 /* We handle TREE_LIST and COMPONENT_REF separately. */
3617 if (code == TREE_LIST)
3618 {
3619 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3620 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3621 }
3622 else if (code == COMPONENT_REF)
3623 {
3624 for (inner = TREE_OPERAND (exp, 0);
3625 REFERENCE_CLASS_P (inner);
3626 inner = TREE_OPERAND (inner, 0))
3627 ;
3628
3629 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3630 push_without_duplicates (exp, refs);
3631 else
3632 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3633 }
3634 else
3635 switch (TREE_CODE_CLASS (code))
3636 {
3637 case tcc_constant:
3638 break;
3639
3640 case tcc_declaration:
3641 /* Variables allocated to static storage can stay. */
3642 if (!TREE_STATIC (exp))
3643 push_without_duplicates (exp, refs);
3644 break;
3645
3646 case tcc_expression:
3647 /* This is the pattern built in ada/make_aligning_type. */
3648 if (code == ADDR_EXPR
3649 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3650 {
3651 push_without_duplicates (exp, refs);
3652 break;
3653 }
3654
3655 /* Fall through... */
3656
3657 case tcc_exceptional:
3658 case tcc_unary:
3659 case tcc_binary:
3660 case tcc_comparison:
3661 case tcc_reference:
3662 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3663 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3664 break;
3665
3666 case tcc_vl_exp:
3667 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3668 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3669 break;
3670
3671 default:
3672 gcc_unreachable ();
3673 }
3674 }
3675
3676 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3677 return a tree with all occurrences of references to F in a
3678 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3679 CONST_DECLs. Note that we assume here that EXP contains only
3680 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3681 occurring only in their argument list. */
3682
3683 tree
3684 substitute_in_expr (tree exp, tree f, tree r)
3685 {
3686 enum tree_code code = TREE_CODE (exp);
3687 tree op0, op1, op2, op3;
3688 tree new_tree;
3689
3690 /* We handle TREE_LIST and COMPONENT_REF separately. */
3691 if (code == TREE_LIST)
3692 {
3693 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3694 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3695 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3696 return exp;
3697
3698 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3699 }
3700 else if (code == COMPONENT_REF)
3701 {
3702 tree inner;
3703
3704 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3705 and it is the right field, replace it with R. */
3706 for (inner = TREE_OPERAND (exp, 0);
3707 REFERENCE_CLASS_P (inner);
3708 inner = TREE_OPERAND (inner, 0))
3709 ;
3710
3711 /* The field. */
3712 op1 = TREE_OPERAND (exp, 1);
3713
3714 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3715 return r;
3716
3717 /* If this expression hasn't been completed yet, leave it alone. */
3718 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3719 return exp;
3720
3721 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3722 if (op0 == TREE_OPERAND (exp, 0))
3723 return exp;
3724
3725 new_tree
3726 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3727 }
3728 else
3729 switch (TREE_CODE_CLASS (code))
3730 {
3731 case tcc_constant:
3732 return exp;
3733
3734 case tcc_declaration:
3735 if (exp == f)
3736 return r;
3737 else
3738 return exp;
3739
3740 case tcc_expression:
3741 if (exp == f)
3742 return r;
3743
3744 /* Fall through... */
3745
3746 case tcc_exceptional:
3747 case tcc_unary:
3748 case tcc_binary:
3749 case tcc_comparison:
3750 case tcc_reference:
3751 switch (TREE_CODE_LENGTH (code))
3752 {
3753 case 0:
3754 return exp;
3755
3756 case 1:
3757 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3758 if (op0 == TREE_OPERAND (exp, 0))
3759 return exp;
3760
3761 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3762 break;
3763
3764 case 2:
3765 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3766 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3767
3768 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3769 return exp;
3770
3771 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3772 break;
3773
3774 case 3:
3775 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3776 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3777 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3778
3779 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3780 && op2 == TREE_OPERAND (exp, 2))
3781 return exp;
3782
3783 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3784 break;
3785
3786 case 4:
3787 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3788 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3789 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3790 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3791
3792 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3793 && op2 == TREE_OPERAND (exp, 2)
3794 && op3 == TREE_OPERAND (exp, 3))
3795 return exp;
3796
3797 new_tree
3798 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3799 break;
3800
3801 default:
3802 gcc_unreachable ();
3803 }
3804 break;
3805
3806 case tcc_vl_exp:
3807 {
3808 int i;
3809
3810 new_tree = NULL_TREE;
3811
3812 /* If we are trying to replace F with a constant, inline back
3813 functions which do nothing else than computing a value from
3814 the arguments they are passed. This makes it possible to
3815 fold partially or entirely the replacement expression. */
3816 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3817 {
3818 tree t = maybe_inline_call_in_expr (exp);
3819 if (t)
3820 return SUBSTITUTE_IN_EXPR (t, f, r);
3821 }
3822
3823 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3824 {
3825 tree op = TREE_OPERAND (exp, i);
3826 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3827 if (new_op != op)
3828 {
3829 if (!new_tree)
3830 new_tree = copy_node (exp);
3831 TREE_OPERAND (new_tree, i) = new_op;
3832 }
3833 }
3834
3835 if (new_tree)
3836 {
3837 new_tree = fold (new_tree);
3838 if (TREE_CODE (new_tree) == CALL_EXPR)
3839 process_call_operands (new_tree);
3840 }
3841 else
3842 return exp;
3843 }
3844 break;
3845
3846 default:
3847 gcc_unreachable ();
3848 }
3849
3850 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3851
3852 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3853 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3854
3855 return new_tree;
3856 }
3857
3858 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3859 for it within OBJ, a tree that is an object or a chain of references. */
3860
3861 tree
3862 substitute_placeholder_in_expr (tree exp, tree obj)
3863 {
3864 enum tree_code code = TREE_CODE (exp);
3865 tree op0, op1, op2, op3;
3866 tree new_tree;
3867
3868 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3869 in the chain of OBJ. */
3870 if (code == PLACEHOLDER_EXPR)
3871 {
3872 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3873 tree elt;
3874
3875 for (elt = obj; elt != 0;
3876 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3877 || TREE_CODE (elt) == COND_EXPR)
3878 ? TREE_OPERAND (elt, 1)
3879 : (REFERENCE_CLASS_P (elt)
3880 || UNARY_CLASS_P (elt)
3881 || BINARY_CLASS_P (elt)
3882 || VL_EXP_CLASS_P (elt)
3883 || EXPRESSION_CLASS_P (elt))
3884 ? TREE_OPERAND (elt, 0) : 0))
3885 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3886 return elt;
3887
3888 for (elt = obj; elt != 0;
3889 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3890 || TREE_CODE (elt) == COND_EXPR)
3891 ? TREE_OPERAND (elt, 1)
3892 : (REFERENCE_CLASS_P (elt)
3893 || UNARY_CLASS_P (elt)
3894 || BINARY_CLASS_P (elt)
3895 || VL_EXP_CLASS_P (elt)
3896 || EXPRESSION_CLASS_P (elt))
3897 ? TREE_OPERAND (elt, 0) : 0))
3898 if (POINTER_TYPE_P (TREE_TYPE (elt))
3899 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3900 == need_type))
3901 return fold_build1 (INDIRECT_REF, need_type, elt);
3902
3903 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3904 survives until RTL generation, there will be an error. */
3905 return exp;
3906 }
3907
3908 /* TREE_LIST is special because we need to look at TREE_VALUE
3909 and TREE_CHAIN, not TREE_OPERANDS. */
3910 else if (code == TREE_LIST)
3911 {
3912 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3913 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3914 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3915 return exp;
3916
3917 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3918 }
3919 else
3920 switch (TREE_CODE_CLASS (code))
3921 {
3922 case tcc_constant:
3923 case tcc_declaration:
3924 return exp;
3925
3926 case tcc_exceptional:
3927 case tcc_unary:
3928 case tcc_binary:
3929 case tcc_comparison:
3930 case tcc_expression:
3931 case tcc_reference:
3932 case tcc_statement:
3933 switch (TREE_CODE_LENGTH (code))
3934 {
3935 case 0:
3936 return exp;
3937
3938 case 1:
3939 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3940 if (op0 == TREE_OPERAND (exp, 0))
3941 return exp;
3942
3943 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3944 break;
3945
3946 case 2:
3947 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3948 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3949
3950 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3951 return exp;
3952
3953 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3954 break;
3955
3956 case 3:
3957 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3958 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3959 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3960
3961 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3962 && op2 == TREE_OPERAND (exp, 2))
3963 return exp;
3964
3965 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3966 break;
3967
3968 case 4:
3969 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3970 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3971 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3972 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3973
3974 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3975 && op2 == TREE_OPERAND (exp, 2)
3976 && op3 == TREE_OPERAND (exp, 3))
3977 return exp;
3978
3979 new_tree
3980 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3981 break;
3982
3983 default:
3984 gcc_unreachable ();
3985 }
3986 break;
3987
3988 case tcc_vl_exp:
3989 {
3990 int i;
3991
3992 new_tree = NULL_TREE;
3993
3994 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3995 {
3996 tree op = TREE_OPERAND (exp, i);
3997 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3998 if (new_op != op)
3999 {
4000 if (!new_tree)
4001 new_tree = copy_node (exp);
4002 TREE_OPERAND (new_tree, i) = new_op;
4003 }
4004 }
4005
4006 if (new_tree)
4007 {
4008 new_tree = fold (new_tree);
4009 if (TREE_CODE (new_tree) == CALL_EXPR)
4010 process_call_operands (new_tree);
4011 }
4012 else
4013 return exp;
4014 }
4015 break;
4016
4017 default:
4018 gcc_unreachable ();
4019 }
4020
4021 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4022
4023 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4024 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4025
4026 return new_tree;
4027 }
4028 \f
4029
4030 /* Subroutine of stabilize_reference; this is called for subtrees of
4031 references. Any expression with side-effects must be put in a SAVE_EXPR
4032 to ensure that it is only evaluated once.
4033
4034 We don't put SAVE_EXPR nodes around everything, because assigning very
4035 simple expressions to temporaries causes us to miss good opportunities
4036 for optimizations. Among other things, the opportunity to fold in the
4037 addition of a constant into an addressing mode often gets lost, e.g.
4038 "y[i+1] += x;". In general, we take the approach that we should not make
4039 an assignment unless we are forced into it - i.e., that any non-side effect
4040 operator should be allowed, and that cse should take care of coalescing
4041 multiple utterances of the same expression should that prove fruitful. */
4042
4043 static tree
4044 stabilize_reference_1 (tree e)
4045 {
4046 tree result;
4047 enum tree_code code = TREE_CODE (e);
4048
4049 /* We cannot ignore const expressions because it might be a reference
4050 to a const array but whose index contains side-effects. But we can
4051 ignore things that are actual constant or that already have been
4052 handled by this function. */
4053
4054 if (tree_invariant_p (e))
4055 return e;
4056
4057 switch (TREE_CODE_CLASS (code))
4058 {
4059 case tcc_exceptional:
4060 case tcc_type:
4061 case tcc_declaration:
4062 case tcc_comparison:
4063 case tcc_statement:
4064 case tcc_expression:
4065 case tcc_reference:
4066 case tcc_vl_exp:
4067 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4068 so that it will only be evaluated once. */
4069 /* The reference (r) and comparison (<) classes could be handled as
4070 below, but it is generally faster to only evaluate them once. */
4071 if (TREE_SIDE_EFFECTS (e))
4072 return save_expr (e);
4073 return e;
4074
4075 case tcc_constant:
4076 /* Constants need no processing. In fact, we should never reach
4077 here. */
4078 return e;
4079
4080 case tcc_binary:
4081 /* Division is slow and tends to be compiled with jumps,
4082 especially the division by powers of 2 that is often
4083 found inside of an array reference. So do it just once. */
4084 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4085 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4086 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4087 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4088 return save_expr (e);
4089 /* Recursively stabilize each operand. */
4090 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4091 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4092 break;
4093
4094 case tcc_unary:
4095 /* Recursively stabilize each operand. */
4096 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4097 break;
4098
4099 default:
4100 gcc_unreachable ();
4101 }
4102
4103 TREE_TYPE (result) = TREE_TYPE (e);
4104 TREE_READONLY (result) = TREE_READONLY (e);
4105 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4106 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4107
4108 return result;
4109 }
4110
4111 /* Stabilize a reference so that we can use it any number of times
4112 without causing its operands to be evaluated more than once.
4113 Returns the stabilized reference. This works by means of save_expr,
4114 so see the caveats in the comments about save_expr.
4115
4116 Also allows conversion expressions whose operands are references.
4117 Any other kind of expression is returned unchanged. */
4118
4119 tree
4120 stabilize_reference (tree ref)
4121 {
4122 tree result;
4123 enum tree_code code = TREE_CODE (ref);
4124
4125 switch (code)
4126 {
4127 case VAR_DECL:
4128 case PARM_DECL:
4129 case RESULT_DECL:
4130 /* No action is needed in this case. */
4131 return ref;
4132
4133 CASE_CONVERT:
4134 case FLOAT_EXPR:
4135 case FIX_TRUNC_EXPR:
4136 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4137 break;
4138
4139 case INDIRECT_REF:
4140 result = build_nt (INDIRECT_REF,
4141 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4142 break;
4143
4144 case COMPONENT_REF:
4145 result = build_nt (COMPONENT_REF,
4146 stabilize_reference (TREE_OPERAND (ref, 0)),
4147 TREE_OPERAND (ref, 1), NULL_TREE);
4148 break;
4149
4150 case BIT_FIELD_REF:
4151 result = build_nt (BIT_FIELD_REF,
4152 stabilize_reference (TREE_OPERAND (ref, 0)),
4153 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4154 break;
4155
4156 case ARRAY_REF:
4157 result = build_nt (ARRAY_REF,
4158 stabilize_reference (TREE_OPERAND (ref, 0)),
4159 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4160 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4161 break;
4162
4163 case ARRAY_RANGE_REF:
4164 result = build_nt (ARRAY_RANGE_REF,
4165 stabilize_reference (TREE_OPERAND (ref, 0)),
4166 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4167 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4168 break;
4169
4170 case COMPOUND_EXPR:
4171 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4172 it wouldn't be ignored. This matters when dealing with
4173 volatiles. */
4174 return stabilize_reference_1 (ref);
4175
4176 /* If arg isn't a kind of lvalue we recognize, make no change.
4177 Caller should recognize the error for an invalid lvalue. */
4178 default:
4179 return ref;
4180
4181 case ERROR_MARK:
4182 return error_mark_node;
4183 }
4184
4185 TREE_TYPE (result) = TREE_TYPE (ref);
4186 TREE_READONLY (result) = TREE_READONLY (ref);
4187 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4188 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4189
4190 return result;
4191 }
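/* Illustrative sketch, not part of the original sources: a caller that must
   read and write the same lvalue, e.g. when expanding something like
   "a[i++] |= mask", would typically stabilize the reference first so the
   side-effecting index is evaluated exactly once.  The helper below and its
   arguments are hypothetical.

     static tree
     expand_or_assign (tree lhs, tree mask)
     {
       tree ref = stabilize_reference (lhs);
       tree rhs = build2 (BIT_IOR_EXPR, TREE_TYPE (ref), ref, mask);
       return build2 (MODIFY_EXPR, TREE_TYPE (ref), ref, rhs);
     }

   Both uses of REF share the SAVE_EXPRs created for the array index, so
   "i++" in the example is evaluated only once.  */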
4192 \f
4193 /* Low-level constructors for expressions. */
4194
4195 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4196 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4197
4198 void
4199 recompute_tree_invariant_for_addr_expr (tree t)
4200 {
4201 tree node;
4202 bool tc = true, se = false;
4203
4204 /* We start out assuming this address is both invariant and constant and
4205 has no side effects. Now go down any handled components and see if
4206 any of them involve offsets that are either non-constant or non-invariant.
4207 Also check for side-effects.
4208
4209 ??? Note that this code makes no attempt to deal with the case where
4210 taking the address of something causes a copy due to misalignment. */
4211
4212 #define UPDATE_FLAGS(NODE) \
4213 do { tree _node = (NODE); \
4214 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4215 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4216
4217 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4218 node = TREE_OPERAND (node, 0))
4219 {
4220 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4221 array reference (probably made temporarily by the G++ front end),
4222 so ignore all the operands. */
4223 if ((TREE_CODE (node) == ARRAY_REF
4224 || TREE_CODE (node) == ARRAY_RANGE_REF)
4225 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4226 {
4227 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4228 if (TREE_OPERAND (node, 2))
4229 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4230 if (TREE_OPERAND (node, 3))
4231 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4232 }
4233 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4234 FIELD_DECL, apparently. The G++ front end can put something else
4235 there, at least temporarily. */
4236 else if (TREE_CODE (node) == COMPONENT_REF
4237 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4238 {
4239 if (TREE_OPERAND (node, 2))
4240 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4241 }
4242 }
4243
4244 node = lang_hooks.expr_to_decl (node, &tc, &se);
4245
4246 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4247 the address, since &(*a)->b is a form of addition. If it's a constant, the
4248 address is constant too. If it's a decl, its address is constant if the
4249 decl is static. Everything else is not constant and, furthermore,
4250 taking the address of a volatile variable is not volatile. */
4251 if (TREE_CODE (node) == INDIRECT_REF
4252 || TREE_CODE (node) == MEM_REF)
4253 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4254 else if (CONSTANT_CLASS_P (node))
4255 ;
4256 else if (DECL_P (node))
4257 tc &= (staticp (node) != NULL_TREE);
4258 else
4259 {
4260 tc = false;
4261 se |= TREE_SIDE_EFFECTS (node);
4262 }
4263
4264
4265 TREE_CONSTANT (t) = tc;
4266 TREE_SIDE_EFFECTS (t) = se;
4267 #undef UPDATE_FLAGS
4268 }
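/* Illustrative sketch, not part of the original sources: build1 with
   ADDR_EXPR already invokes the routine above, so it is only needed when a
   caller rewrites the operand of an existing ADDR_EXPR in place.  ADDR and
   NEW_BASE are hypothetical trees.

     TREE_OPERAND (addr, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (addr);

   After the call, TREE_CONSTANT and TREE_SIDE_EFFECTS on ADDR reflect the
   new operand.  */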
4269
4270 /* Build an expression of code CODE, data type TYPE, and operands as
4271 specified. Expressions and reference nodes can be created this way.
4272 Constants, decls, types and misc nodes cannot be.
4273
4274 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4275 enough for all extant tree codes. */
4276
4277 tree
4278 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4279 {
4280 tree t;
4281
4282 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4283
4284 t = make_node_stat (code PASS_MEM_STAT);
4285 TREE_TYPE (t) = tt;
4286
4287 return t;
4288 }
4289
4290 tree
4291 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4292 {
4293 int length = sizeof (struct tree_exp);
4294 tree t;
4295
4296 record_node_allocation_statistics (code, length);
4297
4298 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4299
4300 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4301
4302 memset (t, 0, sizeof (struct tree_common));
4303
4304 TREE_SET_CODE (t, code);
4305
4306 TREE_TYPE (t) = type;
4307 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4308 TREE_OPERAND (t, 0) = node;
4309 if (node && !TYPE_P (node))
4310 {
4311 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4312 TREE_READONLY (t) = TREE_READONLY (node);
4313 }
4314
4315 if (TREE_CODE_CLASS (code) == tcc_statement)
4316 TREE_SIDE_EFFECTS (t) = 1;
4317 else switch (code)
4318 {
4319 case VA_ARG_EXPR:
4320 /* All of these have side-effects, no matter what their
4321 operands are. */
4322 TREE_SIDE_EFFECTS (t) = 1;
4323 TREE_READONLY (t) = 0;
4324 break;
4325
4326 case INDIRECT_REF:
4327 /* Whether a dereference is readonly has nothing to do with whether
4328 its operand is readonly. */
4329 TREE_READONLY (t) = 0;
4330 break;
4331
4332 case ADDR_EXPR:
4333 if (node)
4334 recompute_tree_invariant_for_addr_expr (t);
4335 break;
4336
4337 default:
4338 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4339 && node && !TYPE_P (node)
4340 && TREE_CONSTANT (node))
4341 TREE_CONSTANT (t) = 1;
4342 if (TREE_CODE_CLASS (code) == tcc_reference
4343 && node && TREE_THIS_VOLATILE (node))
4344 TREE_THIS_VOLATILE (t) = 1;
4345 break;
4346 }
4347
4348 return t;
4349 }
4350
4351 #define PROCESS_ARG(N) \
4352 do { \
4353 TREE_OPERAND (t, N) = arg##N; \
4354 if (arg##N &&!TYPE_P (arg##N)) \
4355 { \
4356 if (TREE_SIDE_EFFECTS (arg##N)) \
4357 side_effects = 1; \
4358 if (!TREE_READONLY (arg##N) \
4359 && !CONSTANT_CLASS_P (arg##N)) \
4360 (void) (read_only = 0); \
4361 if (!TREE_CONSTANT (arg##N)) \
4362 (void) (constant = 0); \
4363 } \
4364 } while (0)
4365
4366 tree
4367 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4368 {
4369 bool constant, read_only, side_effects;
4370 tree t;
4371
4372 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4373
4374 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4375 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4376 /* When sizetype precision doesn't match that of pointers
4377 we need to be able to build explicit extensions or truncations
4378 of the offset argument. */
4379 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4380 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4381 && TREE_CODE (arg1) == INTEGER_CST);
4382
4383 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4384 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4385 && ptrofftype_p (TREE_TYPE (arg1)));
4386
4387 t = make_node_stat (code PASS_MEM_STAT);
4388 TREE_TYPE (t) = tt;
4389
4390 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4391 result based on those same flags for the arguments. But if the
4392 arguments aren't really even `tree' expressions, we shouldn't be trying
4393 to do this. */
4394
4395 /* Expressions without side effects may be constant if their
4396 arguments are as well. */
4397 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4398 || TREE_CODE_CLASS (code) == tcc_binary);
4399 read_only = 1;
4400 side_effects = TREE_SIDE_EFFECTS (t);
4401
4402 PROCESS_ARG (0);
4403 PROCESS_ARG (1);
4404
4405 TREE_SIDE_EFFECTS (t) = side_effects;
4406 if (code == MEM_REF)
4407 {
4408 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4409 {
4410 tree o = TREE_OPERAND (arg0, 0);
4411 TREE_READONLY (t) = TREE_READONLY (o);
4412 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4413 }
4414 }
4415 else
4416 {
4417 TREE_READONLY (t) = read_only;
4418 TREE_CONSTANT (t) = constant;
4419 TREE_THIS_VOLATILE (t)
4420 = (TREE_CODE_CLASS (code) == tcc_reference
4421 && arg0 && TREE_THIS_VOLATILE (arg0));
4422 }
4423
4424 return t;
4425 }
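/* Illustrative sketch, not part of the original sources: the
   POINTER_PLUS_EXPR assertion above requires the offset operand to have a
   sizetype-compatible type, so callers normally convert first.  PTR and
   BYTES are hypothetical trees of pointer and integer type.

     tree off = fold_convert (sizetype, bytes);
     tree addr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
 */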
4426
4427
4428 tree
4429 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4430 tree arg2 MEM_STAT_DECL)
4431 {
4432 bool constant, read_only, side_effects;
4433 tree t;
4434
4435 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4436 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4437
4438 t = make_node_stat (code PASS_MEM_STAT);
4439 TREE_TYPE (t) = tt;
4440
4441 read_only = 1;
4442
4443 /* As a special exception, if COND_EXPR has NULL branches, we
4444 assume that it is a gimple statement and always consider
4445 it to have side effects. */
4446 if (code == COND_EXPR
4447 && tt == void_type_node
4448 && arg1 == NULL_TREE
4449 && arg2 == NULL_TREE)
4450 side_effects = true;
4451 else
4452 side_effects = TREE_SIDE_EFFECTS (t);
4453
4454 PROCESS_ARG (0);
4455 PROCESS_ARG (1);
4456 PROCESS_ARG (2);
4457
4458 if (code == COND_EXPR)
4459 TREE_READONLY (t) = read_only;
4460
4461 TREE_SIDE_EFFECTS (t) = side_effects;
4462 TREE_THIS_VOLATILE (t)
4463 = (TREE_CODE_CLASS (code) == tcc_reference
4464 && arg0 && TREE_THIS_VOLATILE (arg0));
4465
4466 return t;
4467 }
4468
4469 tree
4470 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4471 tree arg2, tree arg3 MEM_STAT_DECL)
4472 {
4473 bool constant, read_only, side_effects;
4474 tree t;
4475
4476 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4477
4478 t = make_node_stat (code PASS_MEM_STAT);
4479 TREE_TYPE (t) = tt;
4480
4481 side_effects = TREE_SIDE_EFFECTS (t);
4482
4483 PROCESS_ARG (0);
4484 PROCESS_ARG (1);
4485 PROCESS_ARG (2);
4486 PROCESS_ARG (3);
4487
4488 TREE_SIDE_EFFECTS (t) = side_effects;
4489 TREE_THIS_VOLATILE (t)
4490 = (TREE_CODE_CLASS (code) == tcc_reference
4491 && arg0 && TREE_THIS_VOLATILE (arg0));
4492
4493 return t;
4494 }
4495
4496 tree
4497 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4498 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4499 {
4500 bool constant, read_only, side_effects;
4501 tree t;
4502
4503 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4504
4505 t = make_node_stat (code PASS_MEM_STAT);
4506 TREE_TYPE (t) = tt;
4507
4508 side_effects = TREE_SIDE_EFFECTS (t);
4509
4510 PROCESS_ARG (0);
4511 PROCESS_ARG (1);
4512 PROCESS_ARG (2);
4513 PROCESS_ARG (3);
4514 PROCESS_ARG (4);
4515
4516 TREE_SIDE_EFFECTS (t) = side_effects;
4517 if (code == TARGET_MEM_REF)
4518 {
4519 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4520 {
4521 tree o = TREE_OPERAND (arg0, 0);
4522 TREE_READONLY (t) = TREE_READONLY (o);
4523 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4524 }
4525 }
4526 else
4527 TREE_THIS_VOLATILE (t)
4528 = (TREE_CODE_CLASS (code) == tcc_reference
4529 && arg0 && TREE_THIS_VOLATILE (arg0));
4530
4531 return t;
4532 }
4533
4534 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4535 on the pointer PTR. */
4536
4537 tree
4538 build_simple_mem_ref_loc (location_t loc, tree ptr)
4539 {
4540 HOST_WIDE_INT offset = 0;
4541 tree ptype = TREE_TYPE (ptr);
4542 tree tem;
4543 /* For convenience allow addresses that collapse to a simple base
4544 and offset. */
4545 if (TREE_CODE (ptr) == ADDR_EXPR
4546 && (handled_component_p (TREE_OPERAND (ptr, 0))
4547 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4548 {
4549 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4550 gcc_assert (ptr);
4551 ptr = build_fold_addr_expr (ptr);
4552 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4553 }
4554 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4555 ptr, build_int_cst (ptype, offset));
4556 SET_EXPR_LOCATION (tem, loc);
4557 return tem;
4558 }
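/* Illustrative sketch, not part of the original sources: dereferencing a
   pointer-typed tree P (hypothetical) at location LOC, the equivalent of a
   plain "*p":

     tree val = build_simple_mem_ref_loc (loc, p);

   If P is an ADDR_EXPR of a handled component, e.g. &s.f, the address is
   first collapsed to a base plus constant byte offset as shown above.  */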
4559
4560 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4561
4562 offset_int
4563 mem_ref_offset (const_tree t)
4564 {
4565 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4566 }
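/* Illustrative sketch, not part of the original sources: reading the byte
   offset back out of a MEM_REF (hypothetical tree MEMREF) when it is known
   to fit in a host integer:

     offset_int off = mem_ref_offset (memref);
     HOST_WIDE_INT byte_off = off.to_shwi ();
 */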
4567
4568 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4569 offsetted by OFFSET units. */
4570
4571 tree
4572 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4573 {
4574 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4575 build_fold_addr_expr (base),
4576 build_int_cst (ptr_type_node, offset));
4577 tree addr = build1 (ADDR_EXPR, type, ref);
4578 recompute_tree_invariant_for_addr_expr (addr);
4579 return addr;
4580 }
4581
4582 /* Similar except don't specify the TREE_TYPE
4583 and leave the TREE_SIDE_EFFECTS as 0.
4584 It is permissible for arguments to be null,
4585 or even garbage if their values do not matter. */
4586
4587 tree
4588 build_nt (enum tree_code code, ...)
4589 {
4590 tree t;
4591 int length;
4592 int i;
4593 va_list p;
4594
4595 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4596
4597 va_start (p, code);
4598
4599 t = make_node (code);
4600 length = TREE_CODE_LENGTH (code);
4601
4602 for (i = 0; i < length; i++)
4603 TREE_OPERAND (t, i) = va_arg (p, tree);
4604
4605 va_end (p);
4606 return t;
4607 }
4608
4609 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4610 tree vec. */
4611
4612 tree
4613 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4614 {
4615 tree ret, t;
4616 unsigned int ix;
4617
4618 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4619 CALL_EXPR_FN (ret) = fn;
4620 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4621 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4622 CALL_EXPR_ARG (ret, ix) = t;
4623 return ret;
4624 }
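/* Illustrative sketch, not part of the original sources: building an
   unfolded CALL_EXPR from a gathered argument vector.  FN_ADDR and ARG are
   hypothetical trees (the callee address and one argument).

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg);
     tree call = build_nt_call_vec (fn_addr, args);
 */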
4625 \f
4626 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4627 We do NOT enter this node in any sort of symbol table.
4628
4629 LOC is the location of the decl.
4630
4631 layout_decl is used to set up the decl's storage layout.
4632 Other slots are initialized to 0 or null pointers. */
4633
4634 tree
4635 build_decl_stat (location_t loc, enum tree_code code, tree name,
4636 tree type MEM_STAT_DECL)
4637 {
4638 tree t;
4639
4640 t = make_node_stat (code PASS_MEM_STAT);
4641 DECL_SOURCE_LOCATION (t) = loc;
4642
4643 /* if (type == error_mark_node)
4644 type = integer_type_node; */
4645 /* That is not done, deliberately, so that having error_mark_node
4646 as the type can suppress useless errors in the use of this variable. */
4647
4648 DECL_NAME (t) = name;
4649 TREE_TYPE (t) = type;
4650
4651 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4652 layout_decl (t, 0);
4653
4654 return t;
4655 }
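/* Illustrative sketch, not part of the original sources: creating an
   artificial integer variable.  The name is only an example.

     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                            get_identifier ("__example_tmp"),
                            integer_type_node);
     DECL_ARTIFICIAL (var) = 1;
 */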
4656
4657 /* Builds and returns function declaration with NAME and TYPE. */
4658
4659 tree
4660 build_fn_decl (const char *name, tree type)
4661 {
4662 tree id = get_identifier (name);
4663 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4664
4665 DECL_EXTERNAL (decl) = 1;
4666 TREE_PUBLIC (decl) = 1;
4667 DECL_ARTIFICIAL (decl) = 1;
4668 TREE_NOTHROW (decl) = 1;
4669
4670 return decl;
4671 }
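/* Illustrative sketch, not part of the original sources: declaring an
   external runtime helper that takes no arguments and returns nothing.
   The helper name is only an example.

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__example_trap", fntype);
 */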
4672
4673 vec<tree, va_gc> *all_translation_units;
4674
4675 /* Builds a new translation-unit decl with name NAME, queues it in the
4676 global list of translation-unit decls and returns it. */
4677
4678 tree
4679 build_translation_unit_decl (tree name)
4680 {
4681 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4682 name, NULL_TREE);
4683 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4684 vec_safe_push (all_translation_units, tu);
4685 return tu;
4686 }
4687
4688 \f
4689 /* BLOCK nodes are used to represent the structure of binding contours
4690 and declarations, once those contours have been exited and their contents
4691 compiled. This information is used for outputting debugging info. */
4692
4693 tree
4694 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4695 {
4696 tree block = make_node (BLOCK);
4697
4698 BLOCK_VARS (block) = vars;
4699 BLOCK_SUBBLOCKS (block) = subblocks;
4700 BLOCK_SUPERCONTEXT (block) = supercontext;
4701 BLOCK_CHAIN (block) = chain;
4702 return block;
4703 }
4704
4705 \f
4706 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4707
4708 LOC is the location to use in tree T. */
4709
4710 void
4711 protected_set_expr_location (tree t, location_t loc)
4712 {
4713 if (CAN_HAVE_LOCATION_P (t))
4714 SET_EXPR_LOCATION (t, loc);
4715 }
4716 \f
4717 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4718 is ATTRIBUTE. */
4719
4720 tree
4721 build_decl_attribute_variant (tree ddecl, tree attribute)
4722 {
4723 DECL_ATTRIBUTES (ddecl) = attribute;
4724 return ddecl;
4725 }
4726
4727 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4728 is ATTRIBUTE and its qualifiers are QUALS.
4729
4730 Record such modified types already made so we don't make duplicates. */
4731
4732 tree
4733 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4734 {
4735 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4736 {
4737 inchash::hash hstate;
4738 tree ntype;
4739 int i;
4740 tree t;
4741 enum tree_code code = TREE_CODE (ttype);
4742
4743 /* Building a distinct copy of a tagged type is inappropriate; it
4744 causes breakage in code that expects there to be a one-to-one
4745 relationship between a struct and its fields.
4746 build_duplicate_type is another solution (as used in
4747 handle_transparent_union_attribute), but that doesn't play well
4748 with the stronger C++ type identity model. */
4749 if (TREE_CODE (ttype) == RECORD_TYPE
4750 || TREE_CODE (ttype) == UNION_TYPE
4751 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4752 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4753 {
4754 warning (OPT_Wattributes,
4755 "ignoring attributes applied to %qT after definition",
4756 TYPE_MAIN_VARIANT (ttype));
4757 return build_qualified_type (ttype, quals);
4758 }
4759
4760 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4761 ntype = build_distinct_type_copy (ttype);
4762
4763 TYPE_ATTRIBUTES (ntype) = attribute;
4764
4765 hstate.add_int (code);
4766 if (TREE_TYPE (ntype))
4767 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4768 attribute_hash_list (attribute, hstate);
4769
4770 switch (TREE_CODE (ntype))
4771 {
4772 case FUNCTION_TYPE:
4773 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4774 break;
4775 case ARRAY_TYPE:
4776 if (TYPE_DOMAIN (ntype))
4777 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4778 break;
4779 case INTEGER_TYPE:
4780 t = TYPE_MAX_VALUE (ntype);
4781 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4782 hstate.add_object (TREE_INT_CST_ELT (t, i));
4783 break;
4784 case REAL_TYPE:
4785 case FIXED_POINT_TYPE:
4786 {
4787 unsigned int precision = TYPE_PRECISION (ntype);
4788 hstate.add_object (precision);
4789 }
4790 break;
4791 default:
4792 break;
4793 }
4794
4795 ntype = type_hash_canon (hstate.end(), ntype);
4796
4797 /* If the target-dependent attributes make NTYPE different from
4798 its canonical type, we will need to use structural equality
4799 checks for this type. */
4800 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4801 || !comp_type_attributes (ntype, ttype))
4802 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4803 else if (TYPE_CANONICAL (ntype) == ntype)
4804 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4805
4806 ttype = build_qualified_type (ntype, quals);
4807 }
4808 else if (TYPE_QUALS (ttype) != quals)
4809 ttype = build_qualified_type (ttype, quals);
4810
4811 return ttype;
4812 }
4813
4814 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4815 the same. */
4816
4817 static bool
4818 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4819 {
4820 tree cl1, cl2;
4821 for (cl1 = clauses1, cl2 = clauses2;
4822 cl1 && cl2;
4823 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4824 {
4825 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4826 return false;
4827 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4828 {
4829 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4830 OMP_CLAUSE_DECL (cl2)) != 1)
4831 return false;
4832 }
4833 switch (OMP_CLAUSE_CODE (cl1))
4834 {
4835 case OMP_CLAUSE_ALIGNED:
4836 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4837 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4838 return false;
4839 break;
4840 case OMP_CLAUSE_LINEAR:
4841 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4842 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4843 return false;
4844 break;
4845 case OMP_CLAUSE_SIMDLEN:
4846 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4847 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4848 return false;
4849 default:
4850 break;
4851 }
4852 }
4853 return true;
4854 }
4855
4856 /* Compare two constructor-element-type constant lists. Return true if the
4857 lists are known to be equal; otherwise return false. */
4858
4859 static bool
4860 simple_cst_list_equal (const_tree l1, const_tree l2)
4861 {
4862 while (l1 != NULL_TREE && l2 != NULL_TREE)
4863 {
4864 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4865 return false;
4866
4867 l1 = TREE_CHAIN (l1);
4868 l2 = TREE_CHAIN (l2);
4869 }
4870
4871 return l1 == l2;
4872 }
4873
4874 /* Compare two attributes for their value identity. Return true if the
4875 attribute values are known to be equal; otherwise return false.
4876 */
4877
4878 bool
4879 attribute_value_equal (const_tree attr1, const_tree attr2)
4880 {
4881 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4882 return true;
4883
4884 if (TREE_VALUE (attr1) != NULL_TREE
4885 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4886 && TREE_VALUE (attr2) != NULL
4887 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4888 return (simple_cst_list_equal (TREE_VALUE (attr1),
4889 TREE_VALUE (attr2)) == 1);
4890
4891 if ((flag_openmp || flag_openmp_simd)
4892 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4893 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4894 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4895 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4896 TREE_VALUE (attr2));
4897
4898 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4899 }
4900
4901 /* Return 0 if the attributes for two types are incompatible, 1 if they
4902 are compatible, and 2 if they are nearly compatible (which causes a
4903 warning to be generated). */
4904 int
4905 comp_type_attributes (const_tree type1, const_tree type2)
4906 {
4907 const_tree a1 = TYPE_ATTRIBUTES (type1);
4908 const_tree a2 = TYPE_ATTRIBUTES (type2);
4909 const_tree a;
4910
4911 if (a1 == a2)
4912 return 1;
4913 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4914 {
4915 const struct attribute_spec *as;
4916 const_tree attr;
4917
4918 as = lookup_attribute_spec (get_attribute_name (a));
4919 if (!as || as->affects_type_identity == false)
4920 continue;
4921
4922 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4923 if (!attr || !attribute_value_equal (a, attr))
4924 break;
4925 }
4926 if (!a)
4927 {
4928 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4929 {
4930 const struct attribute_spec *as;
4931
4932 as = lookup_attribute_spec (get_attribute_name (a));
4933 if (!as || as->affects_type_identity == false)
4934 continue;
4935
4936 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4937 break;
4938 /* We don't need to compare trees again, as we did this
4939 already in the first loop. */
4940 }
4941 /* All attributes affecting type identity are equal, so there
4942 is no need to call the target hook for comparison. */
4943 if (!a)
4944 return 1;
4945 }
4946 /* As some type combinations - like default calling-convention - might
4947 be compatible, we have to call the target hook to get the final result. */
4948 return targetm.comp_type_attributes (type1, type2);
4949 }
4950
4951 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4952 is ATTRIBUTE.
4953
4954 Record such modified types already made so we don't make duplicates. */
4955
4956 tree
4957 build_type_attribute_variant (tree ttype, tree attribute)
4958 {
4959 return build_type_attribute_qual_variant (ttype, attribute,
4960 TYPE_QUALS (ttype));
4961 }
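/* Illustrative sketch, not part of the original sources: attaching an
   argument-less attribute to a variant of TYPE (hypothetical) without
   modifying the original type.  The attribute name is only an example.

     tree attrs = tree_cons (get_identifier ("example_attr"), NULL_TREE,
                             TYPE_ATTRIBUTES (type));
     tree variant = build_type_attribute_variant (type, attrs);
 */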
4962
4963
4964 /* Reset the expression *EXPR_P, a size or position.
4965
4966 ??? We could reset all non-constant sizes or positions. But it's cheap
4967 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4968
4969 We need to reset self-referential sizes or positions because they cannot
4970 be gimplified and thus can contain a CALL_EXPR after the gimplification
4971 is finished, which will run afoul of LTO streaming. And they need to be
4972 reset to something essentially dummy but not constant, so as to preserve
4973 the properties of the object they are attached to. */
4974
4975 static inline void
4976 free_lang_data_in_one_sizepos (tree *expr_p)
4977 {
4978 tree expr = *expr_p;
4979 if (CONTAINS_PLACEHOLDER_P (expr))
4980 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4981 }
4982
4983
4984 /* Reset all the fields in a binfo node BINFO. We only keep
4985 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4986
4987 static void
4988 free_lang_data_in_binfo (tree binfo)
4989 {
4990 unsigned i;
4991 tree t;
4992
4993 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4994
4995 BINFO_VIRTUALS (binfo) = NULL_TREE;
4996 BINFO_BASE_ACCESSES (binfo) = NULL;
4997 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4998 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4999
5000 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5001 free_lang_data_in_binfo (t);
5002 }
5003
5004
5005 /* Reset all language specific information still present in TYPE. */
5006
5007 static void
5008 free_lang_data_in_type (tree type)
5009 {
5010 gcc_assert (TYPE_P (type));
5011
5012 /* Give the FE a chance to remove its own data first. */
5013 lang_hooks.free_lang_data (type);
5014
5015 TREE_LANG_FLAG_0 (type) = 0;
5016 TREE_LANG_FLAG_1 (type) = 0;
5017 TREE_LANG_FLAG_2 (type) = 0;
5018 TREE_LANG_FLAG_3 (type) = 0;
5019 TREE_LANG_FLAG_4 (type) = 0;
5020 TREE_LANG_FLAG_5 (type) = 0;
5021 TREE_LANG_FLAG_6 (type) = 0;
5022
5023 if (TREE_CODE (type) == FUNCTION_TYPE)
5024 {
5025 /* Remove the const and volatile qualifiers from arguments. The
5026 C++ front end removes them, but the C front end does not,
5027 leading to false ODR violation errors when merging two
5028 instances of the same function signature compiled by
5029 different front ends. */
5030 tree p;
5031
5032 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5033 {
5034 tree arg_type = TREE_VALUE (p);
5035
5036 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5037 {
5038 int quals = TYPE_QUALS (arg_type)
5039 & ~TYPE_QUAL_CONST
5040 & ~TYPE_QUAL_VOLATILE;
5041 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5042 free_lang_data_in_type (TREE_VALUE (p));
5043 }
5044 /* C++ FE uses TREE_PURPOSE to store initial values. */
5045 TREE_PURPOSE (p) = NULL;
5046 }
5047 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5048 TYPE_MINVAL (type) = NULL;
5049 }
5050 if (TREE_CODE (type) == METHOD_TYPE)
5051 {
5052 tree p;
5053
5054 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5055 {
5056 /* C++ FE uses TREE_PURPOSE to store initial values. */
5057 TREE_PURPOSE (p) = NULL;
5058 }
5059 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5060 TYPE_MINVAL (type) = NULL;
5061 }
5062
5063 /* Remove members that are not actually FIELD_DECLs from the field
5064 list of an aggregate. These occur in C++. */
5065 if (RECORD_OR_UNION_TYPE_P (type))
5066 {
5067 tree prev, member;
5068
5069 /* Note that TYPE_FIELDS can be shared across distinct
5070 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5071 to be removed, we cannot set its TREE_CHAIN to NULL.
5072 Otherwise, we would not be able to find all the other fields
5073 in the other instances of this TREE_TYPE.
5074
5075 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5076 prev = NULL_TREE;
5077 member = TYPE_FIELDS (type);
5078 while (member)
5079 {
5080 if (TREE_CODE (member) == FIELD_DECL
5081 || TREE_CODE (member) == TYPE_DECL)
5082 {
5083 if (prev)
5084 TREE_CHAIN (prev) = member;
5085 else
5086 TYPE_FIELDS (type) = member;
5087 prev = member;
5088 }
5089
5090 member = TREE_CHAIN (member);
5091 }
5092
5093 if (prev)
5094 TREE_CHAIN (prev) = NULL_TREE;
5095 else
5096 TYPE_FIELDS (type) = NULL_TREE;
5097
5098 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5099 and leaves the pointer dangling from time to time. */
5100 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5101 TYPE_VFIELD (type) = NULL_TREE;
5102
5103 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5104 to enable ODR warnings about different method lists, doing so
5105 seems to increase the size of the streamed LTO data impractically.
5106 Keep the information that TYPE_METHODS was non-NULL. This is used
5107 by function.c and the pretty printers. */
5108 if (TYPE_METHODS (type))
5109 TYPE_METHODS (type) = error_mark_node;
5110 if (TYPE_BINFO (type))
5111 {
5112 free_lang_data_in_binfo (TYPE_BINFO (type));
5113 /* We need to preserve the link to bases and the virtual table for all
5114 polymorphic types to keep the devirtualization machinery working.
5115 Debug output cares only about bases, but we also output the
5116 virtual table pointers so that merging -fdevirtualize and
5117 -fno-devirtualize units is easier. */
5118 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5119 || !flag_devirtualize)
5120 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5121 && !BINFO_VTABLE (TYPE_BINFO (type)))
5122 || debug_info_level != DINFO_LEVEL_NONE))
5123 TYPE_BINFO (type) = NULL;
5124 }
5125 }
5126 else
5127 {
5128 /* For non-aggregate types, clear out the language slot (which
5129 overloads TYPE_BINFO). */
5130 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5131
5132 if (INTEGRAL_TYPE_P (type)
5133 || SCALAR_FLOAT_TYPE_P (type)
5134 || FIXED_POINT_TYPE_P (type))
5135 {
5136 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5137 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5138 }
5139 }
5140
5141 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5142 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5143
5144 if (TYPE_CONTEXT (type)
5145 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5146 {
5147 tree ctx = TYPE_CONTEXT (type);
5148 do
5149 {
5150 ctx = BLOCK_SUPERCONTEXT (ctx);
5151 }
5152 while (ctx && TREE_CODE (ctx) == BLOCK);
5153 TYPE_CONTEXT (type) = ctx;
5154 }
5155 }
5156
5157
5158 /* Return true if DECL may need an assembler name to be set. */
5159
5160 static inline bool
5161 need_assembler_name_p (tree decl)
5162 {
5163 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5164 Rule merging. This makes type_odr_p return true on those types during
5165 LTO, and by comparing the mangled names we can tell which types are
5166 intended to be equivalent across compilation units.
5167 
5168 We do not store names of types for which type_in_anonymous_namespace_p holds.
5169 
5170 Record, union and enumeration types have linkage that allows us
5171 to check type_in_anonymous_namespace_p. We do not mangle compound types
5172 that can always be compared structurally.
5173 
5174 Similarly for builtin types, we compare properties of their main variant.
5175 A special case are integer types, where mangling does distinguish
5176 char/signed char/unsigned char etc. Storing names for these lets
5177 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5178 See cp/mangle.c:write_builtin_type for details. */
5179
5180 if (flag_lto_odr_type_mering
5181 && TREE_CODE (decl) == TYPE_DECL
5182 && DECL_NAME (decl)
5183 && decl == TYPE_NAME (TREE_TYPE (decl))
5184 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5185 && (type_with_linkage_p (TREE_TYPE (decl))
5186 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5187 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5188 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5189 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5190 if (TREE_CODE (decl) != FUNCTION_DECL
5191 && TREE_CODE (decl) != VAR_DECL)
5192 return false;
5193
5194 /* If DECL already has its assembler name set, it does not need a
5195 new one. */
5196 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5197 || DECL_ASSEMBLER_NAME_SET_P (decl))
5198 return false;
5199
5200 /* Abstract decls do not need an assembler name. */
5201 if (DECL_ABSTRACT_P (decl))
5202 return false;
5203
5204 /* For VAR_DECLs, only static, public and external symbols need an
5205 assembler name. */
5206 if (TREE_CODE (decl) == VAR_DECL
5207 && !TREE_STATIC (decl)
5208 && !TREE_PUBLIC (decl)
5209 && !DECL_EXTERNAL (decl))
5210 return false;
5211
5212 if (TREE_CODE (decl) == FUNCTION_DECL)
5213 {
5214 /* Do not set assembler name on builtins. Allow RTL expansion to
5215 decide whether to expand inline or via a regular call. */
5216 if (DECL_BUILT_IN (decl)
5217 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5218 return false;
5219
5220 /* Functions represented in the callgraph need an assembler name. */
5221 if (cgraph_node::get (decl) != NULL)
5222 return true;
5223
5224 /* Unused and not public functions don't need an assembler name. */
5225 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5226 return false;
5227 }
5228
5229 return true;
5230 }
5231
5232
5233 /* Reset all language specific information still present in symbol
5234 DECL. */
5235
5236 static void
5237 free_lang_data_in_decl (tree decl)
5238 {
5239 gcc_assert (DECL_P (decl));
5240
5241 /* Give the FE a chance to remove its own data first. */
5242 lang_hooks.free_lang_data (decl);
5243
5244 TREE_LANG_FLAG_0 (decl) = 0;
5245 TREE_LANG_FLAG_1 (decl) = 0;
5246 TREE_LANG_FLAG_2 (decl) = 0;
5247 TREE_LANG_FLAG_3 (decl) = 0;
5248 TREE_LANG_FLAG_4 (decl) = 0;
5249 TREE_LANG_FLAG_5 (decl) = 0;
5250 TREE_LANG_FLAG_6 (decl) = 0;
5251
5252 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5253 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5254 if (TREE_CODE (decl) == FIELD_DECL)
5255 {
5256 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5257 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5258 DECL_QUALIFIER (decl) = NULL_TREE;
5259 }
5260
5261 if (TREE_CODE (decl) == FUNCTION_DECL)
5262 {
5263 struct cgraph_node *node;
5264 if (!(node = cgraph_node::get (decl))
5265 || (!node->definition && !node->clones))
5266 {
5267 if (node)
5268 node->release_body ();
5269 else
5270 {
5271 release_function_body (decl);
5272 DECL_ARGUMENTS (decl) = NULL;
5273 DECL_RESULT (decl) = NULL;
5274 DECL_INITIAL (decl) = error_mark_node;
5275 }
5276 }
5277 if (gimple_has_body_p (decl))
5278 {
5279 tree t;
5280
5281 /* If DECL has a gimple body, then the context for its
5282 arguments must be DECL. Otherwise, it doesn't really
5283 matter, as we will not be emitting any code for DECL. In
5284 general, there may be other instances of DECL created by
5285 the front end and since PARM_DECLs are generally shared,
5286 their DECL_CONTEXT changes as the replicas of DECL are
5287 created. The only time where DECL_CONTEXT is important
5288 is for the FUNCTION_DECLs that have a gimple body (since
5289 the PARM_DECL will be used in the function's body). */
5290 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5291 DECL_CONTEXT (t) = decl;
5292 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5293 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5294 = target_option_default_node;
5295 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5296 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5297 = optimization_default_node;
5298 }
5299
5300 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5301 At this point, it is not needed anymore. */
5302 DECL_SAVED_TREE (decl) = NULL_TREE;
5303
5304 /* Clear the abstract origin if it refers to a method. Otherwise
5305 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5306 origin will not be output correctly. */
5307 if (DECL_ABSTRACT_ORIGIN (decl)
5308 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5309 && RECORD_OR_UNION_TYPE_P
5310 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5311 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5312
5313 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5314 DECL_VINDEX referring to itself into a vtable slot number as it
5315 should. Happens with functions that are copied and then forgotten
5316 about. Just clear it, it won't matter anymore. */
5317 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5318 DECL_VINDEX (decl) = NULL_TREE;
5319 }
5320 else if (TREE_CODE (decl) == VAR_DECL)
5321 {
5322 if ((DECL_EXTERNAL (decl)
5323 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5324 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5325 DECL_INITIAL (decl) = NULL_TREE;
5326 }
5327 else if (TREE_CODE (decl) == TYPE_DECL
5328 || TREE_CODE (decl) == FIELD_DECL)
5329 DECL_INITIAL (decl) = NULL_TREE;
5330 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5331 && DECL_INITIAL (decl)
5332 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5333 {
5334 /* Strip builtins from the translation-unit BLOCK. We still have targets
5335 without builtin_decl_explicit support and also builtins are shared
5336 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5337 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5338 while (*nextp)
5339 {
5340 tree var = *nextp;
5341 if (TREE_CODE (var) == FUNCTION_DECL
5342 && DECL_BUILT_IN (var))
5343 *nextp = TREE_CHAIN (var);
5344 else
5345 nextp = &TREE_CHAIN (var);
5346 }
5347 }
5348 }
5349
5350
5351 /* Data used when collecting DECLs and TYPEs for language data removal. */
5352
5353 struct free_lang_data_d
5354 {
5355 /* Worklist to avoid excessive recursion. */
5356 vec<tree> worklist;
5357
5358 /* Set of traversed objects. Used to avoid duplicate visits. */
5359 hash_set<tree> *pset;
5360
5361 /* Array of symbols to process with free_lang_data_in_decl. */
5362 vec<tree> decls;
5363
5364 /* Array of types to process with free_lang_data_in_type. */
5365 vec<tree> types;
5366 };
5367
5368
5369 /* Save all language fields needed to generate proper debug information
5370 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5371
5372 static void
5373 save_debug_info_for_decl (tree t)
5374 {
5375 /*struct saved_debug_info_d *sdi;*/
5376
5377 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5378
5379 /* FIXME. Partial implementation for saving debug info removed. */
5380 }
5381
5382
5383 /* Save all language fields needed to generate proper debug information
5384 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5385
5386 static void
5387 save_debug_info_for_type (tree t)
5388 {
5389 /*struct saved_debug_info_d *sdi;*/
5390
5391 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5392
5393 /* FIXME. Partial implementation for saving debug info removed. */
5394 }
5395
5396
5397 /* Add type or decl T to one of the list of tree nodes that need their
5398 language data removed. The lists are held inside FLD. */
5399
5400 static void
5401 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5402 {
5403 if (DECL_P (t))
5404 {
5405 fld->decls.safe_push (t);
5406 if (debug_info_level > DINFO_LEVEL_TERSE)
5407 save_debug_info_for_decl (t);
5408 }
5409 else if (TYPE_P (t))
5410 {
5411 fld->types.safe_push (t);
5412 if (debug_info_level > DINFO_LEVEL_TERSE)
5413 save_debug_info_for_type (t);
5414 }
5415 else
5416 gcc_unreachable ();
5417 }
5418
5419 /* Push tree node T into FLD->WORKLIST. */
5420
5421 static inline void
5422 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5423 {
5424 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5425 fld->worklist.safe_push ((t));
5426 }
5427
5428
5429 /* Operand callback helper for free_lang_data_in_node. *TP is the
5430 subtree operand being considered. */
5431
5432 static tree
5433 find_decls_types_r (tree *tp, int *ws, void *data)
5434 {
5435 tree t = *tp;
5436 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5437
5438 if (TREE_CODE (t) == TREE_LIST)
5439 return NULL_TREE;
5440
5441 /* Language specific nodes will be removed, so there is no need
5442 to gather anything under them. */
5443 if (is_lang_specific (t))
5444 {
5445 *ws = 0;
5446 return NULL_TREE;
5447 }
5448
5449 if (DECL_P (t))
5450 {
5451 /* Note that walk_tree does not traverse every possible field in
5452 decls, so we have to do our own traversals here. */
5453 add_tree_to_fld_list (t, fld);
5454
5455 fld_worklist_push (DECL_NAME (t), fld);
5456 fld_worklist_push (DECL_CONTEXT (t), fld);
5457 fld_worklist_push (DECL_SIZE (t), fld);
5458 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5459
5460 /* We are going to remove everything under DECL_INITIAL for
5461 TYPE_DECLs. No point walking them. */
5462 if (TREE_CODE (t) != TYPE_DECL)
5463 fld_worklist_push (DECL_INITIAL (t), fld);
5464
5465 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5466 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5467
5468 if (TREE_CODE (t) == FUNCTION_DECL)
5469 {
5470 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5471 fld_worklist_push (DECL_RESULT (t), fld);
5472 }
5473 else if (TREE_CODE (t) == TYPE_DECL)
5474 {
5475 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5476 }
5477 else if (TREE_CODE (t) == FIELD_DECL)
5478 {
5479 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5480 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5481 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5482 fld_worklist_push (DECL_FCONTEXT (t), fld);
5483 }
5484
5485 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5486 && DECL_HAS_VALUE_EXPR_P (t))
5487 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5488
5489 if (TREE_CODE (t) != FIELD_DECL
5490 && TREE_CODE (t) != TYPE_DECL)
5491 fld_worklist_push (TREE_CHAIN (t), fld);
5492 *ws = 0;
5493 }
5494 else if (TYPE_P (t))
5495 {
5496 /* Note that walk_tree does not traverse every possible field in
5497 types, so we have to do our own traversals here. */
5498 add_tree_to_fld_list (t, fld);
5499
5500 if (!RECORD_OR_UNION_TYPE_P (t))
5501 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5502 fld_worklist_push (TYPE_SIZE (t), fld);
5503 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5504 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5505 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5506 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5507 fld_worklist_push (TYPE_NAME (t), fld);
5508 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5509 them and thus do not want to reach unused pointer types
5510 this way. */
5511 if (!POINTER_TYPE_P (t))
5512 fld_worklist_push (TYPE_MINVAL (t), fld);
5513 if (!RECORD_OR_UNION_TYPE_P (t))
5514 fld_worklist_push (TYPE_MAXVAL (t), fld);
5515 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5516 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5517 do not want to reach unused variants this way. */
5518 if (TYPE_CONTEXT (t))
5519 {
5520 tree ctx = TYPE_CONTEXT (t);
5521 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5522 So push that instead. */
5523 while (ctx && TREE_CODE (ctx) == BLOCK)
5524 ctx = BLOCK_SUPERCONTEXT (ctx);
5525 fld_worklist_push (ctx, fld);
5526 }
5527 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5528 want to reach unused types this way. */
5529
5530 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5531 {
5532 unsigned i;
5533 tree tem;
5534 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5535 fld_worklist_push (TREE_TYPE (tem), fld);
5536 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5537 if (tem
5538 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5539 && TREE_CODE (tem) == TREE_LIST)
5540 do
5541 {
5542 fld_worklist_push (TREE_VALUE (tem), fld);
5543 tem = TREE_CHAIN (tem);
5544 }
5545 while (tem);
5546 }
5547 if (RECORD_OR_UNION_TYPE_P (t))
5548 {
5549 tree tem;
5550 /* Push all TYPE_FIELDS - interesting and non-interesting things
5551 can be interleaved. */
5552 tem = TYPE_FIELDS (t);
5553 while (tem)
5554 {
5555 if (TREE_CODE (tem) == FIELD_DECL
5556 || TREE_CODE (tem) == TYPE_DECL)
5557 fld_worklist_push (tem, fld);
5558 tem = TREE_CHAIN (tem);
5559 }
5560 }
5561
5562 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5563 *ws = 0;
5564 }
5565 else if (TREE_CODE (t) == BLOCK)
5566 {
5567 tree tem;
5568 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5569 fld_worklist_push (tem, fld);
5570 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5571 fld_worklist_push (tem, fld);
5572 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5573 }
5574
5575 if (TREE_CODE (t) != IDENTIFIER_NODE
5576 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5577 fld_worklist_push (TREE_TYPE (t), fld);
5578
5579 return NULL_TREE;
5580 }
5581
5582
5583 /* Find decls and types in T. */
5584
5585 static void
5586 find_decls_types (tree t, struct free_lang_data_d *fld)
5587 {
5588 while (1)
5589 {
5590 if (!fld->pset->contains (t))
5591 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5592 if (fld->worklist.is_empty ())
5593 break;
5594 t = fld->worklist.pop ();
5595 }
5596 }
5597
5598 /* Translate all the types in LIST with the corresponding runtime
5599 types. */
5600
5601 static tree
5602 get_eh_types_for_runtime (tree list)
5603 {
5604 tree head, prev;
5605
5606 if (list == NULL_TREE)
5607 return NULL_TREE;
5608
5609 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5610 prev = head;
5611 list = TREE_CHAIN (list);
5612 while (list)
5613 {
5614 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5615 TREE_CHAIN (prev) = n;
5616 prev = TREE_CHAIN (prev);
5617 list = TREE_CHAIN (list);
5618 }
5619
5620 return head;
5621 }
5622
5623
5624 /* Find decls and types referenced in EH region R and store them in
5625 FLD->DECLS and FLD->TYPES. */
5626
5627 static void
5628 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5629 {
5630 switch (r->type)
5631 {
5632 case ERT_CLEANUP:
5633 break;
5634
5635 case ERT_TRY:
5636 {
5637 eh_catch c;
5638
5639 /* The types referenced in each catch must first be changed to the
5640 EH types used at runtime. This removes references to FE types
5641 in the region. */
5642 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5643 {
5644 c->type_list = get_eh_types_for_runtime (c->type_list);
5645 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5646 }
5647 }
5648 break;
5649
5650 case ERT_ALLOWED_EXCEPTIONS:
5651 r->u.allowed.type_list
5652 = get_eh_types_for_runtime (r->u.allowed.type_list);
5653 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5654 break;
5655
5656 case ERT_MUST_NOT_THROW:
5657 walk_tree (&r->u.must_not_throw.failure_decl,
5658 find_decls_types_r, fld, fld->pset);
5659 break;
5660 }
5661 }
5662
5663
5664 /* Find decls and types referenced in cgraph node N and store them in
5665 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5666 look for *every* kind of DECL and TYPE node reachable from N,
5667 including those embedded inside types and decls (i.e., TYPE_DECLs,
5668 NAMESPACE_DECLs, etc). */
5669
5670 static void
5671 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5672 {
5673 basic_block bb;
5674 struct function *fn;
5675 unsigned ix;
5676 tree t;
5677
5678 find_decls_types (n->decl, fld);
5679
5680 if (!gimple_has_body_p (n->decl))
5681 return;
5682
5683 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5684
5685 fn = DECL_STRUCT_FUNCTION (n->decl);
5686
5687 /* Traverse locals. */
5688 FOR_EACH_LOCAL_DECL (fn, ix, t)
5689 find_decls_types (t, fld);
5690
5691 /* Traverse EH regions in FN. */
5692 {
5693 eh_region r;
5694 FOR_ALL_EH_REGION_FN (r, fn)
5695 find_decls_types_in_eh_region (r, fld);
5696 }
5697
5698 /* Traverse every statement in FN. */
5699 FOR_EACH_BB_FN (bb, fn)
5700 {
5701 gphi_iterator psi;
5702 gimple_stmt_iterator si;
5703 unsigned i;
5704
5705 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5706 {
5707 gphi *phi = psi.phi ();
5708
5709 for (i = 0; i < gimple_phi_num_args (phi); i++)
5710 {
5711 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5712 find_decls_types (*arg_p, fld);
5713 }
5714 }
5715
5716 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5717 {
5718 gimple stmt = gsi_stmt (si);
5719
5720 if (is_gimple_call (stmt))
5721 find_decls_types (gimple_call_fntype (stmt), fld);
5722
5723 for (i = 0; i < gimple_num_ops (stmt); i++)
5724 {
5725 tree arg = gimple_op (stmt, i);
5726 find_decls_types (arg, fld);
5727 }
5728 }
5729 }
5730 }
5731
5732
5733 /* Find decls and types referenced in varpool node N and store them in
5734 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5735 look for *every* kind of DECL and TYPE node reachable from N,
5736 including those embedded inside types and decls (i.e., TYPE_DECLs,
5737 NAMESPACE_DECLs, etc). */
5738
5739 static void
5740 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5741 {
5742 find_decls_types (v->decl, fld);
5743 }
5744
5745 /* If T needs an assembler name, have one created for it. */
5746
5747 void
5748 assign_assembler_name_if_neeeded (tree t)
5749 {
5750 if (need_assembler_name_p (t))
5751 {
5752 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5753 diagnostics that use input_location to show locus
5754 information. The problem here is that, at this point,
5755 input_location is generally anchored to the end of the file
5756 (since the parser is long gone), so we don't have a good
5757 position to pin it to.
5758
5759 To alleviate this problem, this uses the location of T's
5760 declaration. Examples of this are
5761 testsuite/g++.dg/template/cond2.C and
5762 testsuite/g++.dg/template/pr35240.C. */
5763 location_t saved_location = input_location;
5764 input_location = DECL_SOURCE_LOCATION (t);
5765
5766 decl_assembler_name (t);
5767
5768 input_location = saved_location;
5769 }
5770 }
5771
5772
5773 /* Free language specific information for every operand and expression
5774 in every node of the call graph. This process operates in three stages:
5775
5776 1- Every callgraph node and varpool node is traversed looking for
5777 decls and types embedded in them. This is a more exhaustive
5778 search than that done by find_referenced_vars, because it will
5779 also collect individual fields, decls embedded in types, etc.
5780
5781 2- All the decls found are sent to free_lang_data_in_decl.
5782
5783 3- All the types found are sent to free_lang_data_in_type.
5784
5785 The ordering between decls and types is important because
5786 free_lang_data_in_decl sets assembler names, which includes
5787 mangling. So types cannot be freed up until assembler names have
5788 been set up. */
5789
5790 static void
5791 free_lang_data_in_cgraph (void)
5792 {
5793 struct cgraph_node *n;
5794 varpool_node *v;
5795 struct free_lang_data_d fld;
5796 tree t;
5797 unsigned i;
5798 alias_pair *p;
5799
5800 /* Initialize sets and arrays to store referenced decls and types. */
5801 fld.pset = new hash_set<tree>;
5802 fld.worklist.create (0);
5803 fld.decls.create (100);
5804 fld.types.create (100);
5805
5806 /* Find decls and types in the body of every function in the callgraph. */
5807 FOR_EACH_FUNCTION (n)
5808 find_decls_types_in_node (n, &fld);
5809
5810 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5811 find_decls_types (p->decl, &fld);
5812
5813 /* Find decls and types in every varpool symbol. */
5814 FOR_EACH_VARIABLE (v)
5815 find_decls_types_in_var (v, &fld);
5816
5817 /* Set the assembler name on every decl found. We need to do this
5818 now because free_lang_data_in_decl will invalidate data needed
5819 for mangling, which would break mangling of interdependent decls. */
5820 FOR_EACH_VEC_ELT (fld.decls, i, t)
5821 assign_assembler_name_if_neeeded (t);
5822
5823 /* Traverse every decl found freeing its language data. */
5824 FOR_EACH_VEC_ELT (fld.decls, i, t)
5825 free_lang_data_in_decl (t);
5826
5827 /* Traverse every type found freeing its language data. */
5828 FOR_EACH_VEC_ELT (fld.types, i, t)
5829 free_lang_data_in_type (t);
5830 #ifdef ENABLE_CHECKING
5831 FOR_EACH_VEC_ELT (fld.types, i, t)
5832 verify_type (t);
5833 #endif
5834
5835 delete fld.pset;
5836 fld.worklist.release ();
5837 fld.decls.release ();
5838 fld.types.release ();
5839 }
5840
5841
5842 /* Free resources that are used by the FE but are not needed once it is done. */
5843
5844 static unsigned
5845 free_lang_data (void)
5846 {
5847 unsigned i;
5848
5849 /* If we are the LTO frontend we have freed lang-specific data already. */
5850 if (in_lto_p
5851 || (!flag_generate_lto && !flag_generate_offload))
5852 return 0;
5853
5854 /* Allocate and assign alias sets to the standard integer types
5855 while the slots are still set up the way the frontends generated them. */
5856 for (i = 0; i < itk_none; ++i)
5857 if (integer_types[i])
5858 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5859
5860 /* Traverse the IL resetting language specific information for
5861 operands, expressions, etc. */
5862 free_lang_data_in_cgraph ();
5863
5864 /* Create gimple variants for common types. */
5865 ptrdiff_type_node = integer_type_node;
5866 fileptr_type_node = ptr_type_node;
5867
5868 /* Reset some langhooks. Do not reset types_compatible_p, it may
5869 still be used indirectly via the get_alias_set langhook. */
5870 lang_hooks.dwarf_name = lhd_dwarf_name;
5871 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5872 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5873
5874 /* We do not want the default decl_assembler_name implementation;
5875 rather, once we have fixed everything, we want a wrapper around it
5876 that asserts all non-local symbols already got their assembler
5877 name and only produces assembler names for local symbols.
5878 Or better, make sure we never call decl_assembler_name on local
5879 symbols and devise a separate, middle-end private scheme for it. */
5880
5881 /* Reset diagnostic machinery. */
5882 tree_diagnostics_defaults (global_dc);
5883
5884 return 0;
5885 }
5886
5887
5888 namespace {
5889
5890 const pass_data pass_data_ipa_free_lang_data =
5891 {
5892 SIMPLE_IPA_PASS, /* type */
5893 "*free_lang_data", /* name */
5894 OPTGROUP_NONE, /* optinfo_flags */
5895 TV_IPA_FREE_LANG_DATA, /* tv_id */
5896 0, /* properties_required */
5897 0, /* properties_provided */
5898 0, /* properties_destroyed */
5899 0, /* todo_flags_start */
5900 0, /* todo_flags_finish */
5901 };
5902
5903 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5904 {
5905 public:
5906 pass_ipa_free_lang_data (gcc::context *ctxt)
5907 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5908 {}
5909
5910 /* opt_pass methods: */
5911 virtual unsigned int execute (function *) { return free_lang_data (); }
5912
5913 }; // class pass_ipa_free_lang_data
5914
5915 } // anon namespace
5916
5917 simple_ipa_opt_pass *
5918 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5919 {
5920 return new pass_ipa_free_lang_data (ctxt);
5921 }
5922
5923 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5924 ATTR_NAME. Also used internally by remove_attribute(). */
5925 bool
5926 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5927 {
5928 size_t ident_len = IDENTIFIER_LENGTH (ident);
5929
5930 if (ident_len == attr_len)
5931 {
5932 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5933 return true;
5934 }
5935 else if (ident_len == attr_len + 4)
5936 {
5937 /* There is the possibility that ATTR is 'text' and IDENT is
5938 '__text__'. */
5939 const char *p = IDENTIFIER_POINTER (ident);
5940 if (p[0] == '_' && p[1] == '_'
5941 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5942 && strncmp (attr_name, p + 2, attr_len) == 0)
5943 return true;
5944 }
5945
5946 return false;
5947 }
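
/* Illustrative sketch, not part of GCC: the same 'text' vs '__text__'
   matching rule as private_is_attribute_p above, expressed over plain
   C strings.  ATTR is assumed to be in canonical 'text' form; NAME may
   be spelled either way.  Only strlen/strncmp are used, both already
   available here via system.h.  */

static bool
attribute_name_matches_example (const char *attr, const char *name)
{
  size_t attr_len = strlen (attr);
  size_t name_len = strlen (name);

  if (name_len == attr_len)
    return strncmp (attr, name, attr_len) == 0;

  /* 'packed' also matches '__packed__': two leading and two trailing
     underscores around the canonical spelling.  */
  if (name_len == attr_len + 4
      && name[0] == '_' && name[1] == '_'
      && name[name_len - 2] == '_' && name[name_len - 1] == '_')
    return strncmp (attr, name + 2, attr_len) == 0;

  return false;
}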
5948
5949 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5950 of ATTR_NAME, and LIST is not NULL_TREE. */
5951 tree
5952 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5953 {
5954 while (list)
5955 {
5956 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5957
5958 if (ident_len == attr_len)
5959 {
5960 if (!strcmp (attr_name,
5961 IDENTIFIER_POINTER (get_attribute_name (list))))
5962 break;
5963 }
5964 /* TODO: If we made sure that attributes were stored in the
5965 canonical form without '__...__' (ie, as in 'text' as opposed
5966 to '__text__') then we could avoid the following case. */
5967 else if (ident_len == attr_len + 4)
5968 {
5969 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5970 if (p[0] == '_' && p[1] == '_'
5971 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5972 && strncmp (attr_name, p + 2, attr_len) == 0)
5973 break;
5974 }
5975 list = TREE_CHAIN (list);
5976 }
5977
5978 return list;
5979 }
5980
5981 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5982 return a pointer to the list element of the first attribute whose
5983 name starts with ATTR_NAME. ATTR_NAME must be in the form 'text'
5984 (not '__text__'). */
5985
5986 tree
5987 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5988 tree list)
5989 {
5990 while (list)
5991 {
5992 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5993
5994 if (attr_len > ident_len)
5995 {
5996 list = TREE_CHAIN (list);
5997 continue;
5998 }
5999
6000 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6001
6002 if (strncmp (attr_name, p, attr_len) == 0)
6003 break;
6004
6005 /* TODO: If we made sure that attributes were stored in the
6006 canonical form without '__...__' (ie, as in 'text' as opposed
6007 to '__text__') then we could avoid the following case. */
6008 if (p[0] == '_' && p[1] == '_'
6009 && strncmp (attr_name, p + 2, attr_len) == 0)
6010 break;
6011
6012 list = TREE_CHAIN (list);
6013 }
6014
6015 return list;
6016 }
6017
6018
6019 /* A variant of lookup_attribute() that can be used with an identifier
6020 as the first argument, and where the identifier can be either
6021 'text' or '__text__'.
6022
6023 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6024 return a pointer to the attribute's list element if the attribute
6025 is part of the list, or NULL_TREE if not found. If the attribute
6026 appears more than once, this only returns the first occurrence; the
6027 TREE_CHAIN of the return value should be passed back in if further
6028 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6029 can be in the form 'text' or '__text__'. */
6030 static tree
6031 lookup_ident_attribute (tree attr_identifier, tree list)
6032 {
6033 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6034
6035 while (list)
6036 {
6037 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6038 == IDENTIFIER_NODE);
6039
6040 /* Identifiers can be compared directly for equality. */
6041 if (attr_identifier == get_attribute_name (list))
6042 break;
6043
6044 /* If they are not equal, they may still be one in the form
6045 'text' while the other one is in the form '__text__'. TODO:
6046 If we were storing attributes in normalized 'text' form, then
6047 this could all go away and we could take full advantage of
6048 the fact that we're comparing identifiers. :-) */
6049 {
6050 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
6051 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6052
6053 if (ident_len == attr_len + 4)
6054 {
6055 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6056 const char *q = IDENTIFIER_POINTER (attr_identifier);
6057 if (p[0] == '_' && p[1] == '_'
6058 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6059 && strncmp (q, p + 2, attr_len) == 0)
6060 break;
6061 }
6062 else if (ident_len + 4 == attr_len)
6063 {
6064 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6065 const char *q = IDENTIFIER_POINTER (attr_identifier);
6066 if (q[0] == '_' && q[1] == '_'
6067 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
6068 && strncmp (q + 2, p, ident_len) == 0)
6069 break;
6070 }
6071 }
6072 list = TREE_CHAIN (list);
6073 }
6074
6075 return list;
6076 }
6077
6078 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6079 modified list. */
6080
6081 tree
6082 remove_attribute (const char *attr_name, tree list)
6083 {
6084 tree *p;
6085 size_t attr_len = strlen (attr_name);
6086
6087 gcc_checking_assert (attr_name[0] != '_');
6088
6089 for (p = &list; *p; )
6090 {
6091 tree l = *p;
6092 /* TODO: If we were storing attributes in normalized form, here
6093 we could use a simple strcmp(). */
6094 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6095 *p = TREE_CHAIN (l);
6096 else
6097 p = &TREE_CHAIN (l);
6098 }
6099
6100 return list;
6101 }
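
/* Illustrative sketch, not part of GCC: the same pointer-to-pointer
   unlinking idiom as remove_attribute above, shown on a minimal
   singly-linked list.  Walking with a "tree *"-style double pointer
   lets a node be dropped without tracking a separate previous node.  */

struct example_node { int key; struct example_node *next; };

static struct example_node *
remove_key_example (int key, struct example_node *list)
{
  struct example_node **p;

  for (p = &list; *p; )
    {
      struct example_node *n = *p;
      if (n->key == key)
	*p = n->next;		/* Unlink N; *P now skips over it.  */
      else
	p = &n->next;		/* Keep N and advance.  */
    }

  return list;
}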
6102
6103 /* Return an attribute list that is the union of a1 and a2. */
6104
6105 tree
6106 merge_attributes (tree a1, tree a2)
6107 {
6108 tree attributes;
6109
6110 /* Either one unset? Take the set one. */
6111
6112 if ((attributes = a1) == 0)
6113 attributes = a2;
6114
6115 /* One that completely contains the other? Take it. */
6116
6117 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6118 {
6119 if (attribute_list_contained (a2, a1))
6120 attributes = a2;
6121 else
6122 {
6123 /* Pick the longest list, and hang on the other list. */
6124
6125 if (list_length (a1) < list_length (a2))
6126 attributes = a2, a2 = a1;
6127
6128 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6129 {
6130 tree a;
6131 for (a = lookup_ident_attribute (get_attribute_name (a2),
6132 attributes);
6133 a != NULL_TREE && !attribute_value_equal (a, a2);
6134 a = lookup_ident_attribute (get_attribute_name (a2),
6135 TREE_CHAIN (a)))
6136 ;
6137 if (a == NULL_TREE)
6138 {
6139 a1 = copy_node (a2);
6140 TREE_CHAIN (a1) = attributes;
6141 attributes = a1;
6142 }
6143 }
6144 }
6145 }
6146 return attributes;
6147 }
6148
6149 /* Given types T1 and T2, merge their attributes and return
6150 the result. */
6151
6152 tree
6153 merge_type_attributes (tree t1, tree t2)
6154 {
6155 return merge_attributes (TYPE_ATTRIBUTES (t1),
6156 TYPE_ATTRIBUTES (t2));
6157 }
6158
6159 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6160 the result. */
6161
6162 tree
6163 merge_decl_attributes (tree olddecl, tree newdecl)
6164 {
6165 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6166 DECL_ATTRIBUTES (newdecl));
6167 }
6168
6169 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6170
6171 /* Specialization of merge_decl_attributes for various Windows targets.
6172
6173 This handles the following situation:
6174
6175 __declspec (dllimport) int foo;
6176 int foo;
6177
6178 The second instance of `foo' nullifies the dllimport. */
6179
6180 tree
6181 merge_dllimport_decl_attributes (tree old, tree new_tree)
6182 {
6183 tree a;
6184 int delete_dllimport_p = 1;
6185
6186 /* What we need to do here is remove dllimport from `old' if it doesn't
6187 appear in `new'. dllimport behaves like extern: if a declaration is
6188 marked dllimport and a definition appears later, then the object
6189 is not dllimport'd. We also remove a `new' dllimport if the old list
6190 contains dllexport: dllexport always overrides dllimport, regardless
6191 of the order of declaration. */
6192 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6193 delete_dllimport_p = 0;
6194 else if (DECL_DLLIMPORT_P (new_tree)
6195 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6196 {
6197 DECL_DLLIMPORT_P (new_tree) = 0;
6198 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6199 "dllimport ignored", new_tree);
6200 }
6201 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6202 {
6203 /* Warn about overriding a symbol that has already been used, e.g.:
6204 extern int __attribute__ ((dllimport)) foo;
6205 int* bar () {return &foo;}
6206 int foo;
6207 */
6208 if (TREE_USED (old))
6209 {
6210 warning (0, "%q+D redeclared without dllimport attribute "
6211 "after being referenced with dll linkage", new_tree);
6212 /* If we have used a variable's address with dllimport linkage,
6213 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6214 decl may already have had TREE_CONSTANT computed.
6215 We still remove the attribute so that assembler code refers
6216 to '&foo' rather than '_imp__foo'. */
6217 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6218 DECL_DLLIMPORT_P (new_tree) = 1;
6219 }
6220
6221 /* Let an inline definition silently override the external reference,
6222 but otherwise warn about attribute inconsistency. */
6223 else if (TREE_CODE (new_tree) == VAR_DECL
6224 || !DECL_DECLARED_INLINE_P (new_tree))
6225 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6226 "previous dllimport ignored", new_tree);
6227 }
6228 else
6229 delete_dllimport_p = 0;
6230
6231 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6232
6233 if (delete_dllimport_p)
6234 a = remove_attribute ("dllimport", a);
6235
6236 return a;
6237 }
6238
6239 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6240 struct attribute_spec.handler. */
6241
6242 tree
6243 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6244 bool *no_add_attrs)
6245 {
6246 tree node = *pnode;
6247 bool is_dllimport;
6248
6249 /* These attributes may apply to structure and union types being created,
6250 but otherwise should pass to the declaration involved. */
6251 if (!DECL_P (node))
6252 {
6253 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6254 | (int) ATTR_FLAG_ARRAY_NEXT))
6255 {
6256 *no_add_attrs = true;
6257 return tree_cons (name, args, NULL_TREE);
6258 }
6259 if (TREE_CODE (node) == RECORD_TYPE
6260 || TREE_CODE (node) == UNION_TYPE)
6261 {
6262 node = TYPE_NAME (node);
6263 if (!node)
6264 return NULL_TREE;
6265 }
6266 else
6267 {
6268 warning (OPT_Wattributes, "%qE attribute ignored",
6269 name);
6270 *no_add_attrs = true;
6271 return NULL_TREE;
6272 }
6273 }
6274
6275 if (TREE_CODE (node) != FUNCTION_DECL
6276 && TREE_CODE (node) != VAR_DECL
6277 && TREE_CODE (node) != TYPE_DECL)
6278 {
6279 *no_add_attrs = true;
6280 warning (OPT_Wattributes, "%qE attribute ignored",
6281 name);
6282 return NULL_TREE;
6283 }
6284
6285 if (TREE_CODE (node) == TYPE_DECL
6286 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6287 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6288 {
6289 *no_add_attrs = true;
6290 warning (OPT_Wattributes, "%qE attribute ignored",
6291 name);
6292 return NULL_TREE;
6293 }
6294
6295 is_dllimport = is_attribute_p ("dllimport", name);
6296
6297 /* Report error on dllimport ambiguities seen now before they cause
6298 any damage. */
6299 if (is_dllimport)
6300 {
6301 /* Honor any target-specific overrides. */
6302 if (!targetm.valid_dllimport_attribute_p (node))
6303 *no_add_attrs = true;
6304
6305 else if (TREE_CODE (node) == FUNCTION_DECL
6306 && DECL_DECLARED_INLINE_P (node))
6307 {
6308 warning (OPT_Wattributes, "inline function %q+D declared as "
6309 " dllimport: attribute ignored", node);
6310 *no_add_attrs = true;
6311 }
6312 /* Like MS, treat definition of dllimported variables and
6313 non-inlined functions on declaration as syntax errors. */
6314 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6315 {
6316 error ("function %q+D definition is marked dllimport", node);
6317 *no_add_attrs = true;
6318 }
6319
6320 else if (TREE_CODE (node) == VAR_DECL)
6321 {
6322 if (DECL_INITIAL (node))
6323 {
6324 error ("variable %q+D definition is marked dllimport",
6325 node);
6326 *no_add_attrs = true;
6327 }
6328
6329 /* `extern' needn't be specified with dllimport.
6330 Specify `extern' now and hope for the best. Sigh. */
6331 DECL_EXTERNAL (node) = 1;
6332 /* Also, implicitly give global scope to dllimport'd variables
6333 declared within a function, unless they are declared static. */
6334 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6335 TREE_PUBLIC (node) = 1;
6336 }
6337
6338 if (*no_add_attrs == false)
6339 DECL_DLLIMPORT_P (node) = 1;
6340 }
6341 else if (TREE_CODE (node) == FUNCTION_DECL
6342 && DECL_DECLARED_INLINE_P (node)
6343 && flag_keep_inline_dllexport)
6344 /* An exported function, even if inline, must be emitted. */
6345 DECL_EXTERNAL (node) = 0;
6346
6347 /* Report error if symbol is not accessible at global scope. */
6348 if (!TREE_PUBLIC (node)
6349 && (TREE_CODE (node) == VAR_DECL
6350 || TREE_CODE (node) == FUNCTION_DECL))
6351 {
6352 error ("external linkage required for symbol %q+D because of "
6353 "%qE attribute", node, name);
6354 *no_add_attrs = true;
6355 }
6356
6357 /* A dllexport'd entity must have default visibility so that other
6358 program units (shared libraries or the main executable) can see
6359 it. A dllimport'd entity must have default visibility so that
6360 the linker knows that undefined references within this program
6361 unit can be resolved by the dynamic linker. */
6362 if (!*no_add_attrs)
6363 {
6364 if (DECL_VISIBILITY_SPECIFIED (node)
6365 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6366 error ("%qE implies default visibility, but %qD has already "
6367 "been declared with a different visibility",
6368 name, node);
6369 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6370 DECL_VISIBILITY_SPECIFIED (node) = 1;
6371 }
6372
6373 return NULL_TREE;
6374 }
6375
6376 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6377 \f
6378 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6379 of the various TYPE_QUAL values. */
6380
6381 static void
6382 set_type_quals (tree type, int type_quals)
6383 {
6384 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6385 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6386 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6387 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6388 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6389 }
6390
6391 /* Returns true iff unqualified CAND and BASE are equivalent. */
6392
6393 bool
6394 check_base_type (const_tree cand, const_tree base)
6395 {
6396 return (TYPE_NAME (cand) == TYPE_NAME (base)
6397 /* Apparently this is needed for Objective-C. */
6398 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6399 /* Check alignment. */
6400 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6401 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6402 TYPE_ATTRIBUTES (base)));
6403 }
6404
6405 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6406
6407 bool
6408 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6409 {
6410 return (TYPE_QUALS (cand) == type_quals
6411 && check_base_type (cand, base));
6412 }
6413
6414 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6415
6416 static bool
6417 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6418 {
6419 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6420 && TYPE_NAME (cand) == TYPE_NAME (base)
6421 /* Apparently this is needed for Objective-C. */
6422 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6423 /* Check alignment. */
6424 && TYPE_ALIGN (cand) == align
6425 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6426 TYPE_ATTRIBUTES (base)));
6427 }
6428
6429 /* This function checks to see if TYPE matches the size of one of the
6430 built-in atomic types, and returns that core atomic type. */
6431
6432 static tree
6433 find_atomic_core_type (tree type)
6434 {
6435 tree base_atomic_type;
6436
6437 /* Only handle complete types. */
6438 if (TYPE_SIZE (type) == NULL_TREE)
6439 return NULL_TREE;
6440
6441 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6442 switch (type_size)
6443 {
6444 case 8:
6445 base_atomic_type = atomicQI_type_node;
6446 break;
6447
6448 case 16:
6449 base_atomic_type = atomicHI_type_node;
6450 break;
6451
6452 case 32:
6453 base_atomic_type = atomicSI_type_node;
6454 break;
6455
6456 case 64:
6457 base_atomic_type = atomicDI_type_node;
6458 break;
6459
6460 case 128:
6461 base_atomic_type = atomicTI_type_node;
6462 break;
6463
6464 default:
6465 base_atomic_type = NULL_TREE;
6466 }
6467
6468 return base_atomic_type;
6469 }
6470
6471 /* Return a version of the TYPE, qualified as indicated by the
6472 TYPE_QUALS, if one exists. If no qualified version exists yet,
6473 return NULL_TREE. */
6474
6475 tree
6476 get_qualified_type (tree type, int type_quals)
6477 {
6478 tree t;
6479
6480 if (TYPE_QUALS (type) == type_quals)
6481 return type;
6482
6483 /* Search the chain of variants to see if there is already one there just
6484 like the one we need to have. If so, use that existing one. We must
6485 preserve the TYPE_NAME, since there is code that depends on this. */
6486 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6487 if (check_qualified_type (t, type, type_quals))
6488 return t;
6489
6490 return NULL_TREE;
6491 }
6492
6493 /* Like get_qualified_type, but creates the type if it does not
6494 exist. This function never returns NULL_TREE. */
6495
6496 tree
6497 build_qualified_type (tree type, int type_quals)
6498 {
6499 tree t;
6500
6501 /* See if we already have the appropriate qualified variant. */
6502 t = get_qualified_type (type, type_quals);
6503
6504 /* If not, build it. */
6505 if (!t)
6506 {
6507 t = build_variant_type_copy (type);
6508 set_type_quals (t, type_quals);
6509
6510 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6511 {
6512 /* See if this object can map to a basic atomic type. */
6513 tree atomic_type = find_atomic_core_type (type);
6514 if (atomic_type)
6515 {
6516 /* Ensure the alignment of this type is compatible with
6517 the required alignment of the atomic type. */
6518 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6519 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6520 }
6521 }
6522
6523 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6524 /* Propagate structural equality. */
6525 SET_TYPE_STRUCTURAL_EQUALITY (t);
6526 else if (TYPE_CANONICAL (type) != type)
6527 /* Build the underlying canonical type, since it is different
6528 from TYPE. */
6529 {
6530 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6531 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6532 }
6533 else
6534 /* T is its own canonical type. */
6535 TYPE_CANONICAL (t) = t;
6536
6537 }
6538
6539 return t;
6540 }
6541
6542 /* Create a variant of TYPE with alignment ALIGN. */
6543
6544 tree
6545 build_aligned_type (tree type, unsigned int align)
6546 {
6547 tree t;
6548
6549 if (TYPE_PACKED (type)
6550 || TYPE_ALIGN (type) == align)
6551 return type;
6552
6553 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6554 if (check_aligned_type (t, type, align))
6555 return t;
6556
6557 t = build_variant_type_copy (type);
6558 TYPE_ALIGN (t) = align;
6559
6560 return t;
6561 }
6562
6563 /* Create a new distinct copy of TYPE. The new type is made its own
6564 MAIN_VARIANT. If TYPE requires structural equality checks, the
6565 resulting type requires structural equality checks; otherwise, its
6566 TYPE_CANONICAL points to itself. */
6567
6568 tree
6569 build_distinct_type_copy (tree type)
6570 {
6571 tree t = copy_node (type);
6572
6573 TYPE_POINTER_TO (t) = 0;
6574 TYPE_REFERENCE_TO (t) = 0;
6575
6576 /* Set the canonical type either to a new equivalence class, or
6577 propagate the need for structural equality checks. */
6578 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6579 SET_TYPE_STRUCTURAL_EQUALITY (t);
6580 else
6581 TYPE_CANONICAL (t) = t;
6582
6583 /* Make it its own variant. */
6584 TYPE_MAIN_VARIANT (t) = t;
6585 TYPE_NEXT_VARIANT (t) = 0;
6586
6587 /* We do not record methods in type copies or variants,
6588 so we do not need to keep them up to date when a new
6589 method is inserted. */
6590 if (RECORD_OR_UNION_TYPE_P (t))
6591 TYPE_METHODS (t) = NULL_TREE;
6592
6593 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6594 whose TREE_TYPE is not t. This can also happen in the Ada
6595 frontend when using subtypes. */
6596
6597 return t;
6598 }
6599
6600 /* Create a new variant of TYPE, equivalent but distinct. This is so
6601 the caller can modify it. TYPE_CANONICAL for the return type will
6602 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6603 are considered equal by the language itself (or that both types
6604 require structural equality checks). */
6605
6606 tree
6607 build_variant_type_copy (tree type)
6608 {
6609 tree t, m = TYPE_MAIN_VARIANT (type);
6610
6611 t = build_distinct_type_copy (type);
6612
6613 /* Since we're building a variant, assume that it is a non-semantic
6614 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6615 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6616
6617 /* Add the new type to the chain of variants of TYPE. */
6618 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6619 TYPE_NEXT_VARIANT (m) = t;
6620 TYPE_MAIN_VARIANT (t) = m;
6621
6622 return t;
6623 }
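
/* Illustrative sketch, not part of GCC: the "search the variant chain,
   otherwise build and splice in a new variant" pattern used above by
   get_qualified_type, build_qualified_type and build_variant_type_copy,
   shown on a toy structure.  The caller is assumed to have set up the
   main variant so that it points to itself.  */

struct example_variant
{
  int quals;
  struct example_variant *main_variant;
  struct example_variant *next_variant;
};

static struct example_variant *
get_or_build_variant_example (struct example_variant *type, int quals)
{
  struct example_variant *mv = type->main_variant;
  struct example_variant *t;

  /* First see whether an equivalent variant already exists.  */
  for (t = mv; t; t = t->next_variant)
    if (t->quals == quals)
      return t;

  /* Otherwise create one and splice it into the chain after MV.  */
  t = (struct example_variant *) xmalloc (sizeof (*t));
  t->quals = quals;
  t->main_variant = mv;
  t->next_variant = mv->next_variant;
  mv->next_variant = t;
  return t;
}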
6624 \f
6625 /* Return true if the from trees in the two tree maps are equal. */
6626
6627 int
6628 tree_map_base_eq (const void *va, const void *vb)
6629 {
6630 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6631 *const b = (const struct tree_map_base *) vb;
6632 return (a->from == b->from);
6633 }
6634
6635 /* Hash a from tree in a tree_map_base. */
6636
6637 unsigned int
6638 tree_map_base_hash (const void *item)
6639 {
6640 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6641 }
6642
6643 /* Return true if this tree map structure is marked for garbage collection
6644 purposes. We simply return true if the from tree is marked, so that this
6645 structure goes away when the from tree goes away. */
6646
6647 int
6648 tree_map_base_marked_p (const void *p)
6649 {
6650 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6651 }
6652
6653 /* Hash a from tree in a tree_map. */
6654
6655 unsigned int
6656 tree_map_hash (const void *item)
6657 {
6658 return (((const struct tree_map *) item)->hash);
6659 }
6660
6661 /* Hash a from tree in a tree_decl_map. */
6662
6663 unsigned int
6664 tree_decl_map_hash (const void *item)
6665 {
6666 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6667 }
6668
6669 /* Return the initialization priority for DECL. */
6670
6671 priority_type
6672 decl_init_priority_lookup (tree decl)
6673 {
6674 symtab_node *snode = symtab_node::get (decl);
6675
6676 if (!snode)
6677 return DEFAULT_INIT_PRIORITY;
6678 return
6679 snode->get_init_priority ();
6680 }
6681
6682 /* Return the finalization priority for DECL. */
6683
6684 priority_type
6685 decl_fini_priority_lookup (tree decl)
6686 {
6687 cgraph_node *node = cgraph_node::get (decl);
6688
6689 if (!node)
6690 return DEFAULT_INIT_PRIORITY;
6691 return
6692 node->get_fini_priority ();
6693 }
6694
6695 /* Set the initialization priority for DECL to PRIORITY. */
6696
6697 void
6698 decl_init_priority_insert (tree decl, priority_type priority)
6699 {
6700 struct symtab_node *snode;
6701
6702 if (priority == DEFAULT_INIT_PRIORITY)
6703 {
6704 snode = symtab_node::get (decl);
6705 if (!snode)
6706 return;
6707 }
6708 else if (TREE_CODE (decl) == VAR_DECL)
6709 snode = varpool_node::get_create (decl);
6710 else
6711 snode = cgraph_node::get_create (decl);
6712 snode->set_init_priority (priority);
6713 }
6714
6715 /* Set the finalization priority for DECL to PRIORITY. */
6716
6717 void
6718 decl_fini_priority_insert (tree decl, priority_type priority)
6719 {
6720 struct cgraph_node *node;
6721
6722 if (priority == DEFAULT_INIT_PRIORITY)
6723 {
6724 node = cgraph_node::get (decl);
6725 if (!node)
6726 return;
6727 }
6728 else
6729 node = cgraph_node::get_create (decl);
6730 node->set_fini_priority (priority);
6731 }
6732
6733 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6734
6735 static void
6736 print_debug_expr_statistics (void)
6737 {
6738 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6739 (long) debug_expr_for_decl->size (),
6740 (long) debug_expr_for_decl->elements (),
6741 debug_expr_for_decl->collisions ());
6742 }
6743
6744 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6745
6746 static void
6747 print_value_expr_statistics (void)
6748 {
6749 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6750 (long) value_expr_for_decl->size (),
6751 (long) value_expr_for_decl->elements (),
6752 value_expr_for_decl->collisions ());
6753 }
6754
6755 /* Lookup a debug expression for FROM, and return it if we find one. */
6756
6757 tree
6758 decl_debug_expr_lookup (tree from)
6759 {
6760 struct tree_decl_map *h, in;
6761 in.base.from = from;
6762
6763 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6764 if (h)
6765 return h->to;
6766 return NULL_TREE;
6767 }
6768
6769 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6770
6771 void
6772 decl_debug_expr_insert (tree from, tree to)
6773 {
6774 struct tree_decl_map *h;
6775
6776 h = ggc_alloc<tree_decl_map> ();
6777 h->base.from = from;
6778 h->to = to;
6779 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6780 }
6781
6782 /* Lookup a value expression for FROM, and return it if we find one. */
6783
6784 tree
6785 decl_value_expr_lookup (tree from)
6786 {
6787 struct tree_decl_map *h, in;
6788 in.base.from = from;
6789
6790 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6791 if (h)
6792 return h->to;
6793 return NULL_TREE;
6794 }
6795
6796 /* Insert a mapping FROM->TO in the value expression hashtable. */
6797
6798 void
6799 decl_value_expr_insert (tree from, tree to)
6800 {
6801 struct tree_decl_map *h;
6802
6803 h = ggc_alloc<tree_decl_map> ();
6804 h->base.from = from;
6805 h->to = to;
6806 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6807 }
6808
6809 /* Lookup a vector of debug arguments for FROM, and return it if we
6810 find one. */
6811
6812 vec<tree, va_gc> **
6813 decl_debug_args_lookup (tree from)
6814 {
6815 struct tree_vec_map *h, in;
6816
6817 if (!DECL_HAS_DEBUG_ARGS_P (from))
6818 return NULL;
6819 gcc_checking_assert (debug_args_for_decl != NULL);
6820 in.base.from = from;
6821 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6822 if (h)
6823 return &h->to;
6824 return NULL;
6825 }
6826
6827 /* Insert a mapping FROM->empty vector of debug arguments in the
6828 debug arguments hashtable. */
6829
6830 vec<tree, va_gc> **
6831 decl_debug_args_insert (tree from)
6832 {
6833 struct tree_vec_map *h;
6834 tree_vec_map **loc;
6835
6836 if (DECL_HAS_DEBUG_ARGS_P (from))
6837 return decl_debug_args_lookup (from);
6838 if (debug_args_for_decl == NULL)
6839 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6840 h = ggc_alloc<tree_vec_map> ();
6841 h->base.from = from;
6842 h->to = NULL;
6843 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6844 *loc = h;
6845 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6846 return &h->to;
6847 }
6848
6849 /* Hashing of types so that we don't make duplicates.
6850 The entry point is `type_hash_canon'. */
6851
6852 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6853 with types in the TREE_VALUE slots), by adding the hash codes
6854 of the individual types. */
6855
6856 static void
6857 type_hash_list (const_tree list, inchash::hash &hstate)
6858 {
6859 const_tree tail;
6860
6861 for (tail = list; tail; tail = TREE_CHAIN (tail))
6862 if (TREE_VALUE (tail) != error_mark_node)
6863 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6864 }
6865
6866 /* These are the Hashtable callback functions. */
6867
6868 /* Returns true iff the types are equivalent. */
6869
6870 bool
6871 type_cache_hasher::equal (type_hash *a, type_hash *b)
6872 {
6873 /* First test the things that are the same for all types. */
6874 if (a->hash != b->hash
6875 || TREE_CODE (a->type) != TREE_CODE (b->type)
6876 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6877 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6878 TYPE_ATTRIBUTES (b->type))
6879 || (TREE_CODE (a->type) != COMPLEX_TYPE
6880 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6881 return 0;
6882
6883 /* Be careful about comparing arrays before and after the element type
6884 has been completed; don't compare TYPE_ALIGN unless both types are
6885 complete. */
6886 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6887 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6888 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6889 return 0;
6890
6891 switch (TREE_CODE (a->type))
6892 {
6893 case VOID_TYPE:
6894 case COMPLEX_TYPE:
6895 case POINTER_TYPE:
6896 case REFERENCE_TYPE:
6897 case NULLPTR_TYPE:
6898 return 1;
6899
6900 case VECTOR_TYPE:
6901 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6902
6903 case ENUMERAL_TYPE:
6904 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6905 && !(TYPE_VALUES (a->type)
6906 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6907 && TYPE_VALUES (b->type)
6908 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6909 && type_list_equal (TYPE_VALUES (a->type),
6910 TYPE_VALUES (b->type))))
6911 return 0;
6912
6913 /* ... fall through ... */
6914
6915 case INTEGER_TYPE:
6916 case REAL_TYPE:
6917 case BOOLEAN_TYPE:
6918 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6919 return false;
6920 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6921 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6922 TYPE_MAX_VALUE (b->type)))
6923 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6924 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6925 TYPE_MIN_VALUE (b->type))));
6926
6927 case FIXED_POINT_TYPE:
6928 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6929
6930 case OFFSET_TYPE:
6931 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6932
6933 case METHOD_TYPE:
6934 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6935 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6936 || (TYPE_ARG_TYPES (a->type)
6937 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6938 && TYPE_ARG_TYPES (b->type)
6939 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6940 && type_list_equal (TYPE_ARG_TYPES (a->type),
6941 TYPE_ARG_TYPES (b->type)))))
6942 break;
6943 return 0;
6944 case ARRAY_TYPE:
6945 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6946
6947 case RECORD_TYPE:
6948 case UNION_TYPE:
6949 case QUAL_UNION_TYPE:
6950 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6951 || (TYPE_FIELDS (a->type)
6952 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6953 && TYPE_FIELDS (b->type)
6954 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6955 && type_list_equal (TYPE_FIELDS (a->type),
6956 TYPE_FIELDS (b->type))));
6957
6958 case FUNCTION_TYPE:
6959 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6960 || (TYPE_ARG_TYPES (a->type)
6961 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6962 && TYPE_ARG_TYPES (b->type)
6963 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6964 && type_list_equal (TYPE_ARG_TYPES (a->type),
6965 TYPE_ARG_TYPES (b->type))))
6966 break;
6967 return 0;
6968
6969 default:
6970 return 0;
6971 }
6972
6973 if (lang_hooks.types.type_hash_eq != NULL)
6974 return lang_hooks.types.type_hash_eq (a->type, b->type);
6975
6976 return 1;
6977 }
6978
6979 /* Given TYPE, and HASHCODE its hash code, return the canonical
6980 object for an identical type if one already exists.
6981 Otherwise, return TYPE, and record it as the canonical object.
6982
6983 To use this function, first create a type of the sort you want.
6984 Then compute its hash code from the fields of the type that
6985 make it different from other similar types.
6986 Then call this function and use the value. */
6987
6988 tree
6989 type_hash_canon (unsigned int hashcode, tree type)
6990 {
6991 type_hash in;
6992 type_hash **loc;
6993
6994 /* The hash table only contains main variants, so ensure that's what we're
6995 being passed. */
6996 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6997
6998 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6999 must call that routine before comparing TYPE_ALIGNs. */
7000 layout_type (type);
7001
7002 in.hash = hashcode;
7003 in.type = type;
7004
7005 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7006 if (*loc)
7007 {
7008 tree t1 = ((type_hash *) *loc)->type;
7009 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7010 if (GATHER_STATISTICS)
7011 {
7012 tree_code_counts[(int) TREE_CODE (type)]--;
7013 tree_node_counts[(int) t_kind]--;
7014 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7015 }
7016 return t1;
7017 }
7018 else
7019 {
7020 struct type_hash *h;
7021
7022 h = ggc_alloc<type_hash> ();
7023 h->hash = hashcode;
7024 h->type = type;
7025 *loc = h;
7026
7027 return type;
7028 }
7029 }
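
/* Illustrative sketch, not part of GCC: the hash-consing pattern behind
   type_hash_canon, on a toy string table.  The candidate is looked up
   by its hash; if an equal object is already recorded, the canonical
   copy is returned and the candidate is simply dropped, otherwise the
   candidate becomes the canonical object.  The fixed-size bucket array
   is purely for illustration.  */

#define EXAMPLE_NBUCKETS 64

struct example_entry
{
  unsigned int hash;
  const char *obj;
  struct example_entry *next;
};

static struct example_entry *example_buckets[EXAMPLE_NBUCKETS];

static const char *
example_canon (unsigned int hash, const char *obj)
{
  struct example_entry **slot = &example_buckets[hash % EXAMPLE_NBUCKETS];

  for (struct example_entry *e = *slot; e; e = e->next)
    if (e->hash == hash && strcmp (e->obj, obj) == 0)
      return e->obj;		/* An equal object exists; reuse it.  */

  struct example_entry *e = (struct example_entry *) xmalloc (sizeof (*e));
  e->hash = hash;
  e->obj = obj;
  e->next = *slot;
  *slot = e;
  return obj;			/* OBJ becomes the canonical object.  */
}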
7030
7031 static void
7032 print_type_hash_statistics (void)
7033 {
7034 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7035 (long) type_hash_table->size (),
7036 (long) type_hash_table->elements (),
7037 type_hash_table->collisions ());
7038 }
7039
7040 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7041 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7042 by adding the hash codes of the individual attributes. */
7043
7044 static void
7045 attribute_hash_list (const_tree list, inchash::hash &hstate)
7046 {
7047 const_tree tail;
7048
7049 for (tail = list; tail; tail = TREE_CHAIN (tail))
7050 /* ??? Do we want to add in TREE_VALUE too? */
7051 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7052 }
7053
7054 /* Given two lists of attributes, return true if list L2 is
7055 equivalent to L1. */
7056
7057 int
7058 attribute_list_equal (const_tree l1, const_tree l2)
7059 {
7060 if (l1 == l2)
7061 return 1;
7062
7063 return attribute_list_contained (l1, l2)
7064 && attribute_list_contained (l2, l1);
7065 }
7066
7067 /* Given two lists of attributes, return true if list L2 is
7068 completely contained within L1. */
7069 /* ??? This would be faster if attribute names were stored in a canonicalized
7070 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7071 must be used to show these elements are equivalent (which they are). */
7072 /* ??? It's not clear that attributes with arguments will always be handled
7073 correctly. */
7074
7075 int
7076 attribute_list_contained (const_tree l1, const_tree l2)
7077 {
7078 const_tree t1, t2;
7079
7080 /* First check the obvious, maybe the lists are identical. */
7081 if (l1 == l2)
7082 return 1;
7083
7084 /* Maybe the lists are similar. */
7085 for (t1 = l1, t2 = l2;
7086 t1 != 0 && t2 != 0
7087 && get_attribute_name (t1) == get_attribute_name (t2)
7088 && TREE_VALUE (t1) == TREE_VALUE (t2);
7089 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7090 ;
7091
7092 /* Maybe the lists are equal. */
7093 if (t1 == 0 && t2 == 0)
7094 return 1;
7095
7096 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7097 {
7098 const_tree attr;
7099 /* This CONST_CAST is okay because lookup_attribute does not
7100 modify its argument and the return value is assigned to a
7101 const_tree. */
7102 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7103 CONST_CAST_TREE (l1));
7104 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7105 attr = lookup_ident_attribute (get_attribute_name (t2),
7106 TREE_CHAIN (attr)))
7107 ;
7108
7109 if (attr == NULL_TREE)
7110 return 0;
7111 }
7112
7113 return 1;
7114 }
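
/* Illustrative sketch, not part of GCC: the containment check above,
   restated over plain integer arrays.  Every element of L2 must have
   an equal element somewhere in L1; order and multiplicity of the
   elements do not matter.  */

static bool
contained_example (const int *l1, size_t n1, const int *l2, size_t n2)
{
  for (size_t i = 0; i < n2; i++)
    {
      bool found = false;
      for (size_t j = 0; j < n1; j++)
	if (l1[j] == l2[i])
	  {
	    found = true;
	    break;
	  }
      if (!found)
	return false;
    }
  return true;
}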
7115
7116 /* Given two lists of types
7117 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7118 return 1 if the lists contain the same types in the same order.
7119 Also, the TREE_PURPOSEs must match. */
7120
7121 int
7122 type_list_equal (const_tree l1, const_tree l2)
7123 {
7124 const_tree t1, t2;
7125
7126 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7127 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7128 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7129 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7130 && (TREE_TYPE (TREE_PURPOSE (t1))
7131 == TREE_TYPE (TREE_PURPOSE (t2))))))
7132 return 0;
7133
7134 return t1 == t2;
7135 }
7136
7137 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7138 given by TYPE. If the argument list accepts variable arguments,
7139 then this function counts only the ordinary arguments. */
7140
7141 int
7142 type_num_arguments (const_tree type)
7143 {
7144 int i = 0;
7145 tree t;
7146
7147 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7148 /* If the function does not take a variable number of arguments,
7149 the last element in the list will have type `void'. */
7150 if (VOID_TYPE_P (TREE_VALUE (t)))
7151 break;
7152 else
7153 ++i;
7154
7155 return i;
7156 }
7157
7158 /* Nonzero if integer constants T1 and T2
7159 represent the same constant value. */
7160
7161 int
7162 tree_int_cst_equal (const_tree t1, const_tree t2)
7163 {
7164 if (t1 == t2)
7165 return 1;
7166
7167 if (t1 == 0 || t2 == 0)
7168 return 0;
7169
7170 if (TREE_CODE (t1) == INTEGER_CST
7171 && TREE_CODE (t2) == INTEGER_CST
7172 && wi::to_widest (t1) == wi::to_widest (t2))
7173 return 1;
7174
7175 return 0;
7176 }
7177
7178 /* Return true if T is an INTEGER_CST whose numerical value (extended
7179 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7180
7181 bool
7182 tree_fits_shwi_p (const_tree t)
7183 {
7184 return (t != NULL_TREE
7185 && TREE_CODE (t) == INTEGER_CST
7186 && wi::fits_shwi_p (wi::to_widest (t)));
7187 }
7188
7189 /* Return true if T is an INTEGER_CST whose numerical value (extended
7190 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7191
7192 bool
7193 tree_fits_uhwi_p (const_tree t)
7194 {
7195 return (t != NULL_TREE
7196 && TREE_CODE (t) == INTEGER_CST
7197 && wi::fits_uhwi_p (wi::to_widest (t)));
7198 }
7199
7200 /* T is an INTEGER_CST whose numerical value (extended according to
7201 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7202 HOST_WIDE_INT. */
7203
7204 HOST_WIDE_INT
7205 tree_to_shwi (const_tree t)
7206 {
7207 gcc_assert (tree_fits_shwi_p (t));
7208 return TREE_INT_CST_LOW (t);
7209 }
7210
7211 /* T is an INTEGER_CST whose numerical value (extended according to
7212 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7213 HOST_WIDE_INT. */
7214
7215 unsigned HOST_WIDE_INT
7216 tree_to_uhwi (const_tree t)
7217 {
7218 gcc_assert (tree_fits_uhwi_p (t));
7219 return TREE_INT_CST_LOW (t);
7220 }
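
/* Illustrative sketch, not part of GCC: the intended check-then-extract
   usage of the fits/to pairs above.  SIZE_TREE and GET_SIZE_EXAMPLE are
   hypothetical names used only for this example.  */

static bool
get_size_example (tree size_tree, unsigned HOST_WIDE_INT *out)
{
  if (!tree_fits_uhwi_p (size_tree))
    return false;		/* Does not fit; the caller must cope.  */

  *out = tree_to_uhwi (size_tree);
  return true;
}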
7221
7222 /* Return the most significant (sign) bit of T. */
7223
7224 int
7225 tree_int_cst_sign_bit (const_tree t)
7226 {
7227 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7228
7229 return wi::extract_uhwi (t, bitno, 1);
7230 }
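
/* Illustrative sketch, not part of GCC: the same extraction performed
   on a value held in the low PRECISION bits of an unsigned long long,
   analogous to wi::extract_uhwi (t, precision - 1, 1) above.  */

static int
sign_bit_example (unsigned long long val, unsigned int precision)
{
  return (int) ((val >> (precision - 1)) & 1);
}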
7231
7232 /* Return an indication of the sign of the integer constant T.
7233 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7234 Note that -1 will never be returned if T's type is unsigned. */
7235
7236 int
7237 tree_int_cst_sgn (const_tree t)
7238 {
7239 if (wi::eq_p (t, 0))
7240 return 0;
7241 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7242 return 1;
7243 else if (wi::neg_p (t))
7244 return -1;
7245 else
7246 return 1;
7247 }
7248
7249 /* Return the minimum number of bits needed to represent VALUE in a
7250 signed or unsigned type; SGN says which. */
7251
7252 unsigned int
7253 tree_int_cst_min_precision (tree value, signop sgn)
7254 {
7255 /* If the value is negative, compute its negative minus 1. The latter
7256 adjustment is because the absolute value of the largest negative value
7257 is one larger than the largest positive value. This is equivalent to
7258 a bit-wise negation, so use that operation instead. */
7259
7260 if (tree_int_cst_sgn (value) < 0)
7261 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7262
7263 /* Return the number of bits needed, taking into account the fact
7264 that we need one more bit for a signed than unsigned type.
7265 If value is 0 or -1, the minimum precision is 1 no matter
7266 whether SGN is SIGNED or UNSIGNED. */
7267
7268 if (integer_zerop (value))
7269 return 1;
7270 else
7271 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7272 }
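
/* Illustrative sketch, not part of GCC: the same minimum-precision rule
   applied to a plain long long.  For example, -5 needs 4 bits as a
   signed quantity (~(-5) == 4, floor_log2 (4) == 2, 2 + 1 + 1 == 4),
   while 5 needs 3 bits unsigned and 4 bits signed.  */

static unsigned int
min_precision_example (long long value, bool is_signed)
{
  unsigned long long magnitude;
  unsigned int bits = 0;

  /* As above, a negative value is replaced by its bit-wise negation.  */
  if (value < 0)
    magnitude = ~(unsigned long long) value;
  else
    magnitude = (unsigned long long) value;

  /* 0 and -1 both need a single bit, signed or not.  */
  if (magnitude == 0)
    return 1;

  /* floor_log2 (magnitude) + 1.  */
  while (magnitude)
    {
      magnitude >>= 1;
      bits++;
    }

  return bits + (is_signed ? 1 : 0);
}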
7273
7274 /* Return truthvalue of whether T1 is the same tree structure as T2.
7275 Return 1 if they are the same.
7276 Return 0 if they are understandably different.
7277 Return -1 if either contains tree structure not understood by
7278 this function. */
7279
7280 int
7281 simple_cst_equal (const_tree t1, const_tree t2)
7282 {
7283 enum tree_code code1, code2;
7284 int cmp;
7285 int i;
7286
7287 if (t1 == t2)
7288 return 1;
7289 if (t1 == 0 || t2 == 0)
7290 return 0;
7291
7292 code1 = TREE_CODE (t1);
7293 code2 = TREE_CODE (t2);
7294
7295 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7296 {
7297 if (CONVERT_EXPR_CODE_P (code2)
7298 || code2 == NON_LVALUE_EXPR)
7299 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7300 else
7301 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7302 }
7303
7304 else if (CONVERT_EXPR_CODE_P (code2)
7305 || code2 == NON_LVALUE_EXPR)
7306 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7307
7308 if (code1 != code2)
7309 return 0;
7310
7311 switch (code1)
7312 {
7313 case INTEGER_CST:
7314 return wi::to_widest (t1) == wi::to_widest (t2);
7315
7316 case REAL_CST:
7317 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7318
7319 case FIXED_CST:
7320 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7321
7322 case STRING_CST:
7323 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7324 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7325 TREE_STRING_LENGTH (t1)));
7326
7327 case CONSTRUCTOR:
7328 {
7329 unsigned HOST_WIDE_INT idx;
7330 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7331 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7332
7333 if (vec_safe_length (v1) != vec_safe_length (v2))
7334 return false;
7335
7336 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7337 /* ??? Should we handle also fields here? */
7338 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7339 return false;
7340 return true;
7341 }
7342
7343 case SAVE_EXPR:
7344 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7345
7346 case CALL_EXPR:
7347 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7348 if (cmp <= 0)
7349 return cmp;
7350 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7351 return 0;
7352 {
7353 const_tree arg1, arg2;
7354 const_call_expr_arg_iterator iter1, iter2;
7355 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7356 arg2 = first_const_call_expr_arg (t2, &iter2);
7357 arg1 && arg2;
7358 arg1 = next_const_call_expr_arg (&iter1),
7359 arg2 = next_const_call_expr_arg (&iter2))
7360 {
7361 cmp = simple_cst_equal (arg1, arg2);
7362 if (cmp <= 0)
7363 return cmp;
7364 }
7365 return arg1 == arg2;
7366 }
7367
7368 case TARGET_EXPR:
7369 /* Special case: if either target is an unallocated VAR_DECL,
7370 it means that it's going to be unified with whatever the
7371 TARGET_EXPR is really supposed to initialize, so treat it
7372 as being equivalent to anything. */
7373 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7374 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7375 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7376 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7377 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7378 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7379 cmp = 1;
7380 else
7381 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7382
7383 if (cmp <= 0)
7384 return cmp;
7385
7386 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7387
7388 case WITH_CLEANUP_EXPR:
7389 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7390 if (cmp <= 0)
7391 return cmp;
7392
7393 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7394
7395 case COMPONENT_REF:
7396 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7397 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7398
7399 return 0;
7400
7401 case VAR_DECL:
7402 case PARM_DECL:
7403 case CONST_DECL:
7404 case FUNCTION_DECL:
7405 return 0;
7406
7407 default:
7408 break;
7409 }
7410
7411 /* This general rule works for most tree codes. All exceptions should be
7412 handled above. If this is a language-specific tree code, we can't
7413 trust what might be in the operand, so say we don't know
7414 the situation. */
7415 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7416 return -1;
7417
7418 switch (TREE_CODE_CLASS (code1))
7419 {
7420 case tcc_unary:
7421 case tcc_binary:
7422 case tcc_comparison:
7423 case tcc_expression:
7424 case tcc_reference:
7425 case tcc_statement:
7426 cmp = 1;
7427 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7428 {
7429 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7430 if (cmp <= 0)
7431 return cmp;
7432 }
7433
7434 return cmp;
7435
7436 default:
7437 return -1;
7438 }
7439 }
7440
7441 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7442 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7443 than U, respectively. */
7444
7445 int
7446 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7447 {
7448 if (tree_int_cst_sgn (t) < 0)
7449 return -1;
7450 else if (!tree_fits_uhwi_p (t))
7451 return 1;
7452 else if (TREE_INT_CST_LOW (t) == u)
7453 return 0;
7454 else if (TREE_INT_CST_LOW (t) < u)
7455 return -1;
7456 else
7457 return 1;
7458 }
7459
7460 /* Return true if SIZE represents a constant size that is in bounds of
7461 what the middle-end and the backend accept (covering not more than
7462 half of the address-space). */
7463
7464 bool
7465 valid_constant_size_p (const_tree size)
7466 {
7467 if (! tree_fits_uhwi_p (size)
7468 || TREE_OVERFLOW (size)
7469 || tree_int_cst_sign_bit (size) != 0)
7470 return false;
7471 return true;
7472 }
7473
7474 /* Return the precision of the type, or for a complex or vector type the
7475 precision of the type of its elements. */
7476
7477 unsigned int
7478 element_precision (const_tree type)
7479 {
7480 enum tree_code code = TREE_CODE (type);
7481 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7482 type = TREE_TYPE (type);
7483
7484 return TYPE_PRECISION (type);
7485 }
7486
7487 /* Return true if CODE represents an associative tree code. Otherwise
7488 return false. */
7489 bool
7490 associative_tree_code (enum tree_code code)
7491 {
7492 switch (code)
7493 {
7494 case BIT_IOR_EXPR:
7495 case BIT_AND_EXPR:
7496 case BIT_XOR_EXPR:
7497 case PLUS_EXPR:
7498 case MULT_EXPR:
7499 case MIN_EXPR:
7500 case MAX_EXPR:
7501 return true;
7502
7503 default:
7504 break;
7505 }
7506 return false;
7507 }
7508
7509 /* Return true if CODE represents a commutative tree code. Otherwise
7510 return false. */
7511 bool
7512 commutative_tree_code (enum tree_code code)
7513 {
7514 switch (code)
7515 {
7516 case PLUS_EXPR:
7517 case MULT_EXPR:
7518 case MULT_HIGHPART_EXPR:
7519 case MIN_EXPR:
7520 case MAX_EXPR:
7521 case BIT_IOR_EXPR:
7522 case BIT_XOR_EXPR:
7523 case BIT_AND_EXPR:
7524 case NE_EXPR:
7525 case EQ_EXPR:
7526 case UNORDERED_EXPR:
7527 case ORDERED_EXPR:
7528 case UNEQ_EXPR:
7529 case LTGT_EXPR:
7530 case TRUTH_AND_EXPR:
7531 case TRUTH_XOR_EXPR:
7532 case TRUTH_OR_EXPR:
7533 case WIDEN_MULT_EXPR:
7534 case VEC_WIDEN_MULT_HI_EXPR:
7535 case VEC_WIDEN_MULT_LO_EXPR:
7536 case VEC_WIDEN_MULT_EVEN_EXPR:
7537 case VEC_WIDEN_MULT_ODD_EXPR:
7538 return true;
7539
7540 default:
7541 break;
7542 }
7543 return false;
7544 }
7545
7546 /* Return true if CODE represents a ternary tree code for which the
7547 first two operands are commutative. Otherwise return false. */
7548 bool
7549 commutative_ternary_tree_code (enum tree_code code)
7550 {
7551 switch (code)
7552 {
7553 case WIDEN_MULT_PLUS_EXPR:
7554 case WIDEN_MULT_MINUS_EXPR:
7555 case DOT_PROD_EXPR:
7556 case FMA_EXPR:
7557 return true;
7558
7559 default:
7560 break;
7561 }
7562 return false;
7563 }
7564
7565 namespace inchash
7566 {
7567
7568 /* Generate a hash value for an expression. This can be used iteratively
7569 by passing a previous result as the HSTATE argument.
7570
7571 This function is intended to produce the same hash for expressions which
7572 would compare equal using operand_equal_p. */
7573 void
7574 add_expr (const_tree t, inchash::hash &hstate)
7575 {
7576 int i;
7577 enum tree_code code;
7578 enum tree_code_class tclass;
7579
7580 if (t == NULL_TREE)
7581 {
7582 hstate.merge_hash (0);
7583 return;
7584 }
7585
7586 code = TREE_CODE (t);
7587
7588 switch (code)
7589 {
7590 /* Alas, constants aren't shared, so we can't rely on pointer
7591 identity. */
7592 case VOID_CST:
7593 hstate.merge_hash (0);
7594 return;
7595 case INTEGER_CST:
7596 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7597 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7598 return;
7599 case REAL_CST:
7600 {
7601 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7602 hstate.merge_hash (val2);
7603 return;
7604 }
7605 case FIXED_CST:
7606 {
7607 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7608 hstate.merge_hash (val2);
7609 return;
7610 }
7611 case STRING_CST:
7612 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7613 return;
7614 case COMPLEX_CST:
7615 inchash::add_expr (TREE_REALPART (t), hstate);
7616 inchash::add_expr (TREE_IMAGPART (t), hstate);
7617 return;
7618 case VECTOR_CST:
7619 {
7620 unsigned i;
7621 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7622 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7623 return;
7624 }
7625 case SSA_NAME:
7626 /* We can just compare by pointer. */
7627 hstate.add_wide_int (SSA_NAME_VERSION (t));
7628 return;
7629 case PLACEHOLDER_EXPR:
7630 /* The node itself doesn't matter. */
7631 return;
7632 case TREE_LIST:
7633 /* A list of expressions, for a CALL_EXPR or as the elements of a
7634 VECTOR_CST. */
7635 for (; t; t = TREE_CHAIN (t))
7636 inchash::add_expr (TREE_VALUE (t), hstate);
7637 return;
7638 case CONSTRUCTOR:
7639 {
7640 unsigned HOST_WIDE_INT idx;
7641 tree field, value;
7642 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7643 {
7644 inchash::add_expr (field, hstate);
7645 inchash::add_expr (value, hstate);
7646 }
7647 return;
7648 }
7649 case FUNCTION_DECL:
7650 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7651 Otherwise nodes that compare equal according to operand_equal_p might
7652 get different hash codes. However, don't do this for machine specific
7653 or front end builtins, since the function code is overloaded in those
7654 cases. */
7655 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7656 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7657 {
7658 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7659 code = TREE_CODE (t);
7660 }
7661 /* FALL THROUGH */
7662 default:
7663 tclass = TREE_CODE_CLASS (code);
7664
7665 if (tclass == tcc_declaration)
7666 {
7667 /* DECLs have a unique ID. */
7668 hstate.add_wide_int (DECL_UID (t));
7669 }
7670 else
7671 {
7672 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7673
7674 hstate.add_object (code);
7675
7676 /* Don't hash the type, that can lead to having nodes which
7677 compare equal according to operand_equal_p, but which
7678 have different hash codes. */
7679 if (CONVERT_EXPR_CODE_P (code)
7680 || code == NON_LVALUE_EXPR)
7681 {
7682 /* Make sure to include signedness in the hash computation. */
7683 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7684 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7685 }
7686
7687 else if (commutative_tree_code (code))
7688 {
7689 /* It's a commutative expression. We want to hash it the same
7690 however it appears. We do this by first hashing both operands
7691 and then rehashing based on the order of their independent
7692 hashes. */
7693 inchash::hash one, two;
7694 inchash::add_expr (TREE_OPERAND (t, 0), one);
7695 inchash::add_expr (TREE_OPERAND (t, 1), two);
7696 hstate.add_commutative (one, two);
7697 }
7698 else
7699 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7700 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7701 }
7702 return;
7703 }
7704 }
7705
7706 }
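
/* Illustrative sketch, not part of GCC: one way to combine two operand
   hashes so that the result does not depend on operand order, mirroring
   the intent of hstate.add_commutative above (this is not the actual
   implementation).  Ordering the two inputs first makes
   commutative_hash_example (a, b) == commutative_hash_example (b, a).  */

static unsigned int
commutative_hash_example (unsigned int h1, unsigned int h2)
{
  unsigned int lo = h1 < h2 ? h1 : h2;
  unsigned int hi = h1 < h2 ? h2 : h1;

  /* Any reasonable mixing step works once the order is fixed.  */
  unsigned int h = lo * 0x9e3779b9u;
  h ^= hi + 0x9e3779b9u + (h << 6) + (h >> 2);
  return h;
}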
7707
7708 /* Constructors for pointer, array and function types.
7709 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7710 constructed by language-dependent code, not here.) */
7711
7712 /* Construct, lay out and return the type of pointers to TO_TYPE with
7713 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7714 reference all of memory. If such a type has already been
7715 constructed, reuse it. */
7716
7717 tree
7718 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7719 bool can_alias_all)
7720 {
7721 tree t;
7722 bool could_alias = can_alias_all;
7723
7724 if (to_type == error_mark_node)
7725 return error_mark_node;
7726
7727 /* If the pointed-to type has the may_alias attribute set, force
7728 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7729 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7730 can_alias_all = true;
7731
7732 /* In some cases, languages will have things that aren't a POINTER_TYPE
7733 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7734 In that case, return that type without regard to the rest of our
7735 operands.
7736
7737 ??? This is a kludge, but consistent with the way this function has
7738 always operated and there doesn't seem to be a good way to avoid this
7739 at the moment. */
7740 if (TYPE_POINTER_TO (to_type) != 0
7741 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7742 return TYPE_POINTER_TO (to_type);
7743
7744 /* First, if we already have a type for pointers to TO_TYPE and it's
7745 the proper mode, use it. */
7746 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7747 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7748 return t;
7749
7750 t = make_node (POINTER_TYPE);
7751
7752 TREE_TYPE (t) = to_type;
7753 SET_TYPE_MODE (t, mode);
7754 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7755 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7756 TYPE_POINTER_TO (to_type) = t;
7757
7758 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7759 SET_TYPE_STRUCTURAL_EQUALITY (t);
7760 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7761 TYPE_CANONICAL (t)
7762 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7763 mode, false);
7764
7765 /* Lay out the type. This function has many callers that are concerned
7766 with expression-construction, and this simplifies them all. */
7767 layout_type (t);
7768
7769 return t;
7770 }
7771
7772 /* By default build pointers in ptr_mode. */
7773
7774 tree
7775 build_pointer_type (tree to_type)
7776 {
7777 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7778 : TYPE_ADDR_SPACE (to_type);
7779 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7780 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7781 }
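
/* For illustration only: a front end that wants the type "int *" in the
   default address space can simply write

     tree int_ptr_type = build_pointer_type (integer_type_node);

   Repeated requests for the same TO_TYPE are expected to hand back the
   same node, because the chain rooted at TYPE_POINTER_TO is searched
   before a new POINTER_TYPE is created.  */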
7782
7783 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7784
7785 tree
7786 build_reference_type_for_mode (tree to_type, machine_mode mode,
7787 bool can_alias_all)
7788 {
7789 tree t;
7790 bool could_alias = can_alias_all;
7791
7792 if (to_type == error_mark_node)
7793 return error_mark_node;
7794
7795 /* If the pointed-to type has the may_alias attribute set, force
7796 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7797 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7798 can_alias_all = true;
7799
7800 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7801 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7802 In that case, return that type without regard to the rest of our
7803 operands.
7804
7805 ??? This is a kludge, but consistent with the way this function has
7806 always operated and there doesn't seem to be a good way to avoid this
7807 at the moment. */
7808 if (TYPE_REFERENCE_TO (to_type) != 0
7809 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7810 return TYPE_REFERENCE_TO (to_type);
7811
7812 /* First, if we already have a type for pointers to TO_TYPE and it's
7813 the proper mode, use it. */
7814 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7815 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7816 return t;
7817
7818 t = make_node (REFERENCE_TYPE);
7819
7820 TREE_TYPE (t) = to_type;
7821 SET_TYPE_MODE (t, mode);
7822 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7823 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7824 TYPE_REFERENCE_TO (to_type) = t;
7825
7826 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7827 SET_TYPE_STRUCTURAL_EQUALITY (t);
7828 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7829 TYPE_CANONICAL (t)
7830 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7831 mode, false);
7832
7833 layout_type (t);
7834
7835 return t;
7836 }
7837
7838
7839 /* Build the node for the type of references-to-TO_TYPE by default
7840 in ptr_mode. */
7841
7842 tree
7843 build_reference_type (tree to_type)
7844 {
7845 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7846 : TYPE_ADDR_SPACE (to_type);
7847 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7848 return build_reference_type_for_mode (to_type, pointer_mode, false);
7849 }
7850
7851 #define MAX_INT_CACHED_PREC \
7852 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7853 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7854
7855 /* Builds a signed or unsigned integer type of precision PRECISION.
7856 Used for C bitfields whose precision does not match that of
7857 built-in target types. */
7858 tree
7859 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7860 int unsignedp)
7861 {
7862 tree itype, ret;
7863
7864 if (unsignedp)
7865 unsignedp = MAX_INT_CACHED_PREC + 1;
7866
7867 if (precision <= MAX_INT_CACHED_PREC)
7868 {
7869 itype = nonstandard_integer_type_cache[precision + unsignedp];
7870 if (itype)
7871 return itype;
7872 }
7873
7874 itype = make_node (INTEGER_TYPE);
7875 TYPE_PRECISION (itype) = precision;
7876
7877 if (unsignedp)
7878 fixup_unsigned_type (itype);
7879 else
7880 fixup_signed_type (itype);
7881
7882 ret = itype;
7883 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7884 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7885 if (precision <= MAX_INT_CACHED_PREC)
7886 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7887
7888 return ret;
7889 }
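
/* For illustration only: a 24-bit unsigned type, as needed for a bit-field
   whose precision matches no built-in target type, could be obtained with

     tree uint24_type = build_nonstandard_integer_type (24, 1);

   Precisions up to MAX_INT_CACHED_PREC are answered out of
   nonstandard_integer_type_cache on subsequent requests.  */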
7890
7891 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7892 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7893 is true, reuse such a type that has already been constructed. */
7894
7895 static tree
7896 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7897 {
7898 tree itype = make_node (INTEGER_TYPE);
7899 inchash::hash hstate;
7900
7901 TREE_TYPE (itype) = type;
7902
7903 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7904 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7905
7906 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7907 SET_TYPE_MODE (itype, TYPE_MODE (type));
7908 TYPE_SIZE (itype) = TYPE_SIZE (type);
7909 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7910 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7911 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7912
7913 if (!shared)
7914 return itype;
7915
7916 if ((TYPE_MIN_VALUE (itype)
7917 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7918 || (TYPE_MAX_VALUE (itype)
7919 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7920 {
7921 /* Since we cannot reliably merge this type, we need to compare it using
7922 structural equality checks. */
7923 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7924 return itype;
7925 }
7926
7927 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7928 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7929 hstate.merge_hash (TYPE_HASH (type));
7930 itype = type_hash_canon (hstate.end (), itype);
7931
7932 return itype;
7933 }
7934
7935 /* Wrapper around build_range_type_1 with SHARED set to true. */
7936
7937 tree
7938 build_range_type (tree type, tree lowval, tree highval)
7939 {
7940 return build_range_type_1 (type, lowval, highval, true);
7941 }
7942
7943 /* Wrapper around build_range_type_1 with SHARED set to false. */
7944
7945 tree
7946 build_nonshared_range_type (tree type, tree lowval, tree highval)
7947 {
7948 return build_range_type_1 (type, lowval, highval, false);
7949 }
7950
7951 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7952 MAXVAL should be the maximum value in the domain
7953 (one less than the length of the array).
7954
7955 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7956    We don't enforce this limit; that is up to the caller (e.g. the language
7957    front end).
7957 The limit exists because the result is a signed type and we don't handle
7958 sizes that use more than one HOST_WIDE_INT. */
7959
7960 tree
7961 build_index_type (tree maxval)
7962 {
7963 return build_range_type (sizetype, size_zero_node, maxval);
7964 }
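
/* For illustration only: the TYPE_DOMAIN of a ten-element array is the
   index range 0 .. 9, which could be built as

     tree domain = build_index_type (size_int (9));

   i.e. a shared range type over sizetype running from zero to MAXVAL.  */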
7965
7966 /* Return true if the debug information for TYPE, a subtype, should be emitted
7967 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7968 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7969 debug info and doesn't reflect the source code. */
7970
7971 bool
7972 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7973 {
7974 tree base_type = TREE_TYPE (type), low, high;
7975
7976 /* Subrange types have a base type which is an integral type. */
7977 if (!INTEGRAL_TYPE_P (base_type))
7978 return false;
7979
7980 /* Get the real bounds of the subtype. */
7981 if (lang_hooks.types.get_subrange_bounds)
7982 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7983 else
7984 {
7985 low = TYPE_MIN_VALUE (type);
7986 high = TYPE_MAX_VALUE (type);
7987 }
7988
7989 /* If the type and its base type have the same representation and the same
7990 name, then the type is not a subrange but a copy of the base type. */
7991 if ((TREE_CODE (base_type) == INTEGER_TYPE
7992 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7993 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7994 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7995 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7996 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7997 return false;
7998
7999 if (lowval)
8000 *lowval = low;
8001 if (highval)
8002 *highval = high;
8003 return true;
8004 }
8005
8006 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8007 and number of elements specified by the range of values of INDEX_TYPE.
8008 If SHARED is true, reuse such a type that has already been constructed. */
8009
8010 static tree
8011 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8012 {
8013 tree t;
8014
8015 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8016 {
8017 error ("arrays of functions are not meaningful");
8018 elt_type = integer_type_node;
8019 }
8020
8021 t = make_node (ARRAY_TYPE);
8022 TREE_TYPE (t) = elt_type;
8023 TYPE_DOMAIN (t) = index_type;
8024 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8025 layout_type (t);
8026
8027 /* If the element type is incomplete at this point we get marked for
8028 structural equality. Do not record these types in the canonical
8029 type hashtable. */
8030 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8031 return t;
8032
8033 if (shared)
8034 {
8035 inchash::hash hstate;
8036 hstate.add_object (TYPE_HASH (elt_type));
8037 if (index_type)
8038 hstate.add_object (TYPE_HASH (index_type));
8039 t = type_hash_canon (hstate.end (), t);
8040 }
8041
8042 if (TYPE_CANONICAL (t) == t)
8043 {
8044 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8045 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8046 SET_TYPE_STRUCTURAL_EQUALITY (t);
8047 else if (TYPE_CANONICAL (elt_type) != elt_type
8048 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8049 TYPE_CANONICAL (t)
8050 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8051 index_type
8052 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8053 shared);
8054 }
8055
8056 return t;
8057 }
8058
8059 /* Wrapper around build_array_type_1 with SHARED set to true. */
8060
8061 tree
8062 build_array_type (tree elt_type, tree index_type)
8063 {
8064 return build_array_type_1 (elt_type, index_type, true);
8065 }
8066
8067 /* Wrapper around build_array_type_1 with SHARED set to false. */
8068
8069 tree
8070 build_nonshared_array_type (tree elt_type, tree index_type)
8071 {
8072 return build_array_type_1 (elt_type, index_type, false);
8073 }
8074
8075 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8076 sizetype. */
8077
8078 tree
8079 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8080 {
8081 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8082 }
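
/* For illustration only: the type "int[10]" can be requested either
   directly or through the convenience wrapper above:

     tree a1 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));
     tree a2 = build_array_type_nelts (integer_type_node, 10);

   Both calls should yield the same shared node, since shared array types
   are canonicalized through type_hash_canon.  */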
8083
8084 /* Recursively examines the array elements of TYPE, until a non-array
8085 element type is found. */
8086
8087 tree
8088 strip_array_types (tree type)
8089 {
8090 while (TREE_CODE (type) == ARRAY_TYPE)
8091 type = TREE_TYPE (type);
8092
8093 return type;
8094 }
8095
8096 /* Computes the canonical argument types from the argument type list
8097 ARGTYPES.
8098
8099 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8100 on entry to this function, or if any of the ARGTYPES are
8101 structural.
8102
8103 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8104 true on entry to this function, or if any of the ARGTYPES are
8105 non-canonical.
8106
8107 Returns a canonical argument list, which may be ARGTYPES when the
8108 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8109 true) or would not differ from ARGTYPES. */
8110
8111 static tree
8112 maybe_canonicalize_argtypes (tree argtypes,
8113 bool *any_structural_p,
8114 bool *any_noncanonical_p)
8115 {
8116 tree arg;
8117 bool any_noncanonical_argtypes_p = false;
8118
8119 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8120 {
8121 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8122 /* Fail gracefully by stating that the type is structural. */
8123 *any_structural_p = true;
8124 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8125 *any_structural_p = true;
8126 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8127 || TREE_PURPOSE (arg))
8128 /* If the argument has a default argument, we consider it
8129 non-canonical even though the type itself is canonical.
8130 That way, different variants of function and method types
8131 with default arguments will all point to the variant with
8132 no defaults as their canonical type. */
8133 any_noncanonical_argtypes_p = true;
8134 }
8135
8136 if (*any_structural_p)
8137 return argtypes;
8138
8139 if (any_noncanonical_argtypes_p)
8140 {
8141 /* Build the canonical list of argument types. */
8142 tree canon_argtypes = NULL_TREE;
8143 bool is_void = false;
8144
8145 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8146 {
8147 if (arg == void_list_node)
8148 is_void = true;
8149 else
8150 canon_argtypes = tree_cons (NULL_TREE,
8151 TYPE_CANONICAL (TREE_VALUE (arg)),
8152 canon_argtypes);
8153 }
8154
8155 canon_argtypes = nreverse (canon_argtypes);
8156 if (is_void)
8157 canon_argtypes = chainon (canon_argtypes, void_list_node);
8158
8159 /* There is a non-canonical type. */
8160 *any_noncanonical_p = true;
8161 return canon_argtypes;
8162 }
8163
8164 /* The canonical argument types are the same as ARGTYPES. */
8165 return argtypes;
8166 }
8167
8168 /* Construct, lay out and return
8169 the type of functions returning type VALUE_TYPE
8170 given arguments of types ARG_TYPES.
8171 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8172 are data type nodes for the arguments of the function.
8173 If such a type has already been constructed, reuse it. */
8174
8175 tree
8176 build_function_type (tree value_type, tree arg_types)
8177 {
8178 tree t;
8179 inchash::hash hstate;
8180 bool any_structural_p, any_noncanonical_p;
8181 tree canon_argtypes;
8182
8183 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8184 {
8185 error ("function return type cannot be function");
8186 value_type = integer_type_node;
8187 }
8188
8189 /* Make a node of the sort we want. */
8190 t = make_node (FUNCTION_TYPE);
8191 TREE_TYPE (t) = value_type;
8192 TYPE_ARG_TYPES (t) = arg_types;
8193
8194 /* If we already have such a type, use the old one. */
8195 hstate.add_object (TYPE_HASH (value_type));
8196 type_hash_list (arg_types, hstate);
8197 t = type_hash_canon (hstate.end (), t);
8198
8199 /* Set up the canonical type. */
8200 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8201 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8202 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8203 &any_structural_p,
8204 &any_noncanonical_p);
8205 if (any_structural_p)
8206 SET_TYPE_STRUCTURAL_EQUALITY (t);
8207 else if (any_noncanonical_p)
8208 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8209 canon_argtypes);
8210
8211 if (!COMPLETE_TYPE_P (t))
8212 layout_type (t);
8213 return t;
8214 }
8215
8216 /* Build a function type. The RETURN_TYPE is the type returned by the
8217    function.  If VAARGS is set, no void_type_node is appended to
8218    the list.  ARGP must always be terminated by a NULL_TREE.  */
8219
8220 static tree
8221 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8222 {
8223 tree t, args, last;
8224
8225 t = va_arg (argp, tree);
8226 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8227 args = tree_cons (NULL_TREE, t, args);
8228
8229 if (vaargs)
8230 {
8231 last = args;
8232 if (args != NULL_TREE)
8233 args = nreverse (args);
8234 gcc_assert (last != void_list_node);
8235 }
8236 else if (args == NULL_TREE)
8237 args = void_list_node;
8238 else
8239 {
8240 last = args;
8241 args = nreverse (args);
8242 TREE_CHAIN (last) = void_list_node;
8243 }
8244 args = build_function_type (return_type, args);
8245
8246 return args;
8247 }
8248
8249 /* Build a function type. The RETURN_TYPE is the type returned by the
8250 function. If additional arguments are provided, they are
8251 additional argument types. The list of argument types must always
8252 be terminated by NULL_TREE. */
8253
8254 tree
8255 build_function_type_list (tree return_type, ...)
8256 {
8257 tree args;
8258 va_list p;
8259
8260 va_start (p, return_type);
8261 args = build_function_type_list_1 (false, return_type, p);
8262 va_end (p);
8263 return args;
8264 }
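
/* For illustration only: the type of a function taking a double and a
   "char *" and returning int could be built with

     tree fntype
       = build_function_type_list (integer_type_node,
                                   double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   The trailing NULL_TREE is required; it terminates the argument list, and
   because this is the non-varargs entry point void_list_node is appended
   so the resulting FUNCTION_TYPE is not variadic.  */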
8265
8266 /* Build a variable argument function type. The RETURN_TYPE is the
8267 type returned by the function. If additional arguments are provided,
8268 they are additional argument types. The list of argument types must
8269 always be terminated by NULL_TREE. */
8270
8271 tree
8272 build_varargs_function_type_list (tree return_type, ...)
8273 {
8274 tree args;
8275 va_list p;
8276
8277 va_start (p, return_type);
8278 args = build_function_type_list_1 (true, return_type, p);
8279 va_end (p);
8280
8281 return args;
8282 }
8283
8284 /* Build a function type. RETURN_TYPE is the type returned by the
8285 function; VAARGS indicates whether the function takes varargs. The
8286 function takes N named arguments, the types of which are provided in
8287 ARG_TYPES. */
8288
8289 static tree
8290 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8291 tree *arg_types)
8292 {
8293 int i;
8294 tree t = vaargs ? NULL_TREE : void_list_node;
8295
8296 for (i = n - 1; i >= 0; i--)
8297 t = tree_cons (NULL_TREE, arg_types[i], t);
8298
8299 return build_function_type (return_type, t);
8300 }
8301
8302 /* Build a function type. RETURN_TYPE is the type returned by the
8303 function. The function takes N named arguments, the types of which
8304 are provided in ARG_TYPES. */
8305
8306 tree
8307 build_function_type_array (tree return_type, int n, tree *arg_types)
8308 {
8309 return build_function_type_array_1 (false, return_type, n, arg_types);
8310 }
8311
8312 /* Build a variable argument function type. RETURN_TYPE is the type
8313 returned by the function. The function takes N named arguments, the
8314 types of which are provided in ARG_TYPES. */
8315
8316 tree
8317 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8318 {
8319 return build_function_type_array_1 (true, return_type, n, arg_types);
8320 }
8321
8322 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8323 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8324 for the method. An implicit additional parameter (of type
8325 pointer-to-BASETYPE) is added to the ARGTYPES. */
8326
8327 tree
8328 build_method_type_directly (tree basetype,
8329 tree rettype,
8330 tree argtypes)
8331 {
8332 tree t;
8333 tree ptype;
8334 inchash::hash hstate;
8335 bool any_structural_p, any_noncanonical_p;
8336 tree canon_argtypes;
8337
8338 /* Make a node of the sort we want. */
8339 t = make_node (METHOD_TYPE);
8340
8341 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8342 TREE_TYPE (t) = rettype;
8343 ptype = build_pointer_type (basetype);
8344
8345 /* The actual arglist for this function includes a "hidden" argument
8346 which is "this". Put it into the list of argument types. */
8347 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8348 TYPE_ARG_TYPES (t) = argtypes;
8349
8350 /* If we already have such a type, use the old one. */
8351 hstate.add_object (TYPE_HASH (basetype));
8352 hstate.add_object (TYPE_HASH (rettype));
8353 type_hash_list (argtypes, hstate);
8354 t = type_hash_canon (hstate.end (), t);
8355
8356 /* Set up the canonical type. */
8357 any_structural_p
8358 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8359 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8360 any_noncanonical_p
8361 = (TYPE_CANONICAL (basetype) != basetype
8362 || TYPE_CANONICAL (rettype) != rettype);
8363 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8364 &any_structural_p,
8365 &any_noncanonical_p);
8366 if (any_structural_p)
8367 SET_TYPE_STRUCTURAL_EQUALITY (t);
8368 else if (any_noncanonical_p)
8369 TYPE_CANONICAL (t)
8370 = build_method_type_directly (TYPE_CANONICAL (basetype),
8371 TYPE_CANONICAL (rettype),
8372 canon_argtypes);
8373 if (!COMPLETE_TYPE_P (t))
8374 layout_type (t);
8375
8376 return t;
8377 }
8378
8379 /* Construct, lay out and return the type of methods belonging to class
8380 BASETYPE and whose arguments and values are described by TYPE.
8381 If that type exists already, reuse it.
8382 TYPE must be a FUNCTION_TYPE node. */
8383
8384 tree
8385 build_method_type (tree basetype, tree type)
8386 {
8387 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8388
8389 return build_method_type_directly (basetype,
8390 TREE_TYPE (type),
8391 TYPE_ARG_TYPES (type));
8392 }
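
/* For illustration only: given a RECORD_TYPE for some class C and a
   FUNCTION_TYPE such as "int (double)", a C++-like front end could obtain
   the corresponding METHOD_TYPE with

     tree mtype = build_method_type (c_record_type, fntype);

   where c_record_type and fntype stand for trees the caller already has.
   The implicit "this" argument, of type pointer-to-C, is prepended to the
   argument list automatically.  */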
8393
8394 /* Construct, lay out and return the type of offsets to a value
8395 of type TYPE, within an object of type BASETYPE.
8396 If a suitable offset type exists already, reuse it. */
8397
8398 tree
8399 build_offset_type (tree basetype, tree type)
8400 {
8401 tree t;
8402 inchash::hash hstate;
8403
8404 /* Make a node of the sort we want. */
8405 t = make_node (OFFSET_TYPE);
8406
8407 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8408 TREE_TYPE (t) = type;
8409
8410 /* If we already have such a type, use the old one. */
8411 hstate.add_object (TYPE_HASH (basetype));
8412 hstate.add_object (TYPE_HASH (type));
8413 t = type_hash_canon (hstate.end (), t);
8414
8415 if (!COMPLETE_TYPE_P (t))
8416 layout_type (t);
8417
8418 if (TYPE_CANONICAL (t) == t)
8419 {
8420 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8421 || TYPE_STRUCTURAL_EQUALITY_P (type))
8422 SET_TYPE_STRUCTURAL_EQUALITY (t);
8423 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8424 || TYPE_CANONICAL (type) != type)
8425 TYPE_CANONICAL (t)
8426 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8427 TYPE_CANONICAL (type));
8428 }
8429
8430 return t;
8431 }
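
/* For illustration only: for a C++ pointer-to-data-member such as
   "int C::*", a front end could build the underlying OFFSET_TYPE with

     tree ptrmem = build_offset_type (c_record_type, integer_type_node);

   where c_record_type stands for the RECORD_TYPE of class C.  */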
8432
8433 /* Create a complex type whose components are COMPONENT_TYPE. */
8434
8435 tree
8436 build_complex_type (tree component_type)
8437 {
8438 tree t;
8439 inchash::hash hstate;
8440
8441 gcc_assert (INTEGRAL_TYPE_P (component_type)
8442 || SCALAR_FLOAT_TYPE_P (component_type)
8443 || FIXED_POINT_TYPE_P (component_type));
8444
8445 /* Make a node of the sort we want. */
8446 t = make_node (COMPLEX_TYPE);
8447
8448 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8449
8450 /* If we already have such a type, use the old one. */
8451 hstate.add_object (TYPE_HASH (component_type));
8452 t = type_hash_canon (hstate.end (), t);
8453
8454 if (!COMPLETE_TYPE_P (t))
8455 layout_type (t);
8456
8457 if (TYPE_CANONICAL (t) == t)
8458 {
8459 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8460 SET_TYPE_STRUCTURAL_EQUALITY (t);
8461 else if (TYPE_CANONICAL (component_type) != component_type)
8462 TYPE_CANONICAL (t)
8463 = build_complex_type (TYPE_CANONICAL (component_type));
8464 }
8465
8466 /* We need to create a name, since complex is a fundamental type. */
8467 if (! TYPE_NAME (t))
8468 {
8469 const char *name;
8470 if (component_type == char_type_node)
8471 name = "complex char";
8472 else if (component_type == signed_char_type_node)
8473 name = "complex signed char";
8474 else if (component_type == unsigned_char_type_node)
8475 name = "complex unsigned char";
8476 else if (component_type == short_integer_type_node)
8477 name = "complex short int";
8478 else if (component_type == short_unsigned_type_node)
8479 name = "complex short unsigned int";
8480 else if (component_type == integer_type_node)
8481 name = "complex int";
8482 else if (component_type == unsigned_type_node)
8483 name = "complex unsigned int";
8484 else if (component_type == long_integer_type_node)
8485 name = "complex long int";
8486 else if (component_type == long_unsigned_type_node)
8487 name = "complex long unsigned int";
8488 else if (component_type == long_long_integer_type_node)
8489 name = "complex long long int";
8490 else if (component_type == long_long_unsigned_type_node)
8491 name = "complex long long unsigned int";
8492 else
8493 name = 0;
8494
8495 if (name != 0)
8496 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8497 get_identifier (name), t);
8498 }
8499
8500 return build_qualified_type (t, TYPE_QUALS (component_type));
8501 }
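
/* For illustration only: the C99 type "_Complex double" corresponds to

     tree cdbl = build_complex_type (double_type_node);

   which will normally be complex_double_type_node itself, because the hash
   lookup above reuses the node already built during initialization.  */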
8502
8503 /* If TYPE is a real or complex floating-point type and the target
8504 does not directly support arithmetic on TYPE then return the wider
8505 type to be used for arithmetic on TYPE. Otherwise, return
8506 NULL_TREE. */
8507
8508 tree
8509 excess_precision_type (tree type)
8510 {
8511 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8512 {
8513 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8514 switch (TREE_CODE (type))
8515 {
8516 case REAL_TYPE:
8517 switch (flt_eval_method)
8518 {
8519 case 1:
8520 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8521 return double_type_node;
8522 break;
8523 case 2:
8524 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8525 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8526 return long_double_type_node;
8527 break;
8528 default:
8529 gcc_unreachable ();
8530 }
8531 break;
8532 case COMPLEX_TYPE:
8533 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8534 return NULL_TREE;
8535 switch (flt_eval_method)
8536 {
8537 case 1:
8538 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8539 return complex_double_type_node;
8540 break;
8541 case 2:
8542 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8543 || (TYPE_MODE (TREE_TYPE (type))
8544 == TYPE_MODE (double_type_node)))
8545 return complex_long_double_type_node;
8546 break;
8547 default:
8548 gcc_unreachable ();
8549 }
8550 break;
8551 default:
8552 break;
8553 }
8554 }
8555 return NULL_TREE;
8556 }
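
/* For illustration only: on a target whose FLT_EVAL_METHOD is 2 (x87-style
   floating point, say) and with standard excess precision in effect,

     tree wider = excess_precision_type (float_type_node);

   returns long_double_type_node, so intermediate float arithmetic is
   carried out in the wider evaluation format.  */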
8557 \f
8558 /* Return OP, stripped of any conversions to wider types as much as is safe.
8559 Converting the value back to OP's type makes a value equivalent to OP.
8560
8561 If FOR_TYPE is nonzero, we return a value which, if converted to
8562 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8563
8564 OP must have integer, real or enumeral type. Pointers are not allowed!
8565
8566 There are some cases where the obvious value we could return
8567 would regenerate to OP if converted to OP's type,
8568 but would not extend like OP to wider types.
8569 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8570 For example, if OP is (unsigned short)(signed char)-1,
8571 we avoid returning (signed char)-1 if FOR_TYPE is int,
8572 even though extending that to an unsigned short would regenerate OP,
8573 since the result of extending (signed char)-1 to (int)
8574 is different from (int) OP. */
8575
8576 tree
8577 get_unwidened (tree op, tree for_type)
8578 {
8579 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8580 tree type = TREE_TYPE (op);
8581 unsigned final_prec
8582 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8583 int uns
8584 = (for_type != 0 && for_type != type
8585 && final_prec > TYPE_PRECISION (type)
8586 && TYPE_UNSIGNED (type));
8587 tree win = op;
8588
8589 while (CONVERT_EXPR_P (op))
8590 {
8591 int bitschange;
8592
8593 /* TYPE_PRECISION on vector types has different meaning
8594 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8595 so avoid them here. */
8596 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8597 break;
8598
8599 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8600 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8601
8602       /* Truncations are many-one so cannot be removed, unless we are
8603          later going to truncate down even further.  */
8604 if (bitschange < 0
8605 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8606 break;
8607
8608 /* See what's inside this conversion. If we decide to strip it,
8609 we will set WIN. */
8610 op = TREE_OPERAND (op, 0);
8611
8612 /* If we have not stripped any zero-extensions (uns is 0),
8613 we can strip any kind of extension.
8614 If we have previously stripped a zero-extension,
8615 only zero-extensions can safely be stripped.
8616 Any extension can be stripped if the bits it would produce
8617 are all going to be discarded later by truncating to FOR_TYPE. */
8618
8619 if (bitschange > 0)
8620 {
8621 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8622 win = op;
8623 /* TYPE_UNSIGNED says whether this is a zero-extension.
8624 Let's avoid computing it if it does not affect WIN
8625 and if UNS will not be needed again. */
8626 if ((uns
8627 || CONVERT_EXPR_P (op))
8628 && TYPE_UNSIGNED (TREE_TYPE (op)))
8629 {
8630 uns = 1;
8631 win = op;
8632 }
8633 }
8634 }
8635
8636 /* If we finally reach a constant see if it fits in for_type and
8637 in that case convert it. */
8638 if (for_type
8639 && TREE_CODE (win) == INTEGER_CST
8640 && TREE_TYPE (win) != for_type
8641 && int_fits_type_p (win, for_type))
8642 win = fold_convert (for_type, win);
8643
8644 return win;
8645 }
8646 \f
8647 /* Return OP or a simpler expression for a narrower value
8648 which can be sign-extended or zero-extended to give back OP.
8649 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8650 or 0 if the value should be sign-extended. */
8651
8652 tree
8653 get_narrower (tree op, int *unsignedp_ptr)
8654 {
8655 int uns = 0;
8656 int first = 1;
8657 tree win = op;
8658 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8659
8660 while (TREE_CODE (op) == NOP_EXPR)
8661 {
8662 int bitschange
8663 = (TYPE_PRECISION (TREE_TYPE (op))
8664 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8665
8666 /* Truncations are many-one so cannot be removed. */
8667 if (bitschange < 0)
8668 break;
8669
8670 /* See what's inside this conversion. If we decide to strip it,
8671 we will set WIN. */
8672
8673 if (bitschange > 0)
8674 {
8675 op = TREE_OPERAND (op, 0);
8676 /* An extension: the outermost one can be stripped,
8677 but remember whether it is zero or sign extension. */
8678 if (first)
8679 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8680 /* Otherwise, if a sign extension has been stripped,
8681 only sign extensions can now be stripped;
8682 if a zero extension has been stripped, only zero-extensions. */
8683 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8684 break;
8685 first = 0;
8686 }
8687 else /* bitschange == 0 */
8688 {
8689 /* A change in nominal type can always be stripped, but we must
8690 preserve the unsignedness. */
8691 if (first)
8692 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8693 first = 0;
8694 op = TREE_OPERAND (op, 0);
8695 /* Keep trying to narrow, but don't assign op to win if it
8696 would turn an integral type into something else. */
8697 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8698 continue;
8699 }
8700
8701 win = op;
8702 }
8703
8704 if (TREE_CODE (op) == COMPONENT_REF
8705 /* Since type_for_size always gives an integer type. */
8706 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8707 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8708 /* Ensure field is laid out already. */
8709 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8710 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8711 {
8712 unsigned HOST_WIDE_INT innerprec
8713 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8714 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8715 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8716 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8717
8718 /* We can get this structure field in a narrower type that fits it,
8719 but the resulting extension to its nominal type (a fullword type)
8720 must satisfy the same conditions as for other extensions.
8721
8722 Do this only for fields that are aligned (not bit-fields),
8723 because when bit-field insns will be used there is no
8724 advantage in doing this. */
8725
8726 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8727 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8728 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8729 && type != 0)
8730 {
8731 if (first)
8732 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8733 win = fold_convert (type, op);
8734 }
8735 }
8736
8737 *unsignedp_ptr = uns;
8738 return win;
8739 }
8740 \f
8741 /* Returns true if integer constant C has a value that is permissible
8742 for type TYPE (an INTEGER_TYPE). */
8743
8744 bool
8745 int_fits_type_p (const_tree c, const_tree type)
8746 {
8747 tree type_low_bound, type_high_bound;
8748 bool ok_for_low_bound, ok_for_high_bound;
8749 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8750
8751 retry:
8752 type_low_bound = TYPE_MIN_VALUE (type);
8753 type_high_bound = TYPE_MAX_VALUE (type);
8754
8755 /* If at least one bound of the type is a constant integer, we can check
8756 ourselves and maybe make a decision. If no such decision is possible, but
8757 this type is a subtype, try checking against that. Otherwise, use
8758 fits_to_tree_p, which checks against the precision.
8759
8760      Compute the status for each possibly constant bound, and return false as
8761      soon as one is known not to hold.  The ok_for_xxx_bound flags record
8762      whether the constant is known to satisfy the corresponding bound; they
8763      stay false when that bound is not a constant and hence unknown.  */
8764
8765 /* Check if c >= type_low_bound. */
8766 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8767 {
8768 if (tree_int_cst_lt (c, type_low_bound))
8769 return false;
8770 ok_for_low_bound = true;
8771 }
8772 else
8773 ok_for_low_bound = false;
8774
8775 /* Check if c <= type_high_bound. */
8776 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8777 {
8778 if (tree_int_cst_lt (type_high_bound, c))
8779 return false;
8780 ok_for_high_bound = true;
8781 }
8782 else
8783 ok_for_high_bound = false;
8784
8785 /* If the constant fits both bounds, the result is known. */
8786 if (ok_for_low_bound && ok_for_high_bound)
8787 return true;
8788
8789 /* Perform some generic filtering which may allow making a decision
8790 even if the bounds are not constant. First, negative integers
8791      never fit in unsigned types.  */
8792 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8793 return false;
8794
8795 /* Second, narrower types always fit in wider ones. */
8796 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8797 return true;
8798
8799 /* Third, unsigned integers with top bit set never fit signed types. */
8800 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8801 {
8802 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8803 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8804 {
8805 /* When a tree_cst is converted to a wide-int, the precision
8806 is taken from the type. However, if the precision of the
8807 mode underneath the type is smaller than that, it is
8808 possible that the value will not fit. The test below
8809 fails if any bit is set between the sign bit of the
8810 underlying mode and the top bit of the type. */
8811 if (wi::ne_p (wi::zext (c, prec - 1), c))
8812 return false;
8813 }
8814 else if (wi::neg_p (c))
8815 return false;
8816 }
8817
8818    /* If we haven't been able to decide at this point, there is nothing more we
8819 can check ourselves here. Look at the base type if we have one and it
8820 has the same precision. */
8821 if (TREE_CODE (type) == INTEGER_TYPE
8822 && TREE_TYPE (type) != 0
8823 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8824 {
8825 type = TREE_TYPE (type);
8826 goto retry;
8827 }
8828
8829 /* Or to fits_to_tree_p, if nothing else. */
8830 return wi::fits_to_tree_p (c, type);
8831 }
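
/* For illustration only: with a constant built as

     tree c = build_int_cst (integer_type_node, 300);

   int_fits_type_p (c, unsigned_char_type_node) is false on typical targets
   (300 exceeds the 8-bit upper bound), while
   int_fits_type_p (c, short_integer_type_node) is true.  */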
8832
8833 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8834 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8835 represented (assuming two's-complement arithmetic) within the bit
8836 precision of the type are returned instead. */
8837
8838 void
8839 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8840 {
8841 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8842 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8843 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8844 else
8845 {
8846 if (TYPE_UNSIGNED (type))
8847 mpz_set_ui (min, 0);
8848 else
8849 {
8850 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8851 wi::to_mpz (mn, min, SIGNED);
8852 }
8853 }
8854
8855 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8856 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8857 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8858 else
8859 {
8860 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8861 wi::to_mpz (mn, max, TYPE_SIGN (type));
8862 }
8863 }
8864
8865 /* Return true if VAR is an automatic variable defined in function FN. */
8866
8867 bool
8868 auto_var_in_fn_p (const_tree var, const_tree fn)
8869 {
8870 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8871 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8872 || TREE_CODE (var) == PARM_DECL)
8873 && ! TREE_STATIC (var))
8874 || TREE_CODE (var) == LABEL_DECL
8875 || TREE_CODE (var) == RESULT_DECL));
8876 }
8877
8878 /* Subprogram of following function. Called by walk_tree.
8879
8880 Return *TP if it is an automatic variable or parameter of the
8881 function passed in as DATA. */
8882
8883 static tree
8884 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8885 {
8886 tree fn = (tree) data;
8887
8888 if (TYPE_P (*tp))
8889 *walk_subtrees = 0;
8890
8891 else if (DECL_P (*tp)
8892 && auto_var_in_fn_p (*tp, fn))
8893 return *tp;
8894
8895 return NULL_TREE;
8896 }
8897
8898 /* Returns true if T is, contains, or refers to a type with variable
8899 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8900 arguments, but not the return type. If FN is nonzero, only return
8901 true if a modifier of the type or position of FN is a variable or
8902 parameter inside FN.
8903
8904 This concept is more general than that of C99 'variably modified types':
8905 in C99, a struct type is never variably modified because a VLA may not
8906    appear as a structure member.  However, in GNU C, code like:
8907
8908 struct S { int i[f()]; };
8909
8910 is valid, and other languages may define similar constructs. */
8911
8912 bool
8913 variably_modified_type_p (tree type, tree fn)
8914 {
8915 tree t;
8916
8917 /* Test if T is either variable (if FN is zero) or an expression containing
8918 a variable in FN. If TYPE isn't gimplified, return true also if
8919 gimplify_one_sizepos would gimplify the expression into a local
8920 variable. */
8921 #define RETURN_TRUE_IF_VAR(T) \
8922 do { tree _t = (T); \
8923 if (_t != NULL_TREE \
8924 && _t != error_mark_node \
8925 && TREE_CODE (_t) != INTEGER_CST \
8926 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8927 && (!fn \
8928 || (!TYPE_SIZES_GIMPLIFIED (type) \
8929 && !is_gimple_sizepos (_t)) \
8930 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8931 return true; } while (0)
8932
8933 if (type == error_mark_node)
8934 return false;
8935
8936 /* If TYPE itself has variable size, it is variably modified. */
8937 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8938 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8939
8940 switch (TREE_CODE (type))
8941 {
8942 case POINTER_TYPE:
8943 case REFERENCE_TYPE:
8944 case VECTOR_TYPE:
8945 if (variably_modified_type_p (TREE_TYPE (type), fn))
8946 return true;
8947 break;
8948
8949 case FUNCTION_TYPE:
8950 case METHOD_TYPE:
8951 /* If TYPE is a function type, it is variably modified if the
8952 return type is variably modified. */
8953 if (variably_modified_type_p (TREE_TYPE (type), fn))
8954 return true;
8955 break;
8956
8957 case INTEGER_TYPE:
8958 case REAL_TYPE:
8959 case FIXED_POINT_TYPE:
8960 case ENUMERAL_TYPE:
8961 case BOOLEAN_TYPE:
8962 /* Scalar types are variably modified if their end points
8963 aren't constant. */
8964 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8965 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8966 break;
8967
8968 case RECORD_TYPE:
8969 case UNION_TYPE:
8970 case QUAL_UNION_TYPE:
8971 /* We can't see if any of the fields are variably-modified by the
8972 definition we normally use, since that would produce infinite
8973 recursion via pointers. */
8974 /* This is variably modified if some field's type is. */
8975 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8976 if (TREE_CODE (t) == FIELD_DECL)
8977 {
8978 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8979 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8980 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8981
8982 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8983 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8984 }
8985 break;
8986
8987 case ARRAY_TYPE:
8988 /* Do not call ourselves to avoid infinite recursion. This is
8989 variably modified if the element type is. */
8990 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8991 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8992 break;
8993
8994 default:
8995 break;
8996 }
8997
8998 /* The current language may have other cases to check, but in general,
8999 all other types are not variably modified. */
9000 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9001
9002 #undef RETURN_TRUE_IF_VAR
9003 }
9004
9005 /* Given a DECL or TYPE, return the scope in which it was declared, or
9006 NULL_TREE if there is no containing scope. */
9007
9008 tree
9009 get_containing_scope (const_tree t)
9010 {
9011 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9012 }
9013
9014 /* Return the innermost context enclosing DECL that is
9015 a FUNCTION_DECL, or zero if none. */
9016
9017 tree
9018 decl_function_context (const_tree decl)
9019 {
9020 tree context;
9021
9022 if (TREE_CODE (decl) == ERROR_MARK)
9023 return 0;
9024
9025 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9026 where we look up the function at runtime. Such functions always take
9027 a first argument of type 'pointer to real context'.
9028
9029 C++ should really be fixed to use DECL_CONTEXT for the real context,
9030 and use something else for the "virtual context". */
9031 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9032 context
9033 = TYPE_MAIN_VARIANT
9034 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9035 else
9036 context = DECL_CONTEXT (decl);
9037
9038 while (context && TREE_CODE (context) != FUNCTION_DECL)
9039 {
9040 if (TREE_CODE (context) == BLOCK)
9041 context = BLOCK_SUPERCONTEXT (context);
9042 else
9043 context = get_containing_scope (context);
9044 }
9045
9046 return context;
9047 }
9048
9049 /* Return the innermost context enclosing DECL that is
9050 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9051 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9052
9053 tree
9054 decl_type_context (const_tree decl)
9055 {
9056 tree context = DECL_CONTEXT (decl);
9057
9058 while (context)
9059 switch (TREE_CODE (context))
9060 {
9061 case NAMESPACE_DECL:
9062 case TRANSLATION_UNIT_DECL:
9063 return NULL_TREE;
9064
9065 case RECORD_TYPE:
9066 case UNION_TYPE:
9067 case QUAL_UNION_TYPE:
9068 return context;
9069
9070 case TYPE_DECL:
9071 case FUNCTION_DECL:
9072 context = DECL_CONTEXT (context);
9073 break;
9074
9075 case BLOCK:
9076 context = BLOCK_SUPERCONTEXT (context);
9077 break;
9078
9079 default:
9080 gcc_unreachable ();
9081 }
9082
9083 return NULL_TREE;
9084 }
9085
9086 /* CALL is a CALL_EXPR. Return the declaration for the function
9087 called, or NULL_TREE if the called function cannot be
9088 determined. */
9089
9090 tree
9091 get_callee_fndecl (const_tree call)
9092 {
9093 tree addr;
9094
9095 if (call == error_mark_node)
9096 return error_mark_node;
9097
9098 /* It's invalid to call this function with anything but a
9099 CALL_EXPR. */
9100 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9101
9102 /* The first operand to the CALL is the address of the function
9103 called. */
9104 addr = CALL_EXPR_FN (call);
9105
9106 /* If there is no function, return early. */
9107 if (addr == NULL_TREE)
9108 return NULL_TREE;
9109
9110 STRIP_NOPS (addr);
9111
9112 /* If this is a readonly function pointer, extract its initial value. */
9113 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9114 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9115 && DECL_INITIAL (addr))
9116 addr = DECL_INITIAL (addr);
9117
9118 /* If the address is just `&f' for some function `f', then we know
9119 that `f' is being called. */
9120 if (TREE_CODE (addr) == ADDR_EXPR
9121 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9122 return TREE_OPERAND (addr, 0);
9123
9124 /* We couldn't figure out what was being called. */
9125 return NULL_TREE;
9126 }
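
/* For illustration only: for a CALL_EXPR created by something like

     tree call = build_call_expr (some_fndecl, 1, arg);

   get_callee_fndecl (call) returns some_fndecl, because the callee address
   is the ADDR_EXPR "&some_fndecl"; some_fndecl and arg stand for trees the
   caller already has.  For an indirect call through a non-constant
   function pointer the result is NULL_TREE instead.  */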
9127
9128 /* Print debugging information about tree nodes generated during the compile,
9129 and any language-specific information. */
9130
9131 void
9132 dump_tree_statistics (void)
9133 {
9134 if (GATHER_STATISTICS)
9135 {
9136 int i;
9137 int total_nodes, total_bytes;
9138 fprintf (stderr, "Kind Nodes Bytes\n");
9139 fprintf (stderr, "---------------------------------------\n");
9140 total_nodes = total_bytes = 0;
9141 for (i = 0; i < (int) all_kinds; i++)
9142 {
9143 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9144 tree_node_counts[i], tree_node_sizes[i]);
9145 total_nodes += tree_node_counts[i];
9146 total_bytes += tree_node_sizes[i];
9147 }
9148 fprintf (stderr, "---------------------------------------\n");
9149 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9150 fprintf (stderr, "---------------------------------------\n");
9151 fprintf (stderr, "Code Nodes\n");
9152 fprintf (stderr, "----------------------------\n");
9153 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9154 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9155 tree_code_counts[i]);
9156 fprintf (stderr, "----------------------------\n");
9157 ssanames_print_statistics ();
9158 phinodes_print_statistics ();
9159 }
9160 else
9161 fprintf (stderr, "(No per-node statistics)\n");
9162
9163 print_type_hash_statistics ();
9164 print_debug_expr_statistics ();
9165 print_value_expr_statistics ();
9166 lang_hooks.print_statistics ();
9167 }
9168 \f
9169 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9170
9171 /* Fold the BITS most-significant bits of VALUE into the crc32 CHKSUM.  */
9172
9173 static unsigned
9174 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9175 {
9176 unsigned ix;
9177
9178 for (ix = bits; ix--; value <<= 1)
9179 {
9180 unsigned feedback;
9181
9182 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9183 chksum <<= 1;
9184 chksum ^= feedback;
9185 }
9186 return chksum;
9187 }
9188
9189 /* Generate a crc32 of a 32-bit unsigned. */
9190
9191 unsigned
9192 crc32_unsigned (unsigned chksum, unsigned value)
9193 {
9194 return crc32_unsigned_bits (chksum, value, 32);
9195 }
9196
9197 /* Generate a crc32 of a byte. */
9198
9199 unsigned
9200 crc32_byte (unsigned chksum, char byte)
9201 {
9202 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9203 }
9204
9205 /* Generate a crc32 of a string. */
9206
9207 unsigned
9208 crc32_string (unsigned chksum, const char *string)
9209 {
9210 do
9211 {
9212 chksum = crc32_byte (chksum, *string);
9213 }
9214 while (*string++);
9215 return chksum;
9216 }
9217
9218 /* P is a string that will be used in a symbol. Mask out any characters
9219 that are not valid in that context. */
9220
9221 void
9222 clean_symbol_name (char *p)
9223 {
9224 for (; *p; p++)
9225 if (! (ISALNUM (*p)
9226 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9227 || *p == '$'
9228 #endif
9229 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9230 || *p == '.'
9231 #endif
9232 ))
9233 *p = '_';
9234 }
9235
9236 /* Generate a name for a special-purpose function.
9237 The generated name may need to be unique across the whole link.
9238 Changes to this function may also require corresponding changes to
9239 xstrdup_mask_random.
9240 TYPE is some string to identify the purpose of this function to the
9241 linker or collect2; it must start with an uppercase letter,
9242 one of:
9243 I - for constructors
9244 D - for destructors
9245 N - for C++ anonymous namespaces
9246 F - for DWARF unwind frame information. */
9247
9248 tree
9249 get_file_function_name (const char *type)
9250 {
9251 char *buf;
9252 const char *p;
9253 char *q;
9254
9255 /* If we already have a name we know to be unique, just use that. */
9256 if (first_global_object_name)
9257 p = q = ASTRDUP (first_global_object_name);
9258 /* If the target is handling the constructors/destructors, they
9259 will be local to this file and the name is only necessary for
9260 debugging purposes.
9261    We also assign sub_I and sub_D suffixes to constructors called from
9262 the global static constructors. These are always local. */
9263 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9264 || (strncmp (type, "sub_", 4) == 0
9265 && (type[4] == 'I' || type[4] == 'D')))
9266 {
9267 const char *file = main_input_filename;
9268 if (! file)
9269 file = LOCATION_FILE (input_location);
9270 /* Just use the file's basename, because the full pathname
9271 might be quite long. */
9272 p = q = ASTRDUP (lbasename (file));
9273 }
9274 else
9275 {
9276 /* Otherwise, the name must be unique across the entire link.
9277 We don't have anything that we know to be unique to this translation
9278 unit, so use what we do have and throw in some randomness. */
9279 unsigned len;
9280 const char *name = weak_global_object_name;
9281 const char *file = main_input_filename;
9282
9283 if (! name)
9284 name = "";
9285 if (! file)
9286 file = LOCATION_FILE (input_location);
9287
9288 len = strlen (file);
9289 q = (char *) alloca (9 + 17 + len + 1);
9290 memcpy (q, file, len + 1);
9291
9292 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9293 crc32_string (0, name), get_random_seed (false));
9294
9295 p = q;
9296 }
9297
9298 clean_symbol_name (q);
9299 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9300 + strlen (type));
9301
9302 /* Set up the name of the file-level functions we may need.
9303 Use a global object (which is already required to be unique over
9304 the program) rather than the file name (which imposes extra
9305 constraints). */
9306 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9307
9308 return get_identifier (buf);
9309 }
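
/* For illustration only: when first_global_object_name is "main",

     tree id = get_file_function_name ("I");

   yields an IDENTIFIER_NODE along the lines of "_GLOBAL__I_main",
   following FILE_FUNCTION_FORMAT, with any characters not valid in a
   symbol replaced by '_' via clean_symbol_name.  */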
9310 \f
9311 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9312
9313 /* Complain that the tree code of NODE does not match the expected 0
9314 terminated list of trailing codes. The trailing code list can be
9315 empty, for a more vague error message. FILE, LINE, and FUNCTION
9316 are of the caller. */
9317
9318 void
9319 tree_check_failed (const_tree node, const char *file,
9320 int line, const char *function, ...)
9321 {
9322 va_list args;
9323 const char *buffer;
9324 unsigned length = 0;
9325 enum tree_code code;
9326
9327 va_start (args, function);
9328 while ((code = (enum tree_code) va_arg (args, int)))
9329 length += 4 + strlen (get_tree_code_name (code));
9330 va_end (args);
9331 if (length)
9332 {
9333 char *tmp;
9334 va_start (args, function);
9335 length += strlen ("expected ");
9336 buffer = tmp = (char *) alloca (length);
9337 length = 0;
9338 while ((code = (enum tree_code) va_arg (args, int)))
9339 {
9340 const char *prefix = length ? " or " : "expected ";
9341
9342 strcpy (tmp + length, prefix);
9343 length += strlen (prefix);
9344 strcpy (tmp + length, get_tree_code_name (code));
9345 length += strlen (get_tree_code_name (code));
9346 }
9347 va_end (args);
9348 }
9349 else
9350 buffer = "unexpected node";
9351
9352 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9353 buffer, get_tree_code_name (TREE_CODE (node)),
9354 function, trim_filename (file), line);
9355 }
9356
9357 /* Complain that the tree code of NODE does match the expected 0
9358 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9359 the caller. */
9360
9361 void
9362 tree_not_check_failed (const_tree node, const char *file,
9363 int line, const char *function, ...)
9364 {
9365 va_list args;
9366 char *buffer;
9367 unsigned length = 0;
9368 enum tree_code code;
9369
9370 va_start (args, function);
9371 while ((code = (enum tree_code) va_arg (args, int)))
9372 length += 4 + strlen (get_tree_code_name (code));
9373 va_end (args);
9374 va_start (args, function);
9375 buffer = (char *) alloca (length);
9376 length = 0;
9377 while ((code = (enum tree_code) va_arg (args, int)))
9378 {
9379 if (length)
9380 {
9381 strcpy (buffer + length, " or ");
9382 length += 4;
9383 }
9384 strcpy (buffer + length, get_tree_code_name (code));
9385 length += strlen (get_tree_code_name (code));
9386 }
9387 va_end (args);
9388
9389 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9390 buffer, get_tree_code_name (TREE_CODE (node)),
9391 function, trim_filename (file), line);
9392 }
9393
9394 /* Similar to tree_check_failed, except that we check for a class of tree
9395 code, given in CL. */
9396
9397 void
9398 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9399 const char *file, int line, const char *function)
9400 {
9401 internal_error
9402 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9403 TREE_CODE_CLASS_STRING (cl),
9404 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9405 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9406 }
9407
9408 /* Similar to tree_check_failed, except that instead of specifying a
9409 dozen codes, use the knowledge that they're all sequential. */
9410
9411 void
9412 tree_range_check_failed (const_tree node, const char *file, int line,
9413 const char *function, enum tree_code c1,
9414 enum tree_code c2)
9415 {
9416 char *buffer;
9417 unsigned length = 0;
9418 unsigned int c;
9419
9420 for (c = c1; c <= c2; ++c)
9421 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9422
9423 length += strlen ("expected ");
9424 buffer = (char *) alloca (length);
9425 length = 0;
9426
9427 for (c = c1; c <= c2; ++c)
9428 {
9429 const char *prefix = length ? " or " : "expected ";
9430
9431 strcpy (buffer + length, prefix);
9432 length += strlen (prefix);
9433 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9434 length += strlen (get_tree_code_name ((enum tree_code) c));
9435 }
9436
9437 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9438 buffer, get_tree_code_name (TREE_CODE (node)),
9439 function, trim_filename (file), line);
9440 }
9441
9442
9443 /* Similar to tree_check_failed, except that we check that a tree does
9444    not have the specified class, given in CL.  */
9445
9446 void
9447 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9448 const char *file, int line, const char *function)
9449 {
9450 internal_error
9451 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9452 TREE_CODE_CLASS_STRING (cl),
9453 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9454 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9455 }
9456
9457
9458 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9459
9460 void
9461 omp_clause_check_failed (const_tree node, const char *file, int line,
9462 const char *function, enum omp_clause_code code)
9463 {
9464 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9465 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9466 function, trim_filename (file), line);
9467 }
9468
9469
9470 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9471
9472 void
9473 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9474 const char *function, enum omp_clause_code c1,
9475 enum omp_clause_code c2)
9476 {
9477 char *buffer;
9478 unsigned length = 0;
9479 unsigned int c;
9480
9481 for (c = c1; c <= c2; ++c)
9482 length += 4 + strlen (omp_clause_code_name[c]);
9483
9484 length += strlen ("expected ");
9485 buffer = (char *) alloca (length);
9486 length = 0;
9487
9488 for (c = c1; c <= c2; ++c)
9489 {
9490 const char *prefix = length ? " or " : "expected ";
9491
9492 strcpy (buffer + length, prefix);
9493 length += strlen (prefix);
9494 strcpy (buffer + length, omp_clause_code_name[c]);
9495 length += strlen (omp_clause_code_name[c]);
9496 }
9497
9498 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9499 buffer, omp_clause_code_name[TREE_CODE (node)],
9500 function, trim_filename (file), line);
9501 }
9502
9503
9504 #undef DEFTREESTRUCT
9505 #define DEFTREESTRUCT(VAL, NAME) NAME,
9506
9507 static const char *ts_enum_names[] = {
9508 #include "treestruct.def"
9509 };
9510 #undef DEFTREESTRUCT
9511
9512 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9513
9514 /* Similar to tree_class_check_failed, except that we check for
9515 whether CODE contains the tree structure identified by EN. */
9516
9517 void
9518 tree_contains_struct_check_failed (const_tree node,
9519 const enum tree_node_structure_enum en,
9520 const char *file, int line,
9521 const char *function)
9522 {
9523 internal_error
9524 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9525 TS_ENUM_NAME (en),
9526 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9527 }
9528
9529
9530 /* Similar to above, except that the check is for the bounds of a
9531    tree_int_cst's (dynamically sized) vector of elements.  */
9532
9533 void
9534 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9535 const char *function)
9536 {
9537 internal_error
9538 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9539 idx + 1, len, function, trim_filename (file), line);
9540 }
9541
9542 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9543 (dynamically sized) vector. */
9544
9545 void
9546 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9547 const char *function)
9548 {
9549 internal_error
9550 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9551 idx + 1, len, function, trim_filename (file), line);
9552 }
9553
9554 /* Similar to above, except that the check is for the bounds of the operand
9555 vector of an expression node EXP. */
9556
9557 void
9558 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9559 int line, const char *function)
9560 {
9561 enum tree_code code = TREE_CODE (exp);
9562 internal_error
9563 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9564 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9565 function, trim_filename (file), line);
9566 }
9567
9568 /* Similar to above, except that the check is for the number of
9569 operands of an OMP_CLAUSE node. */
9570
9571 void
9572 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9573 int line, const char *function)
9574 {
9575 internal_error
9576 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9577 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9578 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9579 trim_filename (file), line);
9580 }
9581 #endif /* ENABLE_TREE_CHECKING */
9582 \f
9583 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9584 and mapped to the machine mode MODE. Initialize its fields and build
9585 the information necessary for debugging output. */
9586
9587 static tree
9588 make_vector_type (tree innertype, int nunits, machine_mode mode)
9589 {
9590 tree t;
9591 inchash::hash hstate;
9592
9593 t = make_node (VECTOR_TYPE);
9594 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9595 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9596 SET_TYPE_MODE (t, mode);
9597
9598 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9599 SET_TYPE_STRUCTURAL_EQUALITY (t);
9600 else if (TYPE_CANONICAL (innertype) != innertype
9601 || mode != VOIDmode)
9602 TYPE_CANONICAL (t)
9603 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9604
9605 layout_type (t);
9606
9607 hstate.add_wide_int (VECTOR_TYPE);
9608 hstate.add_wide_int (nunits);
9609 hstate.add_wide_int (mode);
9610 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9611 t = type_hash_canon (hstate.end (), t);
9612
9613 /* We have built a main variant, based on the main variant of the
9614 inner type. Use it to build the variant we return. */
9615 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9616 && TREE_TYPE (t) != innertype)
9617 return build_type_attribute_qual_variant (t,
9618 TYPE_ATTRIBUTES (innertype),
9619 TYPE_QUALS (innertype));
9620
9621 return t;
9622 }
9623
9624 static tree
9625 make_or_reuse_type (unsigned size, int unsignedp)
9626 {
9627 int i;
9628
9629 if (size == INT_TYPE_SIZE)
9630 return unsignedp ? unsigned_type_node : integer_type_node;
9631 if (size == CHAR_TYPE_SIZE)
9632 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9633 if (size == SHORT_TYPE_SIZE)
9634 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9635 if (size == LONG_TYPE_SIZE)
9636 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9637 if (size == LONG_LONG_TYPE_SIZE)
9638 return (unsignedp ? long_long_unsigned_type_node
9639 : long_long_integer_type_node);
9640
9641 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9642 if (size == int_n_data[i].bitsize
9643 && int_n_enabled_p[i])
9644 return (unsignedp ? int_n_trees[i].unsigned_type
9645 : int_n_trees[i].signed_type);
9646
9647 if (unsignedp)
9648 return make_unsigned_type (size);
9649 else
9650 return make_signed_type (size);
9651 }
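
/* For illustration: on a target where INT_TYPE_SIZE is 32 (common, but
   not guaranteed), make_or_reuse_type (32, 1) simply returns
   unsigned_type_node, whereas a width that matches none of the standard
   C types and no enabled __intN type, say make_or_reuse_type (24, 0),
   falls through and builds a fresh signed 24-bit type.  */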
9652
9653 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9654
9655 static tree
9656 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9657 {
9658 if (satp)
9659 {
9660 if (size == SHORT_FRACT_TYPE_SIZE)
9661 return unsignedp ? sat_unsigned_short_fract_type_node
9662 : sat_short_fract_type_node;
9663 if (size == FRACT_TYPE_SIZE)
9664 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9665 if (size == LONG_FRACT_TYPE_SIZE)
9666 return unsignedp ? sat_unsigned_long_fract_type_node
9667 : sat_long_fract_type_node;
9668 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9669 return unsignedp ? sat_unsigned_long_long_fract_type_node
9670 : sat_long_long_fract_type_node;
9671 }
9672 else
9673 {
9674 if (size == SHORT_FRACT_TYPE_SIZE)
9675 return unsignedp ? unsigned_short_fract_type_node
9676 : short_fract_type_node;
9677 if (size == FRACT_TYPE_SIZE)
9678 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9679 if (size == LONG_FRACT_TYPE_SIZE)
9680 return unsignedp ? unsigned_long_fract_type_node
9681 : long_fract_type_node;
9682 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9683 return unsignedp ? unsigned_long_long_fract_type_node
9684 : long_long_fract_type_node;
9685 }
9686
9687 return make_fract_type (size, unsignedp, satp);
9688 }
9689
9690 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9691
9692 static tree
9693 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9694 {
9695 if (satp)
9696 {
9697 if (size == SHORT_ACCUM_TYPE_SIZE)
9698 return unsignedp ? sat_unsigned_short_accum_type_node
9699 : sat_short_accum_type_node;
9700 if (size == ACCUM_TYPE_SIZE)
9701 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9702 if (size == LONG_ACCUM_TYPE_SIZE)
9703 return unsignedp ? sat_unsigned_long_accum_type_node
9704 : sat_long_accum_type_node;
9705 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9706 return unsignedp ? sat_unsigned_long_long_accum_type_node
9707 : sat_long_long_accum_type_node;
9708 }
9709 else
9710 {
9711 if (size == SHORT_ACCUM_TYPE_SIZE)
9712 return unsignedp ? unsigned_short_accum_type_node
9713 : short_accum_type_node;
9714 if (size == ACCUM_TYPE_SIZE)
9715 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9716 if (size == LONG_ACCUM_TYPE_SIZE)
9717 return unsignedp ? unsigned_long_accum_type_node
9718 : long_accum_type_node;
9719 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9720 return unsignedp ? unsigned_long_long_accum_type_node
9721 : long_long_accum_type_node;
9722 }
9723
9724 return make_accum_type (size, unsignedp, satp);
9725 }
9726
9727
9728 /* Create an atomic variant node for TYPE. This routine is called
9729 during initialization of data types to create the 5 basic atomic
9730 types. The generic build_variant_type function requires these to
9731 already be set up in order to function properly, so it cannot be
9732 called from there. If ALIGN is non-zero, then ensure the alignment is
9733 overridden to this value. */
9734
9735 static tree
9736 build_atomic_base (tree type, unsigned int align)
9737 {
9738 tree t;
9739
9740 /* Make sure it's not already registered. */
9741 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9742 return t;
9743
9744 t = build_variant_type_copy (type);
9745 set_type_quals (t, TYPE_QUAL_ATOMIC);
9746
9747 if (align)
9748 TYPE_ALIGN (t) = align;
9749
9750 return t;
9751 }
9752
9753 /* Create nodes for all integer types (and error_mark_node) using the sizes
9754 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9755 SHORT_DOUBLE specifies whether double should be of the same precision
9756 as float. */
9757
9758 void
9759 build_common_tree_nodes (bool signed_char, bool short_double)
9760 {
9761 int i;
9762
9763 error_mark_node = make_node (ERROR_MARK);
9764 TREE_TYPE (error_mark_node) = error_mark_node;
9765
9766 initialize_sizetypes ();
9767
9768 /* Define both `signed char' and `unsigned char'. */
9769 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9770 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9771 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9772 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9773
9774 /* Define `char', which is like either `signed char' or `unsigned char'
9775 but not the same as either. */
9776 char_type_node
9777 = (signed_char
9778 ? make_signed_type (CHAR_TYPE_SIZE)
9779 : make_unsigned_type (CHAR_TYPE_SIZE));
9780 TYPE_STRING_FLAG (char_type_node) = 1;
9781
9782 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9783 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9784 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9785 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9786 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9787 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9788 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9789 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9790
9791 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9792 {
9793 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9794 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9795 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9796 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9797
9798 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9799 && int_n_enabled_p[i])
9800 {
9801 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9802 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9803 }
9804 }
9805
9806 /* Define a boolean type. This type only represents boolean values but
9807 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9808 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9809 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9810 TYPE_PRECISION (boolean_type_node) = 1;
9811 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9812
9813 /* Define what type to use for size_t. */
9814 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9815 size_type_node = unsigned_type_node;
9816 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9817 size_type_node = long_unsigned_type_node;
9818 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9819 size_type_node = long_long_unsigned_type_node;
9820 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9821 size_type_node = short_unsigned_type_node;
9822 else
9823 {
9824 int i;
9825
9826 size_type_node = NULL_TREE;
9827 for (i = 0; i < NUM_INT_N_ENTS; i++)
9828 if (int_n_enabled_p[i])
9829 {
9830 char name[50];
9831 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9832
9833 if (strcmp (name, SIZE_TYPE) == 0)
9834 {
9835 size_type_node = int_n_trees[i].unsigned_type;
9836 }
9837 }
9838 if (size_type_node == NULL_TREE)
9839 gcc_unreachable ();
9840 }
9841
9842 /* Fill in the rest of the sized types. Reuse existing type nodes
9843 when possible. */
9844 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9845 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9846 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9847 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9848 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9849
9850 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9851 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9852 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9853 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9854 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9855
9856 /* Don't call build_qualified_type for atomics. That routine does
9857 special processing for atomics, and until they are initialized
9858 it's better not to make that call.
9859
9860 Check to see if there is a target override for atomic types. */
9861
9862 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9863 targetm.atomic_align_for_mode (QImode));
9864 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9865 targetm.atomic_align_for_mode (HImode));
9866 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9867 targetm.atomic_align_for_mode (SImode));
9868 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9869 targetm.atomic_align_for_mode (DImode));
9870 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9871 targetm.atomic_align_for_mode (TImode));
9872
9873 access_public_node = get_identifier ("public");
9874 access_protected_node = get_identifier ("protected");
9875 access_private_node = get_identifier ("private");
9876
9877 /* Define these next since types below may use them. */
9878 integer_zero_node = build_int_cst (integer_type_node, 0);
9879 integer_one_node = build_int_cst (integer_type_node, 1);
9880 integer_three_node = build_int_cst (integer_type_node, 3);
9881 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9882
9883 size_zero_node = size_int (0);
9884 size_one_node = size_int (1);
9885 bitsize_zero_node = bitsize_int (0);
9886 bitsize_one_node = bitsize_int (1);
9887 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9888
9889 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9890 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9891
9892 void_type_node = make_node (VOID_TYPE);
9893 layout_type (void_type_node);
9894
9895 pointer_bounds_type_node = targetm.chkp_bound_type ();
9896
9897 /* We are not going to have real types in C with less than byte alignment,
9898 so we might as well not have any types that claim to have it. */
9899 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9900 TYPE_USER_ALIGN (void_type_node) = 0;
9901
9902 void_node = make_node (VOID_CST);
9903 TREE_TYPE (void_node) = void_type_node;
9904
9905 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9906 layout_type (TREE_TYPE (null_pointer_node));
9907
9908 ptr_type_node = build_pointer_type (void_type_node);
9909 const_ptr_type_node
9910 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9911 fileptr_type_node = ptr_type_node;
9912
9913 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9914
9915 float_type_node = make_node (REAL_TYPE);
9916 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9917 layout_type (float_type_node);
9918
9919 double_type_node = make_node (REAL_TYPE);
9920 if (short_double)
9921 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9922 else
9923 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9924 layout_type (double_type_node);
9925
9926 long_double_type_node = make_node (REAL_TYPE);
9927 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9928 layout_type (long_double_type_node);
9929
9930 float_ptr_type_node = build_pointer_type (float_type_node);
9931 double_ptr_type_node = build_pointer_type (double_type_node);
9932 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9933 integer_ptr_type_node = build_pointer_type (integer_type_node);
9934
9935 /* Fixed size integer types. */
9936 uint16_type_node = make_or_reuse_type (16, 1);
9937 uint32_type_node = make_or_reuse_type (32, 1);
9938 uint64_type_node = make_or_reuse_type (64, 1);
9939
9940 /* Decimal float types. */
9941 dfloat32_type_node = make_node (REAL_TYPE);
9942 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9943 layout_type (dfloat32_type_node);
9944 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9945 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9946
9947 dfloat64_type_node = make_node (REAL_TYPE);
9948 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9949 layout_type (dfloat64_type_node);
9950 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9951 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9952
9953 dfloat128_type_node = make_node (REAL_TYPE);
9954 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9955 layout_type (dfloat128_type_node);
9956 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9957 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9958
9959 complex_integer_type_node = build_complex_type (integer_type_node);
9960 complex_float_type_node = build_complex_type (float_type_node);
9961 complex_double_type_node = build_complex_type (double_type_node);
9962 complex_long_double_type_node = build_complex_type (long_double_type_node);
9963
9964 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9965 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9966 sat_ ## KIND ## _type_node = \
9967 make_sat_signed_ ## KIND ## _type (SIZE); \
9968 sat_unsigned_ ## KIND ## _type_node = \
9969 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9970 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9971 unsigned_ ## KIND ## _type_node = \
9972 make_unsigned_ ## KIND ## _type (SIZE);
9973
9974 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9975 sat_ ## WIDTH ## KIND ## _type_node = \
9976 make_sat_signed_ ## KIND ## _type (SIZE); \
9977 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9978 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9979 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9980 unsigned_ ## WIDTH ## KIND ## _type_node = \
9981 make_unsigned_ ## KIND ## _type (SIZE);
9982
9983 /* Make fixed-point type nodes based on four different widths. */
9984 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9985 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9986 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9987 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9988 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9989
9990 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9991 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9992 NAME ## _type_node = \
9993 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9994 u ## NAME ## _type_node = \
9995 make_or_reuse_unsigned_ ## KIND ## _type \
9996 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9997 sat_ ## NAME ## _type_node = \
9998 make_or_reuse_sat_signed_ ## KIND ## _type \
9999 (GET_MODE_BITSIZE (MODE ## mode)); \
10000 sat_u ## NAME ## _type_node = \
10001 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10002 (GET_MODE_BITSIZE (U ## MODE ## mode));
10003
10004 /* Fixed-point type and mode nodes. */
10005 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10006 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10007 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10008 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10009 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10010 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10011 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10012 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10013 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10014 MAKE_FIXED_MODE_NODE (accum, da, DA)
10015 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10016
10017 {
10018 tree t = targetm.build_builtin_va_list ();
10019
10020 /* Many back-ends define record types without setting TYPE_NAME.
10021 If we copied the record type here, we'd keep the original
10022 record type without a name. This breaks name mangling. So,
10023 don't copy record types and let c_common_nodes_and_builtins()
10024 declare the type to be __builtin_va_list. */
10025 if (TREE_CODE (t) != RECORD_TYPE)
10026 t = build_variant_type_copy (t);
10027
10028 va_list_type_node = t;
10029 }
10030 }
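
/* For reference, and only as a sketch: a C-family front end typically
   calls this early during type initialization, roughly as

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   with the exact flag values being the front end's concern; it then
   layers its own language-specific nodes and builtins on top.  */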
10031
10032 /* Modify DECL for given flags.
10033 TM_PURE attribute is set only on types, so the function will modify
10034 DECL's type when ECF_TM_PURE is used. */
10035
10036 void
10037 set_call_expr_flags (tree decl, int flags)
10038 {
10039 if (flags & ECF_NOTHROW)
10040 TREE_NOTHROW (decl) = 1;
10041 if (flags & ECF_CONST)
10042 TREE_READONLY (decl) = 1;
10043 if (flags & ECF_PURE)
10044 DECL_PURE_P (decl) = 1;
10045 if (flags & ECF_LOOPING_CONST_OR_PURE)
10046 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10047 if (flags & ECF_NOVOPS)
10048 DECL_IS_NOVOPS (decl) = 1;
10049 if (flags & ECF_NORETURN)
10050 TREE_THIS_VOLATILE (decl) = 1;
10051 if (flags & ECF_MALLOC)
10052 DECL_IS_MALLOC (decl) = 1;
10053 if (flags & ECF_RETURNS_TWICE)
10054 DECL_IS_RETURNS_TWICE (decl) = 1;
10055 if (flags & ECF_LEAF)
10056 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10057 NULL, DECL_ATTRIBUTES (decl));
10058 if ((flags & ECF_TM_PURE) && flag_tm)
10059 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10060 /* Looping const or pure is implied by noreturn.
10061 There is currently no way to declare looping const or looping pure alone. */
10062 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10063 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10064 }
10065
10066
10067 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10068
10069 static void
10070 local_define_builtin (const char *name, tree type, enum built_in_function code,
10071 const char *library_name, int ecf_flags)
10072 {
10073 tree decl;
10074
10075 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10076 library_name, NULL_TREE);
10077 set_call_expr_flags (decl, ecf_flags);
10078
10079 set_builtin_decl (code, decl, true);
10080 }
10081
10082 /* Call this function after instantiating all builtins that the language
10083 front end cares about. This will build the rest of the builtins
10084 and internal functions that are relied upon by the tree optimizers and
10085 the middle-end. */
10086
10087 void
10088 build_common_builtin_nodes (void)
10089 {
10090 tree tmp, ftype;
10091 int ecf_flags;
10092
10093 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10094 {
10095 ftype = build_function_type (void_type_node, void_list_node);
10096 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10097 "__builtin_unreachable",
10098 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10099 | ECF_CONST);
10100 }
10101
10102 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10103 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10104 {
10105 ftype = build_function_type_list (ptr_type_node,
10106 ptr_type_node, const_ptr_type_node,
10107 size_type_node, NULL_TREE);
10108
10109 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10110 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10111 "memcpy", ECF_NOTHROW | ECF_LEAF);
10112 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10113 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10114 "memmove", ECF_NOTHROW | ECF_LEAF);
10115 }
10116
10117 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10118 {
10119 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10120 const_ptr_type_node, size_type_node,
10121 NULL_TREE);
10122 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10123 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10124 }
10125
10126 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10127 {
10128 ftype = build_function_type_list (ptr_type_node,
10129 ptr_type_node, integer_type_node,
10130 size_type_node, NULL_TREE);
10131 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10132 "memset", ECF_NOTHROW | ECF_LEAF);
10133 }
10134
10135 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10136 {
10137 ftype = build_function_type_list (ptr_type_node,
10138 size_type_node, NULL_TREE);
10139 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10140 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10141 }
10142
10143 ftype = build_function_type_list (ptr_type_node, size_type_node,
10144 size_type_node, NULL_TREE);
10145 local_define_builtin ("__builtin_alloca_with_align", ftype,
10146 BUILT_IN_ALLOCA_WITH_ALIGN,
10147 "__builtin_alloca_with_align",
10148 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10149
10150 /* If we're checking the stack, `alloca' can throw. */
10151 if (flag_stack_check)
10152 {
10153 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10154 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10155 }
10156
10157 ftype = build_function_type_list (void_type_node,
10158 ptr_type_node, ptr_type_node,
10159 ptr_type_node, NULL_TREE);
10160 local_define_builtin ("__builtin_init_trampoline", ftype,
10161 BUILT_IN_INIT_TRAMPOLINE,
10162 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10163 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10164 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10165 "__builtin_init_heap_trampoline",
10166 ECF_NOTHROW | ECF_LEAF);
10167
10168 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10169 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10170 BUILT_IN_ADJUST_TRAMPOLINE,
10171 "__builtin_adjust_trampoline",
10172 ECF_CONST | ECF_NOTHROW);
10173
10174 ftype = build_function_type_list (void_type_node,
10175 ptr_type_node, ptr_type_node, NULL_TREE);
10176 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10177 BUILT_IN_NONLOCAL_GOTO,
10178 "__builtin_nonlocal_goto",
10179 ECF_NORETURN | ECF_NOTHROW);
10180
10181 ftype = build_function_type_list (void_type_node,
10182 ptr_type_node, ptr_type_node, NULL_TREE);
10183 local_define_builtin ("__builtin_setjmp_setup", ftype,
10184 BUILT_IN_SETJMP_SETUP,
10185 "__builtin_setjmp_setup", ECF_NOTHROW);
10186
10187 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10188 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10189 BUILT_IN_SETJMP_RECEIVER,
10190 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10191
10192 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10193 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10194 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10195
10196 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10197 local_define_builtin ("__builtin_stack_restore", ftype,
10198 BUILT_IN_STACK_RESTORE,
10199 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10200
10201 /* If there's a possibility that we might use the ARM EABI, build the
10202 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10203 if (targetm.arm_eabi_unwinder)
10204 {
10205 ftype = build_function_type_list (void_type_node, NULL_TREE);
10206 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10207 BUILT_IN_CXA_END_CLEANUP,
10208 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10209 }
10210
10211 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10212 local_define_builtin ("__builtin_unwind_resume", ftype,
10213 BUILT_IN_UNWIND_RESUME,
10214 ((targetm_common.except_unwind_info (&global_options)
10215 == UI_SJLJ)
10216 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10217 ECF_NORETURN);
10218
10219 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10220 {
10221 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10222 NULL_TREE);
10223 local_define_builtin ("__builtin_return_address", ftype,
10224 BUILT_IN_RETURN_ADDRESS,
10225 "__builtin_return_address",
10226 ECF_NOTHROW);
10227 }
10228
10229 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10230 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10231 {
10232 ftype = build_function_type_list (void_type_node, ptr_type_node,
10233 ptr_type_node, NULL_TREE);
10234 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10235 local_define_builtin ("__cyg_profile_func_enter", ftype,
10236 BUILT_IN_PROFILE_FUNC_ENTER,
10237 "__cyg_profile_func_enter", 0);
10238 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10239 local_define_builtin ("__cyg_profile_func_exit", ftype,
10240 BUILT_IN_PROFILE_FUNC_EXIT,
10241 "__cyg_profile_func_exit", 0);
10242 }
10243
10244 /* The exception object and filter values from the runtime. The argument
10245 must be zero before exception lowering, i.e. from the front end. After
10246 exception lowering, it will be the region number for the exception
10247 landing pad. These functions are PURE instead of CONST to prevent
10248 them from being hoisted past the exception edge that will initialize
10249 its value in the landing pad. */
10250 ftype = build_function_type_list (ptr_type_node,
10251 integer_type_node, NULL_TREE);
10252 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10253 /* Only use TM_PURE if we have TM language support. */
10254 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10255 ecf_flags |= ECF_TM_PURE;
10256 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10257 "__builtin_eh_pointer", ecf_flags);
10258
10259 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10260 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10261 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10262 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10263
10264 ftype = build_function_type_list (void_type_node,
10265 integer_type_node, integer_type_node,
10266 NULL_TREE);
10267 local_define_builtin ("__builtin_eh_copy_values", ftype,
10268 BUILT_IN_EH_COPY_VALUES,
10269 "__builtin_eh_copy_values", ECF_NOTHROW);
10270
10271 /* Complex multiplication and division. These are handled as builtins
10272 rather than optabs because emit_library_call_value doesn't support
10273 complex. Further, we can do slightly better with folding these
10274 beasties if the real and imaginary parts of the arguments are separate. */
10275 {
10276 int mode;
10277
10278 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10279 {
10280 char mode_name_buf[4], *q;
10281 const char *p;
10282 enum built_in_function mcode, dcode;
10283 tree type, inner_type;
10284 const char *prefix = "__";
10285
10286 if (targetm.libfunc_gnu_prefix)
10287 prefix = "__gnu_";
10288
10289 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10290 if (type == NULL)
10291 continue;
10292 inner_type = TREE_TYPE (type);
10293
10294 ftype = build_function_type_list (type, inner_type, inner_type,
10295 inner_type, inner_type, NULL_TREE);
10296
10297 mcode = ((enum built_in_function)
10298 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10299 dcode = ((enum built_in_function)
10300 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10301
10302 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10303 *q = TOLOWER (*p);
10304 *q = '\0';
10305
10306 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10307 NULL);
10308 local_define_builtin (built_in_names[mcode], ftype, mcode,
10309 built_in_names[mcode],
10310 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10311
10312 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10313 NULL);
10314 local_define_builtin (built_in_names[dcode], ftype, dcode,
10315 built_in_names[dcode],
10316 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10317 }
10318 }
10319
10320 init_internal_fns ();
10321 }
10322
10323 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10324 better way.
10325
10326 If we requested a pointer to a vector, build up the pointers that
10327 we stripped off while looking for the inner type. Similarly for
10328 return values from functions.
10329
10330 The argument TYPE is the top of the chain, and BOTTOM is the
10331 new type which we will point to. */
10332
10333 tree
10334 reconstruct_complex_type (tree type, tree bottom)
10335 {
10336 tree inner, outer;
10337
10338 if (TREE_CODE (type) == POINTER_TYPE)
10339 {
10340 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10341 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10342 TYPE_REF_CAN_ALIAS_ALL (type));
10343 }
10344 else if (TREE_CODE (type) == REFERENCE_TYPE)
10345 {
10346 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10347 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10348 TYPE_REF_CAN_ALIAS_ALL (type));
10349 }
10350 else if (TREE_CODE (type) == ARRAY_TYPE)
10351 {
10352 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10353 outer = build_array_type (inner, TYPE_DOMAIN (type));
10354 }
10355 else if (TREE_CODE (type) == FUNCTION_TYPE)
10356 {
10357 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10358 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10359 }
10360 else if (TREE_CODE (type) == METHOD_TYPE)
10361 {
10362 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10363 /* The build_method_type_directly() routine prepends 'this' to the argument
10364 list, so we must compensate by getting rid of it. */
10365 outer
10366 = build_method_type_directly
10367 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10368 inner,
10369 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10370 }
10371 else if (TREE_CODE (type) == OFFSET_TYPE)
10372 {
10373 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10374 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10375 }
10376 else
10377 return bottom;
10378
10379 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10380 TYPE_QUALS (type));
10381 }
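
/* For example, if TYPE is `float **' and BOTTOM is a vector of four
   floats, the result is a pointer to a pointer to that vector type;
   the pointer, array, function and offset layers are rebuilt around the
   new innermost type, and the qualifiers and attributes of each layer
   are reapplied via build_type_attribute_qual_variant.  */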
10382
10383 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10384 the inner type. */
10385 tree
10386 build_vector_type_for_mode (tree innertype, machine_mode mode)
10387 {
10388 int nunits;
10389
10390 switch (GET_MODE_CLASS (mode))
10391 {
10392 case MODE_VECTOR_INT:
10393 case MODE_VECTOR_FLOAT:
10394 case MODE_VECTOR_FRACT:
10395 case MODE_VECTOR_UFRACT:
10396 case MODE_VECTOR_ACCUM:
10397 case MODE_VECTOR_UACCUM:
10398 nunits = GET_MODE_NUNITS (mode);
10399 break;
10400
10401 case MODE_INT:
10402 /* Check that there are no leftover bits. */
10403 gcc_assert (GET_MODE_BITSIZE (mode)
10404 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10405
10406 nunits = GET_MODE_BITSIZE (mode)
10407 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10408 break;
10409
10410 default:
10411 gcc_unreachable ();
10412 }
10413
10414 return make_vector_type (innertype, nunits, mode);
10415 }
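
/* For illustration: on a target that provides V4SImode,
   build_vector_type_for_mode (intSI_type_node, V4SImode) yields a
   four-element vector of 32-bit ints whose TYPE_MODE is V4SImode.
   Passing an integer mode instead, say TImode with a 32-bit inner type,
   produces GET_MODE_BITSIZE (TImode) / 32 == 4 units, provided the
   division is exact (the gcc_assert above enforces this).  */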
10416
10417 /* Similarly, but takes the inner type and number of units, which must be
10418 a power of two. */
10419
10420 tree
10421 build_vector_type (tree innertype, int nunits)
10422 {
10423 return make_vector_type (innertype, nunits, VOIDmode);
10424 }
10425
10426 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10427
10428 tree
10429 build_opaque_vector_type (tree innertype, int nunits)
10430 {
10431 tree t = make_vector_type (innertype, nunits, VOIDmode);
10432 tree cand;
10433 /* We always build the non-opaque variant before the opaque one,
10434 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10435 cand = TYPE_NEXT_VARIANT (t);
10436 if (cand
10437 && TYPE_VECTOR_OPAQUE (cand)
10438 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10439 return cand;
10440 /* Otherwise build a variant type and make sure to queue it after
10441 the non-opaque type. */
10442 cand = build_distinct_type_copy (t);
10443 TYPE_VECTOR_OPAQUE (cand) = true;
10444 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10445 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10446 TYPE_NEXT_VARIANT (t) = cand;
10447 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10448 return cand;
10449 }
10450
10451
10452 /* Given an initializer INIT, return TRUE if INIT is zero or some
10453 aggregate of zeros. Otherwise return FALSE. */
10454 bool
10455 initializer_zerop (const_tree init)
10456 {
10457 tree elt;
10458
10459 STRIP_NOPS (init);
10460
10461 switch (TREE_CODE (init))
10462 {
10463 case INTEGER_CST:
10464 return integer_zerop (init);
10465
10466 case REAL_CST:
10467 /* ??? Note that this is not correct for C4X float formats. There,
10468 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10469 negative exponent. */
10470 return real_zerop (init)
10471 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10472
10473 case FIXED_CST:
10474 return fixed_zerop (init);
10475
10476 case COMPLEX_CST:
10477 return integer_zerop (init)
10478 || (real_zerop (init)
10479 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10480 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10481
10482 case VECTOR_CST:
10483 {
10484 unsigned i;
10485 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10486 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10487 return false;
10488 return true;
10489 }
10490
10491 case CONSTRUCTOR:
10492 {
10493 unsigned HOST_WIDE_INT idx;
10494
10495 if (TREE_CLOBBER_P (init))
10496 return false;
10497 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10498 if (!initializer_zerop (elt))
10499 return false;
10500 return true;
10501 }
10502
10503 case STRING_CST:
10504 {
10505 int i;
10506
10507 /* We need to loop through all elements to handle cases like
10508 "\0" and "\0foobar". */
10509 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10510 if (TREE_STRING_POINTER (init)[i] != '\0')
10511 return false;
10512
10513 return true;
10514 }
10515
10516 default:
10517 return false;
10518 }
10519 }
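
/* A few illustrative cases: the empty string "" and an all-NUL string
   such as "\0\0" are zero; a CONSTRUCTOR like { 0, 0.0 } is zero as long
   as every element passes the test; but a REAL_CST of -0.0 is not,
   because its bit pattern differs from that of positive zero.  */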
10520
10521 /* Check if vector VEC consists entirely of equal elements and that
10522 the number of elements corresponds to the type of VEC.
10523 The function returns the first element of the vector,
10524 or NULL_TREE if the vector is not uniform. */
10525 tree
10526 uniform_vector_p (const_tree vec)
10527 {
10528 tree first, t;
10529 unsigned i;
10530
10531 if (vec == NULL_TREE)
10532 return NULL_TREE;
10533
10534 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10535
10536 if (TREE_CODE (vec) == VECTOR_CST)
10537 {
10538 first = VECTOR_CST_ELT (vec, 0);
10539 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10540 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10541 return NULL_TREE;
10542
10543 return first;
10544 }
10545
10546 else if (TREE_CODE (vec) == CONSTRUCTOR)
10547 {
10548 first = error_mark_node;
10549
10550 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10551 {
10552 if (i == 0)
10553 {
10554 first = t;
10555 continue;
10556 }
10557 if (!operand_equal_p (first, t, 0))
10558 return NULL_TREE;
10559 }
10560 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10561 return NULL_TREE;
10562
10563 return first;
10564 }
10565
10566 return NULL_TREE;
10567 }
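
/* For example, a VECTOR_CST such as { 4, 4, 4, 4 } yields the element 4,
   while { 1, 2, 3, 4 } yields NULL_TREE.  A CONSTRUCTOR is accepted only
   if it supplies every element of the vector type, so a partial
   initializer also yields NULL_TREE.  */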
10568
10569 /* Build an empty statement at location LOC. */
10570
10571 tree
10572 build_empty_stmt (location_t loc)
10573 {
10574 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10575 SET_EXPR_LOCATION (t, loc);
10576 return t;
10577 }
10578
10579
10580 /* Build an OpenMP clause with code CODE. LOC is the location of the
10581 clause. */
10582
10583 tree
10584 build_omp_clause (location_t loc, enum omp_clause_code code)
10585 {
10586 tree t;
10587 int size, length;
10588
10589 length = omp_clause_num_ops[code];
10590 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10591
10592 record_node_allocation_statistics (OMP_CLAUSE, size);
10593
10594 t = (tree) ggc_internal_alloc (size);
10595 memset (t, 0, size);
10596 TREE_SET_CODE (t, OMP_CLAUSE);
10597 OMP_CLAUSE_SET_CODE (t, code);
10598 OMP_CLAUSE_LOCATION (t) = loc;
10599
10600 return t;
10601 }
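
/* Sketch of a typical use, assuming LOC, DECL and CLAUSES are supplied
   by the caller:

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = decl;
     OMP_CLAUSE_CHAIN (c) = clauses;
     clauses = c;

   omp_clause_num_ops guarantees that enough operand slots were allocated
   for the chosen clause code.  */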
10602
10603 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10604 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10605 Except for the CODE and operand count field, other storage for the
10606 object is initialized to zeros. */
10607
10608 tree
10609 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10610 {
10611 tree t;
10612 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10613
10614 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10615 gcc_assert (len >= 1);
10616
10617 record_node_allocation_statistics (code, length);
10618
10619 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10620
10621 TREE_SET_CODE (t, code);
10622
10623 /* Can't use TREE_OPERAND to store the length because if checking is
10624 enabled, it will try to check the length before we store it. :-P */
10625 t->exp.operands[0] = build_int_cst (sizetype, len);
10626
10627 return t;
10628 }
10629
10630 /* Helper function for build_call_* functions; build a CALL_EXPR with
10631 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10632 the argument slots. */
10633
10634 static tree
10635 build_call_1 (tree return_type, tree fn, int nargs)
10636 {
10637 tree t;
10638
10639 t = build_vl_exp (CALL_EXPR, nargs + 3);
10640 TREE_TYPE (t) = return_type;
10641 CALL_EXPR_FN (t) = fn;
10642 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10643
10644 return t;
10645 }
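
/* The "+ 3" above accounts for the three fixed slots of a CALL_EXPR:
   operand 0 holds the operand count, operand 1 the called function
   (CALL_EXPR_FN) and operand 2 the static chain, so the NARGS argument
   slots follow and CALL_EXPR_ARG (t, i) refers to operand i + 3.  */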
10646
10647 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10648 FN and a null static chain slot. NARGS is the number of call arguments
10649 which are specified as "..." arguments. */
10650
10651 tree
10652 build_call_nary (tree return_type, tree fn, int nargs, ...)
10653 {
10654 tree ret;
10655 va_list args;
10656 va_start (args, nargs);
10657 ret = build_call_valist (return_type, fn, nargs, args);
10658 va_end (args);
10659 return ret;
10660 }
10661
10662 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10663 FN and a null static chain slot. NARGS is the number of call arguments
10664 which are specified as a va_list ARGS. */
10665
10666 tree
10667 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10668 {
10669 tree t;
10670 int i;
10671
10672 t = build_call_1 (return_type, fn, nargs);
10673 for (i = 0; i < nargs; i++)
10674 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10675 process_call_operands (t);
10676 return t;
10677 }
10678
10679 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10680 FN and a null static chain slot. NARGS is the number of call arguments
10681 which are specified as a tree array ARGS. */
10682
10683 tree
10684 build_call_array_loc (location_t loc, tree return_type, tree fn,
10685 int nargs, const tree *args)
10686 {
10687 tree t;
10688 int i;
10689
10690 t = build_call_1 (return_type, fn, nargs);
10691 for (i = 0; i < nargs; i++)
10692 CALL_EXPR_ARG (t, i) = args[i];
10693 process_call_operands (t);
10694 SET_EXPR_LOCATION (t, loc);
10695 return t;
10696 }
10697
10698 /* Like build_call_array, but takes a vec. */
10699
10700 tree
10701 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10702 {
10703 tree ret, t;
10704 unsigned int ix;
10705
10706 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10707 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10708 CALL_EXPR_ARG (ret, ix) = t;
10709 process_call_operands (ret);
10710 return ret;
10711 }
10712
10713 /* Conveniently construct a function call expression. FNDECL names the
10714 function to be called and N arguments are passed in the array
10715 ARGARRAY. */
10716
10717 tree
10718 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10719 {
10720 tree fntype = TREE_TYPE (fndecl);
10721 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10722
10723 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10724 }
10725
10726 /* Conveniently construct a function call expression. FNDECL names the
10727 function to be called and the arguments are passed in the vector
10728 VEC. */
10729
10730 tree
10731 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10732 {
10733 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10734 vec_safe_address (vec));
10735 }
10736
10737
10738 /* Conveniently construct a function call expression. FNDECL names the
10739 function to be called, N is the number of arguments, and the "..."
10740 parameters are the argument expressions. */
10741
10742 tree
10743 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10744 {
10745 va_list ap;
10746 tree *argarray = XALLOCAVEC (tree, n);
10747 int i;
10748
10749 va_start (ap, n);
10750 for (i = 0; i < n; i++)
10751 argarray[i] = va_arg (ap, tree);
10752 va_end (ap);
10753 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10754 }
10755
10756 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10757 varargs macros aren't supported by all bootstrap compilers. */
10758
10759 tree
10760 build_call_expr (tree fndecl, int n, ...)
10761 {
10762 va_list ap;
10763 tree *argarray = XALLOCAVEC (tree, n);
10764 int i;
10765
10766 va_start (ap, n);
10767 for (i = 0; i < n; i++)
10768 argarray[i] = va_arg (ap, tree);
10769 va_end (ap);
10770 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10771 }
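
/* A minimal usage sketch, assuming DEST, SRC and LEN are trees of the
   appropriate pointer and size types built elsewhere by the caller:

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dest, src, len);

   Since this goes through fold_build_call_array_loc, constant arguments
   may cause the resulting expression to be simplified.  */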
10772
10773 /* Build an internal call expression. This is just like CALL_EXPR, except
10774 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10775 internal function call. */
10776
10777 tree
10778 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10779 tree type, int n, ...)
10780 {
10781 va_list ap;
10782 int i;
10783
10784 tree fn = build_call_1 (type, NULL_TREE, n);
10785 va_start (ap, n);
10786 for (i = 0; i < n; i++)
10787 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10788 va_end (ap);
10789 SET_EXPR_LOCATION (fn, loc);
10790 CALL_EXPR_IFN (fn) = ifn;
10791 return fn;
10792 }
10793
10794 /* Create a new constant string literal and return a char* pointer to it.
10795 The STRING_CST value is the LEN characters at STR. */
10796 tree
10797 build_string_literal (int len, const char *str)
10798 {
10799 tree t, elem, index, type;
10800
10801 t = build_string (len, str);
10802 elem = build_type_variant (char_type_node, 1, 0);
10803 index = build_index_type (size_int (len - 1));
10804 type = build_array_type (elem, index);
10805 TREE_TYPE (t) = type;
10806 TREE_CONSTANT (t) = 1;
10807 TREE_READONLY (t) = 1;
10808 TREE_STATIC (t) = 1;
10809
10810 type = build_pointer_type (elem);
10811 t = build1 (ADDR_EXPR, type,
10812 build4 (ARRAY_REF, elem,
10813 t, integer_zero_node, NULL_TREE, NULL_TREE));
10814 return t;
10815 }
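
/* Note that LEN counts the bytes actually stored, so callers that want a
   NUL-terminated C string include the terminator explicitly, e.g.

     tree fmt = build_string_literal (strlen ("%d\n") + 1, "%d\n");

   which yields an ADDR_EXPR of pointer-to-char type suitable for passing
   to printf-like builtins.  */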
10816
10817
10818
10819 /* Return true if T (assumed to be a DECL) must be assigned a memory
10820 location. */
10821
10822 bool
10823 needs_to_live_in_memory (const_tree t)
10824 {
10825 return (TREE_ADDRESSABLE (t)
10826 || is_global_var (t)
10827 || (TREE_CODE (t) == RESULT_DECL
10828 && !DECL_BY_REFERENCE (t)
10829 && aggregate_value_p (t, current_function_decl)));
10830 }
10831
10832 /* Return the value of the integer constant X, sign-extended. */
10833
10834 HOST_WIDE_INT
10835 int_cst_value (const_tree x)
10836 {
10837 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10838 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10839
10840 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10841 gcc_assert (cst_and_fits_in_hwi (x));
10842
10843 if (bits < HOST_BITS_PER_WIDE_INT)
10844 {
10845 bool negative = ((val >> (bits - 1)) & 1) != 0;
10846 if (negative)
10847 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10848 else
10849 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10850 }
10851
10852 return val;
10853 }
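
/* Worked example: for an INTEGER_CST whose type has 8-bit precision and
   whose low bits are 0xff, bit 7 is set, so the value is sign-extended
   and the function returns (HOST_WIDE_INT) -1; with low bits 0x7f it
   returns 127.  The extension is driven purely by TYPE_PRECISION, not by
   the signedness of the type.  */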
10854
10855 /* If TYPE is an integral or pointer type, return an integer type with
10856 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10857 if TYPE is already an integer type of signedness UNSIGNEDP. */
10858
10859 tree
10860 signed_or_unsigned_type_for (int unsignedp, tree type)
10861 {
10862 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10863 return type;
10864
10865 if (TREE_CODE (type) == VECTOR_TYPE)
10866 {
10867 tree inner = TREE_TYPE (type);
10868 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10869 if (!inner2)
10870 return NULL_TREE;
10871 if (inner == inner2)
10872 return type;
10873 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10874 }
10875
10876 if (!INTEGRAL_TYPE_P (type)
10877 && !POINTER_TYPE_P (type)
10878 && TREE_CODE (type) != OFFSET_TYPE)
10879 return NULL_TREE;
10880
10881 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10882 }
10883
10884 /* If TYPE is an integral or pointer type, return an integer type with
10885 the same precision which is unsigned, or itself if TYPE is already an
10886 unsigned integer type. */
10887
10888 tree
10889 unsigned_type_for (tree type)
10890 {
10891 return signed_or_unsigned_type_for (1, type);
10892 }
10893
10894 /* If TYPE is an integral or pointer type, return an integer type with
10895 the same precision which is signed, or itself if TYPE is already a
10896 signed integer type. */
10897
10898 tree
10899 signed_type_for (tree type)
10900 {
10901 return signed_or_unsigned_type_for (0, type);
10902 }
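
/* For example, given a signed 32-bit INTEGER_TYPE, unsigned_type_for
   returns a 32-bit unsigned type obtained from
   build_nonstandard_integer_type; given a pointer type it returns an
   unsigned integer type of the pointer's precision; and for a type that
   is not integral, pointer or offset it returns NULL_TREE.  */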
10903
10904 /* If TYPE is a vector type, return a signed integer vector type with the
10905 same width and number of subparts. Otherwise return boolean_type_node. */
10906
10907 tree
10908 truth_type_for (tree type)
10909 {
10910 if (TREE_CODE (type) == VECTOR_TYPE)
10911 {
10912 tree elem = lang_hooks.types.type_for_size
10913 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10914 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10915 }
10916 else
10917 return boolean_type_node;
10918 }
10919
10920 /* Returns the largest value obtainable by casting something in INNER type to
10921 OUTER type. */
10922
10923 tree
10924 upper_bound_in_type (tree outer, tree inner)
10925 {
10926 unsigned int det = 0;
10927 unsigned oprec = TYPE_PRECISION (outer);
10928 unsigned iprec = TYPE_PRECISION (inner);
10929 unsigned prec;
10930
10931 /* Compute a unique number for every combination. */
10932 det |= (oprec > iprec) ? 4 : 0;
10933 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10934 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10935
10936 /* Determine the exponent to use. */
10937 switch (det)
10938 {
10939 case 0:
10940 case 1:
10941 /* oprec <= iprec, outer: signed, inner: don't care. */
10942 prec = oprec - 1;
10943 break;
10944 case 2:
10945 case 3:
10946 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10947 prec = oprec;
10948 break;
10949 case 4:
10950 /* oprec > iprec, outer: signed, inner: signed. */
10951 prec = iprec - 1;
10952 break;
10953 case 5:
10954 /* oprec > iprec, outer: signed, inner: unsigned. */
10955 prec = iprec;
10956 break;
10957 case 6:
10958 /* oprec > iprec, outer: unsigned, inner: signed. */
10959 prec = oprec;
10960 break;
10961 case 7:
10962 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10963 prec = iprec;
10964 break;
10965 default:
10966 gcc_unreachable ();
10967 }
10968
10969 return wide_int_to_tree (outer,
10970 wi::mask (prec, false, TYPE_PRECISION (outer)));
10971 }
10972
10973 /* Returns the smallest value obtainable by casting something in INNER type to
10974 OUTER type. */
10975
10976 tree
10977 lower_bound_in_type (tree outer, tree inner)
10978 {
10979 unsigned oprec = TYPE_PRECISION (outer);
10980 unsigned iprec = TYPE_PRECISION (inner);
10981
10982 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10983 and obtain 0. */
10984 if (TYPE_UNSIGNED (outer)
10985 /* If we are widening something of an unsigned type, OUTER type
10986 contains all values of INNER type. In particular, both INNER
10987 and OUTER types have zero in common. */
10988 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10989 return build_int_cst (outer, 0);
10990 else
10991 {
10992 /* If we are widening a signed type to another signed type, we
10993 want to obtain -2^^(iprec-1). If we are keeping the
10994 precision or narrowing to a signed type, we want to obtain
10995 -2^(oprec-1). */
10996 unsigned prec = oprec > iprec ? iprec : oprec;
10997 return wide_int_to_tree (outer,
10998 wi::mask (prec - 1, true,
10999 TYPE_PRECISION (outer)));
11000 }
11001 }
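
/* Worked examples: casting a 32-bit signed INNER to a 16-bit unsigned
   OUTER gives an upper bound of 0xffff and a lower bound of 0, since
   OUTER is unsigned.  Widening a signed 16-bit INNER to a signed 32-bit
   OUTER gives an upper bound of 2^15 - 1 and a lower bound of -2^15,
   i.e. the range of the narrower type.  */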
11002
11003 /* Return nonzero if two operands that are suitable for PHI nodes are
11004 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11005 SSA_NAME or invariant. Note that this is strictly an optimization.
11006 That is, callers of this function can directly call operand_equal_p
11007 and get the same result, only slower. */
11008
11009 int
11010 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11011 {
11012 if (arg0 == arg1)
11013 return 1;
11014 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11015 return 0;
11016 return operand_equal_p (arg0, arg1, 0);
11017 }
11018
11019 /* Returns the number of zeros at the end of the binary representation of X. */
11020
11021 tree
11022 num_ending_zeros (const_tree x)
11023 {
11024 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11025 }
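
/* E.g. for X == 40 (binary 101000) the result is an INTEGER_CST of value
   3 in the type of X.  */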
11026
11027
11028 #define WALK_SUBTREE(NODE) \
11029 do \
11030 { \
11031 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11032 if (result) \
11033 return result; \
11034 } \
11035 while (0)
11036
11037 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11038 to be walked whenever a type is seen in the tree. The rest of the operands
11039 and the return value are as for walk_tree. */
11040
11041 static tree
11042 walk_type_fields (tree type, walk_tree_fn func, void *data,
11043 hash_set<tree> *pset, walk_tree_lh lh)
11044 {
11045 tree result = NULL_TREE;
11046
11047 switch (TREE_CODE (type))
11048 {
11049 case POINTER_TYPE:
11050 case REFERENCE_TYPE:
11051 case VECTOR_TYPE:
11052 /* We have to worry about mutually recursive pointers. These can't
11053 be written in C. They can in Ada. It's pathological, but
11054 there's an ACATS test (c38102a) that checks it. Deal with this
11055 by checking if we're pointing to another pointer, that one
11056 points to another pointer, that one does too, and we have no htab.
11057 If so, get a hash table. We check three levels deep to avoid
11058 the cost of the hash table if we don't need one. */
11059 if (POINTER_TYPE_P (TREE_TYPE (type))
11060 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11061 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11062 && !pset)
11063 {
11064 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11065 func, data);
11066 if (result)
11067 return result;
11068
11069 break;
11070 }
11071
11072 /* ... fall through ... */
11073
11074 case COMPLEX_TYPE:
11075 WALK_SUBTREE (TREE_TYPE (type));
11076 break;
11077
11078 case METHOD_TYPE:
11079 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11080
11081 /* Fall through. */
11082
11083 case FUNCTION_TYPE:
11084 WALK_SUBTREE (TREE_TYPE (type));
11085 {
11086 tree arg;
11087
11088 /* We never want to walk into default arguments. */
11089 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11090 WALK_SUBTREE (TREE_VALUE (arg));
11091 }
11092 break;
11093
11094 case ARRAY_TYPE:
11095 /* Don't follow this node's type if it is a pointer, for fear that
11096 we'll have infinite recursion. If we have a PSET, then we
11097 need not fear. */
11098 if (pset
11099 || (!POINTER_TYPE_P (TREE_TYPE (type))
11100 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11101 WALK_SUBTREE (TREE_TYPE (type));
11102 WALK_SUBTREE (TYPE_DOMAIN (type));
11103 break;
11104
11105 case OFFSET_TYPE:
11106 WALK_SUBTREE (TREE_TYPE (type));
11107 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11108 break;
11109
11110 default:
11111 break;
11112 }
11113
11114 return NULL_TREE;
11115 }
11116
11117 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11118 called with the DATA and the address of each sub-tree. If FUNC returns a
11119 non-NULL value, the traversal is stopped, and the value returned by FUNC
11120 is returned. If PSET is non-NULL it is used to record the nodes visited,
11121 and to avoid visiting a node more than once. */
11122
11123 tree
11124 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11125 hash_set<tree> *pset, walk_tree_lh lh)
11126 {
11127 enum tree_code code;
11128 int walk_subtrees;
11129 tree result;
11130
11131 #define WALK_SUBTREE_TAIL(NODE) \
11132 do \
11133 { \
11134 tp = & (NODE); \
11135 goto tail_recurse; \
11136 } \
11137 while (0)
11138
11139 tail_recurse:
11140 /* Skip empty subtrees. */
11141 if (!*tp)
11142 return NULL_TREE;
11143
11144 /* Don't walk the same tree twice, if the user has requested
11145 that we avoid doing so. */
11146 if (pset && pset->add (*tp))
11147 return NULL_TREE;
11148
11149 /* Call the function. */
11150 walk_subtrees = 1;
11151 result = (*func) (tp, &walk_subtrees, data);
11152
11153 /* If we found something, return it. */
11154 if (result)
11155 return result;
11156
11157 code = TREE_CODE (*tp);
11158
11159 /* Even if we didn't, FUNC may have decided that there was nothing
11160 interesting below this point in the tree. */
11161 if (!walk_subtrees)
11162 {
11163 /* But we still need to check our siblings. */
11164 if (code == TREE_LIST)
11165 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11166 else if (code == OMP_CLAUSE)
11167 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11168 else
11169 return NULL_TREE;
11170 }
11171
11172 if (lh)
11173 {
11174 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11175 if (result || !walk_subtrees)
11176 return result;
11177 }
11178
11179 switch (code)
11180 {
11181 case ERROR_MARK:
11182 case IDENTIFIER_NODE:
11183 case INTEGER_CST:
11184 case REAL_CST:
11185 case FIXED_CST:
11186 case VECTOR_CST:
11187 case STRING_CST:
11188 case BLOCK:
11189 case PLACEHOLDER_EXPR:
11190 case SSA_NAME:
11191 case FIELD_DECL:
11192 case RESULT_DECL:
11193 /* None of these have subtrees other than those already walked
11194 above. */
11195 break;
11196
11197 case TREE_LIST:
11198 WALK_SUBTREE (TREE_VALUE (*tp));
11199 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11200 break;
11201
11202 case TREE_VEC:
11203 {
11204 int len = TREE_VEC_LENGTH (*tp);
11205
11206 if (len == 0)
11207 break;
11208
11209 /* Walk all elements but the first. */
11210 while (--len)
11211 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11212
11213 /* Now walk the first one as a tail call. */
11214 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11215 }
11216
11217 case COMPLEX_CST:
11218 WALK_SUBTREE (TREE_REALPART (*tp));
11219 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11220
11221 case CONSTRUCTOR:
11222 {
11223 unsigned HOST_WIDE_INT idx;
11224 constructor_elt *ce;
11225
11226 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11227 idx++)
11228 WALK_SUBTREE (ce->value);
11229 }
11230 break;
11231
11232 case SAVE_EXPR:
11233 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11234
11235 case BIND_EXPR:
11236 {
11237 tree decl;
11238 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11239 {
11240 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11241 into declarations that are just mentioned, rather than
11242 declared; they don't really belong to this part of the tree.
11243 And, we can see cycles: the initializer for a declaration
11244 can refer to the declaration itself. */
11245 WALK_SUBTREE (DECL_INITIAL (decl));
11246 WALK_SUBTREE (DECL_SIZE (decl));
11247 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11248 }
11249 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11250 }
11251
11252 case STATEMENT_LIST:
11253 {
11254 tree_stmt_iterator i;
11255 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11256 WALK_SUBTREE (*tsi_stmt_ptr (i));
11257 }
11258 break;
11259
11260 case OMP_CLAUSE:
11261 switch (OMP_CLAUSE_CODE (*tp))
11262 {
11263 case OMP_CLAUSE_GANG:
11264 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11265 /* FALLTHRU */
11266
11267 case OMP_CLAUSE_DEVICE_RESIDENT:
11268 case OMP_CLAUSE_USE_DEVICE:
11269 case OMP_CLAUSE_ASYNC:
11270 case OMP_CLAUSE_WAIT:
11271 case OMP_CLAUSE_WORKER:
11272 case OMP_CLAUSE_VECTOR:
11273 case OMP_CLAUSE_NUM_GANGS:
11274 case OMP_CLAUSE_NUM_WORKERS:
11275 case OMP_CLAUSE_VECTOR_LENGTH:
11276 case OMP_CLAUSE_PRIVATE:
11277 case OMP_CLAUSE_SHARED:
11278 case OMP_CLAUSE_FIRSTPRIVATE:
11279 case OMP_CLAUSE_COPYIN:
11280 case OMP_CLAUSE_COPYPRIVATE:
11281 case OMP_CLAUSE_FINAL:
11282 case OMP_CLAUSE_IF:
11283 case OMP_CLAUSE_NUM_THREADS:
11284 case OMP_CLAUSE_SCHEDULE:
11285 case OMP_CLAUSE_UNIFORM:
11286 case OMP_CLAUSE_DEPEND:
11287 case OMP_CLAUSE_NUM_TEAMS:
11288 case OMP_CLAUSE_THREAD_LIMIT:
11289 case OMP_CLAUSE_DEVICE:
11290 case OMP_CLAUSE_DIST_SCHEDULE:
11291 case OMP_CLAUSE_SAFELEN:
11292 case OMP_CLAUSE_SIMDLEN:
11293 case OMP_CLAUSE__LOOPTEMP_:
11294 case OMP_CLAUSE__SIMDUID_:
11295 case OMP_CLAUSE__CILK_FOR_COUNT_:
11296 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11297 /* FALLTHRU */
11298
11299 case OMP_CLAUSE_INDEPENDENT:
11300 case OMP_CLAUSE_NOWAIT:
11301 case OMP_CLAUSE_ORDERED:
11302 case OMP_CLAUSE_DEFAULT:
11303 case OMP_CLAUSE_UNTIED:
11304 case OMP_CLAUSE_MERGEABLE:
11305 case OMP_CLAUSE_PROC_BIND:
11306 case OMP_CLAUSE_INBRANCH:
11307 case OMP_CLAUSE_NOTINBRANCH:
11308 case OMP_CLAUSE_FOR:
11309 case OMP_CLAUSE_PARALLEL:
11310 case OMP_CLAUSE_SECTIONS:
11311 case OMP_CLAUSE_TASKGROUP:
11312 case OMP_CLAUSE_AUTO:
11313 case OMP_CLAUSE_SEQ:
11314 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11315
11316 case OMP_CLAUSE_LASTPRIVATE:
11317 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11318 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11319 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11320
11321 case OMP_CLAUSE_COLLAPSE:
11322 {
11323 int i;
11324 for (i = 0; i < 3; i++)
11325 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11326 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11327 }
11328
11329 case OMP_CLAUSE_LINEAR:
11330 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11331 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11332 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11333 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11334
11335 case OMP_CLAUSE_ALIGNED:
11336 case OMP_CLAUSE_FROM:
11337 case OMP_CLAUSE_TO:
11338 case OMP_CLAUSE_MAP:
11339 case OMP_CLAUSE__CACHE_:
11340 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11341 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11342 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11343
11344 case OMP_CLAUSE_REDUCTION:
11345 {
11346 int i;
11347 for (i = 0; i < 4; i++)
11348 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11349 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11350 }
11351
11352 default:
11353 gcc_unreachable ();
11354 }
11355 break;
11356
11357 case TARGET_EXPR:
11358 {
11359 int i, len;
11360
11361 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11362 But, we only want to walk once. */
11363 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11364 for (i = 0; i < len; ++i)
11365 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11366 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11367 }
11368
11369 case DECL_EXPR:
11370 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11371 defining. We only want to walk into these fields of a type in this
11372 case and not in the general case of a mere reference to the type.
11373
11374 The criterion is as follows: if the field can be an expression, it
11375 must be walked only here. This should be in keeping with the fields
11376 that are directly gimplified in gimplify_type_sizes in order for the
11377 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11378 variable-sized types.
11379
11380 Note that DECLs get walked as part of processing the BIND_EXPR. */
11381 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11382 {
11383 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11384 if (TREE_CODE (*type_p) == ERROR_MARK)
11385 return NULL_TREE;
11386
11387 /* Call the function for the type. See if it returns anything or
11388 doesn't want us to continue. If we are to continue, walk both
11389 the normal fields and those for the declaration case. */
11390 result = (*func) (type_p, &walk_subtrees, data);
11391 if (result || !walk_subtrees)
11392 return result;
11393
11394 /* But do not walk a pointed-to type since it may itself need to
11395 be walked in the declaration case if it isn't anonymous. */
11396 if (!POINTER_TYPE_P (*type_p))
11397 {
11398 result = walk_type_fields (*type_p, func, data, pset, lh);
11399 if (result)
11400 return result;
11401 }
11402
11403 /* If this is a record type, also walk the fields. */
11404 if (RECORD_OR_UNION_TYPE_P (*type_p))
11405 {
11406 tree field;
11407
11408 for (field = TYPE_FIELDS (*type_p); field;
11409 field = DECL_CHAIN (field))
11410 {
11411 /* We'd like to look at the type of the field, but we can
11412 easily get infinite recursion. So assume it's pointed
11413 to elsewhere in the tree. Also, ignore things that
11414 aren't fields. */
11415 if (TREE_CODE (field) != FIELD_DECL)
11416 continue;
11417
11418 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11419 WALK_SUBTREE (DECL_SIZE (field));
11420 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11421 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11422 WALK_SUBTREE (DECL_QUALIFIER (field));
11423 }
11424 }
11425
11426 /* Same for scalar types. */
11427 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11428 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11429 || TREE_CODE (*type_p) == INTEGER_TYPE
11430 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11431 || TREE_CODE (*type_p) == REAL_TYPE)
11432 {
11433 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11434 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11435 }
11436
11437 WALK_SUBTREE (TYPE_SIZE (*type_p));
11438 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11439 }
11440 /* FALLTHRU */
11441
11442 default:
11443 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11444 {
11445 int i, len;
11446
11447 /* Walk over all the sub-trees of this operand. */
11448 len = TREE_OPERAND_LENGTH (*tp);
11449
11450 /* Go through the subtrees. We need to do this in forward order so
11451 that the scope of a FOR_EXPR is handled properly. */
11452 if (len)
11453 {
11454 for (i = 0; i < len - 1; ++i)
11455 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11456 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11457 }
11458 }
11459 /* If this is a type, walk the needed fields in the type. */
11460 else if (TYPE_P (*tp))
11461 return walk_type_fields (*tp, func, data, pset, lh);
11462 break;
11463 }
11464
11465 /* We didn't find what we were looking for. */
11466 return NULL_TREE;
11467
11468 #undef WALK_SUBTREE_TAIL
11469 }
11470 #undef WALK_SUBTREE
11471
11472 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11473
11474 tree
11475 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11476 walk_tree_lh lh)
11477 {
11478 tree result;
11479
11480 hash_set<tree> pset;
11481 result = walk_tree_1 (tp, func, data, &pset, lh);
11482 return result;
11483 }
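
/* For illustration, a minimal walk_tree_fn callback and a hypothetical
   caller; the names count_calls and count_calls_in are made up for the
   example.

     static tree
     count_calls (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(unsigned *) data;
       return NULL_TREE;
     }

     static unsigned
     count_calls_in (tree body)
     {
       unsigned n = 0;
       walk_tree_without_duplicates (&body, count_calls, &n);
       return n;
     }

   Returning a non-NULL value from the callback stops the walk and
   propagates that value back to the caller. */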
11484
11485
11486 tree
11487 tree_block (tree t)
11488 {
11489 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11490
11491 if (IS_EXPR_CODE_CLASS (c))
11492 return LOCATION_BLOCK (t->exp.locus);
11493 gcc_unreachable ();
11494 return NULL;
11495 }
11496
11497 void
11498 tree_set_block (tree t, tree b)
11499 {
11500 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11501
11502 if (IS_EXPR_CODE_CLASS (c))
11503 {
11504 if (b)
11505 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11506 else
11507 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11508 }
11509 else
11510 gcc_unreachable ();
11511 }
11512
11513 /* Create a nameless artificial label and put it in the current
11514 function context. The label has a location of LOC. Returns the
11515 newly created label. */
11516
11517 tree
11518 create_artificial_label (location_t loc)
11519 {
11520 tree lab = build_decl (loc,
11521 LABEL_DECL, NULL_TREE, void_type_node);
11522
11523 DECL_ARTIFICIAL (lab) = 1;
11524 DECL_IGNORED_P (lab) = 1;
11525 DECL_CONTEXT (lab) = current_function_decl;
11526 return lab;
11527 }
11528
11529 /* Given a tree, try to return a useful variable name that we can use
11530 to prefix a temporary that is being assigned the value of the tree.
11531 I.E. given <temp> = &A, return A. */
11532
11533 const char *
11534 get_name (tree t)
11535 {
11536 tree stripped_decl;
11537
11538 stripped_decl = t;
11539 STRIP_NOPS (stripped_decl);
11540 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11541 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11542 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11543 {
11544 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11545 if (!name)
11546 return NULL;
11547 return IDENTIFIER_POINTER (name);
11548 }
11549 else
11550 {
11551 switch (TREE_CODE (stripped_decl))
11552 {
11553 case ADDR_EXPR:
11554 return get_name (TREE_OPERAND (stripped_decl, 0));
11555 default:
11556 return NULL;
11557 }
11558 }
11559 }
11560
11561 /* Return true if FNTYPE has a variable argument list. */
11562
11563 bool
11564 stdarg_p (const_tree fntype)
11565 {
11566 function_args_iterator args_iter;
11567 tree n = NULL_TREE, t;
11568
11569 if (!fntype)
11570 return false;
11571
11572 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11573 {
11574 n = t;
11575 }
11576
11577 return n != NULL_TREE && n != void_type_node;
11578 }
11579
11580 /* Return true if FNTYPE has a prototype. */
11581
11582 bool
11583 prototype_p (const_tree fntype)
11584 {
11585 tree t;
11586
11587 gcc_assert (fntype != NULL_TREE);
11588
11589 t = TYPE_ARG_TYPES (fntype);
11590 return (t != NULL_TREE);
11591 }
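
/* For illustration, a sketch of the TYPE_ARG_TYPES conventions that
   stdarg_p and prototype_p rely on. An argument list ending in
   void_list_node means a fixed argument list, one that does not means
   a variadic one, and a NULL list means no prototype at all.

     tree fixed = build_function_type
       (void_type_node,
        tree_cons (NULL_TREE, integer_type_node, void_list_node));
     tree vararg = build_function_type
       (void_type_node,
        tree_cons (NULL_TREE, integer_type_node, NULL_TREE));

   Here stdarg_p (fixed) is false and stdarg_p (vararg) is true, while
   prototype_p is true for both and false only for a NULL
   TYPE_ARG_TYPES. */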
11592
11593 /* If BLOCK is inlined from an __attribute__((__artificial__))
11594 routine, return pointer to location from where it has been
11595 called. */
11596 location_t *
11597 block_nonartificial_location (tree block)
11598 {
11599 location_t *ret = NULL;
11600
11601 while (block && TREE_CODE (block) == BLOCK
11602 && BLOCK_ABSTRACT_ORIGIN (block))
11603 {
11604 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11605
11606 while (TREE_CODE (ao) == BLOCK
11607 && BLOCK_ABSTRACT_ORIGIN (ao)
11608 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11609 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11610
11611 if (TREE_CODE (ao) == FUNCTION_DECL)
11612 {
11613 /* If AO is an artificial inline, point RET to the
11614 call site locus at which it has been inlined and continue
11615 the loop, in case AO's caller is also an artificial
11616 inline. */
11617 if (DECL_DECLARED_INLINE_P (ao)
11618 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11619 ret = &BLOCK_SOURCE_LOCATION (block);
11620 else
11621 break;
11622 }
11623 else if (TREE_CODE (ao) != BLOCK)
11624 break;
11625
11626 block = BLOCK_SUPERCONTEXT (block);
11627 }
11628 return ret;
11629 }
11630
11631
11632 /* If EXP is inlined from an __attribute__((__artificial__))
11633 function, return the location of the original call expression. */
11634
11635 location_t
11636 tree_nonartificial_location (tree exp)
11637 {
11638 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11639
11640 if (loc)
11641 return *loc;
11642 else
11643 return EXPR_LOCATION (exp);
11644 }
11645
11646
11647 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11648 nodes. */
11649
11650 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11651
11652 hashval_t
11653 cl_option_hasher::hash (tree x)
11654 {
11655 const_tree const t = x;
11656 const char *p;
11657 size_t i;
11658 size_t len = 0;
11659 hashval_t hash = 0;
11660
11661 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11662 {
11663 p = (const char *)TREE_OPTIMIZATION (t);
11664 len = sizeof (struct cl_optimization);
11665 }
11666
11667 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11668 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11669
11670 else
11671 gcc_unreachable ();
11672
11673 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11674 something else. */
11675 for (i = 0; i < len; i++)
11676 if (p[i])
11677 hash = (hash << 4) ^ ((i << 2) | p[i]);
11678
11679 return hash;
11680 }
11681
11682 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11683 TARGET_OPTION tree node) is the same as that given by *Y, a node of
11684 the same kind. */
11685
11686 bool
11687 cl_option_hasher::equal (tree x, tree y)
11688 {
11689 const_tree const xt = x;
11690 const_tree const yt = y;
11691 const char *xp;
11692 const char *yp;
11693 size_t len;
11694
11695 if (TREE_CODE (xt) != TREE_CODE (yt))
11696 return 0;
11697
11698 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11699 {
11700 xp = (const char *)TREE_OPTIMIZATION (xt);
11701 yp = (const char *)TREE_OPTIMIZATION (yt);
11702 len = sizeof (struct cl_optimization);
11703 }
11704
11705 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11706 {
11707 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11708 TREE_TARGET_OPTION (yt));
11709 }
11710
11711 else
11712 gcc_unreachable ();
11713
11714 return (memcmp (xp, yp, len) == 0);
11715 }
11716
11717 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11718
11719 tree
11720 build_optimization_node (struct gcc_options *opts)
11721 {
11722 tree t;
11723
11724 /* Use the cache of optimization nodes. */
11725
11726 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11727 opts);
11728
11729 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11730 t = *slot;
11731 if (!t)
11732 {
11733 /* Insert this one into the hash table. */
11734 t = cl_optimization_node;
11735 *slot = t;
11736
11737 /* Make a new node for next time round. */
11738 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11739 }
11740
11741 return t;
11742 }
11743
11744 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11745
11746 tree
11747 build_target_option_node (struct gcc_options *opts)
11748 {
11749 tree t;
11750
11751 /* Use the cache of target option nodes. */
11752
11753 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11754 opts);
11755
11756 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11757 t = *slot;
11758 if (!t)
11759 {
11760 /* Insert this one into the hash table. */
11761 t = cl_target_option_node;
11762 *slot = t;
11763
11764 /* Make a new node for next time round. */
11765 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11766 }
11767
11768 return t;
11769 }
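
/* For illustration, a hypothetical caller that has just applied
   per-function option overrides might record the results like this,
   with FNDECL standing for some FUNCTION_DECL:

     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
       = build_target_option_node (&global_options);

   Because both builders cache their results in cl_option_hash_table,
   functions built with identical option sets share one node and can be
   compared by pointer equality. */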
11770
11771 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11772 so that they aren't saved during PCH writing. */
11773
11774 void
11775 prepare_target_option_nodes_for_pch (void)
11776 {
11777 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11778 for (; iter != cl_option_hash_table->end (); ++iter)
11779 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11780 TREE_TARGET_GLOBALS (*iter) = NULL;
11781 }
11782
11783 /* Determine the "ultimate origin" of a block. The block may be an inlined
11784 instance of an inlined instance of a block which is local to an inline
11785 function, so we have to trace all of the way back through the origin chain
11786 to find out what sort of node actually served as the original seed for the
11787 given block. */
11788
11789 tree
11790 block_ultimate_origin (const_tree block)
11791 {
11792 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11793
11794 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11795 we're trying to output the abstract instance of this function. */
11796 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11797 return NULL_TREE;
11798
11799 if (immediate_origin == NULL_TREE)
11800 return NULL_TREE;
11801 else
11802 {
11803 tree ret_val;
11804 tree lookahead = immediate_origin;
11805
11806 do
11807 {
11808 ret_val = lookahead;
11809 lookahead = (TREE_CODE (ret_val) == BLOCK
11810 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11811 }
11812 while (lookahead != NULL && lookahead != ret_val);
11813
11814 /* The block's abstract origin chain may not be the *ultimate* origin of
11815 the block. It could lead to a DECL that has an abstract origin set.
11816 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11817 will give us if it has one). Note that DECL's abstract origins are
11818 supposed to be the most distant ancestor (or so decl_ultimate_origin
11819 claims), so we don't need to loop following the DECL origins. */
11820 if (DECL_P (ret_val))
11821 return DECL_ORIGIN (ret_val);
11822
11823 return ret_val;
11824 }
11825 }
11826
11827 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11828 no instruction. */
11829
11830 bool
11831 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11832 {
11833 /* Use precision rather than machine mode when we can, which gives
11834 the correct answer even for submode (bit-field) types. */
11835 if ((INTEGRAL_TYPE_P (outer_type)
11836 || POINTER_TYPE_P (outer_type)
11837 || TREE_CODE (outer_type) == OFFSET_TYPE)
11838 && (INTEGRAL_TYPE_P (inner_type)
11839 || POINTER_TYPE_P (inner_type)
11840 || TREE_CODE (inner_type) == OFFSET_TYPE))
11841 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11842
11843 /* Otherwise fall back on comparing machine modes (e.g. for
11844 aggregate types, floats). */
11845 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11846 }
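
/* For illustration, assuming a typical target where int is 32 bits and
   long long is 64 bits:

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)

   is true, because the precisions match and signedness is not checked
   here, while

     tree_nop_conversion_p (long_long_integer_type_node, integer_type_node)

   is false, because widening from 32 to 64 bits needs an instruction. */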
11847
11848 /* Return true iff conversion in EXP generates no instruction. Mark
11849 it inline so that we fully inline into the stripping functions even
11850 though we have two uses of this function. */
11851
11852 static inline bool
11853 tree_nop_conversion (const_tree exp)
11854 {
11855 tree outer_type, inner_type;
11856
11857 if (!CONVERT_EXPR_P (exp)
11858 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11859 return false;
11860 if (TREE_OPERAND (exp, 0) == error_mark_node)
11861 return false;
11862
11863 outer_type = TREE_TYPE (exp);
11864 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11865
11866 if (!inner_type)
11867 return false;
11868
11869 return tree_nop_conversion_p (outer_type, inner_type);
11870 }
11871
11872 /* Return true iff conversion in EXP generates no instruction. Don't
11873 consider conversions changing the signedness. */
11874
11875 static bool
11876 tree_sign_nop_conversion (const_tree exp)
11877 {
11878 tree outer_type, inner_type;
11879
11880 if (!tree_nop_conversion (exp))
11881 return false;
11882
11883 outer_type = TREE_TYPE (exp);
11884 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11885
11886 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11887 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11888 }
11889
11890 /* Strip conversions from EXP according to tree_nop_conversion and
11891 return the resulting expression. */
11892
11893 tree
11894 tree_strip_nop_conversions (tree exp)
11895 {
11896 while (tree_nop_conversion (exp))
11897 exp = TREE_OPERAND (exp, 0);
11898 return exp;
11899 }
11900
11901 /* Strip conversions from EXP according to tree_sign_nop_conversion
11902 and return the resulting expression. */
11903
11904 tree
11905 tree_strip_sign_nop_conversions (tree exp)
11906 {
11907 while (tree_sign_nop_conversion (exp))
11908 exp = TREE_OPERAND (exp, 0);
11909 return exp;
11910 }
11911
11912 /* Strip any floating point extensions from EXP and return the result. */
11913 tree
11914 strip_float_extensions (tree exp)
11915 {
11916 tree sub, expt, subt;
11917
11918 /* For a floating point constant, look up the narrowest type that can hold
11919 it properly and handle it like (type)(narrowest_type)constant.
11920 This way we can optimize for instance a=a*2.0 where "a" is float
11921 but 2.0 is a double constant. */
11922 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11923 {
11924 REAL_VALUE_TYPE orig;
11925 tree type = NULL;
11926
11927 orig = TREE_REAL_CST (exp);
11928 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11929 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11930 type = float_type_node;
11931 else if (TYPE_PRECISION (TREE_TYPE (exp))
11932 > TYPE_PRECISION (double_type_node)
11933 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11934 type = double_type_node;
11935 if (type)
11936 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11937 }
11938
11939 if (!CONVERT_EXPR_P (exp))
11940 return exp;
11941
11942 sub = TREE_OPERAND (exp, 0);
11943 subt = TREE_TYPE (sub);
11944 expt = TREE_TYPE (exp);
11945
11946 if (!FLOAT_TYPE_P (subt))
11947 return exp;
11948
11949 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11950 return exp;
11951
11952 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11953 return exp;
11954
11955 return strip_float_extensions (sub);
11956 }
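
/* For illustration, in the a = a * 2.0 case mentioned above, where A is
   a float, the (double) a operand is stripped back to a, and a sketch
   of the constant case is:

     tree t = strip_float_extensions (build_real (double_type_node,
                                                  dconst2));

   On a typical target 2.0 truncates to float exactly, so TREE_TYPE (t)
   would be float_type_node. */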
11957
11958 /* Strip out all handled components that produce invariant
11959 offsets. */
11960
11961 const_tree
11962 strip_invariant_refs (const_tree op)
11963 {
11964 while (handled_component_p (op))
11965 {
11966 switch (TREE_CODE (op))
11967 {
11968 case ARRAY_REF:
11969 case ARRAY_RANGE_REF:
11970 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11971 || TREE_OPERAND (op, 2) != NULL_TREE
11972 || TREE_OPERAND (op, 3) != NULL_TREE)
11973 return NULL;
11974 break;
11975
11976 case COMPONENT_REF:
11977 if (TREE_OPERAND (op, 2) != NULL_TREE)
11978 return NULL;
11979 break;
11980
11981 default:;
11982 }
11983 op = TREE_OPERAND (op, 0);
11984 }
11985
11986 return op;
11987 }
11988
11989 static GTY(()) tree gcc_eh_personality_decl;
11990
11991 /* Return the GCC personality function decl. */
11992
11993 tree
11994 lhd_gcc_personality (void)
11995 {
11996 if (!gcc_eh_personality_decl)
11997 gcc_eh_personality_decl = build_personality_function ("gcc");
11998 return gcc_eh_personality_decl;
11999 }
12000
12001 /* TARGET is a call target of GIMPLE call statement
12002 (obtained by gimple_call_fn). Return true if it is
12003 an OBJ_TYPE_REF representing a virtual call of a C++ method.
12004 (As opposed to OBJ_TYPE_REF representing objc calls
12005 through a cast where middle-end devirtualization machinery
12006 can't apply.) */
12007
12008 bool
12009 virtual_method_call_p (const_tree target)
12010 {
12011 if (TREE_CODE (target) != OBJ_TYPE_REF)
12012 return false;
12013 tree t = TREE_TYPE (target);
12014 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12015 t = TREE_TYPE (t);
12016 if (TREE_CODE (t) == FUNCTION_TYPE)
12017 return false;
12018 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12019 /* If we do not have BINFO associated, it means that type was built
12020 without devirtualization enabled. Do not consider this a virtual
12021 call. */
12022 if (!TYPE_BINFO (obj_type_ref_class (target)))
12023 return false;
12024 return true;
12025 }
12026
12027 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12028
12029 tree
12030 obj_type_ref_class (const_tree ref)
12031 {
12032 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12033 ref = TREE_TYPE (ref);
12034 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12035 ref = TREE_TYPE (ref);
12036 /* We look for the type that THIS points to. ObjC also builds
12037 OBJ_TYPE_REF with non-method calls; their first parameter
12038 ID, however, also corresponds to the class type. */
12039 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12040 || TREE_CODE (ref) == FUNCTION_TYPE);
12041 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12042 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12043 return TREE_TYPE (ref);
12044 }
12045
12046 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12047
12048 static tree
12049 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12050 {
12051 unsigned int i;
12052 tree base_binfo, b;
12053
12054 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12055 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12056 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12057 return base_binfo;
12058 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12059 return b;
12060 return NULL;
12061 }
12062
12063 /* Try to find a base info of BINFO that would have its field decl at offset
12064 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12065 found, return it; otherwise return NULL_TREE. */
12066
12067 tree
12068 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12069 {
12070 tree type = BINFO_TYPE (binfo);
12071
12072 while (true)
12073 {
12074 HOST_WIDE_INT pos, size;
12075 tree fld;
12076 int i;
12077
12078 if (types_same_for_odr (type, expected_type))
12079 return binfo;
12080 if (offset < 0)
12081 return NULL_TREE;
12082
12083 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12084 {
12085 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12086 continue;
12087
12088 pos = int_bit_position (fld);
12089 size = tree_to_uhwi (DECL_SIZE (fld));
12090 if (pos <= offset && (pos + size) > offset)
12091 break;
12092 }
12093 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12094 return NULL_TREE;
12095
12096 /* Offset 0 indicates the primary base, whose vtable contents are
12097 represented in the binfo for the derived class. */
12098 else if (offset != 0)
12099 {
12100 tree found_binfo = NULL, base_binfo;
12101 /* Offsets in BINFO are in bytes relative to the whole structure
12102 while POS is in bits relative to the containing field. */
12103 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12104 / BITS_PER_UNIT);
12105
12106 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12107 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12108 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12109 {
12110 found_binfo = base_binfo;
12111 break;
12112 }
12113 if (found_binfo)
12114 binfo = found_binfo;
12115 else
12116 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12117 binfo_offset);
12118 }
12119
12120 type = TREE_TYPE (fld);
12121 offset -= pos;
12122 }
12123 }
12124
12125 /* Returns true if X is a typedef decl. */
12126
12127 bool
12128 is_typedef_decl (const_tree x)
12129 {
12130 return (x && TREE_CODE (x) == TYPE_DECL
12131 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12132 }
12133
12134 /* Returns true iff TYPE is a type variant created for a typedef. */
12135
12136 bool
12137 typedef_variant_p (const_tree type)
12138 {
12139 return is_typedef_decl (TYPE_NAME (type));
12140 }
12141
12142 /* Warn about a use of an identifier which was marked deprecated. */
12143 void
12144 warn_deprecated_use (tree node, tree attr)
12145 {
12146 const char *msg;
12147
12148 if (node == 0 || !warn_deprecated_decl)
12149 return;
12150
12151 if (!attr)
12152 {
12153 if (DECL_P (node))
12154 attr = DECL_ATTRIBUTES (node);
12155 else if (TYPE_P (node))
12156 {
12157 tree decl = TYPE_STUB_DECL (node);
12158 if (decl)
12159 attr = lookup_attribute ("deprecated",
12160 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12161 }
12162 }
12163
12164 if (attr)
12165 attr = lookup_attribute ("deprecated", attr);
12166
12167 if (attr)
12168 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12169 else
12170 msg = NULL;
12171
12172 bool w;
12173 if (DECL_P (node))
12174 {
12175 if (msg)
12176 w = warning (OPT_Wdeprecated_declarations,
12177 "%qD is deprecated: %s", node, msg);
12178 else
12179 w = warning (OPT_Wdeprecated_declarations,
12180 "%qD is deprecated", node);
12181 if (w)
12182 inform (DECL_SOURCE_LOCATION (node), "declared here");
12183 }
12184 else if (TYPE_P (node))
12185 {
12186 tree what = NULL_TREE;
12187 tree decl = TYPE_STUB_DECL (node);
12188
12189 if (TYPE_NAME (node))
12190 {
12191 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12192 what = TYPE_NAME (node);
12193 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12194 && DECL_NAME (TYPE_NAME (node)))
12195 what = DECL_NAME (TYPE_NAME (node));
12196 }
12197
12198 if (decl)
12199 {
12200 if (what)
12201 {
12202 if (msg)
12203 w = warning (OPT_Wdeprecated_declarations,
12204 "%qE is deprecated: %s", what, msg);
12205 else
12206 w = warning (OPT_Wdeprecated_declarations,
12207 "%qE is deprecated", what);
12208 }
12209 else
12210 {
12211 if (msg)
12212 w = warning (OPT_Wdeprecated_declarations,
12213 "type is deprecated: %s", msg);
12214 else
12215 w = warning (OPT_Wdeprecated_declarations,
12216 "type is deprecated");
12217 }
12218 if (w)
12219 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12220 }
12221 else
12222 {
12223 if (what)
12224 {
12225 if (msg)
12226 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12227 what, msg);
12228 else
12229 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12230 }
12231 else
12232 {
12233 if (msg)
12234 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12235 msg);
12236 else
12237 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12238 }
12239 }
12240 }
12241 }
12242
12243 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12244 somewhere in it. */
12245
12246 bool
12247 contains_bitfld_component_ref_p (const_tree ref)
12248 {
12249 while (handled_component_p (ref))
12250 {
12251 if (TREE_CODE (ref) == COMPONENT_REF
12252 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12253 return true;
12254 ref = TREE_OPERAND (ref, 0);
12255 }
12256
12257 return false;
12258 }
12259
12260 /* Try to determine whether a TRY_CATCH expression can fall through.
12261 This is a subroutine of block_may_fallthru. */
12262
12263 static bool
12264 try_catch_may_fallthru (const_tree stmt)
12265 {
12266 tree_stmt_iterator i;
12267
12268 /* If the TRY block can fall through, the whole TRY_CATCH can
12269 fall through. */
12270 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12271 return true;
12272
12273 i = tsi_start (TREE_OPERAND (stmt, 1));
12274 switch (TREE_CODE (tsi_stmt (i)))
12275 {
12276 case CATCH_EXPR:
12277 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12278 catch expression and a body. The whole TRY_CATCH may fall
12279 through iff any of the catch bodies falls through. */
12280 for (; !tsi_end_p (i); tsi_next (&i))
12281 {
12282 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12283 return true;
12284 }
12285 return false;
12286
12287 case EH_FILTER_EXPR:
12288 /* The exception filter expression only matters if there is an
12289 exception. If the exception does not match EH_FILTER_TYPES,
12290 we will execute EH_FILTER_FAILURE, and we will fall through
12291 if that falls through. If the exception does match
12292 EH_FILTER_TYPES, the stack unwinder will continue up the
12293 stack, so we will not fall through. We don't know whether we
12294 will throw an exception which matches EH_FILTER_TYPES or not,
12295 so we just ignore EH_FILTER_TYPES and assume that we might
12296 throw an exception which doesn't match. */
12297 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12298
12299 default:
12300 /* This case represents statements to be executed when an
12301 exception occurs. Those statements are implicitly followed
12302 by a RESX statement to resume execution after the exception.
12303 So in this case the TRY_CATCH never falls through. */
12304 return false;
12305 }
12306 }
12307
12308 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12309 need not be 100% accurate; simply be conservative and return true if we
12310 don't know. This is used only to avoid stupidly generating extra code.
12311 If we're wrong, we'll just delete the extra code later. */
12312
12313 bool
12314 block_may_fallthru (const_tree block)
12315 {
12316 /* This CONST_CAST is okay because expr_last returns its argument
12317 unmodified and we assign it to a const_tree. */
12318 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12319
12320 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12321 {
12322 case GOTO_EXPR:
12323 case RETURN_EXPR:
12324 /* Easy cases. If the last statement of the block implies
12325 control transfer, then we can't fall through. */
12326 return false;
12327
12328 case SWITCH_EXPR:
12329 /* If SWITCH_LABELS is set, this is lowered, and represents a
12330 branch to a selected label and hence can not fall through.
12331 Otherwise SWITCH_BODY is set, and the switch can fall
12332 through. */
12333 return SWITCH_LABELS (stmt) == NULL_TREE;
12334
12335 case COND_EXPR:
12336 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12337 return true;
12338 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12339
12340 case BIND_EXPR:
12341 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12342
12343 case TRY_CATCH_EXPR:
12344 return try_catch_may_fallthru (stmt);
12345
12346 case TRY_FINALLY_EXPR:
12347 /* The finally clause is always executed after the try clause,
12348 so if it does not fall through, then the try-finally will not
12349 fall through. Otherwise, if the try clause does not fall
12350 through, then when the finally clause falls through it will
12351 resume execution wherever the try clause was going. So the
12352 whole try-finally will only fall through if both the try
12353 clause and the finally clause fall through. */
12354 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12355 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12356
12357 case MODIFY_EXPR:
12358 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12359 stmt = TREE_OPERAND (stmt, 1);
12360 else
12361 return true;
12362 /* FALLTHRU */
12363
12364 case CALL_EXPR:
12365 /* Functions that do not return do not fall through. */
12366 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12367
12368 case CLEANUP_POINT_EXPR:
12369 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12370
12371 case TARGET_EXPR:
12372 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12373
12374 case ERROR_MARK:
12375 return true;
12376
12377 default:
12378 return lang_hooks.block_may_fallthru (stmt);
12379 }
12380 }
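
/* For illustration, a block whose last statement is a call to a
   noreturn function does not fall through; e.g. a call built as

     build_call_expr (builtin_decl_explicit (BUILT_IN_ABORT), 0)

   carries ECF_NORETURN in call_expr_flags, so block_may_fallthru
   returns false for it. */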
12381
12382 /* True if we are using EH to handle cleanups. */
12383 static bool using_eh_for_cleanups_flag = false;
12384
12385 /* This routine is called from front ends to indicate eh should be used for
12386 cleanups. */
12387 void
12388 using_eh_for_cleanups (void)
12389 {
12390 using_eh_for_cleanups_flag = true;
12391 }
12392
12393 /* Query whether EH is used for cleanups. */
12394 bool
12395 using_eh_for_cleanups_p (void)
12396 {
12397 return using_eh_for_cleanups_flag;
12398 }
12399
12400 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12401 const char *
12402 get_tree_code_name (enum tree_code code)
12403 {
12404 const char *invalid = "<invalid tree code>";
12405
12406 if (code >= MAX_TREE_CODES)
12407 return invalid;
12408
12409 return tree_code_name[code];
12410 }
12411
12412 /* Drops the TREE_OVERFLOW flag from T. */
12413
12414 tree
12415 drop_tree_overflow (tree t)
12416 {
12417 gcc_checking_assert (TREE_OVERFLOW (t));
12418
12419 /* For tree codes with a sharing machinery re-build the result. */
12420 if (TREE_CODE (t) == INTEGER_CST)
12421 return wide_int_to_tree (TREE_TYPE (t), t);
12422
12423 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12424 and drop the flag. */
12425 t = copy_node (t);
12426 TREE_OVERFLOW (t) = 0;
12427 return t;
12428 }
12429
12430 /* Given a memory reference expression T, return its base address.
12431 The base address of a memory reference expression is the main
12432 object being referenced. For instance, the base address for
12433 'array[i].fld[j]' is 'array'. You can think of this as stripping
12434 away the offset part from a memory address.
12435
12436 This function calls handled_component_p to strip away all the inner
12437 parts of the memory reference until it reaches the base object. */
12438
12439 tree
12440 get_base_address (tree t)
12441 {
12442 while (handled_component_p (t))
12443 t = TREE_OPERAND (t, 0);
12444
12445 if ((TREE_CODE (t) == MEM_REF
12446 || TREE_CODE (t) == TARGET_MEM_REF)
12447 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12448 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12449
12450 /* ??? Either the alias oracle or all callers need to properly deal
12451 with WITH_SIZE_EXPRs before we can look through those. */
12452 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12453 return NULL_TREE;
12454
12455 return t;
12456 }
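
/* For illustration, a hypothetical helper built on top of this:

     static bool
     refers_to_decl_p (tree ref, tree decl)
     {
       tree base = get_base_address (ref);
       return base != NULL_TREE && base == decl;
     }

   For 'array[i].fld[j]' this compares 'array' against DECL, after any
   MEM_REF or TARGET_MEM_REF based on an ADDR_EXPR has been looked
   through. */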
12457
12458 /* Return a tree of sizetype representing the size, in bytes, of the element
12459 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12460
12461 tree
12462 array_ref_element_size (tree exp)
12463 {
12464 tree aligned_size = TREE_OPERAND (exp, 3);
12465 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12466 location_t loc = EXPR_LOCATION (exp);
12467
12468 /* If a size was specified in the ARRAY_REF, it's the size measured
12469 in alignment units of the element type. So multiply by that value. */
12470 if (aligned_size)
12471 {
12472 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12473 sizetype from another type of the same width and signedness. */
12474 if (TREE_TYPE (aligned_size) != sizetype)
12475 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12476 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12477 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12478 }
12479
12480 /* Otherwise, take the size from that of the element type. Substitute
12481 any PLACEHOLDER_EXPR that we have. */
12482 else
12483 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12484 }
12485
12486 /* Return a tree representing the lower bound of the array mentioned in
12487 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12488
12489 tree
12490 array_ref_low_bound (tree exp)
12491 {
12492 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12493
12494 /* If a lower bound is specified in EXP, use it. */
12495 if (TREE_OPERAND (exp, 2))
12496 return TREE_OPERAND (exp, 2);
12497
12498 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12499 substituting for a PLACEHOLDER_EXPR as needed. */
12500 if (domain_type && TYPE_MIN_VALUE (domain_type))
12501 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12502
12503 /* Otherwise, return a zero of the appropriate type. */
12504 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12505 }
12506
12507 /* Return a tree representing the upper bound of the array mentioned in
12508 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12509
12510 tree
12511 array_ref_up_bound (tree exp)
12512 {
12513 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12514
12515 /* If there is a domain type and it has an upper bound, use it, substituting
12516 for a PLACEHOLDER_EXPR as needed. */
12517 if (domain_type && TYPE_MAX_VALUE (domain_type))
12518 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12519
12520 /* Otherwise fail. */
12521 return NULL_TREE;
12522 }
12523
12524 /* Returns true if REF is an array reference to an array at the end of
12525 a structure. If this is the case, the array may be allocated larger
12526 than its upper bound implies. */
12527
12528 bool
12529 array_at_struct_end_p (tree ref)
12530 {
12531 if (TREE_CODE (ref) != ARRAY_REF
12532 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12533 return false;
12534
12535 while (handled_component_p (ref))
12536 {
12537 /* If the reference chain contains a component reference to a
12538 non-union type and there follows another field the reference
12539 is not at the end of a structure. */
12540 if (TREE_CODE (ref) == COMPONENT_REF
12541 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12542 {
12543 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12544 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12545 nextf = DECL_CHAIN (nextf);
12546 if (nextf)
12547 return false;
12548 }
12549
12550 ref = TREE_OPERAND (ref, 0);
12551 }
12552
12553 /* If the reference is based on a declared entity, the size of the array
12554 is constrained by its given domain. */
12555 if (DECL_P (ref))
12556 return false;
12557
12558 return true;
12559 }
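
/* For illustration, with the classic struct-hack idiom

     struct s { int n; int a[1]; };

   an access p->a[i] through a pointer P satisfies this predicate: the
   COMPONENT_REF for 'a' has no following FIELD_DECL and the base is a
   pointer dereference rather than a declared object. For a declared
   'struct s v', the reference v.a[i] ends in the DECL 'v' and the
   function returns false. */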
12560
12561 /* Return a tree representing the offset, in bytes, of the field referenced
12562 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12563
12564 tree
12565 component_ref_field_offset (tree exp)
12566 {
12567 tree aligned_offset = TREE_OPERAND (exp, 2);
12568 tree field = TREE_OPERAND (exp, 1);
12569 location_t loc = EXPR_LOCATION (exp);
12570
12571 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12572 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12573 value. */
12574 if (aligned_offset)
12575 {
12576 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12577 sizetype from another type of the same width and signedness. */
12578 if (TREE_TYPE (aligned_offset) != sizetype)
12579 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12580 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12581 size_int (DECL_OFFSET_ALIGN (field)
12582 / BITS_PER_UNIT));
12583 }
12584
12585 /* Otherwise, take the offset from that of the field. Substitute
12586 any PLACEHOLDER_EXPR that we have. */
12587 else
12588 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12589 }
12590
12591 /* Return the machine mode of T. For vectors, returns the mode of the
12592 inner type. The main use case is to feed the result to HONOR_NANS,
12593 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12594
12595 machine_mode
12596 element_mode (const_tree t)
12597 {
12598 if (!TYPE_P (t))
12599 t = TREE_TYPE (t);
12600 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12601 t = TREE_TYPE (t);
12602 return TYPE_MODE (t);
12603 }
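
/* For illustration, for a vector type such as one built with

     tree v4sf = build_vector_type (float_type_node, 4);

   TYPE_MODE (v4sf) is a vector mode (or BLKmode if unsupported), while
   element_mode (v4sf) is simply TYPE_MODE (float_type_node), which is
   what HONOR_NANS and friends want to inspect. */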
12604
12605
12606 /* Verify that basic properties of T match TV and thus T can be a variant of
12607 TV. TV should be the more specified variant (i.e. the main variant). */
12608
12609 static bool
12610 verify_type_variant (const_tree t, tree tv)
12611 {
12612 /* Type variant can differ by:
12613
12614 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12615 ENCODE_QUAL_ADDR_SPACE.
12616 - main variant may be COMPLETE_TYPE_P and variant types !COMPLETE_TYPE_P;
12617 in this case some values may not be set in the variant types
12618 (see COMPLETE_TYPE_P checks).
12619 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12620 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12621 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12622 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12623 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12624 this is necessary to make it possible to merge types from different TUs
12625 - arrays, pointers and references may have TREE_TYPE that is a variant
12626 of TREE_TYPE of their main variants.
12627 - aggregates may have new TYPE_FIELDS list that list variants of
12628 the main variant TYPE_FIELDS.
12629 - vector types may differ by TYPE_VECTOR_OPAQUE
12630 - TYPE_METHODS is always NULL for variant types and maintained for
12631 main variant only.
12632 */
12633
12634 /* Convenience macro for matching individual fields. */
12635 #define verify_variant_match(flag) \
12636 do { \
12637 if (flag (tv) != flag (t)) \
12638 { \
12639 error ("type variant differs by " #flag "."); \
12640 debug_tree (tv); \
12641 return false; \
12642 } \
12643 } while (false)
12644
12645 /* tree_base checks. */
12646
12647 verify_variant_match (TREE_CODE);
12648 /* FIXME: Ada builds non-artificial variants of artificial types. */
12649 if (TYPE_ARTIFICIAL (tv) && 0)
12650 verify_variant_match (TYPE_ARTIFICIAL);
12651 if (POINTER_TYPE_P (tv))
12652 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12653 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12654 verify_variant_match (TYPE_UNSIGNED);
12655 verify_variant_match (TYPE_ALIGN_OK);
12656 verify_variant_match (TYPE_PACKED);
12657 if (TREE_CODE (t) == REFERENCE_TYPE)
12658 verify_variant_match (TYPE_REF_IS_RVALUE);
12659 verify_variant_match (TYPE_SATURATING);
12660 /* FIXME: This check triggers during the libstdc++ build. */
12661 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12662 verify_variant_match (TYPE_FINAL_P);
12663
12664 /* tree_type_common checks. */
12665
12666 if (COMPLETE_TYPE_P (t))
12667 {
12668 verify_variant_match (TYPE_SIZE);
12669 verify_variant_match (TYPE_MODE);
12670 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12671 /* FIXME: ideally we should compare pointer equality, but the Java FE
12672 produces variants where the size is an INTEGER_CST of a different type
12673 (int wrt size_type) during the libjava build. */
12674 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12675 {
12676 error ("type variant has different TYPE_SIZE_UNIT");
12677 debug_tree (tv);
12678 error ("type variant's TYPE_SIZE_UNIT");
12679 debug_tree (TYPE_SIZE_UNIT (tv));
12680 error ("type's TYPE_SIZE_UNIT");
12681 debug_tree (TYPE_SIZE_UNIT (t));
12682 return false;
12683 }
12684 }
12685 verify_variant_match (TYPE_PRECISION);
12686 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12687 if (RECORD_OR_UNION_TYPE_P (t))
12688 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12689 else if (TREE_CODE (t) == ARRAY_TYPE)
12690 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12691 /* During LTO we merge variant lists from different translation units
12692 that may differ by TYPE_CONTEXT, which in turn may point
12693 to TRANSLATION_UNIT_DECL.
12694 Ada also builds variants of types with different TYPE_CONTEXT. */
12695 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12696 verify_variant_match (TYPE_CONTEXT);
12697 verify_variant_match (TYPE_STRING_FLAG);
12698 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12699 verify_variant_match (TYPE_ALIAS_SET);
12700
12701 /* tree_type_non_common checks. */
12702
12703 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12704 and dangles the pointer from time to time. */
12705 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12706 && (in_lto_p || !TYPE_VFIELD (tv)
12707 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12708 {
12709 error ("type variant has different TYPE_VFIELD");
12710 debug_tree (tv);
12711 return false;
12712 }
12713 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12714 || TREE_CODE (t) == INTEGER_TYPE
12715 || TREE_CODE (t) == BOOLEAN_TYPE
12716 || TREE_CODE (t) == REAL_TYPE
12717 || TREE_CODE (t) == FIXED_POINT_TYPE)
12718 {
12719 verify_variant_match (TYPE_MAX_VALUE);
12720 verify_variant_match (TYPE_MIN_VALUE);
12721 }
12722 if (TREE_CODE (t) == METHOD_TYPE)
12723 verify_variant_match (TYPE_METHOD_BASETYPE);
12724 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12725 {
12726 error ("type variant has TYPE_METHODS");
12727 debug_tree (tv);
12728 return false;
12729 }
12730 if (TREE_CODE (t) == OFFSET_TYPE)
12731 verify_variant_match (TYPE_OFFSET_BASETYPE);
12732 if (TREE_CODE (t) == ARRAY_TYPE)
12733 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12734 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12735 or even the type's main variant. This is needed to make bootstrap pass
12736 and the bug seems new in GCC 5.
12737 C++ FE should be updated to make this consistent and we should check
12738 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12739 is a match with main variant.
12740
12741 Also disable the check for Java for now because of a parser hack that
12742 builds first a dummy BINFO and then sometimes replaces it by the real
12743 BINFO in some of the copies. */
12744 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12745 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12746 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12747 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
12748 at LTO time only. */
12749 && (in_lto_p && odr_type_p (t)))
12750 {
12751 error ("type variant has different TYPE_BINFO");
12752 debug_tree (tv);
12753 error ("type variant's TYPE_BINFO");
12754 debug_tree (TYPE_BINFO (tv));
12755 error ("type's TYPE_BINFO");
12756 debug_tree (TYPE_BINFO (t));
12757 return false;
12758 }
12759
12760 /* Check various uses of TYPE_VALUES_RAW. */
12761 if (TREE_CODE (t) == ENUMERAL_TYPE)
12762 verify_variant_match (TYPE_VALUES);
12763 else if (TREE_CODE (t) == ARRAY_TYPE)
12764 verify_variant_match (TYPE_DOMAIN);
12765 /* Permit incomplete variants of complete type. While FEs may complete
12766 all variants, this does not happen for C++ templates in all cases. */
12767 else if (RECORD_OR_UNION_TYPE_P (t)
12768 && COMPLETE_TYPE_P (t)
12769 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12770 {
12771 tree f1, f2;
12772
12773 /* Fortran builds qualified variants as new records with items of
12774 qualified type. Verify that they look the same. */
12775 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12776 f1 && f2;
12777 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12778 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12779 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12780 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12781 /* FIXME: gfc_nonrestricted_type builds all types as variants
12782 with exception of pointer types. It deeply copies the type
12783 which means that we may end up with a variant type
12784 referring non-variant pointer. We may change it to
12785 produce types as variants, too, like
12786 objc_get_protocol_qualified_type does. */
12787 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12788 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12789 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12790 break;
12791 if (f1 || f2)
12792 {
12793 error ("type variant has different TYPE_FIELDS");
12794 debug_tree (tv);
12795 error ("first mismatch is field");
12796 debug_tree (f1);
12797 error ("and field");
12798 debug_tree (f2);
12799 return false;
12800 }
12801 }
12802 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12803 verify_variant_match (TYPE_ARG_TYPES);
12804 /* For C++ the qualified variant of array type is really an array type
12805 of qualified TREE_TYPE.
12806 ObjC builds variants of pointer types where the pointed-to type is a
12807 variant, too, in objc_get_protocol_qualified_type. */
12808 if (TREE_TYPE (t) != TREE_TYPE (tv)
12809 && ((TREE_CODE (t) != ARRAY_TYPE
12810 && !POINTER_TYPE_P (t))
12811 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12812 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12813 {
12814 error ("type variant has different TREE_TYPE");
12815 debug_tree (tv);
12816 error ("type variant's TREE_TYPE");
12817 debug_tree (TREE_TYPE (tv));
12818 error ("type's TREE_TYPE");
12819 debug_tree (TREE_TYPE (t));
12820 return false;
12821 }
12822 return true;
12823 #undef verify_variant_match
12824 }
12825
12826
12827 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12828 the middle-end types_compatible_p function. It needs to avoid
12829 claiming types are different for types that should be treated
12830 the same with respect to TBAA. Canonical types are also used
12831 for IL consistency checks via the useless_type_conversion_p
12832 predicate which does not handle all type kinds itself but falls
12833 back to pointer-comparison of TYPE_CANONICAL for aggregates
12834 for example. */
12835
12836 /* Return true iff T1 and T2 are structurally identical for what
12837 TBAA is concerned.
12838 This function is used both by lto.c canonical type merging and by the
12839 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
12840 that have TYPE_CANONICAL defined and assume them equivalent. */
12841
12842 bool
12843 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
12844 bool trust_type_canonical)
12845 {
12846 /* Before starting to set up the SCC machinery handle simple cases. */
12847
12848 /* Check first for the obvious case of pointer identity. */
12849 if (t1 == t2)
12850 return true;
12851
12852 /* Check that we have two types to compare. */
12853 if (t1 == NULL_TREE || t2 == NULL_TREE)
12854 return false;
12855
12856 /* We consider complete types always compatible with incomplete types.
12857 This does not make sense for canonical type calculation and thus we
12858 need to ensure that we are never called on it.
12859
12860 FIXME: For more correctness the function probably should have three modes
12861 1) mode assuming that types are complete, matching their structure
12862 2) mode allowing incomplete types but producing equivalence classes
12863 and thus ignoring all info from complete types
12864 3) mode allowing incomplete types to match complete but checking
12865 compatibility between complete types.
12866
12867 1 and 2 can be used for canonical type calculation. 3 is the real
12868 definition of type compatibility that can be used e.g. for warnings during
12869 declaration merging. */
12870
12871 gcc_assert (!trust_type_canonical
12872 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
12873 /* If the types have been previously registered and found equal
12874 they still are. */
12875 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
12876 && trust_type_canonical)
12877 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
12878
12879 /* Can't be the same type if the types don't have the same code. */
12880 if (TREE_CODE (t1) != TREE_CODE (t2))
12881 return false;
12882
12883 /* Qualifiers do not matter for canonical type comparison purposes. */
12884
12885 /* Void types and nullptr types are always the same. */
12886 if (TREE_CODE (t1) == VOID_TYPE
12887 || TREE_CODE (t1) == NULLPTR_TYPE)
12888 return true;
12889
12890 /* Can't be the same type if they have different modes. */
12891 if (TYPE_MODE (t1) != TYPE_MODE (t2))
12892 return false;
12893
12894 /* Non-aggregate types can be handled cheaply. */
12895 if (INTEGRAL_TYPE_P (t1)
12896 || SCALAR_FLOAT_TYPE_P (t1)
12897 || FIXED_POINT_TYPE_P (t1)
12898 || TREE_CODE (t1) == VECTOR_TYPE
12899 || TREE_CODE (t1) == COMPLEX_TYPE
12900 || TREE_CODE (t1) == OFFSET_TYPE
12901 || POINTER_TYPE_P (t1))
12902 {
12903 /* Can't be the same type if they have different sign or precision. */
12904 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
12905 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
12906 return false;
12907
12908 if (TREE_CODE (t1) == INTEGER_TYPE
12909 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
12910 return false;
12911
12912 /* For canonical type comparisons we do not want to build SCCs
12913 so we cannot compare pointed-to types. But we can, for now,
12914 require the same pointed-to type kind and match what
12915 useless_type_conversion_p would do. */
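/* For instance (illustrative only): int * and long * compare as compatible
   here, since only the TREE_CODE of the pointed-to type is examined, while
   int * and int ** differ, as do pointers into different address spaces. */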
12916 if (POINTER_TYPE_P (t1))
12917 {
12918 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
12919 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
12920 return false;
12921
12922 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
12923 return false;
12924 }
12925
12926 /* Tail-recurse to components. */
12927 if (TREE_CODE (t1) == VECTOR_TYPE
12928 || TREE_CODE (t1) == COMPLEX_TYPE)
12929 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
12930 TREE_TYPE (t2),
12931 trust_type_canonical);
12932
12933 return true;
12934 }
12935
12936 /* Do type-specific comparisons. */
12937 switch (TREE_CODE (t1))
12938 {
12939 case ARRAY_TYPE:
12940 /* Array types are the same if the element types are the same and
12941 the number of elements is the same. */
12942 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
12943 trust_type_canonical)
12944 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
12945 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
12946 return false;
12947 else
12948 {
12949 tree i1 = TYPE_DOMAIN (t1);
12950 tree i2 = TYPE_DOMAIN (t2);
12951
12952 /* For an incomplete external array, the type domain can be
12953 NULL_TREE. Check this condition also. */
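/* E.g. (illustrative) the type of "extern int a[];" has a NULL TYPE_DOMAIN,
   so two such incomplete array types with compatible element types compare
   equal, while a complete and an incomplete array type do not. */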
12954 if (i1 == NULL_TREE && i2 == NULL_TREE)
12955 return true;
12956 else if (i1 == NULL_TREE || i2 == NULL_TREE)
12957 return false;
12958 else
12959 {
12960 tree min1 = TYPE_MIN_VALUE (i1);
12961 tree min2 = TYPE_MIN_VALUE (i2);
12962 tree max1 = TYPE_MAX_VALUE (i1);
12963 tree max2 = TYPE_MAX_VALUE (i2);
12964
12965 /* The minimum/maximum values have to be the same. */
12966 if ((min1 == min2
12967 || (min1 && min2
12968 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
12969 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
12970 || operand_equal_p (min1, min2, 0))))
12971 && (max1 == max2
12972 || (max1 && max2
12973 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
12974 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
12975 || operand_equal_p (max1, max2, 0)))))
12976 return true;
12977 else
12978 return false;
12979 }
12980 }
12981
12982 case METHOD_TYPE:
12983 case FUNCTION_TYPE:
12984 /* Function types are the same if the return type and argument types
12985 are the same. */
12986 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
12987 trust_type_canonical))
12988 return false;
12989
12990 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
12991 return true;
12992 else
12993 {
12994 tree parms1, parms2;
12995
12996 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
12997 parms1 && parms2;
12998 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
12999 {
13000 if (!gimple_canonical_types_compatible_p
13001 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13002 trust_type_canonical))
13003 return false;
13004 }
13005
13006 if (parms1 || parms2)
13007 return false;
13008
13009 return true;
13010 }
13011
13012 case RECORD_TYPE:
13013 case UNION_TYPE:
13014 case QUAL_UNION_TYPE:
13015 {
13016 tree f1, f2;
13017
13018 /* For aggregate types, all the fields must be the same. */
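/* Note (illustrative): field names are not compared, so e.g.
   struct { int a; float b; } and struct { int x; float y; } are
   considered compatible by this loop. */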
13019 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13020 f1 || f2;
13021 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13022 {
13023 /* Skip non-fields. */
13024 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13025 f1 = TREE_CHAIN (f1);
13026 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13027 f2 = TREE_CHAIN (f2);
13028 if (!f1 || !f2)
13029 break;
13030 /* The fields must have the same offset, addressability and type. */
13031 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13032 || !gimple_compare_field_offset (f1, f2)
13033 || !gimple_canonical_types_compatible_p
13034 (TREE_TYPE (f1), TREE_TYPE (f2),
13035 trust_type_canonical))
13036 return false;
13037 }
13038
13039 /* If one aggregate has more fields than the other, they
13040 are not the same. */
13041 if (f1 || f2)
13042 return false;
13043
13044 return true;
13045 }
13046
13047 default:
13048 /* Consider all types with language specific trees in them mutually
13049 compatible. This is executed only from verify_type and false
13050 positives can be tolerated. */
13051 gcc_assert (!in_lto_p);
13052 return true;
13053 }
13054 }
13055
13056 /* Verify type T. */
13057
13058 void
13059 verify_type (const_tree t)
13060 {
13061 bool error_found = false;
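/* First check the TYPE_MAIN_VARIANT invariants: every type must have a main
   variant, the main variant must be its own main variant, and other variants
   must agree with it as checked by verify_type_variant. */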
13062 tree mv = TYPE_MAIN_VARIANT (t);
13063 if (!mv)
13064 {
13065 error ("Main variant is not defined");
13066 error_found = true;
13067 }
13068 else if (mv != TYPE_MAIN_VARIANT (mv))
13069 {
13070 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13071 debug_tree (mv);
13072 error_found = true;
13073 }
13074 else if (t != mv && !verify_type_variant (t, mv))
13075 error_found = true;
13076
13077 tree ct = TYPE_CANONICAL (t);
13078 if (!ct)
13079 ;
13080 else if (TYPE_CANONICAL (ct) != ct)
13081 {
13082 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13083 debug_tree (ct);
13084 error_found = true;
13085 }
13086 /* Method and function types cannot be used to address memory and thus
13087 TYPE_CANONICAL really matters only for determining useless conversions.
13088
13089 FIXME: C++ FE produces declarations of builtin functions that are not
13090 compatible with main variants. */
13091 else if (TREE_CODE (t) == FUNCTION_TYPE)
13092 ;
13093 else if (t != ct
13094 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13095 with variably sized arrays because their sizes may have been
13096 gimplified to different variables. */
13097 && !variably_modified_type_p (ct, NULL)
13098 && !gimple_canonical_types_compatible_p (t, ct, false))
13099 {
13100 error ("TYPE_CANONICAL is not compatible");
13101 debug_tree (ct);
13102 error_found = true;
13103 }
13104
13105
13106 /* Check various uses of TYPE_MINVAL. */
13107 if (RECORD_OR_UNION_TYPE_P (t))
13108 {
13109 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13110 and dangles the pointer from time to time. */
13111 if (TYPE_VFIELD (t)
13112 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13113 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13114 {
13115 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13116 debug_tree (TYPE_VFIELD (t));
13117 error_found = true;
13118 }
13119 }
13120 else if (TREE_CODE (t) == POINTER_TYPE)
13121 {
13122 if (TYPE_NEXT_PTR_TO (t)
13123 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13124 {
13125 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13126 debug_tree (TYPE_NEXT_PTR_TO (t));
13127 error_found = true;
13128 }
13129 }
13130 else if (TREE_CODE (t) == REFERENCE_TYPE)
13131 {
13132 if (TYPE_NEXT_REF_TO (t)
13133 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13134 {
13135 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13136 debug_tree (TYPE_NEXT_REF_TO (t));
13137 error_found = true;
13138 }
13139 }
13140 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13141 || TREE_CODE (t) == FIXED_POINT_TYPE)
13142 {
13143 /* FIXME: The following check should pass:
13144 useless_type_conversion_p (const_cast <tree> (t),
13145 TREE_TYPE (TYPE_MIN_VALUE (t)))
13146 but does not for C sizetypes in LTO. */
13147 }
13148 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13149 else if (TYPE_MINVAL (t)
13150 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13151 || in_lto_p))
13152 {
13153 error ("TYPE_MINVAL non-NULL");
13154 debug_tree (TYPE_MINVAL (t));
13155 error_found = true;
13156 }
13157
13158 /* Check various uses of TYPE_MAXVAL. */
13159 if (RECORD_OR_UNION_TYPE_P (t))
13160 {
13161 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13162 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13163 && TYPE_METHODS (t) != error_mark_node)
13164 {
13165 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13166 debug_tree (TYPE_METHODS (t));
13167 error_found = true;
13168 }
13169 }
13170 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13171 {
13172 if (TYPE_METHOD_BASETYPE (t)
13173 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13174 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13175 {
13176 error ("TYPE_METHOD_BASETYPE is not record nor union");
13177 debug_tree (TYPE_METHOD_BASETYPE (t));
13178 error_found = true;
13179 }
13180 }
13181 else if (TREE_CODE (t) == OFFSET_TYPE)
13182 {
13183 if (TYPE_OFFSET_BASETYPE (t)
13184 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13185 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13186 {
13187 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13188 debug_tree (TYPE_OFFSET_BASETYPE (t));
13189 error_found = true;
13190 }
13191 }
13192 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13193 || TREE_CODE (t) == FIXED_POINT_TYPE)
13194 {
13195 /* FIXME: The following check should pass:
13196 useless_type_conversion_p (const_cast <tree> (t),
13197 TREE_TYPE (TYPE_MAX_VALUE (t)))
13198 but does not for C sizetypes in LTO. */
13199 }
13200 else if (TREE_CODE (t) == ARRAY_TYPE)
13201 {
13202 if (TYPE_ARRAY_MAX_SIZE (t)
13203 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13204 {
13205 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13206 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13207 error_found = true;
13208 }
13209 }
13210 else if (TYPE_MAXVAL (t))
13211 {
13212 error ("TYPE_MAXVAL non-NULL");
13213 debug_tree (TYPE_MAXVAL (t));
13214 error_found = true;
13215 }
13216
13217 /* Check various uses of TYPE_BINFO. */
13218 if (RECORD_OR_UNION_TYPE_P (t))
13219 {
13220 if (!TYPE_BINFO (t))
13221 ;
13222 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13223 {
13224 error ("TYPE_BINFO is not TREE_BINFO");
13225 debug_tree (TYPE_BINFO (t));
13226 error_found = true;
13227 }
13228 /* FIXME: Java builds invalid empty binfos that do not have
13229 TREE_TYPE set. */
13230 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13231 {
13232 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13233 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13234 error_found = true;
13235 }
13236 }
13237 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13238 {
13239 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13240 debug_tree (TYPE_LANG_SLOT_1 (t));
13241 error_found = true;
13242 }
13243
13244 /* Check various uses of TYPE_VALUES_RAW. */
13245 if (TREE_CODE (t) == ENUMERAL_TYPE)
13246 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13247 {
13248 tree value = TREE_VALUE (l);
13249 tree name = TREE_PURPOSE (l);
13250
13251 /* C FE produces INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13252 CONST_DECL of ENUMERAL_TYPE. */
13253 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13254 {
13255 error ("Enum value is not CONST_DECL or INTEGER_CST");
13256 debug_tree (value);
13257 debug_tree (name);
13258 error_found = true;
13259 }
13260 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13261 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13262 {
13263 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13264 debug_tree (value);
13265 debug_tree (name);
13266 error_found = true;
13267 }
13268 if (TREE_CODE (name) != IDENTIFIER_NODE)
13269 {
13270 error ("Enum value name is not IDENTIFIER_NODE");
13271 debug_tree (value);
13272 debug_tree (name);
13273 error_found = true;
13274 }
13275 }
13276 else if (TREE_CODE (t) == ARRAY_TYPE)
13277 {
13278 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13279 {
13280 error ("Array TYPE_DOMAIN is not integer type");
13281 debug_tree (TYPE_DOMAIN (t));
13282 error_found = true;
13283 }
13284 }
13285 else if (RECORD_OR_UNION_TYPE_P (t))
13286 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13287 {
13288 /* TODO: verify properties of decls. */
13289 if (TREE_CODE (fld) == FIELD_DECL)
13290 ;
13291 else if (TREE_CODE (fld) == TYPE_DECL)
13292 ;
13293 else if (TREE_CODE (fld) == CONST_DECL)
13294 ;
13295 else if (TREE_CODE (fld) == VAR_DECL)
13296 ;
13297 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13298 ;
13299 else if (TREE_CODE (fld) == USING_DECL)
13300 ;
13301 else
13302 {
13303 error ("Wrong tree in TYPE_FIELDS list");
13304 debug_tree (fld);
13305 error_found = true;
13306 }
13307 }
13308 else if (TREE_CODE (t) == INTEGER_TYPE
13309 || TREE_CODE (t) == BOOLEAN_TYPE
13310 || TREE_CODE (t) == OFFSET_TYPE
13311 || TREE_CODE (t) == REFERENCE_TYPE
13312 || TREE_CODE (t) == NULLPTR_TYPE
13313 || TREE_CODE (t) == POINTER_TYPE)
13314 {
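/* These types may cache small INTEGER_CSTs of themselves in
   TYPE_CACHED_VALUES; the TYPE_CACHED_VALUES_P flag and the cache vector
   must agree. */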
13315 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13316 {
13317 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13318 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13319 error_found = true;
13320 }
13321 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13322 {
13323 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13324 debug_tree (TYPE_CACHED_VALUES (t));
13325 error_found = true;
13326 }
13327 /* Verify just enough of the cache to ensure that no one copied it to a
13328 new type. All copying should go through copy_node, which should clear it. */
13329 else if (TYPE_CACHED_VALUES_P (t))
13330 {
13331 int i;
13332 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13333 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13334 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13335 {
13336 error ("wrong TYPE_CACHED_VALUES entry");
13337 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13338 error_found = true;
13339 break;
13340 }
13341 }
13342 }
13343 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13344 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13345 {
13346 /* C++ FE uses TREE_PURPOSE to store default argument values. */
13347 if (TREE_PURPOSE (l) && in_lto_p)
13348 {
13349 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13350 debug_tree (l);
13351 error_found = true;
13352 }
13353 if (!TYPE_P (TREE_VALUE (l)))
13354 {
13355 error ("Wrong entry in TYPE_ARG_TYPES list");
13356 debug_tree (l);
13357 error_found = true;
13358 }
13359 }
13360 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13361 {
13362 error ("TYPE_VALUES_RAW field is non-NULL");
13363 debug_tree (TYPE_VALUES_RAW (t));
13364 error_found = true;
13365 }
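/* For all remaining type codes the cached-values machinery is unused, so the
   flag must not be set. */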
13366 if (TREE_CODE (t) != INTEGER_TYPE
13367 && TREE_CODE (t) != BOOLEAN_TYPE
13368 && TREE_CODE (t) != OFFSET_TYPE
13369 && TREE_CODE (t) != REFERENCE_TYPE
13370 && TREE_CODE (t) != NULLPTR_TYPE
13371 && TREE_CODE (t) != POINTER_TYPE
13372 && TYPE_CACHED_VALUES_P (t))
13373 {
13374 error ("TYPE_CACHED_VALUES_P is set while it should not");
13375 error_found = true;
13376 }
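/* TYPE_STRING_FLAG is expected only on an INTEGER_TYPE representing a
   character type or on an ARRAY_TYPE of such elements. */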
13377 if (TYPE_STRING_FLAG (t)
13378 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13379 {
13380 error ("TYPE_STRING_FLAG is set on wrong type code");
13381 error_found = true;
13382 }
13383 else if (TYPE_STRING_FLAG (t))
13384 {
13385 const_tree b = t;
13386 if (TREE_CODE (b) == ARRAY_TYPE)
13387 b = TREE_TYPE (t);
13388 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13389 which is 32 bits wide. */
13390 if (TREE_CODE (b) != INTEGER_TYPE)
13391 {
13392 error ("TYPE_STRING_FLAG is set on type that does not look like "
13393 "char nor array of chars");
13394 error_found = true;
13395 }
13396 }
13397
13398 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always the
13399 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13400 of a type. */
13401 if (TREE_CODE (t) == METHOD_TYPE
13402 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13403 {
13404 error ("TYPE_METHOD_BASETYPE is not main variant");
13405 error_found = true;
13406 }
13407
13408 if (error_found)
13409 {
13410 debug_tree (const_cast <tree> (t));
13411 internal_error ("verify_type failed");
13412 }
13413 }
13414
13415 #include "gt-tree.h"