1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
 28    call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "attribs.h"
49 #include "varasm.h"
50 #include "tm_p.h"
51 #include "hashtab.h"
52 #include "hard-reg-set.h"
53 #include "function.h"
54 #include "obstack.h"
55 #include "toplev.h" /* get_random_seed */
56 #include "filenames.h"
57 #include "output.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "langhooks.h"
61 #include "tree-inline.h"
62 #include "tree-iterator.h"
63 #include "predict.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "gimple-expr.h"
71 #include "is-a.h"
72 #include "gimple.h"
73 #include "gimple-iterator.h"
74 #include "gimplify.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-phinodes.h"
81 #include "stringpool.h"
82 #include "tree-ssanames.h"
83 #include "rtl.h"
84 #include "statistics.h"
85 #include "real.h"
86 #include "fixed-value.h"
87 #include "insn-config.h"
88 #include "expmed.h"
89 #include "dojump.h"
90 #include "explow.h"
91 #include "emit-rtl.h"
92 #include "stmt.h"
93 #include "expr.h"
94 #include "tree-dfa.h"
95 #include "params.h"
96 #include "tree-pass.h"
97 #include "langhooks-def.h"
98 #include "diagnostic.h"
99 #include "tree-diagnostic.h"
100 #include "tree-pretty-print.h"
101 #include "except.h"
102 #include "debug.h"
103 #include "intl.h"
104 #include "builtins.h"
105 #include "print-tree.h"
106 #include "ipa-utils.h"
107
108 /* Tree code classes. */
109
110 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
111 #define END_OF_BASE_TREE_CODES tcc_exceptional,
112
113 const enum tree_code_class tree_code_type[] = {
114 #include "all-tree.def"
115 };
116
117 #undef DEFTREECODE
118 #undef END_OF_BASE_TREE_CODES
119
120 /* Table indexed by tree code giving number of expression
121 operands beyond the fixed part of the node structure.
122 Not used for types or decls. */
123
124 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
125 #define END_OF_BASE_TREE_CODES 0,
126
127 const unsigned char tree_code_length[] = {
128 #include "all-tree.def"
129 };
130
131 #undef DEFTREECODE
132 #undef END_OF_BASE_TREE_CODES
133
134 /* Names of tree components.
135 Used for printing out the tree and error messages. */
136 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
137 #define END_OF_BASE_TREE_CODES "@dummy",
138
139 static const char *const tree_code_name[] = {
140 #include "all-tree.def"
141 };
142
143 #undef DEFTREECODE
144 #undef END_OF_BASE_TREE_CODES
145
146 /* Each tree code class has an associated string representation.
147 These must correspond to the tree_code_class entries. */
148
149 const char *const tree_code_class_strings[] =
150 {
151 "exceptional",
152 "constant",
153 "type",
154 "declaration",
155 "reference",
156 "comparison",
157 "unary",
158 "binary",
159 "statement",
160 "vl_exp",
161 "expression"
162 };
163
164 /* obstack.[ch] explicitly declined to prototype this. */
165 extern int _obstack_allocated_p (struct obstack *h, void *obj);
166
167 /* Statistics-gathering stuff. */
168
169 static int tree_code_counts[MAX_TREE_CODES];
170 int tree_node_counts[(int) all_kinds];
171 int tree_node_sizes[(int) all_kinds];
172
173 /* Keep in sync with tree.h:enum tree_node_kind. */
174 static const char * const tree_node_kind_names[] = {
175 "decls",
176 "types",
177 "blocks",
178 "stmts",
179 "refs",
180 "exprs",
181 "constants",
182 "identifiers",
183 "vecs",
184 "binfos",
185 "ssa names",
186 "constructors",
187 "random kinds",
188 "lang_decl kinds",
189 "lang_type kinds",
190 "omp clauses",
191 };
192
193 /* Unique id for next decl created. */
194 static GTY(()) int next_decl_uid;
195 /* Unique id for next type created. */
196 static GTY(()) int next_type_uid = 1;
197 /* Unique id for next debug decl created. Use negative numbers,
198 to catch erroneous uses. */
199 static GTY(()) int next_debug_decl_uid;
200
201 /* Since we cannot rehash a type after it is in the table, we have to
202 keep the hash code. */
203
204 struct GTY((for_user)) type_hash {
205 unsigned long hash;
206 tree type;
207 };
208
209 /* Initial size of the hash table (rounded to next prime). */
210 #define TYPE_HASH_INITIAL_SIZE 1000
211
212 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
213 {
214 static hashval_t hash (type_hash *t) { return t->hash; }
215 static bool equal (type_hash *a, type_hash *b);
216
217 static void
218 handle_cache_entry (type_hash *&t)
219 {
220 extern void gt_ggc_mx (type_hash *&);
221 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
222 return;
223 else if (ggc_marked_p (t->type))
224 gt_ggc_mx (t);
225 else
226 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
227 }
228 };
229
230 /* Now here is the hash table. When recording a type, it is added to
231 the slot whose index is the hash code. Note that the hash table is
232 used for several kinds of types (function types, array types and
233 array index range types, for now). While all these live in the
234 same table, they are completely independent, and the hash code is
235 computed differently for each of these. */
236
237 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
238
239 /* Hash table and temporary node for larger integer const values. */
240 static GTY (()) tree int_cst_node;
241
242 struct int_cst_hasher : ggc_cache_hasher<tree>
243 {
244 static hashval_t hash (tree t);
245 static bool equal (tree x, tree y);
246 };
247
248 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
249
250 /* Hash table for optimization flags and target option flags. Use the same
251 hash table for both sets of options. Nodes for building the current
252 optimization and target option nodes. The assumption is most of the time
253 the options created will already be in the hash table, so we avoid
 254    allocating and freeing a node repeatedly.  */
255 static GTY (()) tree cl_optimization_node;
256 static GTY (()) tree cl_target_option_node;
257
258 struct cl_option_hasher : ggc_cache_hasher<tree>
259 {
260 static hashval_t hash (tree t);
261 static bool equal (tree x, tree y);
262 };
263
264 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
265
266 /* General tree->tree mapping structure for use in hash tables. */
267
268
269 static GTY ((cache))
270 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
271
272 static GTY ((cache))
273 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
274
275 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
276 {
277 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
278
279 static bool
280 equal (tree_vec_map *a, tree_vec_map *b)
281 {
282 return a->base.from == b->base.from;
283 }
284
285 static void
286 handle_cache_entry (tree_vec_map *&m)
287 {
288 extern void gt_ggc_mx (tree_vec_map *&);
289 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
290 return;
291 else if (ggc_marked_p (m->base.from))
292 gt_ggc_mx (m);
293 else
294 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
295 }
296 };
297
298 static GTY ((cache))
299 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
300
301 static void set_type_quals (tree, int);
302 static void print_type_hash_statistics (void);
303 static void print_debug_expr_statistics (void);
304 static void print_value_expr_statistics (void);
305 static void type_hash_list (const_tree, inchash::hash &);
306 static void attribute_hash_list (const_tree, inchash::hash &);
307
308 tree global_trees[TI_MAX];
309 tree integer_types[itk_none];
310
311 bool int_n_enabled_p[NUM_INT_N_ENTS];
312 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
313
314 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
315
316 /* Number of operands for each OpenMP clause. */
317 unsigned const char omp_clause_num_ops[] =
318 {
319 0, /* OMP_CLAUSE_ERROR */
320 1, /* OMP_CLAUSE_PRIVATE */
321 1, /* OMP_CLAUSE_SHARED */
322 1, /* OMP_CLAUSE_FIRSTPRIVATE */
323 2, /* OMP_CLAUSE_LASTPRIVATE */
324 4, /* OMP_CLAUSE_REDUCTION */
325 1, /* OMP_CLAUSE_COPYIN */
326 1, /* OMP_CLAUSE_COPYPRIVATE */
327 3, /* OMP_CLAUSE_LINEAR */
328 2, /* OMP_CLAUSE_ALIGNED */
329 1, /* OMP_CLAUSE_DEPEND */
330 1, /* OMP_CLAUSE_UNIFORM */
331 2, /* OMP_CLAUSE_FROM */
332 2, /* OMP_CLAUSE_TO */
333 2, /* OMP_CLAUSE_MAP */
334 2, /* OMP_CLAUSE__CACHE_ */
335 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
336 1, /* OMP_CLAUSE_USE_DEVICE */
337 2, /* OMP_CLAUSE_GANG */
338 1, /* OMP_CLAUSE_ASYNC */
339 1, /* OMP_CLAUSE_WAIT */
340 0, /* OMP_CLAUSE_AUTO */
341 0, /* OMP_CLAUSE_SEQ */
342 1, /* OMP_CLAUSE__LOOPTEMP_ */
343 1, /* OMP_CLAUSE_IF */
344 1, /* OMP_CLAUSE_NUM_THREADS */
345 1, /* OMP_CLAUSE_SCHEDULE */
346 0, /* OMP_CLAUSE_NOWAIT */
347 0, /* OMP_CLAUSE_ORDERED */
348 0, /* OMP_CLAUSE_DEFAULT */
349 3, /* OMP_CLAUSE_COLLAPSE */
350 0, /* OMP_CLAUSE_UNTIED */
351 1, /* OMP_CLAUSE_FINAL */
352 0, /* OMP_CLAUSE_MERGEABLE */
353 1, /* OMP_CLAUSE_DEVICE */
354 1, /* OMP_CLAUSE_DIST_SCHEDULE */
355 0, /* OMP_CLAUSE_INBRANCH */
356 0, /* OMP_CLAUSE_NOTINBRANCH */
357 1, /* OMP_CLAUSE_NUM_TEAMS */
358 1, /* OMP_CLAUSE_THREAD_LIMIT */
359 0, /* OMP_CLAUSE_PROC_BIND */
360 1, /* OMP_CLAUSE_SAFELEN */
361 1, /* OMP_CLAUSE_SIMDLEN */
362 0, /* OMP_CLAUSE_FOR */
363 0, /* OMP_CLAUSE_PARALLEL */
364 0, /* OMP_CLAUSE_SECTIONS */
365 0, /* OMP_CLAUSE_TASKGROUP */
366 1, /* OMP_CLAUSE__SIMDUID_ */
367 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
368 0, /* OMP_CLAUSE_INDEPENDENT */
369 1, /* OMP_CLAUSE_WORKER */
370 1, /* OMP_CLAUSE_VECTOR */
371 1, /* OMP_CLAUSE_NUM_GANGS */
372 1, /* OMP_CLAUSE_NUM_WORKERS */
373 1, /* OMP_CLAUSE_VECTOR_LENGTH */
374 };
375
376 const char * const omp_clause_code_name[] =
377 {
378 "error_clause",
379 "private",
380 "shared",
381 "firstprivate",
382 "lastprivate",
383 "reduction",
384 "copyin",
385 "copyprivate",
386 "linear",
387 "aligned",
388 "depend",
389 "uniform",
390 "from",
391 "to",
392 "map",
393 "_cache_",
394 "device_resident",
395 "use_device",
396 "gang",
397 "async",
398 "wait",
399 "auto",
400 "seq",
401 "_looptemp_",
402 "if",
403 "num_threads",
404 "schedule",
405 "nowait",
406 "ordered",
407 "default",
408 "collapse",
409 "untied",
410 "final",
411 "mergeable",
412 "device",
413 "dist_schedule",
414 "inbranch",
415 "notinbranch",
416 "num_teams",
417 "thread_limit",
418 "proc_bind",
419 "safelen",
420 "simdlen",
421 "for",
422 "parallel",
423 "sections",
424 "taskgroup",
425 "_simduid_",
426 "_Cilk_for_count_",
427 "independent",
428 "worker",
429 "vector",
430 "num_gangs",
431 "num_workers",
432 "vector_length"
433 };
434
435
436 /* Return the tree node structure used by tree code CODE. */
437
438 static inline enum tree_node_structure_enum
439 tree_node_structure_for_code (enum tree_code code)
440 {
441 switch (TREE_CODE_CLASS (code))
442 {
443 case tcc_declaration:
444 {
445 switch (code)
446 {
447 case FIELD_DECL:
448 return TS_FIELD_DECL;
449 case PARM_DECL:
450 return TS_PARM_DECL;
451 case VAR_DECL:
452 return TS_VAR_DECL;
453 case LABEL_DECL:
454 return TS_LABEL_DECL;
455 case RESULT_DECL:
456 return TS_RESULT_DECL;
457 case DEBUG_EXPR_DECL:
458 return TS_DECL_WRTL;
459 case CONST_DECL:
460 return TS_CONST_DECL;
461 case TYPE_DECL:
462 return TS_TYPE_DECL;
463 case FUNCTION_DECL:
464 return TS_FUNCTION_DECL;
465 case TRANSLATION_UNIT_DECL:
466 return TS_TRANSLATION_UNIT_DECL;
467 default:
468 return TS_DECL_NON_COMMON;
469 }
470 }
471 case tcc_type:
472 return TS_TYPE_NON_COMMON;
473 case tcc_reference:
474 case tcc_comparison:
475 case tcc_unary:
476 case tcc_binary:
477 case tcc_expression:
478 case tcc_statement:
479 case tcc_vl_exp:
480 return TS_EXP;
481 default: /* tcc_constant and tcc_exceptional */
482 break;
483 }
484 switch (code)
485 {
486 /* tcc_constant cases. */
487 case VOID_CST: return TS_TYPED;
488 case INTEGER_CST: return TS_INT_CST;
489 case REAL_CST: return TS_REAL_CST;
490 case FIXED_CST: return TS_FIXED_CST;
491 case COMPLEX_CST: return TS_COMPLEX;
492 case VECTOR_CST: return TS_VECTOR;
493 case STRING_CST: return TS_STRING;
494 /* tcc_exceptional cases. */
495 case ERROR_MARK: return TS_COMMON;
496 case IDENTIFIER_NODE: return TS_IDENTIFIER;
497 case TREE_LIST: return TS_LIST;
498 case TREE_VEC: return TS_VEC;
499 case SSA_NAME: return TS_SSA_NAME;
500 case PLACEHOLDER_EXPR: return TS_COMMON;
501 case STATEMENT_LIST: return TS_STATEMENT_LIST;
502 case BLOCK: return TS_BLOCK;
503 case CONSTRUCTOR: return TS_CONSTRUCTOR;
504 case TREE_BINFO: return TS_BINFO;
505 case OMP_CLAUSE: return TS_OMP_CLAUSE;
506 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
507 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
508
509 default:
510 gcc_unreachable ();
511 }
512 }
513
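/* Illustrative note (not part of the original source): the mapping above
   means, for example,

     tree_node_structure_for_code (VAR_DECL)    == TS_VAR_DECL
     tree_node_structure_for_code (PLUS_EXPR)   == TS_EXP
     tree_node_structure_for_code (INTEGER_CST) == TS_INT_CST

   i.e. every expression-class code shares TS_EXP, while decls and
   constants each get their own structure.  */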
514
515 /* Initialize tree_contains_struct to describe the hierarchy of tree
516 nodes. */
517
518 static void
519 initialize_tree_contains_struct (void)
520 {
521 unsigned i;
522
523 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
524 {
525 enum tree_code code;
526 enum tree_node_structure_enum ts_code;
527
528 code = (enum tree_code) i;
529 ts_code = tree_node_structure_for_code (code);
530
531 /* Mark the TS structure itself. */
532 tree_contains_struct[code][ts_code] = 1;
533
534 /* Mark all the structures that TS is derived from. */
535 switch (ts_code)
536 {
537 case TS_TYPED:
538 case TS_BLOCK:
539 MARK_TS_BASE (code);
540 break;
541
542 case TS_COMMON:
543 case TS_INT_CST:
544 case TS_REAL_CST:
545 case TS_FIXED_CST:
546 case TS_VECTOR:
547 case TS_STRING:
548 case TS_COMPLEX:
549 case TS_SSA_NAME:
550 case TS_CONSTRUCTOR:
551 case TS_EXP:
552 case TS_STATEMENT_LIST:
553 MARK_TS_TYPED (code);
554 break;
555
556 case TS_IDENTIFIER:
557 case TS_DECL_MINIMAL:
558 case TS_TYPE_COMMON:
559 case TS_LIST:
560 case TS_VEC:
561 case TS_BINFO:
562 case TS_OMP_CLAUSE:
563 case TS_OPTIMIZATION:
564 case TS_TARGET_OPTION:
565 MARK_TS_COMMON (code);
566 break;
567
568 case TS_TYPE_WITH_LANG_SPECIFIC:
569 MARK_TS_TYPE_COMMON (code);
570 break;
571
572 case TS_TYPE_NON_COMMON:
573 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
574 break;
575
576 case TS_DECL_COMMON:
577 MARK_TS_DECL_MINIMAL (code);
578 break;
579
580 case TS_DECL_WRTL:
581 case TS_CONST_DECL:
582 MARK_TS_DECL_COMMON (code);
583 break;
584
585 case TS_DECL_NON_COMMON:
586 MARK_TS_DECL_WITH_VIS (code);
587 break;
588
589 case TS_DECL_WITH_VIS:
590 case TS_PARM_DECL:
591 case TS_LABEL_DECL:
592 case TS_RESULT_DECL:
593 MARK_TS_DECL_WRTL (code);
594 break;
595
596 case TS_FIELD_DECL:
597 MARK_TS_DECL_COMMON (code);
598 break;
599
600 case TS_VAR_DECL:
601 MARK_TS_DECL_WITH_VIS (code);
602 break;
603
604 case TS_TYPE_DECL:
605 case TS_FUNCTION_DECL:
606 MARK_TS_DECL_NON_COMMON (code);
607 break;
608
609 case TS_TRANSLATION_UNIT_DECL:
610 MARK_TS_DECL_COMMON (code);
611 break;
612
613 default:
614 gcc_unreachable ();
615 }
616 }
617
618 /* Basic consistency checks for attributes used in fold. */
619 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
620 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
621 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
622 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
623 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
624 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
625 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
626 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
629 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
631 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
632 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
634 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
645 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
646 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
647 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
648 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
649 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
650 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
651 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
652 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
653 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
654 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
655 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
657 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
659 }
660
661
662 /* Init tree.c. */
663
664 void
665 init_ttree (void)
666 {
667 /* Initialize the hash table of types. */
668 type_hash_table
669 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
670
671 debug_expr_for_decl
672 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
673
674 value_expr_for_decl
675 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
676
677 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
678
679 int_cst_node = make_int_cst (1, 1);
680
681 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
682
683 cl_optimization_node = make_node (OPTIMIZATION_NODE);
684 cl_target_option_node = make_node (TARGET_OPTION_NODE);
685
686 /* Initialize the tree_contains_struct array. */
687 initialize_tree_contains_struct ();
688 lang_hooks.init_ts ();
689 }
690
691 \f
692 /* The name of the object as the assembler will see it (but before any
693 translations made by ASM_OUTPUT_LABELREF). Often this is the same
694 as DECL_NAME. It is an IDENTIFIER_NODE. */
695 tree
696 decl_assembler_name (tree decl)
697 {
698 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
699 lang_hooks.set_decl_assembler_name (decl);
700 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
701 }
702
703 /* When the target supports COMDAT groups, this indicates which group the
704 DECL is associated with. This can be either an IDENTIFIER_NODE or a
705 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
706 tree
707 decl_comdat_group (const_tree node)
708 {
709 struct symtab_node *snode = symtab_node::get (node);
710 if (!snode)
711 return NULL;
712 return snode->get_comdat_group ();
713 }
714
715 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
716 tree
717 decl_comdat_group_id (const_tree node)
718 {
719 struct symtab_node *snode = symtab_node::get (node);
720 if (!snode)
721 return NULL;
722 return snode->get_comdat_group_id ();
723 }
724
 725 /* When the target supports named sections, return the section name of NODE
 726    as a string, or NULL if it is in no section.  */
727 const char *
728 decl_section_name (const_tree node)
729 {
730 struct symtab_node *snode = symtab_node::get (node);
731 if (!snode)
732 return NULL;
733 return snode->get_section ();
734 }
735
 736 /* Set the section name of NODE to the string VALUE, or clear it when
 737    VALUE is NULL.  */
738 void
739 set_decl_section_name (tree node, const char *value)
740 {
741 struct symtab_node *snode;
742
743 if (value == NULL)
744 {
745 snode = symtab_node::get (node);
746 if (!snode)
747 return;
748 }
749 else if (TREE_CODE (node) == VAR_DECL)
750 snode = varpool_node::get_create (node);
751 else
752 snode = cgraph_node::get_create (node);
753 snode->set_section (value);
754 }
755
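/* Illustrative usage sketch (not part of the original source; DECL is
   assumed to be some VAR_DECL or FUNCTION_DECL built elsewhere):

     set_decl_section_name (decl, ".text.hot");   // place DECL in a named section
     const char *sect = decl_section_name (decl); // -> ".text.hot"
     set_decl_section_name (decl, NULL);          // drop the explicit section

   Both helpers go through the symtab node rather than storing the name on
   the DECL itself.  */
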
756 /* Return TLS model of a variable NODE. */
757 enum tls_model
758 decl_tls_model (const_tree node)
759 {
760 struct varpool_node *snode = varpool_node::get (node);
761 if (!snode)
762 return TLS_MODEL_NONE;
763 return snode->tls_model;
764 }
765
766 /* Set TLS model of variable NODE to MODEL. */
767 void
768 set_decl_tls_model (tree node, enum tls_model model)
769 {
770 struct varpool_node *vnode;
771
772 if (model == TLS_MODEL_NONE)
773 {
774 vnode = varpool_node::get (node);
775 if (!vnode)
776 return;
777 }
778 else
779 vnode = varpool_node::get_create (node);
780 vnode->tls_model = model;
781 }
782
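/* Illustrative usage sketch (not part of the original source; VAR is
   assumed to be a thread-local VAR_DECL registered with the varpool):

     set_decl_tls_model (var, TLS_MODEL_INITIAL_EXEC);
     gcc_assert (decl_tls_model (var) == TLS_MODEL_INITIAL_EXEC);

   As with section names, the model lives on the varpool node, so a decl
   with no varpool node reports TLS_MODEL_NONE.  */
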
783 /* Compute the number of bytes occupied by a tree with code CODE.
784 This function cannot be used for nodes that have variable sizes,
785 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
786 size_t
787 tree_code_size (enum tree_code code)
788 {
789 switch (TREE_CODE_CLASS (code))
790 {
791 case tcc_declaration: /* A decl node */
792 {
793 switch (code)
794 {
795 case FIELD_DECL:
796 return sizeof (struct tree_field_decl);
797 case PARM_DECL:
798 return sizeof (struct tree_parm_decl);
799 case VAR_DECL:
800 return sizeof (struct tree_var_decl);
801 case LABEL_DECL:
802 return sizeof (struct tree_label_decl);
803 case RESULT_DECL:
804 return sizeof (struct tree_result_decl);
805 case CONST_DECL:
806 return sizeof (struct tree_const_decl);
807 case TYPE_DECL:
808 return sizeof (struct tree_type_decl);
809 case FUNCTION_DECL:
810 return sizeof (struct tree_function_decl);
811 case DEBUG_EXPR_DECL:
812 return sizeof (struct tree_decl_with_rtl);
813 case TRANSLATION_UNIT_DECL:
814 return sizeof (struct tree_translation_unit_decl);
815 case NAMESPACE_DECL:
816 case IMPORTED_DECL:
817 case NAMELIST_DECL:
818 return sizeof (struct tree_decl_non_common);
819 default:
820 return lang_hooks.tree_size (code);
821 }
822 }
823
824 case tcc_type: /* a type node */
825 return sizeof (struct tree_type_non_common);
826
827 case tcc_reference: /* a reference */
828 case tcc_expression: /* an expression */
829 case tcc_statement: /* an expression with side effects */
830 case tcc_comparison: /* a comparison expression */
831 case tcc_unary: /* a unary arithmetic expression */
832 case tcc_binary: /* a binary arithmetic expression */
833 return (sizeof (struct tree_exp)
834 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
835
836 case tcc_constant: /* a constant */
837 switch (code)
838 {
839 case VOID_CST: return sizeof (struct tree_typed);
840 case INTEGER_CST: gcc_unreachable ();
841 case REAL_CST: return sizeof (struct tree_real_cst);
842 case FIXED_CST: return sizeof (struct tree_fixed_cst);
843 case COMPLEX_CST: return sizeof (struct tree_complex);
844 case VECTOR_CST: return sizeof (struct tree_vector);
845 case STRING_CST: gcc_unreachable ();
846 default:
847 return lang_hooks.tree_size (code);
848 }
849
850 case tcc_exceptional: /* something random, like an identifier. */
851 switch (code)
852 {
853 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
854 case TREE_LIST: return sizeof (struct tree_list);
855
856 case ERROR_MARK:
857 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
858
859 case TREE_VEC:
860 case OMP_CLAUSE: gcc_unreachable ();
861
862 case SSA_NAME: return sizeof (struct tree_ssa_name);
863
864 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
865 case BLOCK: return sizeof (struct tree_block);
866 case CONSTRUCTOR: return sizeof (struct tree_constructor);
867 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
868 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
869
870 default:
871 return lang_hooks.tree_size (code);
872 }
873
874 default:
875 gcc_unreachable ();
876 }
877 }
878
879 /* Compute the number of bytes occupied by NODE. This routine only
880 looks at TREE_CODE, except for those nodes that have variable sizes. */
881 size_t
882 tree_size (const_tree node)
883 {
884 const enum tree_code code = TREE_CODE (node);
885 switch (code)
886 {
887 case INTEGER_CST:
888 return (sizeof (struct tree_int_cst)
889 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
890
891 case TREE_BINFO:
892 return (offsetof (struct tree_binfo, base_binfos)
893 + vec<tree, va_gc>
894 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
895
896 case TREE_VEC:
897 return (sizeof (struct tree_vec)
898 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
899
900 case VECTOR_CST:
901 return (sizeof (struct tree_vector)
902 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
903
904 case STRING_CST:
905 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
906
907 case OMP_CLAUSE:
908 return (sizeof (struct tree_omp_clause)
909 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
910 * sizeof (tree));
911
912 default:
913 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
914 return (sizeof (struct tree_exp)
915 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
916 else
917 return tree_code_size (code);
918 }
919 }
920
921 /* Record interesting allocation statistics for a tree node with CODE
922 and LENGTH. */
923
924 static void
925 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
926 size_t length ATTRIBUTE_UNUSED)
927 {
928 enum tree_code_class type = TREE_CODE_CLASS (code);
929 tree_node_kind kind;
930
931 if (!GATHER_STATISTICS)
932 return;
933
934 switch (type)
935 {
936 case tcc_declaration: /* A decl node */
937 kind = d_kind;
938 break;
939
940 case tcc_type: /* a type node */
941 kind = t_kind;
942 break;
943
944 case tcc_statement: /* an expression with side effects */
945 kind = s_kind;
946 break;
947
948 case tcc_reference: /* a reference */
949 kind = r_kind;
950 break;
951
952 case tcc_expression: /* an expression */
953 case tcc_comparison: /* a comparison expression */
954 case tcc_unary: /* a unary arithmetic expression */
955 case tcc_binary: /* a binary arithmetic expression */
956 kind = e_kind;
957 break;
958
959 case tcc_constant: /* a constant */
960 kind = c_kind;
961 break;
962
963 case tcc_exceptional: /* something random, like an identifier. */
964 switch (code)
965 {
966 case IDENTIFIER_NODE:
967 kind = id_kind;
968 break;
969
970 case TREE_VEC:
971 kind = vec_kind;
972 break;
973
974 case TREE_BINFO:
975 kind = binfo_kind;
976 break;
977
978 case SSA_NAME:
979 kind = ssa_name_kind;
980 break;
981
982 case BLOCK:
983 kind = b_kind;
984 break;
985
986 case CONSTRUCTOR:
987 kind = constr_kind;
988 break;
989
990 case OMP_CLAUSE:
991 kind = omp_clause_kind;
992 break;
993
994 default:
995 kind = x_kind;
996 break;
997 }
998 break;
999
1000 case tcc_vl_exp:
1001 kind = e_kind;
1002 break;
1003
1004 default:
1005 gcc_unreachable ();
1006 }
1007
1008 tree_code_counts[(int) code]++;
1009 tree_node_counts[(int) kind]++;
1010 tree_node_sizes[(int) kind] += length;
1011 }
1012
1013 /* Allocate and return a new UID from the DECL_UID namespace. */
1014
1015 int
1016 allocate_decl_uid (void)
1017 {
1018 return next_decl_uid++;
1019 }
1020
1021 /* Return a newly allocated node of code CODE. For decl and type
1022 nodes, some other fields are initialized. The rest of the node is
1023 initialized to zero. This function cannot be used for TREE_VEC,
1024 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1025 tree_code_size.
1026
1027 Achoo! I got a code in the node. */
1028
1029 tree
1030 make_node_stat (enum tree_code code MEM_STAT_DECL)
1031 {
1032 tree t;
1033 enum tree_code_class type = TREE_CODE_CLASS (code);
1034 size_t length = tree_code_size (code);
1035
1036 record_node_allocation_statistics (code, length);
1037
1038 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1039 TREE_SET_CODE (t, code);
1040
1041 switch (type)
1042 {
1043 case tcc_statement:
1044 TREE_SIDE_EFFECTS (t) = 1;
1045 break;
1046
1047 case tcc_declaration:
1048 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1049 {
1050 if (code == FUNCTION_DECL)
1051 {
1052 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1053 DECL_MODE (t) = FUNCTION_MODE;
1054 }
1055 else
1056 DECL_ALIGN (t) = 1;
1057 }
1058 DECL_SOURCE_LOCATION (t) = input_location;
1059 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1060 DECL_UID (t) = --next_debug_decl_uid;
1061 else
1062 {
1063 DECL_UID (t) = allocate_decl_uid ();
1064 SET_DECL_PT_UID (t, -1);
1065 }
1066 if (TREE_CODE (t) == LABEL_DECL)
1067 LABEL_DECL_UID (t) = -1;
1068
1069 break;
1070
1071 case tcc_type:
1072 TYPE_UID (t) = next_type_uid++;
1073 TYPE_ALIGN (t) = BITS_PER_UNIT;
1074 TYPE_USER_ALIGN (t) = 0;
1075 TYPE_MAIN_VARIANT (t) = t;
1076 TYPE_CANONICAL (t) = t;
1077
1078 /* Default to no attributes for type, but let target change that. */
1079 TYPE_ATTRIBUTES (t) = NULL_TREE;
1080 targetm.set_default_type_attributes (t);
1081
1082 /* We have not yet computed the alias set for this type. */
1083 TYPE_ALIAS_SET (t) = -1;
1084 break;
1085
1086 case tcc_constant:
1087 TREE_CONSTANT (t) = 1;
1088 break;
1089
1090 case tcc_expression:
1091 switch (code)
1092 {
1093 case INIT_EXPR:
1094 case MODIFY_EXPR:
1095 case VA_ARG_EXPR:
1096 case PREDECREMENT_EXPR:
1097 case PREINCREMENT_EXPR:
1098 case POSTDECREMENT_EXPR:
1099 case POSTINCREMENT_EXPR:
1100 /* All of these have side-effects, no matter what their
1101 operands are. */
1102 TREE_SIDE_EFFECTS (t) = 1;
1103 break;
1104
1105 default:
1106 break;
1107 }
1108 break;
1109
1110 default:
1111 /* Other classes need no special treatment. */
1112 break;
1113 }
1114
1115 return t;
1116 }
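
/* Illustrative usage sketch (not part of the original source): callers
   normally go through the make_node macro, e.g.

     tree t = make_node (RECORD_TYPE);

   which hands back a cleared node of the right size with TYPE_UID,
   TYPE_ALIGN and TYPE_MAIN_VARIANT already initialized as above; the
   caller then fills in TYPE_FIELDS and lays the type out.  */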
1117 \f
1118 /* Return a new node with the same contents as NODE except that its
1119 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1120
1121 tree
1122 copy_node_stat (tree node MEM_STAT_DECL)
1123 {
1124 tree t;
1125 enum tree_code code = TREE_CODE (node);
1126 size_t length;
1127
1128 gcc_assert (code != STATEMENT_LIST);
1129
1130 length = tree_size (node);
1131 record_node_allocation_statistics (code, length);
1132 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1133 memcpy (t, node, length);
1134
1135 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1136 TREE_CHAIN (t) = 0;
1137 TREE_ASM_WRITTEN (t) = 0;
1138 TREE_VISITED (t) = 0;
1139
1140 if (TREE_CODE_CLASS (code) == tcc_declaration)
1141 {
1142 if (code == DEBUG_EXPR_DECL)
1143 DECL_UID (t) = --next_debug_decl_uid;
1144 else
1145 {
1146 DECL_UID (t) = allocate_decl_uid ();
1147 if (DECL_PT_UID_SET_P (node))
1148 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1149 }
1150 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1151 && DECL_HAS_VALUE_EXPR_P (node))
1152 {
1153 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1154 DECL_HAS_VALUE_EXPR_P (t) = 1;
1155 }
 1156       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1157 if (TREE_CODE (node) == VAR_DECL)
1158 {
1159 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1160 t->decl_with_vis.symtab_node = NULL;
1161 }
1162 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1163 {
1164 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1165 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1166 }
1167 if (TREE_CODE (node) == FUNCTION_DECL)
1168 {
1169 DECL_STRUCT_FUNCTION (t) = NULL;
1170 t->decl_with_vis.symtab_node = NULL;
1171 }
1172 }
1173 else if (TREE_CODE_CLASS (code) == tcc_type)
1174 {
1175 TYPE_UID (t) = next_type_uid++;
1176 /* The following is so that the debug code for
1177 the copy is different from the original type.
1178 The two statements usually duplicate each other
1179 (because they clear fields of the same union),
1180 but the optimizer should catch that. */
1181 TYPE_SYMTAB_POINTER (t) = 0;
1182 TYPE_SYMTAB_ADDRESS (t) = 0;
1183
1184 /* Do not copy the values cache. */
1185 if (TYPE_CACHED_VALUES_P (t))
1186 {
1187 TYPE_CACHED_VALUES_P (t) = 0;
1188 TYPE_CACHED_VALUES (t) = NULL_TREE;
1189 }
1190 }
1191
1192 return t;
1193 }
1194
1195 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1196 For example, this can copy a list made of TREE_LIST nodes. */
1197
1198 tree
1199 copy_list (tree list)
1200 {
1201 tree head;
1202 tree prev, next;
1203
1204 if (list == 0)
1205 return 0;
1206
1207 head = prev = copy_node (list);
1208 next = TREE_CHAIN (list);
1209 while (next)
1210 {
1211 TREE_CHAIN (prev) = copy_node (next);
1212 prev = TREE_CHAIN (prev);
1213 next = TREE_CHAIN (next);
1214 }
1215 return head;
1216 }
1217
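/* Illustrative usage sketch (not part of the original source): copying a
   two-element TREE_LIST chain, assuming A and B are existing trees:

     tree list = tree_cons (NULL_TREE, a, tree_cons (NULL_TREE, b, NULL_TREE));
     tree dup  = copy_list (list);

   DUP is a fresh chain of TREE_LIST nodes whose TREE_PURPOSE/TREE_VALUE
   fields still point at the original A and B; the copy is shallow.  */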
1218 \f
1219 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1220 INTEGER_CST with value CST and type TYPE. */
1221
1222 static unsigned int
1223 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1224 {
1225 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1226 /* We need an extra zero HWI if CST is an unsigned integer with its
1227 upper bit set, and if CST occupies a whole number of HWIs. */
1228 if (TYPE_UNSIGNED (type)
1229 && wi::neg_p (cst)
1230 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1231 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1232 return cst.get_len ();
1233 }
1234
1235 /* Return a new INTEGER_CST with value CST and type TYPE. */
1236
1237 static tree
1238 build_new_int_cst (tree type, const wide_int &cst)
1239 {
1240 unsigned int len = cst.get_len ();
1241 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1242 tree nt = make_int_cst (len, ext_len);
1243
1244 if (len < ext_len)
1245 {
1246 --ext_len;
1247 TREE_INT_CST_ELT (nt, ext_len) = 0;
1248 for (unsigned int i = len; i < ext_len; ++i)
1249 TREE_INT_CST_ELT (nt, i) = -1;
1250 }
1251 else if (TYPE_UNSIGNED (type)
1252 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1253 {
1254 len--;
1255 TREE_INT_CST_ELT (nt, len)
1256 = zext_hwi (cst.elt (len),
1257 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1258 }
1259
1260 for (unsigned int i = 0; i < len; i++)
1261 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1262 TREE_TYPE (nt) = type;
1263 return nt;
1264 }
1265
1266 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1267
1268 tree
1269 build_int_cst (tree type, HOST_WIDE_INT low)
1270 {
1271 /* Support legacy code. */
1272 if (!type)
1273 type = integer_type_node;
1274
1275 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1276 }
1277
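/* Create an INT_CST node with a CST value zero extended to TYPE.  */
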
1278 tree
1279 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1280 {
1281 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1282 }
1283
1284 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1285
1286 tree
1287 build_int_cst_type (tree type, HOST_WIDE_INT low)
1288 {
1289 gcc_assert (type);
1290 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1291 }
1292
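/* Illustrative usage sketch (not part of the original source):

     tree c0 = build_int_cst (integer_type_node, 42);
     tree c1 = build_int_cst_type (size_type_node, 0);
     tree c2 = build_int_cstu (size_type_node, 7);

   All three funnel into wide_int_to_tree below, so equal values of the
   same type come back as the same shared node.  */
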
 1293 /* Construct a tree of type TYPE with the value given by CST.  The signedness
1294 of CST is assumed to be the same as the signedness of TYPE. */
1295
1296 tree
1297 double_int_to_tree (tree type, double_int cst)
1298 {
1299 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1300 }
1301
 1302 /* Force the wide_int CST to the range of the type TYPE by sign or
 1303    zero extending it.  OVERFLOWABLE indicates whether we are interested in
 1304    overflow of the value: when > 0 we are only interested in signed
 1305    overflow, when < 0 we are interested in any overflow.  OVERFLOWED
 1306    indicates whether overflow has already occurred.  We force the
 1307    value to be within the range of TYPE (by setting to 0 or 1 all
 1308    the bits outside the type's range).  We set TREE_OVERFLOW on the
 1309    returned node if
 1310      OVERFLOWED is nonzero,
 1311      or OVERFLOWABLE is > 0 and signed overflow occurs,
 1312      or OVERFLOWABLE is < 0 and any overflow occurs.
 1313    We return a new tree node for the extended wide_int.  The node
 1314    is shared if no overflow flags are set.  */
1315
1316
1317 tree
1318 force_fit_type (tree type, const wide_int_ref &cst,
1319 int overflowable, bool overflowed)
1320 {
1321 signop sign = TYPE_SIGN (type);
1322
1323 /* If we need to set overflow flags, return a new unshared node. */
1324 if (overflowed || !wi::fits_to_tree_p (cst, type))
1325 {
1326 if (overflowed
1327 || overflowable < 0
1328 || (overflowable > 0 && sign == SIGNED))
1329 {
1330 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1331 tree t = build_new_int_cst (type, tmp);
1332 TREE_OVERFLOW (t) = 1;
1333 return t;
1334 }
1335 }
1336
1337 /* Else build a shared node. */
1338 return wide_int_to_tree (type, cst);
1339 }
1340
1341 /* These are the hash table functions for the hash table of INTEGER_CST
1342 nodes of a sizetype. */
1343
 1344 /* Return the hash code of X, an INTEGER_CST.  */
1345
1346 hashval_t
1347 int_cst_hasher::hash (tree x)
1348 {
1349 const_tree const t = x;
1350 hashval_t code = TYPE_UID (TREE_TYPE (t));
1351 int i;
1352
1353 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1354 code ^= TREE_INT_CST_ELT (t, i);
1355
1356 return code;
1357 }
1358
 1359 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
 1360    is the same as that given by Y, which is also an INTEGER_CST tree node.  */
1361
1362 bool
1363 int_cst_hasher::equal (tree x, tree y)
1364 {
1365 const_tree const xt = x;
1366 const_tree const yt = y;
1367
1368 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1369 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1370 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1371 return false;
1372
1373 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1374 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1375 return false;
1376
1377 return true;
1378 }
1379
1380 /* Create an INT_CST node of TYPE and value CST.
1381 The returned node is always shared. For small integers we use a
1382 per-type vector cache, for larger ones we use a single hash table.
1383 The value is extended from its precision according to the sign of
1384 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1385 the upper bits and ensures that hashing and value equality based
1386 upon the underlying HOST_WIDE_INTs works without masking. */
1387
1388 tree
1389 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1390 {
1391 tree t;
1392 int ix = -1;
1393 int limit = 0;
1394
1395 gcc_assert (type);
1396 unsigned int prec = TYPE_PRECISION (type);
1397 signop sgn = TYPE_SIGN (type);
1398
1399 /* Verify that everything is canonical. */
1400 int l = pcst.get_len ();
1401 if (l > 1)
1402 {
1403 if (pcst.elt (l - 1) == 0)
1404 gcc_checking_assert (pcst.elt (l - 2) < 0);
1405 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1406 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1407 }
1408
1409 wide_int cst = wide_int::from (pcst, prec, sgn);
1410 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1411
1412 if (ext_len == 1)
1413 {
1414 /* We just need to store a single HOST_WIDE_INT. */
1415 HOST_WIDE_INT hwi;
1416 if (TYPE_UNSIGNED (type))
1417 hwi = cst.to_uhwi ();
1418 else
1419 hwi = cst.to_shwi ();
1420
1421 switch (TREE_CODE (type))
1422 {
1423 case NULLPTR_TYPE:
1424 gcc_assert (hwi == 0);
1425 /* Fallthru. */
1426
1427 case POINTER_TYPE:
1428 case REFERENCE_TYPE:
1429 case POINTER_BOUNDS_TYPE:
1430 /* Cache NULL pointer and zero bounds. */
1431 if (hwi == 0)
1432 {
1433 limit = 1;
1434 ix = 0;
1435 }
1436 break;
1437
1438 case BOOLEAN_TYPE:
1439 /* Cache false or true. */
1440 limit = 2;
1441 if (hwi < 2)
1442 ix = hwi;
1443 break;
1444
1445 case INTEGER_TYPE:
1446 case OFFSET_TYPE:
1447 if (TYPE_SIGN (type) == UNSIGNED)
1448 {
1449 /* Cache [0, N). */
1450 limit = INTEGER_SHARE_LIMIT;
1451 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1452 ix = hwi;
1453 }
1454 else
1455 {
1456 /* Cache [-1, N). */
1457 limit = INTEGER_SHARE_LIMIT + 1;
1458 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1459 ix = hwi + 1;
1460 }
1461 break;
1462
1463 case ENUMERAL_TYPE:
1464 break;
1465
1466 default:
1467 gcc_unreachable ();
1468 }
1469
1470 if (ix >= 0)
1471 {
1472 /* Look for it in the type's vector of small shared ints. */
1473 if (!TYPE_CACHED_VALUES_P (type))
1474 {
1475 TYPE_CACHED_VALUES_P (type) = 1;
1476 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1477 }
1478
1479 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1480 if (t)
1481 /* Make sure no one is clobbering the shared constant. */
1482 gcc_checking_assert (TREE_TYPE (t) == type
1483 && TREE_INT_CST_NUNITS (t) == 1
1484 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1485 && TREE_INT_CST_EXT_NUNITS (t) == 1
1486 && TREE_INT_CST_ELT (t, 0) == hwi);
1487 else
1488 {
1489 /* Create a new shared int. */
1490 t = build_new_int_cst (type, cst);
1491 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1492 }
1493 }
1494 else
1495 {
1496 /* Use the cache of larger shared ints, using int_cst_node as
1497 a temporary. */
1498
1499 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1500 TREE_TYPE (int_cst_node) = type;
1501
1502 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1503 t = *slot;
1504 if (!t)
1505 {
1506 /* Insert this one into the hash table. */
1507 t = int_cst_node;
1508 *slot = t;
1509 /* Make a new node for next time round. */
1510 int_cst_node = make_int_cst (1, 1);
1511 }
1512 }
1513 }
1514 else
1515 {
1516 /* The value either hashes properly or we drop it on the floor
1517 for the gc to take care of. There will not be enough of them
1518 to worry about. */
1519
1520 tree nt = build_new_int_cst (type, cst);
1521 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1522 t = *slot;
1523 if (!t)
1524 {
1525 /* Insert this one into the hash table. */
1526 t = nt;
1527 *slot = t;
1528 }
1529 }
1530
1531 return t;
1532 }
1533
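/* Illustrative usage sketch (not part of the original source): building a
   constant directly from a wide_int, e.g. the value -1 in some integer TYPE:

     wide_int w = wi::shwi (-1, TYPE_PRECISION (type));
     tree c = wide_int_to_tree (type, w);

   For small values of integer types this hits the per-type
   TYPE_CACHED_VALUES vector; everything else goes through
   int_cst_hash_table.  */
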
1534 void
1535 cache_integer_cst (tree t)
1536 {
1537 tree type = TREE_TYPE (t);
1538 int ix = -1;
1539 int limit = 0;
1540 int prec = TYPE_PRECISION (type);
1541
1542 gcc_assert (!TREE_OVERFLOW (t));
1543
1544 switch (TREE_CODE (type))
1545 {
1546 case NULLPTR_TYPE:
1547 gcc_assert (integer_zerop (t));
1548 /* Fallthru. */
1549
1550 case POINTER_TYPE:
1551 case REFERENCE_TYPE:
1552 /* Cache NULL pointer. */
1553 if (integer_zerop (t))
1554 {
1555 limit = 1;
1556 ix = 0;
1557 }
1558 break;
1559
1560 case BOOLEAN_TYPE:
1561 /* Cache false or true. */
1562 limit = 2;
1563 if (wi::ltu_p (t, 2))
1564 ix = TREE_INT_CST_ELT (t, 0);
1565 break;
1566
1567 case INTEGER_TYPE:
1568 case OFFSET_TYPE:
1569 if (TYPE_UNSIGNED (type))
1570 {
1571 /* Cache 0..N */
1572 limit = INTEGER_SHARE_LIMIT;
1573
 1574 	  /* This is a little hokey, but if the prec is smaller than
1575 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1576 obvious test will not get the correct answer. */
1577 if (prec < HOST_BITS_PER_WIDE_INT)
1578 {
1579 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1580 ix = tree_to_uhwi (t);
1581 }
1582 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1583 ix = tree_to_uhwi (t);
1584 }
1585 else
1586 {
1587 /* Cache -1..N */
1588 limit = INTEGER_SHARE_LIMIT + 1;
1589
1590 if (integer_minus_onep (t))
1591 ix = 0;
1592 else if (!wi::neg_p (t))
1593 {
1594 if (prec < HOST_BITS_PER_WIDE_INT)
1595 {
1596 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1597 ix = tree_to_shwi (t) + 1;
1598 }
1599 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1600 ix = tree_to_shwi (t) + 1;
1601 }
1602 }
1603 break;
1604
1605 case ENUMERAL_TYPE:
1606 break;
1607
1608 default:
1609 gcc_unreachable ();
1610 }
1611
1612 if (ix >= 0)
1613 {
1614 /* Look for it in the type's vector of small shared ints. */
1615 if (!TYPE_CACHED_VALUES_P (type))
1616 {
1617 TYPE_CACHED_VALUES_P (type) = 1;
1618 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1619 }
1620
1621 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1622 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1623 }
1624 else
1625 {
1626 /* Use the cache of larger shared ints. */
1627 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1628 /* If there is already an entry for the number verify it's the
1629 same. */
1630 if (*slot)
1631 gcc_assert (wi::eq_p (tree (*slot), t));
1632 else
1633 /* Otherwise insert this one into the hash table. */
1634 *slot = t;
1635 }
1636 }
1637
1638
 1639 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1640 and the rest are zeros. */
1641
1642 tree
1643 build_low_bits_mask (tree type, unsigned bits)
1644 {
1645 gcc_assert (bits <= TYPE_PRECISION (type));
1646
1647 return wide_int_to_tree (type, wi::mask (bits, false,
1648 TYPE_PRECISION (type)));
1649 }
1650
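/* Illustrative usage sketch (not part of the original source):

     tree mask = build_low_bits_mask (unsigned_type_node, 4);

   yields the constant 0xf, i.e. wi::mask (4, false, prec) converted to a
   shared INTEGER_CST of the given type.  */
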
 1651 /* Check that X is an integer constant that can be expressed in an (unsigned)
1652 HOST_WIDE_INT without loss of precision. */
1653
1654 bool
1655 cst_and_fits_in_hwi (const_tree x)
1656 {
1657 if (TREE_CODE (x) != INTEGER_CST)
1658 return false;
1659
1660 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1661 return false;
1662
1663 return TREE_INT_CST_NUNITS (x) == 1;
1664 }
1665
1666 /* Build a newly constructed TREE_VEC node of length LEN. */
1667
1668 tree
1669 make_vector_stat (unsigned len MEM_STAT_DECL)
1670 {
1671 tree t;
1672 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1673
1674 record_node_allocation_statistics (VECTOR_CST, length);
1675
1676 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1677
1678 TREE_SET_CODE (t, VECTOR_CST);
1679 TREE_CONSTANT (t) = 1;
1680
1681 return t;
1682 }
1683
1684 /* Return a new VECTOR_CST node whose type is TYPE and whose values
 1685    are given by the array VALS.  */
1686
1687 tree
1688 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1689 {
1690 int over = 0;
1691 unsigned cnt = 0;
1692 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1693 TREE_TYPE (v) = type;
1694
1695 /* Iterate through elements and check for overflow. */
1696 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1697 {
1698 tree value = vals[cnt];
1699
1700 VECTOR_CST_ELT (v, cnt) = value;
1701
1702 /* Don't crash if we get an address constant. */
1703 if (!CONSTANT_CLASS_P (value))
1704 continue;
1705
1706 over |= TREE_OVERFLOW (value);
1707 }
1708
1709 TREE_OVERFLOW (v) = over;
1710 return v;
1711 }
1712
1713 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1714 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1715
1716 tree
1717 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1718 {
1719 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1720 unsigned HOST_WIDE_INT idx;
1721 tree value;
1722
1723 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1724 vec[idx] = value;
1725 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1726 vec[idx] = build_zero_cst (TREE_TYPE (type));
1727
1728 return build_vector (type, vec);
1729 }
1730
1731 /* Build a vector of type VECTYPE where all the elements are SCs. */
1732 tree
1733 build_vector_from_val (tree vectype, tree sc)
1734 {
1735 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1736
1737 if (sc == error_mark_node)
1738 return sc;
1739
1740 /* Verify that the vector type is suitable for SC. Note that there
1741 is some inconsistency in the type-system with respect to restrict
1742 qualifications of pointers. Vector types always have a main-variant
1743 element type and the qualification is applied to the vector-type.
1744 So TREE_TYPE (vector-type) does not return a properly qualified
1745 vector element-type. */
1746 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1747 TREE_TYPE (vectype)));
1748
1749 if (CONSTANT_CLASS_P (sc))
1750 {
1751 tree *v = XALLOCAVEC (tree, nunits);
1752 for (i = 0; i < nunits; ++i)
1753 v[i] = sc;
1754 return build_vector (vectype, v);
1755 }
1756 else
1757 {
1758 vec<constructor_elt, va_gc> *v;
1759 vec_alloc (v, nunits);
1760 for (i = 0; i < nunits; ++i)
1761 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1762 return build_constructor (vectype, v);
1763 }
1764 }
1765
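/* Illustrative usage sketch (not part of the original source; VECTYPE is
   assumed to be some VECTOR_TYPE built elsewhere): splatting the scalar 1
   across all elements:

     tree splat = build_vector_from_val (vectype,
                                         build_one_cst (TREE_TYPE (vectype)));

   Because the element is a constant, this produces a VECTOR_CST rather
   than a CONSTRUCTOR.  */
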
1766 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1767 are in the vec pointed to by VALS. */
1768 tree
1769 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1770 {
1771 tree c = make_node (CONSTRUCTOR);
1772 unsigned int i;
1773 constructor_elt *elt;
1774 bool constant_p = true;
1775 bool side_effects_p = false;
1776
1777 TREE_TYPE (c) = type;
1778 CONSTRUCTOR_ELTS (c) = vals;
1779
1780 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1781 {
1782 /* Mostly ctors will have elts that don't have side-effects, so
1783 the usual case is to scan all the elements. Hence a single
1784 loop for both const and side effects, rather than one loop
1785 each (with early outs). */
1786 if (!TREE_CONSTANT (elt->value))
1787 constant_p = false;
1788 if (TREE_SIDE_EFFECTS (elt->value))
1789 side_effects_p = true;
1790 }
1791
1792 TREE_SIDE_EFFECTS (c) = side_effects_p;
1793 TREE_CONSTANT (c) = constant_p;
1794
1795 return c;
1796 }
1797
1798 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1799 INDEX and VALUE. */
1800 tree
1801 build_constructor_single (tree type, tree index, tree value)
1802 {
1803 vec<constructor_elt, va_gc> *v;
1804 constructor_elt elt = {index, value};
1805
1806 vec_alloc (v, 1);
1807 v->quick_push (elt);
1808
1809 return build_constructor (type, v);
1810 }
1811
1812
1813 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1814 are in a list pointed to by VALS. */
1815 tree
1816 build_constructor_from_list (tree type, tree vals)
1817 {
1818 tree t;
1819 vec<constructor_elt, va_gc> *v = NULL;
1820
1821 if (vals)
1822 {
1823 vec_alloc (v, list_length (vals));
1824 for (t = vals; t; t = TREE_CHAIN (t))
1825 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1826 }
1827
1828 return build_constructor (type, v);
1829 }
1830
1831 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1832 of elements, provided as index/value pairs. */
1833
1834 tree
1835 build_constructor_va (tree type, int nelts, ...)
1836 {
1837 vec<constructor_elt, va_gc> *v = NULL;
1838 va_list p;
1839
1840 va_start (p, nelts);
1841 vec_alloc (v, nelts);
1842 while (nelts--)
1843 {
1844 tree index = va_arg (p, tree);
1845 tree value = va_arg (p, tree);
1846 CONSTRUCTOR_APPEND_ELT (v, index, value);
1847 }
1848 va_end (p);
1849 return build_constructor (type, v);
1850 }
1851
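/* Illustrative usage sketch (not part of the original source; ATYPE is
   assumed to be a two-element array of int built elsewhere):

     tree ctor = build_constructor_va (atype, 2,
                                       size_int (0), integer_one_node,
                                       size_int (1), integer_zero_node);

   which is shorthand for allocating the constructor_elt vec and calling
   build_constructor directly.  */
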
1852 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1853
1854 tree
1855 build_fixed (tree type, FIXED_VALUE_TYPE f)
1856 {
1857 tree v;
1858 FIXED_VALUE_TYPE *fp;
1859
1860 v = make_node (FIXED_CST);
1861 fp = ggc_alloc<fixed_value> ();
1862 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1863
1864 TREE_TYPE (v) = type;
1865 TREE_FIXED_CST_PTR (v) = fp;
1866 return v;
1867 }
1868
1869 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1870
1871 tree
1872 build_real (tree type, REAL_VALUE_TYPE d)
1873 {
1874 tree v;
1875 REAL_VALUE_TYPE *dp;
1876 int overflow = 0;
1877
1878 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1879 Consider doing it via real_convert now. */
1880
1881 v = make_node (REAL_CST);
1882 dp = ggc_alloc<real_value> ();
1883 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1884
1885 TREE_TYPE (v) = type;
1886 TREE_REAL_CST_PTR (v) = dp;
1887 TREE_OVERFLOW (v) = overflow;
1888 return v;
1889 }
1890
 1891 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
 1892    node I, converted as for the floating-point type TYPE.  */
1893
1894 REAL_VALUE_TYPE
1895 real_value_from_int_cst (const_tree type, const_tree i)
1896 {
1897 REAL_VALUE_TYPE d;
1898
1899 /* Clear all bits of the real value type so that we can later do
1900 bitwise comparisons to see if two values are the same. */
1901 memset (&d, 0, sizeof d);
1902
1903 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1904 TYPE_SIGN (TREE_TYPE (i)));
1905 return d;
1906 }
1907
1908 /* Given a tree representing an integer constant I, return a tree
1909 representing the same value as a floating-point constant of type TYPE. */
1910
1911 tree
1912 build_real_from_int_cst (tree type, const_tree i)
1913 {
1914 tree v;
1915 int overflow = TREE_OVERFLOW (i);
1916
1917 v = build_real (type, real_value_from_int_cst (type, i));
1918
1919 TREE_OVERFLOW (v) |= overflow;
1920 return v;
1921 }
1922
1923 /* Return a newly constructed STRING_CST node whose value is
1924 the LEN characters at STR.
1925 Note that for a C string literal, LEN should include the trailing NUL.
1926 The TREE_TYPE is not initialized. */
1927
1928 tree
1929 build_string (int len, const char *str)
1930 {
1931 tree s;
1932 size_t length;
1933
1934 /* Do not waste bytes provided by padding of struct tree_string. */
1935 length = len + offsetof (struct tree_string, str) + 1;
1936
1937 record_node_allocation_statistics (STRING_CST, length);
1938
1939 s = (tree) ggc_internal_alloc (length);
1940
1941 memset (s, 0, sizeof (struct tree_typed));
1942 TREE_SET_CODE (s, STRING_CST);
1943 TREE_CONSTANT (s) = 1;
1944 TREE_STRING_LENGTH (s) = len;
1945 memcpy (s->string.str, str, len);
1946 s->string.str[len] = '\0';
1947
1948 return s;
1949 }
1950
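/* Illustrative usage sketch (not part of the original source): for the C
   string literal "hi", LEN includes the trailing NUL and the caller is
   responsible for the type:

     tree s = build_string (3, "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (2)));

   build_index_type and build_array_type are the usual helpers here.  */
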
1951 /* Return a newly constructed COMPLEX_CST node whose value is
1952 specified by the real and imaginary parts REAL and IMAG.
1953 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1954 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1955
1956 tree
1957 build_complex (tree type, tree real, tree imag)
1958 {
1959 tree t = make_node (COMPLEX_CST);
1960
1961 TREE_REALPART (t) = real;
1962 TREE_IMAGPART (t) = imag;
1963 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1964 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1965 return t;
1966 }
1967
1968 /* Return a constant of arithmetic type TYPE which is the
1969 multiplicative identity of the set TYPE. */
1970
1971 tree
1972 build_one_cst (tree type)
1973 {
1974 switch (TREE_CODE (type))
1975 {
1976 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1977 case POINTER_TYPE: case REFERENCE_TYPE:
1978 case OFFSET_TYPE:
1979 return build_int_cst (type, 1);
1980
1981 case REAL_TYPE:
1982 return build_real (type, dconst1);
1983
1984 case FIXED_POINT_TYPE:
1985 /* We can only generate 1 for accum types. */
1986 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1987 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1988
1989 case VECTOR_TYPE:
1990 {
1991 tree scalar = build_one_cst (TREE_TYPE (type));
1992
1993 return build_vector_from_val (type, scalar);
1994 }
1995
1996 case COMPLEX_TYPE:
1997 return build_complex (type,
1998 build_one_cst (TREE_TYPE (type)),
1999 build_zero_cst (TREE_TYPE (type)));
2000
2001 default:
2002 gcc_unreachable ();
2003 }
2004 }
2005
2006 /* Return an integer of type TYPE containing all 1's in as much precision as
2007 it contains, or a complex or vector whose subparts are such integers. */
2008
2009 tree
2010 build_all_ones_cst (tree type)
2011 {
2012 if (TREE_CODE (type) == COMPLEX_TYPE)
2013 {
2014 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2015 return build_complex (type, scalar, scalar);
2016 }
2017 else
2018 return build_minus_one_cst (type);
2019 }
2020
2021 /* Return a constant of arithmetic type TYPE which is the
2022 opposite of the multiplicative identity of the set TYPE. */
2023
2024 tree
2025 build_minus_one_cst (tree type)
2026 {
2027 switch (TREE_CODE (type))
2028 {
2029 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2030 case POINTER_TYPE: case REFERENCE_TYPE:
2031 case OFFSET_TYPE:
2032 return build_int_cst (type, -1);
2033
2034 case REAL_TYPE:
2035 return build_real (type, dconstm1);
2036
2037 case FIXED_POINT_TYPE:
2038 /* We can only generate -1 for accum types. */
2039 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2040 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2041 TYPE_MODE (type)));
2042
2043 case VECTOR_TYPE:
2044 {
2045 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2046
2047 return build_vector_from_val (type, scalar);
2048 }
2049
2050 case COMPLEX_TYPE:
2051 return build_complex (type,
2052 build_minus_one_cst (TREE_TYPE (type)),
2053 build_zero_cst (TREE_TYPE (type)));
2054
2055 default:
2056 gcc_unreachable ();
2057 }
2058 }
2059
2060 /* Build 0 constant of type TYPE. This is used by constructor folding
2061 and thus the constant should be represented in memory by
2062 zero(es). */
2063
2064 tree
2065 build_zero_cst (tree type)
2066 {
2067 switch (TREE_CODE (type))
2068 {
2069 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2070 case POINTER_TYPE: case REFERENCE_TYPE:
2071 case OFFSET_TYPE: case NULLPTR_TYPE:
2072 return build_int_cst (type, 0);
2073
2074 case REAL_TYPE:
2075 return build_real (type, dconst0);
2076
2077 case FIXED_POINT_TYPE:
2078 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2079
2080 case VECTOR_TYPE:
2081 {
2082 tree scalar = build_zero_cst (TREE_TYPE (type));
2083
2084 return build_vector_from_val (type, scalar);
2085 }
2086
2087 case COMPLEX_TYPE:
2088 {
2089 tree zero = build_zero_cst (TREE_TYPE (type));
2090
2091 return build_complex (type, zero, zero);
2092 }
2093
2094 default:
2095 if (!AGGREGATE_TYPE_P (type))
2096 return fold_convert (type, integer_zero_node);
2097 return build_constructor (type, NULL);
2098 }
2099 }
2100
2101
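/* Illustrative sketch (not part of the original file): the constant
   builders above recurse element-wise through vector and complex types, so
   a single call yields the right constant for scalar, vector and complex
   TYPE alike.  The function name example_constant_builders is
   hypothetical.  Compiled out.  */
#if 0
static void
example_constant_builders (tree type)
{
  tree zero = build_zero_cst (type);	     /* 0, works for aggregates too.  */
  tree one = build_one_cst (type);	     /* multiplicative identity.  */
  tree minus_one = build_minus_one_cst (type);
  tree all_ones = build_all_ones_cst (type); /* all bits set per element.  */
  (void) zero; (void) one; (void) minus_one; (void) all_ones;
}
#endif
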
2102 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2103
2104 tree
2105 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2106 {
2107 tree t;
2108 size_t length = (offsetof (struct tree_binfo, base_binfos)
2109 + vec<tree, va_gc>::embedded_size (base_binfos));
2110
2111 record_node_allocation_statistics (TREE_BINFO, length);
2112
2113 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2114
2115 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2116
2117 TREE_SET_CODE (t, TREE_BINFO);
2118
2119 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2120
2121 return t;
2122 }
2123
2124 /* Create a CASE_LABEL_EXPR tree node and return it. */
2125
2126 tree
2127 build_case_label (tree low_value, tree high_value, tree label_decl)
2128 {
2129 tree t = make_node (CASE_LABEL_EXPR);
2130
2131 TREE_TYPE (t) = void_type_node;
2132 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2133
2134 CASE_LOW (t) = low_value;
2135 CASE_HIGH (t) = high_value;
2136 CASE_LABEL (t) = label_decl;
2137 CASE_CHAIN (t) = NULL_TREE;
2138
2139 return t;
2140 }
2141
2142 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2143 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2144 The latter determines the length of the HOST_WIDE_INT vector. */
2145
2146 tree
2147 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2148 {
2149 tree t;
2150 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2151 + sizeof (struct tree_int_cst));
2152
2153 gcc_assert (len);
2154 record_node_allocation_statistics (INTEGER_CST, length);
2155
2156 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2157
2158 TREE_SET_CODE (t, INTEGER_CST);
2159 TREE_INT_CST_NUNITS (t) = len;
2160 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2161 /* to_offset can only be applied to trees that are offset_int-sized
2162 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2163 must be exactly the precision of offset_int and so LEN is correct. */
2164 if (ext_len <= OFFSET_INT_ELTS)
2165 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2166 else
2167 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2168
2169 TREE_CONSTANT (t) = 1;
2170
2171 return t;
2172 }
2173
2174 /* Build a newly constructed TREE_VEC node of length LEN. */
2175
2176 tree
2177 make_tree_vec_stat (int len MEM_STAT_DECL)
2178 {
2179 tree t;
2180 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2181
2182 record_node_allocation_statistics (TREE_VEC, length);
2183
2184 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2185
2186 TREE_SET_CODE (t, TREE_VEC);
2187 TREE_VEC_LENGTH (t) = len;
2188
2189 return t;
2190 }
2191
2192 /* Grow a TREE_VEC node to new length LEN. */
2193
2194 tree
2195 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2196 {
2197 gcc_assert (TREE_CODE (v) == TREE_VEC);
2198
2199 int oldlen = TREE_VEC_LENGTH (v);
2200 gcc_assert (len > oldlen);
2201
2202 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2203 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2204
2205 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2206
2207 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2208
2209 TREE_VEC_LENGTH (v) = len;
2210
2211 return v;
2212 }
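
/* Illustrative sketch (not part of the original file): a TREE_VEC is a
   fixed-size vector allocated in one block; growing it may reallocate the
   node, so callers must use the returned pointer.  The function name
   example_tree_vec is hypothetical.  Compiled out.  */
#if 0
static tree
example_tree_vec (void)
{
  tree v = make_tree_vec (2);
  TREE_VEC_ELT (v, 0) = integer_zero_node;
  TREE_VEC_ELT (v, 1) = integer_one_node;
  /* grow_tree_vec may move the node; always reassign.  */
  v = grow_tree_vec (v, 4);
  gcc_assert (TREE_VEC_LENGTH (v) == 4);
  return v;
}
#endif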
2213 \f
2214 /* Return 1 if EXPR is the integer constant zero or a complex constant
2215 of zero. */
2216
2217 int
2218 integer_zerop (const_tree expr)
2219 {
2220 STRIP_NOPS (expr);
2221
2222 switch (TREE_CODE (expr))
2223 {
2224 case INTEGER_CST:
2225 return wi::eq_p (expr, 0);
2226 case COMPLEX_CST:
2227 return (integer_zerop (TREE_REALPART (expr))
2228 && integer_zerop (TREE_IMAGPART (expr)));
2229 case VECTOR_CST:
2230 {
2231 unsigned i;
2232 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2233 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2234 return false;
2235 return true;
2236 }
2237 default:
2238 return false;
2239 }
2240 }
2241
2242 /* Return 1 if EXPR is the integer constant one or the corresponding
2243 complex constant. */
2244
2245 int
2246 integer_onep (const_tree expr)
2247 {
2248 STRIP_NOPS (expr);
2249
2250 switch (TREE_CODE (expr))
2251 {
2252 case INTEGER_CST:
2253 return wi::eq_p (wi::to_widest (expr), 1);
2254 case COMPLEX_CST:
2255 return (integer_onep (TREE_REALPART (expr))
2256 && integer_zerop (TREE_IMAGPART (expr)));
2257 case VECTOR_CST:
2258 {
2259 unsigned i;
2260 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2261 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2262 return false;
2263 return true;
2264 }
2265 default:
2266 return false;
2267 }
2268 }
2269
2270 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2271 return 1 if every piece is the integer constant one. */
2272
2273 int
2274 integer_each_onep (const_tree expr)
2275 {
2276 STRIP_NOPS (expr);
2277
2278 if (TREE_CODE (expr) == COMPLEX_CST)
2279 return (integer_onep (TREE_REALPART (expr))
2280 && integer_onep (TREE_IMAGPART (expr)));
2281 else
2282 return integer_onep (expr);
2283 }
2284
2285 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2286 it contains, or a complex or vector whose subparts are such integers. */
2287
2288 int
2289 integer_all_onesp (const_tree expr)
2290 {
2291 STRIP_NOPS (expr);
2292
2293 if (TREE_CODE (expr) == COMPLEX_CST
2294 && integer_all_onesp (TREE_REALPART (expr))
2295 && integer_all_onesp (TREE_IMAGPART (expr)))
2296 return 1;
2297
2298 else if (TREE_CODE (expr) == VECTOR_CST)
2299 {
2300 unsigned i;
2301 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2302 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2303 return 0;
2304 return 1;
2305 }
2306
2307 else if (TREE_CODE (expr) != INTEGER_CST)
2308 return 0;
2309
2310 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2311 }
2312
2313 /* Return 1 if EXPR is the integer constant minus one. */
2314
2315 int
2316 integer_minus_onep (const_tree expr)
2317 {
2318 STRIP_NOPS (expr);
2319
2320 if (TREE_CODE (expr) == COMPLEX_CST)
2321 return (integer_all_onesp (TREE_REALPART (expr))
2322 && integer_zerop (TREE_IMAGPART (expr)));
2323 else
2324 return integer_all_onesp (expr);
2325 }
2326
2327 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2328 one bit on). */
2329
2330 int
2331 integer_pow2p (const_tree expr)
2332 {
2333 STRIP_NOPS (expr);
2334
2335 if (TREE_CODE (expr) == COMPLEX_CST
2336 && integer_pow2p (TREE_REALPART (expr))
2337 && integer_zerop (TREE_IMAGPART (expr)))
2338 return 1;
2339
2340 if (TREE_CODE (expr) != INTEGER_CST)
2341 return 0;
2342
2343 return wi::popcount (expr) == 1;
2344 }
2345
2346 /* Return 1 if EXPR is an integer constant other than zero or a
2347 complex constant other than zero. */
2348
2349 int
2350 integer_nonzerop (const_tree expr)
2351 {
2352 STRIP_NOPS (expr);
2353
2354 return ((TREE_CODE (expr) == INTEGER_CST
2355 && !wi::eq_p (expr, 0))
2356 || (TREE_CODE (expr) == COMPLEX_CST
2357 && (integer_nonzerop (TREE_REALPART (expr))
2358 || integer_nonzerop (TREE_IMAGPART (expr)))));
2359 }
2360
2361 /* Return 1 if EXPR is the integer constant one. For vector,
2362 return 1 if every piece is the integer constant minus one
2363 (representing the value TRUE). */
2364
2365 int
2366 integer_truep (const_tree expr)
2367 {
2368 STRIP_NOPS (expr);
2369
2370 if (TREE_CODE (expr) == VECTOR_CST)
2371 return integer_all_onesp (expr);
2372 return integer_onep (expr);
2373 }
2374
2375 /* Return 1 if EXPR is the fixed-point constant zero. */
2376
2377 int
2378 fixed_zerop (const_tree expr)
2379 {
2380 return (TREE_CODE (expr) == FIXED_CST
2381 && TREE_FIXED_CST (expr).data.is_zero ());
2382 }
2383
2384 /* Return the base-2 logarithm of a tree node known to be a
2385 power of two. */
2386
2387 int
2388 tree_log2 (const_tree expr)
2389 {
2390 STRIP_NOPS (expr);
2391
2392 if (TREE_CODE (expr) == COMPLEX_CST)
2393 return tree_log2 (TREE_REALPART (expr));
2394
2395 return wi::exact_log2 (expr);
2396 }
2397
2398 /* Similar, but return the largest integer Y such that 2 ** Y is less
2399 than or equal to EXPR. */
2400
2401 int
2402 tree_floor_log2 (const_tree expr)
2403 {
2404 STRIP_NOPS (expr);
2405
2406 if (TREE_CODE (expr) == COMPLEX_CST)
2407 return tree_log2 (TREE_REALPART (expr));
2408
2409 return wi::floor_log2 (expr);
2410 }
2411
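/* Illustrative sketch (not part of the original file): how the predicates
   above relate on small constants.  For 8, integer_pow2p holds and both
   logarithms are 3; for 10, only the floor logarithm is meaningful and it
   evaluates to 3.  The function name example_log2_queries is hypothetical.
   Compiled out.  */
#if 0
static void
example_log2_queries (void)
{
  tree eight = build_int_cst (integer_type_node, 8);
  tree ten = build_int_cst (integer_type_node, 10);
  gcc_assert (integer_pow2p (eight) && tree_log2 (eight) == 3);
  gcc_assert (!integer_pow2p (ten) && tree_floor_log2 (ten) == 3);
}
#endif
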
2412 /* Return number of known trailing zero bits in EXPR, or, if the value of
2413 EXPR is known to be zero, the precision of its type. */
2414
2415 unsigned int
2416 tree_ctz (const_tree expr)
2417 {
2418 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2419 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2420 return 0;
2421
2422 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2423 switch (TREE_CODE (expr))
2424 {
2425 case INTEGER_CST:
2426 ret1 = wi::ctz (expr);
2427 return MIN (ret1, prec);
2428 case SSA_NAME:
2429 ret1 = wi::ctz (get_nonzero_bits (expr));
2430 return MIN (ret1, prec);
2431 case PLUS_EXPR:
2432 case MINUS_EXPR:
2433 case BIT_IOR_EXPR:
2434 case BIT_XOR_EXPR:
2435 case MIN_EXPR:
2436 case MAX_EXPR:
2437 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2438 if (ret1 == 0)
2439 return ret1;
2440 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2441 return MIN (ret1, ret2);
2442 case POINTER_PLUS_EXPR:
2443 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2444 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2445 /* The second operand is sizetype, which could in theory be
2446 wider than the pointer's precision. Make sure we never
2447 return more than prec. */
2448 ret2 = MIN (ret2, prec);
2449 return MIN (ret1, ret2);
2450 case BIT_AND_EXPR:
2451 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2452 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2453 return MAX (ret1, ret2);
2454 case MULT_EXPR:
2455 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2456 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2457 return MIN (ret1 + ret2, prec);
2458 case LSHIFT_EXPR:
2459 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2460 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2461 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2462 {
2463 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2464 return MIN (ret1 + ret2, prec);
2465 }
2466 return ret1;
2467 case RSHIFT_EXPR:
2468 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2469 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2470 {
2471 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2472 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2473 if (ret1 > ret2)
2474 return ret1 - ret2;
2475 }
2476 return 0;
2477 case TRUNC_DIV_EXPR:
2478 case CEIL_DIV_EXPR:
2479 case FLOOR_DIV_EXPR:
2480 case ROUND_DIV_EXPR:
2481 case EXACT_DIV_EXPR:
2482 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2483 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2484 {
2485 int l = tree_log2 (TREE_OPERAND (expr, 1));
2486 if (l >= 0)
2487 {
2488 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2489 ret2 = l;
2490 if (ret1 > ret2)
2491 return ret1 - ret2;
2492 }
2493 }
2494 return 0;
2495 CASE_CONVERT:
2496 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2497 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2498 ret1 = prec;
2499 return MIN (ret1, prec);
2500 case SAVE_EXPR:
2501 return tree_ctz (TREE_OPERAND (expr, 0));
2502 case COND_EXPR:
2503 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2504 if (ret1 == 0)
2505 return 0;
2506 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2507 return MIN (ret1, ret2);
2508 case COMPOUND_EXPR:
2509 return tree_ctz (TREE_OPERAND (expr, 1));
2510 case ADDR_EXPR:
2511 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2512 if (ret1 > BITS_PER_UNIT)
2513 {
2514 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2515 return MIN (ret1, prec);
2516 }
2517 return 0;
2518 default:
2519 return 0;
2520 }
2521 }
2522
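/* Illustrative sketch (not part of the original file): tree_ctz combines
   the trailing-zero counts of the operands, e.g. for a multiplication the
   counts add up (capped at the precision), so an operand known to have two
   trailing zeros multiplied by 8 has at least five.  The function name
   example_tree_ctz is hypothetical.  Compiled out.  */
#if 0
static void
example_tree_ctz (void)
{
  /* The INTEGER_CST case: 8 = 0b1000 has three trailing zero bits.  */
  tree eight = build_int_cst (integer_type_node, 8);
  gcc_assert (tree_ctz (eight) == 3);
}
#endif
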
2523 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2524 decimal float constants, so don't return 1 for them. */
2525
2526 int
2527 real_zerop (const_tree expr)
2528 {
2529 STRIP_NOPS (expr);
2530
2531 switch (TREE_CODE (expr))
2532 {
2533 case REAL_CST:
2534 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2535 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2536 case COMPLEX_CST:
2537 return real_zerop (TREE_REALPART (expr))
2538 && real_zerop (TREE_IMAGPART (expr));
2539 case VECTOR_CST:
2540 {
2541 unsigned i;
2542 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2543 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2544 return false;
2545 return true;
2546 }
2547 default:
2548 return false;
2549 }
2550 }
2551
2552 /* Return 1 if EXPR is the real constant one in real or complex form.
2553 Trailing zeroes matter for decimal float constants, so don't return
2554 1 for them. */
2555
2556 int
2557 real_onep (const_tree expr)
2558 {
2559 STRIP_NOPS (expr);
2560
2561 switch (TREE_CODE (expr))
2562 {
2563 case REAL_CST:
2564 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2565 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2566 case COMPLEX_CST:
2567 return real_onep (TREE_REALPART (expr))
2568 && real_zerop (TREE_IMAGPART (expr));
2569 case VECTOR_CST:
2570 {
2571 unsigned i;
2572 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2573 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2574 return false;
2575 return true;
2576 }
2577 default:
2578 return false;
2579 }
2580 }
2581
2582 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2583 matter for decimal float constants, so don't return 1 for them. */
2584
2585 int
2586 real_minus_onep (const_tree expr)
2587 {
2588 STRIP_NOPS (expr);
2589
2590 switch (TREE_CODE (expr))
2591 {
2592 case REAL_CST:
2593 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2594 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2595 case COMPLEX_CST:
2596 return real_minus_onep (TREE_REALPART (expr))
2597 && real_zerop (TREE_IMAGPART (expr));
2598 case VECTOR_CST:
2599 {
2600 unsigned i;
2601 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2602 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2603 return false;
2604 return true;
2605 }
2606 default:
2607 return false;
2608 }
2609 }
2610
2611 /* Nonzero if EXP is a constant or a cast of a constant. */
2612
2613 int
2614 really_constant_p (const_tree exp)
2615 {
2616 /* This is not quite the same as STRIP_NOPS. It does more. */
2617 while (CONVERT_EXPR_P (exp)
2618 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2619 exp = TREE_OPERAND (exp, 0);
2620 return TREE_CONSTANT (exp);
2621 }
2622 \f
2623 /* Return first list element whose TREE_VALUE is ELEM.
2624 Return 0 if ELEM is not in LIST. */
2625
2626 tree
2627 value_member (tree elem, tree list)
2628 {
2629 while (list)
2630 {
2631 if (elem == TREE_VALUE (list))
2632 return list;
2633 list = TREE_CHAIN (list);
2634 }
2635 return NULL_TREE;
2636 }
2637
2638 /* Return first list element whose TREE_PURPOSE is ELEM.
2639 Return 0 if ELEM is not in LIST. */
2640
2641 tree
2642 purpose_member (const_tree elem, tree list)
2643 {
2644 while (list)
2645 {
2646 if (elem == TREE_PURPOSE (list))
2647 return list;
2648 list = TREE_CHAIN (list);
2649 }
2650 return NULL_TREE;
2651 }
2652
2653 /* Return true if ELEM is in V. */
2654
2655 bool
2656 vec_member (const_tree elem, vec<tree, va_gc> *v)
2657 {
2658 unsigned ix;
2659 tree t;
2660 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2661 if (elem == t)
2662 return true;
2663 return false;
2664 }
2665
2666 /* Return element number IDX (zero-origin) of chain CHAIN, or
2667 NULL_TREE if CHAIN does not have that many elements. */
2668
2669 tree
2670 chain_index (int idx, tree chain)
2671 {
2672 for (; chain && idx > 0; --idx)
2673 chain = TREE_CHAIN (chain);
2674 return chain;
2675 }
2676
2677 /* Return nonzero if ELEM is part of the chain CHAIN. */
2678
2679 int
2680 chain_member (const_tree elem, const_tree chain)
2681 {
2682 while (chain)
2683 {
2684 if (elem == chain)
2685 return 1;
2686 chain = DECL_CHAIN (chain);
2687 }
2688
2689 return 0;
2690 }
2691
2692 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2693 We expect a null pointer to mark the end of the chain.
2694 This is the Lisp primitive `length'. */
2695
2696 int
2697 list_length (const_tree t)
2698 {
2699 const_tree p = t;
2700 #ifdef ENABLE_TREE_CHECKING
2701 const_tree q = t;
2702 #endif
2703 int len = 0;
2704
2705 while (p)
2706 {
2707 p = TREE_CHAIN (p);
2708 #ifdef ENABLE_TREE_CHECKING
2709 if (len % 2)
2710 q = TREE_CHAIN (q);
2711 gcc_assert (p != q);
2712 #endif
2713 len++;
2714 }
2715
2716 return len;
2717 }
2718
2719 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2720 UNION_TYPE TYPE, or NULL_TREE if none. */
2721
2722 tree
2723 first_field (const_tree type)
2724 {
2725 tree t = TYPE_FIELDS (type);
2726 while (t && TREE_CODE (t) != FIELD_DECL)
2727 t = TREE_CHAIN (t);
2728 return t;
2729 }
2730
2731 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2732 by modifying the last node in chain 1 to point to chain 2.
2733 This is the Lisp primitive `nconc'. */
2734
2735 tree
2736 chainon (tree op1, tree op2)
2737 {
2738 tree t1;
2739
2740 if (!op1)
2741 return op2;
2742 if (!op2)
2743 return op1;
2744
2745 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2746 continue;
2747 TREE_CHAIN (t1) = op2;
2748
2749 #ifdef ENABLE_TREE_CHECKING
2750 {
2751 tree t2;
2752 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2753 gcc_assert (t2 != t1);
2754 }
2755 #endif
2756
2757 return op1;
2758 }
2759
2760 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2761
2762 tree
2763 tree_last (tree chain)
2764 {
2765 tree next;
2766 if (chain)
2767 while ((next = TREE_CHAIN (chain)))
2768 chain = next;
2769 return chain;
2770 }
2771
2772 /* Reverse the order of elements in the chain T,
2773 and return the new head of the chain (old last element). */
2774
2775 tree
2776 nreverse (tree t)
2777 {
2778 tree prev = 0, decl, next;
2779 for (decl = t; decl; decl = next)
2780 {
2781 /* We shouldn't be using this function to reverse BLOCK chains; we
2782 have blocks_nreverse for that. */
2783 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2784 next = TREE_CHAIN (decl);
2785 TREE_CHAIN (decl) = prev;
2786 prev = decl;
2787 }
2788 return prev;
2789 }
2790 \f
2791 /* Return a newly created TREE_LIST node whose
2792 purpose and value fields are PARM and VALUE. */
2793
2794 tree
2795 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2796 {
2797 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2798 TREE_PURPOSE (t) = parm;
2799 TREE_VALUE (t) = value;
2800 return t;
2801 }
2802
2803 /* Build a chain of TREE_LIST nodes from a vector. */
2804
2805 tree
2806 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2807 {
2808 tree ret = NULL_TREE;
2809 tree *pp = &ret;
2810 unsigned int i;
2811 tree t;
2812 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2813 {
2814 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2815 pp = &TREE_CHAIN (*pp);
2816 }
2817 return ret;
2818 }
2819
2820 /* Return a newly created TREE_LIST node whose
2821 purpose and value fields are PURPOSE and VALUE
2822 and whose TREE_CHAIN is CHAIN. */
2823
2824 tree
2825 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2826 {
2827 tree node;
2828
2829 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2830 memset (node, 0, sizeof (struct tree_common));
2831
2832 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2833
2834 TREE_SET_CODE (node, TREE_LIST);
2835 TREE_CHAIN (node) = chain;
2836 TREE_PURPOSE (node) = purpose;
2837 TREE_VALUE (node) = value;
2838 return node;
2839 }
2840
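/* Illustrative sketch (not part of the original file): the TREE_LIST
   primitives behave like their Lisp namesakes - tree_cons pushes in front,
   nreverse flips in place, list_length counts, value_member searches.  The
   function name example_tree_list is hypothetical.  Compiled out.  */
#if 0
static void
example_tree_list (void)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);
  /* Build (2 1) by consing, then reverse it into (1 2).  */
  tree list = tree_cons (NULL_TREE, one, NULL_TREE);
  list = tree_cons (NULL_TREE, two, list);
  list = nreverse (list);
  gcc_assert (list_length (list) == 2);
  gcc_assert (TREE_VALUE (list) == one);
  gcc_assert (value_member (two, list) != NULL_TREE);
}
#endif
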
2841 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2842 trees. */
2843
2844 vec<tree, va_gc> *
2845 ctor_to_vec (tree ctor)
2846 {
2847 vec<tree, va_gc> *vec;
2848 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2849 unsigned int ix;
2850 tree val;
2851
2852 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2853 vec->quick_push (val);
2854
2855 return vec;
2856 }
2857 \f
2858 /* Return the size nominally occupied by an object of type TYPE
2859 when it resides in memory. The value is measured in units of bytes,
2860 and its data type is that normally used for type sizes
2861 (which is the first type created by make_signed_type or
2862 make_unsigned_type). */
2863
2864 tree
2865 size_in_bytes (const_tree type)
2866 {
2867 tree t;
2868
2869 if (type == error_mark_node)
2870 return integer_zero_node;
2871
2872 type = TYPE_MAIN_VARIANT (type);
2873 t = TYPE_SIZE_UNIT (type);
2874
2875 if (t == 0)
2876 {
2877 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2878 return size_zero_node;
2879 }
2880
2881 return t;
2882 }
2883
2884 /* Return the size of TYPE (in bytes) as a wide integer
2885 or return -1 if the size can vary or is larger than an integer. */
2886
2887 HOST_WIDE_INT
2888 int_size_in_bytes (const_tree type)
2889 {
2890 tree t;
2891
2892 if (type == error_mark_node)
2893 return 0;
2894
2895 type = TYPE_MAIN_VARIANT (type);
2896 t = TYPE_SIZE_UNIT (type);
2897
2898 if (t && tree_fits_uhwi_p (t))
2899 return TREE_INT_CST_LOW (t);
2900 else
2901 return -1;
2902 }
2903
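/* Illustrative sketch (not part of the original file): int_size_in_bytes
   collapses to a host integer when the size is a known, small constant and
   returns -1 otherwise, whereas size_in_bytes always yields a tree.  The
   function name example_type_sizes is hypothetical.  Compiled out.  */
#if 0
static void
example_type_sizes (void)
{
  /* For a complete scalar type both queries agree.  */
  HOST_WIDE_INT n = int_size_in_bytes (integer_type_node);
  tree t = size_in_bytes (integer_type_node);
  gcc_assert (n != -1 && compare_tree_int (t, n) == 0);
  /* An incomplete or variable-sized type would give -1 instead.  */
}
#endif
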
2904 /* Return the maximum size of TYPE (in bytes) as a wide integer
2905 or return -1 if the size can vary or is larger than an integer. */
2906
2907 HOST_WIDE_INT
2908 max_int_size_in_bytes (const_tree type)
2909 {
2910 HOST_WIDE_INT size = -1;
2911 tree size_tree;
2912
2913 /* If this is an array type, check for a possible MAX_SIZE attached. */
2914
2915 if (TREE_CODE (type) == ARRAY_TYPE)
2916 {
2917 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2918
2919 if (size_tree && tree_fits_uhwi_p (size_tree))
2920 size = tree_to_uhwi (size_tree);
2921 }
2922
2923 /* If we still haven't been able to get a size, see if the language
2924 can compute a maximum size. */
2925
2926 if (size == -1)
2927 {
2928 size_tree = lang_hooks.types.max_size (type);
2929
2930 if (size_tree && tree_fits_uhwi_p (size_tree))
2931 size = tree_to_uhwi (size_tree);
2932 }
2933
2934 return size;
2935 }
2936 \f
2937 /* Return the bit position of FIELD, in bits from the start of the record.
2938 This is a tree of type bitsizetype. */
2939
2940 tree
2941 bit_position (const_tree field)
2942 {
2943 return bit_from_pos (DECL_FIELD_OFFSET (field),
2944 DECL_FIELD_BIT_OFFSET (field));
2945 }
2946 \f
2947 /* Return the byte position of FIELD, in bytes from the start of the record.
2948 This is a tree of type sizetype. */
2949
2950 tree
2951 byte_position (const_tree field)
2952 {
2953 return byte_from_pos (DECL_FIELD_OFFSET (field),
2954 DECL_FIELD_BIT_OFFSET (field));
2955 }
2956
2957 /* Likewise, but return as an integer. It must be representable in
2958 that way (since it could be a signed value, we don't have the
2959 option of returning -1 like int_size_in_bytes can). */
2960
2961 HOST_WIDE_INT
2962 int_byte_position (const_tree field)
2963 {
2964 return tree_to_shwi (byte_position (field));
2965 }
2966 \f
2967 /* Return the strictest alignment, in bits, that T is known to have. */
2968
2969 unsigned int
2970 expr_align (const_tree t)
2971 {
2972 unsigned int align0, align1;
2973
2974 switch (TREE_CODE (t))
2975 {
2976 CASE_CONVERT: case NON_LVALUE_EXPR:
2977 /* If we have conversions, we know that the alignment of the
2978 object must meet each of the alignments of the types. */
2979 align0 = expr_align (TREE_OPERAND (t, 0));
2980 align1 = TYPE_ALIGN (TREE_TYPE (t));
2981 return MAX (align0, align1);
2982
2983 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2984 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2985 case CLEANUP_POINT_EXPR:
2986 /* These don't change the alignment of an object. */
2987 return expr_align (TREE_OPERAND (t, 0));
2988
2989 case COND_EXPR:
2990 /* The best we can do is say that the alignment is the least aligned
2991 of the two arms. */
2992 align0 = expr_align (TREE_OPERAND (t, 1));
2993 align1 = expr_align (TREE_OPERAND (t, 2));
2994 return MIN (align0, align1);
2995
2996 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2997 meaningfully, it's always 1. */
2998 case LABEL_DECL: case CONST_DECL:
2999 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3000 case FUNCTION_DECL:
3001 gcc_assert (DECL_ALIGN (t) != 0);
3002 return DECL_ALIGN (t);
3003
3004 default:
3005 break;
3006 }
3007
3008 /* Otherwise take the alignment from that of the type. */
3009 return TYPE_ALIGN (TREE_TYPE (t));
3010 }
3011 \f
3012 /* Return, as a tree node, the number of elements for TYPE (which is an
3013 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3014
3015 tree
3016 array_type_nelts (const_tree type)
3017 {
3018 tree index_type, min, max;
3019
3020 /* If they did it with unspecified bounds, then we should have already
3021 given an error about it before we got here. */
3022 if (! TYPE_DOMAIN (type))
3023 return error_mark_node;
3024
3025 index_type = TYPE_DOMAIN (type);
3026 min = TYPE_MIN_VALUE (index_type);
3027 max = TYPE_MAX_VALUE (index_type);
3028
3029 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3030 if (!max)
3031 return error_mark_node;
3032
3033 return (integer_zerop (min)
3034 ? max
3035 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3036 }
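
/* Illustrative sketch (not part of the original file): for a complete
   array type such as int[10] the function above returns the upper bound 9
   (number of elements minus one).  The function name example_array_nelts
   is hypothetical.  Compiled out.  */
#if 0
static void
example_array_nelts (void)
{
  tree domain = build_index_type (size_int (9));	    /* 0 .. 9  */
  tree arr = build_array_type (integer_type_node, domain); /* int[10]  */
  gcc_assert (tree_to_shwi (array_type_nelts (arr)) == 9);
}
#endif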
3037 \f
3038 /* If arg is static -- a reference to an object in static storage -- then
3039 return the object. This is not the same as the C meaning of `static'.
3040 If arg isn't static, return NULL. */
3041
3042 tree
3043 staticp (tree arg)
3044 {
3045 switch (TREE_CODE (arg))
3046 {
3047 case FUNCTION_DECL:
3048 /* Nested functions are static, even though taking their address will
3049 involve a trampoline as we unnest the nested function and create
3050 the trampoline on the tree level. */
3051 return arg;
3052
3053 case VAR_DECL:
3054 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3055 && ! DECL_THREAD_LOCAL_P (arg)
3056 && ! DECL_DLLIMPORT_P (arg)
3057 ? arg : NULL);
3058
3059 case CONST_DECL:
3060 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3061 ? arg : NULL);
3062
3063 case CONSTRUCTOR:
3064 return TREE_STATIC (arg) ? arg : NULL;
3065
3066 case LABEL_DECL:
3067 case STRING_CST:
3068 return arg;
3069
3070 case COMPONENT_REF:
3071 /* If the thing being referenced is not a field, then it is
3072 something language specific. */
3073 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3074
3075 /* If we are referencing a bitfield, we can't evaluate an
3076 ADDR_EXPR at compile time and so it isn't a constant. */
3077 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3078 return NULL;
3079
3080 return staticp (TREE_OPERAND (arg, 0));
3081
3082 case BIT_FIELD_REF:
3083 return NULL;
3084
3085 case INDIRECT_REF:
3086 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3087
3088 case ARRAY_REF:
3089 case ARRAY_RANGE_REF:
3090 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3091 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3092 return staticp (TREE_OPERAND (arg, 0));
3093 else
3094 return NULL;
3095
3096 case COMPOUND_LITERAL_EXPR:
3097 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3098
3099 default:
3100 return NULL;
3101 }
3102 }
3103
3104 \f
3105
3106
3107 /* Return whether OP is a DECL whose address is function-invariant. */
3108
3109 bool
3110 decl_address_invariant_p (const_tree op)
3111 {
3112 /* The conditions below are slightly less strict than the ones in
3113 staticp. */
3114
3115 switch (TREE_CODE (op))
3116 {
3117 case PARM_DECL:
3118 case RESULT_DECL:
3119 case LABEL_DECL:
3120 case FUNCTION_DECL:
3121 return true;
3122
3123 case VAR_DECL:
3124 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3125 || DECL_THREAD_LOCAL_P (op)
3126 || DECL_CONTEXT (op) == current_function_decl
3127 || decl_function_context (op) == current_function_decl)
3128 return true;
3129 break;
3130
3131 case CONST_DECL:
3132 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3133 || decl_function_context (op) == current_function_decl)
3134 return true;
3135 break;
3136
3137 default:
3138 break;
3139 }
3140
3141 return false;
3142 }
3143
3144 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3145
3146 bool
3147 decl_address_ip_invariant_p (const_tree op)
3148 {
3149 /* The conditions below are slightly less strict than the ones in
3150 staticp. */
3151
3152 switch (TREE_CODE (op))
3153 {
3154 case LABEL_DECL:
3155 case FUNCTION_DECL:
3156 case STRING_CST:
3157 return true;
3158
3159 case VAR_DECL:
3160 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3161 && !DECL_DLLIMPORT_P (op))
3162 || DECL_THREAD_LOCAL_P (op))
3163 return true;
3164 break;
3165
3166 case CONST_DECL:
3167 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3168 return true;
3169 break;
3170
3171 default:
3172 break;
3173 }
3174
3175 return false;
3176 }
3177
3178
3179 /* Return true if T is function-invariant (internal function, does
3180 not handle arithmetic; that's handled in skip_simple_arithmetic and
3181 tree_invariant_p). */
3182
3183 static bool tree_invariant_p (tree t);
3184
3185 static bool
3186 tree_invariant_p_1 (tree t)
3187 {
3188 tree op;
3189
3190 if (TREE_CONSTANT (t)
3191 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3192 return true;
3193
3194 switch (TREE_CODE (t))
3195 {
3196 case SAVE_EXPR:
3197 return true;
3198
3199 case ADDR_EXPR:
3200 op = TREE_OPERAND (t, 0);
3201 while (handled_component_p (op))
3202 {
3203 switch (TREE_CODE (op))
3204 {
3205 case ARRAY_REF:
3206 case ARRAY_RANGE_REF:
3207 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3208 || TREE_OPERAND (op, 2) != NULL_TREE
3209 || TREE_OPERAND (op, 3) != NULL_TREE)
3210 return false;
3211 break;
3212
3213 case COMPONENT_REF:
3214 if (TREE_OPERAND (op, 2) != NULL_TREE)
3215 return false;
3216 break;
3217
3218 default:;
3219 }
3220 op = TREE_OPERAND (op, 0);
3221 }
3222
3223 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3224
3225 default:
3226 break;
3227 }
3228
3229 return false;
3230 }
3231
3232 /* Return true if T is function-invariant. */
3233
3234 static bool
3235 tree_invariant_p (tree t)
3236 {
3237 tree inner = skip_simple_arithmetic (t);
3238 return tree_invariant_p_1 (inner);
3239 }
3240
3241 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3242 Do this to any expression which may be used in more than one place,
3243 but must be evaluated only once.
3244
3245 Normally, expand_expr would reevaluate the expression each time.
3246 Calling save_expr produces something that is evaluated and recorded
3247 the first time expand_expr is called on it. Subsequent calls to
3248 expand_expr just reuse the recorded value.
3249
3250 The call to expand_expr that generates code that actually computes
3251 the value is the first call *at compile time*. Subsequent calls
3252 *at compile time* generate code to use the saved value.
3253 This produces correct result provided that *at run time* control
3254 always flows through the insns made by the first expand_expr
3255 before reaching the other places where the save_expr was evaluated.
3256 You, the caller of save_expr, must make sure this is so.
3257
3258 Constants, and certain read-only nodes, are returned with no
3259 SAVE_EXPR because that is safe. Expressions containing placeholders
3260 are not touched; see tree.def for an explanation of what these
3261 are used for. */
3262
3263 tree
3264 save_expr (tree expr)
3265 {
3266 tree t = fold (expr);
3267 tree inner;
3268
3269 /* If the tree evaluates to a constant, then we don't want to hide that
3270 fact (i.e. this allows further folding, and direct checks for constants).
3271 However, a read-only object that has side effects cannot be bypassed.
3272 Since it is no problem to reevaluate literals, we just return the
3273 literal node. */
3274 inner = skip_simple_arithmetic (t);
3275 if (TREE_CODE (inner) == ERROR_MARK)
3276 return inner;
3277
3278 if (tree_invariant_p_1 (inner))
3279 return t;
3280
3281 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3282 it means that the size or offset of some field of an object depends on
3283 the value within another field.
3284
3285 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3286 and some variable since it would then need to be both evaluated once and
3287 evaluated more than once. Front-ends must assure this case cannot
3288 happen by surrounding any such subexpressions in their own SAVE_EXPR
3289 and forcing evaluation at the proper time. */
3290 if (contains_placeholder_p (inner))
3291 return t;
3292
3293 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3294 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3295
3296 /* This expression might be placed ahead of a jump to ensure that the
3297 value was computed on both sides of the jump. So make sure it isn't
3298 eliminated as dead. */
3299 TREE_SIDE_EFFECTS (t) = 1;
3300 return t;
3301 }
3302
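/* Illustrative sketch (not part of the original file): a front end that
   needs to reference the same possibly side-effecting expression twice,
   e.g. to build a clamped value, wraps it once with save_expr and reuses
   the wrapper so the expression is evaluated only once.  The names below
   are hypothetical.  Compiled out.  */
#if 0
static tree
example_save_expr (tree expr, tree limit)
{
  /* EXPR is evaluated once; both uses below share the saved value.  */
  tree saved = save_expr (expr);
  return fold_build3 (COND_EXPR, TREE_TYPE (saved),
		      fold_build2 (LT_EXPR, boolean_type_node, saved, limit),
		      saved, limit);
}
#endif
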
3303 /* Look inside EXPR into any simple arithmetic operations. Return the
3304 outermost non-arithmetic or non-invariant node. */
3305
3306 tree
3307 skip_simple_arithmetic (tree expr)
3308 {
3309 /* We don't care about whether this can be used as an lvalue in this
3310 context. */
3311 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3312 expr = TREE_OPERAND (expr, 0);
3313
3314 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3315 a constant, it will be more efficient to not make another SAVE_EXPR since
3316 it will allow better simplification and GCSE will be able to merge the
3317 computations if they actually occur. */
3318 while (true)
3319 {
3320 if (UNARY_CLASS_P (expr))
3321 expr = TREE_OPERAND (expr, 0);
3322 else if (BINARY_CLASS_P (expr))
3323 {
3324 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3325 expr = TREE_OPERAND (expr, 0);
3326 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3327 expr = TREE_OPERAND (expr, 1);
3328 else
3329 break;
3330 }
3331 else
3332 break;
3333 }
3334
3335 return expr;
3336 }
3337
3338 /* Look inside EXPR into simple arithmetic operations involving constants.
3339 Return the outermost non-arithmetic or non-constant node. */
3340
3341 tree
3342 skip_simple_constant_arithmetic (tree expr)
3343 {
3344 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3345 expr = TREE_OPERAND (expr, 0);
3346
3347 while (true)
3348 {
3349 if (UNARY_CLASS_P (expr))
3350 expr = TREE_OPERAND (expr, 0);
3351 else if (BINARY_CLASS_P (expr))
3352 {
3353 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3354 expr = TREE_OPERAND (expr, 0);
3355 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3356 expr = TREE_OPERAND (expr, 1);
3357 else
3358 break;
3359 }
3360 else
3361 break;
3362 }
3363
3364 return expr;
3365 }
3366
3367 /* Return which tree structure is used by T. */
3368
3369 enum tree_node_structure_enum
3370 tree_node_structure (const_tree t)
3371 {
3372 const enum tree_code code = TREE_CODE (t);
3373 return tree_node_structure_for_code (code);
3374 }
3375
3376 /* Set various status flags when building a CALL_EXPR object T. */
3377
3378 static void
3379 process_call_operands (tree t)
3380 {
3381 bool side_effects = TREE_SIDE_EFFECTS (t);
3382 bool read_only = false;
3383 int i = call_expr_flags (t);
3384
3385 /* Calls have side-effects, except those to const or pure functions. */
3386 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3387 side_effects = true;
3388 /* Propagate TREE_READONLY of arguments for const functions. */
3389 if (i & ECF_CONST)
3390 read_only = true;
3391
3392 if (!side_effects || read_only)
3393 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3394 {
3395 tree op = TREE_OPERAND (t, i);
3396 if (op && TREE_SIDE_EFFECTS (op))
3397 side_effects = true;
3398 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3399 read_only = false;
3400 }
3401
3402 TREE_SIDE_EFFECTS (t) = side_effects;
3403 TREE_READONLY (t) = read_only;
3404 }
3405 \f
3406 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3407 size or offset that depends on a field within a record. */
3408
3409 bool
3410 contains_placeholder_p (const_tree exp)
3411 {
3412 enum tree_code code;
3413
3414 if (!exp)
3415 return 0;
3416
3417 code = TREE_CODE (exp);
3418 if (code == PLACEHOLDER_EXPR)
3419 return 1;
3420
3421 switch (TREE_CODE_CLASS (code))
3422 {
3423 case tcc_reference:
3424 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3425 position computations since they will be converted into a
3426 WITH_RECORD_EXPR involving the reference, which we assume
3427 here will be valid. */
3428 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3429
3430 case tcc_exceptional:
3431 if (code == TREE_LIST)
3432 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3433 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3434 break;
3435
3436 case tcc_unary:
3437 case tcc_binary:
3438 case tcc_comparison:
3439 case tcc_expression:
3440 switch (code)
3441 {
3442 case COMPOUND_EXPR:
3443 /* Ignoring the first operand isn't quite right, but works best. */
3444 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3445
3446 case COND_EXPR:
3447 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3448 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3449 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3450
3451 case SAVE_EXPR:
3452 /* The save_expr function never wraps anything containing
3453 a PLACEHOLDER_EXPR. */
3454 return 0;
3455
3456 default:
3457 break;
3458 }
3459
3460 switch (TREE_CODE_LENGTH (code))
3461 {
3462 case 1:
3463 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3464 case 2:
3465 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3466 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3467 default:
3468 return 0;
3469 }
3470
3471 case tcc_vl_exp:
3472 switch (code)
3473 {
3474 case CALL_EXPR:
3475 {
3476 const_tree arg;
3477 const_call_expr_arg_iterator iter;
3478 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3479 if (CONTAINS_PLACEHOLDER_P (arg))
3480 return 1;
3481 return 0;
3482 }
3483 default:
3484 return 0;
3485 }
3486
3487 default:
3488 return 0;
3489 }
3490 return 0;
3491 }
3492
3493 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3494 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3495 field positions. */
3496
3497 static bool
3498 type_contains_placeholder_1 (const_tree type)
3499 {
3500 /* If the size contains a placeholder or the parent type (component type in
3501 the case of arrays) type involves a placeholder, this type does. */
3502 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3503 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3504 || (!POINTER_TYPE_P (type)
3505 && TREE_TYPE (type)
3506 && type_contains_placeholder_p (TREE_TYPE (type))))
3507 return true;
3508
3509 /* Now do type-specific checks. Note that the last part of the check above
3510 greatly limits what we have to do below. */
3511 switch (TREE_CODE (type))
3512 {
3513 case VOID_TYPE:
3514 case POINTER_BOUNDS_TYPE:
3515 case COMPLEX_TYPE:
3516 case ENUMERAL_TYPE:
3517 case BOOLEAN_TYPE:
3518 case POINTER_TYPE:
3519 case OFFSET_TYPE:
3520 case REFERENCE_TYPE:
3521 case METHOD_TYPE:
3522 case FUNCTION_TYPE:
3523 case VECTOR_TYPE:
3524 case NULLPTR_TYPE:
3525 return false;
3526
3527 case INTEGER_TYPE:
3528 case REAL_TYPE:
3529 case FIXED_POINT_TYPE:
3530 /* Here we just check the bounds. */
3531 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3532 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3533
3534 case ARRAY_TYPE:
3535 /* We have already checked the component type above, so just check the
3536 domain type. */
3537 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3538
3539 case RECORD_TYPE:
3540 case UNION_TYPE:
3541 case QUAL_UNION_TYPE:
3542 {
3543 tree field;
3544
3545 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3546 if (TREE_CODE (field) == FIELD_DECL
3547 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3548 || (TREE_CODE (type) == QUAL_UNION_TYPE
3549 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3550 || type_contains_placeholder_p (TREE_TYPE (field))))
3551 return true;
3552
3553 return false;
3554 }
3555
3556 default:
3557 gcc_unreachable ();
3558 }
3559 }
3560
3561 /* Wrapper around above function used to cache its result. */
3562
3563 bool
3564 type_contains_placeholder_p (tree type)
3565 {
3566 bool result;
3567
3568 /* If the contains_placeholder_bits field has been initialized,
3569 then we know the answer. */
3570 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3571 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3572
3573 /* Indicate that we've seen this type node, and the answer is false.
3574 This is what we want to return if we run into recursion via fields. */
3575 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3576
3577 /* Compute the real value. */
3578 result = type_contains_placeholder_1 (type);
3579
3580 /* Store the real value. */
3581 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3582
3583 return result;
3584 }
3585 \f
3586 /* Push tree EXP onto vector QUEUE if it is not already present. */
3587
3588 static void
3589 push_without_duplicates (tree exp, vec<tree> *queue)
3590 {
3591 unsigned int i;
3592 tree iter;
3593
3594 FOR_EACH_VEC_ELT (*queue, i, iter)
3595 if (simple_cst_equal (iter, exp) == 1)
3596 break;
3597
3598 if (!iter)
3599 queue->safe_push (exp);
3600 }
3601
3602 /* Given a tree EXP, find all occurrences of references to fields
3603 in a PLACEHOLDER_EXPR and place them in vector REFS without
3604 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3605 we assume here that EXP contains only arithmetic expressions
3606 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3607 argument list. */
3608
3609 void
3610 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3611 {
3612 enum tree_code code = TREE_CODE (exp);
3613 tree inner;
3614 int i;
3615
3616 /* We handle TREE_LIST and COMPONENT_REF separately. */
3617 if (code == TREE_LIST)
3618 {
3619 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3620 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3621 }
3622 else if (code == COMPONENT_REF)
3623 {
3624 for (inner = TREE_OPERAND (exp, 0);
3625 REFERENCE_CLASS_P (inner);
3626 inner = TREE_OPERAND (inner, 0))
3627 ;
3628
3629 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3630 push_without_duplicates (exp, refs);
3631 else
3632 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3633 }
3634 else
3635 switch (TREE_CODE_CLASS (code))
3636 {
3637 case tcc_constant:
3638 break;
3639
3640 case tcc_declaration:
3641 /* Variables allocated to static storage can stay. */
3642 if (!TREE_STATIC (exp))
3643 push_without_duplicates (exp, refs);
3644 break;
3645
3646 case tcc_expression:
3647 /* This is the pattern built in ada/make_aligning_type. */
3648 if (code == ADDR_EXPR
3649 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3650 {
3651 push_without_duplicates (exp, refs);
3652 break;
3653 }
3654
3655 /* Fall through... */
3656
3657 case tcc_exceptional:
3658 case tcc_unary:
3659 case tcc_binary:
3660 case tcc_comparison:
3661 case tcc_reference:
3662 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3663 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3664 break;
3665
3666 case tcc_vl_exp:
3667 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3668 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3669 break;
3670
3671 default:
3672 gcc_unreachable ();
3673 }
3674 }
3675
3676 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3677 return a tree with all occurrences of references to F in a
3678 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3679 CONST_DECLs. Note that we assume here that EXP contains only
3680 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3681 occurring only in their argument list. */
3682
3683 tree
3684 substitute_in_expr (tree exp, tree f, tree r)
3685 {
3686 enum tree_code code = TREE_CODE (exp);
3687 tree op0, op1, op2, op3;
3688 tree new_tree;
3689
3690 /* We handle TREE_LIST and COMPONENT_REF separately. */
3691 if (code == TREE_LIST)
3692 {
3693 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3694 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3695 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3696 return exp;
3697
3698 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3699 }
3700 else if (code == COMPONENT_REF)
3701 {
3702 tree inner;
3703
3704 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3705 and it is the right field, replace it with R. */
3706 for (inner = TREE_OPERAND (exp, 0);
3707 REFERENCE_CLASS_P (inner);
3708 inner = TREE_OPERAND (inner, 0))
3709 ;
3710
3711 /* The field. */
3712 op1 = TREE_OPERAND (exp, 1);
3713
3714 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3715 return r;
3716
3717 /* If this expression hasn't been completed yet, leave it alone. */
3718 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3719 return exp;
3720
3721 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3722 if (op0 == TREE_OPERAND (exp, 0))
3723 return exp;
3724
3725 new_tree
3726 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3727 }
3728 else
3729 switch (TREE_CODE_CLASS (code))
3730 {
3731 case tcc_constant:
3732 return exp;
3733
3734 case tcc_declaration:
3735 if (exp == f)
3736 return r;
3737 else
3738 return exp;
3739
3740 case tcc_expression:
3741 if (exp == f)
3742 return r;
3743
3744 /* Fall through... */
3745
3746 case tcc_exceptional:
3747 case tcc_unary:
3748 case tcc_binary:
3749 case tcc_comparison:
3750 case tcc_reference:
3751 switch (TREE_CODE_LENGTH (code))
3752 {
3753 case 0:
3754 return exp;
3755
3756 case 1:
3757 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3758 if (op0 == TREE_OPERAND (exp, 0))
3759 return exp;
3760
3761 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3762 break;
3763
3764 case 2:
3765 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3766 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3767
3768 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3769 return exp;
3770
3771 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3772 break;
3773
3774 case 3:
3775 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3776 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3777 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3778
3779 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3780 && op2 == TREE_OPERAND (exp, 2))
3781 return exp;
3782
3783 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3784 break;
3785
3786 case 4:
3787 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3788 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3789 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3790 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3791
3792 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3793 && op2 == TREE_OPERAND (exp, 2)
3794 && op3 == TREE_OPERAND (exp, 3))
3795 return exp;
3796
3797 new_tree
3798 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3799 break;
3800
3801 default:
3802 gcc_unreachable ();
3803 }
3804 break;
3805
3806 case tcc_vl_exp:
3807 {
3808 int i;
3809
3810 new_tree = NULL_TREE;
3811
3812 /* If we are trying to replace F with a constant, inline back
3813 functions which do nothing else than computing a value from
3814 the arguments they are passed. This makes it possible to
3815 fold partially or entirely the replacement expression. */
3816 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3817 {
3818 tree t = maybe_inline_call_in_expr (exp);
3819 if (t)
3820 return SUBSTITUTE_IN_EXPR (t, f, r);
3821 }
3822
3823 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3824 {
3825 tree op = TREE_OPERAND (exp, i);
3826 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3827 if (new_op != op)
3828 {
3829 if (!new_tree)
3830 new_tree = copy_node (exp);
3831 TREE_OPERAND (new_tree, i) = new_op;
3832 }
3833 }
3834
3835 if (new_tree)
3836 {
3837 new_tree = fold (new_tree);
3838 if (TREE_CODE (new_tree) == CALL_EXPR)
3839 process_call_operands (new_tree);
3840 }
3841 else
3842 return exp;
3843 }
3844 break;
3845
3846 default:
3847 gcc_unreachable ();
3848 }
3849
3850 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3851
3852 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3853 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3854
3855 return new_tree;
3856 }
3857
3858 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3859 for it within OBJ, a tree that is an object or a chain of references. */
3860
3861 tree
3862 substitute_placeholder_in_expr (tree exp, tree obj)
3863 {
3864 enum tree_code code = TREE_CODE (exp);
3865 tree op0, op1, op2, op3;
3866 tree new_tree;
3867
3868 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3869 in the chain of OBJ. */
3870 if (code == PLACEHOLDER_EXPR)
3871 {
3872 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3873 tree elt;
3874
3875 for (elt = obj; elt != 0;
3876 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3877 || TREE_CODE (elt) == COND_EXPR)
3878 ? TREE_OPERAND (elt, 1)
3879 : (REFERENCE_CLASS_P (elt)
3880 || UNARY_CLASS_P (elt)
3881 || BINARY_CLASS_P (elt)
3882 || VL_EXP_CLASS_P (elt)
3883 || EXPRESSION_CLASS_P (elt))
3884 ? TREE_OPERAND (elt, 0) : 0))
3885 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3886 return elt;
3887
3888 for (elt = obj; elt != 0;
3889 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3890 || TREE_CODE (elt) == COND_EXPR)
3891 ? TREE_OPERAND (elt, 1)
3892 : (REFERENCE_CLASS_P (elt)
3893 || UNARY_CLASS_P (elt)
3894 || BINARY_CLASS_P (elt)
3895 || VL_EXP_CLASS_P (elt)
3896 || EXPRESSION_CLASS_P (elt))
3897 ? TREE_OPERAND (elt, 0) : 0))
3898 if (POINTER_TYPE_P (TREE_TYPE (elt))
3899 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3900 == need_type))
3901 return fold_build1 (INDIRECT_REF, need_type, elt);
3902
3903 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3904 survives until RTL generation, there will be an error. */
3905 return exp;
3906 }
3907
3908 /* TREE_LIST is special because we need to look at TREE_VALUE
3909 and TREE_CHAIN, not TREE_OPERANDS. */
3910 else if (code == TREE_LIST)
3911 {
3912 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3913 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3914 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3915 return exp;
3916
3917 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3918 }
3919 else
3920 switch (TREE_CODE_CLASS (code))
3921 {
3922 case tcc_constant:
3923 case tcc_declaration:
3924 return exp;
3925
3926 case tcc_exceptional:
3927 case tcc_unary:
3928 case tcc_binary:
3929 case tcc_comparison:
3930 case tcc_expression:
3931 case tcc_reference:
3932 case tcc_statement:
3933 switch (TREE_CODE_LENGTH (code))
3934 {
3935 case 0:
3936 return exp;
3937
3938 case 1:
3939 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3940 if (op0 == TREE_OPERAND (exp, 0))
3941 return exp;
3942
3943 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3944 break;
3945
3946 case 2:
3947 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3948 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3949
3950 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3951 return exp;
3952
3953 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3954 break;
3955
3956 case 3:
3957 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3958 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3959 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3960
3961 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3962 && op2 == TREE_OPERAND (exp, 2))
3963 return exp;
3964
3965 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3966 break;
3967
3968 case 4:
3969 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3970 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3971 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3972 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3973
3974 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3975 && op2 == TREE_OPERAND (exp, 2)
3976 && op3 == TREE_OPERAND (exp, 3))
3977 return exp;
3978
3979 new_tree
3980 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3981 break;
3982
3983 default:
3984 gcc_unreachable ();
3985 }
3986 break;
3987
3988 case tcc_vl_exp:
3989 {
3990 int i;
3991
3992 new_tree = NULL_TREE;
3993
3994 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3995 {
3996 tree op = TREE_OPERAND (exp, i);
3997 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3998 if (new_op != op)
3999 {
4000 if (!new_tree)
4001 new_tree = copy_node (exp);
4002 TREE_OPERAND (new_tree, i) = new_op;
4003 }
4004 }
4005
4006 if (new_tree)
4007 {
4008 new_tree = fold (new_tree);
4009 if (TREE_CODE (new_tree) == CALL_EXPR)
4010 process_call_operands (new_tree);
4011 }
4012 else
4013 return exp;
4014 }
4015 break;
4016
4017 default:
4018 gcc_unreachable ();
4019 }
4020
4021 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4022
4023 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4024 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4025
4026 return new_tree;
4027 }
4028 \f
4029
4030 /* Subroutine of stabilize_reference; this is called for subtrees of
4031 references. Any expression with side-effects must be put in a SAVE_EXPR
4032 to ensure that it is only evaluated once.
4033
4034 We don't put SAVE_EXPR nodes around everything, because assigning very
4035 simple expressions to temporaries causes us to miss good opportunities
4036 for optimizations. Among other things, the opportunity to fold in the
4037 addition of a constant into an addressing mode often gets lost, e.g.
4038 "y[i+1] += x;". In general, we take the approach that we should not make
4039 an assignment unless we are forced into it - i.e., that any non-side effect
4040 operator should be allowed, and that cse should take care of coalescing
4041 multiple utterances of the same expression should that prove fruitful. */
4042
4043 static tree
4044 stabilize_reference_1 (tree e)
4045 {
4046 tree result;
4047 enum tree_code code = TREE_CODE (e);
4048
4049 /* We cannot ignore const expressions because it might be a reference
4050 to a const array but whose index contains side-effects. But we can
4051 ignore things that are actual constant or that already have been
4052 handled by this function. */
4053
4054 if (tree_invariant_p (e))
4055 return e;
4056
4057 switch (TREE_CODE_CLASS (code))
4058 {
4059 case tcc_exceptional:
4060 case tcc_type:
4061 case tcc_declaration:
4062 case tcc_comparison:
4063 case tcc_statement:
4064 case tcc_expression:
4065 case tcc_reference:
4066 case tcc_vl_exp:
4067 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4068 so that it will only be evaluated once. */
4069 /* The reference (r) and comparison (<) classes could be handled as
4070 below, but it is generally faster to only evaluate them once. */
4071 if (TREE_SIDE_EFFECTS (e))
4072 return save_expr (e);
4073 return e;
4074
4075 case tcc_constant:
4076 /* Constants need no processing. In fact, we should never reach
4077 here. */
4078 return e;
4079
4080 case tcc_binary:
4081 /* Division is slow and tends to be compiled with jumps,
4082 especially the division by powers of 2 that is often
4083 found inside of an array reference. So do it just once. */
4084 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4085 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4086 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4087 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4088 return save_expr (e);
4089 /* Recursively stabilize each operand. */
4090 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4091 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4092 break;
4093
4094 case tcc_unary:
4095 /* Recursively stabilize each operand. */
4096 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4097 break;
4098
4099 default:
4100 gcc_unreachable ();
4101 }
4102
4103 TREE_TYPE (result) = TREE_TYPE (e);
4104 TREE_READONLY (result) = TREE_READONLY (e);
4105 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4106 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4107
4108 return result;
4109 }
4110
4111 /* Stabilize a reference so that we can use it any number of times
4112 without causing its operands to be evaluated more than once.
4113 Returns the stabilized reference. This works by means of save_expr,
4114 so see the caveats in the comments about save_expr.
4115
4116 Also allows conversion expressions whose operands are references.
4117 Any other kind of expression is returned unchanged. */
4118
4119 tree
4120 stabilize_reference (tree ref)
4121 {
4122 tree result;
4123 enum tree_code code = TREE_CODE (ref);
4124
4125 switch (code)
4126 {
4127 case VAR_DECL:
4128 case PARM_DECL:
4129 case RESULT_DECL:
4130 /* No action is needed in this case. */
4131 return ref;
4132
4133 CASE_CONVERT:
4134 case FLOAT_EXPR:
4135 case FIX_TRUNC_EXPR:
4136 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4137 break;
4138
4139 case INDIRECT_REF:
4140 result = build_nt (INDIRECT_REF,
4141 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4142 break;
4143
4144 case COMPONENT_REF:
4145 result = build_nt (COMPONENT_REF,
4146 stabilize_reference (TREE_OPERAND (ref, 0)),
4147 TREE_OPERAND (ref, 1), NULL_TREE);
4148 break;
4149
4150 case BIT_FIELD_REF:
4151 result = build_nt (BIT_FIELD_REF,
4152 stabilize_reference (TREE_OPERAND (ref, 0)),
4153 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4154 break;
4155
4156 case ARRAY_REF:
4157 result = build_nt (ARRAY_REF,
4158 stabilize_reference (TREE_OPERAND (ref, 0)),
4159 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4160 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4161 break;
4162
4163 case ARRAY_RANGE_REF:
4164 result = build_nt (ARRAY_RANGE_REF,
4165 stabilize_reference (TREE_OPERAND (ref, 0)),
4166 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4167 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4168 break;
4169
4170 case COMPOUND_EXPR:
4171 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4172 it wouldn't be ignored. This matters when dealing with
4173 volatiles. */
4174 return stabilize_reference_1 (ref);
4175
4176 /* If arg isn't a kind of lvalue we recognize, make no change.
4177 Caller should recognize the error for an invalid lvalue. */
4178 default:
4179 return ref;
4180
4181 case ERROR_MARK:
4182 return error_mark_node;
4183 }
4184
4185 TREE_TYPE (result) = TREE_TYPE (ref);
4186 TREE_READONLY (result) = TREE_READONLY (ref);
4187 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4188 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4189
4190 return result;
4191 }
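
/* Illustrative sketch (not part of the original GCC sources): how a front end
   might use stabilize_reference when lowering "a[f ()] += 1" so that the call
   to f is evaluated only once even though the reference appears twice.  The
   helper and its arguments are hypothetical; the block is guarded out so it
   does not affect the build.  */
#if 0
static tree
example_stabilized_increment (tree array, tree idx_call, tree elt_type)
{
  tree ref = build4 (ARRAY_REF, elt_type, array, idx_call,
		     NULL_TREE, NULL_TREE);
  /* stabilize_reference wraps the side-effecting index in a SAVE_EXPR, so
     REF can be used both as the store target and as the loaded value.  */
  ref = stabilize_reference (ref);
  return build2 (MODIFY_EXPR, elt_type, ref,
		 build2 (PLUS_EXPR, elt_type, ref,
			 build_int_cst (elt_type, 1)));
}
#endif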
4192 \f
4193 /* Low-level constructors for expressions. */
4194
4195 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4196 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4197
4198 void
4199 recompute_tree_invariant_for_addr_expr (tree t)
4200 {
4201 tree node;
4202 bool tc = true, se = false;
4203
4204 /* We started out assuming this address is both invariant and constant
4205 and has no side effects. Now go down any handled components and see if
4206 any of them involve offsets that are either non-constant or non-invariant.
4207 Also check for side-effects.
4208
4209 ??? Note that this code makes no attempt to deal with the case where
4210 taking the address of something causes a copy due to misalignment. */
4211
4212 #define UPDATE_FLAGS(NODE) \
4213 do { tree _node = (NODE); \
4214 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4215 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4216
4217 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4218 node = TREE_OPERAND (node, 0))
4219 {
4220 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4221 array reference (probably made temporarily by the G++ front end),
4222 so ignore all the operands. */
4223 if ((TREE_CODE (node) == ARRAY_REF
4224 || TREE_CODE (node) == ARRAY_RANGE_REF)
4225 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4226 {
4227 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4228 if (TREE_OPERAND (node, 2))
4229 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4230 if (TREE_OPERAND (node, 3))
4231 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4232 }
4233 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4234 FIELD_DECL, apparently. The G++ front end can put something else
4235 there, at least temporarily. */
4236 else if (TREE_CODE (node) == COMPONENT_REF
4237 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4238 {
4239 if (TREE_OPERAND (node, 2))
4240 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4241 }
4242 }
4243
4244 node = lang_hooks.expr_to_decl (node, &tc, &se);
4245
4246 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4247 the address, since &(*a)->b is a form of addition. If it's a constant, the
4248 address is constant too. If it's a decl, its address is constant if the
4249 decl is static. Everything else is not constant and, furthermore,
4250 taking the address of a volatile variable is not volatile. */
4251 if (TREE_CODE (node) == INDIRECT_REF
4252 || TREE_CODE (node) == MEM_REF)
4253 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4254 else if (CONSTANT_CLASS_P (node))
4255 ;
4256 else if (DECL_P (node))
4257 tc &= (staticp (node) != NULL_TREE);
4258 else
4259 {
4260 tc = false;
4261 se |= TREE_SIDE_EFFECTS (node);
4262 }
4263
4264
4265 TREE_CONSTANT (t) = tc;
4266 TREE_SIDE_EFFECTS (t) = se;
4267 #undef UPDATE_FLAGS
4268 }
4269
4270 /* Build an expression of code CODE, data type TYPE, and operands as
4271 specified. Expressions and reference nodes can be created this way.
4272 Constants, decls, types and misc nodes cannot be.
4273
4274 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4275 enough for all extant tree codes. */
4276
4277 tree
4278 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4279 {
4280 tree t;
4281
4282 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4283
4284 t = make_node_stat (code PASS_MEM_STAT);
4285 TREE_TYPE (t) = tt;
4286
4287 return t;
4288 }
4289
4290 tree
4291 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4292 {
4293 int length = sizeof (struct tree_exp);
4294 tree t;
4295
4296 record_node_allocation_statistics (code, length);
4297
4298 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4299
4300 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4301
4302 memset (t, 0, sizeof (struct tree_common));
4303
4304 TREE_SET_CODE (t, code);
4305
4306 TREE_TYPE (t) = type;
4307 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4308 TREE_OPERAND (t, 0) = node;
4309 if (node && !TYPE_P (node))
4310 {
4311 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4312 TREE_READONLY (t) = TREE_READONLY (node);
4313 }
4314
4315 if (TREE_CODE_CLASS (code) == tcc_statement)
4316 TREE_SIDE_EFFECTS (t) = 1;
4317 else switch (code)
4318 {
4319 case VA_ARG_EXPR:
4320 /* All of these have side-effects, no matter what their
4321 operands are. */
4322 TREE_SIDE_EFFECTS (t) = 1;
4323 TREE_READONLY (t) = 0;
4324 break;
4325
4326 case INDIRECT_REF:
4327 /* Whether a dereference is readonly has nothing to do with whether
4328 its operand is readonly. */
4329 TREE_READONLY (t) = 0;
4330 break;
4331
4332 case ADDR_EXPR:
4333 if (node)
4334 recompute_tree_invariant_for_addr_expr (t);
4335 break;
4336
4337 default:
4338 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4339 && node && !TYPE_P (node)
4340 && TREE_CONSTANT (node))
4341 TREE_CONSTANT (t) = 1;
4342 if (TREE_CODE_CLASS (code) == tcc_reference
4343 && node && TREE_THIS_VOLATILE (node))
4344 TREE_THIS_VOLATILE (t) = 1;
4345 break;
4346 }
4347
4348 return t;
4349 }
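
/* Illustrative sketch (not part of the original GCC sources): taking the
   address of a decl with build1.  Because the code is ADDR_EXPR, build1 calls
   recompute_tree_invariant_for_addr_expr above, so TREE_CONSTANT ends up set
   only when the decl is static.  The helper name is hypothetical; guarded out
   so it does not affect the build.  */
#if 0
static tree
example_take_address (tree decl)
{
  tree ptr_type = build_pointer_type (TREE_TYPE (decl));
  return build1 (ADDR_EXPR, ptr_type, decl);
}
#endif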
4350
4351 #define PROCESS_ARG(N) \
4352 do { \
4353 TREE_OPERAND (t, N) = arg##N; \
4354 if (arg##N && !TYPE_P (arg##N)) \
4355 { \
4356 if (TREE_SIDE_EFFECTS (arg##N)) \
4357 side_effects = 1; \
4358 if (!TREE_READONLY (arg##N) \
4359 && !CONSTANT_CLASS_P (arg##N)) \
4360 (void) (read_only = 0); \
4361 if (!TREE_CONSTANT (arg##N)) \
4362 (void) (constant = 0); \
4363 } \
4364 } while (0)
4365
4366 tree
4367 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4368 {
4369 bool constant, read_only, side_effects;
4370 tree t;
4371
4372 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4373
4374 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4375 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4376 /* When sizetype precision doesn't match that of pointers
4377 we need to be able to build explicit extensions or truncations
4378 of the offset argument. */
4379 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4380 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4381 && TREE_CODE (arg1) == INTEGER_CST);
4382
4383 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4384 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4385 && ptrofftype_p (TREE_TYPE (arg1)));
4386
4387 t = make_node_stat (code PASS_MEM_STAT);
4388 TREE_TYPE (t) = tt;
4389
4390 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4391 result based on those same flags for the arguments. But if the
4392 arguments aren't really even `tree' expressions, we shouldn't be trying
4393 to do this. */
4394
4395 /* Expressions without side effects may be constant if their
4396 arguments are as well. */
4397 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4398 || TREE_CODE_CLASS (code) == tcc_binary);
4399 read_only = 1;
4400 side_effects = TREE_SIDE_EFFECTS (t);
4401
4402 PROCESS_ARG (0);
4403 PROCESS_ARG (1);
4404
4405 TREE_SIDE_EFFECTS (t) = side_effects;
4406 if (code == MEM_REF)
4407 {
4408 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4409 {
4410 tree o = TREE_OPERAND (arg0, 0);
4411 TREE_READONLY (t) = TREE_READONLY (o);
4412 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4413 }
4414 }
4415 else
4416 {
4417 TREE_READONLY (t) = read_only;
4418 TREE_CONSTANT (t) = constant;
4419 TREE_THIS_VOLATILE (t)
4420 = (TREE_CODE_CLASS (code) == tcc_reference
4421 && arg0 && TREE_THIS_VOLATILE (arg0));
4422 }
4423
4424 return t;
4425 }
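
/* Illustrative sketch (not part of the original GCC sources): building a
   POINTER_PLUS_EXPR with build2.  As the assertion above requires, the result
   and the first operand are pointers while the offset must satisfy
   ptrofftype_p, so a sizetype constant is used.  The helper name and BYTES
   parameter are hypothetical; guarded out so it does not affect the build.  */
#if 0
static tree
example_pointer_plus (tree ptr, HOST_WIDE_INT bytes)
{
  /* size_int produces a sizetype INTEGER_CST, which satisfies ptrofftype_p.  */
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, size_int (bytes));
}
#endif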
4426
4427
4428 tree
4429 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4430 tree arg2 MEM_STAT_DECL)
4431 {
4432 bool constant, read_only, side_effects;
4433 tree t;
4434
4435 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4436 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4437
4438 t = make_node_stat (code PASS_MEM_STAT);
4439 TREE_TYPE (t) = tt;
4440
4441 read_only = 1;
4442
4443 /* As a special exception, if COND_EXPR has NULL branches, we
4444 assume that it is a gimple statement and always consider
4445 it to have side effects. */
4446 if (code == COND_EXPR
4447 && tt == void_type_node
4448 && arg1 == NULL_TREE
4449 && arg2 == NULL_TREE)
4450 side_effects = true;
4451 else
4452 side_effects = TREE_SIDE_EFFECTS (t);
4453
4454 PROCESS_ARG (0);
4455 PROCESS_ARG (1);
4456 PROCESS_ARG (2);
4457
4458 if (code == COND_EXPR)
4459 TREE_READONLY (t) = read_only;
4460
4461 TREE_SIDE_EFFECTS (t) = side_effects;
4462 TREE_THIS_VOLATILE (t)
4463 = (TREE_CODE_CLASS (code) == tcc_reference
4464 && arg0 && TREE_THIS_VOLATILE (arg0));
4465
4466 return t;
4467 }
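
/* Illustrative sketch (not part of the original GCC sources): a
   value-producing COND_EXPR built with build3.  With a non-void type and
   non-NULL arms, the special "gimple statement" case above does not apply and
   side effects are taken from the operands.  The helper name is hypothetical;
   guarded out so it does not affect the build.  */
#if 0
static tree
example_select (tree cond, tree if_true, tree if_false)
{
  return build3 (COND_EXPR, TREE_TYPE (if_true), cond, if_true, if_false);
}
#endif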
4468
4469 tree
4470 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4471 tree arg2, tree arg3 MEM_STAT_DECL)
4472 {
4473 bool constant, read_only, side_effects;
4474 tree t;
4475
4476 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4477
4478 t = make_node_stat (code PASS_MEM_STAT);
4479 TREE_TYPE (t) = tt;
4480
4481 side_effects = TREE_SIDE_EFFECTS (t);
4482
4483 PROCESS_ARG (0);
4484 PROCESS_ARG (1);
4485 PROCESS_ARG (2);
4486 PROCESS_ARG (3);
4487
4488 TREE_SIDE_EFFECTS (t) = side_effects;
4489 TREE_THIS_VOLATILE (t)
4490 = (TREE_CODE_CLASS (code) == tcc_reference
4491 && arg0 && TREE_THIS_VOLATILE (arg0));
4492
4493 return t;
4494 }
4495
4496 tree
4497 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4498 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4499 {
4500 bool constant, read_only, side_effects;
4501 tree t;
4502
4503 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4504
4505 t = make_node_stat (code PASS_MEM_STAT);
4506 TREE_TYPE (t) = tt;
4507
4508 side_effects = TREE_SIDE_EFFECTS (t);
4509
4510 PROCESS_ARG (0);
4511 PROCESS_ARG (1);
4512 PROCESS_ARG (2);
4513 PROCESS_ARG (3);
4514 PROCESS_ARG (4);
4515
4516 TREE_SIDE_EFFECTS (t) = side_effects;
4517 if (code == TARGET_MEM_REF)
4518 {
4519 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4520 {
4521 tree o = TREE_OPERAND (arg0, 0);
4522 TREE_READONLY (t) = TREE_READONLY (o);
4523 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4524 }
4525 }
4526 else
4527 TREE_THIS_VOLATILE (t)
4528 = (TREE_CODE_CLASS (code) == tcc_reference
4529 && arg0 && TREE_THIS_VOLATILE (arg0));
4530
4531 return t;
4532 }
4533
4534 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4535 on the pointer PTR. */
4536
4537 tree
4538 build_simple_mem_ref_loc (location_t loc, tree ptr)
4539 {
4540 HOST_WIDE_INT offset = 0;
4541 tree ptype = TREE_TYPE (ptr);
4542 tree tem;
4543 /* For convenience allow addresses that collapse to a simple base
4544 and offset. */
4545 if (TREE_CODE (ptr) == ADDR_EXPR
4546 && (handled_component_p (TREE_OPERAND (ptr, 0))
4547 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4548 {
4549 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4550 gcc_assert (ptr);
4551 ptr = build_fold_addr_expr (ptr);
4552 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4553 }
4554 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4555 ptr, build_int_cst (ptype, offset));
4556 SET_EXPR_LOCATION (tem, loc);
4557 return tem;
4558 }
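
/* Illustrative sketch (not part of the original GCC sources): dereferencing a
   pointer with build_simple_mem_ref_loc.  The convenience case above also
   lets an ADDR_EXPR of a component collapse to a base plus constant offset.
   The helper name is hypothetical; guarded out so it does not affect the
   build.  */
#if 0
static tree
example_load_through (location_t loc, tree ptr)
{
  return build_simple_mem_ref_loc (loc, ptr);
}
#endif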
4559
4560 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4561
4562 offset_int
4563 mem_ref_offset (const_tree t)
4564 {
4565 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4566 }
4567
4568 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4569 offsetted by OFFSET units. */
4570
4571 tree
4572 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4573 {
4574 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4575 build_fold_addr_expr (base),
4576 build_int_cst (ptr_type_node, offset));
4577 tree addr = build1 (ADDR_EXPR, type, ref);
4578 recompute_tree_invariant_for_addr_expr (addr);
4579 return addr;
4580 }
4581
4582 /* Similar except don't specify the TREE_TYPE
4583 and leave the TREE_SIDE_EFFECTS as 0.
4584 It is permissible for arguments to be null,
4585 or even garbage if their values do not matter. */
4586
4587 tree
4588 build_nt (enum tree_code code, ...)
4589 {
4590 tree t;
4591 int length;
4592 int i;
4593 va_list p;
4594
4595 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4596
4597 va_start (p, code);
4598
4599 t = make_node (code);
4600 length = TREE_CODE_LENGTH (code);
4601
4602 for (i = 0; i < length; i++)
4603 TREE_OPERAND (t, i) = va_arg (p, tree);
4604
4605 va_end (p);
4606 return t;
4607 }
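
/* Illustrative sketch (not part of the original GCC sources): build_nt is
   handy for building template-like references whose type and flags are filled
   in by the caller, much as stabilize_reference does above.  The helper name
   is hypothetical; guarded out so it does not affect the build.  */
#if 0
static tree
example_untyped_component_ref (tree object, tree field)
{
  tree ref = build_nt (COMPONENT_REF, object, field, NULL_TREE);
  /* build_nt leaves TREE_TYPE and TREE_SIDE_EFFECTS for the caller.  */
  TREE_TYPE (ref) = TREE_TYPE (field);
  return ref;
}
#endif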
4608
4609 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4610 tree vec. */
4611
4612 tree
4613 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4614 {
4615 tree ret, t;
4616 unsigned int ix;
4617
4618 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4619 CALL_EXPR_FN (ret) = fn;
4620 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4621 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4622 CALL_EXPR_ARG (ret, ix) = t;
4623 return ret;
4624 }
4625 \f
4626 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4627 We do NOT enter this node in any sort of symbol table.
4628
4629 LOC is the location of the decl.
4630
4631 layout_decl is used to set up the decl's storage layout.
4632 Other slots are initialized to 0 or null pointers. */
4633
4634 tree
4635 build_decl_stat (location_t loc, enum tree_code code, tree name,
4636 tree type MEM_STAT_DECL)
4637 {
4638 tree t;
4639
4640 t = make_node_stat (code PASS_MEM_STAT);
4641 DECL_SOURCE_LOCATION (t) = loc;
4642
4643 /* if (type == error_mark_node)
4644 type = integer_type_node; */
4645 /* That is not done, deliberately, so that having error_mark_node
4646 as the type can suppress useless errors in the use of this variable. */
4647
4648 DECL_NAME (t) = name;
4649 TREE_TYPE (t) = type;
4650
4651 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4652 layout_decl (t, 0);
4653
4654 return t;
4655 }
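
/* Illustrative sketch (not part of the original GCC sources): creating an
   artificial local variable with build_decl; VAR_DECLs get their storage
   layout from layout_decl as described above.  The helper name and the "tmp"
   identifier are hypothetical; guarded out so it does not affect the
   build.  */
#if 0
static tree
example_make_temporary (location_t loc, tree type)
{
  tree var = build_decl (loc, VAR_DECL, get_identifier ("tmp"), type);
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  return var;
}
#endif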
4656
4657 /* Builds and returns function declaration with NAME and TYPE. */
4658
4659 tree
4660 build_fn_decl (const char *name, tree type)
4661 {
4662 tree id = get_identifier (name);
4663 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4664
4665 DECL_EXTERNAL (decl) = 1;
4666 TREE_PUBLIC (decl) = 1;
4667 DECL_ARTIFICIAL (decl) = 1;
4668 TREE_NOTHROW (decl) = 1;
4669
4670 return decl;
4671 }
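
/* Illustrative sketch (not part of the original GCC sources): declaring an
   external runtime routine with build_fn_decl; its type is assembled first
   with build_function_type_list.  The routine name is only an example;
   guarded out so it does not affect the build.  */
#if 0
static tree
example_declare_runtime_fn (void)
{
  tree fntype = build_function_type_list (ptr_type_node, ptr_type_node,
					  size_type_node, NULL_TREE);
  return build_fn_decl ("__example_alloc", fntype);
}
#endif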
4672
4673 vec<tree, va_gc> *all_translation_units;
4674
4675 /* Builds a new translation-unit decl with name NAME, queues it in the
4676 global list of translation-unit decls and returns it. */
4677
4678 tree
4679 build_translation_unit_decl (tree name)
4680 {
4681 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4682 name, NULL_TREE);
4683 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4684 vec_safe_push (all_translation_units, tu);
4685 return tu;
4686 }
4687
4688 \f
4689 /* BLOCK nodes are used to represent the structure of binding contours
4690 and declarations, once those contours have been exited and their contents
4691 compiled. This information is used for outputting debugging info. */
4692
4693 tree
4694 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4695 {
4696 tree block = make_node (BLOCK);
4697
4698 BLOCK_VARS (block) = vars;
4699 BLOCK_SUBBLOCKS (block) = subblocks;
4700 BLOCK_SUPERCONTEXT (block) = supercontext;
4701 BLOCK_CHAIN (block) = chain;
4702 return block;
4703 }
4704
4705 \f
4706 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4707
4708 LOC is the location to use in tree T. */
4709
4710 void
4711 protected_set_expr_location (tree t, location_t loc)
4712 {
4713 if (CAN_HAVE_LOCATION_P (t))
4714 SET_EXPR_LOCATION (t, loc);
4715 }
4716 \f
4717 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4718 is ATTRIBUTE. */
4719
4720 tree
4721 build_decl_attribute_variant (tree ddecl, tree attribute)
4722 {
4723 DECL_ATTRIBUTES (ddecl) = attribute;
4724 return ddecl;
4725 }
4726
4727 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4728 is ATTRIBUTE and its qualifiers are QUALS.
4729
4730 Record such modified types already made so we don't make duplicates. */
4731
4732 tree
4733 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4734 {
4735 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4736 {
4737 inchash::hash hstate;
4738 tree ntype;
4739 int i;
4740 tree t;
4741 enum tree_code code = TREE_CODE (ttype);
4742
4743 /* Building a distinct copy of a tagged type is inappropriate; it
4744 causes breakage in code that expects there to be a one-to-one
4745 relationship between a struct and its fields.
4746 build_duplicate_type is another solution (as used in
4747 handle_transparent_union_attribute), but that doesn't play well
4748 with the stronger C++ type identity model. */
4749 if (TREE_CODE (ttype) == RECORD_TYPE
4750 || TREE_CODE (ttype) == UNION_TYPE
4751 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4752 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4753 {
4754 warning (OPT_Wattributes,
4755 "ignoring attributes applied to %qT after definition",
4756 TYPE_MAIN_VARIANT (ttype));
4757 return build_qualified_type (ttype, quals);
4758 }
4759
4760 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4761 ntype = build_distinct_type_copy (ttype);
4762
4763 TYPE_ATTRIBUTES (ntype) = attribute;
4764
4765 hstate.add_int (code);
4766 if (TREE_TYPE (ntype))
4767 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4768 attribute_hash_list (attribute, hstate);
4769
4770 switch (TREE_CODE (ntype))
4771 {
4772 case FUNCTION_TYPE:
4773 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4774 break;
4775 case ARRAY_TYPE:
4776 if (TYPE_DOMAIN (ntype))
4777 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4778 break;
4779 case INTEGER_TYPE:
4780 t = TYPE_MAX_VALUE (ntype);
4781 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4782 hstate.add_object (TREE_INT_CST_ELT (t, i));
4783 break;
4784 case REAL_TYPE:
4785 case FIXED_POINT_TYPE:
4786 {
4787 unsigned int precision = TYPE_PRECISION (ntype);
4788 hstate.add_object (precision);
4789 }
4790 break;
4791 default:
4792 break;
4793 }
4794
4795 ntype = type_hash_canon (hstate.end(), ntype);
4796
4797 /* If the target-dependent attributes make NTYPE different from
4798 its canonical type, we will need to use structural equality
4799 checks for this type. */
4800 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4801 || !comp_type_attributes (ntype, ttype))
4802 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4803 else if (TYPE_CANONICAL (ntype) == ntype)
4804 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4805
4806 ttype = build_qualified_type (ntype, quals);
4807 }
4808 else if (TYPE_QUALS (ttype) != quals)
4809 ttype = build_qualified_type (ttype, quals);
4810
4811 return ttype;
4812 }
4813
4814 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4815 the same. */
4816
4817 static bool
4818 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4819 {
4820 tree cl1, cl2;
4821 for (cl1 = clauses1, cl2 = clauses2;
4822 cl1 && cl2;
4823 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4824 {
4825 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4826 return false;
4827 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4828 {
4829 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4830 OMP_CLAUSE_DECL (cl2)) != 1)
4831 return false;
4832 }
4833 switch (OMP_CLAUSE_CODE (cl1))
4834 {
4835 case OMP_CLAUSE_ALIGNED:
4836 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4837 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4838 return false;
4839 break;
4840 case OMP_CLAUSE_LINEAR:
4841 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4842 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4843 return false;
4844 break;
4845 case OMP_CLAUSE_SIMDLEN:
4846 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4847 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4848 return false;
4849 default:
4850 break;
4851 }
4852 }
4853 return true;
4854 }
4855
4856 /* Compare two constructor-element-type constant lists. Return true if the
4857 lists are known to be equal; otherwise return false. */
4858
4859 static bool
4860 simple_cst_list_equal (const_tree l1, const_tree l2)
4861 {
4862 while (l1 != NULL_TREE && l2 != NULL_TREE)
4863 {
4864 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4865 return false;
4866
4867 l1 = TREE_CHAIN (l1);
4868 l2 = TREE_CHAIN (l2);
4869 }
4870
4871 return l1 == l2;
4872 }
4873
4874 /* Compare two identifier nodes representing attributes. Either one may
4875 be in wrapped __ATTR__ form. Return true if they are the same, false
4876 otherwise. */
4877
4878 static bool
4879 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4880 {
4881 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4882 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4883 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4884
4885 /* Identifiers can be compared directly for equality. */
4886 if (attr1 == attr2)
4887 return true;
4888
4889 /* If they are not equal, one may still be in the form
4890 'text' while the other one is in the form '__text__'. TODO:
4891 If we were storing attributes in normalized 'text' form, then
4892 this could all go away and we could take full advantage of
4893 the fact that we're comparing identifiers. :-) */
4894 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4895 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4896
4897 if (attr2_len == attr1_len + 4)
4898 {
4899 const char *p = IDENTIFIER_POINTER (attr2);
4900 const char *q = IDENTIFIER_POINTER (attr1);
4901 if (p[0] == '_' && p[1] == '_'
4902 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4903 && strncmp (q, p + 2, attr1_len) == 0)
4904 return true;
4905 }
4906 else if (attr2_len + 4 == attr1_len)
4907 {
4908 const char *p = IDENTIFIER_POINTER (attr2);
4909 const char *q = IDENTIFIER_POINTER (attr1);
4910 if (q[0] == '_' && q[1] == '_'
4911 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4912 && strncmp (q + 2, p, attr2_len) == 0)
4913 return true;
4914 }
4915
4916 return false;
4917 }
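
/* Illustrative sketch (not part of the original GCC sources): the comparison
   above treats the plain and wrapped spellings of an attribute name as equal,
   e.g. "format" and "__format__".  The helper name is hypothetical; guarded
   out so it does not affect the build.  */
#if 0
static bool
example_attr_names_match (void)
{
  return cmp_attrib_identifiers (get_identifier ("format"),
				 get_identifier ("__format__"));
}
#endif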
4918
4919 /* Compare two attributes for their value identity. Return true if the
4920 attribute values are known to be equal; otherwise return false. */
4921
4922 bool
4923 attribute_value_equal (const_tree attr1, const_tree attr2)
4924 {
4925 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4926 return true;
4927
4928 if (TREE_VALUE (attr1) != NULL_TREE
4929 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4930 && TREE_VALUE (attr2) != NULL_TREE
4931 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4932 {
4933 /* Handle attribute format. */
4934 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4935 {
4936 attr1 = TREE_VALUE (attr1);
4937 attr2 = TREE_VALUE (attr2);
4938 /* Compare the archetypes (printf/scanf/strftime/...). */
4939 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4940 TREE_VALUE (attr2)))
4941 return false;
4942 /* Archetypes are the same. Compare the rest. */
4943 return (simple_cst_list_equal (TREE_CHAIN (attr1),
4944 TREE_CHAIN (attr2)) == 1);
4945 }
4946 return (simple_cst_list_equal (TREE_VALUE (attr1),
4947 TREE_VALUE (attr2)) == 1);
4948 }
4949
4950 if ((flag_openmp || flag_openmp_simd)
4951 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4952 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4953 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4954 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4955 TREE_VALUE (attr2));
4956
4957 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4958 }
4959
4960 /* Return 0 if the attributes for two types are incompatible, 1 if they
4961 are compatible, and 2 if they are nearly compatible (which causes a
4962 warning to be generated). */
4963 int
4964 comp_type_attributes (const_tree type1, const_tree type2)
4965 {
4966 const_tree a1 = TYPE_ATTRIBUTES (type1);
4967 const_tree a2 = TYPE_ATTRIBUTES (type2);
4968 const_tree a;
4969
4970 if (a1 == a2)
4971 return 1;
4972 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4973 {
4974 const struct attribute_spec *as;
4975 const_tree attr;
4976
4977 as = lookup_attribute_spec (get_attribute_name (a));
4978 if (!as || as->affects_type_identity == false)
4979 continue;
4980
4981 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4982 if (!attr || !attribute_value_equal (a, attr))
4983 break;
4984 }
4985 if (!a)
4986 {
4987 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4988 {
4989 const struct attribute_spec *as;
4990
4991 as = lookup_attribute_spec (get_attribute_name (a));
4992 if (!as || as->affects_type_identity == false)
4993 continue;
4994
4995 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4996 break;
4997 /* We don't need to compare trees again, as we did this
4998 already in the first loop. */
4999 }
5000 /* All identity-affecting attributes are equal, so
5001 there is no need to call the target hook for comparison. */
5002 if (!a)
5003 return 1;
5004 }
5005 /* As some type combinations - like default calling-convention - might
5006 be compatible, we have to call the target hook to get the final result. */
5007 return targetm.comp_type_attributes (type1, type2);
5008 }
5009
5010 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5011 is ATTRIBUTE.
5012
5013 Record such modified types already made so we don't make duplicates. */
5014
5015 tree
5016 build_type_attribute_variant (tree ttype, tree attribute)
5017 {
5018 return build_type_attribute_qual_variant (ttype, attribute,
5019 TYPE_QUALS (ttype));
5020 }
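
/* Illustrative sketch (not part of the original GCC sources): attaching a
   "packed" attribute to a type via build_type_attribute_variant, which
   preserves the original qualifiers through
   build_type_attribute_qual_variant.  The helper name is hypothetical;
   guarded out so it does not affect the build.  */
#if 0
static tree
example_packed_variant (tree type)
{
  tree attr = tree_cons (get_identifier ("packed"), NULL_TREE,
			 TYPE_ATTRIBUTES (type));
  return build_type_attribute_variant (type, attr);
}
#endif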
5021
5022
5023 /* Reset the expression *EXPR_P, a size or position.
5024
5025 ??? We could reset all non-constant sizes or positions. But it's cheap
5026 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5027
5028 We need to reset self-referential sizes or positions because they cannot
5029 be gimplified and thus can contain a CALL_EXPR after the gimplification
5030 is finished, which will run afoul of LTO streaming. And they need to be
5031 reset to something essentially dummy but not constant, so as to preserve
5032 the properties of the object they are attached to. */
5033
5034 static inline void
5035 free_lang_data_in_one_sizepos (tree *expr_p)
5036 {
5037 tree expr = *expr_p;
5038 if (CONTAINS_PLACEHOLDER_P (expr))
5039 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5040 }
5041
5042
5043 /* Reset all the fields in a binfo node BINFO. We only keep
5044 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5045
5046 static void
5047 free_lang_data_in_binfo (tree binfo)
5048 {
5049 unsigned i;
5050 tree t;
5051
5052 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5053
5054 BINFO_VIRTUALS (binfo) = NULL_TREE;
5055 BINFO_BASE_ACCESSES (binfo) = NULL;
5056 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5057 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5058
5059 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5060 free_lang_data_in_binfo (t);
5061 }
5062
5063
5064 /* Reset all language specific information still present in TYPE. */
5065
5066 static void
5067 free_lang_data_in_type (tree type)
5068 {
5069 gcc_assert (TYPE_P (type));
5070
5071 /* Give the FE a chance to remove its own data first. */
5072 lang_hooks.free_lang_data (type);
5073
5074 TREE_LANG_FLAG_0 (type) = 0;
5075 TREE_LANG_FLAG_1 (type) = 0;
5076 TREE_LANG_FLAG_2 (type) = 0;
5077 TREE_LANG_FLAG_3 (type) = 0;
5078 TREE_LANG_FLAG_4 (type) = 0;
5079 TREE_LANG_FLAG_5 (type) = 0;
5080 TREE_LANG_FLAG_6 (type) = 0;
5081
5082 if (TREE_CODE (type) == FUNCTION_TYPE)
5083 {
5084 /* Remove the const and volatile qualifiers from arguments. The
5085 C++ front end removes them, but the C front end does not,
5086 leading to false ODR violation errors when merging two
5087 instances of the same function signature compiled by
5088 different front ends. */
5089 tree p;
5090
5091 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5092 {
5093 tree arg_type = TREE_VALUE (p);
5094
5095 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5096 {
5097 int quals = TYPE_QUALS (arg_type)
5098 & ~TYPE_QUAL_CONST
5099 & ~TYPE_QUAL_VOLATILE;
5100 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5101 free_lang_data_in_type (TREE_VALUE (p));
5102 }
5103 /* C++ FE uses TREE_PURPOSE to store initial values. */
5104 TREE_PURPOSE (p) = NULL;
5105 }
5106 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5107 TYPE_MINVAL (type) = NULL;
5108 }
5109 if (TREE_CODE (type) == METHOD_TYPE)
5110 {
5111 tree p;
5112
5113 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5114 {
5115 /* C++ FE uses TREE_PURPOSE to store initial values. */
5116 TREE_PURPOSE (p) = NULL;
5117 }
5118 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5119 TYPE_MINVAL (type) = NULL;
5120 }
5121
5122 /* Remove members that are not actually FIELD_DECLs from the field
5123 list of an aggregate. These occur in C++. */
5124 if (RECORD_OR_UNION_TYPE_P (type))
5125 {
5126 tree prev, member;
5127
5128 /* Note that TYPE_FIELDS can be shared across distinct
5129 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5130 to be removed, we cannot set its TREE_CHAIN to NULL.
5131 Otherwise, we would not be able to find all the other fields
5132 in the other instances of this TREE_TYPE.
5133
5134 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5135 prev = NULL_TREE;
5136 member = TYPE_FIELDS (type);
5137 while (member)
5138 {
5139 if (TREE_CODE (member) == FIELD_DECL
5140 || TREE_CODE (member) == TYPE_DECL)
5141 {
5142 if (prev)
5143 TREE_CHAIN (prev) = member;
5144 else
5145 TYPE_FIELDS (type) = member;
5146 prev = member;
5147 }
5148
5149 member = TREE_CHAIN (member);
5150 }
5151
5152 if (prev)
5153 TREE_CHAIN (prev) = NULL_TREE;
5154 else
5155 TYPE_FIELDS (type) = NULL_TREE;
5156
5157 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5158 and dangles the pointer from time to time. */
5159 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5160 TYPE_VFIELD (type) = NULL_TREE;
5161
5162 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5163 to enable ODR warnings about different method lists, doing so
5164 seems to increase the size of the streamed LTO data impractically.
5165 Keep the information that TYPE_METHODS was non-NULL. This is used
5166 by function.c and the pretty printers. */
5167 if (TYPE_METHODS (type))
5168 TYPE_METHODS (type) = error_mark_node;
5169 if (TYPE_BINFO (type))
5170 {
5171 free_lang_data_in_binfo (TYPE_BINFO (type));
5172 /* We need to preserve the link to bases and the virtual table for all
5173 polymorphic types to keep the devirtualization machinery working.
5174 Debug output cares only about bases, but we also output virtual
5175 table pointers so that merging of -fdevirtualize and
5176 -fno-devirtualize units is easier. */
5177 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5178 || !flag_devirtualize)
5179 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5180 && !BINFO_VTABLE (TYPE_BINFO (type)))
5181 || debug_info_level != DINFO_LEVEL_NONE))
5182 TYPE_BINFO (type) = NULL;
5183 }
5184 }
5185 else
5186 {
5187 /* For non-aggregate types, clear out the language slot (which
5188 overloads TYPE_BINFO). */
5189 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5190
5191 if (INTEGRAL_TYPE_P (type)
5192 || SCALAR_FLOAT_TYPE_P (type)
5193 || FIXED_POINT_TYPE_P (type))
5194 {
5195 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5196 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5197 }
5198 }
5199
5200 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5201 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5202
5203 if (TYPE_CONTEXT (type)
5204 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5205 {
5206 tree ctx = TYPE_CONTEXT (type);
5207 do
5208 {
5209 ctx = BLOCK_SUPERCONTEXT (ctx);
5210 }
5211 while (ctx && TREE_CODE (ctx) == BLOCK);
5212 TYPE_CONTEXT (type) = ctx;
5213 }
5214 }
5215
5216
5217 /* Return true if DECL may need an assembler name to be set. */
5218
5219 static inline bool
5220 need_assembler_name_p (tree decl)
5221 {
5222 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5223 Rule merging. This makes type_odr_p return true on those types during
5224 LTO, and by comparing the mangled names we can tell which types are
5225 intended to be equivalent across compilation units.
5226
5227 We do not store names of types for which type_in_anonymous_namespace_p holds.
5228
5229 Record, union and enumeration types have linkage that allows us
5230 to check type_in_anonymous_namespace_p. We do not mangle compound types
5231 that can always be compared structurally.
5232
5233 Similarly for builtin types, we compare properties of their main variant.
5234 A special case are integer types where mangling does make a difference
5235 between char/signed char/unsigned char etc. Storing names for these makes
5236 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5237 See cp/mangle.c:write_builtin_type for details. */
5238
5239 if (flag_lto_odr_type_mering
5240 && TREE_CODE (decl) == TYPE_DECL
5241 && DECL_NAME (decl)
5242 && decl == TYPE_NAME (TREE_TYPE (decl))
5243 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5244 && (type_with_linkage_p (TREE_TYPE (decl))
5245 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5246 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5247 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5248 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5249 if (TREE_CODE (decl) != FUNCTION_DECL
5250 && TREE_CODE (decl) != VAR_DECL)
5251 return false;
5252
5253 /* If DECL already has its assembler name set, it does not need a
5254 new one. */
5255 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5256 || DECL_ASSEMBLER_NAME_SET_P (decl))
5257 return false;
5258
5259 /* Abstract decls do not need an assembler name. */
5260 if (DECL_ABSTRACT_P (decl))
5261 return false;
5262
5263 /* For VAR_DECLs, only static, public and external symbols need an
5264 assembler name. */
5265 if (TREE_CODE (decl) == VAR_DECL
5266 && !TREE_STATIC (decl)
5267 && !TREE_PUBLIC (decl)
5268 && !DECL_EXTERNAL (decl))
5269 return false;
5270
5271 if (TREE_CODE (decl) == FUNCTION_DECL)
5272 {
5273 /* Do not set assembler name on builtins. Allow RTL expansion to
5274 decide whether to expand inline or via a regular call. */
5275 if (DECL_BUILT_IN (decl)
5276 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5277 return false;
5278
5279 /* Functions represented in the callgraph need an assembler name. */
5280 if (cgraph_node::get (decl) != NULL)
5281 return true;
5282
5283 /* Unused and not public functions don't need an assembler name. */
5284 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5285 return false;
5286 }
5287
5288 return true;
5289 }
5290
5291
5292 /* Reset all language specific information still present in symbol
5293 DECL. */
5294
5295 static void
5296 free_lang_data_in_decl (tree decl)
5297 {
5298 gcc_assert (DECL_P (decl));
5299
5300 /* Give the FE a chance to remove its own data first. */
5301 lang_hooks.free_lang_data (decl);
5302
5303 TREE_LANG_FLAG_0 (decl) = 0;
5304 TREE_LANG_FLAG_1 (decl) = 0;
5305 TREE_LANG_FLAG_2 (decl) = 0;
5306 TREE_LANG_FLAG_3 (decl) = 0;
5307 TREE_LANG_FLAG_4 (decl) = 0;
5308 TREE_LANG_FLAG_5 (decl) = 0;
5309 TREE_LANG_FLAG_6 (decl) = 0;
5310
5311 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5312 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5313 if (TREE_CODE (decl) == FIELD_DECL)
5314 {
5315 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5316 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5317 DECL_QUALIFIER (decl) = NULL_TREE;
5318 }
5319
5320 if (TREE_CODE (decl) == FUNCTION_DECL)
5321 {
5322 struct cgraph_node *node;
5323 if (!(node = cgraph_node::get (decl))
5324 || (!node->definition && !node->clones))
5325 {
5326 if (node)
5327 node->release_body ();
5328 else
5329 {
5330 release_function_body (decl);
5331 DECL_ARGUMENTS (decl) = NULL;
5332 DECL_RESULT (decl) = NULL;
5333 DECL_INITIAL (decl) = error_mark_node;
5334 }
5335 }
5336 if (gimple_has_body_p (decl))
5337 {
5338 tree t;
5339
5340 /* If DECL has a gimple body, then the context for its
5341 arguments must be DECL. Otherwise, it doesn't really
5342 matter, as we will not be emitting any code for DECL. In
5343 general, there may be other instances of DECL created by
5344 the front end and since PARM_DECLs are generally shared,
5345 their DECL_CONTEXT changes as the replicas of DECL are
5346 created. The only time where DECL_CONTEXT is important
5347 is for the FUNCTION_DECLs that have a gimple body (since
5348 the PARM_DECL will be used in the function's body). */
5349 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5350 DECL_CONTEXT (t) = decl;
5351 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5352 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5353 = target_option_default_node;
5354 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5355 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5356 = optimization_default_node;
5357 }
5358
5359 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5360 At this point, it is not needed anymore. */
5361 DECL_SAVED_TREE (decl) = NULL_TREE;
5362
5363 /* Clear the abstract origin if it refers to a method. Otherwise
5364 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5365 origin will not be output correctly. */
5366 if (DECL_ABSTRACT_ORIGIN (decl)
5367 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5368 && RECORD_OR_UNION_TYPE_P
5369 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5370 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5371
5372 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5373 DECL_VINDEX referring to itself into a vtable slot number as it
5374 should. Happens with functions that are copied and then forgotten
5375 about. Just clear it, it won't matter anymore. */
5376 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5377 DECL_VINDEX (decl) = NULL_TREE;
5378 }
5379 else if (TREE_CODE (decl) == VAR_DECL)
5380 {
5381 if ((DECL_EXTERNAL (decl)
5382 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5383 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5384 DECL_INITIAL (decl) = NULL_TREE;
5385 }
5386 else if (TREE_CODE (decl) == TYPE_DECL
5387 || TREE_CODE (decl) == FIELD_DECL)
5388 DECL_INITIAL (decl) = NULL_TREE;
5389 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5390 && DECL_INITIAL (decl)
5391 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5392 {
5393 /* Strip builtins from the translation-unit BLOCK. We still have targets
5394 without builtin_decl_explicit support and also builtins are shared
5395 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5396 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5397 while (*nextp)
5398 {
5399 tree var = *nextp;
5400 if (TREE_CODE (var) == FUNCTION_DECL
5401 && DECL_BUILT_IN (var))
5402 *nextp = TREE_CHAIN (var);
5403 else
5404 nextp = &TREE_CHAIN (var);
5405 }
5406 }
5407 }
5408
5409
5410 /* Data used when collecting DECLs and TYPEs for language data removal. */
5411
5412 struct free_lang_data_d
5413 {
5414 /* Worklist to avoid excessive recursion. */
5415 vec<tree> worklist;
5416
5417 /* Set of traversed objects. Used to avoid duplicate visits. */
5418 hash_set<tree> *pset;
5419
5420 /* Array of symbols to process with free_lang_data_in_decl. */
5421 vec<tree> decls;
5422
5423 /* Array of types to process with free_lang_data_in_type. */
5424 vec<tree> types;
5425 };
5426
5427
5428 /* Save all language fields needed to generate proper debug information
5429 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5430
5431 static void
5432 save_debug_info_for_decl (tree t)
5433 {
5434 /*struct saved_debug_info_d *sdi;*/
5435
5436 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5437
5438 /* FIXME. Partial implementation for saving debug info removed. */
5439 }
5440
5441
5442 /* Save all language fields needed to generate proper debug information
5443 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5444
5445 static void
5446 save_debug_info_for_type (tree t)
5447 {
5448 /*struct saved_debug_info_d *sdi;*/
5449
5450 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5451
5452 /* FIXME. Partial implementation for saving debug info removed. */
5453 }
5454
5455
5456 /* Add type or decl T to one of the list of tree nodes that need their
5457 language data removed. The lists are held inside FLD. */
5458
5459 static void
5460 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5461 {
5462 if (DECL_P (t))
5463 {
5464 fld->decls.safe_push (t);
5465 if (debug_info_level > DINFO_LEVEL_TERSE)
5466 save_debug_info_for_decl (t);
5467 }
5468 else if (TYPE_P (t))
5469 {
5470 fld->types.safe_push (t);
5471 if (debug_info_level > DINFO_LEVEL_TERSE)
5472 save_debug_info_for_type (t);
5473 }
5474 else
5475 gcc_unreachable ();
5476 }
5477
5478 /* Push tree node T into FLD->WORKLIST. */
5479
5480 static inline void
5481 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5482 {
5483 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5484 fld->worklist.safe_push ((t));
5485 }
5486
5487
5488 /* Operand callback helper for free_lang_data_in_node. *TP is the
5489 subtree operand being considered. */
5490
5491 static tree
5492 find_decls_types_r (tree *tp, int *ws, void *data)
5493 {
5494 tree t = *tp;
5495 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5496
5497 if (TREE_CODE (t) == TREE_LIST)
5498 return NULL_TREE;
5499
5500 /* Language specific nodes will be removed, so there is no need
5501 to gather anything under them. */
5502 if (is_lang_specific (t))
5503 {
5504 *ws = 0;
5505 return NULL_TREE;
5506 }
5507
5508 if (DECL_P (t))
5509 {
5510 /* Note that walk_tree does not traverse every possible field in
5511 decls, so we have to do our own traversals here. */
5512 add_tree_to_fld_list (t, fld);
5513
5514 fld_worklist_push (DECL_NAME (t), fld);
5515 fld_worklist_push (DECL_CONTEXT (t), fld);
5516 fld_worklist_push (DECL_SIZE (t), fld);
5517 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5518
5519 /* We are going to remove everything under DECL_INITIAL for
5520 TYPE_DECLs. No point walking them. */
5521 if (TREE_CODE (t) != TYPE_DECL)
5522 fld_worklist_push (DECL_INITIAL (t), fld);
5523
5524 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5525 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5526
5527 if (TREE_CODE (t) == FUNCTION_DECL)
5528 {
5529 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5530 fld_worklist_push (DECL_RESULT (t), fld);
5531 }
5532 else if (TREE_CODE (t) == TYPE_DECL)
5533 {
5534 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5535 }
5536 else if (TREE_CODE (t) == FIELD_DECL)
5537 {
5538 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5539 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5540 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5541 fld_worklist_push (DECL_FCONTEXT (t), fld);
5542 }
5543
5544 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5545 && DECL_HAS_VALUE_EXPR_P (t))
5546 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5547
5548 if (TREE_CODE (t) != FIELD_DECL
5549 && TREE_CODE (t) != TYPE_DECL)
5550 fld_worklist_push (TREE_CHAIN (t), fld);
5551 *ws = 0;
5552 }
5553 else if (TYPE_P (t))
5554 {
5555 /* Note that walk_tree does not traverse every possible field in
5556 types, so we have to do our own traversals here. */
5557 add_tree_to_fld_list (t, fld);
5558
5559 if (!RECORD_OR_UNION_TYPE_P (t))
5560 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5561 fld_worklist_push (TYPE_SIZE (t), fld);
5562 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5563 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5564 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5565 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5566 fld_worklist_push (TYPE_NAME (t), fld);
5567 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5568 them and thus do not want to reach unused pointer types
5569 this way. */
5570 if (!POINTER_TYPE_P (t))
5571 fld_worklist_push (TYPE_MINVAL (t), fld);
5572 if (!RECORD_OR_UNION_TYPE_P (t))
5573 fld_worklist_push (TYPE_MAXVAL (t), fld);
5574 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5575 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5576 do not want to reach unused variants this way. */
5577 if (TYPE_CONTEXT (t))
5578 {
5579 tree ctx = TYPE_CONTEXT (t);
5580 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5581 So push that instead. */
5582 while (ctx && TREE_CODE (ctx) == BLOCK)
5583 ctx = BLOCK_SUPERCONTEXT (ctx);
5584 fld_worklist_push (ctx, fld);
5585 }
5586 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5587 want to reach unused types this way. */
5588
5589 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5590 {
5591 unsigned i;
5592 tree tem;
5593 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5594 fld_worklist_push (TREE_TYPE (tem), fld);
5595 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5596 if (tem
5597 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5598 && TREE_CODE (tem) == TREE_LIST)
5599 do
5600 {
5601 fld_worklist_push (TREE_VALUE (tem), fld);
5602 tem = TREE_CHAIN (tem);
5603 }
5604 while (tem);
5605 }
5606 if (RECORD_OR_UNION_TYPE_P (t))
5607 {
5608 tree tem;
5609 /* Push all TYPE_FIELDS - interesting and non-interesting
5610 things can be interleaved. */
5611 tem = TYPE_FIELDS (t);
5612 while (tem)
5613 {
5614 if (TREE_CODE (tem) == FIELD_DECL
5615 || TREE_CODE (tem) == TYPE_DECL)
5616 fld_worklist_push (tem, fld);
5617 tem = TREE_CHAIN (tem);
5618 }
5619 }
5620
5621 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5622 *ws = 0;
5623 }
5624 else if (TREE_CODE (t) == BLOCK)
5625 {
5626 tree tem;
5627 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5628 fld_worklist_push (tem, fld);
5629 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5630 fld_worklist_push (tem, fld);
5631 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5632 }
5633
5634 if (TREE_CODE (t) != IDENTIFIER_NODE
5635 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5636 fld_worklist_push (TREE_TYPE (t), fld);
5637
5638 return NULL_TREE;
5639 }
5640
5641
5642 /* Find decls and types in T. */
5643
5644 static void
5645 find_decls_types (tree t, struct free_lang_data_d *fld)
5646 {
5647 while (1)
5648 {
5649 if (!fld->pset->contains (t))
5650 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5651 if (fld->worklist.is_empty ())
5652 break;
5653 t = fld->worklist.pop ();
5654 }
5655 }
5656
5657 /* Translate all the types in LIST with the corresponding runtime
5658 types. */
5659
5660 static tree
5661 get_eh_types_for_runtime (tree list)
5662 {
5663 tree head, prev;
5664
5665 if (list == NULL_TREE)
5666 return NULL_TREE;
5667
5668 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5669 prev = head;
5670 list = TREE_CHAIN (list);
5671 while (list)
5672 {
5673 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5674 TREE_CHAIN (prev) = n;
5675 prev = TREE_CHAIN (prev);
5676 list = TREE_CHAIN (list);
5677 }
5678
5679 return head;
5680 }
5681
5682
5683 /* Find decls and types referenced in EH region R and store them in
5684 FLD->DECLS and FLD->TYPES. */
5685
5686 static void
5687 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5688 {
5689 switch (r->type)
5690 {
5691 case ERT_CLEANUP:
5692 break;
5693
5694 case ERT_TRY:
5695 {
5696 eh_catch c;
5697
5698 /* The types referenced in each catch must first be changed to the
5699 EH types used at runtime. This removes references to FE types
5700 in the region. */
5701 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5702 {
5703 c->type_list = get_eh_types_for_runtime (c->type_list);
5704 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5705 }
5706 }
5707 break;
5708
5709 case ERT_ALLOWED_EXCEPTIONS:
5710 r->u.allowed.type_list
5711 = get_eh_types_for_runtime (r->u.allowed.type_list);
5712 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5713 break;
5714
5715 case ERT_MUST_NOT_THROW:
5716 walk_tree (&r->u.must_not_throw.failure_decl,
5717 find_decls_types_r, fld, fld->pset);
5718 break;
5719 }
5720 }
5721
5722
5723 /* Find decls and types referenced in cgraph node N and store them in
5724 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5725 look for *every* kind of DECL and TYPE node reachable from N,
5726 including those embedded inside types and decls (i.e., TYPE_DECLs,
5727 NAMESPACE_DECLs, etc). */
5728
5729 static void
5730 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5731 {
5732 basic_block bb;
5733 struct function *fn;
5734 unsigned ix;
5735 tree t;
5736
5737 find_decls_types (n->decl, fld);
5738
5739 if (!gimple_has_body_p (n->decl))
5740 return;
5741
5742 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5743
5744 fn = DECL_STRUCT_FUNCTION (n->decl);
5745
5746 /* Traverse locals. */
5747 FOR_EACH_LOCAL_DECL (fn, ix, t)
5748 find_decls_types (t, fld);
5749
5750 /* Traverse EH regions in FN. */
5751 {
5752 eh_region r;
5753 FOR_ALL_EH_REGION_FN (r, fn)
5754 find_decls_types_in_eh_region (r, fld);
5755 }
5756
5757 /* Traverse every statement in FN. */
5758 FOR_EACH_BB_FN (bb, fn)
5759 {
5760 gphi_iterator psi;
5761 gimple_stmt_iterator si;
5762 unsigned i;
5763
5764 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5765 {
5766 gphi *phi = psi.phi ();
5767
5768 for (i = 0; i < gimple_phi_num_args (phi); i++)
5769 {
5770 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5771 find_decls_types (*arg_p, fld);
5772 }
5773 }
5774
5775 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5776 {
5777 gimple stmt = gsi_stmt (si);
5778
5779 if (is_gimple_call (stmt))
5780 find_decls_types (gimple_call_fntype (stmt), fld);
5781
5782 for (i = 0; i < gimple_num_ops (stmt); i++)
5783 {
5784 tree arg = gimple_op (stmt, i);
5785 find_decls_types (arg, fld);
5786 }
5787 }
5788 }
5789 }
5790
5791
5792 /* Find decls and types referenced in varpool node N and store them in
5793 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5794 look for *every* kind of DECL and TYPE node reachable from N,
5795 including those embedded inside types and decls (i.e., TYPE_DECLs,
5796 NAMESPACE_DECLs, etc). */
5797
5798 static void
5799 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5800 {
5801 find_decls_types (v->decl, fld);
5802 }
5803
5804 /* If T needs an assembler name, have one created for it. */
5805
5806 void
5807 assign_assembler_name_if_neeeded (tree t)
5808 {
5809 if (need_assembler_name_p (t))
5810 {
5811 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5812 diagnostics that use input_location to show locus
5813 information. The problem here is that, at this point,
5814 input_location is generally anchored to the end of the file
5815 (since the parser is long gone), so we don't have a good
5816 position to pin it to.
5817
5818 To alleviate this problem, this uses the location of T's
5819 declaration. Examples of this are
5820 testsuite/g++.dg/template/cond2.C and
5821 testsuite/g++.dg/template/pr35240.C. */
5822 location_t saved_location = input_location;
5823 input_location = DECL_SOURCE_LOCATION (t);
5824
5825 decl_assembler_name (t);
5826
5827 input_location = saved_location;
5828 }
5829 }
5830
5831
5832 /* Free language specific information for every operand and expression
5833 in every node of the call graph. This process operates in three stages:
5834
5835 1- Every callgraph node and varpool node is traversed looking for
5836 decls and types embedded in them. This is a more exhaustive
5837 search than that done by find_referenced_vars, because it will
5838 also collect individual fields, decls embedded in types, etc.
5839
5840 2- All the decls found are sent to free_lang_data_in_decl.
5841
5842 3- All the types found are sent to free_lang_data_in_type.
5843
5844 The ordering between decls and types is important because
5845 free_lang_data_in_decl sets assembler names, which includes
5846 mangling. So types cannot be freed up until assembler names have
5847 been set up. */
5848
5849 static void
5850 free_lang_data_in_cgraph (void)
5851 {
5852 struct cgraph_node *n;
5853 varpool_node *v;
5854 struct free_lang_data_d fld;
5855 tree t;
5856 unsigned i;
5857 alias_pair *p;
5858
5859 /* Initialize sets and arrays to store referenced decls and types. */
5860 fld.pset = new hash_set<tree>;
5861 fld.worklist.create (0);
5862 fld.decls.create (100);
5863 fld.types.create (100);
5864
5865 /* Find decls and types in the body of every function in the callgraph. */
5866 FOR_EACH_FUNCTION (n)
5867 find_decls_types_in_node (n, &fld);
5868
5869 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5870 find_decls_types (p->decl, &fld);
5871
5872 /* Find decls and types in every varpool symbol. */
5873 FOR_EACH_VARIABLE (v)
5874 find_decls_types_in_var (v, &fld);
5875
5876 /* Set the assembler name on every decl found. We need to do this
5877 now because free_lang_data_in_decl will invalidate data needed
5878 for mangling. This breaks mangling on interdependent decls. */
5879 FOR_EACH_VEC_ELT (fld.decls, i, t)
5880 assign_assembler_name_if_neeeded (t);
5881
5882 /* Traverse every decl found freeing its language data. */
5883 FOR_EACH_VEC_ELT (fld.decls, i, t)
5884 free_lang_data_in_decl (t);
5885
5886 /* Traverse every type found freeing its language data. */
5887 FOR_EACH_VEC_ELT (fld.types, i, t)
5888 free_lang_data_in_type (t);
5889 #ifdef ENABLE_CHECKING
5890 FOR_EACH_VEC_ELT (fld.types, i, t)
5891 verify_type (t);
5892 #endif
5893
5894 delete fld.pset;
5895 fld.worklist.release ();
5896 fld.decls.release ();
5897 fld.types.release ();
5898 }
5899
5900
5901 /* Free resources that are used by the front end but are not needed once it is done. */
5902
5903 static unsigned
5904 free_lang_data (void)
5905 {
5906 unsigned i;
5907
5908 /* If we are the LTO frontend we have freed lang-specific data already. */
5909 if (in_lto_p
5910 || (!flag_generate_lto && !flag_generate_offload))
5911 return 0;
5912
5913 /* Allocate and assign alias sets to the standard integer types
5914 while the slots still hold the types as the frontends generated them. */
5915 for (i = 0; i < itk_none; ++i)
5916 if (integer_types[i])
5917 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5918
5919 /* Traverse the IL resetting language specific information for
5920 operands, expressions, etc. */
5921 free_lang_data_in_cgraph ();
5922
5923 /* Create gimple variants for common types. */
5924 ptrdiff_type_node = integer_type_node;
5925 fileptr_type_node = ptr_type_node;
5926
5927 /* Reset some langhooks. Do not reset types_compatible_p, it may
5928 still be used indirectly via the get_alias_set langhook. */
5929 lang_hooks.dwarf_name = lhd_dwarf_name;
5930 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5931 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5932
5933 /* We do not want the default decl_assembler_name implementation;
5934 rather, once we have fixed everything, we want a wrapper around it
5935 asserting that all non-local symbols already got their assembler
5936 name and only producing assembler names for local symbols.  Or,
5937 better, make sure we never call decl_assembler_name on local
5938 symbols and devise a separate, middle-end private scheme for it. */
5939
5940 /* Reset diagnostic machinery. */
5941 tree_diagnostics_defaults (global_dc);
5942
5943 return 0;
5944 }
5945
5946
5947 namespace {
5948
5949 const pass_data pass_data_ipa_free_lang_data =
5950 {
5951 SIMPLE_IPA_PASS, /* type */
5952 "*free_lang_data", /* name */
5953 OPTGROUP_NONE, /* optinfo_flags */
5954 TV_IPA_FREE_LANG_DATA, /* tv_id */
5955 0, /* properties_required */
5956 0, /* properties_provided */
5957 0, /* properties_destroyed */
5958 0, /* todo_flags_start */
5959 0, /* todo_flags_finish */
5960 };
5961
5962 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5963 {
5964 public:
5965 pass_ipa_free_lang_data (gcc::context *ctxt)
5966 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5967 {}
5968
5969 /* opt_pass methods: */
5970 virtual unsigned int execute (function *) { return free_lang_data (); }
5971
5972 }; // class pass_ipa_free_lang_data
5973
5974 } // anon namespace
5975
5976 simple_ipa_opt_pass *
5977 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5978 {
5979 return new pass_ipa_free_lang_data (ctxt);
5980 }
5981
5982 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5983 ATTR_NAME. Also used internally by remove_attribute(). */
5984 bool
5985 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5986 {
5987 size_t ident_len = IDENTIFIER_LENGTH (ident);
5988
5989 if (ident_len == attr_len)
5990 {
5991 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5992 return true;
5993 }
5994 else if (ident_len == attr_len + 4)
5995 {
5996 /* There is the possibility that ATTR is 'text' and IDENT is
5997 '__text__'. */
5998 const char *p = IDENTIFIER_POINTER (ident);
5999 if (p[0] == '_' && p[1] == '_'
6000 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6001 && strncmp (attr_name, p + 2, attr_len) == 0)
6002 return true;
6003 }
6004
6005 return false;
6006 }
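/* Illustrative sketch (added for exposition, not part of the original
   sources): given an identifier built with get_identifier, both the plain
   and the underscore-wrapped spellings match the same attribute name:

     tree id = get_identifier ("__noreturn__");
     bool found = private_is_attribute_p ("noreturn", strlen ("noreturn"), id);

   Here FOUND is true, since '__noreturn__' is the wrapped form of
   'noreturn'.  */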
6007
6008 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6009 of ATTR_NAME, and LIST is not NULL_TREE. */
6010 tree
6011 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6012 {
6013 while (list)
6014 {
6015 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6016
6017 if (ident_len == attr_len)
6018 {
6019 if (!strcmp (attr_name,
6020 IDENTIFIER_POINTER (get_attribute_name (list))))
6021 break;
6022 }
6023 /* TODO: If we made sure that attributes were stored in the
6024 canonical form without '__...__' (ie, as in 'text' as opposed
6025 to '__text__') then we could avoid the following case. */
6026 else if (ident_len == attr_len + 4)
6027 {
6028 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6029 if (p[0] == '_' && p[1] == '_'
6030 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6031 && strncmp (attr_name, p + 2, attr_len) == 0)
6032 break;
6033 }
6034 list = TREE_CHAIN (list);
6035 }
6036
6037 return list;
6038 }
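/* Usage sketch (exposition only): callers normally go through the
   lookup_attribute wrapper declared in tree.h, which computes the length
   with strlen and handles an empty list, e.g. for a hypothetical DECL:

     tree attrs = DECL_ATTRIBUTES (decl);
     if (lookup_attribute ("noinline", attrs))
       mark_the_decl_somehow (decl);

   where mark_the_decl_somehow stands for whatever the caller wants to do
   when the 'noinline' (or '__noinline__') attribute is present.  */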
6039
6040 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6041 return the first element of LIST whose attribute name starts with
6042 ATTR_NAME.  ATTR_NAME must be in the form 'text' (not
6043 '__text__'). */
6044
6045 tree
6046 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6047 tree list)
6048 {
6049 while (list)
6050 {
6051 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6052
6053 if (attr_len > ident_len)
6054 {
6055 list = TREE_CHAIN (list);
6056 continue;
6057 }
6058
6059 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6060
6061 if (strncmp (attr_name, p, attr_len) == 0)
6062 break;
6063
6064 /* TODO: If we made sure that attributes were stored in the
6065 canonical form without '__...__' (ie, as in 'text' as opposed
6066 to '__text__') then we could avoid the following case. */
6067 if (p[0] == '_' && p[1] == '_'
6068 && strncmp (attr_name, p + 2, attr_len) == 0)
6069 break;
6070
6071 list = TREE_CHAIN (list);
6072 }
6073
6074 return list;
6075 }
6076
6077
6078 /* A variant of lookup_attribute() that can be used with an identifier
6079 as the first argument, and where the identifier can be either
6080 'text' or '__text__'.
6081
6082 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6083 return a pointer to the attribute's list element if the attribute
6084 is part of the list, or NULL_TREE if not found. If the attribute
6085 appears more than once, this only returns the first occurrence; the
6086 TREE_CHAIN of the return value should be passed back in if further
6087 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6088 can be in the form 'text' or '__text__'. */
6089 static tree
6090 lookup_ident_attribute (tree attr_identifier, tree list)
6091 {
6092 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6093
6094 while (list)
6095 {
6096 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6097 == IDENTIFIER_NODE);
6098
6099 if (cmp_attrib_identifiers (attr_identifier,
6100 get_attribute_name (list)))
6101 /* Found it. */
6102 break;
6103 list = TREE_CHAIN (list);
6104 }
6105
6106 return list;
6107 }
6108
6109 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6110 modified list. */
6111
6112 tree
6113 remove_attribute (const char *attr_name, tree list)
6114 {
6115 tree *p;
6116 size_t attr_len = strlen (attr_name);
6117
6118 gcc_checking_assert (attr_name[0] != '_');
6119
6120 for (p = &list; *p; )
6121 {
6122 tree l = *p;
6123 /* TODO: If we were storing attributes in normalized form, here
6124 we could use a simple strcmp(). */
6125 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6126 *p = TREE_CHAIN (l);
6127 else
6128 p = &TREE_CHAIN (l);
6129 }
6130
6131 return list;
6132 }
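/* Usage sketch (exposition only, FNDECL being a hypothetical declaration):
   dropping one attribute while keeping the rest of the chain intact:

     DECL_ATTRIBUTES (fndecl)
       = remove_attribute ("always_inline", DECL_ATTRIBUTES (fndecl));  */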
6133
6134 /* Return an attribute list that is the union of A1 and A2. */
6135
6136 tree
6137 merge_attributes (tree a1, tree a2)
6138 {
6139 tree attributes;
6140
6141 /* Either one unset? Take the set one. */
6142
6143 if ((attributes = a1) == 0)
6144 attributes = a2;
6145
6146 /* One that completely contains the other? Take it. */
6147
6148 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6149 {
6150 if (attribute_list_contained (a2, a1))
6151 attributes = a2;
6152 else
6153 {
6154 /* Pick the longest list, and merge the other list into it. */
6155
6156 if (list_length (a1) < list_length (a2))
6157 attributes = a2, a2 = a1;
6158
6159 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6160 {
6161 tree a;
6162 for (a = lookup_ident_attribute (get_attribute_name (a2),
6163 attributes);
6164 a != NULL_TREE && !attribute_value_equal (a, a2);
6165 a = lookup_ident_attribute (get_attribute_name (a2),
6166 TREE_CHAIN (a)))
6167 ;
6168 if (a == NULL_TREE)
6169 {
6170 a1 = copy_node (a2);
6171 TREE_CHAIN (a1) = attributes;
6172 attributes = a1;
6173 }
6174 }
6175 }
6176 }
6177 return attributes;
6178 }
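/* Behavior sketch (exposition only): the merge is a union keyed on
   attribute name and value, so duplicates are not introduced:

     tree a1 = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
     tree a2 = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
     tree m = merge_attributes (a1, a2);

   Here list_length (m) is 1, because A2 is already contained in A1.  */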
6179
6180 /* Given types T1 and T2, merge their attributes and return
6181 the result. */
6182
6183 tree
6184 merge_type_attributes (tree t1, tree t2)
6185 {
6186 return merge_attributes (TYPE_ATTRIBUTES (t1),
6187 TYPE_ATTRIBUTES (t2));
6188 }
6189
6190 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6191 the result. */
6192
6193 tree
6194 merge_decl_attributes (tree olddecl, tree newdecl)
6195 {
6196 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6197 DECL_ATTRIBUTES (newdecl));
6198 }
6199
6200 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6201
6202 /* Specialization of merge_decl_attributes for various Windows targets.
6203
6204 This handles the following situation:
6205
6206 __declspec (dllimport) int foo;
6207 int foo;
6208
6209 The second instance of `foo' nullifies the dllimport. */
6210
6211 tree
6212 merge_dllimport_decl_attributes (tree old, tree new_tree)
6213 {
6214 tree a;
6215 int delete_dllimport_p = 1;
6216
6217 /* What we need to do here is remove dllimport from `old' if it doesn't
6218 appear in `new'.  dllimport behaves like extern: if a declaration is
6219 marked dllimport and a definition appears later, then the object
6220 is not dllimport'd. We also remove a `new' dllimport if the old list
6221 contains dllexport: dllexport always overrides dllimport, regardless
6222 of the order of declaration. */
6223 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6224 delete_dllimport_p = 0;
6225 else if (DECL_DLLIMPORT_P (new_tree)
6226 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6227 {
6228 DECL_DLLIMPORT_P (new_tree) = 0;
6229 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6230 "dllimport ignored", new_tree);
6231 }
6232 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6233 {
6234 /* Warn about overriding a symbol that has already been used, e.g.:
6235 extern int __attribute__ ((dllimport)) foo;
6236 int* bar () {return &foo;}
6237 int foo;
6238 */
6239 if (TREE_USED (old))
6240 {
6241 warning (0, "%q+D redeclared without dllimport attribute "
6242 "after being referenced with dll linkage", new_tree);
6243 /* If we have used a variable's address with dllimport linkage,
6244 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6245 decl may already have had TREE_CONSTANT computed.
6246 We still remove the attribute so that assembler code refers
6247 to '&foo' rather than '_imp__foo'. */
6248 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6249 DECL_DLLIMPORT_P (new_tree) = 1;
6250 }
6251
6252 /* Let an inline definition silently override the external reference,
6253 but otherwise warn about attribute inconsistency. */
6254 else if (TREE_CODE (new_tree) == VAR_DECL
6255 || !DECL_DECLARED_INLINE_P (new_tree))
6256 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6257 "previous dllimport ignored", new_tree);
6258 }
6259 else
6260 delete_dllimport_p = 0;
6261
6262 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6263
6264 if (delete_dllimport_p)
6265 a = remove_attribute ("dllimport", a);
6266
6267 return a;
6268 }
6269
6270 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6271 struct attribute_spec.handler. */
6272
6273 tree
6274 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6275 bool *no_add_attrs)
6276 {
6277 tree node = *pnode;
6278 bool is_dllimport;
6279
6280 /* These attributes may apply to structure and union types being created,
6281 but otherwise should pass to the declaration involved. */
6282 if (!DECL_P (node))
6283 {
6284 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6285 | (int) ATTR_FLAG_ARRAY_NEXT))
6286 {
6287 *no_add_attrs = true;
6288 return tree_cons (name, args, NULL_TREE);
6289 }
6290 if (TREE_CODE (node) == RECORD_TYPE
6291 || TREE_CODE (node) == UNION_TYPE)
6292 {
6293 node = TYPE_NAME (node);
6294 if (!node)
6295 return NULL_TREE;
6296 }
6297 else
6298 {
6299 warning (OPT_Wattributes, "%qE attribute ignored",
6300 name);
6301 *no_add_attrs = true;
6302 return NULL_TREE;
6303 }
6304 }
6305
6306 if (TREE_CODE (node) != FUNCTION_DECL
6307 && TREE_CODE (node) != VAR_DECL
6308 && TREE_CODE (node) != TYPE_DECL)
6309 {
6310 *no_add_attrs = true;
6311 warning (OPT_Wattributes, "%qE attribute ignored",
6312 name);
6313 return NULL_TREE;
6314 }
6315
6316 if (TREE_CODE (node) == TYPE_DECL
6317 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6318 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6319 {
6320 *no_add_attrs = true;
6321 warning (OPT_Wattributes, "%qE attribute ignored",
6322 name);
6323 return NULL_TREE;
6324 }
6325
6326 is_dllimport = is_attribute_p ("dllimport", name);
6327
6328 /* Report error on dllimport ambiguities seen now before they cause
6329 any damage. */
6330 if (is_dllimport)
6331 {
6332 /* Honor any target-specific overrides. */
6333 if (!targetm.valid_dllimport_attribute_p (node))
6334 *no_add_attrs = true;
6335
6336 else if (TREE_CODE (node) == FUNCTION_DECL
6337 && DECL_DECLARED_INLINE_P (node))
6338 {
6339 warning (OPT_Wattributes, "inline function %q+D declared as "
6340 " dllimport: attribute ignored", node);
6341 *no_add_attrs = true;
6342 }
6343 /* Like MS, treat a definition of a dllimport'd variable or of a
6344 non-inlined function as an error. */
6345 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6346 {
6347 error ("function %q+D definition is marked dllimport", node);
6348 *no_add_attrs = true;
6349 }
6350
6351 else if (TREE_CODE (node) == VAR_DECL)
6352 {
6353 if (DECL_INITIAL (node))
6354 {
6355 error ("variable %q+D definition is marked dllimport",
6356 node);
6357 *no_add_attrs = true;
6358 }
6359
6360 /* `extern' needn't be specified with dllimport.
6361 Specify `extern' now and hope for the best. Sigh. */
6362 DECL_EXTERNAL (node) = 1;
6363 /* Also, implicitly give global scope to dllimport'd variables
6364 declared within a function, unless declared static. */
6365 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6366 TREE_PUBLIC (node) = 1;
6367 }
6368
6369 if (*no_add_attrs == false)
6370 DECL_DLLIMPORT_P (node) = 1;
6371 }
6372 else if (TREE_CODE (node) == FUNCTION_DECL
6373 && DECL_DECLARED_INLINE_P (node)
6374 && flag_keep_inline_dllexport)
6375 /* An exported function, even if inline, must be emitted. */
6376 DECL_EXTERNAL (node) = 0;
6377
6378 /* Report error if symbol is not accessible at global scope. */
6379 if (!TREE_PUBLIC (node)
6380 && (TREE_CODE (node) == VAR_DECL
6381 || TREE_CODE (node) == FUNCTION_DECL))
6382 {
6383 error ("external linkage required for symbol %q+D because of "
6384 "%qE attribute", node, name);
6385 *no_add_attrs = true;
6386 }
6387
6388 /* A dllexport'd entity must have default visibility so that other
6389 program units (shared libraries or the main executable) can see
6390 it. A dllimport'd entity must have default visibility so that
6391 the linker knows that undefined references within this program
6392 unit can be resolved by the dynamic linker. */
6393 if (!*no_add_attrs)
6394 {
6395 if (DECL_VISIBILITY_SPECIFIED (node)
6396 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6397 error ("%qE implies default visibility, but %qD has already "
6398 "been declared with a different visibility",
6399 name, node);
6400 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6401 DECL_VISIBILITY_SPECIFIED (node) = 1;
6402 }
6403
6404 return NULL_TREE;
6405 }
6406
6407 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6408 \f
6409 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6410 of the various TYPE_QUAL values. */
6411
6412 static void
6413 set_type_quals (tree type, int type_quals)
6414 {
6415 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6416 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6417 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6418 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6419 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6420 }
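/* Sketch (exposition only, T being a hypothetical type node): the
   qualifier bits map directly onto the type flags, so after

     set_type_quals (t, TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   TYPE_READONLY (t) and TYPE_VOLATILE (t) are set while TYPE_RESTRICT (t)
   and TYPE_ATOMIC (t) are clear.  */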
6421
6422 /* Returns true iff unqualified CAND and BASE are equivalent. */
6423
6424 bool
6425 check_base_type (const_tree cand, const_tree base)
6426 {
6427 return (TYPE_NAME (cand) == TYPE_NAME (base)
6428 /* Apparently this is needed for Objective-C. */
6429 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6430 /* Check alignment. */
6431 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6432 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6433 TYPE_ATTRIBUTES (base)));
6434 }
6435
6436 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6437
6438 bool
6439 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6440 {
6441 return (TYPE_QUALS (cand) == type_quals
6442 && check_base_type (cand, base));
6443 }
6444
6445 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6446
6447 static bool
6448 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6449 {
6450 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6451 && TYPE_NAME (cand) == TYPE_NAME (base)
6452 /* Apparently this is needed for Objective-C. */
6453 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6454 /* Check alignment. */
6455 && TYPE_ALIGN (cand) == align
6456 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6457 TYPE_ATTRIBUTES (base)));
6458 }
6459
6460 /* This function checks whether the size of TYPE matches one of the
6461 built-in atomic types, and returns that core atomic type. */
6462
6463 static tree
6464 find_atomic_core_type (tree type)
6465 {
6466 tree base_atomic_type;
6467
6468 /* Only handle complete types. */
6469 if (TYPE_SIZE (type) == NULL_TREE)
6470 return NULL_TREE;
6471
6472 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6473 switch (type_size)
6474 {
6475 case 8:
6476 base_atomic_type = atomicQI_type_node;
6477 break;
6478
6479 case 16:
6480 base_atomic_type = atomicHI_type_node;
6481 break;
6482
6483 case 32:
6484 base_atomic_type = atomicSI_type_node;
6485 break;
6486
6487 case 64:
6488 base_atomic_type = atomicDI_type_node;
6489 break;
6490
6491 case 128:
6492 base_atomic_type = atomicTI_type_node;
6493 break;
6494
6495 default:
6496 base_atomic_type = NULL_TREE;
6497 }
6498
6499 return base_atomic_type;
6500 }
6501
6502 /* Return a version of the TYPE, qualified as indicated by the
6503 TYPE_QUALS, if one exists. If no qualified version exists yet,
6504 return NULL_TREE. */
6505
6506 tree
6507 get_qualified_type (tree type, int type_quals)
6508 {
6509 tree t;
6510
6511 if (TYPE_QUALS (type) == type_quals)
6512 return type;
6513
6514 /* Search the chain of variants to see if there is already one there just
6515 like the one we need to have. If so, use that existing one. We must
6516 preserve the TYPE_NAME, since there is code that depends on this. */
6517 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6518 if (check_qualified_type (t, type, type_quals))
6519 return t;
6520
6521 return NULL_TREE;
6522 }
6523
6524 /* Like get_qualified_type, but creates the type if it does not
6525 exist. This function never returns NULL_TREE. */
6526
6527 tree
6528 build_qualified_type (tree type, int type_quals)
6529 {
6530 tree t;
6531
6532 /* See if we already have the appropriate qualified variant. */
6533 t = get_qualified_type (type, type_quals);
6534
6535 /* If not, build it. */
6536 if (!t)
6537 {
6538 t = build_variant_type_copy (type);
6539 set_type_quals (t, type_quals);
6540
6541 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6542 {
6543 /* See if this object can map to a basic atomic type. */
6544 tree atomic_type = find_atomic_core_type (type);
6545 if (atomic_type)
6546 {
6547 /* Ensure the alignment of this type is compatible with
6548 the required alignment of the atomic type. */
6549 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6550 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6551 }
6552 }
6553
6554 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6555 /* Propagate structural equality. */
6556 SET_TYPE_STRUCTURAL_EQUALITY (t);
6557 else if (TYPE_CANONICAL (type) != type)
6558 /* Build the underlying canonical type, since it is different
6559 from TYPE. */
6560 {
6561 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6562 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6563 }
6564 else
6565 /* T is its own canonical type. */
6566 TYPE_CANONICAL (t) = t;
6567
6568 }
6569
6570 return t;
6571 }
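/* Usage sketch (exposition only): requesting a 'const' variant of int;
   the variant is shared, so asking twice yields the same node:

     tree c1 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     tree c2 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);

   Here c1 == c2 and TYPE_MAIN_VARIANT (c1) == integer_type_node.  */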
6572
6573 /* Create a variant of TYPE with alignment ALIGN. */
6574
6575 tree
6576 build_aligned_type (tree type, unsigned int align)
6577 {
6578 tree t;
6579
6580 if (TYPE_PACKED (type)
6581 || TYPE_ALIGN (type) == align)
6582 return type;
6583
6584 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6585 if (check_aligned_type (t, type, align))
6586 return t;
6587
6588 t = build_variant_type_copy (type);
6589 TYPE_ALIGN (t) = align;
6590
6591 return t;
6592 }
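/* Usage sketch (exposition only): an over-aligned variant of int, with the
   alignment given in bits as TYPE_ALIGN expects:

     tree aligned_int = build_aligned_type (integer_type_node, 128);

   Here TYPE_ALIGN (aligned_int) is 128 while integer_type_node itself is
   left untouched.  */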
6593
6594 /* Create a new distinct copy of TYPE. The new type is made its own
6595 MAIN_VARIANT. If TYPE requires structural equality checks, the
6596 resulting type requires structural equality checks; otherwise, its
6597 TYPE_CANONICAL points to itself. */
6598
6599 tree
6600 build_distinct_type_copy (tree type)
6601 {
6602 tree t = copy_node (type);
6603
6604 TYPE_POINTER_TO (t) = 0;
6605 TYPE_REFERENCE_TO (t) = 0;
6606
6607 /* Set the canonical type either to a new equivalence class, or
6608 propagate the need for structural equality checks. */
6609 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6610 SET_TYPE_STRUCTURAL_EQUALITY (t);
6611 else
6612 TYPE_CANONICAL (t) = t;
6613
6614 /* Make it its own variant. */
6615 TYPE_MAIN_VARIANT (t) = t;
6616 TYPE_NEXT_VARIANT (t) = 0;
6617
6618 /* We do not record methods in type copies or variants,
6619 so we do not need to keep them up to date when a new method
6620 is inserted. */
6621 if (RECORD_OR_UNION_TYPE_P (t))
6622 TYPE_METHODS (t) = NULL_TREE;
6623
6624 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6625 whose TREE_TYPE is not t. This can also happen in the Ada
6626 frontend when using subtypes. */
6627
6628 return t;
6629 }
6630
6631 /* Create a new variant of TYPE, equivalent but distinct. This is so
6632 the caller can modify it. TYPE_CANONICAL for the return type will
6633 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6634 are considered equal by the language itself (or that both types
6635 require structural equality checks). */
6636
6637 tree
6638 build_variant_type_copy (tree type)
6639 {
6640 tree t, m = TYPE_MAIN_VARIANT (type);
6641
6642 t = build_distinct_type_copy (type);
6643
6644 /* Since we're building a variant, assume that it is a non-semantic
6645 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6646 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6647
6648 /* Add the new type to the chain of variants of TYPE. */
6649 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6650 TYPE_NEXT_VARIANT (m) = t;
6651 TYPE_MAIN_VARIANT (t) = m;
6652
6653 return t;
6654 }
6655 \f
6656 /* Return true if the from trees in both tree maps are equal. */
6657
6658 int
6659 tree_map_base_eq (const void *va, const void *vb)
6660 {
6661 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6662 *const b = (const struct tree_map_base *) vb;
6663 return (a->from == b->from);
6664 }
6665
6666 /* Hash a from tree in a tree_map_base. */
6667
6668 unsigned int
6669 tree_map_base_hash (const void *item)
6670 {
6671 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6672 }
6673
6674 /* Return true if this tree map structure is marked for garbage collection
6675 purposes. We simply return true if the from tree is marked, so that this
6676 structure goes away when the from tree goes away. */
6677
6678 int
6679 tree_map_base_marked_p (const void *p)
6680 {
6681 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6682 }
6683
6684 /* Hash a from tree in a tree_map. */
6685
6686 unsigned int
6687 tree_map_hash (const void *item)
6688 {
6689 return (((const struct tree_map *) item)->hash);
6690 }
6691
6692 /* Hash a from tree in a tree_decl_map. */
6693
6694 unsigned int
6695 tree_decl_map_hash (const void *item)
6696 {
6697 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6698 }
6699
6700 /* Return the initialization priority for DECL. */
6701
6702 priority_type
6703 decl_init_priority_lookup (tree decl)
6704 {
6705 symtab_node *snode = symtab_node::get (decl);
6706
6707 if (!snode)
6708 return DEFAULT_INIT_PRIORITY;
6709 return
6710 snode->get_init_priority ();
6711 }
6712
6713 /* Return the finalization priority for DECL. */
6714
6715 priority_type
6716 decl_fini_priority_lookup (tree decl)
6717 {
6718 cgraph_node *node = cgraph_node::get (decl);
6719
6720 if (!node)
6721 return DEFAULT_INIT_PRIORITY;
6722 return
6723 node->get_fini_priority ();
6724 }
6725
6726 /* Set the initialization priority for DECL to PRIORITY. */
6727
6728 void
6729 decl_init_priority_insert (tree decl, priority_type priority)
6730 {
6731 struct symtab_node *snode;
6732
6733 if (priority == DEFAULT_INIT_PRIORITY)
6734 {
6735 snode = symtab_node::get (decl);
6736 if (!snode)
6737 return;
6738 }
6739 else if (TREE_CODE (decl) == VAR_DECL)
6740 snode = varpool_node::get_create (decl);
6741 else
6742 snode = cgraph_node::get_create (decl);
6743 snode->set_init_priority (priority);
6744 }
6745
6746 /* Set the finalization priority for DECL to PRIORITY. */
6747
6748 void
6749 decl_fini_priority_insert (tree decl, priority_type priority)
6750 {
6751 struct cgraph_node *node;
6752
6753 if (priority == DEFAULT_INIT_PRIORITY)
6754 {
6755 node = cgraph_node::get (decl);
6756 if (!node)
6757 return;
6758 }
6759 else
6760 node = cgraph_node::get_create (decl);
6761 node->set_fini_priority (priority);
6762 }
6763
6764 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6765
6766 static void
6767 print_debug_expr_statistics (void)
6768 {
6769 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6770 (long) debug_expr_for_decl->size (),
6771 (long) debug_expr_for_decl->elements (),
6772 debug_expr_for_decl->collisions ());
6773 }
6774
6775 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6776
6777 static void
6778 print_value_expr_statistics (void)
6779 {
6780 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6781 (long) value_expr_for_decl->size (),
6782 (long) value_expr_for_decl->elements (),
6783 value_expr_for_decl->collisions ());
6784 }
6785
6786 /* Lookup a debug expression for FROM, and return it if we find one. */
6787
6788 tree
6789 decl_debug_expr_lookup (tree from)
6790 {
6791 struct tree_decl_map *h, in;
6792 in.base.from = from;
6793
6794 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6795 if (h)
6796 return h->to;
6797 return NULL_TREE;
6798 }
6799
6800 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6801
6802 void
6803 decl_debug_expr_insert (tree from, tree to)
6804 {
6805 struct tree_decl_map *h;
6806
6807 h = ggc_alloc<tree_decl_map> ();
6808 h->base.from = from;
6809 h->to = to;
6810 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6811 }
6812
6813 /* Lookup a value expression for FROM, and return it if we find one. */
6814
6815 tree
6816 decl_value_expr_lookup (tree from)
6817 {
6818 struct tree_decl_map *h, in;
6819 in.base.from = from;
6820
6821 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6822 if (h)
6823 return h->to;
6824 return NULL_TREE;
6825 }
6826
6827 /* Insert a mapping FROM->TO in the value expression hashtable. */
6828
6829 void
6830 decl_value_expr_insert (tree from, tree to)
6831 {
6832 struct tree_decl_map *h;
6833
6834 h = ggc_alloc<tree_decl_map> ();
6835 h->base.from = from;
6836 h->to = to;
6837 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6838 }
6839
6840 /* Lookup a vector of debug arguments for FROM, and return it if we
6841 find one. */
6842
6843 vec<tree, va_gc> **
6844 decl_debug_args_lookup (tree from)
6845 {
6846 struct tree_vec_map *h, in;
6847
6848 if (!DECL_HAS_DEBUG_ARGS_P (from))
6849 return NULL;
6850 gcc_checking_assert (debug_args_for_decl != NULL);
6851 in.base.from = from;
6852 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6853 if (h)
6854 return &h->to;
6855 return NULL;
6856 }
6857
6858 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6859 arguments hashtable. */
6860
6861 vec<tree, va_gc> **
6862 decl_debug_args_insert (tree from)
6863 {
6864 struct tree_vec_map *h;
6865 tree_vec_map **loc;
6866
6867 if (DECL_HAS_DEBUG_ARGS_P (from))
6868 return decl_debug_args_lookup (from);
6869 if (debug_args_for_decl == NULL)
6870 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6871 h = ggc_alloc<tree_vec_map> ();
6872 h->base.from = from;
6873 h->to = NULL;
6874 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6875 *loc = h;
6876 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6877 return &h->to;
6878 }
6879
6880 /* Hashing of types so that we don't make duplicates.
6881 The entry point is `type_hash_canon'. */
6882
6883 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6884 with types in the TREE_VALUE slots), by adding the hash codes
6885 of the individual types. */
6886
6887 static void
6888 type_hash_list (const_tree list, inchash::hash &hstate)
6889 {
6890 const_tree tail;
6891
6892 for (tail = list; tail; tail = TREE_CHAIN (tail))
6893 if (TREE_VALUE (tail) != error_mark_node)
6894 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6895 }
6896
6897 /* These are the Hashtable callback functions. */
6898
6899 /* Returns true iff the types are equivalent. */
6900
6901 bool
6902 type_cache_hasher::equal (type_hash *a, type_hash *b)
6903 {
6904 /* First test the things that are the same for all types. */
6905 if (a->hash != b->hash
6906 || TREE_CODE (a->type) != TREE_CODE (b->type)
6907 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6908 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6909 TYPE_ATTRIBUTES (b->type))
6910 || (TREE_CODE (a->type) != COMPLEX_TYPE
6911 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6912 return 0;
6913
6914 /* Be careful about comparing arrays before and after the element type
6915 has been completed; don't compare TYPE_ALIGN unless both types are
6916 complete. */
6917 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6918 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6919 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6920 return 0;
6921
6922 switch (TREE_CODE (a->type))
6923 {
6924 case VOID_TYPE:
6925 case COMPLEX_TYPE:
6926 case POINTER_TYPE:
6927 case REFERENCE_TYPE:
6928 case NULLPTR_TYPE:
6929 return 1;
6930
6931 case VECTOR_TYPE:
6932 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6933
6934 case ENUMERAL_TYPE:
6935 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6936 && !(TYPE_VALUES (a->type)
6937 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6938 && TYPE_VALUES (b->type)
6939 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6940 && type_list_equal (TYPE_VALUES (a->type),
6941 TYPE_VALUES (b->type))))
6942 return 0;
6943
6944 /* ... fall through ... */
6945
6946 case INTEGER_TYPE:
6947 case REAL_TYPE:
6948 case BOOLEAN_TYPE:
6949 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6950 return false;
6951 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6952 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6953 TYPE_MAX_VALUE (b->type)))
6954 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6955 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6956 TYPE_MIN_VALUE (b->type))));
6957
6958 case FIXED_POINT_TYPE:
6959 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6960
6961 case OFFSET_TYPE:
6962 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6963
6964 case METHOD_TYPE:
6965 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6966 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6967 || (TYPE_ARG_TYPES (a->type)
6968 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6969 && TYPE_ARG_TYPES (b->type)
6970 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6971 && type_list_equal (TYPE_ARG_TYPES (a->type),
6972 TYPE_ARG_TYPES (b->type)))))
6973 break;
6974 return 0;
6975 case ARRAY_TYPE:
6976 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6977
6978 case RECORD_TYPE:
6979 case UNION_TYPE:
6980 case QUAL_UNION_TYPE:
6981 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6982 || (TYPE_FIELDS (a->type)
6983 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6984 && TYPE_FIELDS (b->type)
6985 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6986 && type_list_equal (TYPE_FIELDS (a->type),
6987 TYPE_FIELDS (b->type))));
6988
6989 case FUNCTION_TYPE:
6990 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6991 || (TYPE_ARG_TYPES (a->type)
6992 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6993 && TYPE_ARG_TYPES (b->type)
6994 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6995 && type_list_equal (TYPE_ARG_TYPES (a->type),
6996 TYPE_ARG_TYPES (b->type))))
6997 break;
6998 return 0;
6999
7000 default:
7001 return 0;
7002 }
7003
7004 if (lang_hooks.types.type_hash_eq != NULL)
7005 return lang_hooks.types.type_hash_eq (a->type, b->type);
7006
7007 return 1;
7008 }
7009
7010 /* Given TYPE, and HASHCODE its hash code, return the canonical
7011 object for an identical type if one already exists.
7012 Otherwise, return TYPE, and record it as the canonical object.
7013
7014 To use this function, first create a type of the sort you want.
7015 Then compute its hash code from the fields of the type that
7016 make it different from other similar types.
7017 Then call this function and use the value. */
7018
7019 tree
7020 type_hash_canon (unsigned int hashcode, tree type)
7021 {
7022 type_hash in;
7023 type_hash **loc;
7024
7025 /* The hash table only contains main variants, so ensure that's what we're
7026 being passed. */
7027 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7028
7029 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7030 must call that routine before comparing TYPE_ALIGNs. */
7031 layout_type (type);
7032
7033 in.hash = hashcode;
7034 in.type = type;
7035
7036 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7037 if (*loc)
7038 {
7039 tree t1 = ((type_hash *) *loc)->type;
7040 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7041 if (GATHER_STATISTICS)
7042 {
7043 tree_code_counts[(int) TREE_CODE (type)]--;
7044 tree_node_counts[(int) t_kind]--;
7045 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7046 }
7047 return t1;
7048 }
7049 else
7050 {
7051 struct type_hash *h;
7052
7053 h = ggc_alloc<type_hash> ();
7054 h->hash = hashcode;
7055 h->type = type;
7056 *loc = h;
7057
7058 return type;
7059 }
7060 }
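/* Sketch of the recipe above (exposition only; NEW_TYPE stands for a
   hypothetical integral type the caller has just built and laid out):

     inchash::hash hstate;
     hstate.add_int (TYPE_PRECISION (new_type));
     new_type = type_hash_canon (hstate.end (), new_type);

   If an identical type was already registered, the pre-existing node is
   returned and the freshly built one is effectively discarded.  */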
7061
7062 static void
7063 print_type_hash_statistics (void)
7064 {
7065 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7066 (long) type_hash_table->size (),
7067 (long) type_hash_table->elements (),
7068 type_hash_table->collisions ());
7069 }
7070
7071 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7072 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7073 by adding the hash codes of the individual attributes. */
7074
7075 static void
7076 attribute_hash_list (const_tree list, inchash::hash &hstate)
7077 {
7078 const_tree tail;
7079
7080 for (tail = list; tail; tail = TREE_CHAIN (tail))
7081 /* ??? Do we want to add in TREE_VALUE too? */
7082 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7083 }
7084
7085 /* Given two lists of attributes, return true if list L2 is
7086 equivalent to L1. */
7087
7088 int
7089 attribute_list_equal (const_tree l1, const_tree l2)
7090 {
7091 if (l1 == l2)
7092 return 1;
7093
7094 return attribute_list_contained (l1, l2)
7095 && attribute_list_contained (l2, l1);
7096 }
7097
7098 /* Given two lists of attributes, return true if list L2 is
7099 completely contained within L1. */
7100 /* ??? This would be faster if attribute names were stored in a canonicalized
7101 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7102 must be used to show these elements are equivalent (which they are). */
7103 /* ??? It's not clear that attributes with arguments will always be handled
7104 correctly. */
7105
7106 int
7107 attribute_list_contained (const_tree l1, const_tree l2)
7108 {
7109 const_tree t1, t2;
7110
7111 /* First check the obvious, maybe the lists are identical. */
7112 if (l1 == l2)
7113 return 1;
7114
7115 /* Maybe the lists are similar. */
7116 for (t1 = l1, t2 = l2;
7117 t1 != 0 && t2 != 0
7118 && get_attribute_name (t1) == get_attribute_name (t2)
7119 && TREE_VALUE (t1) == TREE_VALUE (t2);
7120 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7121 ;
7122
7123 /* Maybe the lists are equal. */
7124 if (t1 == 0 && t2 == 0)
7125 return 1;
7126
7127 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7128 {
7129 const_tree attr;
7130 /* This CONST_CAST is okay because lookup_attribute does not
7131 modify its argument and the return value is assigned to a
7132 const_tree. */
7133 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7134 CONST_CAST_TREE (l1));
7135 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7136 attr = lookup_ident_attribute (get_attribute_name (t2),
7137 TREE_CHAIN (attr)))
7138 ;
7139
7140 if (attr == NULL_TREE)
7141 return 0;
7142 }
7143
7144 return 1;
7145 }
7146
7147 /* Given two lists of types
7148 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7149 return 1 if the lists contain the same types in the same order.
7150 Also, the TREE_PURPOSEs must match. */
7151
7152 int
7153 type_list_equal (const_tree l1, const_tree l2)
7154 {
7155 const_tree t1, t2;
7156
7157 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7158 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7159 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7160 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7161 && (TREE_TYPE (TREE_PURPOSE (t1))
7162 == TREE_TYPE (TREE_PURPOSE (t2))))))
7163 return 0;
7164
7165 return t1 == t2;
7166 }
7167
7168 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7169 given by TYPE. If the argument list accepts variable arguments,
7170 then this function counts only the ordinary arguments. */
7171
7172 int
7173 type_num_arguments (const_tree type)
7174 {
7175 int i = 0;
7176 tree t;
7177
7178 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7179 /* If the function does not take a variable number of arguments,
7180 the last element in the list will have type `void'. */
7181 if (VOID_TYPE_P (TREE_VALUE (t)))
7182 break;
7183 else
7184 ++i;
7185
7186 return i;
7187 }
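/* Usage sketch (exposition only): for a prototype like 'int f (int, void *)'
   the trailing 'void' terminator of TYPE_ARG_TYPES is not counted:

     tree fntype = build_function_type_list (integer_type_node,
                                             integer_type_node,
                                             ptr_type_node, NULL_TREE);

   Here type_num_arguments (fntype) is 2.  */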
7188
7189 /* Nonzero if integer constants T1 and T2
7190 represent the same constant value. */
7191
7192 int
7193 tree_int_cst_equal (const_tree t1, const_tree t2)
7194 {
7195 if (t1 == t2)
7196 return 1;
7197
7198 if (t1 == 0 || t2 == 0)
7199 return 0;
7200
7201 if (TREE_CODE (t1) == INTEGER_CST
7202 && TREE_CODE (t2) == INTEGER_CST
7203 && wi::to_widest (t1) == wi::to_widest (t2))
7204 return 1;
7205
7206 return 0;
7207 }
7208
7209 /* Return true if T is an INTEGER_CST whose numerical value (extended
7210 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7211
7212 bool
7213 tree_fits_shwi_p (const_tree t)
7214 {
7215 return (t != NULL_TREE
7216 && TREE_CODE (t) == INTEGER_CST
7217 && wi::fits_shwi_p (wi::to_widest (t)));
7218 }
7219
7220 /* Return true if T is an INTEGER_CST whose numerical value (extended
7221 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7222
7223 bool
7224 tree_fits_uhwi_p (const_tree t)
7225 {
7226 return (t != NULL_TREE
7227 && TREE_CODE (t) == INTEGER_CST
7228 && wi::fits_uhwi_p (wi::to_widest (t)));
7229 }
7230
7231 /* T is an INTEGER_CST whose numerical value (extended according to
7232 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7233 HOST_WIDE_INT. */
7234
7235 HOST_WIDE_INT
7236 tree_to_shwi (const_tree t)
7237 {
7238 gcc_assert (tree_fits_shwi_p (t));
7239 return TREE_INT_CST_LOW (t);
7240 }
7241
7242 /* T is an INTEGER_CST whose numerical value (extended according to
7243 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7244 HOST_WIDE_INT. */
7245
7246 unsigned HOST_WIDE_INT
7247 tree_to_uhwi (const_tree t)
7248 {
7249 gcc_assert (tree_fits_uhwi_p (t));
7250 return TREE_INT_CST_LOW (t);
7251 }
7252
7253 /* Return the most significant (sign) bit of T. */
7254
7255 int
7256 tree_int_cst_sign_bit (const_tree t)
7257 {
7258 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7259
7260 return wi::extract_uhwi (t, bitno, 1);
7261 }
7262
7263 /* Return an indication of the sign of the integer constant T.
7264 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7265 Note that -1 will never be returned if T's type is unsigned. */
7266
7267 int
7268 tree_int_cst_sgn (const_tree t)
7269 {
7270 if (wi::eq_p (t, 0))
7271 return 0;
7272 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7273 return 1;
7274 else if (wi::neg_p (t))
7275 return -1;
7276 else
7277 return 1;
7278 }
7279
7280 /* Return the minimum number of bits needed to represent VALUE in a
7281 signed or unsigned type; SGN says which. */
7282
7283 unsigned int
7284 tree_int_cst_min_precision (tree value, signop sgn)
7285 {
7286 /* If the value is negative, compute its negative minus 1. The latter
7287 adjustment is because the absolute value of the largest negative value
7288 is one larger than the largest positive value. This is equivalent to
7289 a bit-wise negation, so use that operation instead. */
7290
7291 if (tree_int_cst_sgn (value) < 0)
7292 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7293
7294 /* Return the number of bits needed, taking into account the fact
7295 that we need one more bit for a signed than an unsigned type.
7296 If value is 0 or -1, the minimum precision is 1 no matter
7297 whether SGN is SIGNED or UNSIGNED. */
7298
7299 if (integer_zerop (value))
7300 return 1;
7301 else
7302 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7303 }
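/* Worked example (exposition only): for VALUE == 5, tree_floor_log2 gives 2,
   so the minimum precision is 3 bits unsigned and 4 bits signed; for
   VALUE == -3 the bit-wise negation yields 2, so the signed minimum is
   3 bits (binary 101):

     tree five = build_int_cst (integer_type_node, 5);
     unsigned u = tree_int_cst_min_precision (five, UNSIGNED);
     unsigned s = tree_int_cst_min_precision (five, SIGNED);

   Here U is 3 and S is 4.  */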
7304
7305 /* Return truthvalue of whether T1 is the same tree structure as T2.
7306 Return 1 if they are the same.
7307 Return 0 if they are understandably different.
7308 Return -1 if either contains tree structure not understood by
7309 this function. */
7310
7311 int
7312 simple_cst_equal (const_tree t1, const_tree t2)
7313 {
7314 enum tree_code code1, code2;
7315 int cmp;
7316 int i;
7317
7318 if (t1 == t2)
7319 return 1;
7320 if (t1 == 0 || t2 == 0)
7321 return 0;
7322
7323 code1 = TREE_CODE (t1);
7324 code2 = TREE_CODE (t2);
7325
7326 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7327 {
7328 if (CONVERT_EXPR_CODE_P (code2)
7329 || code2 == NON_LVALUE_EXPR)
7330 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7331 else
7332 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7333 }
7334
7335 else if (CONVERT_EXPR_CODE_P (code2)
7336 || code2 == NON_LVALUE_EXPR)
7337 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7338
7339 if (code1 != code2)
7340 return 0;
7341
7342 switch (code1)
7343 {
7344 case INTEGER_CST:
7345 return wi::to_widest (t1) == wi::to_widest (t2);
7346
7347 case REAL_CST:
7348 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7349
7350 case FIXED_CST:
7351 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7352
7353 case STRING_CST:
7354 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7355 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7356 TREE_STRING_LENGTH (t1)));
7357
7358 case CONSTRUCTOR:
7359 {
7360 unsigned HOST_WIDE_INT idx;
7361 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7362 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7363
7364 if (vec_safe_length (v1) != vec_safe_length (v2))
7365 return false;
7366
7367 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7368 /* ??? Should we also handle fields here? */
7369 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7370 return false;
7371 return true;
7372 }
7373
7374 case SAVE_EXPR:
7375 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7376
7377 case CALL_EXPR:
7378 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7379 if (cmp <= 0)
7380 return cmp;
7381 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7382 return 0;
7383 {
7384 const_tree arg1, arg2;
7385 const_call_expr_arg_iterator iter1, iter2;
7386 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7387 arg2 = first_const_call_expr_arg (t2, &iter2);
7388 arg1 && arg2;
7389 arg1 = next_const_call_expr_arg (&iter1),
7390 arg2 = next_const_call_expr_arg (&iter2))
7391 {
7392 cmp = simple_cst_equal (arg1, arg2);
7393 if (cmp <= 0)
7394 return cmp;
7395 }
7396 return arg1 == arg2;
7397 }
7398
7399 case TARGET_EXPR:
7400 /* Special case: if either target is an unallocated VAR_DECL,
7401 it means that it's going to be unified with whatever the
7402 TARGET_EXPR is really supposed to initialize, so treat it
7403 as being equivalent to anything. */
7404 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7405 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7406 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7407 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7408 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7409 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7410 cmp = 1;
7411 else
7412 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7413
7414 if (cmp <= 0)
7415 return cmp;
7416
7417 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7418
7419 case WITH_CLEANUP_EXPR:
7420 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7421 if (cmp <= 0)
7422 return cmp;
7423
7424 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7425
7426 case COMPONENT_REF:
7427 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7428 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7429
7430 return 0;
7431
7432 case VAR_DECL:
7433 case PARM_DECL:
7434 case CONST_DECL:
7435 case FUNCTION_DECL:
7436 return 0;
7437
7438 default:
7439 break;
7440 }
7441
7442 /* This general rule works for most tree codes. All exceptions should be
7443 handled above. If this is a language-specific tree code, we can't
7444 trust what might be in the operand, so say we don't know
7445 the situation. */
7446 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7447 return -1;
7448
7449 switch (TREE_CODE_CLASS (code1))
7450 {
7451 case tcc_unary:
7452 case tcc_binary:
7453 case tcc_comparison:
7454 case tcc_expression:
7455 case tcc_reference:
7456 case tcc_statement:
7457 cmp = 1;
7458 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7459 {
7460 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7461 if (cmp <= 0)
7462 return cmp;
7463 }
7464
7465 return cmp;
7466
7467 default:
7468 return -1;
7469 }
7470 }
7471
7472 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7473 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7474 than U, respectively. */
7475
7476 int
7477 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7478 {
7479 if (tree_int_cst_sgn (t) < 0)
7480 return -1;
7481 else if (!tree_fits_uhwi_p (t))
7482 return 1;
7483 else if (TREE_INT_CST_LOW (t) == u)
7484 return 0;
7485 else if (TREE_INT_CST_LOW (t) < u)
7486 return -1;
7487 else
7488 return 1;
7489 }
7490
7491 /* Return true if SIZE represents a constant size that is in bounds of
7492 what the middle-end and the backend accept (covering not more than
7493 half of the address-space). */
7494
7495 bool
7496 valid_constant_size_p (const_tree size)
7497 {
7498 if (! tree_fits_uhwi_p (size)
7499 || TREE_OVERFLOW (size)
7500 || tree_int_cst_sign_bit (size) != 0)
7501 return false;
7502 return true;
7503 }
7504
7505 /* Return the precision of the type, or for a complex or vector type the
7506 precision of the type of its elements. */
7507
7508 unsigned int
7509 element_precision (const_tree type)
7510 {
7511 enum tree_code code = TREE_CODE (type);
7512 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7513 type = TREE_TYPE (type);
7514
7515 return TYPE_PRECISION (type);
7516 }
7517
7518 /* Return true if CODE represents an associative tree code. Otherwise
7519 return false. */
7520 bool
7521 associative_tree_code (enum tree_code code)
7522 {
7523 switch (code)
7524 {
7525 case BIT_IOR_EXPR:
7526 case BIT_AND_EXPR:
7527 case BIT_XOR_EXPR:
7528 case PLUS_EXPR:
7529 case MULT_EXPR:
7530 case MIN_EXPR:
7531 case MAX_EXPR:
7532 return true;
7533
7534 default:
7535 break;
7536 }
7537 return false;
7538 }
7539
7540 /* Return true if CODE represents a commutative tree code. Otherwise
7541 return false. */
7542 bool
7543 commutative_tree_code (enum tree_code code)
7544 {
7545 switch (code)
7546 {
7547 case PLUS_EXPR:
7548 case MULT_EXPR:
7549 case MULT_HIGHPART_EXPR:
7550 case MIN_EXPR:
7551 case MAX_EXPR:
7552 case BIT_IOR_EXPR:
7553 case BIT_XOR_EXPR:
7554 case BIT_AND_EXPR:
7555 case NE_EXPR:
7556 case EQ_EXPR:
7557 case UNORDERED_EXPR:
7558 case ORDERED_EXPR:
7559 case UNEQ_EXPR:
7560 case LTGT_EXPR:
7561 case TRUTH_AND_EXPR:
7562 case TRUTH_XOR_EXPR:
7563 case TRUTH_OR_EXPR:
7564 case WIDEN_MULT_EXPR:
7565 case VEC_WIDEN_MULT_HI_EXPR:
7566 case VEC_WIDEN_MULT_LO_EXPR:
7567 case VEC_WIDEN_MULT_EVEN_EXPR:
7568 case VEC_WIDEN_MULT_ODD_EXPR:
7569 return true;
7570
7571 default:
7572 break;
7573 }
7574 return false;
7575 }
7576
7577 /* Return true if CODE represents a ternary tree code for which the
7578 first two operands are commutative. Otherwise return false. */
7579 bool
7580 commutative_ternary_tree_code (enum tree_code code)
7581 {
7582 switch (code)
7583 {
7584 case WIDEN_MULT_PLUS_EXPR:
7585 case WIDEN_MULT_MINUS_EXPR:
7586 case DOT_PROD_EXPR:
7587 case FMA_EXPR:
7588 return true;
7589
7590 default:
7591 break;
7592 }
7593 return false;
7594 }
7595
7596 namespace inchash
7597 {
7598
7599 /* Generate a hash value for an expression. This can be used iteratively
7600 by passing a previous result as the HSTATE argument.
7601
7602 This function is intended to produce the same hash for expressions which
7603 would compare equal using operand_equal_p. */
7604 void
7605 add_expr (const_tree t, inchash::hash &hstate)
7606 {
7607 int i;
7608 enum tree_code code;
7609 enum tree_code_class tclass;
7610
7611 if (t == NULL_TREE)
7612 {
7613 hstate.merge_hash (0);
7614 return;
7615 }
7616
7617 code = TREE_CODE (t);
7618
7619 switch (code)
7620 {
7621 /* Alas, constants aren't shared, so we can't rely on pointer
7622 identity. */
7623 case VOID_CST:
7624 hstate.merge_hash (0);
7625 return;
7626 case INTEGER_CST:
7627 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7628 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7629 return;
7630 case REAL_CST:
7631 {
7632 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7633 hstate.merge_hash (val2);
7634 return;
7635 }
7636 case FIXED_CST:
7637 {
7638 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7639 hstate.merge_hash (val2);
7640 return;
7641 }
7642 case STRING_CST:
7643 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7644 return;
7645 case COMPLEX_CST:
7646 inchash::add_expr (TREE_REALPART (t), hstate);
7647 inchash::add_expr (TREE_IMAGPART (t), hstate);
7648 return;
7649 case VECTOR_CST:
7650 {
7651 unsigned i;
7652 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7653 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7654 return;
7655 }
7656 case SSA_NAME:
7657 /* Within a function the version number identifies an SSA name, so hash that. */
7658 hstate.add_wide_int (SSA_NAME_VERSION (t));
7659 return;
7660 case PLACEHOLDER_EXPR:
7661 /* The node itself doesn't matter. */
7662 return;
7663 case TREE_LIST:
7664 /* A list of expressions, for a CALL_EXPR or as the elements of a
7665 VECTOR_CST. */
7666 for (; t; t = TREE_CHAIN (t))
7667 inchash::add_expr (TREE_VALUE (t), hstate);
7668 return;
7669 case CONSTRUCTOR:
7670 {
7671 unsigned HOST_WIDE_INT idx;
7672 tree field, value;
7673 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7674 {
7675 inchash::add_expr (field, hstate);
7676 inchash::add_expr (value, hstate);
7677 }
7678 return;
7679 }
7680 case FUNCTION_DECL:
7681 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7682 Otherwise nodes that compare equal according to operand_equal_p might
7683 get different hash codes. However, don't do this for machine specific
7684 or front end builtins, since the function code is overloaded in those
7685 cases. */
7686 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7687 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7688 {
7689 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7690 code = TREE_CODE (t);
7691 }
7692 /* FALL THROUGH */
7693 default:
7694 tclass = TREE_CODE_CLASS (code);
7695
7696 if (tclass == tcc_declaration)
7697 {
7698 /* DECLs have a unique ID. */
7699 hstate.add_wide_int (DECL_UID (t));
7700 }
7701 else
7702 {
7703 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7704
7705 hstate.add_object (code);
7706
7707 /* Don't hash the type, that can lead to having nodes which
7708 compare equal according to operand_equal_p, but which
7709 have different hash codes. */
7710 if (CONVERT_EXPR_CODE_P (code)
7711 || code == NON_LVALUE_EXPR)
7712 {
7713 /* Make sure to include signedness in the hash computation. */
7714 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7715 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7716 }
7717
7718 else if (commutative_tree_code (code))
7719 {
7720 /* It's a commutative expression.  We want it to hash the same
7721 regardless of the order of its operands.  We do this by first
7722 hashing both operands and then rehashing based on the order of
7723 their independent hashes. */
7724 inchash::hash one, two;
7725 inchash::add_expr (TREE_OPERAND (t, 0), one);
7726 inchash::add_expr (TREE_OPERAND (t, 1), two);
7727 hstate.add_commutative (one, two);
7728 }
7729 else
7730 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7731 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7732 }
7733 return;
7734 }
7735 }
7736
7737 }
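/* Sketch of the commutativity handling in inchash::add_expr above
   (exposition only; X and Y stand for hypothetical operand trees):

     inchash::hash h1, h2;
     inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, x, y), h1);
     inchash::add_expr (build2 (PLUS_EXPR, integer_type_node, y, x), h2);

   Here h1.end () == h2.end (), because the operand hashes are combined in
   an order-insensitive way via add_commutative.  */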
7738
7739 /* Constructors for pointer, array and function types.
7740 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7741 constructed by language-dependent code, not here.) */
7742
7743 /* Construct, lay out and return the type of pointers to TO_TYPE with
7744 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7745 reference all of memory. If such a type has already been
7746 constructed, reuse it. */
7747
7748 tree
7749 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7750 bool can_alias_all)
7751 {
7752 tree t;
7753 bool could_alias = can_alias_all;
7754
7755 if (to_type == error_mark_node)
7756 return error_mark_node;
7757
7758 /* If the pointed-to type has the may_alias attribute set, force
7759 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7760 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7761 can_alias_all = true;
7762
7763 /* In some cases, languages will have things that aren't a POINTER_TYPE
7764 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7765 In that case, return that type without regard to the rest of our
7766 operands.
7767
7768 ??? This is a kludge, but consistent with the way this function has
7769 always operated and there doesn't seem to be a good way to avoid this
7770 at the moment. */
7771 if (TYPE_POINTER_TO (to_type) != 0
7772 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7773 return TYPE_POINTER_TO (to_type);
7774
7775 /* First, if we already have a type for pointers to TO_TYPE and it's
7776 the proper mode, use it. */
7777 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7778 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7779 return t;
7780
7781 t = make_node (POINTER_TYPE);
7782
7783 TREE_TYPE (t) = to_type;
7784 SET_TYPE_MODE (t, mode);
7785 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7786 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7787 TYPE_POINTER_TO (to_type) = t;
7788
7789 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7790 SET_TYPE_STRUCTURAL_EQUALITY (t);
7791 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7792 TYPE_CANONICAL (t)
7793 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7794 mode, false);
7795
7796 /* Lay out the type. This function has many callers that are concerned
7797 with expression-construction, and this simplifies them all. */
7798 layout_type (t);
7799
7800 return t;
7801 }
7802
7803 /* By default build pointers in ptr_mode. */
7804
7805 tree
7806 build_pointer_type (tree to_type)
7807 {
7808 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7809 : TYPE_ADDR_SPACE (to_type);
7810 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7811 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7812 }
7813
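/* Illustrative sketch, not part of the original source: a front end that
   needs the type "int *" in the default address space would typically call

       tree int_ptr = build_pointer_type (integer_type_node);

   which walks the TYPE_POINTER_TO chain of integer_type_node and reuses an
   already constructed POINTER_TYPE in ptr_mode if one exists, creating,
   laying out and recording a new node otherwise.  */
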
7814 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7815
7816 tree
7817 build_reference_type_for_mode (tree to_type, machine_mode mode,
7818 bool can_alias_all)
7819 {
7820 tree t;
7821 bool could_alias = can_alias_all;
7822
7823 if (to_type == error_mark_node)
7824 return error_mark_node;
7825
7826 /* If the pointed-to type has the may_alias attribute set, force
7827 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7828 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7829 can_alias_all = true;
7830
7831 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7832 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7833 In that case, return that type without regard to the rest of our
7834 operands.
7835
7836 ??? This is a kludge, but consistent with the way this function has
7837 always operated and there doesn't seem to be a good way to avoid this
7838 at the moment. */
7839 if (TYPE_REFERENCE_TO (to_type) != 0
7840 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7841 return TYPE_REFERENCE_TO (to_type);
7842
7843 	  /* First, if we already have a type for references to TO_TYPE and it's
7844 the proper mode, use it. */
7845 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7846 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7847 return t;
7848
7849 t = make_node (REFERENCE_TYPE);
7850
7851 TREE_TYPE (t) = to_type;
7852 SET_TYPE_MODE (t, mode);
7853 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7854 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7855 TYPE_REFERENCE_TO (to_type) = t;
7856
7857 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7858 SET_TYPE_STRUCTURAL_EQUALITY (t);
7859 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7860 TYPE_CANONICAL (t)
7861 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7862 mode, false);
7863
7864 layout_type (t);
7865
7866 return t;
7867 }
7868
7869
7870 /* Build the node for the type of references-to-TO_TYPE by default
7871 in ptr_mode. */
7872
7873 tree
7874 build_reference_type (tree to_type)
7875 {
7876 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7877 : TYPE_ADDR_SPACE (to_type);
7878 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7879 return build_reference_type_for_mode (to_type, pointer_mode, false);
7880 }
7881
7882 #define MAX_INT_CACHED_PREC \
7883 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7884 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7885
7886 /* Builds a signed or unsigned integer type of precision PRECISION.
7887 Used for C bitfields whose precision does not match that of
7888 built-in target types. */
7889 tree
7890 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7891 int unsignedp)
7892 {
7893 tree itype, ret;
7894
7895 if (unsignedp)
7896 unsignedp = MAX_INT_CACHED_PREC + 1;
7897
7898 if (precision <= MAX_INT_CACHED_PREC)
7899 {
7900 itype = nonstandard_integer_type_cache[precision + unsignedp];
7901 if (itype)
7902 return itype;
7903 }
7904
7905 itype = make_node (INTEGER_TYPE);
7906 TYPE_PRECISION (itype) = precision;
7907
7908 if (unsignedp)
7909 fixup_unsigned_type (itype);
7910 else
7911 fixup_signed_type (itype);
7912
7913 ret = itype;
7914 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7915 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7916 if (precision <= MAX_INT_CACHED_PREC)
7917 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7918
7919 return ret;
7920 }
7921
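/* Illustrative sketch, not part of the original source: a C front end that
   lays out a bit-field such as "unsigned int f : 24;" needs a 24-bit
   unsigned type, which no built-in target type provides:

       tree uint24 = build_nonstandard_integer_type (24, 1);

   Since 24 <= MAX_INT_CACHED_PREC, the canonicalized result is also
   remembered in nonstandard_integer_type_cache, so a later request for the
   same precision and signedness returns the same node.  */
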
7922 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7923 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7924 is true, reuse such a type that has already been constructed. */
7925
7926 static tree
7927 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7928 {
7929 tree itype = make_node (INTEGER_TYPE);
7930 inchash::hash hstate;
7931
7932 TREE_TYPE (itype) = type;
7933
7934 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7935 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7936
7937 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7938 SET_TYPE_MODE (itype, TYPE_MODE (type));
7939 TYPE_SIZE (itype) = TYPE_SIZE (type);
7940 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7941 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7942 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7943
7944 if (!shared)
7945 return itype;
7946
7947 if ((TYPE_MIN_VALUE (itype)
7948 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7949 || (TYPE_MAX_VALUE (itype)
7950 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7951 {
7952 /* Since we cannot reliably merge this type, we need to compare it using
7953 structural equality checks. */
7954 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7955 return itype;
7956 }
7957
7958 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7959 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7960 hstate.merge_hash (TYPE_HASH (type));
7961 itype = type_hash_canon (hstate.end (), itype);
7962
7963 return itype;
7964 }
7965
7966 /* Wrapper around build_range_type_1 with SHARED set to true. */
7967
7968 tree
7969 build_range_type (tree type, tree lowval, tree highval)
7970 {
7971 return build_range_type_1 (type, lowval, highval, true);
7972 }
7973
7974 /* Wrapper around build_range_type_1 with SHARED set to false. */
7975
7976 tree
7977 build_nonshared_range_type (tree type, tree lowval, tree highval)
7978 {
7979 return build_range_type_1 (type, lowval, highval, false);
7980 }
7981
7982 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7983 MAXVAL should be the maximum value in the domain
7984 (one less than the length of the array).
7985
7986 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7987    We don't enforce this limit; that is up to the caller (e.g. the language front end).
7988 The limit exists because the result is a signed type and we don't handle
7989 sizes that use more than one HOST_WIDE_INT. */
7990
7991 tree
7992 build_index_type (tree maxval)
7993 {
7994 return build_range_type (sizetype, size_zero_node, maxval);
7995 }
7996
7997 /* Return true if the debug information for TYPE, a subtype, should be emitted
7998 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7999 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8000 debug info and doesn't reflect the source code. */
8001
8002 bool
8003 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8004 {
8005 tree base_type = TREE_TYPE (type), low, high;
8006
8007 /* Subrange types have a base type which is an integral type. */
8008 if (!INTEGRAL_TYPE_P (base_type))
8009 return false;
8010
8011 /* Get the real bounds of the subtype. */
8012 if (lang_hooks.types.get_subrange_bounds)
8013 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8014 else
8015 {
8016 low = TYPE_MIN_VALUE (type);
8017 high = TYPE_MAX_VALUE (type);
8018 }
8019
8020 /* If the type and its base type have the same representation and the same
8021 name, then the type is not a subrange but a copy of the base type. */
8022 if ((TREE_CODE (base_type) == INTEGER_TYPE
8023 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8024 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8025 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8026 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8027 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8028 return false;
8029
8030 if (lowval)
8031 *lowval = low;
8032 if (highval)
8033 *highval = high;
8034 return true;
8035 }
8036
8037 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8038 and number of elements specified by the range of values of INDEX_TYPE.
8039 If SHARED is true, reuse such a type that has already been constructed. */
8040
8041 static tree
8042 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8043 {
8044 tree t;
8045
8046 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8047 {
8048 error ("arrays of functions are not meaningful");
8049 elt_type = integer_type_node;
8050 }
8051
8052 t = make_node (ARRAY_TYPE);
8053 TREE_TYPE (t) = elt_type;
8054 TYPE_DOMAIN (t) = index_type;
8055 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8056 layout_type (t);
8057
8058 /* If the element type is incomplete at this point we get marked for
8059 structural equality. Do not record these types in the canonical
8060 type hashtable. */
8061 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8062 return t;
8063
8064 if (shared)
8065 {
8066 inchash::hash hstate;
8067 hstate.add_object (TYPE_HASH (elt_type));
8068 if (index_type)
8069 hstate.add_object (TYPE_HASH (index_type));
8070 t = type_hash_canon (hstate.end (), t);
8071 }
8072
8073 if (TYPE_CANONICAL (t) == t)
8074 {
8075 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8076 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8077 SET_TYPE_STRUCTURAL_EQUALITY (t);
8078 else if (TYPE_CANONICAL (elt_type) != elt_type
8079 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8080 TYPE_CANONICAL (t)
8081 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8082 index_type
8083 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8084 shared);
8085 }
8086
8087 return t;
8088 }
8089
8090 /* Wrapper around build_array_type_1 with SHARED set to true. */
8091
8092 tree
8093 build_array_type (tree elt_type, tree index_type)
8094 {
8095 return build_array_type_1 (elt_type, index_type, true);
8096 }
8097
8098 /* Wrapper around build_array_type_1 with SHARED set to false. */
8099
8100 tree
8101 build_nonshared_array_type (tree elt_type, tree index_type)
8102 {
8103 return build_array_type_1 (elt_type, index_type, false);
8104 }
8105
8106 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8107 sizetype. */
8108
8109 tree
8110 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8111 {
8112 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8113 }
8114
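/* Illustrative sketch, not part of the original source: the type "int[10]"
   can be obtained directly from the element type and the element count:

       tree int10 = build_array_type_nelts (integer_type_node, 10);

   which is equivalent to building the index domain by hand:

       tree int10_alt
         = build_array_type (integer_type_node,
                             build_index_type (size_int (9)));

   i.e. the domain runs from 0 to NELTS - 1 in sizetype.  */
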
8115 /* Recursively examines the array elements of TYPE, until a non-array
8116 element type is found. */
8117
8118 tree
8119 strip_array_types (tree type)
8120 {
8121 while (TREE_CODE (type) == ARRAY_TYPE)
8122 type = TREE_TYPE (type);
8123
8124 return type;
8125 }
8126
8127 /* Computes the canonical argument types from the argument type list
8128 ARGTYPES.
8129
8130 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8131 on entry to this function, or if any of the ARGTYPES are
8132 structural.
8133
8134 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8135 true on entry to this function, or if any of the ARGTYPES are
8136 non-canonical.
8137
8138 Returns a canonical argument list, which may be ARGTYPES when the
8139 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8140 true) or would not differ from ARGTYPES. */
8141
8142 static tree
8143 maybe_canonicalize_argtypes (tree argtypes,
8144 bool *any_structural_p,
8145 bool *any_noncanonical_p)
8146 {
8147 tree arg;
8148 bool any_noncanonical_argtypes_p = false;
8149
8150 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8151 {
8152 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8153 /* Fail gracefully by stating that the type is structural. */
8154 *any_structural_p = true;
8155 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8156 *any_structural_p = true;
8157 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8158 || TREE_PURPOSE (arg))
8159 /* If the argument has a default argument, we consider it
8160 non-canonical even though the type itself is canonical.
8161 That way, different variants of function and method types
8162 with default arguments will all point to the variant with
8163 no defaults as their canonical type. */
8164 any_noncanonical_argtypes_p = true;
8165 }
8166
8167 if (*any_structural_p)
8168 return argtypes;
8169
8170 if (any_noncanonical_argtypes_p)
8171 {
8172 /* Build the canonical list of argument types. */
8173 tree canon_argtypes = NULL_TREE;
8174 bool is_void = false;
8175
8176 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8177 {
8178 if (arg == void_list_node)
8179 is_void = true;
8180 else
8181 canon_argtypes = tree_cons (NULL_TREE,
8182 TYPE_CANONICAL (TREE_VALUE (arg)),
8183 canon_argtypes);
8184 }
8185
8186 canon_argtypes = nreverse (canon_argtypes);
8187 if (is_void)
8188 canon_argtypes = chainon (canon_argtypes, void_list_node);
8189
8190 /* There is a non-canonical type. */
8191 *any_noncanonical_p = true;
8192 return canon_argtypes;
8193 }
8194
8195 /* The canonical argument types are the same as ARGTYPES. */
8196 return argtypes;
8197 }
8198
8199 /* Construct, lay out and return
8200 the type of functions returning type VALUE_TYPE
8201 given arguments of types ARG_TYPES.
8202 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8203 are data type nodes for the arguments of the function.
8204 If such a type has already been constructed, reuse it. */
8205
8206 tree
8207 build_function_type (tree value_type, tree arg_types)
8208 {
8209 tree t;
8210 inchash::hash hstate;
8211 bool any_structural_p, any_noncanonical_p;
8212 tree canon_argtypes;
8213
8214 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8215 {
8216 error ("function return type cannot be function");
8217 value_type = integer_type_node;
8218 }
8219
8220 /* Make a node of the sort we want. */
8221 t = make_node (FUNCTION_TYPE);
8222 TREE_TYPE (t) = value_type;
8223 TYPE_ARG_TYPES (t) = arg_types;
8224
8225 /* If we already have such a type, use the old one. */
8226 hstate.add_object (TYPE_HASH (value_type));
8227 type_hash_list (arg_types, hstate);
8228 t = type_hash_canon (hstate.end (), t);
8229
8230 /* Set up the canonical type. */
8231 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8232 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8233 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8234 &any_structural_p,
8235 &any_noncanonical_p);
8236 if (any_structural_p)
8237 SET_TYPE_STRUCTURAL_EQUALITY (t);
8238 else if (any_noncanonical_p)
8239 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8240 canon_argtypes);
8241
8242 if (!COMPLETE_TYPE_P (t))
8243 layout_type (t);
8244 return t;
8245 }
8246
8247 /* Build a function type. The RETURN_TYPE is the type returned by the
8248    function. If VAARGS is set, no void_type_node is appended to the
8249    list. ARGP must always be terminated by a NULL_TREE. */
8250
8251 static tree
8252 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8253 {
8254 tree t, args, last;
8255
8256 t = va_arg (argp, tree);
8257 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8258 args = tree_cons (NULL_TREE, t, args);
8259
8260 if (vaargs)
8261 {
8262 last = args;
8263 if (args != NULL_TREE)
8264 args = nreverse (args);
8265 gcc_assert (last != void_list_node);
8266 }
8267 else if (args == NULL_TREE)
8268 args = void_list_node;
8269 else
8270 {
8271 last = args;
8272 args = nreverse (args);
8273 TREE_CHAIN (last) = void_list_node;
8274 }
8275 args = build_function_type (return_type, args);
8276
8277 return args;
8278 }
8279
8280 /* Build a function type. The RETURN_TYPE is the type returned by the
8281 function. If additional arguments are provided, they are
8282 additional argument types. The list of argument types must always
8283 be terminated by NULL_TREE. */
8284
8285 tree
8286 build_function_type_list (tree return_type, ...)
8287 {
8288 tree args;
8289 va_list p;
8290
8291 va_start (p, return_type);
8292 args = build_function_type_list_1 (false, return_type, p);
8293 va_end (p);
8294 return args;
8295 }
8296
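/* Illustrative sketch, not part of the original source: the type of a
   function such as "double f (double, double)" can be built with

       tree fntype
         = build_function_type_list (double_type_node,
                                     double_type_node, double_type_node,
                                     NULL_TREE);

   The trailing NULL_TREE terminates the argument list; because this is the
   non-varargs entry point, a void_list_node is appended so the resulting
   FUNCTION_TYPE is prototyped with exactly two arguments.  */
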
8297 /* Build a variable argument function type. The RETURN_TYPE is the
8298 type returned by the function. If additional arguments are provided,
8299 they are additional argument types. The list of argument types must
8300 always be terminated by NULL_TREE. */
8301
8302 tree
8303 build_varargs_function_type_list (tree return_type, ...)
8304 {
8305 tree args;
8306 va_list p;
8307
8308 va_start (p, return_type);
8309 args = build_function_type_list_1 (true, return_type, p);
8310 va_end (p);
8311
8312 return args;
8313 }
8314
8315 /* Build a function type. RETURN_TYPE is the type returned by the
8316 function; VAARGS indicates whether the function takes varargs. The
8317 function takes N named arguments, the types of which are provided in
8318 ARG_TYPES. */
8319
8320 static tree
8321 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8322 tree *arg_types)
8323 {
8324 int i;
8325 tree t = vaargs ? NULL_TREE : void_list_node;
8326
8327 for (i = n - 1; i >= 0; i--)
8328 t = tree_cons (NULL_TREE, arg_types[i], t);
8329
8330 return build_function_type (return_type, t);
8331 }
8332
8333 /* Build a function type. RETURN_TYPE is the type returned by the
8334 function. The function takes N named arguments, the types of which
8335 are provided in ARG_TYPES. */
8336
8337 tree
8338 build_function_type_array (tree return_type, int n, tree *arg_types)
8339 {
8340 return build_function_type_array_1 (false, return_type, n, arg_types);
8341 }
8342
8343 /* Build a variable argument function type. RETURN_TYPE is the type
8344 returned by the function. The function takes N named arguments, the
8345 types of which are provided in ARG_TYPES. */
8346
8347 tree
8348 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8349 {
8350 return build_function_type_array_1 (true, return_type, n, arg_types);
8351 }
8352
8353 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8354 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8355 for the method. An implicit additional parameter (of type
8356 pointer-to-BASETYPE) is added to the ARGTYPES. */
8357
8358 tree
8359 build_method_type_directly (tree basetype,
8360 tree rettype,
8361 tree argtypes)
8362 {
8363 tree t;
8364 tree ptype;
8365 inchash::hash hstate;
8366 bool any_structural_p, any_noncanonical_p;
8367 tree canon_argtypes;
8368
8369 /* Make a node of the sort we want. */
8370 t = make_node (METHOD_TYPE);
8371
8372 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8373 TREE_TYPE (t) = rettype;
8374 ptype = build_pointer_type (basetype);
8375
8376 /* The actual arglist for this function includes a "hidden" argument
8377 which is "this". Put it into the list of argument types. */
8378 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8379 TYPE_ARG_TYPES (t) = argtypes;
8380
8381 /* If we already have such a type, use the old one. */
8382 hstate.add_object (TYPE_HASH (basetype));
8383 hstate.add_object (TYPE_HASH (rettype));
8384 type_hash_list (argtypes, hstate);
8385 t = type_hash_canon (hstate.end (), t);
8386
8387 /* Set up the canonical type. */
8388 any_structural_p
8389 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8390 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8391 any_noncanonical_p
8392 = (TYPE_CANONICAL (basetype) != basetype
8393 || TYPE_CANONICAL (rettype) != rettype);
8394 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8395 &any_structural_p,
8396 &any_noncanonical_p);
8397 if (any_structural_p)
8398 SET_TYPE_STRUCTURAL_EQUALITY (t);
8399 else if (any_noncanonical_p)
8400 TYPE_CANONICAL (t)
8401 = build_method_type_directly (TYPE_CANONICAL (basetype),
8402 TYPE_CANONICAL (rettype),
8403 canon_argtypes);
8404 if (!COMPLETE_TYPE_P (t))
8405 layout_type (t);
8406
8407 return t;
8408 }
8409
8410 /* Construct, lay out and return the type of methods belonging to class
8411 BASETYPE and whose arguments and values are described by TYPE.
8412 If that type exists already, reuse it.
8413 TYPE must be a FUNCTION_TYPE node. */
8414
8415 tree
8416 build_method_type (tree basetype, tree type)
8417 {
8418 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8419
8420 return build_method_type_directly (basetype,
8421 TREE_TYPE (type),
8422 TYPE_ARG_TYPES (type));
8423 }
8424
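/* Illustrative sketch, not part of the original source: given some class
   type CLASS_TYPE (a RECORD_TYPE the caller already has) and the
   FUNCTION_TYPE of "int (void)", a C++-like front end could build the
   corresponding METHOD_TYPE with

       tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
       tree mtype  = build_method_type (class_type, fntype);

   TYPE_ARG_TYPES (mtype) then starts with the implicit "this" parameter of
   type pointer-to-CLASS_TYPE, followed by the original argument list (here
   just the void terminator).  */
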
8425 /* Construct, lay out and return the type of offsets to a value
8426 of type TYPE, within an object of type BASETYPE.
8427 If a suitable offset type exists already, reuse it. */
8428
8429 tree
8430 build_offset_type (tree basetype, tree type)
8431 {
8432 tree t;
8433 inchash::hash hstate;
8434
8435 /* Make a node of the sort we want. */
8436 t = make_node (OFFSET_TYPE);
8437
8438 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8439 TREE_TYPE (t) = type;
8440
8441 /* If we already have such a type, use the old one. */
8442 hstate.add_object (TYPE_HASH (basetype));
8443 hstate.add_object (TYPE_HASH (type));
8444 t = type_hash_canon (hstate.end (), t);
8445
8446 if (!COMPLETE_TYPE_P (t))
8447 layout_type (t);
8448
8449 if (TYPE_CANONICAL (t) == t)
8450 {
8451 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8452 || TYPE_STRUCTURAL_EQUALITY_P (type))
8453 SET_TYPE_STRUCTURAL_EQUALITY (t);
8454 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8455 || TYPE_CANONICAL (type) != type)
8456 TYPE_CANONICAL (t)
8457 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8458 TYPE_CANONICAL (type));
8459 }
8460
8461 return t;
8462 }
8463
8464 /* Create a complex type whose components are COMPONENT_TYPE. */
8465
8466 tree
8467 build_complex_type (tree component_type)
8468 {
8469 tree t;
8470 inchash::hash hstate;
8471
8472 gcc_assert (INTEGRAL_TYPE_P (component_type)
8473 || SCALAR_FLOAT_TYPE_P (component_type)
8474 || FIXED_POINT_TYPE_P (component_type));
8475
8476 /* Make a node of the sort we want. */
8477 t = make_node (COMPLEX_TYPE);
8478
8479 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8480
8481 /* If we already have such a type, use the old one. */
8482 hstate.add_object (TYPE_HASH (component_type));
8483 t = type_hash_canon (hstate.end (), t);
8484
8485 if (!COMPLETE_TYPE_P (t))
8486 layout_type (t);
8487
8488 if (TYPE_CANONICAL (t) == t)
8489 {
8490 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8491 SET_TYPE_STRUCTURAL_EQUALITY (t);
8492 else if (TYPE_CANONICAL (component_type) != component_type)
8493 TYPE_CANONICAL (t)
8494 = build_complex_type (TYPE_CANONICAL (component_type));
8495 }
8496
8497 /* We need to create a name, since complex is a fundamental type. */
8498 if (! TYPE_NAME (t))
8499 {
8500 const char *name;
8501 if (component_type == char_type_node)
8502 name = "complex char";
8503 else if (component_type == signed_char_type_node)
8504 name = "complex signed char";
8505 else if (component_type == unsigned_char_type_node)
8506 name = "complex unsigned char";
8507 else if (component_type == short_integer_type_node)
8508 name = "complex short int";
8509 else if (component_type == short_unsigned_type_node)
8510 name = "complex short unsigned int";
8511 else if (component_type == integer_type_node)
8512 name = "complex int";
8513 else if (component_type == unsigned_type_node)
8514 name = "complex unsigned int";
8515 else if (component_type == long_integer_type_node)
8516 name = "complex long int";
8517 else if (component_type == long_unsigned_type_node)
8518 name = "complex long unsigned int";
8519 else if (component_type == long_long_integer_type_node)
8520 name = "complex long long int";
8521 else if (component_type == long_long_unsigned_type_node)
8522 name = "complex long long unsigned int";
8523 else
8524 name = 0;
8525
8526 if (name != 0)
8527 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8528 get_identifier (name), t);
8529 }
8530
8531 return build_qualified_type (t, TYPE_QUALS (component_type));
8532 }
8533
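/* Illustrative sketch, not part of the original source: requesting the
   complex counterpart of a plain integer type,

       tree cint = build_complex_type (integer_type_node);

   returns a COMPLEX_TYPE whose component type is int, and synthesizes the
   TYPE_DECL name "complex int" for debug output if the canonical node does
   not already carry one.  */
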
8534 /* If TYPE is a real or complex floating-point type and the target
8535 does not directly support arithmetic on TYPE then return the wider
8536 type to be used for arithmetic on TYPE. Otherwise, return
8537 NULL_TREE. */
8538
8539 tree
8540 excess_precision_type (tree type)
8541 {
8542 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8543 {
8544 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8545 switch (TREE_CODE (type))
8546 {
8547 case REAL_TYPE:
8548 switch (flt_eval_method)
8549 {
8550 case 1:
8551 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8552 return double_type_node;
8553 break;
8554 case 2:
8555 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8556 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8557 return long_double_type_node;
8558 break;
8559 default:
8560 gcc_unreachable ();
8561 }
8562 break;
8563 case COMPLEX_TYPE:
8564 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8565 return NULL_TREE;
8566 switch (flt_eval_method)
8567 {
8568 case 1:
8569 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8570 return complex_double_type_node;
8571 break;
8572 case 2:
8573 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8574 || (TYPE_MODE (TREE_TYPE (type))
8575 == TYPE_MODE (double_type_node)))
8576 return complex_long_double_type_node;
8577 break;
8578 default:
8579 gcc_unreachable ();
8580 }
8581 break;
8582 default:
8583 break;
8584 }
8585 }
8586 return NULL_TREE;
8587 }
8588 \f
8589 /* Return OP, stripped of any conversions to wider types as much as is safe.
8590 Converting the value back to OP's type makes a value equivalent to OP.
8591
8592 If FOR_TYPE is nonzero, we return a value which, if converted to
8593 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8594
8595 OP must have integer, real or enumeral type. Pointers are not allowed!
8596
8597 There are some cases where the obvious value we could return
8598 would regenerate to OP if converted to OP's type,
8599 but would not extend like OP to wider types.
8600 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8601 For example, if OP is (unsigned short)(signed char)-1,
8602 we avoid returning (signed char)-1 if FOR_TYPE is int,
8603 even though extending that to an unsigned short would regenerate OP,
8604 since the result of extending (signed char)-1 to (int)
8605 is different from (int) OP. */
8606
8607 tree
8608 get_unwidened (tree op, tree for_type)
8609 {
8610 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8611 tree type = TREE_TYPE (op);
8612 unsigned final_prec
8613 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8614 int uns
8615 = (for_type != 0 && for_type != type
8616 && final_prec > TYPE_PRECISION (type)
8617 && TYPE_UNSIGNED (type));
8618 tree win = op;
8619
8620 while (CONVERT_EXPR_P (op))
8621 {
8622 int bitschange;
8623
8624 /* TYPE_PRECISION on vector types has different meaning
8625 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8626 so avoid them here. */
8627 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8628 break;
8629
8630 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8631 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8632
8633 /* Truncations are many-one so cannot be removed.
8634 Unless we are later going to truncate down even farther. */
8635 if (bitschange < 0
8636 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8637 break;
8638
8639 /* See what's inside this conversion. If we decide to strip it,
8640 we will set WIN. */
8641 op = TREE_OPERAND (op, 0);
8642
8643 /* If we have not stripped any zero-extensions (uns is 0),
8644 we can strip any kind of extension.
8645 If we have previously stripped a zero-extension,
8646 only zero-extensions can safely be stripped.
8647 Any extension can be stripped if the bits it would produce
8648 are all going to be discarded later by truncating to FOR_TYPE. */
8649
8650 if (bitschange > 0)
8651 {
8652 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8653 win = op;
8654 /* TYPE_UNSIGNED says whether this is a zero-extension.
8655 Let's avoid computing it if it does not affect WIN
8656 and if UNS will not be needed again. */
8657 if ((uns
8658 || CONVERT_EXPR_P (op))
8659 && TYPE_UNSIGNED (TREE_TYPE (op)))
8660 {
8661 uns = 1;
8662 win = op;
8663 }
8664 }
8665 }
8666
8667 /* If we finally reach a constant see if it fits in for_type and
8668 in that case convert it. */
8669 if (for_type
8670 && TREE_CODE (win) == INTEGER_CST
8671 && TREE_TYPE (win) != for_type
8672 && int_fits_type_p (win, for_type))
8673 win = fold_convert (for_type, win);
8674
8675 return win;
8676 }
8677 \f
8678 /* Return OP or a simpler expression for a narrower value
8679 which can be sign-extended or zero-extended to give back OP.
8680 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8681 or 0 if the value should be sign-extended. */
8682
8683 tree
8684 get_narrower (tree op, int *unsignedp_ptr)
8685 {
8686 int uns = 0;
8687 int first = 1;
8688 tree win = op;
8689 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8690
8691 while (TREE_CODE (op) == NOP_EXPR)
8692 {
8693 int bitschange
8694 = (TYPE_PRECISION (TREE_TYPE (op))
8695 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8696
8697 /* Truncations are many-one so cannot be removed. */
8698 if (bitschange < 0)
8699 break;
8700
8701 /* See what's inside this conversion. If we decide to strip it,
8702 we will set WIN. */
8703
8704 if (bitschange > 0)
8705 {
8706 op = TREE_OPERAND (op, 0);
8707 /* An extension: the outermost one can be stripped,
8708 but remember whether it is zero or sign extension. */
8709 if (first)
8710 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8711 /* Otherwise, if a sign extension has been stripped,
8712 only sign extensions can now be stripped;
8713 if a zero extension has been stripped, only zero-extensions. */
8714 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8715 break;
8716 first = 0;
8717 }
8718 else /* bitschange == 0 */
8719 {
8720 /* A change in nominal type can always be stripped, but we must
8721 preserve the unsignedness. */
8722 if (first)
8723 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8724 first = 0;
8725 op = TREE_OPERAND (op, 0);
8726 /* Keep trying to narrow, but don't assign op to win if it
8727 would turn an integral type into something else. */
8728 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8729 continue;
8730 }
8731
8732 win = op;
8733 }
8734
8735 if (TREE_CODE (op) == COMPONENT_REF
8736 /* Since type_for_size always gives an integer type. */
8737 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8738 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8739 /* Ensure field is laid out already. */
8740 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8741 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8742 {
8743 unsigned HOST_WIDE_INT innerprec
8744 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8745 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8746 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8747 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8748
8749 /* We can get this structure field in a narrower type that fits it,
8750 but the resulting extension to its nominal type (a fullword type)
8751 must satisfy the same conditions as for other extensions.
8752
8753 Do this only for fields that are aligned (not bit-fields),
8754 because when bit-field insns will be used there is no
8755 advantage in doing this. */
8756
8757 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8758 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8759 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8760 && type != 0)
8761 {
8762 if (first)
8763 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8764 win = fold_convert (type, op);
8765 }
8766 }
8767
8768 *unsignedp_ptr = uns;
8769 return win;
8770 }
8771 \f
8772 /* Returns true if integer constant C has a value that is permissible
8773 for type TYPE (an INTEGER_TYPE). */
8774
8775 bool
8776 int_fits_type_p (const_tree c, const_tree type)
8777 {
8778 tree type_low_bound, type_high_bound;
8779 bool ok_for_low_bound, ok_for_high_bound;
8780 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8781
8782 retry:
8783 type_low_bound = TYPE_MIN_VALUE (type);
8784 type_high_bound = TYPE_MAX_VALUE (type);
8785
8786 /* If at least one bound of the type is a constant integer, we can check
8787 ourselves and maybe make a decision. If no such decision is possible, but
8788 this type is a subtype, try checking against that. Otherwise, use
8789 fits_to_tree_p, which checks against the precision.
8790
8791    Compute the status for each possibly constant bound, and return false as
8792    soon as we see that the constant does not fit one of them. Use
8793    ok_for_xxx_bound to record whether the corresponding bound is an
8794    INTEGER_CST against which the constant has been checked successfully. */
8795
8796 /* Check if c >= type_low_bound. */
8797 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8798 {
8799 if (tree_int_cst_lt (c, type_low_bound))
8800 return false;
8801 ok_for_low_bound = true;
8802 }
8803 else
8804 ok_for_low_bound = false;
8805
8806 /* Check if c <= type_high_bound. */
8807 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8808 {
8809 if (tree_int_cst_lt (type_high_bound, c))
8810 return false;
8811 ok_for_high_bound = true;
8812 }
8813 else
8814 ok_for_high_bound = false;
8815
8816 /* If the constant fits both bounds, the result is known. */
8817 if (ok_for_low_bound && ok_for_high_bound)
8818 return true;
8819
8820 /* Perform some generic filtering which may allow making a decision
8821 even if the bounds are not constant. First, negative integers
8822      never fit in unsigned types.  */
8823 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8824 return false;
8825
8826 /* Second, narrower types always fit in wider ones. */
8827 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8828 return true;
8829
8830 /* Third, unsigned integers with top bit set never fit signed types. */
8831 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8832 {
8833 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8834 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8835 {
8836 /* When a tree_cst is converted to a wide-int, the precision
8837 is taken from the type. However, if the precision of the
8838 mode underneath the type is smaller than that, it is
8839 possible that the value will not fit. The test below
8840 fails if any bit is set between the sign bit of the
8841 underlying mode and the top bit of the type. */
8842 if (wi::ne_p (wi::zext (c, prec - 1), c))
8843 return false;
8844 }
8845 else if (wi::neg_p (c))
8846 return false;
8847 }
8848
8849   /* If we haven't been able to decide at this point, there is nothing more we
8850 can check ourselves here. Look at the base type if we have one and it
8851 has the same precision. */
8852 if (TREE_CODE (type) == INTEGER_TYPE
8853 && TREE_TYPE (type) != 0
8854 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8855 {
8856 type = TREE_TYPE (type);
8857 goto retry;
8858 }
8859
8860 /* Or to fits_to_tree_p, if nothing else. */
8861 return wi::fits_to_tree_p (c, type);
8862 }
8863
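/* Illustrative sketch, not part of the original source: checking whether
   int-typed constants fit in unsigned char (assuming the usual target where
   unsigned char is 8 bits wide),

       tree c200 = build_int_cst (integer_type_node, 200);
       tree c300 = build_int_cst (integer_type_node, 300);

   int_fits_type_p (c200, unsigned_char_type_node) returns true, while
   int_fits_type_p (c300, unsigned_char_type_node) returns false; both are
   decided by the constant bound checks above, since 200 lies within
   [0, 255] whereas 300 exceeds the type's TYPE_MAX_VALUE.  */
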
8864 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8865 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8866 represented (assuming two's-complement arithmetic) within the bit
8867 precision of the type are returned instead. */
8868
8869 void
8870 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8871 {
8872 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8873 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8874 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8875 else
8876 {
8877 if (TYPE_UNSIGNED (type))
8878 mpz_set_ui (min, 0);
8879 else
8880 {
8881 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8882 wi::to_mpz (mn, min, SIGNED);
8883 }
8884 }
8885
8886 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8887 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8888 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8889 else
8890 {
8891 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8892 wi::to_mpz (mn, max, TYPE_SIGN (type));
8893 }
8894 }
8895
8896 /* Return true if VAR is an automatic variable defined in function FN. */
8897
8898 bool
8899 auto_var_in_fn_p (const_tree var, const_tree fn)
8900 {
8901 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8902 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8903 || TREE_CODE (var) == PARM_DECL)
8904 && ! TREE_STATIC (var))
8905 || TREE_CODE (var) == LABEL_DECL
8906 || TREE_CODE (var) == RESULT_DECL));
8907 }
8908
8909 /* Subprogram of following function. Called by walk_tree.
8910
8911 Return *TP if it is an automatic variable or parameter of the
8912 function passed in as DATA. */
8913
8914 static tree
8915 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8916 {
8917 tree fn = (tree) data;
8918
8919 if (TYPE_P (*tp))
8920 *walk_subtrees = 0;
8921
8922 else if (DECL_P (*tp)
8923 && auto_var_in_fn_p (*tp, fn))
8924 return *tp;
8925
8926 return NULL_TREE;
8927 }
8928
8929 /* Returns true if T is, contains, or refers to a type with variable
8930 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8931 arguments, but not the return type. If FN is nonzero, only return
8932 true if a modifier of the type or position of FN is a variable or
8933 parameter inside FN.
8934
8935 This concept is more general than that of C99 'variably modified types':
8936 in C99, a struct type is never variably modified because a VLA may not
8937    appear as a structure member. However, in GNU C, code like:
8938
8939 struct S { int i[f()]; };
8940
8941 is valid, and other languages may define similar constructs. */
8942
8943 bool
8944 variably_modified_type_p (tree type, tree fn)
8945 {
8946 tree t;
8947
8948 /* Test if T is either variable (if FN is zero) or an expression containing
8949 a variable in FN. If TYPE isn't gimplified, return true also if
8950 gimplify_one_sizepos would gimplify the expression into a local
8951 variable. */
8952 #define RETURN_TRUE_IF_VAR(T) \
8953 do { tree _t = (T); \
8954 if (_t != NULL_TREE \
8955 && _t != error_mark_node \
8956 && TREE_CODE (_t) != INTEGER_CST \
8957 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8958 && (!fn \
8959 || (!TYPE_SIZES_GIMPLIFIED (type) \
8960 && !is_gimple_sizepos (_t)) \
8961 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8962 return true; } while (0)
8963
8964 if (type == error_mark_node)
8965 return false;
8966
8967 /* If TYPE itself has variable size, it is variably modified. */
8968 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8969 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8970
8971 switch (TREE_CODE (type))
8972 {
8973 case POINTER_TYPE:
8974 case REFERENCE_TYPE:
8975 case VECTOR_TYPE:
8976 if (variably_modified_type_p (TREE_TYPE (type), fn))
8977 return true;
8978 break;
8979
8980 case FUNCTION_TYPE:
8981 case METHOD_TYPE:
8982 /* If TYPE is a function type, it is variably modified if the
8983 return type is variably modified. */
8984 if (variably_modified_type_p (TREE_TYPE (type), fn))
8985 return true;
8986 break;
8987
8988 case INTEGER_TYPE:
8989 case REAL_TYPE:
8990 case FIXED_POINT_TYPE:
8991 case ENUMERAL_TYPE:
8992 case BOOLEAN_TYPE:
8993 /* Scalar types are variably modified if their end points
8994 aren't constant. */
8995 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8996 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8997 break;
8998
8999 case RECORD_TYPE:
9000 case UNION_TYPE:
9001 case QUAL_UNION_TYPE:
9002 /* We can't see if any of the fields are variably-modified by the
9003 definition we normally use, since that would produce infinite
9004 recursion via pointers. */
9005 /* This is variably modified if some field's type is. */
9006 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9007 if (TREE_CODE (t) == FIELD_DECL)
9008 {
9009 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9010 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9011 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9012
9013 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9014 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9015 }
9016 break;
9017
9018 case ARRAY_TYPE:
9019 /* Do not call ourselves to avoid infinite recursion. This is
9020 variably modified if the element type is. */
9021 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9022 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9023 break;
9024
9025 default:
9026 break;
9027 }
9028
9029 /* The current language may have other cases to check, but in general,
9030 all other types are not variably modified. */
9031 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9032
9033 #undef RETURN_TRUE_IF_VAR
9034 }
9035
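/* Illustrative sketch, not part of the original source: given GNU C code
   such as

       void f (int n) { int a[n]; ... }

   the ARRAY_TYPE of "a" has a non-constant TYPE_SIZE, so calling
   variably_modified_type_p on it with FN == NULL_TREE returns true; with FN
   set to the FUNCTION_DECL of f it typically still returns true, because
   the size expression refers to the PARM_DECL "n" declared inside f.  */
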
9036 /* Given a DECL or TYPE, return the scope in which it was declared, or
9037 NULL_TREE if there is no containing scope. */
9038
9039 tree
9040 get_containing_scope (const_tree t)
9041 {
9042 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9043 }
9044
9045 /* Return the innermost context enclosing DECL that is
9046 a FUNCTION_DECL, or zero if none. */
9047
9048 tree
9049 decl_function_context (const_tree decl)
9050 {
9051 tree context;
9052
9053 if (TREE_CODE (decl) == ERROR_MARK)
9054 return 0;
9055
9056 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9057 where we look up the function at runtime. Such functions always take
9058 a first argument of type 'pointer to real context'.
9059
9060 C++ should really be fixed to use DECL_CONTEXT for the real context,
9061 and use something else for the "virtual context". */
9062 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9063 context
9064 = TYPE_MAIN_VARIANT
9065 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9066 else
9067 context = DECL_CONTEXT (decl);
9068
9069 while (context && TREE_CODE (context) != FUNCTION_DECL)
9070 {
9071 if (TREE_CODE (context) == BLOCK)
9072 context = BLOCK_SUPERCONTEXT (context);
9073 else
9074 context = get_containing_scope (context);
9075 }
9076
9077 return context;
9078 }
9079
9080 /* Return the innermost context enclosing DECL that is
9081 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9082 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9083
9084 tree
9085 decl_type_context (const_tree decl)
9086 {
9087 tree context = DECL_CONTEXT (decl);
9088
9089 while (context)
9090 switch (TREE_CODE (context))
9091 {
9092 case NAMESPACE_DECL:
9093 case TRANSLATION_UNIT_DECL:
9094 return NULL_TREE;
9095
9096 case RECORD_TYPE:
9097 case UNION_TYPE:
9098 case QUAL_UNION_TYPE:
9099 return context;
9100
9101 case TYPE_DECL:
9102 case FUNCTION_DECL:
9103 context = DECL_CONTEXT (context);
9104 break;
9105
9106 case BLOCK:
9107 context = BLOCK_SUPERCONTEXT (context);
9108 break;
9109
9110 default:
9111 gcc_unreachable ();
9112 }
9113
9114 return NULL_TREE;
9115 }
9116
9117 /* CALL is a CALL_EXPR. Return the declaration for the function
9118 called, or NULL_TREE if the called function cannot be
9119 determined. */
9120
9121 tree
9122 get_callee_fndecl (const_tree call)
9123 {
9124 tree addr;
9125
9126 if (call == error_mark_node)
9127 return error_mark_node;
9128
9129 /* It's invalid to call this function with anything but a
9130 CALL_EXPR. */
9131 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9132
9133 /* The first operand to the CALL is the address of the function
9134 called. */
9135 addr = CALL_EXPR_FN (call);
9136
9137 /* If there is no function, return early. */
9138 if (addr == NULL_TREE)
9139 return NULL_TREE;
9140
9141 STRIP_NOPS (addr);
9142
9143 /* If this is a readonly function pointer, extract its initial value. */
9144 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9145 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9146 && DECL_INITIAL (addr))
9147 addr = DECL_INITIAL (addr);
9148
9149 /* If the address is just `&f' for some function `f', then we know
9150 that `f' is being called. */
9151 if (TREE_CODE (addr) == ADDR_EXPR
9152 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9153 return TREE_OPERAND (addr, 0);
9154
9155 /* We couldn't figure out what was being called. */
9156 return NULL_TREE;
9157 }
9158
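/* Illustrative sketch, not part of the original source: for a direct call
   such as "foo (x)" the front ends build the callee operand as &foo, i.e.
   an ADDR_EXPR of the FUNCTION_DECL, so

       tree fndecl = get_callee_fndecl (call_expr);

   returns that FUNCTION_DECL.  For an indirect call through a function
   pointer variable the address does not resolve to a declaration and the
   result is NULL_TREE.  call_expr here stands for whatever CALL_EXPR node
   the caller is examining.  */
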
9159 #define TREE_MEM_USAGE_SPACES 40
9160
9161 /* Print debugging information about tree nodes generated during the compile,
9162 and any language-specific information. */
9163
9164 void
9165 dump_tree_statistics (void)
9166 {
9167 if (GATHER_STATISTICS)
9168 {
9169 int i;
9170 int total_nodes, total_bytes;
9171 fprintf (stderr, "\nKind Nodes Bytes\n");
9172 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9173 total_nodes = total_bytes = 0;
9174 for (i = 0; i < (int) all_kinds; i++)
9175 {
9176 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9177 tree_node_counts[i], tree_node_sizes[i]);
9178 total_nodes += tree_node_counts[i];
9179 total_bytes += tree_node_sizes[i];
9180 }
9181 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9182 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9183 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9184 fprintf (stderr, "Code Nodes\n");
9185 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9186 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9187 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9188 tree_code_counts[i]);
9189 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9190 fprintf (stderr, "\n");
9191 ssanames_print_statistics ();
9192 fprintf (stderr, "\n");
9193 phinodes_print_statistics ();
9194 fprintf (stderr, "\n");
9195 }
9196 else
9197 fprintf (stderr, "(No per-node statistics)\n");
9198
9199 print_type_hash_statistics ();
9200 print_debug_expr_statistics ();
9201 print_value_expr_statistics ();
9202 lang_hooks.print_statistics ();
9203 }
9204 \f
9205 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9206
9207 /* Generate a crc32 over the BITS most significant bits of VALUE. */
9208
9209 static unsigned
9210 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9211 {
9212 unsigned ix;
9213
9214 for (ix = bits; ix--; value <<= 1)
9215 {
9216 unsigned feedback;
9217
9218 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9219 chksum <<= 1;
9220 chksum ^= feedback;
9221 }
9222 return chksum;
9223 }
9224
9225 /* Generate a crc32 of a 32-bit unsigned. */
9226
9227 unsigned
9228 crc32_unsigned (unsigned chksum, unsigned value)
9229 {
9230 return crc32_unsigned_bits (chksum, value, 32);
9231 }
9232
9233 /* Generate a crc32 of a byte. */
9234
9235 unsigned
9236 crc32_byte (unsigned chksum, char byte)
9237 {
9238 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9239 }
9240
9241 /* Generate a crc32 of a string. */
9242
9243 unsigned
9244 crc32_string (unsigned chksum, const char *string)
9245 {
9246 do
9247 {
9248 chksum = crc32_byte (chksum, *string);
9249 }
9250 while (*string++);
9251 return chksum;
9252 }
9253
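/* Illustrative sketch, not part of the original source: the checksum is
   used below to mix an otherwise non-unique name into a symbol, e.g.

       unsigned chk = crc32_string (0, "foo");

   Note that the loop above is a do/while, so the terminating NUL byte of
   the string is folded into the checksum as well.  */
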
9254 /* P is a string that will be used in a symbol. Mask out any characters
9255 that are not valid in that context. */
9256
9257 void
9258 clean_symbol_name (char *p)
9259 {
9260 for (; *p; p++)
9261 if (! (ISALNUM (*p)
9262 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9263 || *p == '$'
9264 #endif
9265 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9266 || *p == '.'
9267 #endif
9268 ))
9269 *p = '_';
9270 }
9271
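/* Illustrative sketch, not part of the original source: applied to a file
   name such as "my file-1.c",

       char buf[] = "my file-1.c";
       clean_symbol_name (buf);

   every character that is neither alphanumeric nor one of the optionally
   allowed '$' and '.' is replaced by '_', so buf becomes "my_file_1.c" or
   "my_file_1_c" depending on whether NO_DOT_IN_LABEL is defined.  */
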
9272 /* Generate a name for a special-purpose function.
9273 The generated name may need to be unique across the whole link.
9274 Changes to this function may also require corresponding changes to
9275 xstrdup_mask_random.
9276 TYPE is some string to identify the purpose of this function to the
9277 linker or collect2; it must start with an uppercase letter,
9278 one of:
9279 I - for constructors
9280 D - for destructors
9281 N - for C++ anonymous namespaces
9282 F - for DWARF unwind frame information. */
9283
9284 tree
9285 get_file_function_name (const char *type)
9286 {
9287 char *buf;
9288 const char *p;
9289 char *q;
9290
9291 /* If we already have a name we know to be unique, just use that. */
9292 if (first_global_object_name)
9293 p = q = ASTRDUP (first_global_object_name);
9294 /* If the target is handling the constructors/destructors, they
9295 will be local to this file and the name is only necessary for
9296 debugging purposes.
9297    We also assign sub_I and sub_D suffixes to constructors called from
9298 the global static constructors. These are always local. */
9299 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9300 || (strncmp (type, "sub_", 4) == 0
9301 && (type[4] == 'I' || type[4] == 'D')))
9302 {
9303 const char *file = main_input_filename;
9304 if (! file)
9305 file = LOCATION_FILE (input_location);
9306 /* Just use the file's basename, because the full pathname
9307 might be quite long. */
9308 p = q = ASTRDUP (lbasename (file));
9309 }
9310 else
9311 {
9312 /* Otherwise, the name must be unique across the entire link.
9313 We don't have anything that we know to be unique to this translation
9314 unit, so use what we do have and throw in some randomness. */
9315 unsigned len;
9316 const char *name = weak_global_object_name;
9317 const char *file = main_input_filename;
9318
9319 if (! name)
9320 name = "";
9321 if (! file)
9322 file = LOCATION_FILE (input_location);
9323
9324 len = strlen (file);
9325 q = (char *) alloca (9 + 17 + len + 1);
9326 memcpy (q, file, len + 1);
9327
9328 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9329 crc32_string (0, name), get_random_seed (false));
9330
9331 p = q;
9332 }
9333
9334 clean_symbol_name (q);
9335 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9336 + strlen (type));
9337
9338 /* Set up the name of the file-level functions we may need.
9339 Use a global object (which is already required to be unique over
9340 the program) rather than the file name (which imposes extra
9341 constraints). */
9342 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9343
9344 return get_identifier (buf);
9345 }
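
/* Illustrative sketch, not part of the original source: when the file
   defines a global object, say first_global_object_name is "main",

       tree id = get_file_function_name ("I");

   yields the identifier "_GLOBAL__I_main" (modulo clean_symbol_name
   rewriting), the familiar naming scheme for static constructor functions;
   "D" is used analogously for destructors.  */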
9346 \f
9347 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9348
9349 /* Complain that the tree code of NODE does not match the expected 0
9350 terminated list of trailing codes. The trailing code list can be
9351 empty, for a more vague error message. FILE, LINE, and FUNCTION
9352 are of the caller. */
9353
9354 void
9355 tree_check_failed (const_tree node, const char *file,
9356 int line, const char *function, ...)
9357 {
9358 va_list args;
9359 const char *buffer;
9360 unsigned length = 0;
9361 enum tree_code code;
9362
9363 va_start (args, function);
9364 while ((code = (enum tree_code) va_arg (args, int)))
9365 length += 4 + strlen (get_tree_code_name (code));
9366 va_end (args);
9367 if (length)
9368 {
9369 char *tmp;
9370 va_start (args, function);
9371 length += strlen ("expected ");
9372 buffer = tmp = (char *) alloca (length);
9373 length = 0;
9374 while ((code = (enum tree_code) va_arg (args, int)))
9375 {
9376 const char *prefix = length ? " or " : "expected ";
9377
9378 strcpy (tmp + length, prefix);
9379 length += strlen (prefix);
9380 strcpy (tmp + length, get_tree_code_name (code));
9381 length += strlen (get_tree_code_name (code));
9382 }
9383 va_end (args);
9384 }
9385 else
9386 buffer = "unexpected node";
9387
9388 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9389 buffer, get_tree_code_name (TREE_CODE (node)),
9390 function, trim_filename (file), line);
9391 }
9392
9393 /* Complain that the tree code of NODE does match the expected 0
9394 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9395 the caller. */
9396
9397 void
9398 tree_not_check_failed (const_tree node, const char *file,
9399 int line, const char *function, ...)
9400 {
9401 va_list args;
9402 char *buffer;
9403 unsigned length = 0;
9404 enum tree_code code;
9405
9406 va_start (args, function);
9407 while ((code = (enum tree_code) va_arg (args, int)))
9408 length += 4 + strlen (get_tree_code_name (code));
9409 va_end (args);
9410 va_start (args, function);
9411 buffer = (char *) alloca (length);
9412 length = 0;
9413 while ((code = (enum tree_code) va_arg (args, int)))
9414 {
9415 if (length)
9416 {
9417 strcpy (buffer + length, " or ");
9418 length += 4;
9419 }
9420 strcpy (buffer + length, get_tree_code_name (code));
9421 length += strlen (get_tree_code_name (code));
9422 }
9423 va_end (args);
9424
9425 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9426 buffer, get_tree_code_name (TREE_CODE (node)),
9427 function, trim_filename (file), line);
9428 }
9429
9430 /* Similar to tree_check_failed, except that we check for a class of tree
9431 code, given in CL. */
9432
9433 void
9434 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9435 const char *file, int line, const char *function)
9436 {
9437 internal_error
9438 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9439 TREE_CODE_CLASS_STRING (cl),
9440 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9441 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9442 }
9443
9444 /* Similar to tree_check_failed, except that instead of specifying a
9445 dozen codes, use the knowledge that they're all sequential. */
9446
9447 void
9448 tree_range_check_failed (const_tree node, const char *file, int line,
9449 const char *function, enum tree_code c1,
9450 enum tree_code c2)
9451 {
9452 char *buffer;
9453 unsigned length = 0;
9454 unsigned int c;
9455
9456 for (c = c1; c <= c2; ++c)
9457 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9458
9459 length += strlen ("expected ");
9460 buffer = (char *) alloca (length);
9461 length = 0;
9462
9463 for (c = c1; c <= c2; ++c)
9464 {
9465 const char *prefix = length ? " or " : "expected ";
9466
9467 strcpy (buffer + length, prefix);
9468 length += strlen (prefix);
9469 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9470 length += strlen (get_tree_code_name ((enum tree_code) c));
9471 }
9472
9473 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9474 buffer, get_tree_code_name (TREE_CODE (node)),
9475 function, trim_filename (file), line);
9476 }
9477
9478
9479 /* Similar to tree_check_failed, except that we check that a tree does
9480    not belong to the class specified in CL. */
9481
9482 void
9483 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9484 const char *file, int line, const char *function)
9485 {
9486 internal_error
9487 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9488 TREE_CODE_CLASS_STRING (cl),
9489 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9490 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9491 }
9492
9493
9494 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9495
9496 void
9497 omp_clause_check_failed (const_tree node, const char *file, int line,
9498 const char *function, enum omp_clause_code code)
9499 {
9500 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9501 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9502 function, trim_filename (file), line);
9503 }
9504
9505
9506 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9507
9508 void
9509 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9510 const char *function, enum omp_clause_code c1,
9511 enum omp_clause_code c2)
9512 {
9513 char *buffer;
9514 unsigned length = 0;
9515 unsigned int c;
9516
9517 for (c = c1; c <= c2; ++c)
9518 length += 4 + strlen (omp_clause_code_name[c]);
9519
9520 length += strlen ("expected ");
9521 buffer = (char *) alloca (length);
9522 length = 0;
9523
9524 for (c = c1; c <= c2; ++c)
9525 {
9526 const char *prefix = length ? " or " : "expected ";
9527
9528 strcpy (buffer + length, prefix);
9529 length += strlen (prefix);
9530 strcpy (buffer + length, omp_clause_code_name[c]);
9531 length += strlen (omp_clause_code_name[c]);
9532 }
9533
9534 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9535 buffer, omp_clause_code_name[TREE_CODE (node)],
9536 function, trim_filename (file), line);
9537 }
9538
9539
9540 #undef DEFTREESTRUCT
9541 #define DEFTREESTRUCT(VAL, NAME) NAME,
9542
9543 static const char *ts_enum_names[] = {
9544 #include "treestruct.def"
9545 };
9546 #undef DEFTREESTRUCT
9547
9548 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9549
9550 /* Similar to tree_class_check_failed, except that we check for
9551 whether CODE contains the tree structure identified by EN. */
9552
9553 void
9554 tree_contains_struct_check_failed (const_tree node,
9555 const enum tree_node_structure_enum en,
9556 const char *file, int line,
9557 const char *function)
9558 {
9559 internal_error
9560 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9561 TS_ENUM_NAME (en),
9562 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9563 }
9564
9565
9566 /* Similar to above, except that the check is for the bounds of a
9567 TREE_INT_CST's (dynamically sized) element vector. */
9568
9569 void
9570 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9571 const char *function)
9572 {
9573 internal_error
9574 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9575 idx + 1, len, function, trim_filename (file), line);
9576 }
9577
9578 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9579 (dynamically sized) vector. */
9580
9581 void
9582 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9583 const char *function)
9584 {
9585 internal_error
9586 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9587 idx + 1, len, function, trim_filename (file), line);
9588 }
9589
9590 /* Similar to above, except that the check is for the bounds of the operand
9591 vector of an expression node EXP. */
9592
9593 void
9594 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9595 int line, const char *function)
9596 {
9597 enum tree_code code = TREE_CODE (exp);
9598 internal_error
9599 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9600 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9601 function, trim_filename (file), line);
9602 }
9603
9604 /* Similar to above, except that the check is for the number of
9605 operands of an OMP_CLAUSE node. */
9606
9607 void
9608 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9609 int line, const char *function)
9610 {
9611 internal_error
9612 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9613 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9614 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9615 trim_filename (file), line);
9616 }
9617 #endif /* ENABLE_TREE_CHECKING */
9618 \f
9619 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9620 and mapped to the machine mode MODE. Initialize its fields and build
9621 the information necessary for debugging output. */
9622
9623 static tree
9624 make_vector_type (tree innertype, int nunits, machine_mode mode)
9625 {
9626 tree t;
9627 inchash::hash hstate;
9628
9629 t = make_node (VECTOR_TYPE);
9630 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9631 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9632 SET_TYPE_MODE (t, mode);
9633
9634 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9635 SET_TYPE_STRUCTURAL_EQUALITY (t);
9636 else if (TYPE_CANONICAL (innertype) != innertype
9637 || mode != VOIDmode)
9638 TYPE_CANONICAL (t)
9639 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9640
9641 layout_type (t);
9642
9643 hstate.add_wide_int (VECTOR_TYPE);
9644 hstate.add_wide_int (nunits);
9645 hstate.add_wide_int (mode);
9646 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9647 t = type_hash_canon (hstate.end (), t);
9648
9649 /* We have built a main variant, based on the main variant of the
9650 inner type. Use it to build the variant we return. */
9651 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9652 && TREE_TYPE (t) != innertype)
9653 return build_type_attribute_qual_variant (t,
9654 TYPE_ATTRIBUTES (innertype),
9655 TYPE_QUALS (innertype));
9656
9657 return t;
9658 }
9659
9660 static tree
9661 make_or_reuse_type (unsigned size, int unsignedp)
9662 {
9663 int i;
9664
9665 if (size == INT_TYPE_SIZE)
9666 return unsignedp ? unsigned_type_node : integer_type_node;
9667 if (size == CHAR_TYPE_SIZE)
9668 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9669 if (size == SHORT_TYPE_SIZE)
9670 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9671 if (size == LONG_TYPE_SIZE)
9672 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9673 if (size == LONG_LONG_TYPE_SIZE)
9674 return (unsignedp ? long_long_unsigned_type_node
9675 : long_long_integer_type_node);
9676
9677 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9678 if (size == int_n_data[i].bitsize
9679 && int_n_enabled_p[i])
9680 return (unsignedp ? int_n_trees[i].unsigned_type
9681 : int_n_trees[i].signed_type);
9682
9683 if (unsignedp)
9684 return make_unsigned_type (size);
9685 else
9686 return make_signed_type (size);
9687 }
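/* Illustrative note (exposition added here, not part of the original
   tree.c): on a target where INT_TYPE_SIZE is 32, make_or_reuse_type (32, 1)
   simply returns unsigned_type_node; a width that matches neither a standard
   C type size nor an enabled __intN width, say make_or_reuse_type (24, 0),
   falls through to make_signed_type (24) and creates a fresh node.  */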
9688
9689 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9690
9691 static tree
9692 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9693 {
9694 if (satp)
9695 {
9696 if (size == SHORT_FRACT_TYPE_SIZE)
9697 return unsignedp ? sat_unsigned_short_fract_type_node
9698 : sat_short_fract_type_node;
9699 if (size == FRACT_TYPE_SIZE)
9700 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9701 if (size == LONG_FRACT_TYPE_SIZE)
9702 return unsignedp ? sat_unsigned_long_fract_type_node
9703 : sat_long_fract_type_node;
9704 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9705 return unsignedp ? sat_unsigned_long_long_fract_type_node
9706 : sat_long_long_fract_type_node;
9707 }
9708 else
9709 {
9710 if (size == SHORT_FRACT_TYPE_SIZE)
9711 return unsignedp ? unsigned_short_fract_type_node
9712 : short_fract_type_node;
9713 if (size == FRACT_TYPE_SIZE)
9714 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9715 if (size == LONG_FRACT_TYPE_SIZE)
9716 return unsignedp ? unsigned_long_fract_type_node
9717 : long_fract_type_node;
9718 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9719 return unsignedp ? unsigned_long_long_fract_type_node
9720 : long_long_fract_type_node;
9721 }
9722
9723 return make_fract_type (size, unsignedp, satp);
9724 }
9725
9726 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9727
9728 static tree
9729 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9730 {
9731 if (satp)
9732 {
9733 if (size == SHORT_ACCUM_TYPE_SIZE)
9734 return unsignedp ? sat_unsigned_short_accum_type_node
9735 : sat_short_accum_type_node;
9736 if (size == ACCUM_TYPE_SIZE)
9737 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9738 if (size == LONG_ACCUM_TYPE_SIZE)
9739 return unsignedp ? sat_unsigned_long_accum_type_node
9740 : sat_long_accum_type_node;
9741 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9742 return unsignedp ? sat_unsigned_long_long_accum_type_node
9743 : sat_long_long_accum_type_node;
9744 }
9745 else
9746 {
9747 if (size == SHORT_ACCUM_TYPE_SIZE)
9748 return unsignedp ? unsigned_short_accum_type_node
9749 : short_accum_type_node;
9750 if (size == ACCUM_TYPE_SIZE)
9751 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9752 if (size == LONG_ACCUM_TYPE_SIZE)
9753 return unsignedp ? unsigned_long_accum_type_node
9754 : long_accum_type_node;
9755 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9756 return unsignedp ? unsigned_long_long_accum_type_node
9757 : long_long_accum_type_node;
9758 }
9759
9760 return make_accum_type (size, unsignedp, satp);
9761 }
9762
9763
9764 /* Create an atomic variant node for TYPE. This routine is called
9765 during initialization of data types to create the 5 basic atomic
9766 types. The generic build_variant_type function requires these to
9767 already be set up in order to function properly, so cannot be
9768 called from there. If ALIGN is non-zero, then ensure alignment is
9769 overridden to this value. */
9770
9771 static tree
9772 build_atomic_base (tree type, unsigned int align)
9773 {
9774 tree t;
9775
9776 /* Make sure it's not already registered. */
9777 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9778 return t;
9779
9780 t = build_variant_type_copy (type);
9781 set_type_quals (t, TYPE_QUAL_ATOMIC);
9782
9783 if (align)
9784 TYPE_ALIGN (t) = align;
9785
9786 return t;
9787 }
9788
9789 /* Create nodes for all integer types (and error_mark_node) using the sizes
9790 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9791 SHORT_DOUBLE specifies whether double should be of the same precision
9792 as float. */
9793
9794 void
9795 build_common_tree_nodes (bool signed_char, bool short_double)
9796 {
9797 int i;
9798
9799 error_mark_node = make_node (ERROR_MARK);
9800 TREE_TYPE (error_mark_node) = error_mark_node;
9801
9802 initialize_sizetypes ();
9803
9804 /* Define both `signed char' and `unsigned char'. */
9805 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9806 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9807 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9808 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9809
9810 /* Define `char', which is like either `signed char' or `unsigned char'
9811 but not the same as either. */
9812 char_type_node
9813 = (signed_char
9814 ? make_signed_type (CHAR_TYPE_SIZE)
9815 : make_unsigned_type (CHAR_TYPE_SIZE));
9816 TYPE_STRING_FLAG (char_type_node) = 1;
9817
9818 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9819 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9820 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9821 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9822 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9823 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9824 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9825 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9826
9827 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9828 {
9829 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9830 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9831 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9832 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9833
9834 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9835 && int_n_enabled_p[i])
9836 {
9837 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9838 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9839 }
9840 }
9841
9842 /* Define a boolean type. This type only represents boolean values but
9843 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9844 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9845 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9846 TYPE_PRECISION (boolean_type_node) = 1;
9847 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9848
9849 /* Define what type to use for size_t. */
9850 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9851 size_type_node = unsigned_type_node;
9852 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9853 size_type_node = long_unsigned_type_node;
9854 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9855 size_type_node = long_long_unsigned_type_node;
9856 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9857 size_type_node = short_unsigned_type_node;
9858 else
9859 {
9860 int i;
9861
9862 size_type_node = NULL_TREE;
9863 for (i = 0; i < NUM_INT_N_ENTS; i++)
9864 if (int_n_enabled_p[i])
9865 {
9866 char name[50];
9867 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9868
9869 if (strcmp (name, SIZE_TYPE) == 0)
9870 {
9871 size_type_node = int_n_trees[i].unsigned_type;
9872 }
9873 }
9874 if (size_type_node == NULL_TREE)
9875 gcc_unreachable ();
9876 }
9877
9878 /* Fill in the rest of the sized types. Reuse existing type nodes
9879 when possible. */
9880 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9881 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9882 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9883 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9884 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9885
9886 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9887 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9888 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9889 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9890 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9891
9892 /* Don't call build_qualified_type for atomics. That routine does
9893 special processing for atomics, and until they are initialized
9894 it's better not to make that call.
9895
9896 Check to see if there is a target override for atomic types. */
9897
9898 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9899 targetm.atomic_align_for_mode (QImode));
9900 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9901 targetm.atomic_align_for_mode (HImode));
9902 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9903 targetm.atomic_align_for_mode (SImode));
9904 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9905 targetm.atomic_align_for_mode (DImode));
9906 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9907 targetm.atomic_align_for_mode (TImode));
9908
9909 access_public_node = get_identifier ("public");
9910 access_protected_node = get_identifier ("protected");
9911 access_private_node = get_identifier ("private");
9912
9913 /* Define these next since types below may use them. */
9914 integer_zero_node = build_int_cst (integer_type_node, 0);
9915 integer_one_node = build_int_cst (integer_type_node, 1);
9916 integer_three_node = build_int_cst (integer_type_node, 3);
9917 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9918
9919 size_zero_node = size_int (0);
9920 size_one_node = size_int (1);
9921 bitsize_zero_node = bitsize_int (0);
9922 bitsize_one_node = bitsize_int (1);
9923 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9924
9925 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9926 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9927
9928 void_type_node = make_node (VOID_TYPE);
9929 layout_type (void_type_node);
9930
9931 pointer_bounds_type_node = targetm.chkp_bound_type ();
9932
9933 /* We are not going to have real types in C with less than byte alignment,
9934 so we might as well not have any types that claim to have it. */
9935 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9936 TYPE_USER_ALIGN (void_type_node) = 0;
9937
9938 void_node = make_node (VOID_CST);
9939 TREE_TYPE (void_node) = void_type_node;
9940
9941 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9942 layout_type (TREE_TYPE (null_pointer_node));
9943
9944 ptr_type_node = build_pointer_type (void_type_node);
9945 const_ptr_type_node
9946 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9947 fileptr_type_node = ptr_type_node;
9948
9949 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9950
9951 float_type_node = make_node (REAL_TYPE);
9952 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9953 layout_type (float_type_node);
9954
9955 double_type_node = make_node (REAL_TYPE);
9956 if (short_double)
9957 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9958 else
9959 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9960 layout_type (double_type_node);
9961
9962 long_double_type_node = make_node (REAL_TYPE);
9963 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9964 layout_type (long_double_type_node);
9965
9966 float_ptr_type_node = build_pointer_type (float_type_node);
9967 double_ptr_type_node = build_pointer_type (double_type_node);
9968 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9969 integer_ptr_type_node = build_pointer_type (integer_type_node);
9970
9971 /* Fixed size integer types. */
9972 uint16_type_node = make_or_reuse_type (16, 1);
9973 uint32_type_node = make_or_reuse_type (32, 1);
9974 uint64_type_node = make_or_reuse_type (64, 1);
9975
9976 /* Decimal float types. */
9977 dfloat32_type_node = make_node (REAL_TYPE);
9978 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9979 layout_type (dfloat32_type_node);
9980 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9981 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9982
9983 dfloat64_type_node = make_node (REAL_TYPE);
9984 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9985 layout_type (dfloat64_type_node);
9986 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9987 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9988
9989 dfloat128_type_node = make_node (REAL_TYPE);
9990 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9991 layout_type (dfloat128_type_node);
9992 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9993 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9994
9995 complex_integer_type_node = build_complex_type (integer_type_node);
9996 complex_float_type_node = build_complex_type (float_type_node);
9997 complex_double_type_node = build_complex_type (double_type_node);
9998 complex_long_double_type_node = build_complex_type (long_double_type_node);
9999
10000 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10001 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10002 sat_ ## KIND ## _type_node = \
10003 make_sat_signed_ ## KIND ## _type (SIZE); \
10004 sat_unsigned_ ## KIND ## _type_node = \
10005 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10006 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10007 unsigned_ ## KIND ## _type_node = \
10008 make_unsigned_ ## KIND ## _type (SIZE);
10009
10010 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10011 sat_ ## WIDTH ## KIND ## _type_node = \
10012 make_sat_signed_ ## KIND ## _type (SIZE); \
10013 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10014 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10015 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10016 unsigned_ ## WIDTH ## KIND ## _type_node = \
10017 make_unsigned_ ## KIND ## _type (SIZE);
10018
10019 /* Make fixed-point type nodes based on four different widths. */
10020 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10021 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10022 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10023 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10024 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10025
10026 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10027 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10028 NAME ## _type_node = \
10029 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10030 u ## NAME ## _type_node = \
10031 make_or_reuse_unsigned_ ## KIND ## _type \
10032 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10033 sat_ ## NAME ## _type_node = \
10034 make_or_reuse_sat_signed_ ## KIND ## _type \
10035 (GET_MODE_BITSIZE (MODE ## mode)); \
10036 sat_u ## NAME ## _type_node = \
10037 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10038 (GET_MODE_BITSIZE (U ## MODE ## mode));
10039
10040 /* Fixed-point type and mode nodes. */
10041 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10042 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10043 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10044 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10045 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10046 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10047 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10048 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10049 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10050 MAKE_FIXED_MODE_NODE (accum, da, DA)
10051 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10052
10053 {
10054 tree t = targetm.build_builtin_va_list ();
10055
10056 /* Many back-ends define record types without setting TYPE_NAME.
10057 If we copied the record type here, we'd keep the original
10058 record type without a name. This breaks name mangling. So,
10059 don't copy record types and let c_common_nodes_and_builtins()
10060 declare the type to be __builtin_va_list. */
10061 if (TREE_CODE (t) != RECORD_TYPE)
10062 t = build_variant_type_copy (t);
10063
10064 va_list_type_node = t;
10065 }
10066 }
10067
10068 /* Modify DECL for given flags.
10069 TM_PURE attribute is set only on types, so the function will modify
10070 DECL's type when ECF_TM_PURE is used. */
10071
10072 void
10073 set_call_expr_flags (tree decl, int flags)
10074 {
10075 if (flags & ECF_NOTHROW)
10076 TREE_NOTHROW (decl) = 1;
10077 if (flags & ECF_CONST)
10078 TREE_READONLY (decl) = 1;
10079 if (flags & ECF_PURE)
10080 DECL_PURE_P (decl) = 1;
10081 if (flags & ECF_LOOPING_CONST_OR_PURE)
10082 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10083 if (flags & ECF_NOVOPS)
10084 DECL_IS_NOVOPS (decl) = 1;
10085 if (flags & ECF_NORETURN)
10086 TREE_THIS_VOLATILE (decl) = 1;
10087 if (flags & ECF_MALLOC)
10088 DECL_IS_MALLOC (decl) = 1;
10089 if (flags & ECF_RETURNS_TWICE)
10090 DECL_IS_RETURNS_TWICE (decl) = 1;
10091 if (flags & ECF_LEAF)
10092 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10093 NULL, DECL_ATTRIBUTES (decl));
10094 if ((flags & ECF_TM_PURE) && flag_tm)
10095 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10096 /* Looping const or pure is implied by noreturn.
10097 There is currently no way to declare looping const or looping pure alone. */
10098 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10099 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10100 }
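/* Illustrative note (exposition only, not part of the original source):
   local_define_builtin below relies on this routine; for instance
   __builtin_memcmp is registered with

     set_call_expr_flags (decl, ECF_PURE | ECF_NOTHROW | ECF_LEAF);

   which sets DECL_PURE_P and TREE_NOTHROW on the decl and attaches the
   "leaf" attribute.  */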
10101
10102
10103 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10104
10105 static void
10106 local_define_builtin (const char *name, tree type, enum built_in_function code,
10107 const char *library_name, int ecf_flags)
10108 {
10109 tree decl;
10110
10111 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10112 library_name, NULL_TREE);
10113 set_call_expr_flags (decl, ecf_flags);
10114
10115 set_builtin_decl (code, decl, true);
10116 }
10117
10118 /* Call this function after instantiating all builtins that the language
10119 front end cares about. This will build the rest of the builtins
10120 and internal functions that are relied upon by the tree optimizers and
10121 the middle-end. */
10122
10123 void
10124 build_common_builtin_nodes (void)
10125 {
10126 tree tmp, ftype;
10127 int ecf_flags;
10128
10129 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10130 {
10131 ftype = build_function_type (void_type_node, void_list_node);
10132 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10133 "__builtin_unreachable",
10134 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10135 | ECF_CONST);
10136 }
10137
10138 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10139 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10140 {
10141 ftype = build_function_type_list (ptr_type_node,
10142 ptr_type_node, const_ptr_type_node,
10143 size_type_node, NULL_TREE);
10144
10145 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10146 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10147 "memcpy", ECF_NOTHROW | ECF_LEAF);
10148 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10149 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10150 "memmove", ECF_NOTHROW | ECF_LEAF);
10151 }
10152
10153 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10154 {
10155 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10156 const_ptr_type_node, size_type_node,
10157 NULL_TREE);
10158 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10159 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10160 }
10161
10162 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10163 {
10164 ftype = build_function_type_list (ptr_type_node,
10165 ptr_type_node, integer_type_node,
10166 size_type_node, NULL_TREE);
10167 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10168 "memset", ECF_NOTHROW | ECF_LEAF);
10169 }
10170
10171 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10172 {
10173 ftype = build_function_type_list (ptr_type_node,
10174 size_type_node, NULL_TREE);
10175 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10176 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10177 }
10178
10179 ftype = build_function_type_list (ptr_type_node, size_type_node,
10180 size_type_node, NULL_TREE);
10181 local_define_builtin ("__builtin_alloca_with_align", ftype,
10182 BUILT_IN_ALLOCA_WITH_ALIGN,
10183 "__builtin_alloca_with_align",
10184 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10185
10186 /* If we're checking the stack, `alloca' can throw. */
10187 if (flag_stack_check)
10188 {
10189 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10190 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10191 }
10192
10193 ftype = build_function_type_list (void_type_node,
10194 ptr_type_node, ptr_type_node,
10195 ptr_type_node, NULL_TREE);
10196 local_define_builtin ("__builtin_init_trampoline", ftype,
10197 BUILT_IN_INIT_TRAMPOLINE,
10198 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10199 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10200 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10201 "__builtin_init_heap_trampoline",
10202 ECF_NOTHROW | ECF_LEAF);
10203
10204 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10205 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10206 BUILT_IN_ADJUST_TRAMPOLINE,
10207 "__builtin_adjust_trampoline",
10208 ECF_CONST | ECF_NOTHROW);
10209
10210 ftype = build_function_type_list (void_type_node,
10211 ptr_type_node, ptr_type_node, NULL_TREE);
10212 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10213 BUILT_IN_NONLOCAL_GOTO,
10214 "__builtin_nonlocal_goto",
10215 ECF_NORETURN | ECF_NOTHROW);
10216
10217 ftype = build_function_type_list (void_type_node,
10218 ptr_type_node, ptr_type_node, NULL_TREE);
10219 local_define_builtin ("__builtin_setjmp_setup", ftype,
10220 BUILT_IN_SETJMP_SETUP,
10221 "__builtin_setjmp_setup", ECF_NOTHROW);
10222
10223 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10224 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10225 BUILT_IN_SETJMP_RECEIVER,
10226 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10227
10228 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10229 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10230 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10231
10232 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10233 local_define_builtin ("__builtin_stack_restore", ftype,
10234 BUILT_IN_STACK_RESTORE,
10235 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10236
10237 /* If there's a possibility that we might use the ARM EABI, build the
10238 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10239 if (targetm.arm_eabi_unwinder)
10240 {
10241 ftype = build_function_type_list (void_type_node, NULL_TREE);
10242 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10243 BUILT_IN_CXA_END_CLEANUP,
10244 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10245 }
10246
10247 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10248 local_define_builtin ("__builtin_unwind_resume", ftype,
10249 BUILT_IN_UNWIND_RESUME,
10250 ((targetm_common.except_unwind_info (&global_options)
10251 == UI_SJLJ)
10252 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10253 ECF_NORETURN);
10254
10255 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10256 {
10257 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10258 NULL_TREE);
10259 local_define_builtin ("__builtin_return_address", ftype,
10260 BUILT_IN_RETURN_ADDRESS,
10261 "__builtin_return_address",
10262 ECF_NOTHROW);
10263 }
10264
10265 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10266 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10267 {
10268 ftype = build_function_type_list (void_type_node, ptr_type_node,
10269 ptr_type_node, NULL_TREE);
10270 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10271 local_define_builtin ("__cyg_profile_func_enter", ftype,
10272 BUILT_IN_PROFILE_FUNC_ENTER,
10273 "__cyg_profile_func_enter", 0);
10274 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10275 local_define_builtin ("__cyg_profile_func_exit", ftype,
10276 BUILT_IN_PROFILE_FUNC_EXIT,
10277 "__cyg_profile_func_exit", 0);
10278 }
10279
10280 /* The exception object and filter values from the runtime. The argument
10281 must be zero before exception lowering, i.e. from the front end. After
10282 exception lowering, it will be the region number for the exception
10283 landing pad. These functions are PURE instead of CONST to prevent
10284 them from being hoisted past the exception edge that will initialize
10285 its value in the landing pad. */
10286 ftype = build_function_type_list (ptr_type_node,
10287 integer_type_node, NULL_TREE);
10288 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10289 /* Only use TM_PURE if we have TM language support. */
10290 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10291 ecf_flags |= ECF_TM_PURE;
10292 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10293 "__builtin_eh_pointer", ecf_flags);
10294
10295 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10296 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10297 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10298 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10299
10300 ftype = build_function_type_list (void_type_node,
10301 integer_type_node, integer_type_node,
10302 NULL_TREE);
10303 local_define_builtin ("__builtin_eh_copy_values", ftype,
10304 BUILT_IN_EH_COPY_VALUES,
10305 "__builtin_eh_copy_values", ECF_NOTHROW);
10306
10307 /* Complex multiplication and division. These are handled as builtins
10308 rather than optabs because emit_library_call_value doesn't support
10309 complex. Further, we can do slightly better with folding these
10310 beasties if the real and imaginary parts of the arguments are separate.
10311 {
10312 int mode;
10313
10314 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10315 {
10316 char mode_name_buf[4], *q;
10317 const char *p;
10318 enum built_in_function mcode, dcode;
10319 tree type, inner_type;
10320 const char *prefix = "__";
10321
10322 if (targetm.libfunc_gnu_prefix)
10323 prefix = "__gnu_";
10324
10325 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10326 if (type == NULL)
10327 continue;
10328 inner_type = TREE_TYPE (type);
10329
10330 ftype = build_function_type_list (type, inner_type, inner_type,
10331 inner_type, inner_type, NULL_TREE);
10332
10333 mcode = ((enum built_in_function)
10334 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10335 dcode = ((enum built_in_function)
10336 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10337
10338 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10339 *q = TOLOWER (*p);
10340 *q = '\0';
10341
10342 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10343 NULL);
10344 local_define_builtin (built_in_names[mcode], ftype, mcode,
10345 built_in_names[mcode],
10346 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10347
10348 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10349 NULL);
10350 local_define_builtin (built_in_names[dcode], ftype, dcode,
10351 built_in_names[dcode],
10352 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10353 }
10354 }
10355
10356 init_internal_fns ();
10357 }
10358
10359 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10360 better way.
10361
10362 If we requested a pointer to a vector, build up the pointers that
10363 we stripped off while looking for the inner type. Similarly for
10364 return values from functions.
10365
10366 The argument TYPE is the top of the chain, and BOTTOM is the
10367 new type which we will point to. */
10368
10369 tree
10370 reconstruct_complex_type (tree type, tree bottom)
10371 {
10372 tree inner, outer;
10373
10374 if (TREE_CODE (type) == POINTER_TYPE)
10375 {
10376 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10377 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10378 TYPE_REF_CAN_ALIAS_ALL (type));
10379 }
10380 else if (TREE_CODE (type) == REFERENCE_TYPE)
10381 {
10382 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10383 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10384 TYPE_REF_CAN_ALIAS_ALL (type));
10385 }
10386 else if (TREE_CODE (type) == ARRAY_TYPE)
10387 {
10388 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10389 outer = build_array_type (inner, TYPE_DOMAIN (type));
10390 }
10391 else if (TREE_CODE (type) == FUNCTION_TYPE)
10392 {
10393 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10394 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10395 }
10396 else if (TREE_CODE (type) == METHOD_TYPE)
10397 {
10398 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10399 /* The build_method_type_directly() routine prepends 'this' to the argument
10400 list, so we must compensate by getting rid of it. */
10401 outer
10402 = build_method_type_directly
10403 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10404 inner,
10405 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10406 }
10407 else if (TREE_CODE (type) == OFFSET_TYPE)
10408 {
10409 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10410 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10411 }
10412 else
10413 return bottom;
10414
10415 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10416 TYPE_QUALS (type));
10417 }
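/* Illustrative note (exposition only): if TYPE is "float **" and BOTTOM is
   a vector type such as V4SF, reconstruct_complex_type rebuilds the two
   pointer layers around BOTTOM and returns "V4SF **", preserving the mode,
   qualifiers and attributes of each original pointer layer.  */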
10418
10419 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10420 the inner type. */
10421 tree
10422 build_vector_type_for_mode (tree innertype, machine_mode mode)
10423 {
10424 int nunits;
10425
10426 switch (GET_MODE_CLASS (mode))
10427 {
10428 case MODE_VECTOR_INT:
10429 case MODE_VECTOR_FLOAT:
10430 case MODE_VECTOR_FRACT:
10431 case MODE_VECTOR_UFRACT:
10432 case MODE_VECTOR_ACCUM:
10433 case MODE_VECTOR_UACCUM:
10434 nunits = GET_MODE_NUNITS (mode);
10435 break;
10436
10437 case MODE_INT:
10438 /* Check that there are no leftover bits. */
10439 gcc_assert (GET_MODE_BITSIZE (mode)
10440 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10441
10442 nunits = GET_MODE_BITSIZE (mode)
10443 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10444 break;
10445
10446 default:
10447 gcc_unreachable ();
10448 }
10449
10450 return make_vector_type (innertype, nunits, mode);
10451 }
10452
10453 /* Similarly, but takes the inner type and number of units, which must be
10454 a power of two. */
10455
10456 tree
10457 build_vector_type (tree innertype, int nunits)
10458 {
10459 return make_vector_type (innertype, nunits, VOIDmode);
10460 }
10461
10462 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10463
10464 tree
10465 build_opaque_vector_type (tree innertype, int nunits)
10466 {
10467 tree t = make_vector_type (innertype, nunits, VOIDmode);
10468 tree cand;
10469 /* We always build the non-opaque variant before the opaque one,
10470 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10471 cand = TYPE_NEXT_VARIANT (t);
10472 if (cand
10473 && TYPE_VECTOR_OPAQUE (cand)
10474 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10475 return cand;
10476 /* Otherwise build a variant type and make sure to queue it after
10477 the non-opaque type. */
10478 cand = build_distinct_type_copy (t);
10479 TYPE_VECTOR_OPAQUE (cand) = true;
10480 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10481 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10482 TYPE_NEXT_VARIANT (t) = cand;
10483 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10484 return cand;
10485 }
10486
10487
10488 /* Given an initializer INIT, return TRUE if INIT is zero or some
10489 aggregate of zeros. Otherwise return FALSE. */
10490 bool
10491 initializer_zerop (const_tree init)
10492 {
10493 tree elt;
10494
10495 STRIP_NOPS (init);
10496
10497 switch (TREE_CODE (init))
10498 {
10499 case INTEGER_CST:
10500 return integer_zerop (init);
10501
10502 case REAL_CST:
10503 /* ??? Note that this is not correct for C4X float formats. There,
10504 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10505 negative exponent. */
10506 return real_zerop (init)
10507 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10508
10509 case FIXED_CST:
10510 return fixed_zerop (init);
10511
10512 case COMPLEX_CST:
10513 return integer_zerop (init)
10514 || (real_zerop (init)
10515 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10516 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10517
10518 case VECTOR_CST:
10519 {
10520 unsigned i;
10521 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10522 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10523 return false;
10524 return true;
10525 }
10526
10527 case CONSTRUCTOR:
10528 {
10529 unsigned HOST_WIDE_INT idx;
10530
10531 if (TREE_CLOBBER_P (init))
10532 return false;
10533 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10534 if (!initializer_zerop (elt))
10535 return false;
10536 return true;
10537 }
10538
10539 case STRING_CST:
10540 {
10541 int i;
10542
10543 /* We need to loop through all elements to handle cases like
10544 "\0" and "\0foobar". */
10545 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10546 if (TREE_STRING_POINTER (init)[i] != '\0')
10547 return false;
10548
10549 return true;
10550 }
10551
10552 default:
10553 return false;
10554 }
10555 }
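/* Illustrative examples (exposition only): initializer_zerop returns true
   for integer 0, for a CONSTRUCTOR whose elements are all zero, and for a
   STRING_CST consisting solely of NUL bytes; it returns false for -0.0,
   whose bit pattern is not all zeros.  */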
10556
10557 /* Check if vector VEC consists of all equal elements and that the
10558 number of elements corresponds to the type of VEC.  The function
10559 returns the first element of the vector, or NULL_TREE if the vector
10560 is not uniform. */
10561 tree
10562 uniform_vector_p (const_tree vec)
10563 {
10564 tree first, t;
10565 unsigned i;
10566
10567 if (vec == NULL_TREE)
10568 return NULL_TREE;
10569
10570 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10571
10572 if (TREE_CODE (vec) == VECTOR_CST)
10573 {
10574 first = VECTOR_CST_ELT (vec, 0);
10575 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10576 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10577 return NULL_TREE;
10578
10579 return first;
10580 }
10581
10582 else if (TREE_CODE (vec) == CONSTRUCTOR)
10583 {
10584 first = error_mark_node;
10585
10586 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10587 {
10588 if (i == 0)
10589 {
10590 first = t;
10591 continue;
10592 }
10593 if (!operand_equal_p (first, t, 0))
10594 return NULL_TREE;
10595 }
10596 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10597 return NULL_TREE;
10598
10599 return first;
10600 }
10601
10602 return NULL_TREE;
10603 }
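/* Illustrative sketch (added for exposition): for a VECTOR_CST such as
   { 7, 7, 7, 7 } uniform_vector_p returns the element 7, while for
   { 1, 2, 3, 4 } it returns NULL_TREE.  A hypothetical caller testing for
   a splat, where VEC is assumed to have vector type, might look like:  */
#if 0
static bool
example_vector_is_splat_p (tree vec, tree *elt_out)
{
  tree elt = uniform_vector_p (vec);
  if (elt == NULL_TREE)
    return false;
  *elt_out = elt;
  return true;
}
#endif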
10604
10605 /* Build an empty statement at location LOC. */
10606
10607 tree
10608 build_empty_stmt (location_t loc)
10609 {
10610 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10611 SET_EXPR_LOCATION (t, loc);
10612 return t;
10613 }
10614
10615
10616 /* Build an OpenMP clause with code CODE. LOC is the location of the
10617 clause. */
10618
10619 tree
10620 build_omp_clause (location_t loc, enum omp_clause_code code)
10621 {
10622 tree t;
10623 int size, length;
10624
10625 length = omp_clause_num_ops[code];
10626 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10627
10628 record_node_allocation_statistics (OMP_CLAUSE, size);
10629
10630 t = (tree) ggc_internal_alloc (size);
10631 memset (t, 0, size);
10632 TREE_SET_CODE (t, OMP_CLAUSE);
10633 OMP_CLAUSE_SET_CODE (t, code);
10634 OMP_CLAUSE_LOCATION (t) = loc;
10635
10636 return t;
10637 }
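/* Illustrative sketch (added for exposition, not part of the original
   source): a hypothetical helper chaining a "private (decl)" clause onto an
   existing clause list.  The clause is allocated with room for
   omp_clause_num_ops[code] operands, all zero-initialized.  */
#if 0
static tree
example_add_private_clause (location_t loc, tree decl, tree clauses)
{
  tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
  OMP_CLAUSE_DECL (c) = decl;
  OMP_CLAUSE_CHAIN (c) = clauses;
  return c;
}
#endif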
10638
10639 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10640 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10641 Except for the CODE and operand count field, other storage for the
10642 object is initialized to zeros. */
10643
10644 tree
10645 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10646 {
10647 tree t;
10648 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10649
10650 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10651 gcc_assert (len >= 1);
10652
10653 record_node_allocation_statistics (code, length);
10654
10655 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10656
10657 TREE_SET_CODE (t, code);
10658
10659 /* Can't use TREE_OPERAND to store the length because if checking is
10660 enabled, it will try to check the length before we store it. :-P */
10661 t->exp.operands[0] = build_int_cst (sizetype, len);
10662
10663 return t;
10664 }
10665
10666 /* Helper function for build_call_* functions; build a CALL_EXPR with
10667 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10668 the argument slots. */
10669
10670 static tree
10671 build_call_1 (tree return_type, tree fn, int nargs)
10672 {
10673 tree t;
10674
10675 t = build_vl_exp (CALL_EXPR, nargs + 3);
10676 TREE_TYPE (t) = return_type;
10677 CALL_EXPR_FN (t) = fn;
10678 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10679
10680 return t;
10681 }
10682
10683 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10684 FN and a null static chain slot. NARGS is the number of call arguments
10685 which are specified as "..." arguments. */
10686
10687 tree
10688 build_call_nary (tree return_type, tree fn, int nargs, ...)
10689 {
10690 tree ret;
10691 va_list args;
10692 va_start (args, nargs);
10693 ret = build_call_valist (return_type, fn, nargs, args);
10694 va_end (args);
10695 return ret;
10696 }
10697
10698 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10699 FN and a null static chain slot. NARGS is the number of call arguments
10700 which are specified as a va_list ARGS. */
10701
10702 tree
10703 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10704 {
10705 tree t;
10706 int i;
10707
10708 t = build_call_1 (return_type, fn, nargs);
10709 for (i = 0; i < nargs; i++)
10710 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10711 process_call_operands (t);
10712 return t;
10713 }
10714
10715 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10716 FN and a null static chain slot. NARGS is the number of call arguments
10717 which are specified as a tree array ARGS. */
10718
10719 tree
10720 build_call_array_loc (location_t loc, tree return_type, tree fn,
10721 int nargs, const tree *args)
10722 {
10723 tree t;
10724 int i;
10725
10726 t = build_call_1 (return_type, fn, nargs);
10727 for (i = 0; i < nargs; i++)
10728 CALL_EXPR_ARG (t, i) = args[i];
10729 process_call_operands (t);
10730 SET_EXPR_LOCATION (t, loc);
10731 return t;
10732 }
10733
10734 /* Like build_call_array, but takes a vec. */
10735
10736 tree
10737 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10738 {
10739 tree ret, t;
10740 unsigned int ix;
10741
10742 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10743 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10744 CALL_EXPR_ARG (ret, ix) = t;
10745 process_call_operands (ret);
10746 return ret;
10747 }
10748
10749 /* Conveniently construct a function call expression. FNDECL names the
10750 function to be called and N arguments are passed in the array
10751 ARGARRAY. */
10752
10753 tree
10754 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10755 {
10756 tree fntype = TREE_TYPE (fndecl);
10757 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10758
10759 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10760 }
10761
10762 /* Conveniently construct a function call expression. FNDECL names the
10763 function to be called and the arguments are passed in the vector
10764 VEC. */
10765
10766 tree
10767 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10768 {
10769 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10770 vec_safe_address (vec));
10771 }
10772
10773
10774 /* Conveniently construct a function call expression. FNDECL names the
10775 function to be called, N is the number of arguments, and the "..."
10776 parameters are the argument expressions. */
10777
10778 tree
10779 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10780 {
10781 va_list ap;
10782 tree *argarray = XALLOCAVEC (tree, n);
10783 int i;
10784
10785 va_start (ap, n);
10786 for (i = 0; i < n; i++)
10787 argarray[i] = va_arg (ap, tree);
10788 va_end (ap);
10789 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10790 }
10791
10792 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10793 varargs macros aren't supported by all bootstrap compilers. */
10794
10795 tree
10796 build_call_expr (tree fndecl, int n, ...)
10797 {
10798 va_list ap;
10799 tree *argarray = XALLOCAVEC (tree, n);
10800 int i;
10801
10802 va_start (ap, n);
10803 for (i = 0; i < n; i++)
10804 argarray[i] = va_arg (ap, tree);
10805 va_end (ap);
10806 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10807 }
10808
10809 /* Build internal call expression. This is just like CALL_EXPR, except
10810 its CALL_EXPR_FN is NULL. It will get gimplified later into an
10811 ordinary internal function. */
10812
10813 tree
10814 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10815 tree type, int n, ...)
10816 {
10817 va_list ap;
10818 int i;
10819
10820 tree fn = build_call_1 (type, NULL_TREE, n);
10821 va_start (ap, n);
10822 for (i = 0; i < n; i++)
10823 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10824 va_end (ap);
10825 SET_EXPR_LOCATION (fn, loc);
10826 CALL_EXPR_IFN (fn) = ifn;
10827 return fn;
10828 }
10829
10830 /* Create a new constant string literal and return a char* pointer to it.
10831 The STRING_CST value is the LEN characters at STR. */
10832 tree
10833 build_string_literal (int len, const char *str)
10834 {
10835 tree t, elem, index, type;
10836
10837 t = build_string (len, str);
10838 elem = build_type_variant (char_type_node, 1, 0);
10839 index = build_index_type (size_int (len - 1));
10840 type = build_array_type (elem, index);
10841 TREE_TYPE (t) = type;
10842 TREE_CONSTANT (t) = 1;
10843 TREE_READONLY (t) = 1;
10844 TREE_STATIC (t) = 1;
10845
10846 type = build_pointer_type (elem);
10847 t = build1 (ADDR_EXPR, type,
10848 build4 (ARRAY_REF, elem,
10849 t, integer_zero_node, NULL_TREE, NULL_TREE));
10850 return t;
10851 }
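/* Illustrative sketch (exposition only): building a "%d\n" string literal.
   The helper name is hypothetical.  Note that LEN counts the terminating
   NUL, hence the + 1; the result is &"%d\n"[0], a pointer to const char
   suitable as an argument of a printf-like builtin.  */
#if 0
static tree
example_build_format_string (void)
{
  return build_string_literal (strlen ("%d\n") + 1, "%d\n");
}
#endif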
10852
10853
10854
10855 /* Return true if T (assumed to be a DECL) must be assigned a memory
10856 location. */
10857
10858 bool
10859 needs_to_live_in_memory (const_tree t)
10860 {
10861 return (TREE_ADDRESSABLE (t)
10862 || is_global_var (t)
10863 || (TREE_CODE (t) == RESULT_DECL
10864 && !DECL_BY_REFERENCE (t)
10865 && aggregate_value_p (t, current_function_decl)));
10866 }
10867
10868 /* Return the value of constant X, sign-extended. */
10869
10870 HOST_WIDE_INT
10871 int_cst_value (const_tree x)
10872 {
10873 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10874 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10875
10876 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10877 gcc_assert (cst_and_fits_in_hwi (x));
10878
10879 if (bits < HOST_BITS_PER_WIDE_INT)
10880 {
10881 bool negative = ((val >> (bits - 1)) & 1) != 0;
10882 if (negative)
10883 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10884 else
10885 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10886 }
10887
10888 return val;
10889 }
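/* Worked example (added for exposition): for an INTEGER_CST of an 8-bit
   signed type whose low HOST_WIDE_INT word is 0xff, BITS is 8 and bit 7 is
   set, so VAL is or-ed with ~0 shifted left by 8 and the function returns
   -1.  For the value 0x7f, bit 7 is clear, the high bits are masked off
   and 127 is returned.  */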
10890
10891 /* If TYPE is an integral or pointer type, return an integer type with
10892 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10893 if TYPE is already an integer type of signedness UNSIGNEDP. */
10894
10895 tree
10896 signed_or_unsigned_type_for (int unsignedp, tree type)
10897 {
10898 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10899 return type;
10900
10901 if (TREE_CODE (type) == VECTOR_TYPE)
10902 {
10903 tree inner = TREE_TYPE (type);
10904 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10905 if (!inner2)
10906 return NULL_TREE;
10907 if (inner == inner2)
10908 return type;
10909 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10910 }
10911
10912 if (!INTEGRAL_TYPE_P (type)
10913 && !POINTER_TYPE_P (type)
10914 && TREE_CODE (type) != OFFSET_TYPE)
10915 return NULL_TREE;
10916
10917 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10918 }
10919
10920 /* If TYPE is an integral or pointer type, return an integer type with
10921 the same precision which is unsigned, or itself if TYPE is already an
10922 unsigned integer type. */
10923
10924 tree
10925 unsigned_type_for (tree type)
10926 {
10927 return signed_or_unsigned_type_for (1, type);
10928 }
10929
10930 /* If TYPE is an integral or pointer type, return an integer type with
10931 the same precision which is signed, or itself if TYPE is already a
10932 signed integer type. */
10933
10934 tree
10935 signed_type_for (tree type)
10936 {
10937 return signed_or_unsigned_type_for (0, type);
10938 }
10939
10940 /* If TYPE is a vector type, return a signed integer vector type with the
10941 same width and number of subparts. Otherwise return boolean_type_node. */
10942
10943 tree
10944 truth_type_for (tree type)
10945 {
10946 if (TREE_CODE (type) == VECTOR_TYPE)
10947 {
10948 tree elem = lang_hooks.types.type_for_size
10949 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10950 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10951 }
10952 else
10953 return boolean_type_node;
10954 }
10955
10956 /* Returns the largest value obtainable by casting something in INNER type to
10957 OUTER type. */
10958
10959 tree
10960 upper_bound_in_type (tree outer, tree inner)
10961 {
10962 unsigned int det = 0;
10963 unsigned oprec = TYPE_PRECISION (outer);
10964 unsigned iprec = TYPE_PRECISION (inner);
10965 unsigned prec;
10966
10967 /* Compute a unique number for every combination. */
10968 det |= (oprec > iprec) ? 4 : 0;
10969 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10970 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10971
10972 /* Determine the exponent to use. */
10973 switch (det)
10974 {
10975 case 0:
10976 case 1:
10977 /* oprec <= iprec, outer: signed, inner: don't care. */
10978 prec = oprec - 1;
10979 break;
10980 case 2:
10981 case 3:
10982 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10983 prec = oprec;
10984 break;
10985 case 4:
10986 /* oprec > iprec, outer: signed, inner: signed. */
10987 prec = iprec - 1;
10988 break;
10989 case 5:
10990 /* oprec > iprec, outer: signed, inner: unsigned. */
10991 prec = iprec;
10992 break;
10993 case 6:
10994 /* oprec > iprec, outer: unsigned, inner: signed. */
10995 prec = oprec;
10996 break;
10997 case 7:
10998 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10999 prec = iprec;
11000 break;
11001 default:
11002 gcc_unreachable ();
11003 }
11004
11005 return wide_int_to_tree (outer,
11006 wi::mask (prec, false, TYPE_PRECISION (outer)));
11007 }
11008
11009 /* Returns the smallest value obtainable by casting something in INNER type to
11010 OUTER type. */
11011
11012 tree
11013 lower_bound_in_type (tree outer, tree inner)
11014 {
11015 unsigned oprec = TYPE_PRECISION (outer);
11016 unsigned iprec = TYPE_PRECISION (inner);
11017
11018 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11019 and obtain 0. */
11020 if (TYPE_UNSIGNED (outer)
11021 /* If we are widening something of an unsigned type, OUTER type
11022 contains all values of INNER type. In particular, both INNER
11023 and OUTER types have zero in common. */
11024 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11025 return build_int_cst (outer, 0);
11026 else
11027 {
11028 /* If we are widening a signed type to another signed type, we
11029 want to obtain -2^(iprec-1). If we are keeping the
11030 precision or narrowing to a signed type, we want to obtain
11031 -2^(oprec-1). */
11032 unsigned prec = oprec > iprec ? iprec : oprec;
11033 return wide_int_to_tree (outer,
11034 wi::mask (prec - 1, true,
11035 TYPE_PRECISION (outer)));
11036 }
11037 }
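/* Worked example (added for exposition): with OUTER a signed 8-bit type and
   INNER a signed 16-bit type, upper_bound_in_type computes det = 0, uses
   prec = oprec - 1 = 7 and returns 2^7 - 1 = 127, while lower_bound_in_type
   uses prec = 8 and returns -2^7 = -128; casting any 16-bit signed value to
   the 8-bit type therefore yields a result in [-128, 127].  */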
11038
11039 /* Return nonzero if two operands that are suitable for PHI nodes are
11040 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11041 SSA_NAME or invariant. Note that this is strictly an optimization.
11042 That is, callers of this function can directly call operand_equal_p
11043 and get the same result, only slower. */
11044
11045 int
11046 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11047 {
11048 if (arg0 == arg1)
11049 return 1;
11050 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11051 return 0;
11052 return operand_equal_p (arg0, arg1, 0);
11053 }
11054
11055 /* Returns number of zeros at the end of binary representation of X. */
11056
11057 tree
11058 num_ending_zeros (const_tree x)
11059 {
11060 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11061 }
11062
11063
11064 #define WALK_SUBTREE(NODE) \
11065 do \
11066 { \
11067 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11068 if (result) \
11069 return result; \
11070 } \
11071 while (0)
11072
11073 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11074 to be walked whenever a type is seen in the tree.  The rest of the operands
11075 and the return value are as for walk_tree. */
11076
11077 static tree
11078 walk_type_fields (tree type, walk_tree_fn func, void *data,
11079 hash_set<tree> *pset, walk_tree_lh lh)
11080 {
11081 tree result = NULL_TREE;
11082
11083 switch (TREE_CODE (type))
11084 {
11085 case POINTER_TYPE:
11086 case REFERENCE_TYPE:
11087 case VECTOR_TYPE:
11088 /* We have to worry about mutually recursive pointers. These can't
11089 be written in C. They can in Ada. It's pathological, but
11090 there's an ACATS test (c38102a) that checks it. Deal with this
11091 by checking if we're pointing to another pointer, that one
11092 points to another pointer, that one does too, and we have no htab.
11093 If so, get a hash table. We check three levels deep to avoid
11094 the cost of the hash table if we don't need one. */
11095 if (POINTER_TYPE_P (TREE_TYPE (type))
11096 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11097 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11098 && !pset)
11099 {
11100 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11101 func, data);
11102 if (result)
11103 return result;
11104
11105 break;
11106 }
11107
11108 /* ... fall through ... */
11109
11110 case COMPLEX_TYPE:
11111 WALK_SUBTREE (TREE_TYPE (type));
11112 break;
11113
11114 case METHOD_TYPE:
11115 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11116
11117 /* Fall through. */
11118
11119 case FUNCTION_TYPE:
11120 WALK_SUBTREE (TREE_TYPE (type));
11121 {
11122 tree arg;
11123
11124 /* We never want to walk into default arguments. */
11125 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11126 WALK_SUBTREE (TREE_VALUE (arg));
11127 }
11128 break;
11129
11130 case ARRAY_TYPE:
11131 /* Don't follow this node's type if it is a pointer, for fear that
11132 we'll have infinite recursion.  If we have a PSET, then we
11133 need not fear. */
11134 if (pset
11135 || (!POINTER_TYPE_P (TREE_TYPE (type))
11136 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11137 WALK_SUBTREE (TREE_TYPE (type));
11138 WALK_SUBTREE (TYPE_DOMAIN (type));
11139 break;
11140
11141 case OFFSET_TYPE:
11142 WALK_SUBTREE (TREE_TYPE (type));
11143 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11144 break;
11145
11146 default:
11147 break;
11148 }
11149
11150 return NULL_TREE;
11151 }
11152
11153 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11154 called with the DATA and the address of each sub-tree. If FUNC returns a
11155 non-NULL value, the traversal is stopped, and the value returned by FUNC
11156 is returned. If PSET is non-NULL it is used to record the nodes visited,
11157 and to avoid visiting a node more than once. */
11158
11159 tree
11160 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11161 hash_set<tree> *pset, walk_tree_lh lh)
11162 {
11163 enum tree_code code;
11164 int walk_subtrees;
11165 tree result;
11166
11167 #define WALK_SUBTREE_TAIL(NODE) \
11168 do \
11169 { \
11170 tp = & (NODE); \
11171 goto tail_recurse; \
11172 } \
11173 while (0)
11174
11175 tail_recurse:
11176 /* Skip empty subtrees. */
11177 if (!*tp)
11178 return NULL_TREE;
11179
11180 /* Don't walk the same tree twice, if the user has requested
11181 that we avoid doing so. */
11182 if (pset && pset->add (*tp))
11183 return NULL_TREE;
11184
11185 /* Call the function. */
11186 walk_subtrees = 1;
11187 result = (*func) (tp, &walk_subtrees, data);
11188
11189 /* If we found something, return it. */
11190 if (result)
11191 return result;
11192
11193 code = TREE_CODE (*tp);
11194
11195 /* Even if we didn't, FUNC may have decided that there was nothing
11196 interesting below this point in the tree. */
11197 if (!walk_subtrees)
11198 {
11199 /* But we still need to check our siblings. */
11200 if (code == TREE_LIST)
11201 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11202 else if (code == OMP_CLAUSE)
11203 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11204 else
11205 return NULL_TREE;
11206 }
11207
11208 if (lh)
11209 {
11210 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11211 if (result || !walk_subtrees)
11212 return result;
11213 }
11214
11215 switch (code)
11216 {
11217 case ERROR_MARK:
11218 case IDENTIFIER_NODE:
11219 case INTEGER_CST:
11220 case REAL_CST:
11221 case FIXED_CST:
11222 case VECTOR_CST:
11223 case STRING_CST:
11224 case BLOCK:
11225 case PLACEHOLDER_EXPR:
11226 case SSA_NAME:
11227 case FIELD_DECL:
11228 case RESULT_DECL:
11229 /* None of these have subtrees other than those already walked
11230 above. */
11231 break;
11232
11233 case TREE_LIST:
11234 WALK_SUBTREE (TREE_VALUE (*tp));
11235 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11236 break;
11237
11238 case TREE_VEC:
11239 {
11240 int len = TREE_VEC_LENGTH (*tp);
11241
11242 if (len == 0)
11243 break;
11244
11245 /* Walk all elements but the first. */
11246 while (--len)
11247 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11248
11249 /* Now walk the first one as a tail call. */
11250 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11251 }
11252
11253 case COMPLEX_CST:
11254 WALK_SUBTREE (TREE_REALPART (*tp));
11255 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11256
11257 case CONSTRUCTOR:
11258 {
11259 unsigned HOST_WIDE_INT idx;
11260 constructor_elt *ce;
11261
11262 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11263 idx++)
11264 WALK_SUBTREE (ce->value);
11265 }
11266 break;
11267
11268 case SAVE_EXPR:
11269 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11270
11271 case BIND_EXPR:
11272 {
11273 tree decl;
11274 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11275 {
11276 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11277 into declarations that are just mentioned, rather than
11278 declared; they don't really belong to this part of the tree.
11279 And, we can see cycles: the initializer for a declaration
11280 can refer to the declaration itself. */
11281 WALK_SUBTREE (DECL_INITIAL (decl));
11282 WALK_SUBTREE (DECL_SIZE (decl));
11283 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11284 }
11285 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11286 }
11287
11288 case STATEMENT_LIST:
11289 {
11290 tree_stmt_iterator i;
11291 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11292 WALK_SUBTREE (*tsi_stmt_ptr (i));
11293 }
11294 break;
11295
11296 case OMP_CLAUSE:
11297 switch (OMP_CLAUSE_CODE (*tp))
11298 {
11299 case OMP_CLAUSE_GANG:
11300 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11301 /* FALLTHRU */
11302
11303 case OMP_CLAUSE_DEVICE_RESIDENT:
11304 case OMP_CLAUSE_USE_DEVICE:
11305 case OMP_CLAUSE_ASYNC:
11306 case OMP_CLAUSE_WAIT:
11307 case OMP_CLAUSE_WORKER:
11308 case OMP_CLAUSE_VECTOR:
11309 case OMP_CLAUSE_NUM_GANGS:
11310 case OMP_CLAUSE_NUM_WORKERS:
11311 case OMP_CLAUSE_VECTOR_LENGTH:
11312 case OMP_CLAUSE_PRIVATE:
11313 case OMP_CLAUSE_SHARED:
11314 case OMP_CLAUSE_FIRSTPRIVATE:
11315 case OMP_CLAUSE_COPYIN:
11316 case OMP_CLAUSE_COPYPRIVATE:
11317 case OMP_CLAUSE_FINAL:
11318 case OMP_CLAUSE_IF:
11319 case OMP_CLAUSE_NUM_THREADS:
11320 case OMP_CLAUSE_SCHEDULE:
11321 case OMP_CLAUSE_UNIFORM:
11322 case OMP_CLAUSE_DEPEND:
11323 case OMP_CLAUSE_NUM_TEAMS:
11324 case OMP_CLAUSE_THREAD_LIMIT:
11325 case OMP_CLAUSE_DEVICE:
11326 case OMP_CLAUSE_DIST_SCHEDULE:
11327 case OMP_CLAUSE_SAFELEN:
11328 case OMP_CLAUSE_SIMDLEN:
11329 case OMP_CLAUSE__LOOPTEMP_:
11330 case OMP_CLAUSE__SIMDUID_:
11331 case OMP_CLAUSE__CILK_FOR_COUNT_:
11332 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11333 /* FALLTHRU */
11334
11335 case OMP_CLAUSE_INDEPENDENT:
11336 case OMP_CLAUSE_NOWAIT:
11337 case OMP_CLAUSE_ORDERED:
11338 case OMP_CLAUSE_DEFAULT:
11339 case OMP_CLAUSE_UNTIED:
11340 case OMP_CLAUSE_MERGEABLE:
11341 case OMP_CLAUSE_PROC_BIND:
11342 case OMP_CLAUSE_INBRANCH:
11343 case OMP_CLAUSE_NOTINBRANCH:
11344 case OMP_CLAUSE_FOR:
11345 case OMP_CLAUSE_PARALLEL:
11346 case OMP_CLAUSE_SECTIONS:
11347 case OMP_CLAUSE_TASKGROUP:
11348 case OMP_CLAUSE_AUTO:
11349 case OMP_CLAUSE_SEQ:
11350 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11351
11352 case OMP_CLAUSE_LASTPRIVATE:
11353 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11354 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11355 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11356
11357 case OMP_CLAUSE_COLLAPSE:
11358 {
11359 int i;
11360 for (i = 0; i < 3; i++)
11361 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11362 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11363 }
11364
11365 case OMP_CLAUSE_LINEAR:
11366 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11367 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11368 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11369 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11370
11371 case OMP_CLAUSE_ALIGNED:
11372 case OMP_CLAUSE_FROM:
11373 case OMP_CLAUSE_TO:
11374 case OMP_CLAUSE_MAP:
11375 case OMP_CLAUSE__CACHE_:
11376 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11377 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11378 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11379
11380 case OMP_CLAUSE_REDUCTION:
11381 {
11382 int i;
11383 for (i = 0; i < 4; i++)
11384 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11385 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11386 }
11387
11388 default:
11389 gcc_unreachable ();
11390 }
11391 break;
11392
11393 case TARGET_EXPR:
11394 {
11395 int i, len;
11396
11397 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11398 But we only want to walk it once. */
11399 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11400 for (i = 0; i < len; ++i)
11401 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11402 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11403 }
11404
11405 case DECL_EXPR:
11406 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11407 defining. We only want to walk into these fields of a type in this
11408 case and not in the general case of a mere reference to the type.
11409
11410 The criterion is as follows: if the field can be an expression, it
11411 must be walked only here. This should be in keeping with the fields
11412 that are directly gimplified in gimplify_type_sizes in order for the
11413 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11414 variable-sized types.
11415
11416 Note that DECLs get walked as part of processing the BIND_EXPR. */
11417 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11418 {
11419 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11420 if (TREE_CODE (*type_p) == ERROR_MARK)
11421 return NULL_TREE;
11422
11423 /* Call the function for the type. See if it returns anything or
11424 doesn't want us to continue. If we are to continue, walk both
11425 the normal fields and those for the declaration case. */
11426 result = (*func) (type_p, &walk_subtrees, data);
11427 if (result || !walk_subtrees)
11428 return result;
11429
11430 /* But do not walk a pointed-to type since it may itself need to
11431 be walked in the declaration case if it isn't anonymous. */
11432 if (!POINTER_TYPE_P (*type_p))
11433 {
11434 result = walk_type_fields (*type_p, func, data, pset, lh);
11435 if (result)
11436 return result;
11437 }
11438
11439 /* If this is a record type, also walk the fields. */
11440 if (RECORD_OR_UNION_TYPE_P (*type_p))
11441 {
11442 tree field;
11443
11444 for (field = TYPE_FIELDS (*type_p); field;
11445 field = DECL_CHAIN (field))
11446 {
11447 /* We'd like to look at the type of the field, but we can
11448 easily get infinite recursion. So assume it's pointed
11449 to elsewhere in the tree. Also, ignore things that
11450 aren't fields. */
11451 if (TREE_CODE (field) != FIELD_DECL)
11452 continue;
11453
11454 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11455 WALK_SUBTREE (DECL_SIZE (field));
11456 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11457 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11458 WALK_SUBTREE (DECL_QUALIFIER (field));
11459 }
11460 }
11461
11462 /* Same for scalar types. */
11463 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11464 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11465 || TREE_CODE (*type_p) == INTEGER_TYPE
11466 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11467 || TREE_CODE (*type_p) == REAL_TYPE)
11468 {
11469 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11470 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11471 }
11472
11473 WALK_SUBTREE (TYPE_SIZE (*type_p));
11474 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11475 }
11476 /* FALLTHRU */
11477
11478 default:
11479 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11480 {
11481 int i, len;
11482
11483 /* Walk over all the sub-trees of this operand. */
11484 len = TREE_OPERAND_LENGTH (*tp);
11485
11486 /* Go through the subtrees. We need to do this in forward order so
11487 that the scope of a FOR_EXPR is handled properly. */
11488 if (len)
11489 {
11490 for (i = 0; i < len - 1; ++i)
11491 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11492 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11493 }
11494 }
11495 /* If this is a type, walk the needed fields in the type. */
11496 else if (TYPE_P (*tp))
11497 return walk_type_fields (*tp, func, data, pset, lh);
11498 break;
11499 }
11500
11501 /* We didn't find what we were looking for. */
11502 return NULL_TREE;
11503
11504 #undef WALK_SUBTREE_TAIL
11505 }
11506 #undef WALK_SUBTREE
11507
11508 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11509
11510 tree
11511 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11512 walk_tree_lh lh)
11513 {
11514 tree result;
11515
11516 hash_set<tree> pset;
11517 result = walk_tree_1 (tp, func, data, &pset, lh);
11518 return result;
11519 }
11520
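/* Example of using the walker above (an illustrative sketch only; the
   names count_calls_r, count and expr are hypothetical and do not exist
   in GCC).  A callback has the walk_tree_fn signature and returns
   NULL_TREE to keep walking:

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                    void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(unsigned int *) data;
       return NULL_TREE;
     }

     unsigned int count = 0;
     walk_tree_without_duplicates (&expr, count_calls_r, &count);

   Returning a non-NULL tree from the callback stops the walk and makes
   the walker return that value; setting *WALK_SUBTREES to 0 skips the
   operands of the current node while siblings chained through TREE_CHAIN
   or OMP_CLAUSE_CHAIN are still visited, as implemented above.  */
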
11521
11522 tree
11523 tree_block (tree t)
11524 {
11525 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11526
11527 if (IS_EXPR_CODE_CLASS (c))
11528 return LOCATION_BLOCK (t->exp.locus);
11529 gcc_unreachable ();
11530 return NULL;
11531 }
11532
11533 void
11534 tree_set_block (tree t, tree b)
11535 {
11536 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11537
11538 if (IS_EXPR_CODE_CLASS (c))
11539 {
11540 if (b)
11541 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11542 else
11543 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11544 }
11545 else
11546 gcc_unreachable ();
11547 }
11548
11549 /* Create a nameless artificial label and put it in the current
11550 function context. The label has a location of LOC. Returns the
11551 newly created label. */
11552
11553 tree
11554 create_artificial_label (location_t loc)
11555 {
11556 tree lab = build_decl (loc,
11557 LABEL_DECL, NULL_TREE, void_type_node);
11558
11559 DECL_ARTIFICIAL (lab) = 1;
11560 DECL_IGNORED_P (lab) = 1;
11561 DECL_CONTEXT (lab) = current_function_decl;
11562 return lab;
11563 }
11564
11565 /* Given a tree, try to return a useful variable name that we can use
11566 to prefix a temporary that is being assigned the value of the tree.
11567 E.g., given <temp> = &A, return A. */
11568
11569 const char *
11570 get_name (tree t)
11571 {
11572 tree stripped_decl;
11573
11574 stripped_decl = t;
11575 STRIP_NOPS (stripped_decl);
11576 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11577 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11578 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11579 {
11580 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11581 if (!name)
11582 return NULL;
11583 return IDENTIFIER_POINTER (name);
11584 }
11585 else
11586 {
11587 switch (TREE_CODE (stripped_decl))
11588 {
11589 case ADDR_EXPR:
11590 return get_name (TREE_OPERAND (stripped_decl, 0));
11591 default:
11592 return NULL;
11593 }
11594 }
11595 }
11596
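/* Illustrative sketch (ADDR is a hypothetical ADDR_EXPR taking the
   address of a VAR_DECL named "foo"):

     const char *n = get_name (addr);

   N points to the string "foo".  For an SSA name without an identifier,
   or for any other unnamed node, the function returns NULL, so callers
   must be prepared for that.  */
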
11597 /* Return true if FNTYPE has a variable argument list. */
11598
11599 bool
11600 stdarg_p (const_tree fntype)
11601 {
11602 function_args_iterator args_iter;
11603 tree n = NULL_TREE, t;
11604
11605 if (!fntype)
11606 return false;
11607
11608 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11609 {
11610 n = t;
11611 }
11612
11613 return n != NULL_TREE && n != void_type_node;
11614 }
11615
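/* For illustration (applied to the FUNCTION_TYPE of each declaration;
   the declarations themselves are hypothetical examples):

     int printf (const char *, ...);   stdarg_p returns true
     int abs (int);                    stdarg_p returns false
     int f ();                         stdarg_p returns false, because an
                                       unprototyped type has no
                                       TYPE_ARG_TYPES and N stays NULL_TREE

   A prototyped, non-variadic function type ends its argument list with
   void_type_node, which is why the comparison above checks for it.  */
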
11616 /* Return true if FNTYPE has a prototype. */
11617
11618 bool
11619 prototype_p (const_tree fntype)
11620 {
11621 tree t;
11622
11623 gcc_assert (fntype != NULL_TREE);
11624
11625 t = TYPE_ARG_TYPES (fntype);
11626 return (t != NULL_TREE);
11627 }
11628
11629 /* If BLOCK is inlined from an __attribute__((__artificial__))
11630 routine, return a pointer to the location from which it has been
11631 called. */
11632 location_t *
11633 block_nonartificial_location (tree block)
11634 {
11635 location_t *ret = NULL;
11636
11637 while (block && TREE_CODE (block) == BLOCK
11638 && BLOCK_ABSTRACT_ORIGIN (block))
11639 {
11640 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11641
11642 while (TREE_CODE (ao) == BLOCK
11643 && BLOCK_ABSTRACT_ORIGIN (ao)
11644 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11645 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11646
11647 if (TREE_CODE (ao) == FUNCTION_DECL)
11648 {
11649 /* If AO is an artificial inline, point RET to the
11650 call site locus at which it has been inlined and continue
11651 the loop, in case AO's caller is also an artificial
11652 inline. */
11653 if (DECL_DECLARED_INLINE_P (ao)
11654 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11655 ret = &BLOCK_SOURCE_LOCATION (block);
11656 else
11657 break;
11658 }
11659 else if (TREE_CODE (ao) != BLOCK)
11660 break;
11661
11662 block = BLOCK_SUPERCONTEXT (block);
11663 }
11664 return ret;
11665 }
11666
11667
11668 /* If EXP is inlined from an __attribute__((__artificial__))
11669 function, return the location of the original call expression. */
11670
11671 location_t
11672 tree_nonartificial_location (tree exp)
11673 {
11674 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11675
11676 if (loc)
11677 return *loc;
11678 else
11679 return EXPR_LOCATION (exp);
11680 }
11681
11682
11683 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11684 nodes. */
11685
11686 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11687
11688 hashval_t
11689 cl_option_hasher::hash (tree x)
11690 {
11691 const_tree const t = x;
11692 const char *p;
11693 size_t i;
11694 size_t len = 0;
11695 hashval_t hash = 0;
11696
11697 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11698 {
11699 p = (const char *)TREE_OPTIMIZATION (t);
11700 len = sizeof (struct cl_optimization);
11701 }
11702
11703 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11704 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11705
11706 else
11707 gcc_unreachable ();
11708
11709 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11710 something else. */
11711 for (i = 0; i < len; i++)
11712 if (p[i])
11713 hash = (hash << 4) ^ ((i << 2) | p[i]);
11714
11715 return hash;
11716 }
11717
11718 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11719 TARGET_OPTION_NODE tree) is the same as that given by Y, which is of the
11720 same kind. */
11721
11722 bool
11723 cl_option_hasher::equal (tree x, tree y)
11724 {
11725 const_tree const xt = x;
11726 const_tree const yt = y;
11727 const char *xp;
11728 const char *yp;
11729 size_t len;
11730
11731 if (TREE_CODE (xt) != TREE_CODE (yt))
11732 return 0;
11733
11734 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11735 {
11736 xp = (const char *)TREE_OPTIMIZATION (xt);
11737 yp = (const char *)TREE_OPTIMIZATION (yt);
11738 len = sizeof (struct cl_optimization);
11739 }
11740
11741 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11742 {
11743 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11744 TREE_TARGET_OPTION (yt));
11745 }
11746
11747 else
11748 gcc_unreachable ();
11749
11750 return (memcmp (xp, yp, len) == 0);
11751 }
11752
11753 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11754
11755 tree
11756 build_optimization_node (struct gcc_options *opts)
11757 {
11758 tree t;
11759
11760 /* Use the cache of optimization nodes. */
11761
11762 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11763 opts);
11764
11765 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11766 t = *slot;
11767 if (!t)
11768 {
11769 /* Insert this one into the hash table. */
11770 t = cl_optimization_node;
11771 *slot = t;
11772
11773 /* Make a new node for next time round. */
11774 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11775 }
11776
11777 return t;
11778 }
11779
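/* A minimal usage sketch (FNDECL is hypothetical; real callers live in
   the option-handling and attribute ((optimize)) machinery):

     tree optnode = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = optnode;

   Because the nodes are hashed, two calls made while the options in
   GLOBAL_OPTIONS are identical return the same tree, so pointer
   comparison is enough to tell whether two functions use the same
   optimization settings.  */
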
11780 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11781
11782 tree
11783 build_target_option_node (struct gcc_options *opts)
11784 {
11785 tree t;
11786
11787 /* Use the cache of target option nodes. */
11788
11789 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11790 opts);
11791
11792 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11793 t = *slot;
11794 if (!t)
11795 {
11796 /* Insert this one into the hash table. */
11797 t = cl_target_option_node;
11798 *slot = t;
11799
11800 /* Make a new node for next time round. */
11801 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11802 }
11803
11804 return t;
11805 }
11806
11807 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11808 so that they aren't saved during PCH writing. */
11809
11810 void
11811 prepare_target_option_nodes_for_pch (void)
11812 {
11813 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11814 for (; iter != cl_option_hash_table->end (); ++iter)
11815 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11816 TREE_TARGET_GLOBALS (*iter) = NULL;
11817 }
11818
11819 /* Determine the "ultimate origin" of a block. The block may be an inlined
11820 instance of an inlined instance of a block which is local to an inline
11821 function, so we have to trace all of the way back through the origin chain
11822 to find out what sort of node actually served as the original seed for the
11823 given block. */
11824
11825 tree
11826 block_ultimate_origin (const_tree block)
11827 {
11828 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11829
11830 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11831 we're trying to output the abstract instance of this function. */
11832 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11833 return NULL_TREE;
11834
11835 if (immediate_origin == NULL_TREE)
11836 return NULL_TREE;
11837 else
11838 {
11839 tree ret_val;
11840 tree lookahead = immediate_origin;
11841
11842 do
11843 {
11844 ret_val = lookahead;
11845 lookahead = (TREE_CODE (ret_val) == BLOCK
11846 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11847 }
11848 while (lookahead != NULL && lookahead != ret_val);
11849
11850 /* The block's abstract origin chain may not be the *ultimate* origin of
11851 the block. It could lead to a DECL that has an abstract origin set.
11852 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11853 will give us if it has one). Note that DECL's abstract origins are
11854 supposed to be the most distant ancestor (or so decl_ultimate_origin
11855 claims), so we don't need to loop following the DECL origins. */
11856 if (DECL_P (ret_val))
11857 return DECL_ORIGIN (ret_val);
11858
11859 return ret_val;
11860 }
11861 }
11862
11863 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11864 no instruction. */
11865
11866 bool
11867 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11868 {
11869 /* Use precision rather than machine mode when we can, which gives
11870 the correct answer even for submode (bit-field) types. */
11871 if ((INTEGRAL_TYPE_P (outer_type)
11872 || POINTER_TYPE_P (outer_type)
11873 || TREE_CODE (outer_type) == OFFSET_TYPE)
11874 && (INTEGRAL_TYPE_P (inner_type)
11875 || POINTER_TYPE_P (inner_type)
11876 || TREE_CODE (inner_type) == OFFSET_TYPE))
11877 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11878
11879 /* Otherwise fall back on comparing machine modes (e.g. for
11880 aggregate types, floats). */
11881 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11882 }
11883
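/* For example (illustrative), on a target where int and unsigned int
   share the same precision:

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)

   returns true, because only the precision is compared for integral and
   pointer types; signedness is deliberately ignored here and checked
   separately by tree_sign_nop_conversion below.  A conversion from int
   to short (different precision) or from int to double (different mode,
   not an integral/pointer pair) returns false.  */
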
11884 /* Return true iff conversion in EXP generates no instruction. Mark
11885 it inline so that we fully inline into the stripping functions even
11886 though we have two uses of this function. */
11887
11888 static inline bool
11889 tree_nop_conversion (const_tree exp)
11890 {
11891 tree outer_type, inner_type;
11892
11893 if (!CONVERT_EXPR_P (exp)
11894 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11895 return false;
11896 if (TREE_OPERAND (exp, 0) == error_mark_node)
11897 return false;
11898
11899 outer_type = TREE_TYPE (exp);
11900 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11901
11902 if (!inner_type)
11903 return false;
11904
11905 return tree_nop_conversion_p (outer_type, inner_type);
11906 }
11907
11908 /* Return true iff conversion in EXP generates no instruction. Don't
11909 consider conversions changing the signedness. */
11910
11911 static bool
11912 tree_sign_nop_conversion (const_tree exp)
11913 {
11914 tree outer_type, inner_type;
11915
11916 if (!tree_nop_conversion (exp))
11917 return false;
11918
11919 outer_type = TREE_TYPE (exp);
11920 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11921
11922 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11923 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11924 }
11925
11926 /* Strip conversions from EXP according to tree_nop_conversion and
11927 return the resulting expression. */
11928
11929 tree
11930 tree_strip_nop_conversions (tree exp)
11931 {
11932 while (tree_nop_conversion (exp))
11933 exp = TREE_OPERAND (exp, 0);
11934 return exp;
11935 }
11936
11937 /* Strip conversions from EXP according to tree_sign_nop_conversion
11938 and return the resulting expression. */
11939
11940 tree
11941 tree_strip_sign_nop_conversions (tree exp)
11942 {
11943 while (tree_sign_nop_conversion (exp))
11944 exp = TREE_OPERAND (exp, 0);
11945 return exp;
11946 }
11947
11948 /* Avoid any floating point extensions from EXP. */
11949 tree
11950 strip_float_extensions (tree exp)
11951 {
11952 tree sub, expt, subt;
11953
11954 /* For a floating point constant, look up the narrowest type that can hold
11955 it properly and handle it like (type)(narrowest_type)constant.
11956 This way we can optimize for instance a=a*2.0 where "a" is float
11957 but 2.0 is a double constant. */
11958 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11959 {
11960 REAL_VALUE_TYPE orig;
11961 tree type = NULL;
11962
11963 orig = TREE_REAL_CST (exp);
11964 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11965 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11966 type = float_type_node;
11967 else if (TYPE_PRECISION (TREE_TYPE (exp))
11968 > TYPE_PRECISION (double_type_node)
11969 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11970 type = double_type_node;
11971 if (type)
11972 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11973 }
11974
11975 if (!CONVERT_EXPR_P (exp))
11976 return exp;
11977
11978 sub = TREE_OPERAND (exp, 0);
11979 subt = TREE_TYPE (sub);
11980 expt = TREE_TYPE (exp);
11981
11982 if (!FLOAT_TYPE_P (subt))
11983 return exp;
11984
11985 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11986 return exp;
11987
11988 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11989 return exp;
11990
11991 return strip_float_extensions (sub);
11992 }
11993
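/* Illustrative examples (F is a hypothetical float variable, D a double):

     (double) f                strip_float_extensions returns F itself
     2.0 in a = a * 2.0,       the double constant 2.0 is returned as a
     with A of type float      REAL_CST of type float, since it is exactly
                               representable in the narrower type
     (float) d                 returned unchanged; a truncation is not an
                               extension

   This lets callers see through useless widenings.  */
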
11994 /* Strip out all handled components that produce invariant
11995 offsets. */
11996
11997 const_tree
11998 strip_invariant_refs (const_tree op)
11999 {
12000 while (handled_component_p (op))
12001 {
12002 switch (TREE_CODE (op))
12003 {
12004 case ARRAY_REF:
12005 case ARRAY_RANGE_REF:
12006 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12007 || TREE_OPERAND (op, 2) != NULL_TREE
12008 || TREE_OPERAND (op, 3) != NULL_TREE)
12009 return NULL;
12010 break;
12011
12012 case COMPONENT_REF:
12013 if (TREE_OPERAND (op, 2) != NULL_TREE)
12014 return NULL;
12015 break;
12016
12017 default:;
12018 }
12019 op = TREE_OPERAND (op, 0);
12020 }
12021
12022 return op;
12023 }
12024
12025 static GTY(()) tree gcc_eh_personality_decl;
12026
12027 /* Return the GCC personality function decl. */
12028
12029 tree
12030 lhd_gcc_personality (void)
12031 {
12032 if (!gcc_eh_personality_decl)
12033 gcc_eh_personality_decl = build_personality_function ("gcc");
12034 return gcc_eh_personality_decl;
12035 }
12036
12037 /* TARGET is the call target of a GIMPLE call statement
12038 (obtained by gimple_call_fn). Return true if it is an
12039 OBJ_TYPE_REF representing a virtual call to a C++ method.
12040 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12041 through a cast, where the middle-end devirtualization machinery
12042 can't apply.) */
12043
12044 bool
12045 virtual_method_call_p (const_tree target)
12046 {
12047 if (TREE_CODE (target) != OBJ_TYPE_REF)
12048 return false;
12049 tree t = TREE_TYPE (target);
12050 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12051 t = TREE_TYPE (t);
12052 if (TREE_CODE (t) == FUNCTION_TYPE)
12053 return false;
12054 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12055 /* If we do not have a BINFO associated, it means that the type was built
12056 without devirtualization enabled. Do not consider this a virtual
12057 call. */
12058 if (!TYPE_BINFO (obj_type_ref_class (target)))
12059 return false;
12060 return true;
12061 }
12062
12063 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12064
12065 tree
12066 obj_type_ref_class (const_tree ref)
12067 {
12068 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12069 ref = TREE_TYPE (ref);
12070 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12071 ref = TREE_TYPE (ref);
12072 /* We look for the type THIS points to. ObjC also builds
12073 OBJ_TYPE_REF with non-method calls; their first parameter
12074 ID, however, also corresponds to the class type. */
12075 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12076 || TREE_CODE (ref) == FUNCTION_TYPE);
12077 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12078 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12079 return TREE_TYPE (ref);
12080 }
12081
12082 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12083
12084 static tree
12085 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12086 {
12087 unsigned int i;
12088 tree base_binfo, b;
12089
12090 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12091 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12092 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12093 return base_binfo;
12094 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12095 return b;
12096 return NULL;
12097 }
12098
12099 /* Try to find a base info of BINFO that would have its field decl at offset
12100 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12101 found, return it; otherwise return NULL_TREE. */
12102
12103 tree
12104 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12105 {
12106 tree type = BINFO_TYPE (binfo);
12107
12108 while (true)
12109 {
12110 HOST_WIDE_INT pos, size;
12111 tree fld;
12112 int i;
12113
12114 if (types_same_for_odr (type, expected_type))
12115 return binfo;
12116 if (offset < 0)
12117 return NULL_TREE;
12118
12119 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12120 {
12121 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12122 continue;
12123
12124 pos = int_bit_position (fld);
12125 size = tree_to_uhwi (DECL_SIZE (fld));
12126 if (pos <= offset && (pos + size) > offset)
12127 break;
12128 }
12129 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12130 return NULL_TREE;
12131
12132 /* Offset 0 indicates the primary base, whose vtable contents are
12133 represented in the binfo for the derived class. */
12134 else if (offset != 0)
12135 {
12136 tree found_binfo = NULL, base_binfo;
12137 /* Offsets in BINFO are in bytes relative to the whole structure
12138 while POS is in bits relative to the containing field. */
12139 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12140 / BITS_PER_UNIT);
12141
12142 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12143 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12144 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12145 {
12146 found_binfo = base_binfo;
12147 break;
12148 }
12149 if (found_binfo)
12150 binfo = found_binfo;
12151 else
12152 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12153 binfo_offset);
12154 }
12155
12156 type = TREE_TYPE (fld);
12157 offset -= pos;
12158 }
12159 }
12160
12161 /* Returns true if X is a typedef decl. */
12162
12163 bool
12164 is_typedef_decl (const_tree x)
12165 {
12166 return (x && TREE_CODE (x) == TYPE_DECL
12167 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12168 }
12169
12170 /* Returns true iff TYPE is a type variant created for a typedef. */
12171
12172 bool
12173 typedef_variant_p (const_tree type)
12174 {
12175 return is_typedef_decl (TYPE_NAME (type));
12176 }
12177
12178 /* Warn about a use of an identifier which was marked deprecated. */
12179 void
12180 warn_deprecated_use (tree node, tree attr)
12181 {
12182 const char *msg;
12183
12184 if (node == 0 || !warn_deprecated_decl)
12185 return;
12186
12187 if (!attr)
12188 {
12189 if (DECL_P (node))
12190 attr = DECL_ATTRIBUTES (node);
12191 else if (TYPE_P (node))
12192 {
12193 tree decl = TYPE_STUB_DECL (node);
12194 if (decl)
12195 attr = lookup_attribute ("deprecated",
12196 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12197 }
12198 }
12199
12200 if (attr)
12201 attr = lookup_attribute ("deprecated", attr);
12202
12203 if (attr)
12204 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12205 else
12206 msg = NULL;
12207
12208 bool w;
12209 if (DECL_P (node))
12210 {
12211 if (msg)
12212 w = warning (OPT_Wdeprecated_declarations,
12213 "%qD is deprecated: %s", node, msg);
12214 else
12215 w = warning (OPT_Wdeprecated_declarations,
12216 "%qD is deprecated", node);
12217 if (w)
12218 inform (DECL_SOURCE_LOCATION (node), "declared here");
12219 }
12220 else if (TYPE_P (node))
12221 {
12222 tree what = NULL_TREE;
12223 tree decl = TYPE_STUB_DECL (node);
12224
12225 if (TYPE_NAME (node))
12226 {
12227 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12228 what = TYPE_NAME (node);
12229 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12230 && DECL_NAME (TYPE_NAME (node)))
12231 what = DECL_NAME (TYPE_NAME (node));
12232 }
12233
12234 if (decl)
12235 {
12236 if (what)
12237 {
12238 if (msg)
12239 w = warning (OPT_Wdeprecated_declarations,
12240 "%qE is deprecated: %s", what, msg);
12241 else
12242 w = warning (OPT_Wdeprecated_declarations,
12243 "%qE is deprecated", what);
12244 }
12245 else
12246 {
12247 if (msg)
12248 w = warning (OPT_Wdeprecated_declarations,
12249 "type is deprecated: %s", msg);
12250 else
12251 w = warning (OPT_Wdeprecated_declarations,
12252 "type is deprecated");
12253 }
12254 if (w)
12255 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12256 }
12257 else
12258 {
12259 if (what)
12260 {
12261 if (msg)
12262 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12263 what, msg);
12264 else
12265 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12266 }
12267 else
12268 {
12269 if (msg)
12270 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12271 msg);
12272 else
12273 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12274 }
12275 }
12276 }
12277 }
12278
12279 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12280 somewhere in it. */
12281
12282 bool
12283 contains_bitfld_component_ref_p (const_tree ref)
12284 {
12285 while (handled_component_p (ref))
12286 {
12287 if (TREE_CODE (ref) == COMPONENT_REF
12288 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12289 return true;
12290 ref = TREE_OPERAND (ref, 0);
12291 }
12292
12293 return false;
12294 }
12295
12296 /* Try to determine whether a TRY_CATCH expression can fall through.
12297 This is a subroutine of block_may_fallthru. */
12298
12299 static bool
12300 try_catch_may_fallthru (const_tree stmt)
12301 {
12302 tree_stmt_iterator i;
12303
12304 /* If the TRY block can fall through, the whole TRY_CATCH can
12305 fall through. */
12306 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12307 return true;
12308
12309 i = tsi_start (TREE_OPERAND (stmt, 1));
12310 switch (TREE_CODE (tsi_stmt (i)))
12311 {
12312 case CATCH_EXPR:
12313 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12314 catch expression and a body. The whole TRY_CATCH may fall
12315 through iff any of the catch bodies falls through. */
12316 for (; !tsi_end_p (i); tsi_next (&i))
12317 {
12318 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12319 return true;
12320 }
12321 return false;
12322
12323 case EH_FILTER_EXPR:
12324 /* The exception filter expression only matters if there is an
12325 exception. If the exception does not match EH_FILTER_TYPES,
12326 we will execute EH_FILTER_FAILURE, and we will fall through
12327 if that falls through. If the exception does match
12328 EH_FILTER_TYPES, the stack unwinder will continue up the
12329 stack, so we will not fall through. We don't know whether we
12330 will throw an exception which matches EH_FILTER_TYPES or not,
12331 so we just ignore EH_FILTER_TYPES and assume that we might
12332 throw an exception which doesn't match. */
12333 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12334
12335 default:
12336 /* This case represents statements to be executed when an
12337 exception occurs. Those statements are implicitly followed
12338 by a RESX statement to resume execution after the exception.
12339 So in this case the TRY_CATCH never falls through. */
12340 return false;
12341 }
12342 }
12343
12344 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12345 need not be 100% accurate; simply be conservative and return true if we
12346 don't know. This is used only to avoid stupidly generating extra code.
12347 If we're wrong, we'll just delete the extra code later. */
12348
12349 bool
12350 block_may_fallthru (const_tree block)
12351 {
12352 /* This CONST_CAST is okay because expr_last returns its argument
12353 unmodified and we assign it to a const_tree. */
12354 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12355
12356 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12357 {
12358 case GOTO_EXPR:
12359 case RETURN_EXPR:
12360 /* Easy cases. If the last statement of the block implies
12361 control transfer, then we can't fall through. */
12362 return false;
12363
12364 case SWITCH_EXPR:
12365 /* If SWITCH_LABELS is set, this is lowered, and represents a
12366 branch to a selected label and hence can not fall through.
12367 Otherwise SWITCH_BODY is set, and the switch can fall
12368 through. */
12369 return SWITCH_LABELS (stmt) == NULL_TREE;
12370
12371 case COND_EXPR:
12372 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12373 return true;
12374 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12375
12376 case BIND_EXPR:
12377 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12378
12379 case TRY_CATCH_EXPR:
12380 return try_catch_may_fallthru (stmt);
12381
12382 case TRY_FINALLY_EXPR:
12383 /* The finally clause is always executed after the try clause,
12384 so if it does not fall through, then the try-finally will not
12385 fall through. Otherwise, if the try clause does not fall
12386 through, then when the finally clause falls through it will
12387 resume execution wherever the try clause was going. So the
12388 whole try-finally will only fall through if both the try
12389 clause and the finally clause fall through. */
12390 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12391 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12392
12393 case MODIFY_EXPR:
12394 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12395 stmt = TREE_OPERAND (stmt, 1);
12396 else
12397 return true;
12398 /* FALLTHRU */
12399
12400 case CALL_EXPR:
12401 /* Functions that do not return do not fall through. */
12402 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12403
12404 case CLEANUP_POINT_EXPR:
12405 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12406
12407 case TARGET_EXPR:
12408 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12409
12410 case ERROR_MARK:
12411 return true;
12412
12413 default:
12414 return lang_hooks.block_may_fallthru (stmt);
12415 }
12416 }
12417
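/* A few illustrative cases (a sketch of the intent, not additional
   checking):

     { a = 1; return a; }         false: the block ends in a RETURN_EXPR
     { if (p) return a; }         true: the implicit else arm falls through
     { foo (); }                  true unless FOO is declared noreturn, in
                                  which case ECF_NORETURN makes it false  */
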
12418 /* True if we are using EH to handle cleanups. */
12419 static bool using_eh_for_cleanups_flag = false;
12420
12421 /* This routine is called from front ends to indicate eh should be used for
12422 cleanups. */
12423 void
12424 using_eh_for_cleanups (void)
12425 {
12426 using_eh_for_cleanups_flag = true;
12427 }
12428
12429 /* Query whether EH is used for cleanups. */
12430 bool
12431 using_eh_for_cleanups_p (void)
12432 {
12433 return using_eh_for_cleanups_flag;
12434 }
12435
12436 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12437 const char *
12438 get_tree_code_name (enum tree_code code)
12439 {
12440 const char *invalid = "<invalid tree code>";
12441
12442 if (code >= MAX_TREE_CODES)
12443 return invalid;
12444
12445 return tree_code_name[code];
12446 }
12447
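/* For example (illustrative only):

     get_tree_code_name (INTEGER_CST)              returns "integer_cst"
     get_tree_code_name ((enum tree_code) 9999)    returns the
                                                   "<invalid tree code>"
                                                   placeholder

   which keeps debug dumps from indexing past the end of tree_code_name.  */
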
12448 /* Drops the TREE_OVERFLOW flag from T. */
12449
12450 tree
12451 drop_tree_overflow (tree t)
12452 {
12453 gcc_checking_assert (TREE_OVERFLOW (t));
12454
12455 /* For tree codes with a sharing machinery re-build the result. */
12456 if (TREE_CODE (t) == INTEGER_CST)
12457 return wide_int_to_tree (TREE_TYPE (t), t);
12458
12459 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12460 and drop the flag. */
12461 t = copy_node (t);
12462 TREE_OVERFLOW (t) = 0;
12463 return t;
12464 }
12465
12466 /* Given a memory reference expression T, return its base address.
12467 The base address of a memory reference expression is the main
12468 object being referenced. For instance, the base address for
12469 'array[i].fld[j]' is 'array'. You can think of this as stripping
12470 away the offset part from a memory address.
12471
12472 This function calls handled_component_p to strip away all the inner
12473 parts of the memory reference until it reaches the base object. */
12474
12475 tree
12476 get_base_address (tree t)
12477 {
12478 while (handled_component_p (t))
12479 t = TREE_OPERAND (t, 0);
12480
12481 if ((TREE_CODE (t) == MEM_REF
12482 || TREE_CODE (t) == TARGET_MEM_REF)
12483 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12484 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12485
12486 /* ??? Either the alias oracle or all callers need to properly deal
12487 with WITH_SIZE_EXPRs before we can look through those. */
12488 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12489 return NULL_TREE;
12490
12491 return t;
12492 }
12493
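/* A short usage sketch (REF is a hypothetical memory reference tree):

     tree base = get_base_address (ref);
     if (base && DECL_P (base))
       {
         ... the access is based on a declared object ...
       }

   For 'array[i].fld[j]' this yields the VAR_DECL for 'array'; for a
   MEM_REF whose address operand is '&decl' it yields 'decl'; for an
   indirect access through a pointer SSA name the MEM_REF itself is
   returned.  */
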
12494 /* Return a tree of sizetype representing the size, in bytes, of the element
12495 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12496
12497 tree
12498 array_ref_element_size (tree exp)
12499 {
12500 tree aligned_size = TREE_OPERAND (exp, 3);
12501 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12502 location_t loc = EXPR_LOCATION (exp);
12503
12504 /* If a size was specified in the ARRAY_REF, it's the size measured
12505 in alignment units of the element type. So multiply by that value. */
12506 if (aligned_size)
12507 {
12508 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12509 sizetype from another type of the same width and signedness. */
12510 if (TREE_TYPE (aligned_size) != sizetype)
12511 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12512 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12513 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12514 }
12515
12516 /* Otherwise, take the size from that of the element type. Substitute
12517 any PLACEHOLDER_EXPR that we have. */
12518 else
12519 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12520 }
12521
12522 /* Return a tree representing the lower bound of the array mentioned in
12523 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12524
12525 tree
12526 array_ref_low_bound (tree exp)
12527 {
12528 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12529
12530 /* If a lower bound is specified in EXP, use it. */
12531 if (TREE_OPERAND (exp, 2))
12532 return TREE_OPERAND (exp, 2);
12533
12534 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12535 substituting for a PLACEHOLDER_EXPR as needed. */
12536 if (domain_type && TYPE_MIN_VALUE (domain_type))
12537 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12538
12539 /* Otherwise, return a zero of the appropriate type. */
12540 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12541 }
12542
12543 /* Return a tree representing the upper bound of the array mentioned in
12544 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12545
12546 tree
12547 array_ref_up_bound (tree exp)
12548 {
12549 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12550
12551 /* If there is a domain type and it has an upper bound, use it, substituting
12552 for a PLACEHOLDER_EXPR as needed. */
12553 if (domain_type && TYPE_MAX_VALUE (domain_type))
12554 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12555
12556 /* Otherwise fail. */
12557 return NULL_TREE;
12558 }
12559
12560 /* Returns true if REF is an array reference to an array at the end of
12561 a structure. If this is the case, the array may be allocated larger
12562 than its upper bound implies. */
12563
12564 bool
12565 array_at_struct_end_p (tree ref)
12566 {
12567 if (TREE_CODE (ref) != ARRAY_REF
12568 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12569 return false;
12570
12571 while (handled_component_p (ref))
12572 {
12573 /* If the reference chain contains a component reference to a
12574 non-union type and another field follows, the reference
12575 is not at the end of a structure. */
12576 if (TREE_CODE (ref) == COMPONENT_REF
12577 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12578 {
12579 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12580 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12581 nextf = DECL_CHAIN (nextf);
12582 if (nextf)
12583 return false;
12584 }
12585
12586 ref = TREE_OPERAND (ref, 0);
12587 }
12588
12589 /* If the reference is based on a declared entity, the size of the array
12590 is constrained by its given domain. */
12591 if (DECL_P (ref))
12592 return false;
12593
12594 return true;
12595 }
12596
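/* For example (illustrative), given the old-style flexible array idiom

     struct S { int n; int data[1]; } *s;

   an access like s->data[i] is an ARRAY_REF whose COMPONENT_REF names the
   last field of S, so array_at_struct_end_p returns true and the access
   may validly run past the declared bound of DATA.  For a reference into
   a declared object such as 'int a[10]', the walk ends on the VAR_DECL
   and the function returns false.  */
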
12597 /* Return a tree representing the offset, in bytes, of the field referenced
12598 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12599
12600 tree
12601 component_ref_field_offset (tree exp)
12602 {
12603 tree aligned_offset = TREE_OPERAND (exp, 2);
12604 tree field = TREE_OPERAND (exp, 1);
12605 location_t loc = EXPR_LOCATION (exp);
12606
12607 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12608 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12609 value. */
12610 if (aligned_offset)
12611 {
12612 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12613 sizetype from another type of the same width and signedness. */
12614 if (TREE_TYPE (aligned_offset) != sizetype)
12615 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12616 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12617 size_int (DECL_OFFSET_ALIGN (field)
12618 / BITS_PER_UNIT));
12619 }
12620
12621 /* Otherwise, take the offset from that of the field. Substitute
12622 any PLACEHOLDER_EXPR that we have. */
12623 else
12624 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12625 }
12626
12627 /* Return the machine mode of T. For vectors, returns the mode of the
12628 inner type. The main use case is to feed the result to HONOR_NANS,
12629 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12630
12631 machine_mode
12632 element_mode (const_tree t)
12633 {
12634 if (!TYPE_P (t))
12635 t = TREE_TYPE (t);
12636 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12637 t = TREE_TYPE (t);
12638 return TYPE_MODE (t);
12639 }
12640
12641
12642 /* Verify that basic properties of T match TV and thus T can be a variant of
12643 TV. TV should be the more specified variant (i.e. the main variant). */
12644
12645 static bool
12646 verify_type_variant (const_tree t, tree tv)
12647 {
12648 /* Type variant can differ by:
12649
12650 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12651 ENCODE_QUAL_ADDR_SPACE.
12652 - the main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
12653 in this case some values may not be set in the variant types
12654 (see TYPE_COMPLETE_P checks).
12655 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12656 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12657 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12658 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12659 - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
12660 this is necessary to make it possible to merge types from different TUs
12661 - arrays, pointers and references may have TREE_TYPE that is a variant
12662 of the TREE_TYPE of their main variants.
12663 - aggregates may have a new TYPE_FIELDS list that lists variants of
12664 the main variant's TYPE_FIELDS.
12665 - vector types may differ by TYPE_VECTOR_OPAQUE
12666 - TYPE_METHODS is always NULL for variant types and maintained for
12667 the main variant only.
12668 */
12669
12670 /* Convenience macro for matching individual fields. */
12671 #define verify_variant_match(flag) \
12672 do { \
12673 if (flag (tv) != flag (t)) \
12674 { \
12675 error ("type variant differs by " #flag "."); \
12676 debug_tree (tv); \
12677 return false; \
12678 } \
12679 } while (false)
12680
12681 /* tree_base checks. */
12682
12683 verify_variant_match (TREE_CODE);
12684 /* FIXME: Ada builds non-artificial variants of artificial types. */
12685 if (TYPE_ARTIFICIAL (tv) && 0)
12686 verify_variant_match (TYPE_ARTIFICIAL);
12687 if (POINTER_TYPE_P (tv))
12688 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12689 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12690 verify_variant_match (TYPE_UNSIGNED);
12691 verify_variant_match (TYPE_ALIGN_OK);
12692 verify_variant_match (TYPE_PACKED);
12693 if (TREE_CODE (t) == REFERENCE_TYPE)
12694 verify_variant_match (TYPE_REF_IS_RVALUE);
12695 verify_variant_match (TYPE_SATURATING);
12696 /* FIXME: This check triggers during libstdc++ builds. */
12697 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12698 verify_variant_match (TYPE_FINAL_P);
12699
12700 /* tree_type_common checks. */
12701
12702 if (COMPLETE_TYPE_P (t))
12703 {
12704 verify_variant_match (TYPE_SIZE);
12705 verify_variant_match (TYPE_MODE);
12706 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12707 /* FIXME: ideally we should compare pointer equality, but the java FE
12708 produces variants where the size is an INTEGER_CST of a different type (int
12709 wrt size_type) during the libjava build. */
12710 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12711 {
12712 error ("type variant has different TYPE_SIZE_UNIT");
12713 debug_tree (tv);
12714 error ("type variant's TYPE_SIZE_UNIT");
12715 debug_tree (TYPE_SIZE_UNIT (tv));
12716 error ("type's TYPE_SIZE_UNIT");
12717 debug_tree (TYPE_SIZE_UNIT (t));
12718 return false;
12719 }
12720 }
12721 verify_variant_match (TYPE_PRECISION);
12722 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12723 if (RECORD_OR_UNION_TYPE_P (t))
12724 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12725 else if (TREE_CODE (t) == ARRAY_TYPE)
12726 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12727 /* During LTO we merge variant lists from different translation units
12728 that may differ by TYPE_CONTEXT, which in turn may point
12729 to a TRANSLATION_UNIT_DECL.
12730 Ada also builds variants of types with different TYPE_CONTEXT. */
12731 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12732 verify_variant_match (TYPE_CONTEXT);
12733 verify_variant_match (TYPE_STRING_FLAG);
12734 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12735 verify_variant_match (TYPE_ALIAS_SET);
12736
12737 /* tree_type_non_common checks. */
12738
12739 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12740 and dangles the pointer from time to time. */
12741 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12742 && (in_lto_p || !TYPE_VFIELD (tv)
12743 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12744 {
12745 error ("type variant has different TYPE_VFIELD");
12746 debug_tree (tv);
12747 return false;
12748 }
12749 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12750 || TREE_CODE (t) == INTEGER_TYPE
12751 || TREE_CODE (t) == BOOLEAN_TYPE
12752 || TREE_CODE (t) == REAL_TYPE
12753 || TREE_CODE (t) == FIXED_POINT_TYPE)
12754 {
12755 verify_variant_match (TYPE_MAX_VALUE);
12756 verify_variant_match (TYPE_MIN_VALUE);
12757 }
12758 if (TREE_CODE (t) == METHOD_TYPE)
12759 verify_variant_match (TYPE_METHOD_BASETYPE);
12760 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12761 {
12762 error ("type variant has TYPE_METHODS");
12763 debug_tree (tv);
12764 return false;
12765 }
12766 if (TREE_CODE (t) == OFFSET_TYPE)
12767 verify_variant_match (TYPE_OFFSET_BASETYPE);
12768 if (TREE_CODE (t) == ARRAY_TYPE)
12769 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12770 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12771 or even the type's main variant. This is needed to make bootstrap pass
12772 and the bug seems new in GCC 5.
12773 The C++ FE should be updated to make this consistent and we should check
12774 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12775 is a match with the main variant.
12776
12777 Also disable the check for Java for now because of a parser hack that builds
12778 first a dummy BINFO and then sometimes replaces it by the real BINFO in some
12779 of the copies. */
12780 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12781 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12782 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12783 Since there is no cheap way to tell a C++ type from a Java one w/o LTO,
12784 do checking at LTO time only. */
12785 && (in_lto_p && odr_type_p (t)))
12786 {
12787 error ("type variant has different TYPE_BINFO");
12788 debug_tree (tv);
12789 error ("type variant's TYPE_BINFO");
12790 debug_tree (TYPE_BINFO (tv));
12791 error ("type's TYPE_BINFO");
12792 debug_tree (TYPE_BINFO (t));
12793 return false;
12794 }
12795
12796 /* Check various uses of TYPE_VALUES_RAW. */
12797 if (TREE_CODE (t) == ENUMERAL_TYPE)
12798 verify_variant_match (TYPE_VALUES);
12799 else if (TREE_CODE (t) == ARRAY_TYPE)
12800 verify_variant_match (TYPE_DOMAIN);
12801 /* Permit incomplete variants of a complete type. While FEs may complete
12802 all variants, this does not happen for C++ templates in all cases. */
12803 else if (RECORD_OR_UNION_TYPE_P (t)
12804 && COMPLETE_TYPE_P (t)
12805 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12806 {
12807 tree f1, f2;
12808
12809 /* Fortran builds qualified variants as new records with items of
12810 qualified type. Verify that they look the same. */
12811 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12812 f1 && f2;
12813 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12814 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12815 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12816 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12817 /* FIXME: gfc_nonrestricted_type builds all types as variants
12818 with the exception of pointer types. It deeply copies the type,
12819 which means that we may end up with a variant type
12820 referring to a non-variant pointer. We may change it to
12821 produce types as variants, too, like
12822 objc_get_protocol_qualified_type does. */
12823 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12824 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12825 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12826 break;
12827 if (f1 || f2)
12828 {
12829 error ("type variant has different TYPE_FIELDS");
12830 debug_tree (tv);
12831 error ("first mismatch is field");
12832 debug_tree (f1);
12833 error ("and field");
12834 debug_tree (f2);
12835 return false;
12836 }
12837 }
12838 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12839 verify_variant_match (TYPE_ARG_TYPES);
12840 /* For C++ the qualified variant of an array type is really an array type
12841 of the qualified TREE_TYPE.
12842 ObjC builds variants of pointers where the pointed-to type is a variant,
12843 too, in objc_get_protocol_qualified_type. */
12844 if (TREE_TYPE (t) != TREE_TYPE (tv)
12845 && ((TREE_CODE (t) != ARRAY_TYPE
12846 && !POINTER_TYPE_P (t))
12847 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12848 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12849 {
12850 error ("type variant has different TREE_TYPE");
12851 debug_tree (tv);
12852 error ("type variant's TREE_TYPE");
12853 debug_tree (TREE_TYPE (tv));
12854 error ("type's TREE_TYPE");
12855 debug_tree (TREE_TYPE (t));
12856 return false;
12857 }
12858 if (type_with_alias_set_p (t)
12859 && !gimple_canonical_types_compatible_p (t, tv, false))
12860 {
12861 error ("type is not compatible with its vairant");
12862 debug_tree (tv);
12863 error ("type variant's TREE_TYPE");
12864 debug_tree (TREE_TYPE (tv));
12865 error ("type's TREE_TYPE");
12866 debug_tree (TREE_TYPE (t));
12867 return false;
12868 }
12869 return true;
12870 #undef verify_variant_match
12871 }
12872
12873
12874 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12875 the middle-end types_compatible_p function. It needs to avoid
12876 claiming types are different for types that should be treated
12877 the same with respect to TBAA. Canonical types are also used
12878 for IL consistency checks via the useless_type_conversion_p
12879 predicate which does not handle all type kinds itself but falls
12880 back to pointer-comparison of TYPE_CANONICAL for aggregates
12881 for example. */
12882
12883 /* Return true iff T1 and T2 are structurally identical for what
12884 TBAA is concerned.
12885 This function is used both by lto.c canonical type merging and by the
12886 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
12887 that have TYPE_CANONICAL defined and assume them equivalent. */
12888
12889 bool
12890 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
12891 bool trust_type_canonical)
12892 {
12893 /* Type variants should be the same as the main variant. When not doing sanity
12894 checking to verify this fact, go to main variants and save some work. */
12895 if (trust_type_canonical)
12896 {
12897 t1 = TYPE_MAIN_VARIANT (t1);
12898 t2 = TYPE_MAIN_VARIANT (t2);
12899 }
12900
12901 /* Check first for the obvious case of pointer identity. */
12902 if (t1 == t2)
12903 return true;
12904
12905 /* Check that we have two types to compare. */
12906 if (t1 == NULL_TREE || t2 == NULL_TREE)
12907 return false;
12908
12909 /* We consider complete types always compatible with incomplete types.
12910 This does not make sense for canonical type calculation and thus we
12911 need to ensure that we are never called on them.
12912
12913 FIXME: For more correctness the function probably should have three modes:
12914 1) a mode assuming that types are complete, matching their structure
12915 2) a mode allowing incomplete types but producing equivalence classes
12916 and thus ignoring all info from complete types
12917 3) a mode allowing incomplete types to match complete ones but checking
12918 compatibility between complete types.
12919
12920 1 and 2 can be used for canonical type calculation. 3 is the real
12921 definition of type compatibility that can be used e.g. for warnings during
12922 declaration merging. */
12923
12924 gcc_assert (!trust_type_canonical
12925 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
12926 /* If the types have been previously registered and found equal
12927 they still are. */
12928 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
12929 && trust_type_canonical)
12930 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
12931
12932 /* Can't be the same type if the types don't have the same code. */
12933 if (tree_code_for_canonical_type_merging (TREE_CODE (t1))
12934 != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
12935 return false;
12936
12937 /* Qualifiers do not matter for canonical type comparison purposes. */
12938
12939 /* Void types and nullptr types are always the same. */
12940 if (TREE_CODE (t1) == VOID_TYPE
12941 || TREE_CODE (t1) == NULLPTR_TYPE)
12942 return true;
12943
12944 /* Can't be the same type if they have different mode. */
12945 if (TYPE_MODE (t1) != TYPE_MODE (t2))
12946 return false;
12947
12948 /* Non-aggregate types can be handled cheaply. */
12949 if (INTEGRAL_TYPE_P (t1)
12950 || SCALAR_FLOAT_TYPE_P (t1)
12951 || FIXED_POINT_TYPE_P (t1)
12952 || TREE_CODE (t1) == VECTOR_TYPE
12953 || TREE_CODE (t1) == COMPLEX_TYPE
12954 || TREE_CODE (t1) == OFFSET_TYPE
12955 || POINTER_TYPE_P (t1))
12956 {
12957 /* Can't be the same type if they have different sign or precision. */
12958 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
12959 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
12960 return false;
12961
12962 if (TREE_CODE (t1) == INTEGER_TYPE
12963 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
12964 return false;
12965
12966 /* For canonical type comparisons we do not want to build SCCs
12967 so we cannot compare pointed-to types. But we can, for now,
12968 require the same pointed-to type kind and match what
12969 useless_type_conversion_p would do. */
12970 if (POINTER_TYPE_P (t1))
12971 {
12972 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
12973 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
12974 return false;
12975
12976 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
12977 return false;
12978 }
12979
12980 /* Tail-recurse to components. */
12981 if (TREE_CODE (t1) == VECTOR_TYPE
12982 || TREE_CODE (t1) == COMPLEX_TYPE)
12983 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
12984 TREE_TYPE (t2),
12985 trust_type_canonical);
12986
12987 return true;
12988 }
12989
12990 /* Do type-specific comparisons. */
12991 switch (TREE_CODE (t1))
12992 {
12993 case ARRAY_TYPE:
12994 /* Array types are the same if the element types are the same and
12995	         the number of elements is the same.  */
12996 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
12997 trust_type_canonical)
12998 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
12999 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13000 return false;
13001 else
13002 {
13003 tree i1 = TYPE_DOMAIN (t1);
13004 tree i2 = TYPE_DOMAIN (t2);
13005
13006 /* For an incomplete external array, the type domain can be
13007 NULL_TREE. Check this condition also. */
13008 if (i1 == NULL_TREE && i2 == NULL_TREE)
13009 return true;
13010 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13011 return false;
13012 else
13013 {
13014 tree min1 = TYPE_MIN_VALUE (i1);
13015 tree min2 = TYPE_MIN_VALUE (i2);
13016 tree max1 = TYPE_MAX_VALUE (i1);
13017 tree max2 = TYPE_MAX_VALUE (i2);
13018
13019 /* The minimum/maximum values have to be the same. */
13020 if ((min1 == min2
13021 || (min1 && min2
13022 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13023 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13024 || operand_equal_p (min1, min2, 0))))
13025 && (max1 == max2
13026 || (max1 && max2
13027 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13028 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13029 || operand_equal_p (max1, max2, 0)))))
13030 return true;
13031 else
13032 return false;
13033 }
13034 }
13035
13036 case METHOD_TYPE:
13037 case FUNCTION_TYPE:
13038	      /* Function types are the same if the return type and argument types
13039 are the same. */
13040 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13041 trust_type_canonical))
13042 return false;
13043
13044 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13045 return true;
13046 else
13047 {
13048 tree parms1, parms2;
13049
13050 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13051 parms1 && parms2;
13052 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13053 {
13054 if (!gimple_canonical_types_compatible_p
13055 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13056 trust_type_canonical))
13057 return false;
13058 }
13059
13060 if (parms1 || parms2)
13061 return false;
13062
13063 return true;
13064 }
13065
13066 case RECORD_TYPE:
13067 case UNION_TYPE:
13068 case QUAL_UNION_TYPE:
13069 {
13070 tree f1, f2;
13071
13072 /* For aggregate types, all the fields must be the same. */
13073 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13074 f1 || f2;
13075 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13076 {
13077 /* Skip non-fields. */
13078 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13079 f1 = TREE_CHAIN (f1);
13080 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13081 f2 = TREE_CHAIN (f2);
13082 if (!f1 || !f2)
13083 break;
13084	            /* The fields must agree in offset, type and the non-addressable flag.  */
13085 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13086 || !gimple_compare_field_offset (f1, f2)
13087 || !gimple_canonical_types_compatible_p
13088 (TREE_TYPE (f1), TREE_TYPE (f2),
13089 trust_type_canonical))
13090 return false;
13091 }
13092
13093 /* If one aggregate has more fields than the other, they
13094 are not the same. */
13095 if (f1 || f2)
13096 return false;
13097
13098 return true;
13099 }
13100
13101 default:
13102 /* Consider all types with language specific trees in them mutually
13103 compatible. This is executed only from verify_type and false
13104 positives can be tolerated. */
13105 gcc_assert (!in_lto_p);
13106 return true;
13107 }
13108 }
13109
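/* A minimal usage sketch (not compiled in): build two structurally identical
   RECORD_TYPEs and compare them with gimple_canonical_types_compatible_p in
   its purely structural mode.  The helper name and the field layout are
   illustrative assumptions, not part of the real compiler.  */
#if 0
static void
canonical_compatibility_example (void)
{
  tree s1 = make_node (RECORD_TYPE);
  tree s2 = make_node (RECORD_TYPE);
  tree f1 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			get_identifier ("x"), integer_type_node);
  tree f2 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			get_identifier ("x"), integer_type_node);

  DECL_CONTEXT (f1) = s1;
  DECL_CONTEXT (f2) = s2;
  TYPE_FIELDS (s1) = f1;
  TYPE_FIELDS (s2) = f2;
  layout_type (s1);
  layout_type (s2);

  /* With trust_type_canonical false the comparison looks at the structure
     only, so these two types should compare as compatible.  */
  gcc_assert (gimple_canonical_types_compatible_p (s1, s2, false));
}
#endif
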
13110 /* Verify type T. */
13111
13112 void
13113 verify_type (const_tree t)
13114 {
13115 bool error_found = false;
13116 tree mv = TYPE_MAIN_VARIANT (t);
13117 if (!mv)
13118 {
13119 error ("Main variant is not defined");
13120 error_found = true;
13121 }
13122 else if (mv != TYPE_MAIN_VARIANT (mv))
13123 {
13124 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13125 debug_tree (mv);
13126 error_found = true;
13127 }
13128 else if (t != mv && !verify_type_variant (t, mv))
13129 error_found = true;
13130
13131 tree ct = TYPE_CANONICAL (t);
13132 if (!ct)
13133 ;
13134 else if (TYPE_CANONICAL (t) != ct)
13135 {
13136 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13137 debug_tree (ct);
13138 error_found = true;
13139 }
13140	  /* Method and function types cannot be used to address memory and thus
13141	     TYPE_CANONICAL really matters only for determining useless conversions.
13142	
13143	     FIXME: the C++ FE produces declarations of builtin functions that are not
13144	     compatible with main variants.  */
13145 else if (TREE_CODE (t) == FUNCTION_TYPE)
13146 ;
13147 else if (t != ct
13148	     /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13149	        with variably sized arrays because their sizes may be gimplified
13150	        to different variables.  */
13151 && !variably_modified_type_p (ct, NULL)
13152 && !gimple_canonical_types_compatible_p (t, ct, false))
13153 {
13154 error ("TYPE_CANONICAL is not compatible");
13155 debug_tree (ct);
13156 error_found = true;
13157 }
13158
13159
13160 /* Check various uses of TYPE_MINVAL. */
13161 if (RECORD_OR_UNION_TYPE_P (t))
13162 {
13163	      /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13164	         and dangles the pointer from time to time.  */
13165 if (TYPE_VFIELD (t)
13166 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13167 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13168 {
13169 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13170 debug_tree (TYPE_VFIELD (t));
13171 error_found = true;
13172 }
13173 }
13174 else if (TREE_CODE (t) == POINTER_TYPE)
13175 {
13176 if (TYPE_NEXT_PTR_TO (t)
13177 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13178 {
13179 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13180 debug_tree (TYPE_NEXT_PTR_TO (t));
13181 error_found = true;
13182 }
13183 }
13184 else if (TREE_CODE (t) == REFERENCE_TYPE)
13185 {
13186 if (TYPE_NEXT_REF_TO (t)
13187 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13188 {
13189 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13190 debug_tree (TYPE_NEXT_REF_TO (t));
13191 error_found = true;
13192 }
13193 }
13194 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13195 || TREE_CODE (t) == FIXED_POINT_TYPE)
13196 {
13197 /* FIXME: The following check should pass:
13198 useless_type_conversion_p (const_cast <tree> (t),
13199	                                     TREE_TYPE (TYPE_MIN_VALUE (t)))
13200 but does not for C sizetypes in LTO. */
13201 }
13202 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13203 else if (TYPE_MINVAL (t)
13204 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13205 || in_lto_p))
13206 {
13207 error ("TYPE_MINVAL non-NULL");
13208 debug_tree (TYPE_MINVAL (t));
13209 error_found = true;
13210 }
13211
13212 /* Check various uses of TYPE_MAXVAL. */
13213 if (RECORD_OR_UNION_TYPE_P (t))
13214 {
13215 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13216 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13217 && TYPE_METHODS (t) != error_mark_node)
13218 {
13219 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13220 debug_tree (TYPE_METHODS (t));
13221 error_found = true;
13222 }
13223 }
13224 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13225 {
13226 if (TYPE_METHOD_BASETYPE (t)
13227 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13228 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13229 {
13230 error ("TYPE_METHOD_BASETYPE is not record nor union");
13231 debug_tree (TYPE_METHOD_BASETYPE (t));
13232 error_found = true;
13233 }
13234 }
13235 else if (TREE_CODE (t) == OFFSET_TYPE)
13236 {
13237 if (TYPE_OFFSET_BASETYPE (t)
13238 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13239 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13240 {
13241 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13242 debug_tree (TYPE_OFFSET_BASETYPE (t));
13243 error_found = true;
13244 }
13245 }
13246 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13247 || TREE_CODE (t) == FIXED_POINT_TYPE)
13248 {
13249 /* FIXME: The following check should pass:
13250 useless_type_conversion_p (const_cast <tree> (t),
13251	                                     TREE_TYPE (TYPE_MAX_VALUE (t)))
13252 but does not for C sizetypes in LTO. */
13253 }
13254 else if (TREE_CODE (t) == ARRAY_TYPE)
13255 {
13256 if (TYPE_ARRAY_MAX_SIZE (t)
13257 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13258 {
13259 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13260 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13261 error_found = true;
13262 }
13263 }
13264 else if (TYPE_MAXVAL (t))
13265 {
13266 error ("TYPE_MAXVAL non-NULL");
13267 debug_tree (TYPE_MAXVAL (t));
13268 error_found = true;
13269 }
13270
13271 /* Check various uses of TYPE_BINFO. */
13272 if (RECORD_OR_UNION_TYPE_P (t))
13273 {
13274 if (!TYPE_BINFO (t))
13275 ;
13276 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13277 {
13278 error ("TYPE_BINFO is not TREE_BINFO");
13279 debug_tree (TYPE_BINFO (t));
13280 error_found = true;
13281 }
13282 /* FIXME: Java builds invalid empty binfos that do not have
13283 TREE_TYPE set. */
13284 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13285 {
13286 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13287 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13288 error_found = true;
13289 }
13290 }
13291 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13292 {
13293 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13294 debug_tree (TYPE_LANG_SLOT_1 (t));
13295 error_found = true;
13296 }
13297
13298 /* Check various uses of TYPE_VALUES_RAW. */
13299 if (TREE_CODE (t) == ENUMERAL_TYPE)
13300 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13301 {
13302 tree value = TREE_VALUE (l);
13303 tree name = TREE_PURPOSE (l);
13304
13305	        /* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13306	           CONST_DECL of ENUMERAL_TYPE.  */
13307 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13308 {
13309 error ("Enum value is not CONST_DECL or INTEGER_CST");
13310 debug_tree (value);
13311 debug_tree (name);
13312 error_found = true;
13313 }
13314 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13315 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13316 {
13317 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13318 debug_tree (value);
13319 debug_tree (name);
13320 error_found = true;
13321 }
13322 if (TREE_CODE (name) != IDENTIFIER_NODE)
13323 {
13324 error ("Enum value name is not IDENTIFIER_NODE");
13325 debug_tree (value);
13326 debug_tree (name);
13327 error_found = true;
13328 }
13329 }
13330 else if (TREE_CODE (t) == ARRAY_TYPE)
13331 {
13332 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13333 {
13334 error ("Array TYPE_DOMAIN is not integer type");
13335 debug_tree (TYPE_DOMAIN (t));
13336 error_found = true;
13337 }
13338 }
13339 else if (RECORD_OR_UNION_TYPE_P (t))
13340 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13341 {
13342 /* TODO: verify properties of decls. */
13343 if (TREE_CODE (fld) == FIELD_DECL)
13344 ;
13345 else if (TREE_CODE (fld) == TYPE_DECL)
13346 ;
13347 else if (TREE_CODE (fld) == CONST_DECL)
13348 ;
13349 else if (TREE_CODE (fld) == VAR_DECL)
13350 ;
13351 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13352 ;
13353 else if (TREE_CODE (fld) == USING_DECL)
13354 ;
13355 else
13356 {
13357 error ("Wrong tree in TYPE_FIELDS list");
13358 debug_tree (fld);
13359 error_found = true;
13360 }
13361 }
13362 else if (TREE_CODE (t) == INTEGER_TYPE
13363 || TREE_CODE (t) == BOOLEAN_TYPE
13364 || TREE_CODE (t) == OFFSET_TYPE
13365 || TREE_CODE (t) == REFERENCE_TYPE
13366 || TREE_CODE (t) == NULLPTR_TYPE
13367 || TREE_CODE (t) == POINTER_TYPE)
13368 {
13369 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13370 {
13371 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13372 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13373 error_found = true;
13374 }
13375 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13376 {
13377 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13378 debug_tree (TYPE_CACHED_VALUES (t));
13379 error_found = true;
13380 }
13381	      /* Verify just enough of the cache to ensure that no one copied it to a
13382	         new type.  All copying should go through copy_node, which should clear it.  */
13383 else if (TYPE_CACHED_VALUES_P (t))
13384 {
13385 int i;
13386 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13387 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13388 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13389 {
13390 error ("wrong TYPE_CACHED_VALUES entry");
13391 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13392 error_found = true;
13393 break;
13394 }
13395 }
13396 }
13397 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13398 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13399 {
13400 /* C++ FE uses TREE_PURPOSE to store initial values. */
13401 if (TREE_PURPOSE (l) && in_lto_p)
13402 {
13403 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13404 debug_tree (l);
13405 error_found = true;
13406 }
13407 if (!TYPE_P (TREE_VALUE (l)))
13408 {
13409 error ("Wrong entry in TYPE_ARG_TYPES list");
13410 debug_tree (l);
13411 error_found = true;
13412 }
13413 }
13414 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13415 {
13416 error ("TYPE_VALUES_RAW field is non-NULL");
13417 debug_tree (TYPE_VALUES_RAW (t));
13418 error_found = true;
13419 }
13420 if (TREE_CODE (t) != INTEGER_TYPE
13421 && TREE_CODE (t) != BOOLEAN_TYPE
13422 && TREE_CODE (t) != OFFSET_TYPE
13423 && TREE_CODE (t) != REFERENCE_TYPE
13424 && TREE_CODE (t) != NULLPTR_TYPE
13425 && TREE_CODE (t) != POINTER_TYPE
13426 && TYPE_CACHED_VALUES_P (t))
13427 {
13428 error ("TYPE_CACHED_VALUES_P is set while it should not");
13429 error_found = true;
13430 }
13431 if (TYPE_STRING_FLAG (t)
13432 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13433 {
13434 error ("TYPE_STRING_FLAG is set on wrong type code");
13435 error_found = true;
13436 }
13437 else if (TYPE_STRING_FLAG (t))
13438 {
13439 const_tree b = t;
13440 if (TREE_CODE (b) == ARRAY_TYPE)
13441 b = TREE_TYPE (t);
13442	      /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13443	         that is 32 bits.  */
13444 if (TREE_CODE (b) != INTEGER_TYPE)
13445 {
13446 error ("TYPE_STRING_FLAG is set on type that does not look like "
13447 "char nor array of chars");
13448 error_found = true;
13449 }
13450 }
13451
13452	  /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always the
13453	     TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13454	     of a type.  */
13455 if (TREE_CODE (t) == METHOD_TYPE
13456 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13457 {
13458 error ("TYPE_METHOD_BASETYPE is not main variant");
13459 error_found = true;
13460 }
13461
13462 if (error_found)
13463 {
13464 debug_tree (const_cast <tree> (t));
13465 internal_error ("verify_type failed");
13466 }
13467 }
13468
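/* A minimal sketch (not compiled in) of how a front end might exercise the
   verifier above on a freshly constructed type.  The wrapper name is
   hypothetical; real callers simply invoke verify_type directly when
   internal checking is enabled.  */
#if 0
static void
verify_new_type_sketch (tree t)
{
#ifdef ENABLE_CHECKING
  /* verify_type reports inconsistencies with error () and then aborts
     through internal_error, so it must only be run on fully built types.  */
  verify_type (t);
#endif
}
#endif
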
13469 #include "gt-tree.h"