tree.c (free_lang_data_in_type): Free TREE_PURPOSE of TYPE_ARG_TYPES list.
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "attribs.h"
49 #include "varasm.h"
50 #include "tm_p.h"
51 #include "hashtab.h"
52 #include "hard-reg-set.h"
53 #include "function.h"
54 #include "obstack.h"
55 #include "toplev.h" /* get_random_seed */
56 #include "filenames.h"
57 #include "output.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "langhooks.h"
61 #include "tree-inline.h"
62 #include "tree-iterator.h"
63 #include "predict.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "gimple-expr.h"
71 #include "is-a.h"
72 #include "gimple.h"
73 #include "gimple-iterator.h"
74 #include "gimplify.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-phinodes.h"
81 #include "stringpool.h"
82 #include "tree-ssanames.h"
83 #include "rtl.h"
84 #include "statistics.h"
85 #include "real.h"
86 #include "fixed-value.h"
87 #include "insn-config.h"
88 #include "expmed.h"
89 #include "dojump.h"
90 #include "explow.h"
91 #include "emit-rtl.h"
92 #include "stmt.h"
93 #include "expr.h"
94 #include "tree-dfa.h"
95 #include "params.h"
96 #include "tree-pass.h"
97 #include "langhooks-def.h"
98 #include "diagnostic.h"
99 #include "tree-diagnostic.h"
100 #include "tree-pretty-print.h"
101 #include "except.h"
102 #include "debug.h"
103 #include "intl.h"
104 #include "builtins.h"
105 #include "print-tree.h"
106 #include "ipa-utils.h"
107
108 /* Tree code classes. */
109
110 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
111 #define END_OF_BASE_TREE_CODES tcc_exceptional,
112
113 const enum tree_code_class tree_code_type[] = {
114 #include "all-tree.def"
115 };
116
117 #undef DEFTREECODE
118 #undef END_OF_BASE_TREE_CODES
119
120 /* Table indexed by tree code giving number of expression
121 operands beyond the fixed part of the node structure.
122 Not used for types or decls. */
123
124 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
125 #define END_OF_BASE_TREE_CODES 0,
126
127 const unsigned char tree_code_length[] = {
128 #include "all-tree.def"
129 };
130
131 #undef DEFTREECODE
132 #undef END_OF_BASE_TREE_CODES
133
134 /* Names of tree components.
135 Used for printing out the tree and error messages. */
136 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
137 #define END_OF_BASE_TREE_CODES "@dummy",
138
139 static const char *const tree_code_name[] = {
140 #include "all-tree.def"
141 };
142
143 #undef DEFTREECODE
144 #undef END_OF_BASE_TREE_CODES
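
/* Illustrative sketch (editorial addition, not part of the original
   sources): the tables generated above from all-tree.def are indexed
   directly by a tree code, e.g.

     enum tree_code code = PLUS_EXPR;
     enum tree_code_class cls = tree_code_type[code];
     unsigned char nops = tree_code_length[code];
     const char *name = tree_code_name[code];

   which yields tcc_binary, 2 and "plus_expr" respectively.  The
   TREE_CODE_CLASS and TREE_CODE_LENGTH macros and get_tree_code_name
   wrap these lookups.  */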
145
146 /* Each tree code class has an associated string representation.
147 These must correspond to the tree_code_class entries. */
148
149 const char *const tree_code_class_strings[] =
150 {
151 "exceptional",
152 "constant",
153 "type",
154 "declaration",
155 "reference",
156 "comparison",
157 "unary",
158 "binary",
159 "statement",
160 "vl_exp",
161 "expression"
162 };
163
164 /* obstack.[ch] explicitly declined to prototype this. */
165 extern int _obstack_allocated_p (struct obstack *h, void *obj);
166
167 /* Statistics-gathering stuff. */
168
169 static int tree_code_counts[MAX_TREE_CODES];
170 int tree_node_counts[(int) all_kinds];
171 int tree_node_sizes[(int) all_kinds];
172
173 /* Keep in sync with tree.h:enum tree_node_kind. */
174 static const char * const tree_node_kind_names[] = {
175 "decls",
176 "types",
177 "blocks",
178 "stmts",
179 "refs",
180 "exprs",
181 "constants",
182 "identifiers",
183 "vecs",
184 "binfos",
185 "ssa names",
186 "constructors",
187 "random kinds",
188 "lang_decl kinds",
189 "lang_type kinds",
190 "omp clauses",
191 };
192
193 /* Unique id for next decl created. */
194 static GTY(()) int next_decl_uid;
195 /* Unique id for next type created. */
196 static GTY(()) int next_type_uid = 1;
197 /* Unique id for next debug decl created. Use negative numbers,
198 to catch erroneous uses. */
199 static GTY(()) int next_debug_decl_uid;
200
201 /* Since we cannot rehash a type after it is in the table, we have to
202 keep the hash code. */
203
204 struct GTY((for_user)) type_hash {
205 unsigned long hash;
206 tree type;
207 };
208
209 /* Initial size of the hash table (rounded to next prime). */
210 #define TYPE_HASH_INITIAL_SIZE 1000
211
212 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
213 {
214 static hashval_t hash (type_hash *t) { return t->hash; }
215 static bool equal (type_hash *a, type_hash *b);
216
217 static void
218 handle_cache_entry (type_hash *&t)
219 {
220 extern void gt_ggc_mx (type_hash *&);
221 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
222 return;
223 else if (ggc_marked_p (t->type))
224 gt_ggc_mx (t);
225 else
226 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
227 }
228 };
229
230 /* Now here is the hash table. When recording a type, it is added to
231 the slot whose index is the hash code. Note that the hash table is
232 used for several kinds of types (function types, array types and
233 array index range types, for now). While all these live in the
234 same table, they are completely independent, and the hash code is
235 computed differently for each of these. */
236
237 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
238
239 /* Hash table and temporary node for larger integer const values. */
240 static GTY (()) tree int_cst_node;
241
242 struct int_cst_hasher : ggc_cache_hasher<tree>
243 {
244 static hashval_t hash (tree t);
245 static bool equal (tree x, tree y);
246 };
247
248 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
249
250 /* Hash table for optimization flags and target option flags. Use the same
251 hash table for both sets of options. Nodes for building the current
252 optimization and target option nodes. The assumption is most of the time
253 the options created will already be in the hash table, so we avoid
254 allocating and freeing up a node repeatedly. */
255 static GTY (()) tree cl_optimization_node;
256 static GTY (()) tree cl_target_option_node;
257
258 struct cl_option_hasher : ggc_cache_hasher<tree>
259 {
260 static hashval_t hash (tree t);
261 static bool equal (tree x, tree y);
262 };
263
264 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
265
266 /* General tree->tree mapping structure for use in hash tables. */
267
268
269 static GTY ((cache))
270 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
271
272 static GTY ((cache))
273 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
274
275 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
276 {
277 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
278
279 static bool
280 equal (tree_vec_map *a, tree_vec_map *b)
281 {
282 return a->base.from == b->base.from;
283 }
284
285 static void
286 handle_cache_entry (tree_vec_map *&m)
287 {
288 extern void gt_ggc_mx (tree_vec_map *&);
289 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
290 return;
291 else if (ggc_marked_p (m->base.from))
292 gt_ggc_mx (m);
293 else
294 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
295 }
296 };
297
298 static GTY ((cache))
299 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
300
301 static void set_type_quals (tree, int);
302 static void print_type_hash_statistics (void);
303 static void print_debug_expr_statistics (void);
304 static void print_value_expr_statistics (void);
305 static void type_hash_list (const_tree, inchash::hash &);
306 static void attribute_hash_list (const_tree, inchash::hash &);
307
308 tree global_trees[TI_MAX];
309 tree integer_types[itk_none];
310
311 bool int_n_enabled_p[NUM_INT_N_ENTS];
312 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
313
314 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
315
316 /* Number of operands for each OpenMP clause. */
317 unsigned const char omp_clause_num_ops[] =
318 {
319 0, /* OMP_CLAUSE_ERROR */
320 1, /* OMP_CLAUSE_PRIVATE */
321 1, /* OMP_CLAUSE_SHARED */
322 1, /* OMP_CLAUSE_FIRSTPRIVATE */
323 2, /* OMP_CLAUSE_LASTPRIVATE */
324 4, /* OMP_CLAUSE_REDUCTION */
325 1, /* OMP_CLAUSE_COPYIN */
326 1, /* OMP_CLAUSE_COPYPRIVATE */
327 3, /* OMP_CLAUSE_LINEAR */
328 2, /* OMP_CLAUSE_ALIGNED */
329 1, /* OMP_CLAUSE_DEPEND */
330 1, /* OMP_CLAUSE_UNIFORM */
331 2, /* OMP_CLAUSE_FROM */
332 2, /* OMP_CLAUSE_TO */
333 2, /* OMP_CLAUSE_MAP */
334 2, /* OMP_CLAUSE__CACHE_ */
335 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
336 1, /* OMP_CLAUSE_USE_DEVICE */
337 2, /* OMP_CLAUSE_GANG */
338 1, /* OMP_CLAUSE_ASYNC */
339 1, /* OMP_CLAUSE_WAIT */
340 0, /* OMP_CLAUSE_AUTO */
341 0, /* OMP_CLAUSE_SEQ */
342 1, /* OMP_CLAUSE__LOOPTEMP_ */
343 1, /* OMP_CLAUSE_IF */
344 1, /* OMP_CLAUSE_NUM_THREADS */
345 1, /* OMP_CLAUSE_SCHEDULE */
346 0, /* OMP_CLAUSE_NOWAIT */
347 0, /* OMP_CLAUSE_ORDERED */
348 0, /* OMP_CLAUSE_DEFAULT */
349 3, /* OMP_CLAUSE_COLLAPSE */
350 0, /* OMP_CLAUSE_UNTIED */
351 1, /* OMP_CLAUSE_FINAL */
352 0, /* OMP_CLAUSE_MERGEABLE */
353 1, /* OMP_CLAUSE_DEVICE */
354 1, /* OMP_CLAUSE_DIST_SCHEDULE */
355 0, /* OMP_CLAUSE_INBRANCH */
356 0, /* OMP_CLAUSE_NOTINBRANCH */
357 1, /* OMP_CLAUSE_NUM_TEAMS */
358 1, /* OMP_CLAUSE_THREAD_LIMIT */
359 0, /* OMP_CLAUSE_PROC_BIND */
360 1, /* OMP_CLAUSE_SAFELEN */
361 1, /* OMP_CLAUSE_SIMDLEN */
362 0, /* OMP_CLAUSE_FOR */
363 0, /* OMP_CLAUSE_PARALLEL */
364 0, /* OMP_CLAUSE_SECTIONS */
365 0, /* OMP_CLAUSE_TASKGROUP */
366 1, /* OMP_CLAUSE__SIMDUID_ */
367 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
368 0, /* OMP_CLAUSE_INDEPENDENT */
369 1, /* OMP_CLAUSE_WORKER */
370 1, /* OMP_CLAUSE_VECTOR */
371 1, /* OMP_CLAUSE_NUM_GANGS */
372 1, /* OMP_CLAUSE_NUM_WORKERS */
373 1, /* OMP_CLAUSE_VECTOR_LENGTH */
374 };
375
376 const char * const omp_clause_code_name[] =
377 {
378 "error_clause",
379 "private",
380 "shared",
381 "firstprivate",
382 "lastprivate",
383 "reduction",
384 "copyin",
385 "copyprivate",
386 "linear",
387 "aligned",
388 "depend",
389 "uniform",
390 "from",
391 "to",
392 "map",
393 "_cache_",
394 "device_resident",
395 "use_device",
396 "gang",
397 "async",
398 "wait",
399 "auto",
400 "seq",
401 "_looptemp_",
402 "if",
403 "num_threads",
404 "schedule",
405 "nowait",
406 "ordered",
407 "default",
408 "collapse",
409 "untied",
410 "final",
411 "mergeable",
412 "device",
413 "dist_schedule",
414 "inbranch",
415 "notinbranch",
416 "num_teams",
417 "thread_limit",
418 "proc_bind",
419 "safelen",
420 "simdlen",
421 "for",
422 "parallel",
423 "sections",
424 "taskgroup",
425 "_simduid_",
426 "_Cilk_for_count_",
427 "independent",
428 "worker",
429 "vector",
430 "num_gangs",
431 "num_workers",
432 "vector_length"
433 };
434
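
/* Illustrative sketch (editorial addition): both tables above are indexed
   by an OMP clause code, normally obtained from a clause node:

     enum omp_clause_code kind = OMP_CLAUSE_CODE (clause);
     unsigned nops = omp_clause_num_ops[kind];
     const char *name = omp_clause_code_name[kind];

   where CLAUSE is assumed to be some OMP_CLAUSE node.  */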
435
436 /* Return the tree node structure used by tree code CODE. */
437
438 static inline enum tree_node_structure_enum
439 tree_node_structure_for_code (enum tree_code code)
440 {
441 switch (TREE_CODE_CLASS (code))
442 {
443 case tcc_declaration:
444 {
445 switch (code)
446 {
447 case FIELD_DECL:
448 return TS_FIELD_DECL;
449 case PARM_DECL:
450 return TS_PARM_DECL;
451 case VAR_DECL:
452 return TS_VAR_DECL;
453 case LABEL_DECL:
454 return TS_LABEL_DECL;
455 case RESULT_DECL:
456 return TS_RESULT_DECL;
457 case DEBUG_EXPR_DECL:
458 return TS_DECL_WRTL;
459 case CONST_DECL:
460 return TS_CONST_DECL;
461 case TYPE_DECL:
462 return TS_TYPE_DECL;
463 case FUNCTION_DECL:
464 return TS_FUNCTION_DECL;
465 case TRANSLATION_UNIT_DECL:
466 return TS_TRANSLATION_UNIT_DECL;
467 default:
468 return TS_DECL_NON_COMMON;
469 }
470 }
471 case tcc_type:
472 return TS_TYPE_NON_COMMON;
473 case tcc_reference:
474 case tcc_comparison:
475 case tcc_unary:
476 case tcc_binary:
477 case tcc_expression:
478 case tcc_statement:
479 case tcc_vl_exp:
480 return TS_EXP;
481 default: /* tcc_constant and tcc_exceptional */
482 break;
483 }
484 switch (code)
485 {
486 /* tcc_constant cases. */
487 case VOID_CST: return TS_TYPED;
488 case INTEGER_CST: return TS_INT_CST;
489 case REAL_CST: return TS_REAL_CST;
490 case FIXED_CST: return TS_FIXED_CST;
491 case COMPLEX_CST: return TS_COMPLEX;
492 case VECTOR_CST: return TS_VECTOR;
493 case STRING_CST: return TS_STRING;
494 /* tcc_exceptional cases. */
495 case ERROR_MARK: return TS_COMMON;
496 case IDENTIFIER_NODE: return TS_IDENTIFIER;
497 case TREE_LIST: return TS_LIST;
498 case TREE_VEC: return TS_VEC;
499 case SSA_NAME: return TS_SSA_NAME;
500 case PLACEHOLDER_EXPR: return TS_COMMON;
501 case STATEMENT_LIST: return TS_STATEMENT_LIST;
502 case BLOCK: return TS_BLOCK;
503 case CONSTRUCTOR: return TS_CONSTRUCTOR;
504 case TREE_BINFO: return TS_BINFO;
505 case OMP_CLAUSE: return TS_OMP_CLAUSE;
506 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
507 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
508
509 default:
510 gcc_unreachable ();
511 }
512 }
513
514
515 /* Initialize tree_contains_struct to describe the hierarchy of tree
516 nodes. */
517
518 static void
519 initialize_tree_contains_struct (void)
520 {
521 unsigned i;
522
523 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
524 {
525 enum tree_code code;
526 enum tree_node_structure_enum ts_code;
527
528 code = (enum tree_code) i;
529 ts_code = tree_node_structure_for_code (code);
530
531 /* Mark the TS structure itself. */
532 tree_contains_struct[code][ts_code] = 1;
533
534 /* Mark all the structures that TS is derived from. */
535 switch (ts_code)
536 {
537 case TS_TYPED:
538 case TS_BLOCK:
539 MARK_TS_BASE (code);
540 break;
541
542 case TS_COMMON:
543 case TS_INT_CST:
544 case TS_REAL_CST:
545 case TS_FIXED_CST:
546 case TS_VECTOR:
547 case TS_STRING:
548 case TS_COMPLEX:
549 case TS_SSA_NAME:
550 case TS_CONSTRUCTOR:
551 case TS_EXP:
552 case TS_STATEMENT_LIST:
553 MARK_TS_TYPED (code);
554 break;
555
556 case TS_IDENTIFIER:
557 case TS_DECL_MINIMAL:
558 case TS_TYPE_COMMON:
559 case TS_LIST:
560 case TS_VEC:
561 case TS_BINFO:
562 case TS_OMP_CLAUSE:
563 case TS_OPTIMIZATION:
564 case TS_TARGET_OPTION:
565 MARK_TS_COMMON (code);
566 break;
567
568 case TS_TYPE_WITH_LANG_SPECIFIC:
569 MARK_TS_TYPE_COMMON (code);
570 break;
571
572 case TS_TYPE_NON_COMMON:
573 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
574 break;
575
576 case TS_DECL_COMMON:
577 MARK_TS_DECL_MINIMAL (code);
578 break;
579
580 case TS_DECL_WRTL:
581 case TS_CONST_DECL:
582 MARK_TS_DECL_COMMON (code);
583 break;
584
585 case TS_DECL_NON_COMMON:
586 MARK_TS_DECL_WITH_VIS (code);
587 break;
588
589 case TS_DECL_WITH_VIS:
590 case TS_PARM_DECL:
591 case TS_LABEL_DECL:
592 case TS_RESULT_DECL:
593 MARK_TS_DECL_WRTL (code);
594 break;
595
596 case TS_FIELD_DECL:
597 MARK_TS_DECL_COMMON (code);
598 break;
599
600 case TS_VAR_DECL:
601 MARK_TS_DECL_WITH_VIS (code);
602 break;
603
604 case TS_TYPE_DECL:
605 case TS_FUNCTION_DECL:
606 MARK_TS_DECL_NON_COMMON (code);
607 break;
608
609 case TS_TRANSLATION_UNIT_DECL:
610 MARK_TS_DECL_COMMON (code);
611 break;
612
613 default:
614 gcc_unreachable ();
615 }
616 }
617
618 /* Basic consistency checks for attributes used in fold. */
619 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
620 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
621 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
622 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
623 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
624 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
625 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
626 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
629 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
631 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
632 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
634 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
645 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
646 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
647 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
648 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
649 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
650 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
651 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
652 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
653 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
654 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
655 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
657 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
659 }
660
661
662 /* Init tree.c. */
663
664 void
665 init_ttree (void)
666 {
667 /* Initialize the hash table of types. */
668 type_hash_table
669 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
670
671 debug_expr_for_decl
672 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
673
674 value_expr_for_decl
675 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
676
677 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
678
679 int_cst_node = make_int_cst (1, 1);
680
681 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
682
683 cl_optimization_node = make_node (OPTIMIZATION_NODE);
684 cl_target_option_node = make_node (TARGET_OPTION_NODE);
685
686 /* Initialize the tree_contains_struct array. */
687 initialize_tree_contains_struct ();
688 lang_hooks.init_ts ();
689 }
690
691 \f
692 /* The name of the object as the assembler will see it (but before any
693 translations made by ASM_OUTPUT_LABELREF). Often this is the same
694 as DECL_NAME. It is an IDENTIFIER_NODE. */
695 tree
696 decl_assembler_name (tree decl)
697 {
698 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
699 lang_hooks.set_decl_assembler_name (decl);
700 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
701 }
702
703 /* When the target supports COMDAT groups, this indicates which group the
704 DECL is associated with. This can be either an IDENTIFIER_NODE or a
705 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
706 tree
707 decl_comdat_group (const_tree node)
708 {
709 struct symtab_node *snode = symtab_node::get (node);
710 if (!snode)
711 return NULL;
712 return snode->get_comdat_group ();
713 }
714
715 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
716 tree
717 decl_comdat_group_id (const_tree node)
718 {
719 struct symtab_node *snode = symtab_node::get (node);
720 if (!snode)
721 return NULL;
722 return snode->get_comdat_group_id ();
723 }
724
725 /* When the target supports named sections, return the name of the section
726 NODE is placed in, or NULL if it is in no section. */
727 const char *
728 decl_section_name (const_tree node)
729 {
730 struct symtab_node *snode = symtab_node::get (node);
731 if (!snode)
732 return NULL;
733 return snode->get_section ();
734 }
735
736 /* Set the section name of NODE to the string VALUE, or clear the
737 section when VALUE is NULL. */
738 void
739 set_decl_section_name (tree node, const char *value)
740 {
741 struct symtab_node *snode;
742
743 if (value == NULL)
744 {
745 snode = symtab_node::get (node);
746 if (!snode)
747 return;
748 }
749 else if (TREE_CODE (node) == VAR_DECL)
750 snode = varpool_node::get_create (node);
751 else
752 snode = cgraph_node::get_create (node);
753 snode->set_section (value);
754 }
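
/* Illustrative sketch (editorial addition): typical use of the section
   accessors above, assuming VAR is a VAR_DECL backed (or about to be
   backed) by a symbol table node:

     set_decl_section_name (var, ".mydata");
     const char *sect = decl_section_name (var);

   after which SECT is ".mydata"; calling set_decl_section_name with a
   NULL value drops the section again.  */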
755
756 /* Return TLS model of a variable NODE. */
757 enum tls_model
758 decl_tls_model (const_tree node)
759 {
760 struct varpool_node *snode = varpool_node::get (node);
761 if (!snode)
762 return TLS_MODEL_NONE;
763 return snode->tls_model;
764 }
765
766 /* Set TLS model of variable NODE to MODEL. */
767 void
768 set_decl_tls_model (tree node, enum tls_model model)
769 {
770 struct varpool_node *vnode;
771
772 if (model == TLS_MODEL_NONE)
773 {
774 vnode = varpool_node::get (node);
775 if (!vnode)
776 return;
777 }
778 else
779 vnode = varpool_node::get_create (node);
780 vnode->tls_model = model;
781 }
782
783 /* Compute the number of bytes occupied by a tree with code CODE.
784 This function cannot be used for nodes that have variable sizes,
785 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
786 size_t
787 tree_code_size (enum tree_code code)
788 {
789 switch (TREE_CODE_CLASS (code))
790 {
791 case tcc_declaration: /* A decl node */
792 {
793 switch (code)
794 {
795 case FIELD_DECL:
796 return sizeof (struct tree_field_decl);
797 case PARM_DECL:
798 return sizeof (struct tree_parm_decl);
799 case VAR_DECL:
800 return sizeof (struct tree_var_decl);
801 case LABEL_DECL:
802 return sizeof (struct tree_label_decl);
803 case RESULT_DECL:
804 return sizeof (struct tree_result_decl);
805 case CONST_DECL:
806 return sizeof (struct tree_const_decl);
807 case TYPE_DECL:
808 return sizeof (struct tree_type_decl);
809 case FUNCTION_DECL:
810 return sizeof (struct tree_function_decl);
811 case DEBUG_EXPR_DECL:
812 return sizeof (struct tree_decl_with_rtl);
813 case TRANSLATION_UNIT_DECL:
814 return sizeof (struct tree_translation_unit_decl);
815 case NAMESPACE_DECL:
816 case IMPORTED_DECL:
817 case NAMELIST_DECL:
818 return sizeof (struct tree_decl_non_common);
819 default:
820 return lang_hooks.tree_size (code);
821 }
822 }
823
824 case tcc_type: /* a type node */
825 return sizeof (struct tree_type_non_common);
826
827 case tcc_reference: /* a reference */
828 case tcc_expression: /* an expression */
829 case tcc_statement: /* an expression with side effects */
830 case tcc_comparison: /* a comparison expression */
831 case tcc_unary: /* a unary arithmetic expression */
832 case tcc_binary: /* a binary arithmetic expression */
833 return (sizeof (struct tree_exp)
834 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
835
836 case tcc_constant: /* a constant */
837 switch (code)
838 {
839 case VOID_CST: return sizeof (struct tree_typed);
840 case INTEGER_CST: gcc_unreachable ();
841 case REAL_CST: return sizeof (struct tree_real_cst);
842 case FIXED_CST: return sizeof (struct tree_fixed_cst);
843 case COMPLEX_CST: return sizeof (struct tree_complex);
844 case VECTOR_CST: return sizeof (struct tree_vector);
845 case STRING_CST: gcc_unreachable ();
846 default:
847 return lang_hooks.tree_size (code);
848 }
849
850 case tcc_exceptional: /* something random, like an identifier. */
851 switch (code)
852 {
853 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
854 case TREE_LIST: return sizeof (struct tree_list);
855
856 case ERROR_MARK:
857 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
858
859 case TREE_VEC:
860 case OMP_CLAUSE: gcc_unreachable ();
861
862 case SSA_NAME: return sizeof (struct tree_ssa_name);
863
864 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
865 case BLOCK: return sizeof (struct tree_block);
866 case CONSTRUCTOR: return sizeof (struct tree_constructor);
867 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
868 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
869
870 default:
871 return lang_hooks.tree_size (code);
872 }
873
874 default:
875 gcc_unreachable ();
876 }
877 }
878
879 /* Compute the number of bytes occupied by NODE. This routine only
880 looks at TREE_CODE, except for those nodes that have variable sizes. */
881 size_t
882 tree_size (const_tree node)
883 {
884 const enum tree_code code = TREE_CODE (node);
885 switch (code)
886 {
887 case INTEGER_CST:
888 return (sizeof (struct tree_int_cst)
889 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
890
891 case TREE_BINFO:
892 return (offsetof (struct tree_binfo, base_binfos)
893 + vec<tree, va_gc>
894 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
895
896 case TREE_VEC:
897 return (sizeof (struct tree_vec)
898 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
899
900 case VECTOR_CST:
901 return (sizeof (struct tree_vector)
902 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
903
904 case STRING_CST:
905 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
906
907 case OMP_CLAUSE:
908 return (sizeof (struct tree_omp_clause)
909 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
910 * sizeof (tree));
911
912 default:
913 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
914 return (sizeof (struct tree_exp)
915 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
916 else
917 return tree_code_size (code);
918 }
919 }
920
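/* Illustrative sketch (editorial addition): for fixed-size codes the two
   routines above agree, while variable-sized nodes must use tree_size:

     size_t a = tree_code_size (FUNCTION_DECL);
     tree vec = make_tree_vec (4);
     size_t b = tree_size (vec);

   Here A is sizeof (struct tree_function_decl) and B is
   sizeof (struct tree_vec) plus room for three more "tree" slots.  */
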
921 /* Record interesting allocation statistics for a tree node with CODE
922 and LENGTH. */
923
924 static void
925 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
926 size_t length ATTRIBUTE_UNUSED)
927 {
928 enum tree_code_class type = TREE_CODE_CLASS (code);
929 tree_node_kind kind;
930
931 if (!GATHER_STATISTICS)
932 return;
933
934 switch (type)
935 {
936 case tcc_declaration: /* A decl node */
937 kind = d_kind;
938 break;
939
940 case tcc_type: /* a type node */
941 kind = t_kind;
942 break;
943
944 case tcc_statement: /* an expression with side effects */
945 kind = s_kind;
946 break;
947
948 case tcc_reference: /* a reference */
949 kind = r_kind;
950 break;
951
952 case tcc_expression: /* an expression */
953 case tcc_comparison: /* a comparison expression */
954 case tcc_unary: /* a unary arithmetic expression */
955 case tcc_binary: /* a binary arithmetic expression */
956 kind = e_kind;
957 break;
958
959 case tcc_constant: /* a constant */
960 kind = c_kind;
961 break;
962
963 case tcc_exceptional: /* something random, like an identifier. */
964 switch (code)
965 {
966 case IDENTIFIER_NODE:
967 kind = id_kind;
968 break;
969
970 case TREE_VEC:
971 kind = vec_kind;
972 break;
973
974 case TREE_BINFO:
975 kind = binfo_kind;
976 break;
977
978 case SSA_NAME:
979 kind = ssa_name_kind;
980 break;
981
982 case BLOCK:
983 kind = b_kind;
984 break;
985
986 case CONSTRUCTOR:
987 kind = constr_kind;
988 break;
989
990 case OMP_CLAUSE:
991 kind = omp_clause_kind;
992 break;
993
994 default:
995 kind = x_kind;
996 break;
997 }
998 break;
999
1000 case tcc_vl_exp:
1001 kind = e_kind;
1002 break;
1003
1004 default:
1005 gcc_unreachable ();
1006 }
1007
1008 tree_code_counts[(int) code]++;
1009 tree_node_counts[(int) kind]++;
1010 tree_node_sizes[(int) kind] += length;
1011 }
1012
1013 /* Allocate and return a new UID from the DECL_UID namespace. */
1014
1015 int
1016 allocate_decl_uid (void)
1017 {
1018 return next_decl_uid++;
1019 }
1020
1021 /* Return a newly allocated node of code CODE. For decl and type
1022 nodes, some other fields are initialized. The rest of the node is
1023 initialized to zero. This function cannot be used for TREE_VEC,
1024 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1025 tree_code_size.
1026
1027 Achoo! I got a code in the node. */
1028
1029 tree
1030 make_node_stat (enum tree_code code MEM_STAT_DECL)
1031 {
1032 tree t;
1033 enum tree_code_class type = TREE_CODE_CLASS (code);
1034 size_t length = tree_code_size (code);
1035
1036 record_node_allocation_statistics (code, length);
1037
1038 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1039 TREE_SET_CODE (t, code);
1040
1041 switch (type)
1042 {
1043 case tcc_statement:
1044 TREE_SIDE_EFFECTS (t) = 1;
1045 break;
1046
1047 case tcc_declaration:
1048 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1049 {
1050 if (code == FUNCTION_DECL)
1051 {
1052 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1053 DECL_MODE (t) = FUNCTION_MODE;
1054 }
1055 else
1056 DECL_ALIGN (t) = 1;
1057 }
1058 DECL_SOURCE_LOCATION (t) = input_location;
1059 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1060 DECL_UID (t) = --next_debug_decl_uid;
1061 else
1062 {
1063 DECL_UID (t) = allocate_decl_uid ();
1064 SET_DECL_PT_UID (t, -1);
1065 }
1066 if (TREE_CODE (t) == LABEL_DECL)
1067 LABEL_DECL_UID (t) = -1;
1068
1069 break;
1070
1071 case tcc_type:
1072 TYPE_UID (t) = next_type_uid++;
1073 TYPE_ALIGN (t) = BITS_PER_UNIT;
1074 TYPE_USER_ALIGN (t) = 0;
1075 TYPE_MAIN_VARIANT (t) = t;
1076 TYPE_CANONICAL (t) = t;
1077
1078 /* Default to no attributes for type, but let target change that. */
1079 TYPE_ATTRIBUTES (t) = NULL_TREE;
1080 targetm.set_default_type_attributes (t);
1081
1082 /* We have not yet computed the alias set for this type. */
1083 TYPE_ALIAS_SET (t) = -1;
1084 break;
1085
1086 case tcc_constant:
1087 TREE_CONSTANT (t) = 1;
1088 break;
1089
1090 case tcc_expression:
1091 switch (code)
1092 {
1093 case INIT_EXPR:
1094 case MODIFY_EXPR:
1095 case VA_ARG_EXPR:
1096 case PREDECREMENT_EXPR:
1097 case PREINCREMENT_EXPR:
1098 case POSTDECREMENT_EXPR:
1099 case POSTINCREMENT_EXPR:
1100 /* All of these have side-effects, no matter what their
1101 operands are. */
1102 TREE_SIDE_EFFECTS (t) = 1;
1103 break;
1104
1105 default:
1106 break;
1107 }
1108 break;
1109
1110 default:
1111 /* Other classes need no special treatment. */
1112 break;
1113 }
1114
1115 return t;
1116 }
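
/* Illustrative sketch (editorial addition): make_node returns a zeroed
   node with only the per-class defaults above filled in, e.g.

     tree t = make_node (RECORD_TYPE);
     tree d = make_node (VAR_DECL);

   T gets a fresh TYPE_UID, TYPE_ALIGN of BITS_PER_UNIT and is its own
   main variant and canonical type; D gets a fresh DECL_UID, DECL_ALIGN
   of 1 and DECL_SOURCE_LOCATION set to input_location.  */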
1117 \f
1118 /* Return a new node with the same contents as NODE except that its
1119 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1120
1121 tree
1122 copy_node_stat (tree node MEM_STAT_DECL)
1123 {
1124 tree t;
1125 enum tree_code code = TREE_CODE (node);
1126 size_t length;
1127
1128 gcc_assert (code != STATEMENT_LIST);
1129
1130 length = tree_size (node);
1131 record_node_allocation_statistics (code, length);
1132 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1133 memcpy (t, node, length);
1134
1135 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1136 TREE_CHAIN (t) = 0;
1137 TREE_ASM_WRITTEN (t) = 0;
1138 TREE_VISITED (t) = 0;
1139
1140 if (TREE_CODE_CLASS (code) == tcc_declaration)
1141 {
1142 if (code == DEBUG_EXPR_DECL)
1143 DECL_UID (t) = --next_debug_decl_uid;
1144 else
1145 {
1146 DECL_UID (t) = allocate_decl_uid ();
1147 if (DECL_PT_UID_SET_P (node))
1148 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1149 }
1150 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1151 && DECL_HAS_VALUE_EXPR_P (node))
1152 {
1153 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1154 DECL_HAS_VALUE_EXPR_P (t) = 1;
1155 }
1156 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1157 if (TREE_CODE (node) == VAR_DECL)
1158 {
1159 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1160 t->decl_with_vis.symtab_node = NULL;
1161 }
1162 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1163 {
1164 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1165 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1166 }
1167 if (TREE_CODE (node) == FUNCTION_DECL)
1168 {
1169 DECL_STRUCT_FUNCTION (t) = NULL;
1170 t->decl_with_vis.symtab_node = NULL;
1171 }
1172 }
1173 else if (TREE_CODE_CLASS (code) == tcc_type)
1174 {
1175 TYPE_UID (t) = next_type_uid++;
1176 /* The following is so that the debug code for
1177 the copy is different from the original type.
1178 The two statements usually duplicate each other
1179 (because they clear fields of the same union),
1180 but the optimizer should catch that. */
1181 TYPE_SYMTAB_POINTER (t) = 0;
1182 TYPE_SYMTAB_ADDRESS (t) = 0;
1183
1184 /* Do not copy the values cache. */
1185 if (TYPE_CACHED_VALUES_P (t))
1186 {
1187 TYPE_CACHED_VALUES_P (t) = 0;
1188 TYPE_CACHED_VALUES (t) = NULL_TREE;
1189 }
1190 }
1191
1192 return t;
1193 }
1194
1195 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1196 For example, this can copy a list made of TREE_LIST nodes. */
1197
1198 tree
1199 copy_list (tree list)
1200 {
1201 tree head;
1202 tree prev, next;
1203
1204 if (list == 0)
1205 return 0;
1206
1207 head = prev = copy_node (list);
1208 next = TREE_CHAIN (list);
1209 while (next)
1210 {
1211 TREE_CHAIN (prev) = copy_node (next);
1212 prev = TREE_CHAIN (prev);
1213 next = TREE_CHAIN (next);
1214 }
1215 return head;
1216 }
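
/* Illustrative sketch (editorial addition): copy_list duplicates each
   TREE_LIST cell but not the values hanging off it:

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
                            tree_cons (NULL_TREE, integer_one_node,
                                       NULL_TREE));
     tree dup = copy_list (orig);

   DUP is a fresh two-element chain, yet TREE_VALUE (dup) is still the
   shared integer_zero_node.  */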
1217
1218 \f
1219 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1220 INTEGER_CST with value CST and type TYPE. */
1221
1222 static unsigned int
1223 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1224 {
1225 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1226 /* We need an extra zero HWI if CST is an unsigned integer with its
1227 upper bit set, and if CST occupies a whole number of HWIs. */
1228 if (TYPE_UNSIGNED (type)
1229 && wi::neg_p (cst)
1230 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1231 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1232 return cst.get_len ();
1233 }
1234
1235 /* Return a new INTEGER_CST with value CST and type TYPE. */
1236
1237 static tree
1238 build_new_int_cst (tree type, const wide_int &cst)
1239 {
1240 unsigned int len = cst.get_len ();
1241 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1242 tree nt = make_int_cst (len, ext_len);
1243
1244 if (len < ext_len)
1245 {
1246 --ext_len;
1247 TREE_INT_CST_ELT (nt, ext_len) = 0;
1248 for (unsigned int i = len; i < ext_len; ++i)
1249 TREE_INT_CST_ELT (nt, i) = -1;
1250 }
1251 else if (TYPE_UNSIGNED (type)
1252 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1253 {
1254 len--;
1255 TREE_INT_CST_ELT (nt, len)
1256 = zext_hwi (cst.elt (len),
1257 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1258 }
1259
1260 for (unsigned int i = 0; i < len; i++)
1261 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1262 TREE_TYPE (nt) = type;
1263 return nt;
1264 }
1265
1266 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1267
1268 tree
1269 build_int_cst (tree type, HOST_WIDE_INT low)
1270 {
1271 /* Support legacy code. */
1272 if (!type)
1273 type = integer_type_node;
1274
1275 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1276 }
1277
1278 tree
1279 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1280 {
1281 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1282 }
1283
1284 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1285
1286 tree
1287 build_int_cst_type (tree type, HOST_WIDE_INT low)
1288 {
1289 gcc_assert (type);
1290 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1291 }
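
/* Illustrative sketch (editorial addition): the entry points above all
   funnel into wide_int_to_tree, so equal small values of the same type
   come back as the same shared node:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst_type (integer_type_node, 7);
     tree m = build_int_cst (unsigned_type_node, -1);

   A == B here (both come from the per-type cache), and M is the
   all-ones constant of unsigned int after truncation to its
   precision.  */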
1292
1293 /* Construct a tree of type TYPE with the value given by CST. The signedness
1294 of CST is assumed to be the same as the signedness of TYPE. */
1295
1296 tree
1297 double_int_to_tree (tree type, double_int cst)
1298 {
1299 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1300 }
1301
1302 /* We force the wide_int CST into the range of the type TYPE by sign- or
1303 zero-extending it. OVERFLOWABLE indicates whether we are interested in
1304 overflow of the value: when >0 we are only interested in signed
1305 overflow, when <0 we are interested in any overflow. OVERFLOWED
1306 indicates whether overflow has already occurred. We force
1307 CST's value to be within range of TYPE (by setting to 0 or 1 all
1308 the bits outside the type's range). We set TREE_OVERFLOW on the
1309 result if
1310 OVERFLOWED is nonzero,
1311 or OVERFLOWABLE is >0 and signed overflow occurs,
1312 or OVERFLOWABLE is <0 and any overflow occurs.
1313 We return a new tree node for the extended wide_int. The node
1314 is shared if no overflow flags are set. */
1315
1316
1317 tree
1318 force_fit_type (tree type, const wide_int_ref &cst,
1319 int overflowable, bool overflowed)
1320 {
1321 signop sign = TYPE_SIGN (type);
1322
1323 /* If we need to set overflow flags, return a new unshared node. */
1324 if (overflowed || !wi::fits_to_tree_p (cst, type))
1325 {
1326 if (overflowed
1327 || overflowable < 0
1328 || (overflowable > 0 && sign == SIGNED))
1329 {
1330 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1331 tree t = build_new_int_cst (type, tmp);
1332 TREE_OVERFLOW (t) = 1;
1333 return t;
1334 }
1335 }
1336
1337 /* Else build a shared node. */
1338 return wide_int_to_tree (type, cst);
1339 }
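
/* Illustrative sketch (editorial addition): forcing an out-of-range
   value into a narrow type flags overflow on an unshared node:

     wide_int w = wi::shwi (300, TYPE_PRECISION (integer_type_node));
     tree t = force_fit_type (signed_char_type_node, w, 1, false);

   300 does not fit in a signed 8-bit type, so TREE_OVERFLOW (t) is set
   and the value is wrapped to the type's precision.  */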
1340
1341 /* These are the hash table functions for the hash table of shared
1342 INTEGER_CST nodes. */
1343
1344 /* Return the hash code of X, an INTEGER_CST. */
1345
1346 hashval_t
1347 int_cst_hasher::hash (tree x)
1348 {
1349 const_tree const t = x;
1350 hashval_t code = TYPE_UID (TREE_TYPE (t));
1351 int i;
1352
1353 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1354 code ^= TREE_INT_CST_ELT (t, i);
1355
1356 return code;
1357 }
1358
1359 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1360 is the same as that given by Y, which is also an INTEGER_CST. */
1361
1362 bool
1363 int_cst_hasher::equal (tree x, tree y)
1364 {
1365 const_tree const xt = x;
1366 const_tree const yt = y;
1367
1368 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1369 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1370 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1371 return false;
1372
1373 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1374 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1375 return false;
1376
1377 return true;
1378 }
1379
1380 /* Create an INT_CST node of TYPE and value CST.
1381 The returned node is always shared. For small integers we use a
1382 per-type vector cache, for larger ones we use a single hash table.
1383 The value is extended from its precision according to the sign of
1384 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1385 the upper bits and ensures that hashing and value equality based
1386 upon the underlying HOST_WIDE_INTs works without masking. */
1387
1388 tree
1389 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1390 {
1391 tree t;
1392 int ix = -1;
1393 int limit = 0;
1394
1395 gcc_assert (type);
1396 unsigned int prec = TYPE_PRECISION (type);
1397 signop sgn = TYPE_SIGN (type);
1398
1399 /* Verify that everything is canonical. */
1400 int l = pcst.get_len ();
1401 if (l > 1)
1402 {
1403 if (pcst.elt (l - 1) == 0)
1404 gcc_checking_assert (pcst.elt (l - 2) < 0);
1405 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1406 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1407 }
1408
1409 wide_int cst = wide_int::from (pcst, prec, sgn);
1410 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1411
1412 if (ext_len == 1)
1413 {
1414 /* We just need to store a single HOST_WIDE_INT. */
1415 HOST_WIDE_INT hwi;
1416 if (TYPE_UNSIGNED (type))
1417 hwi = cst.to_uhwi ();
1418 else
1419 hwi = cst.to_shwi ();
1420
1421 switch (TREE_CODE (type))
1422 {
1423 case NULLPTR_TYPE:
1424 gcc_assert (hwi == 0);
1425 /* Fallthru. */
1426
1427 case POINTER_TYPE:
1428 case REFERENCE_TYPE:
1429 case POINTER_BOUNDS_TYPE:
1430 /* Cache NULL pointer and zero bounds. */
1431 if (hwi == 0)
1432 {
1433 limit = 1;
1434 ix = 0;
1435 }
1436 break;
1437
1438 case BOOLEAN_TYPE:
1439 /* Cache false or true. */
1440 limit = 2;
1441 if (hwi < 2)
1442 ix = hwi;
1443 break;
1444
1445 case INTEGER_TYPE:
1446 case OFFSET_TYPE:
1447 if (TYPE_SIGN (type) == UNSIGNED)
1448 {
1449 /* Cache [0, N). */
1450 limit = INTEGER_SHARE_LIMIT;
1451 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1452 ix = hwi;
1453 }
1454 else
1455 {
1456 /* Cache [-1, N). */
1457 limit = INTEGER_SHARE_LIMIT + 1;
1458 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1459 ix = hwi + 1;
1460 }
1461 break;
1462
1463 case ENUMERAL_TYPE:
1464 break;
1465
1466 default:
1467 gcc_unreachable ();
1468 }
1469
1470 if (ix >= 0)
1471 {
1472 /* Look for it in the type's vector of small shared ints. */
1473 if (!TYPE_CACHED_VALUES_P (type))
1474 {
1475 TYPE_CACHED_VALUES_P (type) = 1;
1476 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1477 }
1478
1479 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1480 if (t)
1481 /* Make sure no one is clobbering the shared constant. */
1482 gcc_checking_assert (TREE_TYPE (t) == type
1483 && TREE_INT_CST_NUNITS (t) == 1
1484 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1485 && TREE_INT_CST_EXT_NUNITS (t) == 1
1486 && TREE_INT_CST_ELT (t, 0) == hwi);
1487 else
1488 {
1489 /* Create a new shared int. */
1490 t = build_new_int_cst (type, cst);
1491 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1492 }
1493 }
1494 else
1495 {
1496 /* Use the cache of larger shared ints, using int_cst_node as
1497 a temporary. */
1498
1499 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1500 TREE_TYPE (int_cst_node) = type;
1501
1502 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1503 t = *slot;
1504 if (!t)
1505 {
1506 /* Insert this one into the hash table. */
1507 t = int_cst_node;
1508 *slot = t;
1509 /* Make a new node for next time round. */
1510 int_cst_node = make_int_cst (1, 1);
1511 }
1512 }
1513 }
1514 else
1515 {
1516 /* The value either hashes properly or we drop it on the floor
1517 for the gc to take care of. There will not be enough of them
1518 to worry about. */
1519
1520 tree nt = build_new_int_cst (type, cst);
1521 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1522 t = *slot;
1523 if (!t)
1524 {
1525 /* Insert this one into the hash table. */
1526 t = nt;
1527 *slot = t;
1528 }
1529 }
1530
1531 return t;
1532 }
1533
1534 void
1535 cache_integer_cst (tree t)
1536 {
1537 tree type = TREE_TYPE (t);
1538 int ix = -1;
1539 int limit = 0;
1540 int prec = TYPE_PRECISION (type);
1541
1542 gcc_assert (!TREE_OVERFLOW (t));
1543
1544 switch (TREE_CODE (type))
1545 {
1546 case NULLPTR_TYPE:
1547 gcc_assert (integer_zerop (t));
1548 /* Fallthru. */
1549
1550 case POINTER_TYPE:
1551 case REFERENCE_TYPE:
1552 /* Cache NULL pointer. */
1553 if (integer_zerop (t))
1554 {
1555 limit = 1;
1556 ix = 0;
1557 }
1558 break;
1559
1560 case BOOLEAN_TYPE:
1561 /* Cache false or true. */
1562 limit = 2;
1563 if (wi::ltu_p (t, 2))
1564 ix = TREE_INT_CST_ELT (t, 0);
1565 break;
1566
1567 case INTEGER_TYPE:
1568 case OFFSET_TYPE:
1569 if (TYPE_UNSIGNED (type))
1570 {
1571 /* Cache 0..N */
1572 limit = INTEGER_SHARE_LIMIT;
1573
1574 /* This is a little hokey, but if the prec is smaller than
1575 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1576 obvious test will not get the correct answer. */
1577 if (prec < HOST_BITS_PER_WIDE_INT)
1578 {
1579 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1580 ix = tree_to_uhwi (t);
1581 }
1582 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1583 ix = tree_to_uhwi (t);
1584 }
1585 else
1586 {
1587 /* Cache -1..N */
1588 limit = INTEGER_SHARE_LIMIT + 1;
1589
1590 if (integer_minus_onep (t))
1591 ix = 0;
1592 else if (!wi::neg_p (t))
1593 {
1594 if (prec < HOST_BITS_PER_WIDE_INT)
1595 {
1596 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1597 ix = tree_to_shwi (t) + 1;
1598 }
1599 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1600 ix = tree_to_shwi (t) + 1;
1601 }
1602 }
1603 break;
1604
1605 case ENUMERAL_TYPE:
1606 break;
1607
1608 default:
1609 gcc_unreachable ();
1610 }
1611
1612 if (ix >= 0)
1613 {
1614 /* Look for it in the type's vector of small shared ints. */
1615 if (!TYPE_CACHED_VALUES_P (type))
1616 {
1617 TYPE_CACHED_VALUES_P (type) = 1;
1618 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1619 }
1620
1621 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1622 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1623 }
1624 else
1625 {
1626 /* Use the cache of larger shared ints. */
1627 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1628 /* If there is already an entry for the number verify it's the
1629 same. */
1630 if (*slot)
1631 gcc_assert (wi::eq_p (tree (*slot), t));
1632 else
1633 /* Otherwise insert this one into the hash table. */
1634 *slot = t;
1635 }
1636 }
1637
1638
1639 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1640 and the rest are zeros. */
1641
1642 tree
1643 build_low_bits_mask (tree type, unsigned bits)
1644 {
1645 gcc_assert (bits <= TYPE_PRECISION (type));
1646
1647 return wide_int_to_tree (type, wi::mask (bits, false,
1648 TYPE_PRECISION (type)));
1649 }
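
/* Illustrative sketch (editorial addition):

     tree mask = build_low_bits_mask (unsigned_type_node, 4);

   yields the INTEGER_CST 0xf of type unsigned int.  */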
1650
1651 /* Return true if X is an integer constant that can be represented in an
1652 (unsigned) HOST_WIDE_INT without loss of precision. */
1653
1654 bool
1655 cst_and_fits_in_hwi (const_tree x)
1656 {
1657 if (TREE_CODE (x) != INTEGER_CST)
1658 return false;
1659
1660 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1661 return false;
1662
1663 return TREE_INT_CST_NUNITS (x) == 1;
1664 }
1665
1666 /* Build a newly constructed VECTOR_CST node with room for LEN elements. */
1667
1668 tree
1669 make_vector_stat (unsigned len MEM_STAT_DECL)
1670 {
1671 tree t;
1672 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1673
1674 record_node_allocation_statistics (VECTOR_CST, length);
1675
1676 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1677
1678 TREE_SET_CODE (t, VECTOR_CST);
1679 TREE_CONSTANT (t) = 1;
1680
1681 return t;
1682 }
1683
1684 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1685 are in a list pointed to by VALS. */
1686
1687 tree
1688 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1689 {
1690 int over = 0;
1691 unsigned cnt = 0;
1692 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1693 TREE_TYPE (v) = type;
1694
1695 /* Iterate through elements and check for overflow. */
1696 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1697 {
1698 tree value = vals[cnt];
1699
1700 VECTOR_CST_ELT (v, cnt) = value;
1701
1702 /* Don't crash if we get an address constant. */
1703 if (!CONSTANT_CLASS_P (value))
1704 continue;
1705
1706 over |= TREE_OVERFLOW (value);
1707 }
1708
1709 TREE_OVERFLOW (v) = over;
1710 return v;
1711 }
1712
1713 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1714 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1715
1716 tree
1717 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1718 {
1719 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1720 unsigned HOST_WIDE_INT idx;
1721 tree value;
1722
1723 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1724 vec[idx] = value;
1725 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1726 vec[idx] = build_zero_cst (TREE_TYPE (type));
1727
1728 return build_vector (type, vec);
1729 }
1730
1731 /* Build a vector of type VECTYPE where all the elements are copies of SC. */
1732 tree
1733 build_vector_from_val (tree vectype, tree sc)
1734 {
1735 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1736
1737 if (sc == error_mark_node)
1738 return sc;
1739
1740 /* Verify that the vector type is suitable for SC. Note that there
1741 is some inconsistency in the type-system with respect to restrict
1742 qualifications of pointers. Vector types always have a main-variant
1743 element type and the qualification is applied to the vector-type.
1744 So TREE_TYPE (vector-type) does not return a properly qualified
1745 vector element-type. */
1746 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1747 TREE_TYPE (vectype)));
1748
1749 if (CONSTANT_CLASS_P (sc))
1750 {
1751 tree *v = XALLOCAVEC (tree, nunits);
1752 for (i = 0; i < nunits; ++i)
1753 v[i] = sc;
1754 return build_vector (vectype, v);
1755 }
1756 else
1757 {
1758 vec<constructor_elt, va_gc> *v;
1759 vec_alloc (v, nunits);
1760 for (i = 0; i < nunits; ++i)
1761 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1762 return build_constructor (vectype, v);
1763 }
1764 }
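
/* Illustrative sketch (editorial addition): splatting a scalar across a
   vector type; a constant scalar gives a VECTOR_CST, anything else a
   CONSTRUCTOR:

     tree v4si = build_vector_type (integer_type_node, 4);
     tree splat = build_vector_from_val (v4si,
                                         build_int_cst (integer_type_node, 3));

   SPLAT is a VECTOR_CST whose four elements are all 3.  */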
1765
1766 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1767 are in the vec pointed to by VALS. */
1768 tree
1769 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1770 {
1771 tree c = make_node (CONSTRUCTOR);
1772 unsigned int i;
1773 constructor_elt *elt;
1774 bool constant_p = true;
1775 bool side_effects_p = false;
1776
1777 TREE_TYPE (c) = type;
1778 CONSTRUCTOR_ELTS (c) = vals;
1779
1780 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1781 {
1782 /* Mostly ctors will have elts that don't have side-effects, so
1783 the usual case is to scan all the elements. Hence a single
1784 loop for both const and side effects, rather than one loop
1785 each (with early outs). */
1786 if (!TREE_CONSTANT (elt->value))
1787 constant_p = false;
1788 if (TREE_SIDE_EFFECTS (elt->value))
1789 side_effects_p = true;
1790 }
1791
1792 TREE_SIDE_EFFECTS (c) = side_effects_p;
1793 TREE_CONSTANT (c) = constant_p;
1794
1795 return c;
1796 }
1797
1798 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1799 INDEX and VALUE. */
1800 tree
1801 build_constructor_single (tree type, tree index, tree value)
1802 {
1803 vec<constructor_elt, va_gc> *v;
1804 constructor_elt elt = {index, value};
1805
1806 vec_alloc (v, 1);
1807 v->quick_push (elt);
1808
1809 return build_constructor (type, v);
1810 }
1811
1812
1813 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1814 are in a list pointed to by VALS. */
1815 tree
1816 build_constructor_from_list (tree type, tree vals)
1817 {
1818 tree t;
1819 vec<constructor_elt, va_gc> *v = NULL;
1820
1821 if (vals)
1822 {
1823 vec_alloc (v, list_length (vals));
1824 for (t = vals; t; t = TREE_CHAIN (t))
1825 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1826 }
1827
1828 return build_constructor (type, v);
1829 }
1830
1831 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1832 of elements, provided as index/value pairs. */
1833
1834 tree
1835 build_constructor_va (tree type, int nelts, ...)
1836 {
1837 vec<constructor_elt, va_gc> *v = NULL;
1838 va_list p;
1839
1840 va_start (p, nelts);
1841 vec_alloc (v, nelts);
1842 while (nelts--)
1843 {
1844 tree index = va_arg (p, tree);
1845 tree value = va_arg (p, tree);
1846 CONSTRUCTOR_APPEND_ELT (v, index, value);
1847 }
1848 va_end (p);
1849 return build_constructor (type, v);
1850 }
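
/* Illustrative sketch (editorial addition): building the initializer
   { [0] = 1, [1] = 2 } for an assumed array type ARRAY_TYPE:

     tree ctor = build_constructor_va (array_type, 2,
                                       size_int (0), integer_one_node,
                                       size_int (1),
                                       build_int_cst (integer_type_node, 2));

   TREE_CONSTANT (ctor) ends up set because every value is constant.  */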
1851
1852 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1853
1854 tree
1855 build_fixed (tree type, FIXED_VALUE_TYPE f)
1856 {
1857 tree v;
1858 FIXED_VALUE_TYPE *fp;
1859
1860 v = make_node (FIXED_CST);
1861 fp = ggc_alloc<fixed_value> ();
1862 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1863
1864 TREE_TYPE (v) = type;
1865 TREE_FIXED_CST_PTR (v) = fp;
1866 return v;
1867 }
1868
1869 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1870
1871 tree
1872 build_real (tree type, REAL_VALUE_TYPE d)
1873 {
1874 tree v;
1875 REAL_VALUE_TYPE *dp;
1876 int overflow = 0;
1877
1878 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1879 Consider doing it via real_convert now. */
1880
1881 v = make_node (REAL_CST);
1882 dp = ggc_alloc<real_value> ();
1883 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1884
1885 TREE_TYPE (v) = type;
1886 TREE_REAL_CST_PTR (v) = dp;
1887 TREE_OVERFLOW (v) = overflow;
1888 return v;
1889 }
1890
1891 /* Return the REAL_VALUE_TYPE corresponding to the integer value of the
1892 INTEGER_CST node I, interpreted in the mode of type TYPE. */
1893
1894 REAL_VALUE_TYPE
1895 real_value_from_int_cst (const_tree type, const_tree i)
1896 {
1897 REAL_VALUE_TYPE d;
1898
1899 /* Clear all bits of the real value type so that we can later do
1900 bitwise comparisons to see if two values are the same. */
1901 memset (&d, 0, sizeof d);
1902
1903 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1904 TYPE_SIGN (TREE_TYPE (i)));
1905 return d;
1906 }
1907
1908 /* Given a tree representing an integer constant I, return a tree
1909 representing the same value as a floating-point constant of type TYPE. */
1910
1911 tree
1912 build_real_from_int_cst (tree type, const_tree i)
1913 {
1914 tree v;
1915 int overflow = TREE_OVERFLOW (i);
1916
1917 v = build_real (type, real_value_from_int_cst (type, i));
1918
1919 TREE_OVERFLOW (v) |= overflow;
1920 return v;
1921 }
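
/* Illustrative sketch (editorial addition):

     tree five = build_int_cst (integer_type_node, 5);
     tree fived = build_real_from_int_cst (double_type_node, five);

   FIVED is a REAL_CST of type double with value 5.0.  */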
1922
1923 /* Return a newly constructed STRING_CST node whose value is
1924 the LEN characters at STR.
1925 Note that for a C string literal, LEN should include the trailing NUL.
1926 The TREE_TYPE is not initialized. */
1927
1928 tree
1929 build_string (int len, const char *str)
1930 {
1931 tree s;
1932 size_t length;
1933
1934 /* Do not waste bytes provided by padding of struct tree_string. */
1935 length = len + offsetof (struct tree_string, str) + 1;
1936
1937 record_node_allocation_statistics (STRING_CST, length);
1938
1939 s = (tree) ggc_internal_alloc (length);
1940
1941 memset (s, 0, sizeof (struct tree_typed));
1942 TREE_SET_CODE (s, STRING_CST);
1943 TREE_CONSTANT (s) = 1;
1944 TREE_STRING_LENGTH (s) = len;
1945 memcpy (s->string.str, str, len);
1946 s->string.str[len] = '\0';
1947
1948 return s;
1949 }
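
/* Illustrative sketch (editorial addition): the caller must set
   TREE_TYPE, and LEN for a C literal includes the trailing NUL:

     tree s = build_string (6, "hello");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (5)));

   which mirrors, roughly, how front ends type a 6-byte string
   literal.  */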
1950
1951 /* Return a newly constructed COMPLEX_CST node whose value is
1952 specified by the real and imaginary parts REAL and IMAG.
1953 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1954 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1955
1956 tree
1957 build_complex (tree type, tree real, tree imag)
1958 {
1959 tree t = make_node (COMPLEX_CST);
1960
1961 TREE_REALPART (t) = real;
1962 TREE_IMAGPART (t) = imag;
1963 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1964 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1965 return t;
1966 }
1967
1968 /* Return a constant of arithmetic type TYPE which is the
1969 multiplicative identity of the set TYPE. */
1970
1971 tree
1972 build_one_cst (tree type)
1973 {
1974 switch (TREE_CODE (type))
1975 {
1976 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1977 case POINTER_TYPE: case REFERENCE_TYPE:
1978 case OFFSET_TYPE:
1979 return build_int_cst (type, 1);
1980
1981 case REAL_TYPE:
1982 return build_real (type, dconst1);
1983
1984 case FIXED_POINT_TYPE:
1985 /* We can only generate 1 for accum types. */
1986 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1987 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1988
1989 case VECTOR_TYPE:
1990 {
1991 tree scalar = build_one_cst (TREE_TYPE (type));
1992
1993 return build_vector_from_val (type, scalar);
1994 }
1995
1996 case COMPLEX_TYPE:
1997 return build_complex (type,
1998 build_one_cst (TREE_TYPE (type)),
1999 build_zero_cst (TREE_TYPE (type)));
2000
2001 default:
2002 gcc_unreachable ();
2003 }
2004 }
2005
2006 /* Return an integer constant of type TYPE with all of its bits set, or a
2007 complex or vector constant whose elements are such integers. */
2008
2009 tree
2010 build_all_ones_cst (tree type)
2011 {
2012 if (TREE_CODE (type) == COMPLEX_TYPE)
2013 {
2014 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2015 return build_complex (type, scalar, scalar);
2016 }
2017 else
2018 return build_minus_one_cst (type);
2019 }
2020
2021 /* Return a constant of arithmetic type TYPE which is the
2022 opposite of the multiplicative identity of the set TYPE. */
2023
2024 tree
2025 build_minus_one_cst (tree type)
2026 {
2027 switch (TREE_CODE (type))
2028 {
2029 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2030 case POINTER_TYPE: case REFERENCE_TYPE:
2031 case OFFSET_TYPE:
2032 return build_int_cst (type, -1);
2033
2034 case REAL_TYPE:
2035 return build_real (type, dconstm1);
2036
2037 case FIXED_POINT_TYPE:
2038 /* We can only generate -1 for accum types. */
2039 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2040 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2041 TYPE_MODE (type)));
2042
2043 case VECTOR_TYPE:
2044 {
2045 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2046
2047 return build_vector_from_val (type, scalar);
2048 }
2049
2050 case COMPLEX_TYPE:
2051 return build_complex (type,
2052 build_minus_one_cst (TREE_TYPE (type)),
2053 build_zero_cst (TREE_TYPE (type)));
2054
2055 default:
2056 gcc_unreachable ();
2057 }
2058 }
2059
2060 /* Build 0 constant of type TYPE. This is used by constructor folding
2061 and thus the constant should be represented in memory by
2062 zero(es). */
2063
2064 tree
2065 build_zero_cst (tree type)
2066 {
2067 switch (TREE_CODE (type))
2068 {
2069 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2070 case POINTER_TYPE: case REFERENCE_TYPE:
2071 case OFFSET_TYPE: case NULLPTR_TYPE:
2072 return build_int_cst (type, 0);
2073
2074 case REAL_TYPE:
2075 return build_real (type, dconst0);
2076
2077 case FIXED_POINT_TYPE:
2078 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2079
2080 case VECTOR_TYPE:
2081 {
2082 tree scalar = build_zero_cst (TREE_TYPE (type));
2083
2084 return build_vector_from_val (type, scalar);
2085 }
2086
2087 case COMPLEX_TYPE:
2088 {
2089 tree zero = build_zero_cst (TREE_TYPE (type));
2090
2091 return build_complex (type, zero, zero);
2092 }
2093
2094 default:
2095 if (!AGGREGATE_TYPE_P (type))
2096 return fold_convert (type, integer_zero_node);
2097 return build_constructor (type, NULL);
2098 }
2099 }
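
/* Usage sketch (editor's illustration): the three builders above cover
   the common 0, 1 and -1 constants for any arithmetic type, recursing
   on the element type for vector and complex types:

     tree zero = build_zero_cst (integer_type_node);       an INTEGER_CST 0
     tree one  = build_one_cst (double_type_node);         a REAL_CST 1.0
     tree mone = build_minus_one_cst (integer_type_node);  an INTEGER_CST -1

   integer_type_node and double_type_node are the standard global type
   nodes set up during tree initialization.  */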
2100
2101
2102 /* Build a BINFO with BASE_BINFOS slots in its base-binfo vector. */
2103
2104 tree
2105 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2106 {
2107 tree t;
2108 size_t length = (offsetof (struct tree_binfo, base_binfos)
2109 + vec<tree, va_gc>::embedded_size (base_binfos));
2110
2111 record_node_allocation_statistics (TREE_BINFO, length);
2112
2113 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2114
2115 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2116
2117 TREE_SET_CODE (t, TREE_BINFO);
2118
2119 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2120
2121 return t;
2122 }
2123
2124 /* Create a CASE_LABEL_EXPR tree node and return it. */
2125
2126 tree
2127 build_case_label (tree low_value, tree high_value, tree label_decl)
2128 {
2129 tree t = make_node (CASE_LABEL_EXPR);
2130
2131 TREE_TYPE (t) = void_type_node;
2132 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2133
2134 CASE_LOW (t) = low_value;
2135 CASE_HIGH (t) = high_value;
2136 CASE_LABEL (t) = label_decl;
2137 CASE_CHAIN (t) = NULL_TREE;
2138
2139 return t;
2140 }
2141
2142 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2143 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2144 The latter determines the length of the HOST_WIDE_INT vector. */
2145
2146 tree
2147 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2148 {
2149 tree t;
2150 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2151 + sizeof (struct tree_int_cst));
2152
2153 gcc_assert (len);
2154 record_node_allocation_statistics (INTEGER_CST, length);
2155
2156 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2157
2158 TREE_SET_CODE (t, INTEGER_CST);
2159 TREE_INT_CST_NUNITS (t) = len;
2160 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2161 /* to_offset can only be applied to trees that are offset_int-sized
2162 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2163 must be exactly the precision of offset_int and so LEN is correct. */
2164 if (ext_len <= OFFSET_INT_ELTS)
2165 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2166 else
2167 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2168
2169 TREE_CONSTANT (t) = 1;
2170
2171 return t;
2172 }
2173
2174 /* Build a newly constructed TREE_VEC node of length LEN. */
2175
2176 tree
2177 make_tree_vec_stat (int len MEM_STAT_DECL)
2178 {
2179 tree t;
2180 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2181
2182 record_node_allocation_statistics (TREE_VEC, length);
2183
2184 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2185
2186 TREE_SET_CODE (t, TREE_VEC);
2187 TREE_VEC_LENGTH (t) = len;
2188
2189 return t;
2190 }
2191
2192 /* Grow a TREE_VEC node to new length LEN. */
2193
2194 tree
2195 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2196 {
2197 gcc_assert (TREE_CODE (v) == TREE_VEC);
2198
2199 int oldlen = TREE_VEC_LENGTH (v);
2200 gcc_assert (len > oldlen);
2201
2202 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2203 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2204
2205 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2206
2207 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2208
2209 TREE_VEC_LENGTH (v) = len;
2210
2211 return v;
2212 }
2213 \f
2214 /* Return 1 if EXPR is the integer constant zero or a complex constant
2215 of zero. */
2216
2217 int
2218 integer_zerop (const_tree expr)
2219 {
2220 STRIP_NOPS (expr);
2221
2222 switch (TREE_CODE (expr))
2223 {
2224 case INTEGER_CST:
2225 return wi::eq_p (expr, 0);
2226 case COMPLEX_CST:
2227 return (integer_zerop (TREE_REALPART (expr))
2228 && integer_zerop (TREE_IMAGPART (expr)));
2229 case VECTOR_CST:
2230 {
2231 unsigned i;
2232 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2233 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2234 return false;
2235 return true;
2236 }
2237 default:
2238 return false;
2239 }
2240 }
2241
2242 /* Return 1 if EXPR is the integer constant one or the corresponding
2243 complex constant. */
2244
2245 int
2246 integer_onep (const_tree expr)
2247 {
2248 STRIP_NOPS (expr);
2249
2250 switch (TREE_CODE (expr))
2251 {
2252 case INTEGER_CST:
2253 return wi::eq_p (wi::to_widest (expr), 1);
2254 case COMPLEX_CST:
2255 return (integer_onep (TREE_REALPART (expr))
2256 && integer_zerop (TREE_IMAGPART (expr)));
2257 case VECTOR_CST:
2258 {
2259 unsigned i;
2260 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2261 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2262 return false;
2263 return true;
2264 }
2265 default:
2266 return false;
2267 }
2268 }
2269
2270 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2271 return 1 if every piece is the integer constant one. */
2272
2273 int
2274 integer_each_onep (const_tree expr)
2275 {
2276 STRIP_NOPS (expr);
2277
2278 if (TREE_CODE (expr) == COMPLEX_CST)
2279 return (integer_onep (TREE_REALPART (expr))
2280 && integer_onep (TREE_IMAGPART (expr)));
2281 else
2282 return integer_onep (expr);
2283 }
2284
2285 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2286 it contains, or a complex or vector whose subparts are such integers. */
2287
2288 int
2289 integer_all_onesp (const_tree expr)
2290 {
2291 STRIP_NOPS (expr);
2292
2293 if (TREE_CODE (expr) == COMPLEX_CST
2294 && integer_all_onesp (TREE_REALPART (expr))
2295 && integer_all_onesp (TREE_IMAGPART (expr)))
2296 return 1;
2297
2298 else if (TREE_CODE (expr) == VECTOR_CST)
2299 {
2300 unsigned i;
2301 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2302 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2303 return 0;
2304 return 1;
2305 }
2306
2307 else if (TREE_CODE (expr) != INTEGER_CST)
2308 return 0;
2309
2310 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2311 }
2312
2313 /* Return 1 if EXPR is the integer constant minus one. */
2314
2315 int
2316 integer_minus_onep (const_tree expr)
2317 {
2318 STRIP_NOPS (expr);
2319
2320 if (TREE_CODE (expr) == COMPLEX_CST)
2321 return (integer_all_onesp (TREE_REALPART (expr))
2322 && integer_zerop (TREE_IMAGPART (expr)));
2323 else
2324 return integer_all_onesp (expr);
2325 }
2326
2327 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2328 one bit on). */
2329
2330 int
2331 integer_pow2p (const_tree expr)
2332 {
2333 STRIP_NOPS (expr);
2334
2335 if (TREE_CODE (expr) == COMPLEX_CST
2336 && integer_pow2p (TREE_REALPART (expr))
2337 && integer_zerop (TREE_IMAGPART (expr)))
2338 return 1;
2339
2340 if (TREE_CODE (expr) != INTEGER_CST)
2341 return 0;
2342
2343 return wi::popcount (expr) == 1;
2344 }
2345
2346 /* Return 1 if EXPR is an integer constant other than zero or a
2347 complex constant other than zero. */
2348
2349 int
2350 integer_nonzerop (const_tree expr)
2351 {
2352 STRIP_NOPS (expr);
2353
2354 return ((TREE_CODE (expr) == INTEGER_CST
2355 && !wi::eq_p (expr, 0))
2356 || (TREE_CODE (expr) == COMPLEX_CST
2357 && (integer_nonzerop (TREE_REALPART (expr))
2358 || integer_nonzerop (TREE_IMAGPART (expr)))));
2359 }
2360
2361 /* Return 1 if EXPR is the integer constant one. For vector,
2362 return 1 if every piece is the integer constant minus one
2363 (representing the value TRUE). */
2364
2365 int
2366 integer_truep (const_tree expr)
2367 {
2368 STRIP_NOPS (expr);
2369
2370 if (TREE_CODE (expr) == VECTOR_CST)
2371 return integer_all_onesp (expr);
2372 return integer_onep (expr);
2373 }
2374
2375 /* Return 1 if EXPR is the fixed-point constant zero. */
2376
2377 int
2378 fixed_zerop (const_tree expr)
2379 {
2380 return (TREE_CODE (expr) == FIXED_CST
2381 && TREE_FIXED_CST (expr).data.is_zero ());
2382 }
2383
2384 /* Return the base-2 logarithm of a tree node known to be a
2385 power of two. */
2386
2387 int
2388 tree_log2 (const_tree expr)
2389 {
2390 STRIP_NOPS (expr);
2391
2392 if (TREE_CODE (expr) == COMPLEX_CST)
2393 return tree_log2 (TREE_REALPART (expr));
2394
2395 return wi::exact_log2 (expr);
2396 }
2397
2398 /* Similar, but return the largest integer Y such that 2 ** Y is less
2399 than or equal to EXPR. */
2400
2401 int
2402 tree_floor_log2 (const_tree expr)
2403 {
2404 STRIP_NOPS (expr);
2405
2406 if (TREE_CODE (expr) == COMPLEX_CST)
2407 return tree_log2 (TREE_REALPART (expr));
2408
2409 return wi::floor_log2 (expr);
2410 }
2411
2412 /* Return number of known trailing zero bits in EXPR, or, if the value of
2413 EXPR is known to be zero, the precision of its type. */
2414
2415 unsigned int
2416 tree_ctz (const_tree expr)
2417 {
2418 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2419 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2420 return 0;
2421
2422 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2423 switch (TREE_CODE (expr))
2424 {
2425 case INTEGER_CST:
2426 ret1 = wi::ctz (expr);
2427 return MIN (ret1, prec);
2428 case SSA_NAME:
2429 ret1 = wi::ctz (get_nonzero_bits (expr));
2430 return MIN (ret1, prec);
2431 case PLUS_EXPR:
2432 case MINUS_EXPR:
2433 case BIT_IOR_EXPR:
2434 case BIT_XOR_EXPR:
2435 case MIN_EXPR:
2436 case MAX_EXPR:
2437 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2438 if (ret1 == 0)
2439 return ret1;
2440 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2441 return MIN (ret1, ret2);
2442 case POINTER_PLUS_EXPR:
2443 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2444 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2445 /* Second operand is sizetype, which could be in theory
2446 wider than pointer's precision. Make sure we never
2447 return more than prec. */
2448 ret2 = MIN (ret2, prec);
2449 return MIN (ret1, ret2);
2450 case BIT_AND_EXPR:
2451 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2452 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2453 return MAX (ret1, ret2);
2454 case MULT_EXPR:
2455 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2456 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2457 return MIN (ret1 + ret2, prec);
2458 case LSHIFT_EXPR:
2459 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2460 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2461 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2462 {
2463 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2464 return MIN (ret1 + ret2, prec);
2465 }
2466 return ret1;
2467 case RSHIFT_EXPR:
2468 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2469 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2470 {
2471 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2472 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2473 if (ret1 > ret2)
2474 return ret1 - ret2;
2475 }
2476 return 0;
2477 case TRUNC_DIV_EXPR:
2478 case CEIL_DIV_EXPR:
2479 case FLOOR_DIV_EXPR:
2480 case ROUND_DIV_EXPR:
2481 case EXACT_DIV_EXPR:
2482 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2483 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2484 {
2485 int l = tree_log2 (TREE_OPERAND (expr, 1));
2486 if (l >= 0)
2487 {
2488 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2489 ret2 = l;
2490 if (ret1 > ret2)
2491 return ret1 - ret2;
2492 }
2493 }
2494 return 0;
2495 CASE_CONVERT:
2496 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2497 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2498 ret1 = prec;
2499 return MIN (ret1, prec);
2500 case SAVE_EXPR:
2501 return tree_ctz (TREE_OPERAND (expr, 0));
2502 case COND_EXPR:
2503 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2504 if (ret1 == 0)
2505 return 0;
2506 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2507 return MIN (ret1, ret2);
2508 case COMPOUND_EXPR:
2509 return tree_ctz (TREE_OPERAND (expr, 1));
2510 case ADDR_EXPR:
2511 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2512 if (ret1 > BITS_PER_UNIT)
2513 {
2514 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2515 return MIN (ret1, prec);
2516 }
2517 return 0;
2518 default:
2519 return 0;
2520 }
2521 }
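
/* Worked example (editor's note): for a 32-bit integer X whose
   get_nonzero_bits information shows it to be a multiple of 8, so that
   tree_ctz (X) == 3, the recursion above gives

     tree_ctz (X * 4)  == 5   MULT_EXPR adds the counts, 3 + 2
     tree_ctz (X + 20) == 2   PLUS_EXPR takes the minimum, min (3, 2)
     tree_ctz (X & 48) == 4   BIT_AND_EXPR takes the maximum, max (3, 4)
     tree_ctz (X >> 1) == 2   RSHIFT_EXPR subtracts the shift count

   with every result clamped to the precision of the type.  */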
2522
2523 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2524 decimal float constants, so don't return 1 for them. */
2525
2526 int
2527 real_zerop (const_tree expr)
2528 {
2529 STRIP_NOPS (expr);
2530
2531 switch (TREE_CODE (expr))
2532 {
2533 case REAL_CST:
2534 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2535 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2536 case COMPLEX_CST:
2537 return real_zerop (TREE_REALPART (expr))
2538 && real_zerop (TREE_IMAGPART (expr));
2539 case VECTOR_CST:
2540 {
2541 unsigned i;
2542 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2543 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2544 return false;
2545 return true;
2546 }
2547 default:
2548 return false;
2549 }
2550 }
2551
2552 /* Return 1 if EXPR is the real constant one in real or complex form.
2553 Trailing zeroes matter for decimal float constants, so don't return
2554 1 for them. */
2555
2556 int
2557 real_onep (const_tree expr)
2558 {
2559 STRIP_NOPS (expr);
2560
2561 switch (TREE_CODE (expr))
2562 {
2563 case REAL_CST:
2564 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2565 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2566 case COMPLEX_CST:
2567 return real_onep (TREE_REALPART (expr))
2568 && real_zerop (TREE_IMAGPART (expr));
2569 case VECTOR_CST:
2570 {
2571 unsigned i;
2572 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2573 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2574 return false;
2575 return true;
2576 }
2577 default:
2578 return false;
2579 }
2580 }
2581
2582 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2583 matter for decimal float constants, so don't return 1 for them. */
2584
2585 int
2586 real_minus_onep (const_tree expr)
2587 {
2588 STRIP_NOPS (expr);
2589
2590 switch (TREE_CODE (expr))
2591 {
2592 case REAL_CST:
2593 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2594 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2595 case COMPLEX_CST:
2596 return real_minus_onep (TREE_REALPART (expr))
2597 && real_zerop (TREE_IMAGPART (expr));
2598 case VECTOR_CST:
2599 {
2600 unsigned i;
2601 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2602 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2603 return false;
2604 return true;
2605 }
2606 default:
2607 return false;
2608 }
2609 }
2610
2611 /* Nonzero if EXP is a constant or a cast of a constant. */
2612
2613 int
2614 really_constant_p (const_tree exp)
2615 {
2616 /* This is not quite the same as STRIP_NOPS. It does more. */
2617 while (CONVERT_EXPR_P (exp)
2618 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2619 exp = TREE_OPERAND (exp, 0);
2620 return TREE_CONSTANT (exp);
2621 }
2622 \f
2623 /* Return first list element whose TREE_VALUE is ELEM.
2624 Return 0 if ELEM is not in LIST. */
2625
2626 tree
2627 value_member (tree elem, tree list)
2628 {
2629 while (list)
2630 {
2631 if (elem == TREE_VALUE (list))
2632 return list;
2633 list = TREE_CHAIN (list);
2634 }
2635 return NULL_TREE;
2636 }
2637
2638 /* Return first list element whose TREE_PURPOSE is ELEM.
2639 Return 0 if ELEM is not in LIST. */
2640
2641 tree
2642 purpose_member (const_tree elem, tree list)
2643 {
2644 while (list)
2645 {
2646 if (elem == TREE_PURPOSE (list))
2647 return list;
2648 list = TREE_CHAIN (list);
2649 }
2650 return NULL_TREE;
2651 }
2652
2653 /* Return true if ELEM is in V. */
2654
2655 bool
2656 vec_member (const_tree elem, vec<tree, va_gc> *v)
2657 {
2658 unsigned ix;
2659 tree t;
2660 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2661 if (elem == t)
2662 return true;
2663 return false;
2664 }
2665
2666 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2667 NULL_TREE. */
2668
2669 tree
2670 chain_index (int idx, tree chain)
2671 {
2672 for (; chain && idx > 0; --idx)
2673 chain = TREE_CHAIN (chain);
2674 return chain;
2675 }
2676
2677 /* Return nonzero if ELEM is part of the chain CHAIN. */
2678
2679 int
2680 chain_member (const_tree elem, const_tree chain)
2681 {
2682 while (chain)
2683 {
2684 if (elem == chain)
2685 return 1;
2686 chain = DECL_CHAIN (chain);
2687 }
2688
2689 return 0;
2690 }
2691
2692 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2693 We expect a null pointer to mark the end of the chain.
2694 This is the Lisp primitive `length'. */
2695
2696 int
2697 list_length (const_tree t)
2698 {
2699 const_tree p = t;
2700 #ifdef ENABLE_TREE_CHECKING
2701 const_tree q = t;
2702 #endif
2703 int len = 0;
2704
2705 while (p)
2706 {
2707 p = TREE_CHAIN (p);
2708 #ifdef ENABLE_TREE_CHECKING
2709 if (len % 2)
2710 q = TREE_CHAIN (q);
2711 gcc_assert (p != q);
2712 #endif
2713 len++;
2714 }
2715
2716 return len;
2717 }
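
/* Editor's note: the ENABLE_TREE_CHECKING code above is a
   tortoise-and-hare cycle check.  Q advances one link for every two
   links P advances, so if the chain is circular P eventually catches up
   with Q and the assertion fires instead of looping forever.  */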
2718
2719 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2720 UNION_TYPE TYPE, or NULL_TREE if none. */
2721
2722 tree
2723 first_field (const_tree type)
2724 {
2725 tree t = TYPE_FIELDS (type);
2726 while (t && TREE_CODE (t) != FIELD_DECL)
2727 t = TREE_CHAIN (t);
2728 return t;
2729 }
2730
2731 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2732 by modifying the last node in chain 1 to point to chain 2.
2733 This is the Lisp primitive `nconc'. */
2734
2735 tree
2736 chainon (tree op1, tree op2)
2737 {
2738 tree t1;
2739
2740 if (!op1)
2741 return op2;
2742 if (!op2)
2743 return op1;
2744
2745 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2746 continue;
2747 TREE_CHAIN (t1) = op2;
2748
2749 #ifdef ENABLE_TREE_CHECKING
2750 {
2751 tree t2;
2752 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2753 gcc_assert (t2 != t1);
2754 }
2755 #endif
2756
2757 return op1;
2758 }
2759
2760 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2761
2762 tree
2763 tree_last (tree chain)
2764 {
2765 tree next;
2766 if (chain)
2767 while ((next = TREE_CHAIN (chain)))
2768 chain = next;
2769 return chain;
2770 }
2771
2772 /* Reverse the order of elements in the chain T,
2773 and return the new head of the chain (old last element). */
2774
2775 tree
2776 nreverse (tree t)
2777 {
2778 tree prev = 0, decl, next;
2779 for (decl = t; decl; decl = next)
2780 {
2781 /* We shouldn't be using this function to reverse BLOCK chains; we
2782 have blocks_nreverse for that. */
2783 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2784 next = TREE_CHAIN (decl);
2785 TREE_CHAIN (decl) = prev;
2786 prev = decl;
2787 }
2788 return prev;
2789 }
2790 \f
2791 /* Return a newly created TREE_LIST node whose
2792 purpose and value fields are PARM and VALUE. */
2793
2794 tree
2795 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2796 {
2797 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2798 TREE_PURPOSE (t) = parm;
2799 TREE_VALUE (t) = value;
2800 return t;
2801 }
2802
2803 /* Build a chain of TREE_LIST nodes from a vector. */
2804
2805 tree
2806 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2807 {
2808 tree ret = NULL_TREE;
2809 tree *pp = &ret;
2810 unsigned int i;
2811 tree t;
2812 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2813 {
2814 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2815 pp = &TREE_CHAIN (*pp);
2816 }
2817 return ret;
2818 }
2819
2820 /* Return a newly created TREE_LIST node whose
2821 purpose and value fields are PURPOSE and VALUE
2822 and whose TREE_CHAIN is CHAIN. */
2823
2824 tree
2825 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2826 {
2827 tree node;
2828
2829 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2830 memset (node, 0, sizeof (struct tree_common));
2831
2832 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2833
2834 TREE_SET_CODE (node, TREE_LIST);
2835 TREE_CHAIN (node) = chain;
2836 TREE_PURPOSE (node) = purpose;
2837 TREE_VALUE (node) = value;
2838 return node;
2839 }
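
/* Usage sketch (editor's illustration): TREE_LIST chains built with
   tree_cons grow at the front, so callers frequently reverse them once
   complete:

     tree list = NULL_TREE;
     list = tree_cons (NULL_TREE, elt1, list);
     list = tree_cons (NULL_TREE, elt2, list);
     list = nreverse (list);

   after which list_length (list) == 2 and TREE_VALUE (list) == elt1.
   elt1 and elt2 stand for arbitrary trees.  */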
2840
2841 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2842 trees. */
2843
2844 vec<tree, va_gc> *
2845 ctor_to_vec (tree ctor)
2846 {
2847 vec<tree, va_gc> *vec;
2848 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2849 unsigned int ix;
2850 tree val;
2851
2852 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2853 vec->quick_push (val);
2854
2855 return vec;
2856 }
2857 \f
2858 /* Return the size nominally occupied by an object of type TYPE
2859 when it resides in memory. The value is measured in units of bytes,
2860 and its data type is that normally used for type sizes
2861 (which is the first type created by make_signed_type or
2862 make_unsigned_type). */
2863
2864 tree
2865 size_in_bytes (const_tree type)
2866 {
2867 tree t;
2868
2869 if (type == error_mark_node)
2870 return integer_zero_node;
2871
2872 type = TYPE_MAIN_VARIANT (type);
2873 t = TYPE_SIZE_UNIT (type);
2874
2875 if (t == 0)
2876 {
2877 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2878 return size_zero_node;
2879 }
2880
2881 return t;
2882 }
2883
2884 /* Return the size of TYPE (in bytes) as a wide integer
2885 or return -1 if the size can vary or is larger than an integer. */
2886
2887 HOST_WIDE_INT
2888 int_size_in_bytes (const_tree type)
2889 {
2890 tree t;
2891
2892 if (type == error_mark_node)
2893 return 0;
2894
2895 type = TYPE_MAIN_VARIANT (type);
2896 t = TYPE_SIZE_UNIT (type);
2897
2898 if (t && tree_fits_uhwi_p (t))
2899 return TREE_INT_CST_LOW (t);
2900 else
2901 return -1;
2902 }
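
/* Editor's note: the difference between the two queries above is what
   happens when no constant size is available.  For a variable-sized
   type, int_size_in_bytes returns -1 while size_in_bytes still yields
   the (non-constant) size tree; for an incomplete type,
   int_size_in_bytes returns -1 silently while size_in_bytes reports the
   problem through the incomplete_type_error language hook.  For an
   ordinary complete type both simply describe TYPE_SIZE_UNIT.  */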
2903
2904 /* Return the maximum size of TYPE (in bytes) as a wide integer
2905 or return -1 if the size can vary or is larger than an integer. */
2906
2907 HOST_WIDE_INT
2908 max_int_size_in_bytes (const_tree type)
2909 {
2910 HOST_WIDE_INT size = -1;
2911 tree size_tree;
2912
2913 /* If this is an array type, check for a possible MAX_SIZE attached. */
2914
2915 if (TREE_CODE (type) == ARRAY_TYPE)
2916 {
2917 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2918
2919 if (size_tree && tree_fits_uhwi_p (size_tree))
2920 size = tree_to_uhwi (size_tree);
2921 }
2922
2923 /* If we still haven't been able to get a size, see if the language
2924 can compute a maximum size. */
2925
2926 if (size == -1)
2927 {
2928 size_tree = lang_hooks.types.max_size (type);
2929
2930 if (size_tree && tree_fits_uhwi_p (size_tree))
2931 size = tree_to_uhwi (size_tree);
2932 }
2933
2934 return size;
2935 }
2936 \f
2937 /* Return the bit position of FIELD, in bits from the start of the record.
2938 This is a tree of type bitsizetype. */
2939
2940 tree
2941 bit_position (const_tree field)
2942 {
2943 return bit_from_pos (DECL_FIELD_OFFSET (field),
2944 DECL_FIELD_BIT_OFFSET (field));
2945 }
2946 \f
2947 /* Return the byte position of FIELD, in bytes from the start of the record.
2948 This is a tree of type sizetype. */
2949
2950 tree
2951 byte_position (const_tree field)
2952 {
2953 return byte_from_pos (DECL_FIELD_OFFSET (field),
2954 DECL_FIELD_BIT_OFFSET (field));
2955 }
2956
2957 /* Likewise, but return as an integer. It must be representable in
2958 that way (since it could be a signed value, we don't have the
2959 option of returning -1 like int_size_in_bytes can). */
2960
2961 HOST_WIDE_INT
2962 int_byte_position (const_tree field)
2963 {
2964 return tree_to_shwi (byte_position (field));
2965 }
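
/* Worked example (editor's note, assuming BITS_PER_UNIT == 8): for a
   FIELD_DECL laid out at DECL_FIELD_OFFSET == 8 bytes with
   DECL_FIELD_BIT_OFFSET == 0, bit_position yields the bitsizetype
   constant 64, byte_position the sizetype constant 8, and
   int_byte_position the plain HOST_WIDE_INT 8.  */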
2966 \f
2967 /* Return the strictest alignment, in bits, that T is known to have. */
2968
2969 unsigned int
2970 expr_align (const_tree t)
2971 {
2972 unsigned int align0, align1;
2973
2974 switch (TREE_CODE (t))
2975 {
2976 CASE_CONVERT: case NON_LVALUE_EXPR:
2977 /* If we have conversions, we know that the alignment of the
2978 object must meet each of the alignments of the types. */
2979 align0 = expr_align (TREE_OPERAND (t, 0));
2980 align1 = TYPE_ALIGN (TREE_TYPE (t));
2981 return MAX (align0, align1);
2982
2983 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2984 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2985 case CLEANUP_POINT_EXPR:
2986 /* These don't change the alignment of an object. */
2987 return expr_align (TREE_OPERAND (t, 0));
2988
2989 case COND_EXPR:
2990 /* The best we can do is say that the alignment is the least aligned
2991 of the two arms. */
2992 align0 = expr_align (TREE_OPERAND (t, 1));
2993 align1 = expr_align (TREE_OPERAND (t, 2));
2994 return MIN (align0, align1);
2995
2996 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2997 meaningfully; it's always 1. */
2998 case LABEL_DECL: case CONST_DECL:
2999 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3000 case FUNCTION_DECL:
3001 gcc_assert (DECL_ALIGN (t) != 0);
3002 return DECL_ALIGN (t);
3003
3004 default:
3005 break;
3006 }
3007
3008 /* Otherwise take the alignment from that of the type. */
3009 return TYPE_ALIGN (TREE_TYPE (t));
3010 }
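
/* Worked example (editor's note): on a typical target where
   TYPE_ALIGN (integer_type_node) == 32, the conversion case above means
   that for (int) c, where c is a char variable with DECL_ALIGN == 8,
   expr_align returns MAX (8, 32) == 32, since the value must satisfy
   both the object's and the target type's alignment.  */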
3011 \f
3012 /* Return, as a tree node, the number of elements for TYPE (which is an
3013 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3014
3015 tree
3016 array_type_nelts (const_tree type)
3017 {
3018 tree index_type, min, max;
3019
3020 /* If they did it with unspecified bounds, then we should have already
3021 given an error about it before we got here. */
3022 if (! TYPE_DOMAIN (type))
3023 return error_mark_node;
3024
3025 index_type = TYPE_DOMAIN (type);
3026 min = TYPE_MIN_VALUE (index_type);
3027 max = TYPE_MAX_VALUE (index_type);
3028
3029 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3030 if (!max)
3031 return error_mark_node;
3032
3033 return (integer_zerop (min)
3034 ? max
3035 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3036 }
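
/* Usage sketch (editor's illustration): for a C array type such as
   int a[10], TYPE_DOMAIN has a minimum of 0 and a maximum of 9, so
   array_type_nelts returns the INTEGER_CST 9; the number of elements is
   one more than the returned value, hence the "minus one" in the
   comment above.  */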
3037 \f
3038 /* If arg is static -- a reference to an object in static storage -- then
3039 return the object. This is not the same as the C meaning of `static'.
3040 If arg isn't static, return NULL. */
3041
3042 tree
3043 staticp (tree arg)
3044 {
3045 switch (TREE_CODE (arg))
3046 {
3047 case FUNCTION_DECL:
3048 /* Nested functions are static, even though taking their address will
3049 involve a trampoline as we unnest the nested function and create
3050 the trampoline on the tree level. */
3051 return arg;
3052
3053 case VAR_DECL:
3054 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3055 && ! DECL_THREAD_LOCAL_P (arg)
3056 && ! DECL_DLLIMPORT_P (arg)
3057 ? arg : NULL);
3058
3059 case CONST_DECL:
3060 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3061 ? arg : NULL);
3062
3063 case CONSTRUCTOR:
3064 return TREE_STATIC (arg) ? arg : NULL;
3065
3066 case LABEL_DECL:
3067 case STRING_CST:
3068 return arg;
3069
3070 case COMPONENT_REF:
3071 /* If the thing being referenced is not a field, then it is
3072 something language specific. */
3073 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3074
3075 /* If we are referencing a bitfield, we can't evaluate an
3076 ADDR_EXPR at compile time and so it isn't a constant. */
3077 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3078 return NULL;
3079
3080 return staticp (TREE_OPERAND (arg, 0));
3081
3082 case BIT_FIELD_REF:
3083 return NULL;
3084
3085 case INDIRECT_REF:
3086 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3087
3088 case ARRAY_REF:
3089 case ARRAY_RANGE_REF:
3090 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3091 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3092 return staticp (TREE_OPERAND (arg, 0));
3093 else
3094 return NULL;
3095
3096 case COMPOUND_LITERAL_EXPR:
3097 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3098
3099 default:
3100 return NULL;
3101 }
3102 }
3103
3104 \f
3105
3106
3107 /* Return whether OP is a DECL whose address is function-invariant. */
3108
3109 bool
3110 decl_address_invariant_p (const_tree op)
3111 {
3112 /* The conditions below are slightly less strict than the one in
3113 staticp. */
3114
3115 switch (TREE_CODE (op))
3116 {
3117 case PARM_DECL:
3118 case RESULT_DECL:
3119 case LABEL_DECL:
3120 case FUNCTION_DECL:
3121 return true;
3122
3123 case VAR_DECL:
3124 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3125 || DECL_THREAD_LOCAL_P (op)
3126 || DECL_CONTEXT (op) == current_function_decl
3127 || decl_function_context (op) == current_function_decl)
3128 return true;
3129 break;
3130
3131 case CONST_DECL:
3132 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3133 || decl_function_context (op) == current_function_decl)
3134 return true;
3135 break;
3136
3137 default:
3138 break;
3139 }
3140
3141 return false;
3142 }
3143
3144 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3145
3146 bool
3147 decl_address_ip_invariant_p (const_tree op)
3148 {
3149 /* The conditions below are slightly less strict than the one in
3150 staticp. */
3151
3152 switch (TREE_CODE (op))
3153 {
3154 case LABEL_DECL:
3155 case FUNCTION_DECL:
3156 case STRING_CST:
3157 return true;
3158
3159 case VAR_DECL:
3160 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3161 && !DECL_DLLIMPORT_P (op))
3162 || DECL_THREAD_LOCAL_P (op))
3163 return true;
3164 break;
3165
3166 case CONST_DECL:
3167 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3168 return true;
3169 break;
3170
3171 default:
3172 break;
3173 }
3174
3175 return false;
3176 }
3177
3178
3179 /* Return true if T is function-invariant (internal function, does
3180 not handle arithmetic; that's handled in skip_simple_arithmetic and
3181 tree_invariant_p). */
3182
3183 static bool tree_invariant_p (tree t);
3184
3185 static bool
3186 tree_invariant_p_1 (tree t)
3187 {
3188 tree op;
3189
3190 if (TREE_CONSTANT (t)
3191 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3192 return true;
3193
3194 switch (TREE_CODE (t))
3195 {
3196 case SAVE_EXPR:
3197 return true;
3198
3199 case ADDR_EXPR:
3200 op = TREE_OPERAND (t, 0);
3201 while (handled_component_p (op))
3202 {
3203 switch (TREE_CODE (op))
3204 {
3205 case ARRAY_REF:
3206 case ARRAY_RANGE_REF:
3207 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3208 || TREE_OPERAND (op, 2) != NULL_TREE
3209 || TREE_OPERAND (op, 3) != NULL_TREE)
3210 return false;
3211 break;
3212
3213 case COMPONENT_REF:
3214 if (TREE_OPERAND (op, 2) != NULL_TREE)
3215 return false;
3216 break;
3217
3218 default:;
3219 }
3220 op = TREE_OPERAND (op, 0);
3221 }
3222
3223 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3224
3225 default:
3226 break;
3227 }
3228
3229 return false;
3230 }
3231
3232 /* Return true if T is function-invariant. */
3233
3234 static bool
3235 tree_invariant_p (tree t)
3236 {
3237 tree inner = skip_simple_arithmetic (t);
3238 return tree_invariant_p_1 (inner);
3239 }
3240
3241 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3242 Do this to any expression which may be used in more than one place,
3243 but must be evaluated only once.
3244
3245 Normally, expand_expr would reevaluate the expression each time.
3246 Calling save_expr produces something that is evaluated and recorded
3247 the first time expand_expr is called on it. Subsequent calls to
3248 expand_expr just reuse the recorded value.
3249
3250 The call to expand_expr that generates code that actually computes
3251 the value is the first call *at compile time*. Subsequent calls
3252 *at compile time* generate code to use the saved value.
3253 This produces correct result provided that *at run time* control
3254 always flows through the insns made by the first expand_expr
3255 before reaching the other places where the save_expr was evaluated.
3256 You, the caller of save_expr, must make sure this is so.
3257
3258 Constants, and certain read-only nodes, are returned with no
3259 SAVE_EXPR because that is safe. Expressions containing placeholders
3260 are not touched; see tree.def for an explanation of what these
3261 are used for. */
3262
3263 tree
3264 save_expr (tree expr)
3265 {
3266 tree t = fold (expr);
3267 tree inner;
3268
3269 /* If the tree evaluates to a constant, then we don't want to hide that
3270 fact (i.e. this allows further folding, and direct checks for constants).
3271 However, a read-only object that has side effects cannot be bypassed.
3272 Since it is no problem to reevaluate literals, we just return the
3273 literal node. */
3274 inner = skip_simple_arithmetic (t);
3275 if (TREE_CODE (inner) == ERROR_MARK)
3276 return inner;
3277
3278 if (tree_invariant_p_1 (inner))
3279 return t;
3280
3281 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3282 it means that the size or offset of some field of an object depends on
3283 the value within another field.
3284
3285 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3286 and some variable since it would then need to be both evaluated once and
3287 evaluated more than once. Front-ends must ensure this case cannot
3288 happen by surrounding any such subexpressions in their own SAVE_EXPR
3289 and forcing evaluation at the proper time. */
3290 if (contains_placeholder_p (inner))
3291 return t;
3292
3293 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3294 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3295
3296 /* This expression might be placed ahead of a jump to ensure that the
3297 value was computed on both sides of the jump. So make sure it isn't
3298 eliminated as dead. */
3299 TREE_SIDE_EFFECTS (t) = 1;
3300 return t;
3301 }
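
/* Usage sketch (editor's illustration): a caller that needs the value of
   a side-effecting expression twice wraps it once:

     tree call = build_call_expr (fndecl, 0);
     tree saved = save_expr (call);
     tree sum = build2 (PLUS_EXPR, integer_type_node, saved, saved);

   so the call is expanded a single time and both operands reuse the
   recorded value.  fndecl stands for some FUNCTION_DECL returning int;
   build_call_expr and build2 are the usual tree builders.  */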
3302
3303 /* Look inside EXPR into any simple arithmetic operations. Return the
3304 outermost non-arithmetic or non-invariant node. */
3305
3306 tree
3307 skip_simple_arithmetic (tree expr)
3308 {
3309 /* We don't care about whether this can be used as an lvalue in this
3310 context. */
3311 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3312 expr = TREE_OPERAND (expr, 0);
3313
3314 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3315 a constant, it will be more efficient to not make another SAVE_EXPR since
3316 it will allow better simplification and GCSE will be able to merge the
3317 computations if they actually occur. */
3318 while (true)
3319 {
3320 if (UNARY_CLASS_P (expr))
3321 expr = TREE_OPERAND (expr, 0);
3322 else if (BINARY_CLASS_P (expr))
3323 {
3324 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3325 expr = TREE_OPERAND (expr, 0);
3326 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3327 expr = TREE_OPERAND (expr, 1);
3328 else
3329 break;
3330 }
3331 else
3332 break;
3333 }
3334
3335 return expr;
3336 }
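
/* Worked example (editor's note): given SAVE_EXPR<x> + 4, the loop above
   peels the PLUS_EXPR because its other operand is invariant and returns
   the SAVE_EXPR itself; save_expr then sees that the inner node is
   already invariant and does not wrap the sum in a second SAVE_EXPR.  */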
3337
3338 /* Look inside EXPR into simple arithmetic operations involving constants.
3339 Return the outermost non-arithmetic or non-constant node. */
3340
3341 tree
3342 skip_simple_constant_arithmetic (tree expr)
3343 {
3344 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3345 expr = TREE_OPERAND (expr, 0);
3346
3347 while (true)
3348 {
3349 if (UNARY_CLASS_P (expr))
3350 expr = TREE_OPERAND (expr, 0);
3351 else if (BINARY_CLASS_P (expr))
3352 {
3353 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3354 expr = TREE_OPERAND (expr, 0);
3355 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3356 expr = TREE_OPERAND (expr, 1);
3357 else
3358 break;
3359 }
3360 else
3361 break;
3362 }
3363
3364 return expr;
3365 }
3366
3367 /* Return which tree structure is used by T. */
3368
3369 enum tree_node_structure_enum
3370 tree_node_structure (const_tree t)
3371 {
3372 const enum tree_code code = TREE_CODE (t);
3373 return tree_node_structure_for_code (code);
3374 }
3375
3376 /* Set various status flags when building a CALL_EXPR object T. */
3377
3378 static void
3379 process_call_operands (tree t)
3380 {
3381 bool side_effects = TREE_SIDE_EFFECTS (t);
3382 bool read_only = false;
3383 int i = call_expr_flags (t);
3384
3385 /* Calls have side-effects, except those to const or pure functions. */
3386 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3387 side_effects = true;
3388 /* Propagate TREE_READONLY of arguments for const functions. */
3389 if (i & ECF_CONST)
3390 read_only = true;
3391
3392 if (!side_effects || read_only)
3393 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3394 {
3395 tree op = TREE_OPERAND (t, i);
3396 if (op && TREE_SIDE_EFFECTS (op))
3397 side_effects = true;
3398 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3399 read_only = false;
3400 }
3401
3402 TREE_SIDE_EFFECTS (t) = side_effects;
3403 TREE_READONLY (t) = read_only;
3404 }
3405 \f
3406 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3407 size or offset that depends on a field within a record. */
3408
3409 bool
3410 contains_placeholder_p (const_tree exp)
3411 {
3412 enum tree_code code;
3413
3414 if (!exp)
3415 return 0;
3416
3417 code = TREE_CODE (exp);
3418 if (code == PLACEHOLDER_EXPR)
3419 return 1;
3420
3421 switch (TREE_CODE_CLASS (code))
3422 {
3423 case tcc_reference:
3424 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3425 position computations since they will be converted into a
3426 WITH_RECORD_EXPR involving the reference, which we assume
3427 here will be valid. */
3428 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3429
3430 case tcc_exceptional:
3431 if (code == TREE_LIST)
3432 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3433 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3434 break;
3435
3436 case tcc_unary:
3437 case tcc_binary:
3438 case tcc_comparison:
3439 case tcc_expression:
3440 switch (code)
3441 {
3442 case COMPOUND_EXPR:
3443 /* Ignoring the first operand isn't quite right, but works best. */
3444 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3445
3446 case COND_EXPR:
3447 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3448 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3449 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3450
3451 case SAVE_EXPR:
3452 /* The save_expr function never wraps anything containing
3453 a PLACEHOLDER_EXPR. */
3454 return 0;
3455
3456 default:
3457 break;
3458 }
3459
3460 switch (TREE_CODE_LENGTH (code))
3461 {
3462 case 1:
3463 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3464 case 2:
3465 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3466 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3467 default:
3468 return 0;
3469 }
3470
3471 case tcc_vl_exp:
3472 switch (code)
3473 {
3474 case CALL_EXPR:
3475 {
3476 const_tree arg;
3477 const_call_expr_arg_iterator iter;
3478 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3479 if (CONTAINS_PLACEHOLDER_P (arg))
3480 return 1;
3481 return 0;
3482 }
3483 default:
3484 return 0;
3485 }
3486
3487 default:
3488 return 0;
3489 }
3490 return 0;
3491 }
3492
3493 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3494 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3495 field positions. */
3496
3497 static bool
3498 type_contains_placeholder_1 (const_tree type)
3499 {
3500 /* If the size contains a placeholder or the parent type (the component
3501 type in the case of arrays) involves a placeholder, this type does. */
3502 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3503 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3504 || (!POINTER_TYPE_P (type)
3505 && TREE_TYPE (type)
3506 && type_contains_placeholder_p (TREE_TYPE (type))))
3507 return true;
3508
3509 /* Now do type-specific checks. Note that the last part of the check above
3510 greatly limits what we have to do below. */
3511 switch (TREE_CODE (type))
3512 {
3513 case VOID_TYPE:
3514 case POINTER_BOUNDS_TYPE:
3515 case COMPLEX_TYPE:
3516 case ENUMERAL_TYPE:
3517 case BOOLEAN_TYPE:
3518 case POINTER_TYPE:
3519 case OFFSET_TYPE:
3520 case REFERENCE_TYPE:
3521 case METHOD_TYPE:
3522 case FUNCTION_TYPE:
3523 case VECTOR_TYPE:
3524 case NULLPTR_TYPE:
3525 return false;
3526
3527 case INTEGER_TYPE:
3528 case REAL_TYPE:
3529 case FIXED_POINT_TYPE:
3530 /* Here we just check the bounds. */
3531 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3532 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3533
3534 case ARRAY_TYPE:
3535 /* We have already checked the component type above, so just check the
3536 domain type. */
3537 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3538
3539 case RECORD_TYPE:
3540 case UNION_TYPE:
3541 case QUAL_UNION_TYPE:
3542 {
3543 tree field;
3544
3545 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3546 if (TREE_CODE (field) == FIELD_DECL
3547 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3548 || (TREE_CODE (type) == QUAL_UNION_TYPE
3549 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3550 || type_contains_placeholder_p (TREE_TYPE (field))))
3551 return true;
3552
3553 return false;
3554 }
3555
3556 default:
3557 gcc_unreachable ();
3558 }
3559 }
3560
3561 /* Wrapper around above function used to cache its result. */
3562
3563 bool
3564 type_contains_placeholder_p (tree type)
3565 {
3566 bool result;
3567
3568 /* If the contains_placeholder_bits field has been initialized,
3569 then we know the answer. */
3570 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3571 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3572
3573 /* Indicate that we've seen this type node, and the answer is false.
3574 This is what we want to return if we run into recursion via fields. */
3575 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3576
3577 /* Compute the real value. */
3578 result = type_contains_placeholder_1 (type);
3579
3580 /* Store the real value. */
3581 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3582
3583 return result;
3584 }
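
/* Editor's note on the cache above: TYPE_CONTAINS_PLACEHOLDER_INTERNAL
   stores 0 for "not yet computed", 1 for "no" and 2 for "yes", which is
   why the wrapper returns the stored value minus one and stores the
   freshly computed result plus one.  */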
3585 \f
3586 /* Push tree EXP onto vector QUEUE if it is not already present. */
3587
3588 static void
3589 push_without_duplicates (tree exp, vec<tree> *queue)
3590 {
3591 unsigned int i;
3592 tree iter;
3593
3594 FOR_EACH_VEC_ELT (*queue, i, iter)
3595 if (simple_cst_equal (iter, exp) == 1)
3596 break;
3597
3598 if (!iter)
3599 queue->safe_push (exp);
3600 }
3601
3602 /* Given a tree EXP, find all occurrences of references to fields
3603 in a PLACEHOLDER_EXPR and place them in vector REFS without
3604 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3605 we assume here that EXP contains only arithmetic expressions
3606 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3607 argument list. */
3608
3609 void
3610 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3611 {
3612 enum tree_code code = TREE_CODE (exp);
3613 tree inner;
3614 int i;
3615
3616 /* We handle TREE_LIST and COMPONENT_REF separately. */
3617 if (code == TREE_LIST)
3618 {
3619 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3620 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3621 }
3622 else if (code == COMPONENT_REF)
3623 {
3624 for (inner = TREE_OPERAND (exp, 0);
3625 REFERENCE_CLASS_P (inner);
3626 inner = TREE_OPERAND (inner, 0))
3627 ;
3628
3629 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3630 push_without_duplicates (exp, refs);
3631 else
3632 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3633 }
3634 else
3635 switch (TREE_CODE_CLASS (code))
3636 {
3637 case tcc_constant:
3638 break;
3639
3640 case tcc_declaration:
3641 /* Variables allocated to static storage can stay. */
3642 if (!TREE_STATIC (exp))
3643 push_without_duplicates (exp, refs);
3644 break;
3645
3646 case tcc_expression:
3647 /* This is the pattern built in ada/make_aligning_type. */
3648 if (code == ADDR_EXPR
3649 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3650 {
3651 push_without_duplicates (exp, refs);
3652 break;
3653 }
3654
3655 /* Fall through... */
3656
3657 case tcc_exceptional:
3658 case tcc_unary:
3659 case tcc_binary:
3660 case tcc_comparison:
3661 case tcc_reference:
3662 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3663 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3664 break;
3665
3666 case tcc_vl_exp:
3667 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3668 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3669 break;
3670
3671 default:
3672 gcc_unreachable ();
3673 }
3674 }
3675
3676 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3677 return a tree with all occurrences of references to F in a
3678 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3679 CONST_DECLs. Note that we assume here that EXP contains only
3680 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3681 occurring only in their argument list. */
3682
3683 tree
3684 substitute_in_expr (tree exp, tree f, tree r)
3685 {
3686 enum tree_code code = TREE_CODE (exp);
3687 tree op0, op1, op2, op3;
3688 tree new_tree;
3689
3690 /* We handle TREE_LIST and COMPONENT_REF separately. */
3691 if (code == TREE_LIST)
3692 {
3693 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3694 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3695 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3696 return exp;
3697
3698 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3699 }
3700 else if (code == COMPONENT_REF)
3701 {
3702 tree inner;
3703
3704 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3705 and it is the right field, replace it with R. */
3706 for (inner = TREE_OPERAND (exp, 0);
3707 REFERENCE_CLASS_P (inner);
3708 inner = TREE_OPERAND (inner, 0))
3709 ;
3710
3711 /* The field. */
3712 op1 = TREE_OPERAND (exp, 1);
3713
3714 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3715 return r;
3716
3717 /* If this expression hasn't been completed yet, leave it alone. */
3718 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3719 return exp;
3720
3721 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3722 if (op0 == TREE_OPERAND (exp, 0))
3723 return exp;
3724
3725 new_tree
3726 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3727 }
3728 else
3729 switch (TREE_CODE_CLASS (code))
3730 {
3731 case tcc_constant:
3732 return exp;
3733
3734 case tcc_declaration:
3735 if (exp == f)
3736 return r;
3737 else
3738 return exp;
3739
3740 case tcc_expression:
3741 if (exp == f)
3742 return r;
3743
3744 /* Fall through... */
3745
3746 case tcc_exceptional:
3747 case tcc_unary:
3748 case tcc_binary:
3749 case tcc_comparison:
3750 case tcc_reference:
3751 switch (TREE_CODE_LENGTH (code))
3752 {
3753 case 0:
3754 return exp;
3755
3756 case 1:
3757 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3758 if (op0 == TREE_OPERAND (exp, 0))
3759 return exp;
3760
3761 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3762 break;
3763
3764 case 2:
3765 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3766 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3767
3768 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3769 return exp;
3770
3771 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3772 break;
3773
3774 case 3:
3775 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3776 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3777 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3778
3779 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3780 && op2 == TREE_OPERAND (exp, 2))
3781 return exp;
3782
3783 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3784 break;
3785
3786 case 4:
3787 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3788 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3789 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3790 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3791
3792 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3793 && op2 == TREE_OPERAND (exp, 2)
3794 && op3 == TREE_OPERAND (exp, 3))
3795 return exp;
3796
3797 new_tree
3798 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3799 break;
3800
3801 default:
3802 gcc_unreachable ();
3803 }
3804 break;
3805
3806 case tcc_vl_exp:
3807 {
3808 int i;
3809
3810 new_tree = NULL_TREE;
3811
3812 /* If we are trying to replace F with a constant, inline back
3813 functions which do nothing else than computing a value from
3814 the arguments they are passed. This makes it possible to
3815 fold partially or entirely the replacement expression. */
3816 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3817 {
3818 tree t = maybe_inline_call_in_expr (exp);
3819 if (t)
3820 return SUBSTITUTE_IN_EXPR (t, f, r);
3821 }
3822
3823 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3824 {
3825 tree op = TREE_OPERAND (exp, i);
3826 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3827 if (new_op != op)
3828 {
3829 if (!new_tree)
3830 new_tree = copy_node (exp);
3831 TREE_OPERAND (new_tree, i) = new_op;
3832 }
3833 }
3834
3835 if (new_tree)
3836 {
3837 new_tree = fold (new_tree);
3838 if (TREE_CODE (new_tree) == CALL_EXPR)
3839 process_call_operands (new_tree);
3840 }
3841 else
3842 return exp;
3843 }
3844 break;
3845
3846 default:
3847 gcc_unreachable ();
3848 }
3849
3850 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3851
3852 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3853 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3854
3855 return new_tree;
3856 }
3857
3858 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3859 for it within OBJ, a tree that is an object or a chain of references. */
3860
3861 tree
3862 substitute_placeholder_in_expr (tree exp, tree obj)
3863 {
3864 enum tree_code code = TREE_CODE (exp);
3865 tree op0, op1, op2, op3;
3866 tree new_tree;
3867
3868 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3869 in the chain of OBJ. */
3870 if (code == PLACEHOLDER_EXPR)
3871 {
3872 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3873 tree elt;
3874
3875 for (elt = obj; elt != 0;
3876 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3877 || TREE_CODE (elt) == COND_EXPR)
3878 ? TREE_OPERAND (elt, 1)
3879 : (REFERENCE_CLASS_P (elt)
3880 || UNARY_CLASS_P (elt)
3881 || BINARY_CLASS_P (elt)
3882 || VL_EXP_CLASS_P (elt)
3883 || EXPRESSION_CLASS_P (elt))
3884 ? TREE_OPERAND (elt, 0) : 0))
3885 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3886 return elt;
3887
3888 for (elt = obj; elt != 0;
3889 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3890 || TREE_CODE (elt) == COND_EXPR)
3891 ? TREE_OPERAND (elt, 1)
3892 : (REFERENCE_CLASS_P (elt)
3893 || UNARY_CLASS_P (elt)
3894 || BINARY_CLASS_P (elt)
3895 || VL_EXP_CLASS_P (elt)
3896 || EXPRESSION_CLASS_P (elt))
3897 ? TREE_OPERAND (elt, 0) : 0))
3898 if (POINTER_TYPE_P (TREE_TYPE (elt))
3899 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3900 == need_type))
3901 return fold_build1 (INDIRECT_REF, need_type, elt);
3902
3903 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3904 survives until RTL generation, there will be an error. */
3905 return exp;
3906 }
3907
3908 /* TREE_LIST is special because we need to look at TREE_VALUE
3909 and TREE_CHAIN, not TREE_OPERANDS. */
3910 else if (code == TREE_LIST)
3911 {
3912 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3913 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3914 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3915 return exp;
3916
3917 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3918 }
3919 else
3920 switch (TREE_CODE_CLASS (code))
3921 {
3922 case tcc_constant:
3923 case tcc_declaration:
3924 return exp;
3925
3926 case tcc_exceptional:
3927 case tcc_unary:
3928 case tcc_binary:
3929 case tcc_comparison:
3930 case tcc_expression:
3931 case tcc_reference:
3932 case tcc_statement:
3933 switch (TREE_CODE_LENGTH (code))
3934 {
3935 case 0:
3936 return exp;
3937
3938 case 1:
3939 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3940 if (op0 == TREE_OPERAND (exp, 0))
3941 return exp;
3942
3943 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3944 break;
3945
3946 case 2:
3947 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3948 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3949
3950 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3951 return exp;
3952
3953 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3954 break;
3955
3956 case 3:
3957 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3958 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3959 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3960
3961 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3962 && op2 == TREE_OPERAND (exp, 2))
3963 return exp;
3964
3965 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3966 break;
3967
3968 case 4:
3969 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3970 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3971 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3972 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3973
3974 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3975 && op2 == TREE_OPERAND (exp, 2)
3976 && op3 == TREE_OPERAND (exp, 3))
3977 return exp;
3978
3979 new_tree
3980 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3981 break;
3982
3983 default:
3984 gcc_unreachable ();
3985 }
3986 break;
3987
3988 case tcc_vl_exp:
3989 {
3990 int i;
3991
3992 new_tree = NULL_TREE;
3993
3994 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3995 {
3996 tree op = TREE_OPERAND (exp, i);
3997 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3998 if (new_op != op)
3999 {
4000 if (!new_tree)
4001 new_tree = copy_node (exp);
4002 TREE_OPERAND (new_tree, i) = new_op;
4003 }
4004 }
4005
4006 if (new_tree)
4007 {
4008 new_tree = fold (new_tree);
4009 if (TREE_CODE (new_tree) == CALL_EXPR)
4010 process_call_operands (new_tree);
4011 }
4012 else
4013 return exp;
4014 }
4015 break;
4016
4017 default:
4018 gcc_unreachable ();
4019 }
4020
4021 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4022
4023 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4024 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4025
4026 return new_tree;
4027 }
4028 \f
4029
4030 /* Subroutine of stabilize_reference; this is called for subtrees of
4031 references. Any expression with side-effects must be put in a SAVE_EXPR
4032 to ensure that it is only evaluated once.
4033
4034 We don't put SAVE_EXPR nodes around everything, because assigning very
4035 simple expressions to temporaries causes us to miss good opportunities
4036 for optimizations. Among other things, the opportunity to fold in the
4037 addition of a constant into an addressing mode often gets lost, e.g.
4038 "y[i+1] += x;". In general, we take the approach that we should not make
4039 an assignment unless we are forced into it - i.e., that any non-side effect
4040 operator should be allowed, and that cse should take care of coalescing
4041 multiple utterances of the same expression should that prove fruitful. */
4042
4043 static tree
4044 stabilize_reference_1 (tree e)
4045 {
4046 tree result;
4047 enum tree_code code = TREE_CODE (e);
4048
4049 /* We cannot ignore const expressions because it might be a reference
4050 to a const array but whose index contains side-effects. But we can
4051 ignore things that are actual constant or that already have been
4052 handled by this function. */
4053
4054 if (tree_invariant_p (e))
4055 return e;
4056
4057 switch (TREE_CODE_CLASS (code))
4058 {
4059 case tcc_exceptional:
4060 case tcc_type:
4061 case tcc_declaration:
4062 case tcc_comparison:
4063 case tcc_statement:
4064 case tcc_expression:
4065 case tcc_reference:
4066 case tcc_vl_exp:
4067 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4068 so that it will only be evaluated once. */
4069 /* The reference (r) and comparison (<) classes could be handled as
4070 below, but it is generally faster to only evaluate them once. */
4071 if (TREE_SIDE_EFFECTS (e))
4072 return save_expr (e);
4073 return e;
4074
4075 case tcc_constant:
4076 /* Constants need no processing. In fact, we should never reach
4077 here. */
4078 return e;
4079
4080 case tcc_binary:
4081 /* Division is slow and tends to be compiled with jumps,
4082 especially the division by powers of 2 that is often
4083 found inside of an array reference. So do it just once. */
4084 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4085 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4086 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4087 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4088 return save_expr (e);
4089 /* Recursively stabilize each operand. */
4090 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4091 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4092 break;
4093
4094 case tcc_unary:
4095 /* Recursively stabilize each operand. */
4096 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4097 break;
4098
4099 default:
4100 gcc_unreachable ();
4101 }
4102
4103 TREE_TYPE (result) = TREE_TYPE (e);
4104 TREE_READONLY (result) = TREE_READONLY (e);
4105 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4106 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4107
4108 return result;
4109 }
4110
4111 /* Stabilize a reference so that we can use it any number of times
4112 without causing its operands to be evaluated more than once.
4113 Returns the stabilized reference. This works by means of save_expr,
4114 so see the caveats in the comments about save_expr.
4115
4116 Also allows conversion expressions whose operands are references.
4117 Any other kind of expression is returned unchanged. */
4118
4119 tree
4120 stabilize_reference (tree ref)
4121 {
4122 tree result;
4123 enum tree_code code = TREE_CODE (ref);
4124
4125 switch (code)
4126 {
4127 case VAR_DECL:
4128 case PARM_DECL:
4129 case RESULT_DECL:
4130 /* No action is needed in this case. */
4131 return ref;
4132
4133 CASE_CONVERT:
4134 case FLOAT_EXPR:
4135 case FIX_TRUNC_EXPR:
4136 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4137 break;
4138
4139 case INDIRECT_REF:
4140 result = build_nt (INDIRECT_REF,
4141 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4142 break;
4143
4144 case COMPONENT_REF:
4145 result = build_nt (COMPONENT_REF,
4146 stabilize_reference (TREE_OPERAND (ref, 0)),
4147 TREE_OPERAND (ref, 1), NULL_TREE);
4148 break;
4149
4150 case BIT_FIELD_REF:
4151 result = build_nt (BIT_FIELD_REF,
4152 stabilize_reference (TREE_OPERAND (ref, 0)),
4153 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4154 break;
4155
4156 case ARRAY_REF:
4157 result = build_nt (ARRAY_REF,
4158 stabilize_reference (TREE_OPERAND (ref, 0)),
4159 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4160 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4161 break;
4162
4163 case ARRAY_RANGE_REF:
4164 result = build_nt (ARRAY_RANGE_REF,
4165 stabilize_reference (TREE_OPERAND (ref, 0)),
4166 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4167 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4168 break;
4169
4170 case COMPOUND_EXPR:
4171 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4172 it wouldn't be ignored. This matters when dealing with
4173 volatiles. */
4174 return stabilize_reference_1 (ref);
4175
4176 /* If arg isn't a kind of lvalue we recognize, make no change.
4177 Caller should recognize the error for an invalid lvalue. */
4178 default:
4179 return ref;
4180
4181 case ERROR_MARK:
4182 return error_mark_node;
4183 }
4184
4185 TREE_TYPE (result) = TREE_TYPE (ref);
4186 TREE_READONLY (result) = TREE_READONLY (ref);
4187 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4188 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4189
4190 return result;
4191 }
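
/* Illustrative sketch (editorial addition, not part of the original
   sources): a typical client wraps an lvalue with stabilize_reference
   before using it more than once, e.g. when expanding a compound
   assignment.  The helper name and its arguments below are hypothetical;
   the block is kept under #if 0 so it is not compiled.  */
#if 0
static tree
example_compound_assign (tree array, tree index_with_side_effects, tree rhs)
{
  tree elt_type = TREE_TYPE (TREE_TYPE (array));
  tree ref = build4 (ARRAY_REF, elt_type, array, index_with_side_effects,
                     NULL_TREE, NULL_TREE);
  /* Stabilizing wraps the side-effecting index in a SAVE_EXPR, so both
     uses of REF below evaluate it only once.  */
  ref = stabilize_reference (ref);
  return build2 (MODIFY_EXPR, elt_type, ref,
                 build2 (PLUS_EXPR, elt_type, ref, rhs));
}
#endif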
4192 \f
4193 /* Low-level constructors for expressions. */
4194
4195 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4196 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4197
4198 void
4199 recompute_tree_invariant_for_addr_expr (tree t)
4200 {
4201 tree node;
4202 bool tc = true, se = false;
4203
4204 /* We started out assuming this address is both invariant and constant, but
4205 does not have side effects. Now go down any handled components and see if
4206 any of them involve offsets that are either non-constant or non-invariant.
4207 Also check for side-effects.
4208
4209 ??? Note that this code makes no attempt to deal with the case where
4210 taking the address of something causes a copy due to misalignment. */
4211
4212 #define UPDATE_FLAGS(NODE) \
4213 do { tree _node = (NODE); \
4214 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4215 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4216
4217 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4218 node = TREE_OPERAND (node, 0))
4219 {
4220 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4221 array reference (probably made temporarily by the G++ front end),
4222 so ignore all the operands. */
4223 if ((TREE_CODE (node) == ARRAY_REF
4224 || TREE_CODE (node) == ARRAY_RANGE_REF)
4225 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4226 {
4227 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4228 if (TREE_OPERAND (node, 2))
4229 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4230 if (TREE_OPERAND (node, 3))
4231 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4232 }
4233 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4234 FIELD_DECL, apparently. The G++ front end can put something else
4235 there, at least temporarily. */
4236 else if (TREE_CODE (node) == COMPONENT_REF
4237 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4238 {
4239 if (TREE_OPERAND (node, 2))
4240 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4241 }
4242 }
4243
4244 node = lang_hooks.expr_to_decl (node, &tc, &se);
4245
4246 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4247 the address, since &(*a)->b is a form of addition. If it's a constant, the
4248 address is constant too. If it's a decl, its address is constant if the
4249 decl is static. Everything else is not constant and, furthermore,
4250 taking the address of a volatile variable is not volatile. */
4251 if (TREE_CODE (node) == INDIRECT_REF
4252 || TREE_CODE (node) == MEM_REF)
4253 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4254 else if (CONSTANT_CLASS_P (node))
4255 ;
4256 else if (DECL_P (node))
4257 tc &= (staticp (node) != NULL_TREE);
4258 else
4259 {
4260 tc = false;
4261 se |= TREE_SIDE_EFFECTS (node);
4262 }
4263
4264
4265 TREE_CONSTANT (t) = tc;
4266 TREE_SIDE_EFFECTS (t) = se;
4267 #undef UPDATE_FLAGS
4268 }
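
/* Illustrative sketch (editorial addition): callers that rewrite the
   operand of an existing ADDR_EXPR in place are expected to refresh the
   cached flags afterwards.  ADDR and NEW_REF are hypothetical trees; the
   block is kept under #if 0 so it is not compiled.  */
#if 0
  gcc_assert (TREE_CODE (addr) == ADDR_EXPR);
  TREE_OPERAND (addr, 0) = new_ref;
  /* Recompute TREE_CONSTANT and TREE_SIDE_EFFECTS from the new operand.  */
  recompute_tree_invariant_for_addr_expr (addr);
#endif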
4269
4270 /* Build an expression of code CODE, data type TYPE, and operands as
4271 specified. Expressions and reference nodes can be created this way.
4272 Constants, decls, types and misc nodes cannot be.
4273
4274 We define six non-variadic functions, build0 through build5, taking
4275 from 0 to 5 operands.  This is enough for all extant tree codes.  */
4276
4277 tree
4278 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4279 {
4280 tree t;
4281
4282 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4283
4284 t = make_node_stat (code PASS_MEM_STAT);
4285 TREE_TYPE (t) = tt;
4286
4287 return t;
4288 }
4289
4290 tree
4291 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4292 {
4293 int length = sizeof (struct tree_exp);
4294 tree t;
4295
4296 record_node_allocation_statistics (code, length);
4297
4298 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4299
4300 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4301
4302 memset (t, 0, sizeof (struct tree_common));
4303
4304 TREE_SET_CODE (t, code);
4305
4306 TREE_TYPE (t) = type;
4307 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4308 TREE_OPERAND (t, 0) = node;
4309 if (node && !TYPE_P (node))
4310 {
4311 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4312 TREE_READONLY (t) = TREE_READONLY (node);
4313 }
4314
4315 if (TREE_CODE_CLASS (code) == tcc_statement)
4316 TREE_SIDE_EFFECTS (t) = 1;
4317 else switch (code)
4318 {
4319 case VA_ARG_EXPR:
4320 /* VA_ARG_EXPR has side-effects, no matter what its
4321 operand is. */
4322 TREE_SIDE_EFFECTS (t) = 1;
4323 TREE_READONLY (t) = 0;
4324 break;
4325
4326 case INDIRECT_REF:
4327 /* Whether a dereference is readonly has nothing to do with whether
4328 its operand is readonly. */
4329 TREE_READONLY (t) = 0;
4330 break;
4331
4332 case ADDR_EXPR:
4333 if (node)
4334 recompute_tree_invariant_for_addr_expr (t);
4335 break;
4336
4337 default:
4338 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4339 && node && !TYPE_P (node)
4340 && TREE_CONSTANT (node))
4341 TREE_CONSTANT (t) = 1;
4342 if (TREE_CODE_CLASS (code) == tcc_reference
4343 && node && TREE_THIS_VOLATILE (node))
4344 TREE_THIS_VOLATILE (t) = 1;
4345 break;
4346 }
4347
4348 return t;
4349 }
4350
4351 #define PROCESS_ARG(N) \
4352 do { \
4353 TREE_OPERAND (t, N) = arg##N; \
4354 if (arg##N && !TYPE_P (arg##N)) \
4355 { \
4356 if (TREE_SIDE_EFFECTS (arg##N)) \
4357 side_effects = 1; \
4358 if (!TREE_READONLY (arg##N) \
4359 && !CONSTANT_CLASS_P (arg##N)) \
4360 (void) (read_only = 0); \
4361 if (!TREE_CONSTANT (arg##N)) \
4362 (void) (constant = 0); \
4363 } \
4364 } while (0)
4365
4366 tree
4367 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4368 {
4369 bool constant, read_only, side_effects;
4370 tree t;
4371
4372 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4373
4374 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4375 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4376 /* When sizetype precision doesn't match that of pointers
4377 we need to be able to build explicit extensions or truncations
4378 of the offset argument. */
4379 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4380 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4381 && TREE_CODE (arg1) == INTEGER_CST);
4382
4383 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4384 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4385 && ptrofftype_p (TREE_TYPE (arg1)));
4386
4387 t = make_node_stat (code PASS_MEM_STAT);
4388 TREE_TYPE (t) = tt;
4389
4390 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4391 result based on those same flags for the arguments. But if the
4392 arguments aren't really even `tree' expressions, we shouldn't be trying
4393 to do this. */
4394
4395 /* Expressions without side effects may be constant if their
4396 arguments are as well. */
4397 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4398 || TREE_CODE_CLASS (code) == tcc_binary);
4399 read_only = 1;
4400 side_effects = TREE_SIDE_EFFECTS (t);
4401
4402 PROCESS_ARG (0);
4403 PROCESS_ARG (1);
4404
4405 TREE_SIDE_EFFECTS (t) = side_effects;
4406 if (code == MEM_REF)
4407 {
4408 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4409 {
4410 tree o = TREE_OPERAND (arg0, 0);
4411 TREE_READONLY (t) = TREE_READONLY (o);
4412 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4413 }
4414 }
4415 else
4416 {
4417 TREE_READONLY (t) = read_only;
4418 TREE_CONSTANT (t) = constant;
4419 TREE_THIS_VOLATILE (t)
4420 = (TREE_CODE_CLASS (code) == tcc_reference
4421 && arg0 && TREE_THIS_VOLATILE (arg0));
4422 }
4423
4424 return t;
4425 }
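
/* Illustrative sketch (editorial addition): the assertion above requires
   a POINTER_PLUS_EXPR to have a pointer-typed first operand and a
   ptrofftype (sizetype) offset, never a pointer-typed offset.  PTR and
   BYTE_OFFSET are hypothetical; the block is kept under #if 0 so it is
   not compiled.  */
#if 0
static tree
example_pointer_plus (tree ptr, HOST_WIDE_INT byte_offset)
{
  /* The offset operand is built in sizetype, not in TREE_TYPE (ptr).  */
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
                 build_int_cst (sizetype, byte_offset));
}
#endif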
4426
4427
4428 tree
4429 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4430 tree arg2 MEM_STAT_DECL)
4431 {
4432 bool constant, read_only, side_effects;
4433 tree t;
4434
4435 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4436 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4437
4438 t = make_node_stat (code PASS_MEM_STAT);
4439 TREE_TYPE (t) = tt;
4440
4441 read_only = 1;
4442
4443 /* As a special exception, if COND_EXPR has NULL branches, we
4444 assume that it is a gimple statement and always consider
4445 it to have side effects. */
4446 if (code == COND_EXPR
4447 && tt == void_type_node
4448 && arg1 == NULL_TREE
4449 && arg2 == NULL_TREE)
4450 side_effects = true;
4451 else
4452 side_effects = TREE_SIDE_EFFECTS (t);
4453
4454 PROCESS_ARG (0);
4455 PROCESS_ARG (1);
4456 PROCESS_ARG (2);
4457
4458 if (code == COND_EXPR)
4459 TREE_READONLY (t) = read_only;
4460
4461 TREE_SIDE_EFFECTS (t) = side_effects;
4462 TREE_THIS_VOLATILE (t)
4463 = (TREE_CODE_CLASS (code) == tcc_reference
4464 && arg0 && TREE_THIS_VOLATILE (arg0));
4465
4466 return t;
4467 }
4468
4469 tree
4470 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4471 tree arg2, tree arg3 MEM_STAT_DECL)
4472 {
4473 bool constant, read_only, side_effects;
4474 tree t;
4475
4476 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4477
4478 t = make_node_stat (code PASS_MEM_STAT);
4479 TREE_TYPE (t) = tt;
4480
4481 side_effects = TREE_SIDE_EFFECTS (t);
4482
4483 PROCESS_ARG (0);
4484 PROCESS_ARG (1);
4485 PROCESS_ARG (2);
4486 PROCESS_ARG (3);
4487
4488 TREE_SIDE_EFFECTS (t) = side_effects;
4489 TREE_THIS_VOLATILE (t)
4490 = (TREE_CODE_CLASS (code) == tcc_reference
4491 && arg0 && TREE_THIS_VOLATILE (arg0));
4492
4493 return t;
4494 }
4495
4496 tree
4497 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4498 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4499 {
4500 bool constant, read_only, side_effects;
4501 tree t;
4502
4503 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4504
4505 t = make_node_stat (code PASS_MEM_STAT);
4506 TREE_TYPE (t) = tt;
4507
4508 side_effects = TREE_SIDE_EFFECTS (t);
4509
4510 PROCESS_ARG (0);
4511 PROCESS_ARG (1);
4512 PROCESS_ARG (2);
4513 PROCESS_ARG (3);
4514 PROCESS_ARG (4);
4515
4516 TREE_SIDE_EFFECTS (t) = side_effects;
4517 if (code == TARGET_MEM_REF)
4518 {
4519 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4520 {
4521 tree o = TREE_OPERAND (arg0, 0);
4522 TREE_READONLY (t) = TREE_READONLY (o);
4523 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4524 }
4525 }
4526 else
4527 TREE_THIS_VOLATILE (t)
4528 = (TREE_CODE_CLASS (code) == tcc_reference
4529 && arg0 && TREE_THIS_VOLATILE (arg0));
4530
4531 return t;
4532 }
4533
4534 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4535 on the pointer PTR. */
4536
4537 tree
4538 build_simple_mem_ref_loc (location_t loc, tree ptr)
4539 {
4540 HOST_WIDE_INT offset = 0;
4541 tree ptype = TREE_TYPE (ptr);
4542 tree tem;
4543 /* For convenience allow addresses that collapse to a simple base
4544 and offset. */
4545 if (TREE_CODE (ptr) == ADDR_EXPR
4546 && (handled_component_p (TREE_OPERAND (ptr, 0))
4547 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4548 {
4549 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4550 gcc_assert (ptr);
4551 ptr = build_fold_addr_expr (ptr);
4552 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4553 }
4554 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4555 ptr, build_int_cst (ptype, offset));
4556 SET_EXPR_LOCATION (tem, loc);
4557 return tem;
4558 }
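
/* Illustrative sketch (editorial addition): building the equivalent of
   the C expression *P through this helper.  P is a hypothetical pointer
   SSA name or invariant; the block is kept under #if 0 so it is not
   compiled.  */
#if 0
  /* The resulting MEM_REF uses TREE_TYPE (TREE_TYPE (p)) as the access
     type and a zero byte offset in P's pointer type.  */
  tree deref = build_simple_mem_ref_loc (UNKNOWN_LOCATION, p);
#endif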
4559
4560 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4561
4562 offset_int
4563 mem_ref_offset (const_tree t)
4564 {
4565 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4566 }
4567
4568 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4569 offsetted by OFFSET units. */
4570
4571 tree
4572 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4573 {
4574 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4575 build_fold_addr_expr (base),
4576 build_int_cst (ptr_type_node, offset));
4577 tree addr = build1 (ADDR_EXPR, type, ref);
4578 recompute_tree_invariant_for_addr_expr (addr);
4579 return addr;
4580 }
4581
4582 /* Similar to the buildN functions above, except don't specify the TREE_TYPE
4583 and leave TREE_SIDE_EFFECTS as 0.
4584 It is permissible for arguments to be null,
4585 or even garbage if their values do not matter. */
4586
4587 tree
4588 build_nt (enum tree_code code, ...)
4589 {
4590 tree t;
4591 int length;
4592 int i;
4593 va_list p;
4594
4595 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4596
4597 va_start (p, code);
4598
4599 t = make_node (code);
4600 length = TREE_CODE_LENGTH (code);
4601
4602 for (i = 0; i < length; i++)
4603 TREE_OPERAND (t, i) = va_arg (p, tree);
4604
4605 va_end (p);
4606 return t;
4607 }
4608
4609 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4610 tree vec. */
4611
4612 tree
4613 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4614 {
4615 tree ret, t;
4616 unsigned int ix;
4617
4618 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4619 CALL_EXPR_FN (ret) = fn;
4620 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4621 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4622 CALL_EXPR_ARG (ret, ix) = t;
4623 return ret;
4624 }
4625 \f
4626 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4627 We do NOT enter this node in any sort of symbol table.
4628
4629 LOC is the location of the decl.
4630
4631 layout_decl is used to set up the decl's storage layout.
4632 Other slots are initialized to 0 or null pointers. */
4633
4634 tree
4635 build_decl_stat (location_t loc, enum tree_code code, tree name,
4636 tree type MEM_STAT_DECL)
4637 {
4638 tree t;
4639
4640 t = make_node_stat (code PASS_MEM_STAT);
4641 DECL_SOURCE_LOCATION (t) = loc;
4642
4643 /* if (type == error_mark_node)
4644 type = integer_type_node; */
4645 /* That is not done, deliberately, so that having error_mark_node
4646 as the type can suppress useless errors in the use of this variable. */
4647
4648 DECL_NAME (t) = name;
4649 TREE_TYPE (t) = type;
4650
4651 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4652 layout_decl (t, 0);
4653
4654 return t;
4655 }
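
/* Illustrative sketch (editorial addition): creating a simple artificial
   variable through the build_decl wrapper.  The identifier and type are
   hypothetical; the block is kept under #if 0 so it is not compiled.  */
#if 0
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                         get_identifier ("tmp"), integer_type_node);
  DECL_ARTIFICIAL (var) = 1;
#endif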
4656
4657 /* Builds and returns function declaration with NAME and TYPE. */
4658
4659 tree
4660 build_fn_decl (const char *name, tree type)
4661 {
4662 tree id = get_identifier (name);
4663 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4664
4665 DECL_EXTERNAL (decl) = 1;
4666 TREE_PUBLIC (decl) = 1;
4667 DECL_ARTIFICIAL (decl) = 1;
4668 TREE_NOTHROW (decl) = 1;
4669
4670 return decl;
4671 }
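
/* Illustrative sketch (editorial addition): declaring a hypothetical
   external helper "int my_helper (void)" with build_fn_decl; the decl
   comes back external, public, artificial and nothrow as set above.  The
   block is kept under #if 0 so it is not compiled.  */
#if 0
  tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
  tree helper = build_fn_decl ("my_helper", fntype);
#endif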
4672
4673 vec<tree, va_gc> *all_translation_units;
4674
4675 /* Builds a new translation-unit decl with name NAME, queues it in the
4676 global list of translation-unit decls and returns it. */
4677
4678 tree
4679 build_translation_unit_decl (tree name)
4680 {
4681 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4682 name, NULL_TREE);
4683 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4684 vec_safe_push (all_translation_units, tu);
4685 return tu;
4686 }
4687
4688 \f
4689 /* BLOCK nodes are used to represent the structure of binding contours
4690 and declarations, once those contours have been exited and their contents
4691 compiled. This information is used for outputting debugging info. */
4692
4693 tree
4694 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4695 {
4696 tree block = make_node (BLOCK);
4697
4698 BLOCK_VARS (block) = vars;
4699 BLOCK_SUBBLOCKS (block) = subblocks;
4700 BLOCK_SUPERCONTEXT (block) = supercontext;
4701 BLOCK_CHAIN (block) = chain;
4702 return block;
4703 }
4704
4705 \f
4706 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4707
4708 LOC is the location to use in tree T. */
4709
4710 void
4711 protected_set_expr_location (tree t, location_t loc)
4712 {
4713 if (CAN_HAVE_LOCATION_P (t))
4714 SET_EXPR_LOCATION (t, loc);
4715 }
4716 \f
4717 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4718 is ATTRIBUTE. */
4719
4720 tree
4721 build_decl_attribute_variant (tree ddecl, tree attribute)
4722 {
4723 DECL_ATTRIBUTES (ddecl) = attribute;
4724 return ddecl;
4725 }
4726
4727 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES list
4728 is ATTRIBUTE and its qualifiers are QUALS.
4729
4730 Record such modified types already made so we don't make duplicates. */
4731
4732 tree
4733 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4734 {
4735 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4736 {
4737 inchash::hash hstate;
4738 tree ntype;
4739 int i;
4740 tree t;
4741 enum tree_code code = TREE_CODE (ttype);
4742
4743 /* Building a distinct copy of a tagged type is inappropriate; it
4744 causes breakage in code that expects there to be a one-to-one
4745 relationship between a struct and its fields.
4746 build_duplicate_type is another solution (as used in
4747 handle_transparent_union_attribute), but that doesn't play well
4748 with the stronger C++ type identity model. */
4749 if (TREE_CODE (ttype) == RECORD_TYPE
4750 || TREE_CODE (ttype) == UNION_TYPE
4751 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4752 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4753 {
4754 warning (OPT_Wattributes,
4755 "ignoring attributes applied to %qT after definition",
4756 TYPE_MAIN_VARIANT (ttype));
4757 return build_qualified_type (ttype, quals);
4758 }
4759
4760 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4761 ntype = build_distinct_type_copy (ttype);
4762
4763 TYPE_ATTRIBUTES (ntype) = attribute;
4764
4765 hstate.add_int (code);
4766 if (TREE_TYPE (ntype))
4767 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4768 attribute_hash_list (attribute, hstate);
4769
4770 switch (TREE_CODE (ntype))
4771 {
4772 case FUNCTION_TYPE:
4773 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4774 break;
4775 case ARRAY_TYPE:
4776 if (TYPE_DOMAIN (ntype))
4777 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4778 break;
4779 case INTEGER_TYPE:
4780 t = TYPE_MAX_VALUE (ntype);
4781 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4782 hstate.add_object (TREE_INT_CST_ELT (t, i));
4783 break;
4784 case REAL_TYPE:
4785 case FIXED_POINT_TYPE:
4786 {
4787 unsigned int precision = TYPE_PRECISION (ntype);
4788 hstate.add_object (precision);
4789 }
4790 break;
4791 default:
4792 break;
4793 }
4794
4795 ntype = type_hash_canon (hstate.end(), ntype);
4796
4797 /* If the target-dependent attributes make NTYPE different from
4798 its canonical type, we will need to use structural equality
4799 checks for this type. */
4800 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4801 || !comp_type_attributes (ntype, ttype))
4802 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4803 else if (TYPE_CANONICAL (ntype) == ntype)
4804 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4805
4806 ttype = build_qualified_type (ntype, quals);
4807 }
4808 else if (TYPE_QUALS (ttype) != quals)
4809 ttype = build_qualified_type (ttype, quals);
4810
4811 return ttype;
4812 }
4813
4814 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4815 the same. */
4816
4817 static bool
4818 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4819 {
4820 tree cl1, cl2;
4821 for (cl1 = clauses1, cl2 = clauses2;
4822 cl1 && cl2;
4823 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4824 {
4825 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4826 return false;
4827 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4828 {
4829 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4830 OMP_CLAUSE_DECL (cl2)) != 1)
4831 return false;
4832 }
4833 switch (OMP_CLAUSE_CODE (cl1))
4834 {
4835 case OMP_CLAUSE_ALIGNED:
4836 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4837 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4838 return false;
4839 break;
4840 case OMP_CLAUSE_LINEAR:
4841 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4842 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4843 return false;
4844 break;
4845 case OMP_CLAUSE_SIMDLEN:
4846 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4847 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4848 return false;
4849 default:
4850 break;
4851 }
4852 }
4853 return true;
4854 }
4855
4856 /* Compare two constructor-element-type constants. Return true if the lists
4857 are known to be equal; otherwise return false. */
4858
4859 static bool
4860 simple_cst_list_equal (const_tree l1, const_tree l2)
4861 {
4862 while (l1 != NULL_TREE && l2 != NULL_TREE)
4863 {
4864 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4865 return false;
4866
4867 l1 = TREE_CHAIN (l1);
4868 l2 = TREE_CHAIN (l2);
4869 }
4870
4871 return l1 == l2;
4872 }
4873
4874 /* Compare two attributes for their value identity. Return true if the
4875 attribute values are known to be equal; otherwise return false.
4876 */
4877
4878 bool
4879 attribute_value_equal (const_tree attr1, const_tree attr2)
4880 {
4881 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4882 return true;
4883
4884 if (TREE_VALUE (attr1) != NULL_TREE
4885 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4886 && TREE_VALUE (attr2) != NULL
4887 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4888 return (simple_cst_list_equal (TREE_VALUE (attr1),
4889 TREE_VALUE (attr2)) == 1);
4890
4891 if ((flag_openmp || flag_openmp_simd)
4892 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4893 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4894 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4895 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4896 TREE_VALUE (attr2));
4897
4898 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4899 }
4900
4901 /* Return 0 if the attributes for two types are incompatible, 1 if they
4902 are compatible, and 2 if they are nearly compatible (which causes a
4903 warning to be generated). */
4904 int
4905 comp_type_attributes (const_tree type1, const_tree type2)
4906 {
4907 const_tree a1 = TYPE_ATTRIBUTES (type1);
4908 const_tree a2 = TYPE_ATTRIBUTES (type2);
4909 const_tree a;
4910
4911 if (a1 == a2)
4912 return 1;
4913 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4914 {
4915 const struct attribute_spec *as;
4916 const_tree attr;
4917
4918 as = lookup_attribute_spec (get_attribute_name (a));
4919 if (!as || as->affects_type_identity == false)
4920 continue;
4921
4922 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4923 if (!attr || !attribute_value_equal (a, attr))
4924 break;
4925 }
4926 if (!a)
4927 {
4928 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4929 {
4930 const struct attribute_spec *as;
4931
4932 as = lookup_attribute_spec (get_attribute_name (a));
4933 if (!as || as->affects_type_identity == false)
4934 continue;
4935
4936 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4937 break;
4938 /* We don't need to compare trees again, as we did this
4939 already in the first loop. */
4940 }
4941 /* All attributes affecting type identity are equal, so
4942 there is no need to call the target hook for comparison. */
4943 if (!a)
4944 return 1;
4945 }
4946 /* As some type combinations - like default calling-convention - might
4947 be compatible, we have to call the target hook to get the final result. */
4948 return targetm.comp_type_attributes (type1, type2);
4949 }
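
/* Illustrative sketch (editorial addition): a caller interpreting the
   tri-state result of comp_type_attributes.  TYPE1 and TYPE2 are
   hypothetical; the block is kept under #if 0 so it is not compiled.  */
#if 0
  switch (comp_type_attributes (type1, type2))
    {
    case 0:   /* Incompatible: treat the types as distinct.  */
      break;
    case 2:   /* Nearly compatible: accept, but warn.  */
      break;
    default:  /* 1: fully compatible.  */
      break;
    }
#endif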
4950
4951 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES list
4952 is ATTRIBUTE.
4953
4954 Record such modified types already made so we don't make duplicates. */
4955
4956 tree
4957 build_type_attribute_variant (tree ttype, tree attribute)
4958 {
4959 return build_type_attribute_qual_variant (ttype, attribute,
4960 TYPE_QUALS (ttype));
4961 }
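
/* Illustrative sketch (editorial addition): attaching an attribute list
   to a variant of a type without modifying the original, the way an
   attribute handler typically does.  The attribute name "my_attr" is
   hypothetical; the block is kept under #if 0 so it is not compiled.  */
#if 0
  tree attrs = tree_cons (get_identifier ("my_attr"), NULL_TREE,
                          TYPE_ATTRIBUTES (type));
  tree variant = build_type_attribute_variant (type, attrs);
#endif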
4962
4963
4964 /* Reset the expression *EXPR_P, a size or position.
4965
4966 ??? We could reset all non-constant sizes or positions. But it's cheap
4967 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4968
4969 We need to reset self-referential sizes or positions because they cannot
4970 be gimplified and thus can contain a CALL_EXPR after the gimplification
4971 is finished, which will run afoul of LTO streaming. And they need to be
4972 reset to something essentially dummy but not constant, so as to preserve
4973 the properties of the object they are attached to. */
4974
4975 static inline void
4976 free_lang_data_in_one_sizepos (tree *expr_p)
4977 {
4978 tree expr = *expr_p;
4979 if (CONTAINS_PLACEHOLDER_P (expr))
4980 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4981 }
4982
4983
4984 /* Reset all the fields in a binfo node BINFO. We only keep
4985 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4986
4987 static void
4988 free_lang_data_in_binfo (tree binfo)
4989 {
4990 unsigned i;
4991 tree t;
4992
4993 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4994
4995 BINFO_VIRTUALS (binfo) = NULL_TREE;
4996 BINFO_BASE_ACCESSES (binfo) = NULL;
4997 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4998 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4999
5000 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5001 free_lang_data_in_binfo (t);
5002 }
5003
5004
5005 /* Reset all language specific information still present in TYPE. */
5006
5007 static void
5008 free_lang_data_in_type (tree type)
5009 {
5010 gcc_assert (TYPE_P (type));
5011
5012 /* Give the FE a chance to remove its own data first. */
5013 lang_hooks.free_lang_data (type);
5014
5015 TREE_LANG_FLAG_0 (type) = 0;
5016 TREE_LANG_FLAG_1 (type) = 0;
5017 TREE_LANG_FLAG_2 (type) = 0;
5018 TREE_LANG_FLAG_3 (type) = 0;
5019 TREE_LANG_FLAG_4 (type) = 0;
5020 TREE_LANG_FLAG_5 (type) = 0;
5021 TREE_LANG_FLAG_6 (type) = 0;
5022
5023 if (TREE_CODE (type) == FUNCTION_TYPE)
5024 {
5025 /* Remove the const and volatile qualifiers from arguments. The
5026 C++ front end removes them, but the C front end does not,
5027 leading to false ODR violation errors when merging two
5028 instances of the same function signature compiled by
5029 different front ends. */
5030 tree p;
5031
5032 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5033 {
5034 tree arg_type = TREE_VALUE (p);
5035
5036 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5037 {
5038 int quals = TYPE_QUALS (arg_type)
5039 & ~TYPE_QUAL_CONST
5040 & ~TYPE_QUAL_VOLATILE;
5041 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5042 free_lang_data_in_type (TREE_VALUE (p));
5043 }
5044 /* C++ FE uses TREE_PURPOSE to store initial values. */
5045 TREE_PURPOSE (p) = NULL;
5046 }
5047 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5048 TYPE_MINVAL (type) = NULL;
5049 }
5050 if (TREE_CODE (type) == METHOD_TYPE)
5051 {
5052 tree p;
5053
5054 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5055 {
5056 /* C++ FE uses TREE_PURPOSE to store initial values. */
5057 TREE_PURPOSE (p) = NULL;
5058 }
5059 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5060 TYPE_MINVAL (type) = NULL;
5061 }
5062
5063 /* Remove members that are not actually FIELD_DECLs from the field
5064 list of an aggregate. These occur in C++. */
5065 if (RECORD_OR_UNION_TYPE_P (type))
5066 {
5067 tree prev, member;
5068
5069 /* Note that TYPE_FIELDS can be shared across distinct
5070 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5071 to be removed, we cannot set its TREE_CHAIN to NULL.
5072 Otherwise, we would not be able to find all the other fields
5073 in the other instances of this TREE_TYPE.
5074
5075 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5076 prev = NULL_TREE;
5077 member = TYPE_FIELDS (type);
5078 while (member)
5079 {
5080 if (TREE_CODE (member) == FIELD_DECL
5081 || TREE_CODE (member) == TYPE_DECL)
5082 {
5083 if (prev)
5084 TREE_CHAIN (prev) = member;
5085 else
5086 TYPE_FIELDS (type) = member;
5087 prev = member;
5088 }
5089
5090 member = TREE_CHAIN (member);
5091 }
5092
5093 if (prev)
5094 TREE_CHAIN (prev) = NULL_TREE;
5095 else
5096 TYPE_FIELDS (type) = NULL_TREE;
5097
5098 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5099 and dangles the pointer from time to time. */
5100 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5101 TYPE_VFIELD (type) = NULL_TREE;
5102
5103 TYPE_METHODS (type) = NULL_TREE;
5104 if (TYPE_BINFO (type))
5105 {
5106 free_lang_data_in_binfo (TYPE_BINFO (type));
5107 /* We need to preserve the link to bases and the virtual table for all
5108 polymorphic types to keep the devirtualization machinery working.
5109 Debug output cares only about bases, but we also output the
5110 virtual table pointers so that merging -fdevirtualize and
5111 -fno-devirtualize units is easier. */
5112 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5113 || !flag_devirtualize)
5114 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5115 && !BINFO_VTABLE (TYPE_BINFO (type)))
5116 || debug_info_level != DINFO_LEVEL_NONE))
5117 TYPE_BINFO (type) = NULL;
5118 }
5119 }
5120 else
5121 {
5122 /* For non-aggregate types, clear out the language slot (which
5123 overloads TYPE_BINFO). */
5124 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5125
5126 if (INTEGRAL_TYPE_P (type)
5127 || SCALAR_FLOAT_TYPE_P (type)
5128 || FIXED_POINT_TYPE_P (type))
5129 {
5130 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5131 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5132 }
5133 }
5134
5135 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5136 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5137
5138 if (TYPE_CONTEXT (type)
5139 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5140 {
5141 tree ctx = TYPE_CONTEXT (type);
5142 do
5143 {
5144 ctx = BLOCK_SUPERCONTEXT (ctx);
5145 }
5146 while (ctx && TREE_CODE (ctx) == BLOCK);
5147 TYPE_CONTEXT (type) = ctx;
5148 }
5149 }
5150
5151
5152 /* Return true if DECL may need an assembler name to be set. */
5153
5154 static inline bool
5155 need_assembler_name_p (tree decl)
5156 {
5157 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5158 merging. */
5159 if (flag_lto_odr_type_mering
5160 && TREE_CODE (decl) == TYPE_DECL
5161 && DECL_NAME (decl)
5162 && decl == TYPE_NAME (TREE_TYPE (decl))
5163 && !is_lang_specific (TREE_TYPE (decl))
5164 /* Save some work. Names of builtin types are always derived from
5165 properties of their main variant. A special case is integer types,
5166 where mangling does distinguish between char/signed char/unsigned
5167 char etc. Storing names for these allows e.g.
5168 -fno-signed-char/-fsigned-char mismatches to be handled well.
5169
5170 See cp/mangle.c:write_builtin_type for details. */
5171 && (TREE_CODE (TREE_TYPE (decl)) != VOID_TYPE
5172 && TREE_CODE (TREE_TYPE (decl)) != BOOLEAN_TYPE
5173 && TREE_CODE (TREE_TYPE (decl)) != REAL_TYPE
5174 && TREE_CODE (TREE_TYPE (decl)) != FIXED_POINT_TYPE)
5175 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5176 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5177 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5178 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5179 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5180 if (TREE_CODE (decl) != FUNCTION_DECL
5181 && TREE_CODE (decl) != VAR_DECL)
5182 return false;
5183
5184 /* If DECL already has its assembler name set, it does not need a
5185 new one. */
5186 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5187 || DECL_ASSEMBLER_NAME_SET_P (decl))
5188 return false;
5189
5190 /* Abstract decls do not need an assembler name. */
5191 if (DECL_ABSTRACT_P (decl))
5192 return false;
5193
5194 /* For VAR_DECLs, only static, public and external symbols need an
5195 assembler name. */
5196 if (TREE_CODE (decl) == VAR_DECL
5197 && !TREE_STATIC (decl)
5198 && !TREE_PUBLIC (decl)
5199 && !DECL_EXTERNAL (decl))
5200 return false;
5201
5202 if (TREE_CODE (decl) == FUNCTION_DECL)
5203 {
5204 /* Do not set assembler name on builtins. Allow RTL expansion to
5205 decide whether to expand inline or via a regular call. */
5206 if (DECL_BUILT_IN (decl)
5207 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5208 return false;
5209
5210 /* Functions represented in the callgraph need an assembler name. */
5211 if (cgraph_node::get (decl) != NULL)
5212 return true;
5213
5214 /* Unused and not public functions don't need an assembler name. */
5215 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5216 return false;
5217 }
5218
5219 return true;
5220 }
5221
5222
5223 /* Reset all language specific information still present in symbol
5224 DECL. */
5225
5226 static void
5227 free_lang_data_in_decl (tree decl)
5228 {
5229 gcc_assert (DECL_P (decl));
5230
5231 /* Give the FE a chance to remove its own data first. */
5232 lang_hooks.free_lang_data (decl);
5233
5234 TREE_LANG_FLAG_0 (decl) = 0;
5235 TREE_LANG_FLAG_1 (decl) = 0;
5236 TREE_LANG_FLAG_2 (decl) = 0;
5237 TREE_LANG_FLAG_3 (decl) = 0;
5238 TREE_LANG_FLAG_4 (decl) = 0;
5239 TREE_LANG_FLAG_5 (decl) = 0;
5240 TREE_LANG_FLAG_6 (decl) = 0;
5241
5242 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5243 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5244 if (TREE_CODE (decl) == FIELD_DECL)
5245 {
5246 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5247 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5248 DECL_QUALIFIER (decl) = NULL_TREE;
5249 }
5250
5251 if (TREE_CODE (decl) == FUNCTION_DECL)
5252 {
5253 struct cgraph_node *node;
5254 if (!(node = cgraph_node::get (decl))
5255 || (!node->definition && !node->clones))
5256 {
5257 if (node)
5258 node->release_body ();
5259 else
5260 {
5261 release_function_body (decl);
5262 DECL_ARGUMENTS (decl) = NULL;
5263 DECL_RESULT (decl) = NULL;
5264 DECL_INITIAL (decl) = error_mark_node;
5265 }
5266 }
5267 if (gimple_has_body_p (decl))
5268 {
5269 tree t;
5270
5271 /* If DECL has a gimple body, then the context for its
5272 arguments must be DECL. Otherwise, it doesn't really
5273 matter, as we will not be emitting any code for DECL. In
5274 general, there may be other instances of DECL created by
5275 the front end and since PARM_DECLs are generally shared,
5276 their DECL_CONTEXT changes as the replicas of DECL are
5277 created. The only time where DECL_CONTEXT is important
5278 is for the FUNCTION_DECLs that have a gimple body (since
5279 the PARM_DECL will be used in the function's body). */
5280 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5281 DECL_CONTEXT (t) = decl;
5282 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5283 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5284 = target_option_default_node;
5285 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5286 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5287 = optimization_default_node;
5288 }
5289
5290 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5291 At this point, it is not needed anymore. */
5292 DECL_SAVED_TREE (decl) = NULL_TREE;
5293
5294 /* Clear the abstract origin if it refers to a method. Otherwise
5295 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5296 origin will not be output correctly. */
5297 if (DECL_ABSTRACT_ORIGIN (decl)
5298 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5299 && RECORD_OR_UNION_TYPE_P
5300 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5301 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5302
5303 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5304 DECL_VINDEX referring to itself into a vtable slot number as it
5305 should. This happens with functions that are copied and then forgotten
5306 about. Just clear it; it won't matter anymore. */
5307 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5308 DECL_VINDEX (decl) = NULL_TREE;
5309 }
5310 else if (TREE_CODE (decl) == VAR_DECL)
5311 {
5312 if ((DECL_EXTERNAL (decl)
5313 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5314 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5315 DECL_INITIAL (decl) = NULL_TREE;
5316 }
5317 else if (TREE_CODE (decl) == TYPE_DECL
5318 || TREE_CODE (decl) == FIELD_DECL)
5319 DECL_INITIAL (decl) = NULL_TREE;
5320 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5321 && DECL_INITIAL (decl)
5322 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5323 {
5324 /* Strip builtins from the translation-unit BLOCK. We still have targets
5325 without builtin_decl_explicit support and also builtins are shared
5326 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5327 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5328 while (*nextp)
5329 {
5330 tree var = *nextp;
5331 if (TREE_CODE (var) == FUNCTION_DECL
5332 && DECL_BUILT_IN (var))
5333 *nextp = TREE_CHAIN (var);
5334 else
5335 nextp = &TREE_CHAIN (var);
5336 }
5337 }
5338 }
5339
5340
5341 /* Data used when collecting DECLs and TYPEs for language data removal. */
5342
5343 struct free_lang_data_d
5344 {
5345 /* Worklist to avoid excessive recursion. */
5346 vec<tree> worklist;
5347
5348 /* Set of traversed objects. Used to avoid duplicate visits. */
5349 hash_set<tree> *pset;
5350
5351 /* Array of symbols to process with free_lang_data_in_decl. */
5352 vec<tree> decls;
5353
5354 /* Array of types to process with free_lang_data_in_type. */
5355 vec<tree> types;
5356 };
5357
5358
5359 /* Save all language fields needed to generate proper debug information
5360 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5361
5362 static void
5363 save_debug_info_for_decl (tree t)
5364 {
5365 /*struct saved_debug_info_d *sdi;*/
5366
5367 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5368
5369 /* FIXME. Partial implementation for saving debug info removed. */
5370 }
5371
5372
5373 /* Save all language fields needed to generate proper debug information
5374 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5375
5376 static void
5377 save_debug_info_for_type (tree t)
5378 {
5379 /*struct saved_debug_info_d *sdi;*/
5380
5381 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5382
5383 /* FIXME. Partial implementation for saving debug info removed. */
5384 }
5385
5386
5387 /* Add type or decl T to one of the list of tree nodes that need their
5388 language data removed. The lists are held inside FLD. */
5389
5390 static void
5391 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5392 {
5393 if (DECL_P (t))
5394 {
5395 fld->decls.safe_push (t);
5396 if (debug_info_level > DINFO_LEVEL_TERSE)
5397 save_debug_info_for_decl (t);
5398 }
5399 else if (TYPE_P (t))
5400 {
5401 fld->types.safe_push (t);
5402 if (debug_info_level > DINFO_LEVEL_TERSE)
5403 save_debug_info_for_type (t);
5404 }
5405 else
5406 gcc_unreachable ();
5407 }
5408
5409 /* Push tree node T into FLD->WORKLIST. */
5410
5411 static inline void
5412 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5413 {
5414 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5415 fld->worklist.safe_push ((t));
5416 }
5417
5418
5419 /* Operand callback helper for free_lang_data_in_node. *TP is the
5420 subtree operand being considered. */
5421
5422 static tree
5423 find_decls_types_r (tree *tp, int *ws, void *data)
5424 {
5425 tree t = *tp;
5426 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5427
5428 if (TREE_CODE (t) == TREE_LIST)
5429 return NULL_TREE;
5430
5431 /* Language specific nodes will be removed, so there is no need
5432 to gather anything under them. */
5433 if (is_lang_specific (t))
5434 {
5435 *ws = 0;
5436 return NULL_TREE;
5437 }
5438
5439 if (DECL_P (t))
5440 {
5441 /* Note that walk_tree does not traverse every possible field in
5442 decls, so we have to do our own traversals here. */
5443 add_tree_to_fld_list (t, fld);
5444
5445 fld_worklist_push (DECL_NAME (t), fld);
5446 fld_worklist_push (DECL_CONTEXT (t), fld);
5447 fld_worklist_push (DECL_SIZE (t), fld);
5448 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5449
5450 /* We are going to remove everything under DECL_INITIAL for
5451 TYPE_DECLs. No point walking them. */
5452 if (TREE_CODE (t) != TYPE_DECL)
5453 fld_worklist_push (DECL_INITIAL (t), fld);
5454
5455 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5456 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5457
5458 if (TREE_CODE (t) == FUNCTION_DECL)
5459 {
5460 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5461 fld_worklist_push (DECL_RESULT (t), fld);
5462 }
5463 else if (TREE_CODE (t) == TYPE_DECL)
5464 {
5465 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5466 }
5467 else if (TREE_CODE (t) == FIELD_DECL)
5468 {
5469 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5470 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5471 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5472 fld_worklist_push (DECL_FCONTEXT (t), fld);
5473 }
5474
5475 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5476 && DECL_HAS_VALUE_EXPR_P (t))
5477 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5478
5479 if (TREE_CODE (t) != FIELD_DECL
5480 && TREE_CODE (t) != TYPE_DECL)
5481 fld_worklist_push (TREE_CHAIN (t), fld);
5482 *ws = 0;
5483 }
5484 else if (TYPE_P (t))
5485 {
5486 /* Note that walk_tree does not traverse every possible field in
5487 types, so we have to do our own traversals here. */
5488 add_tree_to_fld_list (t, fld);
5489
5490 if (!RECORD_OR_UNION_TYPE_P (t))
5491 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5492 fld_worklist_push (TYPE_SIZE (t), fld);
5493 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5494 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5495 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5496 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5497 fld_worklist_push (TYPE_NAME (t), fld);
5498 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5499 them and thus do not want to reach unused pointer types
5500 this way. */
5501 if (!POINTER_TYPE_P (t))
5502 fld_worklist_push (TYPE_MINVAL (t), fld);
5503 if (!RECORD_OR_UNION_TYPE_P (t))
5504 fld_worklist_push (TYPE_MAXVAL (t), fld);
5505 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5506 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5507 do not want to reach unused variants this way. */
5508 if (TYPE_CONTEXT (t))
5509 {
5510 tree ctx = TYPE_CONTEXT (t);
5511 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5512 So push that instead. */
5513 while (ctx && TREE_CODE (ctx) == BLOCK)
5514 ctx = BLOCK_SUPERCONTEXT (ctx);
5515 fld_worklist_push (ctx, fld);
5516 }
5517 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5518 want to reach unused types this way. */
5519
5520 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5521 {
5522 unsigned i;
5523 tree tem;
5524 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5525 fld_worklist_push (TREE_TYPE (tem), fld);
5526 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5527 if (tem
5528 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5529 && TREE_CODE (tem) == TREE_LIST)
5530 do
5531 {
5532 fld_worklist_push (TREE_VALUE (tem), fld);
5533 tem = TREE_CHAIN (tem);
5534 }
5535 while (tem);
5536 }
5537 if (RECORD_OR_UNION_TYPE_P (t))
5538 {
5539 tree tem;
5540 /* Push all TYPE_FIELDS - interesting and non-interesting
5541 entries can be interleaved. */
5542 tem = TYPE_FIELDS (t);
5543 while (tem)
5544 {
5545 if (TREE_CODE (tem) == FIELD_DECL
5546 || TREE_CODE (tem) == TYPE_DECL)
5547 fld_worklist_push (tem, fld);
5548 tem = TREE_CHAIN (tem);
5549 }
5550 }
5551
5552 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5553 *ws = 0;
5554 }
5555 else if (TREE_CODE (t) == BLOCK)
5556 {
5557 tree tem;
5558 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5559 fld_worklist_push (tem, fld);
5560 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5561 fld_worklist_push (tem, fld);
5562 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5563 }
5564
5565 if (TREE_CODE (t) != IDENTIFIER_NODE
5566 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5567 fld_worklist_push (TREE_TYPE (t), fld);
5568
5569 return NULL_TREE;
5570 }
5571
5572
5573 /* Find decls and types in T. */
5574
5575 static void
5576 find_decls_types (tree t, struct free_lang_data_d *fld)
5577 {
5578 while (1)
5579 {
5580 if (!fld->pset->contains (t))
5581 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5582 if (fld->worklist.is_empty ())
5583 break;
5584 t = fld->worklist.pop ();
5585 }
5586 }
5587
5588 /* Translate all the types in LIST into the corresponding runtime
5589 types. */
5590
5591 static tree
5592 get_eh_types_for_runtime (tree list)
5593 {
5594 tree head, prev;
5595
5596 if (list == NULL_TREE)
5597 return NULL_TREE;
5598
5599 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5600 prev = head;
5601 list = TREE_CHAIN (list);
5602 while (list)
5603 {
5604 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5605 TREE_CHAIN (prev) = n;
5606 prev = TREE_CHAIN (prev);
5607 list = TREE_CHAIN (list);
5608 }
5609
5610 return head;
5611 }
5612
5613
5614 /* Find decls and types referenced in EH region R and store them in
5615 FLD->DECLS and FLD->TYPES. */
5616
5617 static void
5618 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5619 {
5620 switch (r->type)
5621 {
5622 case ERT_CLEANUP:
5623 break;
5624
5625 case ERT_TRY:
5626 {
5627 eh_catch c;
5628
5629 /* The types referenced in each catch must first be changed to the
5630 EH types used at runtime. This removes references to FE types
5631 in the region. */
5632 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5633 {
5634 c->type_list = get_eh_types_for_runtime (c->type_list);
5635 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5636 }
5637 }
5638 break;
5639
5640 case ERT_ALLOWED_EXCEPTIONS:
5641 r->u.allowed.type_list
5642 = get_eh_types_for_runtime (r->u.allowed.type_list);
5643 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5644 break;
5645
5646 case ERT_MUST_NOT_THROW:
5647 walk_tree (&r->u.must_not_throw.failure_decl,
5648 find_decls_types_r, fld, fld->pset);
5649 break;
5650 }
5651 }
5652
5653
5654 /* Find decls and types referenced in cgraph node N and store them in
5655 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5656 look for *every* kind of DECL and TYPE node reachable from N,
5657 including those embedded inside types and decls (i.e., TYPE_DECLs,
5658 NAMESPACE_DECLs, etc). */
5659
5660 static void
5661 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5662 {
5663 basic_block bb;
5664 struct function *fn;
5665 unsigned ix;
5666 tree t;
5667
5668 find_decls_types (n->decl, fld);
5669
5670 if (!gimple_has_body_p (n->decl))
5671 return;
5672
5673 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5674
5675 fn = DECL_STRUCT_FUNCTION (n->decl);
5676
5677 /* Traverse locals. */
5678 FOR_EACH_LOCAL_DECL (fn, ix, t)
5679 find_decls_types (t, fld);
5680
5681 /* Traverse EH regions in FN. */
5682 {
5683 eh_region r;
5684 FOR_ALL_EH_REGION_FN (r, fn)
5685 find_decls_types_in_eh_region (r, fld);
5686 }
5687
5688 /* Traverse every statement in FN. */
5689 FOR_EACH_BB_FN (bb, fn)
5690 {
5691 gphi_iterator psi;
5692 gimple_stmt_iterator si;
5693 unsigned i;
5694
5695 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5696 {
5697 gphi *phi = psi.phi ();
5698
5699 for (i = 0; i < gimple_phi_num_args (phi); i++)
5700 {
5701 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5702 find_decls_types (*arg_p, fld);
5703 }
5704 }
5705
5706 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5707 {
5708 gimple stmt = gsi_stmt (si);
5709
5710 if (is_gimple_call (stmt))
5711 find_decls_types (gimple_call_fntype (stmt), fld);
5712
5713 for (i = 0; i < gimple_num_ops (stmt); i++)
5714 {
5715 tree arg = gimple_op (stmt, i);
5716 find_decls_types (arg, fld);
5717 }
5718 }
5719 }
5720 }
5721
5722
5723 /* Find decls and types referenced in varpool node N and store them in
5724 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5725 look for *every* kind of DECL and TYPE node reachable from N,
5726 including those embedded inside types and decls (i.e., TYPE_DECLs,
5727 NAMESPACE_DECLs, etc). */
5728
5729 static void
5730 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5731 {
5732 find_decls_types (v->decl, fld);
5733 }
5734
5735 /* If T needs an assembler name, have one created for it. */
5736
5737 void
5738 assign_assembler_name_if_neeeded (tree t)
5739 {
5740 if (need_assembler_name_p (t))
5741 {
5742 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5743 diagnostics that use input_location to show locus
5744 information. The problem here is that, at this point,
5745 input_location is generally anchored to the end of the file
5746 (since the parser is long gone), so we don't have a good
5747 position to pin it to.
5748
5749 To alleviate this problem, this uses the location of T's
5750 declaration. Examples of this are
5751 testsuite/g++.dg/template/cond2.C and
5752 testsuite/g++.dg/template/pr35240.C. */
5753 location_t saved_location = input_location;
5754 input_location = DECL_SOURCE_LOCATION (t);
5755
5756 decl_assembler_name (t);
5757
5758 input_location = saved_location;
5759 }
5760 }
5761
5762
5763 /* Free language specific information for every operand and expression
5764 in every node of the call graph. This process operates in three stages:
5765
5766 1- Every callgraph node and varpool node is traversed looking for
5767 decls and types embedded in them. This is a more exhaustive
5768 search than that done by find_referenced_vars, because it will
5769 also collect individual fields, decls embedded in types, etc.
5770
5771 2- All the decls found are sent to free_lang_data_in_decl.
5772
5773 3- All the types found are sent to free_lang_data_in_type.
5774
5775 The ordering between decls and types is important because
5776 free_lang_data_in_decl sets assembler names, which includes
5777 mangling. So types cannot be freed up until assembler names have
5778 been set up. */
5779
5780 static void
5781 free_lang_data_in_cgraph (void)
5782 {
5783 struct cgraph_node *n;
5784 varpool_node *v;
5785 struct free_lang_data_d fld;
5786 tree t;
5787 unsigned i;
5788 alias_pair *p;
5789
5790 /* Initialize sets and arrays to store referenced decls and types. */
5791 fld.pset = new hash_set<tree>;
5792 fld.worklist.create (0);
5793 fld.decls.create (100);
5794 fld.types.create (100);
5795
5796 /* Find decls and types in the body of every function in the callgraph. */
5797 FOR_EACH_FUNCTION (n)
5798 find_decls_types_in_node (n, &fld);
5799
5800 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5801 find_decls_types (p->decl, &fld);
5802
5803 /* Find decls and types in every varpool symbol. */
5804 FOR_EACH_VARIABLE (v)
5805 find_decls_types_in_var (v, &fld);
5806
5807 /* Set the assembler name on every decl found. We need to do this
5808 now because free_lang_data_in_decl will invalidate data needed
5809 for mangling. This breaks mangling on interdependent decls. */
5810 FOR_EACH_VEC_ELT (fld.decls, i, t)
5811 assign_assembler_name_if_neeeded (t);
5812
5813 /* Traverse every decl found freeing its language data. */
5814 FOR_EACH_VEC_ELT (fld.decls, i, t)
5815 free_lang_data_in_decl (t);
5816
5817 /* Traverse every type found freeing its language data. */
5818 FOR_EACH_VEC_ELT (fld.types, i, t)
5819 free_lang_data_in_type (t);
5820 #ifdef ENABLE_CHECKING
5821 FOR_EACH_VEC_ELT (fld.types, i, t)
5822 verify_type (t);
5823 #endif
5824
5825 delete fld.pset;
5826 fld.worklist.release ();
5827 fld.decls.release ();
5828 fld.types.release ();
5829 }
5830
5831
5832 /* Free resources that are used by the FE but are not needed once it is done. */
5833
5834 static unsigned
5835 free_lang_data (void)
5836 {
5837 unsigned i;
5838
5839 /* If we are the LTO frontend we have freed lang-specific data already. */
5840 if (in_lto_p
5841 || (!flag_generate_lto && !flag_generate_offload))
5842 return 0;
5843
5844 /* Allocate and assign alias sets to the standard integer types
5845 while the slots are still set up the way the frontends generated them. */
5846 for (i = 0; i < itk_none; ++i)
5847 if (integer_types[i])
5848 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5849
5850 /* Traverse the IL resetting language specific information for
5851 operands, expressions, etc. */
5852 free_lang_data_in_cgraph ();
5853
5854 /* Create gimple variants for common types. */
5855 ptrdiff_type_node = integer_type_node;
5856 fileptr_type_node = ptr_type_node;
5857
5858 /* Reset some langhooks. Do not reset types_compatible_p, it may
5859 still be used indirectly via the get_alias_set langhook. */
5860 lang_hooks.dwarf_name = lhd_dwarf_name;
5861 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5862 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5863
5864 /* We do not want the default decl_assembler_name implementation;
5865 rather, once we have fixed everything, we want a wrapper around it
5866 asserting that all non-local symbols already got their assembler
5867 name and producing assembler names only for local symbols. Or rather,
5868 make sure we never call decl_assembler_name on local symbols and
5869 devise a separate, middle-end private scheme for it. */
5870
5871 /* Reset diagnostic machinery. */
5872 tree_diagnostics_defaults (global_dc);
5873
5874 return 0;
5875 }
5876
5877
5878 namespace {
5879
5880 const pass_data pass_data_ipa_free_lang_data =
5881 {
5882 SIMPLE_IPA_PASS, /* type */
5883 "*free_lang_data", /* name */
5884 OPTGROUP_NONE, /* optinfo_flags */
5885 TV_IPA_FREE_LANG_DATA, /* tv_id */
5886 0, /* properties_required */
5887 0, /* properties_provided */
5888 0, /* properties_destroyed */
5889 0, /* todo_flags_start */
5890 0, /* todo_flags_finish */
5891 };
5892
5893 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5894 {
5895 public:
5896 pass_ipa_free_lang_data (gcc::context *ctxt)
5897 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5898 {}
5899
5900 /* opt_pass methods: */
5901 virtual unsigned int execute (function *) { return free_lang_data (); }
5902
5903 }; // class pass_ipa_free_lang_data
5904
5905 } // anon namespace
5906
5907 simple_ipa_opt_pass *
5908 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5909 {
5910 return new pass_ipa_free_lang_data (ctxt);
5911 }
5912
5913 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5914 ATTR_NAME. Also used internally by remove_attribute(). */
5915 bool
5916 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5917 {
5918 size_t ident_len = IDENTIFIER_LENGTH (ident);
5919
5920 if (ident_len == attr_len)
5921 {
5922 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5923 return true;
5924 }
5925 else if (ident_len == attr_len + 4)
5926 {
5927 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
5928 '__text__'. */
5929 const char *p = IDENTIFIER_POINTER (ident);
5930 if (p[0] == '_' && p[1] == '_'
5931 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5932 && strncmp (attr_name, p + 2, attr_len) == 0)
5933 return true;
5934 }
5935
5936 return false;
5937 }
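
/* A minimal illustration (not from the GCC sources) of the matching rule
   above: the canonical spelling and the double-underscore spelling of an
   attribute name compare equal, but a one-sided underscore form does not.
   "noreturn" is just a convenient example name here.

     const char *name = "noreturn";
     private_is_attribute_p (name, strlen (name),
                             get_identifier ("noreturn"));      -> true
     private_is_attribute_p (name, strlen (name),
                             get_identifier ("__noreturn__"));  -> true
     private_is_attribute_p (name, strlen (name),
                             get_identifier ("__noreturn"));    -> false  */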
5938
5939 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5940 of ATTR_NAME, and LIST is not NULL_TREE. */
5941 tree
5942 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5943 {
5944 while (list)
5945 {
5946 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5947
5948 if (ident_len == attr_len)
5949 {
5950 if (!strcmp (attr_name,
5951 IDENTIFIER_POINTER (get_attribute_name (list))))
5952 break;
5953 }
5954 /* TODO: If we made sure that attributes were stored in the
5955 canonical form without '__...__' (ie, as in 'text' as opposed
5956 to '__text__') then we could avoid the following case. */
5957 else if (ident_len == attr_len + 4)
5958 {
5959 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5960 if (p[0] == '_' && p[1] == '_'
5961 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5962 && strncmp (attr_name, p + 2, attr_len) == 0)
5963 break;
5964 }
5965 list = TREE_CHAIN (list);
5966 }
5967
5968 return list;
5969 }
5970
5971 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5972 return a pointer to the first list element whose attribute name
5973 starts with ATTR_NAME, or NULL_TREE if there is none. ATTR_NAME
5974 must be in the form 'text' (not '__text__'). */
5975
5976 tree
5977 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5978 tree list)
5979 {
5980 while (list)
5981 {
5982 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5983
5984 if (attr_len > ident_len)
5985 {
5986 list = TREE_CHAIN (list);
5987 continue;
5988 }
5989
5990 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5991
5992 if (strncmp (attr_name, p, attr_len) == 0)
5993 break;
5994
5995 /* TODO: If we made sure that attributes were stored in the
5996 canonical form without '__...__' (ie, as in 'text' as opposed
5997 to '__text__') then we could avoid the following case. */
5998 if (p[0] == '_' && p[1] == '_'
5999 && strncmp (attr_name, p + 2, attr_len) == 0)
6000 break;
6001
6002 list = TREE_CHAIN (list);
6003 }
6004
6005 return list;
6006 }
6007
6008
6009 /* A variant of lookup_attribute() that can be used with an identifier
6010 as the first argument, and where the identifier can be either
6011 'text' or '__text__'.
6012
6013 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6014 return a pointer to the attribute's list element if the attribute
6015 is part of the list, or NULL_TREE if not found. If the attribute
6016 appears more than once, this only returns the first occurrence; the
6017 TREE_CHAIN of the return value should be passed back in if further
6018 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6019 can be in the form 'text' or '__text__'. */
6020 static tree
6021 lookup_ident_attribute (tree attr_identifier, tree list)
6022 {
6023 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6024
6025 while (list)
6026 {
6027 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6028 == IDENTIFIER_NODE);
6029
6030 /* Identifiers can be compared directly for equality. */
6031 if (attr_identifier == get_attribute_name (list))
6032 break;
6033
6034 /* If they are not equal, they may still be one in the form
6035 'text' while the other one is in the form '__text__'. TODO:
6036 If we were storing attributes in normalized 'text' form, then
6037 this could all go away and we could take full advantage of
6038 the fact that we're comparing identifiers. :-) */
6039 {
6040 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
6041 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6042
6043 if (ident_len == attr_len + 4)
6044 {
6045 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6046 const char *q = IDENTIFIER_POINTER (attr_identifier);
6047 if (p[0] == '_' && p[1] == '_'
6048 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6049 && strncmp (q, p + 2, attr_len) == 0)
6050 break;
6051 }
6052 else if (ident_len + 4 == attr_len)
6053 {
6054 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6055 const char *q = IDENTIFIER_POINTER (attr_identifier);
6056 if (q[0] == '_' && q[1] == '_'
6057 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
6058 && strncmp (q + 2, p, ident_len) == 0)
6059 break;
6060 }
6061 }
6062 list = TREE_CHAIN (list);
6063 }
6064
6065 return list;
6066 }
6067
6068 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6069 modified list. */
6070
6071 tree
6072 remove_attribute (const char *attr_name, tree list)
6073 {
6074 tree *p;
6075 size_t attr_len = strlen (attr_name);
6076
6077 gcc_checking_assert (attr_name[0] != '_');
6078
6079 for (p = &list; *p; )
6080 {
6081 tree l = *p;
6082 /* TODO: If we were storing attributes in normalized form, here
6083 we could use a simple strcmp(). */
6084 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6085 *p = TREE_CHAIN (l);
6086 else
6087 p = &TREE_CHAIN (l);
6088 }
6089
6090 return list;
6091 }
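
/* Sketch of a typical use (illustrative only; DECL below stands for any
   decl carrying an attribute list).  The name must be given in its
   canonical form, without underscores, or the gcc_checking_assert above
   fires:

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   Both the "deprecated" and "__deprecated__" spellings in the list are
   removed, since the comparison goes through private_is_attribute_p.  */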
6092
6093 /* Return an attribute list that is the union of a1 and a2. */
6094
6095 tree
6096 merge_attributes (tree a1, tree a2)
6097 {
6098 tree attributes;
6099
6100 /* Either one unset? Take the set one. */
6101
6102 if ((attributes = a1) == 0)
6103 attributes = a2;
6104
6105 /* One that completely contains the other? Take it. */
6106
6107 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6108 {
6109 if (attribute_list_contained (a2, a1))
6110 attributes = a2;
6111 else
6112 {
6113 /* Pick the longest list, and hang the other list onto it. */
6114
6115 if (list_length (a1) < list_length (a2))
6116 attributes = a2, a2 = a1;
6117
6118 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6119 {
6120 tree a;
6121 for (a = lookup_ident_attribute (get_attribute_name (a2),
6122 attributes);
6123 a != NULL_TREE && !attribute_value_equal (a, a2);
6124 a = lookup_ident_attribute (get_attribute_name (a2),
6125 TREE_CHAIN (a)))
6126 ;
6127 if (a == NULL_TREE)
6128 {
6129 a1 = copy_node (a2);
6130 TREE_CHAIN (a1) = attributes;
6131 attributes = a1;
6132 }
6133 }
6134 }
6135 }
6136 return attributes;
6137 }
6138
6139 /* Given types T1 and T2, merge their attributes and return
6140 the result. */
6141
6142 tree
6143 merge_type_attributes (tree t1, tree t2)
6144 {
6145 return merge_attributes (TYPE_ATTRIBUTES (t1),
6146 TYPE_ATTRIBUTES (t2));
6147 }
6148
6149 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6150 the result. */
6151
6152 tree
6153 merge_decl_attributes (tree olddecl, tree newdecl)
6154 {
6155 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6156 DECL_ATTRIBUTES (newdecl));
6157 }
6158
6159 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6160
6161 /* Specialization of merge_decl_attributes for various Windows targets.
6162
6163 This handles the following situation:
6164
6165 __declspec (dllimport) int foo;
6166 int foo;
6167
6168 The second instance of `foo' nullifies the dllimport. */
6169
6170 tree
6171 merge_dllimport_decl_attributes (tree old, tree new_tree)
6172 {
6173 tree a;
6174 int delete_dllimport_p = 1;
6175
6176 /* What we need to do here is remove from `old' dllimport if it doesn't
6177 appear in `new'. dllimport behaves like extern: if a declaration is
6178 marked dllimport and a definition appears later, then the object
6179 is not dllimport'd. We also remove a `new' dllimport if the old list
6180 contains dllexport: dllexport always overrides dllimport, regardless
6181 of the order of declaration. */
6182 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6183 delete_dllimport_p = 0;
6184 else if (DECL_DLLIMPORT_P (new_tree)
6185 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6186 {
6187 DECL_DLLIMPORT_P (new_tree) = 0;
6188 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6189 "dllimport ignored", new_tree);
6190 }
6191 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6192 {
6193 /* Warn about overriding a symbol that has already been used, e.g.:
6194 extern int __attribute__ ((dllimport)) foo;
6195 int* bar () {return &foo;}
6196 int foo;
6197 */
6198 if (TREE_USED (old))
6199 {
6200 warning (0, "%q+D redeclared without dllimport attribute "
6201 "after being referenced with dll linkage", new_tree);
6202 /* If we have used a variable's address with dllimport linkage,
6203 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6204 decl may already have had TREE_CONSTANT computed.
6205 We still remove the attribute so that assembler code refers
6206 to '&foo' rather than '_imp__foo'. */
6207 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6208 DECL_DLLIMPORT_P (new_tree) = 1;
6209 }
6210
6211 /* Let an inline definition silently override the external reference,
6212 but otherwise warn about attribute inconsistency. */
6213 else if (TREE_CODE (new_tree) == VAR_DECL
6214 || !DECL_DECLARED_INLINE_P (new_tree))
6215 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6216 "previous dllimport ignored", new_tree);
6217 }
6218 else
6219 delete_dllimport_p = 0;
6220
6221 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6222
6223 if (delete_dllimport_p)
6224 a = remove_attribute ("dllimport", a);
6225
6226 return a;
6227 }
6228
6229 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6230 struct attribute_spec.handler. */
6231
6232 tree
6233 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6234 bool *no_add_attrs)
6235 {
6236 tree node = *pnode;
6237 bool is_dllimport;
6238
6239 /* These attributes may apply to structure and union types being created,
6240 but otherwise should pass to the declaration involved. */
6241 if (!DECL_P (node))
6242 {
6243 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6244 | (int) ATTR_FLAG_ARRAY_NEXT))
6245 {
6246 *no_add_attrs = true;
6247 return tree_cons (name, args, NULL_TREE);
6248 }
6249 if (TREE_CODE (node) == RECORD_TYPE
6250 || TREE_CODE (node) == UNION_TYPE)
6251 {
6252 node = TYPE_NAME (node);
6253 if (!node)
6254 return NULL_TREE;
6255 }
6256 else
6257 {
6258 warning (OPT_Wattributes, "%qE attribute ignored",
6259 name);
6260 *no_add_attrs = true;
6261 return NULL_TREE;
6262 }
6263 }
6264
6265 if (TREE_CODE (node) != FUNCTION_DECL
6266 && TREE_CODE (node) != VAR_DECL
6267 && TREE_CODE (node) != TYPE_DECL)
6268 {
6269 *no_add_attrs = true;
6270 warning (OPT_Wattributes, "%qE attribute ignored",
6271 name);
6272 return NULL_TREE;
6273 }
6274
6275 if (TREE_CODE (node) == TYPE_DECL
6276 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6277 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6278 {
6279 *no_add_attrs = true;
6280 warning (OPT_Wattributes, "%qE attribute ignored",
6281 name);
6282 return NULL_TREE;
6283 }
6284
6285 is_dllimport = is_attribute_p ("dllimport", name);
6286
6287 /* Report error on dllimport ambiguities seen now before they cause
6288 any damage. */
6289 if (is_dllimport)
6290 {
6291 /* Honor any target-specific overrides. */
6292 if (!targetm.valid_dllimport_attribute_p (node))
6293 *no_add_attrs = true;
6294
6295 else if (TREE_CODE (node) == FUNCTION_DECL
6296 && DECL_DECLARED_INLINE_P (node))
6297 {
6298 warning (OPT_Wattributes, "inline function %q+D declared as "
6299 "dllimport: attribute ignored", node);
6300 *no_add_attrs = true;
6301 }
6302 /* Like MS, treat definition of dllimported variables and
6303 non-inlined functions on declaration as syntax errors. */
6304 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6305 {
6306 error ("function %q+D definition is marked dllimport", node);
6307 *no_add_attrs = true;
6308 }
6309
6310 else if (TREE_CODE (node) == VAR_DECL)
6311 {
6312 if (DECL_INITIAL (node))
6313 {
6314 error ("variable %q+D definition is marked dllimport",
6315 node);
6316 *no_add_attrs = true;
6317 }
6318
6319 /* `extern' needn't be specified with dllimport.
6320 Specify `extern' now and hope for the best. Sigh. */
6321 DECL_EXTERNAL (node) = 1;
6322 /* Also, implicitly give dllimport'd variables declared within
6323 a function global scope, unless declared static. */
6324 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6325 TREE_PUBLIC (node) = 1;
6326 }
6327
6328 if (!*no_add_attrs)
6329 DECL_DLLIMPORT_P (node) = 1;
6330 }
6331 else if (TREE_CODE (node) == FUNCTION_DECL
6332 && DECL_DECLARED_INLINE_P (node)
6333 && flag_keep_inline_dllexport)
6334 /* An exported function, even if inline, must be emitted. */
6335 DECL_EXTERNAL (node) = 0;
6336
6337 /* Report error if symbol is not accessible at global scope. */
6338 if (!TREE_PUBLIC (node)
6339 && (TREE_CODE (node) == VAR_DECL
6340 || TREE_CODE (node) == FUNCTION_DECL))
6341 {
6342 error ("external linkage required for symbol %q+D because of "
6343 "%qE attribute", node, name);
6344 *no_add_attrs = true;
6345 }
6346
6347 /* A dllexport'd entity must have default visibility so that other
6348 program units (shared libraries or the main executable) can see
6349 it. A dllimport'd entity must have default visibility so that
6350 the linker knows that undefined references within this program
6351 unit can be resolved by the dynamic linker. */
6352 if (!*no_add_attrs)
6353 {
6354 if (DECL_VISIBILITY_SPECIFIED (node)
6355 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6356 error ("%qE implies default visibility, but %qD has already "
6357 "been declared with a different visibility",
6358 name, node);
6359 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6360 DECL_VISIBILITY_SPECIFIED (node) = 1;
6361 }
6362
6363 return NULL_TREE;
6364 }
6365
6366 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6367 \f
6368 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6369 of the various TYPE_QUAL values. */
6370
6371 static void
6372 set_type_quals (tree type, int type_quals)
6373 {
6374 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6375 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6376 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6377 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6378 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6379 }
6380
6381 /* Returns true iff unqualified CAND and BASE are equivalent. */
6382
6383 bool
6384 check_base_type (const_tree cand, const_tree base)
6385 {
6386 return (TYPE_NAME (cand) == TYPE_NAME (base)
6387 /* Apparently this is needed for Objective-C. */
6388 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6389 /* Check alignment. */
6390 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6391 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6392 TYPE_ATTRIBUTES (base)));
6393 }
6394
6395 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6396
6397 bool
6398 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6399 {
6400 return (TYPE_QUALS (cand) == type_quals
6401 && check_base_type (cand, base));
6402 }
6403
6404 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6405
6406 static bool
6407 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6408 {
6409 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6410 && TYPE_NAME (cand) == TYPE_NAME (base)
6411 /* Apparently this is needed for Objective-C. */
6412 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6413 /* Check alignment. */
6414 && TYPE_ALIGN (cand) == align
6415 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6416 TYPE_ATTRIBUTES (base)));
6417 }
6418
6419 /* This function checks to see if TYPE matches the size of one of the
6420 built-in atomic types, and returns that core atomic type. */
6421
6422 static tree
6423 find_atomic_core_type (tree type)
6424 {
6425 tree base_atomic_type;
6426
6427 /* Only handle complete types. */
6428 if (TYPE_SIZE (type) == NULL_TREE)
6429 return NULL_TREE;
6430
6431 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6432 switch (type_size)
6433 {
6434 case 8:
6435 base_atomic_type = atomicQI_type_node;
6436 break;
6437
6438 case 16:
6439 base_atomic_type = atomicHI_type_node;
6440 break;
6441
6442 case 32:
6443 base_atomic_type = atomicSI_type_node;
6444 break;
6445
6446 case 64:
6447 base_atomic_type = atomicDI_type_node;
6448 break;
6449
6450 case 128:
6451 base_atomic_type = atomicTI_type_node;
6452 break;
6453
6454 default:
6455 base_atomic_type = NULL_TREE;
6456 }
6457
6458 return base_atomic_type;
6459 }
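
/* Worked example (illustrative): on a target where int is 32 bits wide,
   an _Atomic int has TYPE_SIZE 32 and maps to atomicSI_type_node, so
   build_qualified_type below can raise the variant's alignment to that
   of the SImode atomic type if needed.  A 24-bit type matches none of
   the cases and yields NULL_TREE, leaving the alignment untouched.  */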
6460
6461 /* Return a version of the TYPE, qualified as indicated by the
6462 TYPE_QUALS, if one exists. If no qualified version exists yet,
6463 return NULL_TREE. */
6464
6465 tree
6466 get_qualified_type (tree type, int type_quals)
6467 {
6468 tree t;
6469
6470 if (TYPE_QUALS (type) == type_quals)
6471 return type;
6472
6473 /* Search the chain of variants to see if there is already one there just
6474 like the one we need to have. If so, use that existing one. We must
6475 preserve the TYPE_NAME, since there is code that depends on this. */
6476 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6477 if (check_qualified_type (t, type, type_quals))
6478 return t;
6479
6480 return NULL_TREE;
6481 }
6482
6483 /* Like get_qualified_type, but creates the type if it does not
6484 exist. This function never returns NULL_TREE. */
6485
6486 tree
6487 build_qualified_type (tree type, int type_quals)
6488 {
6489 tree t;
6490
6491 /* See if we already have the appropriate qualified variant. */
6492 t = get_qualified_type (type, type_quals);
6493
6494 /* If not, build it. */
6495 if (!t)
6496 {
6497 t = build_variant_type_copy (type);
6498 set_type_quals (t, type_quals);
6499
6500 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6501 {
6502 /* See if this object can map to a basic atomic type. */
6503 tree atomic_type = find_atomic_core_type (type);
6504 if (atomic_type)
6505 {
6506 /* Ensure the alignment of this type is compatible with
6507 the required alignment of the atomic type. */
6508 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6509 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6510 }
6511 }
6512
6513 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6514 /* Propagate structural equality. */
6515 SET_TYPE_STRUCTURAL_EQUALITY (t);
6516 else if (TYPE_CANONICAL (type) != type)
6517 /* Build the underlying canonical type, since it is different
6518 from TYPE. */
6519 {
6520 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6521 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6522 }
6523 else
6524 /* T is its own canonical type. */
6525 TYPE_CANONICAL (t) = t;
6526
6527 }
6528
6529 return t;
6530 }
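
/* A small usage sketch (not from the GCC sources): asking twice for the
   same qualified variant returns the node created the first time,
   because get_qualified_type finds it on the variant chain.

     tree cint = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     tree again = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     ... here cint == again is expected to hold ...

   Qualifier bits may be combined, e.g. TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE.  */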
6531
6532 /* Create a variant of TYPE with alignment ALIGN. */
6533
6534 tree
6535 build_aligned_type (tree type, unsigned int align)
6536 {
6537 tree t;
6538
6539 if (TYPE_PACKED (type)
6540 || TYPE_ALIGN (type) == align)
6541 return type;
6542
6543 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6544 if (check_aligned_type (t, type, align))
6545 return t;
6546
6547 t = build_variant_type_copy (type);
6548 TYPE_ALIGN (t) = align;
6549
6550 return t;
6551 }
6552
6553 /* Create a new distinct copy of TYPE. The new type is made its own
6554 MAIN_VARIANT. If TYPE requires structural equality checks, the
6555 resulting type requires structural equality checks; otherwise, its
6556 TYPE_CANONICAL points to itself. */
6557
6558 tree
6559 build_distinct_type_copy (tree type)
6560 {
6561 tree t = copy_node (type);
6562
6563 TYPE_POINTER_TO (t) = 0;
6564 TYPE_REFERENCE_TO (t) = 0;
6565
6566 /* Set the canonical type either to a new equivalence class, or
6567 propagate the need for structural equality checks. */
6568 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6569 SET_TYPE_STRUCTURAL_EQUALITY (t);
6570 else
6571 TYPE_CANONICAL (t) = t;
6572
6573 /* Make it its own variant. */
6574 TYPE_MAIN_VARIANT (t) = t;
6575 TYPE_NEXT_VARIANT (t) = 0;
6576
6577 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6578 whose TREE_TYPE is not t. This can also happen in the Ada
6579 frontend when using subtypes. */
6580
6581 return t;
6582 }
6583
6584 /* Create a new variant of TYPE, equivalent but distinct. This is so
6585 the caller can modify it. TYPE_CANONICAL for the return type will
6586 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6587 are considered equal by the language itself (or that both types
6588 require structural equality checks). */
6589
6590 tree
6591 build_variant_type_copy (tree type)
6592 {
6593 tree t, m = TYPE_MAIN_VARIANT (type);
6594
6595 t = build_distinct_type_copy (type);
6596
6597 /* Since we're building a variant, assume that it is a non-semantic
6598 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6599 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6600
6601 /* Add the new type to the chain of variants of TYPE. */
6602 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6603 TYPE_NEXT_VARIANT (m) = t;
6604 TYPE_MAIN_VARIANT (t) = m;
6605
6606 return t;
6607 }
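
/* The difference between the two copy routines above, as a sketch
   (illustrative only; TYPE stands for any existing type node):

     tree v = build_variant_type_copy (type);
       v shares TYPE_CANONICAL and TYPE_MAIN_VARIANT with TYPE, sits on
       TYPE's variant chain, and is interchangeable with it as far as
       the language is concerned.

     tree d = build_distinct_type_copy (type);
       d is its own main variant and, unless TYPE requires structural
       equality, its own canonical type; the language treats it as a
       new, distinct type.  */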
6608 \f
6609 /* Return true if the from trees in both tree maps are equal. */
6610
6611 int
6612 tree_map_base_eq (const void *va, const void *vb)
6613 {
6614 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6615 *const b = (const struct tree_map_base *) vb;
6616 return (a->from == b->from);
6617 }
6618
6619 /* Hash a from tree in a tree_map_base. */
6620
6621 unsigned int
6622 tree_map_base_hash (const void *item)
6623 {
6624 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6625 }
6626
6627 /* Return true if this tree map structure is marked for garbage collection
6628 purposes. We simply return true if the from tree is marked, so that this
6629 structure goes away when the from tree goes away. */
6630
6631 int
6632 tree_map_base_marked_p (const void *p)
6633 {
6634 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6635 }
6636
6637 /* Hash a from tree in a tree_map. */
6638
6639 unsigned int
6640 tree_map_hash (const void *item)
6641 {
6642 return (((const struct tree_map *) item)->hash);
6643 }
6644
6645 /* Hash a from tree in a tree_decl_map. */
6646
6647 unsigned int
6648 tree_decl_map_hash (const void *item)
6649 {
6650 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6651 }
6652
6653 /* Return the initialization priority for DECL. */
6654
6655 priority_type
6656 decl_init_priority_lookup (tree decl)
6657 {
6658 symtab_node *snode = symtab_node::get (decl);
6659
6660 if (!snode)
6661 return DEFAULT_INIT_PRIORITY;
6662 return snode->get_init_priority ();
6664 }
6665
6666 /* Return the finalization priority for DECL. */
6667
6668 priority_type
6669 decl_fini_priority_lookup (tree decl)
6670 {
6671 cgraph_node *node = cgraph_node::get (decl);
6672
6673 if (!node)
6674 return DEFAULT_INIT_PRIORITY;
6675 return node->get_fini_priority ();
6677 }
6678
6679 /* Set the initialization priority for DECL to PRIORITY. */
6680
6681 void
6682 decl_init_priority_insert (tree decl, priority_type priority)
6683 {
6684 struct symtab_node *snode;
6685
6686 if (priority == DEFAULT_INIT_PRIORITY)
6687 {
6688 snode = symtab_node::get (decl);
6689 if (!snode)
6690 return;
6691 }
6692 else if (TREE_CODE (decl) == VAR_DECL)
6693 snode = varpool_node::get_create (decl);
6694 else
6695 snode = cgraph_node::get_create (decl);
6696 snode->set_init_priority (priority);
6697 }
6698
6699 /* Set the finalization priority for DECL to PRIORITY. */
6700
6701 void
6702 decl_fini_priority_insert (tree decl, priority_type priority)
6703 {
6704 struct cgraph_node *node;
6705
6706 if (priority == DEFAULT_INIT_PRIORITY)
6707 {
6708 node = cgraph_node::get (decl);
6709 if (!node)
6710 return;
6711 }
6712 else
6713 node = cgraph_node::get_create (decl);
6714 node->set_fini_priority (priority);
6715 }
6716
6717 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6718
6719 static void
6720 print_debug_expr_statistics (void)
6721 {
6722 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6723 (long) debug_expr_for_decl->size (),
6724 (long) debug_expr_for_decl->elements (),
6725 debug_expr_for_decl->collisions ());
6726 }
6727
6728 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6729
6730 static void
6731 print_value_expr_statistics (void)
6732 {
6733 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6734 (long) value_expr_for_decl->size (),
6735 (long) value_expr_for_decl->elements (),
6736 value_expr_for_decl->collisions ());
6737 }
6738
6739 /* Lookup a debug expression for FROM, and return it if we find one. */
6740
6741 tree
6742 decl_debug_expr_lookup (tree from)
6743 {
6744 struct tree_decl_map *h, in;
6745 in.base.from = from;
6746
6747 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6748 if (h)
6749 return h->to;
6750 return NULL_TREE;
6751 }
6752
6753 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6754
6755 void
6756 decl_debug_expr_insert (tree from, tree to)
6757 {
6758 struct tree_decl_map *h;
6759
6760 h = ggc_alloc<tree_decl_map> ();
6761 h->base.from = from;
6762 h->to = to;
6763 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6764 }
6765
6766 /* Lookup a value expression for FROM, and return it if we find one. */
6767
6768 tree
6769 decl_value_expr_lookup (tree from)
6770 {
6771 struct tree_decl_map *h, in;
6772 in.base.from = from;
6773
6774 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6775 if (h)
6776 return h->to;
6777 return NULL_TREE;
6778 }
6779
6780 /* Insert a mapping FROM->TO in the value expression hashtable. */
6781
6782 void
6783 decl_value_expr_insert (tree from, tree to)
6784 {
6785 struct tree_decl_map *h;
6786
6787 h = ggc_alloc<tree_decl_map> ();
6788 h->base.from = from;
6789 h->to = to;
6790 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6791 }
6792
6793 /* Lookup a vector of debug arguments for FROM, and return it if we
6794 find one. */
6795
6796 vec<tree, va_gc> **
6797 decl_debug_args_lookup (tree from)
6798 {
6799 struct tree_vec_map *h, in;
6800
6801 if (!DECL_HAS_DEBUG_ARGS_P (from))
6802 return NULL;
6803 gcc_checking_assert (debug_args_for_decl != NULL);
6804 in.base.from = from;
6805 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6806 if (h)
6807 return &h->to;
6808 return NULL;
6809 }
6810
6811 /* Insert a mapping FROM->empty vector of debug arguments in the
6812 debug arguments hashtable. */
6813
6814 vec<tree, va_gc> **
6815 decl_debug_args_insert (tree from)
6816 {
6817 struct tree_vec_map *h;
6818 tree_vec_map **loc;
6819
6820 if (DECL_HAS_DEBUG_ARGS_P (from))
6821 return decl_debug_args_lookup (from);
6822 if (debug_args_for_decl == NULL)
6823 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6824 h = ggc_alloc<tree_vec_map> ();
6825 h->base.from = from;
6826 h->to = NULL;
6827 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6828 *loc = h;
6829 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6830 return &h->to;
6831 }
6832
6833 /* Hashing of types so that we don't make duplicates.
6834 The entry point is `type_hash_canon'. */
6835
6836 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6837 with types in the TREE_VALUE slots), by adding the hash codes
6838 of the individual types. */
6839
6840 static void
6841 type_hash_list (const_tree list, inchash::hash &hstate)
6842 {
6843 const_tree tail;
6844
6845 for (tail = list; tail; tail = TREE_CHAIN (tail))
6846 if (TREE_VALUE (tail) != error_mark_node)
6847 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6848 }
6849
6850 /* These are the Hashtable callback functions. */
6851
6852 /* Returns true iff the types are equivalent. */
6853
6854 bool
6855 type_cache_hasher::equal (type_hash *a, type_hash *b)
6856 {
6857 /* First test the things that are the same for all types. */
6858 if (a->hash != b->hash
6859 || TREE_CODE (a->type) != TREE_CODE (b->type)
6860 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6861 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6862 TYPE_ATTRIBUTES (b->type))
6863 || (TREE_CODE (a->type) != COMPLEX_TYPE
6864 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6865 return 0;
6866
6867 /* Be careful about comparing arrays before and after the element type
6868 has been completed; don't compare TYPE_ALIGN unless both types are
6869 complete. */
6870 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6871 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6872 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6873 return 0;
6874
6875 switch (TREE_CODE (a->type))
6876 {
6877 case VOID_TYPE:
6878 case COMPLEX_TYPE:
6879 case POINTER_TYPE:
6880 case REFERENCE_TYPE:
6881 case NULLPTR_TYPE:
6882 return 1;
6883
6884 case VECTOR_TYPE:
6885 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6886
6887 case ENUMERAL_TYPE:
6888 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6889 && !(TYPE_VALUES (a->type)
6890 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6891 && TYPE_VALUES (b->type)
6892 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6893 && type_list_equal (TYPE_VALUES (a->type),
6894 TYPE_VALUES (b->type))))
6895 return 0;
6896
6897 /* ... fall through ... */
6898
6899 case INTEGER_TYPE:
6900 case REAL_TYPE:
6901 case BOOLEAN_TYPE:
6902 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6903 return false;
6904 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6905 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6906 TYPE_MAX_VALUE (b->type)))
6907 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6908 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6909 TYPE_MIN_VALUE (b->type))));
6910
6911 case FIXED_POINT_TYPE:
6912 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6913
6914 case OFFSET_TYPE:
6915 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6916
6917 case METHOD_TYPE:
6918 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6919 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6920 || (TYPE_ARG_TYPES (a->type)
6921 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6922 && TYPE_ARG_TYPES (b->type)
6923 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6924 && type_list_equal (TYPE_ARG_TYPES (a->type),
6925 TYPE_ARG_TYPES (b->type)))))
6926 break;
6927 return 0;
6928 case ARRAY_TYPE:
6929 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6930
6931 case RECORD_TYPE:
6932 case UNION_TYPE:
6933 case QUAL_UNION_TYPE:
6934 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6935 || (TYPE_FIELDS (a->type)
6936 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6937 && TYPE_FIELDS (b->type)
6938 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6939 && type_list_equal (TYPE_FIELDS (a->type),
6940 TYPE_FIELDS (b->type))));
6941
6942 case FUNCTION_TYPE:
6943 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6944 || (TYPE_ARG_TYPES (a->type)
6945 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6946 && TYPE_ARG_TYPES (b->type)
6947 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6948 && type_list_equal (TYPE_ARG_TYPES (a->type),
6949 TYPE_ARG_TYPES (b->type))))
6950 break;
6951 return 0;
6952
6953 default:
6954 return 0;
6955 }
6956
6957 if (lang_hooks.types.type_hash_eq != NULL)
6958 return lang_hooks.types.type_hash_eq (a->type, b->type);
6959
6960 return 1;
6961 }
6962
6963 /* Given TYPE, and HASHCODE its hash code, return the canonical
6964 object for an identical type if one already exists.
6965 Otherwise, return TYPE, and record it as the canonical object.
6966
6967 To use this function, first create a type of the sort you want.
6968 Then compute its hash code from the fields of the type that
6969 make it different from other similar types.
6970 Then call this function and use the value. */
6971
6972 tree
6973 type_hash_canon (unsigned int hashcode, tree type)
6974 {
6975 type_hash in;
6976 type_hash **loc;
6977
6978 /* The hash table only contains main variants, so ensure that's what we're
6979 being passed. */
6980 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6981
6982 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6983 must call that routine before comparing TYPE_ALIGNs. */
6984 layout_type (type);
6985
6986 in.hash = hashcode;
6987 in.type = type;
6988
6989 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6990 if (*loc)
6991 {
6992 tree t1 = ((type_hash *) *loc)->type;
6993 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6994 if (GATHER_STATISTICS)
6995 {
6996 tree_code_counts[(int) TREE_CODE (type)]--;
6997 tree_node_counts[(int) t_kind]--;
6998 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6999 }
7000 return t1;
7001 }
7002 else
7003 {
7004 struct type_hash *h;
7005
7006 h = ggc_alloc<type_hash> ();
7007 h->hash = hashcode;
7008 h->type = type;
7009 *loc = h;
7010
7011 return type;
7012 }
7013 }
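
/* The recipe from the comment above, spelled out as a sketch.  This is
   roughly what build_nonstandard_integer_type further down in this file
   does; it is illustrative, not a drop-in addition:

     tree itype = make_node (INTEGER_TYPE);
     TYPE_PRECISION (itype) = 5;
     fixup_unsigned_type (itype);
     itype = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);

   If an identical 5-bit unsigned type was canonicalized earlier, the
   freshly made node is discarded and the older one is returned.  */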
7014
7015 static void
7016 print_type_hash_statistics (void)
7017 {
7018 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7019 (long) type_hash_table->size (),
7020 (long) type_hash_table->elements (),
7021 type_hash_table->collisions ());
7022 }
7023
7024 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7025 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7026 by adding the hash codes of the individual attributes. */
7027
7028 static void
7029 attribute_hash_list (const_tree list, inchash::hash &hstate)
7030 {
7031 const_tree tail;
7032
7033 for (tail = list; tail; tail = TREE_CHAIN (tail))
7034 /* ??? Do we want to add in TREE_VALUE too? */
7035 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7036 }
7037
7038 /* Given two lists of attributes, return true if list L2 is
7039 equivalent to L1. */
7040
7041 int
7042 attribute_list_equal (const_tree l1, const_tree l2)
7043 {
7044 if (l1 == l2)
7045 return 1;
7046
7047 return attribute_list_contained (l1, l2)
7048 && attribute_list_contained (l2, l1);
7049 }
7050
7051 /* Given two lists of attributes, return true if list L2 is
7052 completely contained within L1. */
7053 /* ??? This would be faster if attribute names were stored in a canonicalized
7054 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7055 must be used to show these elements are equivalent (which they are). */
7056 /* ??? It's not clear that attributes with arguments will always be handled
7057 correctly. */
7058
7059 int
7060 attribute_list_contained (const_tree l1, const_tree l2)
7061 {
7062 const_tree t1, t2;
7063
7064 /* First check the obvious, maybe the lists are identical. */
7065 if (l1 == l2)
7066 return 1;
7067
7068 /* Maybe the lists are similar. */
7069 for (t1 = l1, t2 = l2;
7070 t1 != 0 && t2 != 0
7071 && get_attribute_name (t1) == get_attribute_name (t2)
7072 && TREE_VALUE (t1) == TREE_VALUE (t2);
7073 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7074 ;
7075
7076 /* Maybe the lists are equal. */
7077 if (t1 == 0 && t2 == 0)
7078 return 1;
7079
7080 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7081 {
7082 const_tree attr;
7083 /* This CONST_CAST is okay because lookup_attribute does not
7084 modify its argument and the return value is assigned to a
7085 const_tree. */
7086 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7087 CONST_CAST_TREE (l1));
7088 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7089 attr = lookup_ident_attribute (get_attribute_name (t2),
7090 TREE_CHAIN (attr)))
7091 ;
7092
7093 if (attr == NULL_TREE)
7094 return 0;
7095 }
7096
7097 return 1;
7098 }
7099
7100 /* Given two lists of types
7101 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7102 return 1 if the lists contain the same types in the same order.
7103 Also, the TREE_PURPOSEs must match. */
7104
7105 int
7106 type_list_equal (const_tree l1, const_tree l2)
7107 {
7108 const_tree t1, t2;
7109
7110 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7111 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7112 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7113 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7114 && (TREE_TYPE (TREE_PURPOSE (t1))
7115 == TREE_TYPE (TREE_PURPOSE (t2))))))
7116 return 0;
7117
7118 return t1 == t2;
7119 }
7120
7121 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7122 given by TYPE. If the argument list accepts variable arguments,
7123 then this function counts only the ordinary arguments. */
7124
7125 int
7126 type_num_arguments (const_tree type)
7127 {
7128 int i = 0;
7129 tree t;
7130
7131 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7132 /* If the function does not take a variable number of arguments,
7133 the last element in the list will have type `void'. */
7134 if (VOID_TYPE_P (TREE_VALUE (t)))
7135 break;
7136 else
7137 ++i;
7138
7139 return i;
7140 }
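
/* Worked example (illustrative): for a FUNCTION_TYPE corresponding to
   int f (int, double), TYPE_ARG_TYPES is the list (int, double, void)
   and the result is 2, the terminating void entry not being counted.
   For the variadic int g (int, double, ...), the list is (int, double)
   with no terminating void entry, and the result is again 2.  */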
7141
7142 /* Nonzero if integer constants T1 and T2
7143 represent the same constant value. */
7144
7145 int
7146 tree_int_cst_equal (const_tree t1, const_tree t2)
7147 {
7148 if (t1 == t2)
7149 return 1;
7150
7151 if (t1 == 0 || t2 == 0)
7152 return 0;
7153
7154 if (TREE_CODE (t1) == INTEGER_CST
7155 && TREE_CODE (t2) == INTEGER_CST
7156 && wi::to_widest (t1) == wi::to_widest (t2))
7157 return 1;
7158
7159 return 0;
7160 }
7161
7162 /* Return true if T is an INTEGER_CST whose numerical value (extended
7163 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7164
7165 bool
7166 tree_fits_shwi_p (const_tree t)
7167 {
7168 return (t != NULL_TREE
7169 && TREE_CODE (t) == INTEGER_CST
7170 && wi::fits_shwi_p (wi::to_widest (t)));
7171 }
7172
7173 /* Return true if T is an INTEGER_CST whose numerical value (extended
7174 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7175
7176 bool
7177 tree_fits_uhwi_p (const_tree t)
7178 {
7179 return (t != NULL_TREE
7180 && TREE_CODE (t) == INTEGER_CST
7181 && wi::fits_uhwi_p (wi::to_widest (t)));
7182 }
7183
7184 /* T is an INTEGER_CST whose numerical value (extended according to
7185 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7186 HOST_WIDE_INT. */
7187
7188 HOST_WIDE_INT
7189 tree_to_shwi (const_tree t)
7190 {
7191 gcc_assert (tree_fits_shwi_p (t));
7192 return TREE_INT_CST_LOW (t);
7193 }
7194
7195 /* T is an INTEGER_CST whose numerical value (extended according to
7196 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7197 HOST_WIDE_INT. */
7198
7199 unsigned HOST_WIDE_INT
7200 tree_to_uhwi (const_tree t)
7201 {
7202 gcc_assert (tree_fits_uhwi_p (t));
7203 return TREE_INT_CST_LOW (t);
7204 }
7205
7206 /* Return the most significant (sign) bit of T. */
7207
7208 int
7209 tree_int_cst_sign_bit (const_tree t)
7210 {
7211 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7212
7213 return wi::extract_uhwi (t, bitno, 1);
7214 }
7215
7216 /* Return an indication of the sign of the integer constant T.
7217 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7218 Note that -1 will never be returned if T's type is unsigned. */
7219
7220 int
7221 tree_int_cst_sgn (const_tree t)
7222 {
7223 if (wi::eq_p (t, 0))
7224 return 0;
7225 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7226 return 1;
7227 else if (wi::neg_p (t))
7228 return -1;
7229 else
7230 return 1;
7231 }
7232
7233 /* Return the minimum number of bits needed to represent VALUE in a
7234 signed or unsigned type; SGN says which. */
7235
7236 unsigned int
7237 tree_int_cst_min_precision (tree value, signop sgn)
7238 {
7239 /* If the value is negative, compute its negative minus 1. The latter
7240 adjustment is because the absolute value of the largest negative value
7241 is one larger than the largest positive value. This is equivalent to
7242 a bit-wise negation, so use that operation instead. */
7243
7244 if (tree_int_cst_sgn (value) < 0)
7245 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7246
7247 /* Return the number of bits needed, taking into account the fact
7248 that we need one more bit for a signed than an unsigned type.
7249 If value is 0 or -1, the minimum precision is 1 regardless of
7250 whether SGN is SIGNED or UNSIGNED. */
7251
7252 if (integer_zerop (value))
7253 return 1;
7254 else
7255 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7256 }
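
/* Worked examples (illustrative): for VALUE == 5, tree_floor_log2 gives 2,
   so the result is 3 bits for UNSIGNED and 4 bits for SIGNED.  For
   VALUE == -3, the BIT_NOT_EXPR above turns it into 2, tree_floor_log2
   gives 1, and the SIGNED result is 3 bits (a 3-bit signed type covers
   -4 .. 3).  For VALUE == 0 the result is 1 in either case.  */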
7257
7258 /* Return truthvalue of whether T1 is the same tree structure as T2.
7259 Return 1 if they are the same.
7260 Return 0 if they are understandably different.
7261 Return -1 if either contains tree structure not understood by
7262 this function. */
7263
7264 int
7265 simple_cst_equal (const_tree t1, const_tree t2)
7266 {
7267 enum tree_code code1, code2;
7268 int cmp;
7269 int i;
7270
7271 if (t1 == t2)
7272 return 1;
7273 if (t1 == 0 || t2 == 0)
7274 return 0;
7275
7276 code1 = TREE_CODE (t1);
7277 code2 = TREE_CODE (t2);
7278
7279 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7280 {
7281 if (CONVERT_EXPR_CODE_P (code2)
7282 || code2 == NON_LVALUE_EXPR)
7283 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7284 else
7285 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7286 }
7287
7288 else if (CONVERT_EXPR_CODE_P (code2)
7289 || code2 == NON_LVALUE_EXPR)
7290 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7291
7292 if (code1 != code2)
7293 return 0;
7294
7295 switch (code1)
7296 {
7297 case INTEGER_CST:
7298 return wi::to_widest (t1) == wi::to_widest (t2);
7299
7300 case REAL_CST:
7301 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7302
7303 case FIXED_CST:
7304 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7305
7306 case STRING_CST:
7307 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7308 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7309 TREE_STRING_LENGTH (t1)));
7310
7311 case CONSTRUCTOR:
7312 {
7313 unsigned HOST_WIDE_INT idx;
7314 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7315 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7316
7317 if (vec_safe_length (v1) != vec_safe_length (v2))
7318 return false;
7319
7320 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7321 /* ??? Should we handle also fields here? */
7322 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7323 return false;
7324 return true;
7325 }
7326
7327 case SAVE_EXPR:
7328 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7329
7330 case CALL_EXPR:
7331 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7332 if (cmp <= 0)
7333 return cmp;
7334 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7335 return 0;
7336 {
7337 const_tree arg1, arg2;
7338 const_call_expr_arg_iterator iter1, iter2;
7339 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7340 arg2 = first_const_call_expr_arg (t2, &iter2);
7341 arg1 && arg2;
7342 arg1 = next_const_call_expr_arg (&iter1),
7343 arg2 = next_const_call_expr_arg (&iter2))
7344 {
7345 cmp = simple_cst_equal (arg1, arg2);
7346 if (cmp <= 0)
7347 return cmp;
7348 }
7349 return arg1 == arg2;
7350 }
7351
7352 case TARGET_EXPR:
7353 /* Special case: if either target is an unallocated VAR_DECL,
7354 it means that it's going to be unified with whatever the
7355 TARGET_EXPR is really supposed to initialize, so treat it
7356 as being equivalent to anything. */
7357 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7358 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7359 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7360 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7361 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7362 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7363 cmp = 1;
7364 else
7365 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7366
7367 if (cmp <= 0)
7368 return cmp;
7369
7370 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7371
7372 case WITH_CLEANUP_EXPR:
7373 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7374 if (cmp <= 0)
7375 return cmp;
7376
7377 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7378
7379 case COMPONENT_REF:
7380 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7381 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7382
7383 return 0;
7384
7385 case VAR_DECL:
7386 case PARM_DECL:
7387 case CONST_DECL:
7388 case FUNCTION_DECL:
7389 return 0;
7390
7391 default:
7392 break;
7393 }
7394
7395 /* This general rule works for most tree codes. All exceptions should be
7396 handled above. If this is a language-specific tree code, we can't
7397 trust what might be in the operand, so say we don't know
7398 the situation. */
7399 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7400 return -1;
7401
7402 switch (TREE_CODE_CLASS (code1))
7403 {
7404 case tcc_unary:
7405 case tcc_binary:
7406 case tcc_comparison:
7407 case tcc_expression:
7408 case tcc_reference:
7409 case tcc_statement:
7410 cmp = 1;
7411 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7412 {
7413 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7414 if (cmp <= 0)
7415 return cmp;
7416 }
7417
7418 return cmp;
7419
7420 default:
7421 return -1;
7422 }
7423 }
7424
7425 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7426 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7427 than U, respectively. */
7428
7429 int
7430 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7431 {
7432 if (tree_int_cst_sgn (t) < 0)
7433 return -1;
7434 else if (!tree_fits_uhwi_p (t))
7435 return 1;
7436 else if (TREE_INT_CST_LOW (t) == u)
7437 return 0;
7438 else if (TREE_INT_CST_LOW (t) < u)
7439 return -1;
7440 else
7441 return 1;
7442 }
7443
7444 /* Return true if SIZE represents a constant size that is in bounds of
7445 what the middle-end and the backend accepts (covering not more than
7446 half of the address-space). */
7447
7448 bool
7449 valid_constant_size_p (const_tree size)
7450 {
7451 if (! tree_fits_uhwi_p (size)
7452 || TREE_OVERFLOW (size)
7453 || tree_int_cst_sign_bit (size) != 0)
7454 return false;
7455 return true;
7456 }
7457
7458 /* Return the precision of the type, or for a complex or vector type the
7459 precision of the type of its elements. */
7460
7461 unsigned int
7462 element_precision (const_tree type)
7463 {
7464 enum tree_code code = TREE_CODE (type);
7465 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7466 type = TREE_TYPE (type);
7467
7468 return TYPE_PRECISION (type);
7469 }
7470
7471 /* Return true if CODE represents an associative tree code. Otherwise
7472 return false. */
7473 bool
7474 associative_tree_code (enum tree_code code)
7475 {
7476 switch (code)
7477 {
7478 case BIT_IOR_EXPR:
7479 case BIT_AND_EXPR:
7480 case BIT_XOR_EXPR:
7481 case PLUS_EXPR:
7482 case MULT_EXPR:
7483 case MIN_EXPR:
7484 case MAX_EXPR:
7485 return true;
7486
7487 default:
7488 break;
7489 }
7490 return false;
7491 }
7492
7493 /* Return true if CODE represents a commutative tree code. Otherwise
7494 return false. */
7495 bool
7496 commutative_tree_code (enum tree_code code)
7497 {
7498 switch (code)
7499 {
7500 case PLUS_EXPR:
7501 case MULT_EXPR:
7502 case MULT_HIGHPART_EXPR:
7503 case MIN_EXPR:
7504 case MAX_EXPR:
7505 case BIT_IOR_EXPR:
7506 case BIT_XOR_EXPR:
7507 case BIT_AND_EXPR:
7508 case NE_EXPR:
7509 case EQ_EXPR:
7510 case UNORDERED_EXPR:
7511 case ORDERED_EXPR:
7512 case UNEQ_EXPR:
7513 case LTGT_EXPR:
7514 case TRUTH_AND_EXPR:
7515 case TRUTH_XOR_EXPR:
7516 case TRUTH_OR_EXPR:
7517 case WIDEN_MULT_EXPR:
7518 case VEC_WIDEN_MULT_HI_EXPR:
7519 case VEC_WIDEN_MULT_LO_EXPR:
7520 case VEC_WIDEN_MULT_EVEN_EXPR:
7521 case VEC_WIDEN_MULT_ODD_EXPR:
7522 return true;
7523
7524 default:
7525 break;
7526 }
7527 return false;
7528 }
7529
7530 /* Return true if CODE represents a ternary tree code for which the
7531 first two operands are commutative. Otherwise return false. */
7532 bool
7533 commutative_ternary_tree_code (enum tree_code code)
7534 {
7535 switch (code)
7536 {
7537 case WIDEN_MULT_PLUS_EXPR:
7538 case WIDEN_MULT_MINUS_EXPR:
7539 case DOT_PROD_EXPR:
7540 case FMA_EXPR:
7541 return true;
7542
7543 default:
7544 break;
7545 }
7546 return false;
7547 }
7548
7549 namespace inchash
7550 {
7551
7552 /* Generate a hash value for an expression. This can be used iteratively
7553 by passing a previous result as the HSTATE argument.
7554
7555 This function is intended to produce the same hash for expressions which
7556 would compare equal using operand_equal_p. */
7557 void
7558 add_expr (const_tree t, inchash::hash &hstate)
7559 {
7560 int i;
7561 enum tree_code code;
7562 enum tree_code_class tclass;
7563
7564 if (t == NULL_TREE)
7565 {
7566 hstate.merge_hash (0);
7567 return;
7568 }
7569
7570 code = TREE_CODE (t);
7571
7572 switch (code)
7573 {
7574 /* Alas, constants aren't shared, so we can't rely on pointer
7575 identity. */
7576 case VOID_CST:
7577 hstate.merge_hash (0);
7578 return;
7579 case INTEGER_CST:
7580 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7581 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7582 return;
7583 case REAL_CST:
7584 {
7585 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7586 hstate.merge_hash (val2);
7587 return;
7588 }
7589 case FIXED_CST:
7590 {
7591 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7592 hstate.merge_hash (val2);
7593 return;
7594 }
7595 case STRING_CST:
7596 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7597 return;
7598 case COMPLEX_CST:
7599 inchash::add_expr (TREE_REALPART (t), hstate);
7600 inchash::add_expr (TREE_IMAGPART (t), hstate);
7601 return;
7602 case VECTOR_CST:
7603 {
7604 unsigned i;
7605 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7606 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7607 return;
7608 }
7609 case SSA_NAME:
7610 /* SSA names within a function are identified by their version number, so hash that. */
7611 hstate.add_wide_int (SSA_NAME_VERSION (t));
7612 return;
7613 case PLACEHOLDER_EXPR:
7614 /* The node itself doesn't matter. */
7615 return;
7616 case TREE_LIST:
7617 /* A list of expressions, for a CALL_EXPR or as the elements of a
7618 VECTOR_CST. */
7619 for (; t; t = TREE_CHAIN (t))
7620 inchash::add_expr (TREE_VALUE (t), hstate);
7621 return;
7622 case CONSTRUCTOR:
7623 {
7624 unsigned HOST_WIDE_INT idx;
7625 tree field, value;
7626 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7627 {
7628 inchash::add_expr (field, hstate);
7629 inchash::add_expr (value, hstate);
7630 }
7631 return;
7632 }
7633 case FUNCTION_DECL:
7634 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7635 Otherwise nodes that compare equal according to operand_equal_p might
7636 get different hash codes. However, don't do this for machine specific
7637 or front end builtins, since the function code is overloaded in those
7638 cases. */
7639 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7640 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7641 {
7642 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7643 code = TREE_CODE (t);
7644 }
7645 /* FALL THROUGH */
7646 default:
7647 tclass = TREE_CODE_CLASS (code);
7648
7649 if (tclass == tcc_declaration)
7650 {
7651 /* DECLs have a unique ID. */
7652 hstate.add_wide_int (DECL_UID (t));
7653 }
7654 else
7655 {
7656 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7657
7658 hstate.add_object (code);
7659
7660 /* Don't hash the type, that can lead to having nodes which
7661 compare equal according to operand_equal_p, but which
7662 have different hash codes. */
7663 if (CONVERT_EXPR_CODE_P (code)
7664 || code == NON_LVALUE_EXPR)
7665 {
7666 /* Make sure to include signedness in the hash computation. */
7667 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7668 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7669 }
7670
7671 else if (commutative_tree_code (code))
7672 {
7673 /* It's a commutative expression. We want to hash it the same
7674 however it appears. We do this by first hashing both operands
7675 and then rehashing based on the order of their independent
7676 hashes. */
7677 inchash::hash one, two;
7678 inchash::add_expr (TREE_OPERAND (t, 0), one);
7679 inchash::add_expr (TREE_OPERAND (t, 1), two);
7680 hstate.add_commutative (one, two);
7681 }
7682 else
7683 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7684 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7685 }
7686 return;
7687 }
7688 }
7689
7690 }
7691
7692 /* Constructors for pointer, array and function types.
7693 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7694 constructed by language-dependent code, not here.) */
7695
7696 /* Construct, lay out and return the type of pointers to TO_TYPE with
7697 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7698 reference all of memory. If such a type has already been
7699 constructed, reuse it. */
7700
7701 tree
7702 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7703 bool can_alias_all)
7704 {
7705 tree t;
7706
7707 if (to_type == error_mark_node)
7708 return error_mark_node;
7709
7710 /* If the pointed-to type has the may_alias attribute set, force
7711 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7712 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7713 can_alias_all = true;
7714
7715 /* In some cases, languages will have things that aren't a POINTER_TYPE
7716 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7717 In that case, return that type without regard to the rest of our
7718 operands.
7719
7720 ??? This is a kludge, but consistent with the way this function has
7721 always operated and there doesn't seem to be a good way to avoid this
7722 at the moment. */
7723 if (TYPE_POINTER_TO (to_type) != 0
7724 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7725 return TYPE_POINTER_TO (to_type);
7726
7727 /* First, if we already have a type for pointers to TO_TYPE and it's
7728 the proper mode, use it. */
7729 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7730 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7731 return t;
7732
7733 t = make_node (POINTER_TYPE);
7734
7735 TREE_TYPE (t) = to_type;
7736 SET_TYPE_MODE (t, mode);
7737 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7738 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7739 TYPE_POINTER_TO (to_type) = t;
7740
7741 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7742 SET_TYPE_STRUCTURAL_EQUALITY (t);
7743 else if (TYPE_CANONICAL (to_type) != to_type)
7744 TYPE_CANONICAL (t)
7745 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7746 mode, false);
7747
7748 /* Lay out the type. This function has many callers that are concerned
7749 with expression-construction, and this simplifies them all. */
7750 layout_type (t);
7751
7752 return t;
7753 }
7754
7755 /* By default build pointers in ptr_mode. */
7756
7757 tree
7758 build_pointer_type (tree to_type)
7759 {
7760 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7761 : TYPE_ADDR_SPACE (to_type);
7762 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7763 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7764 }
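
/* Usage sketch (illustrative): repeated requests for the same pointer
   type come back from the TYPE_POINTER_TO cache rather than being
   rebuilt.

     tree pi = build_pointer_type (integer_type_node);
     tree pi2 = build_pointer_type (integer_type_node);
     ... pi == pi2 is expected here, both being the cached node built in
         the pointer mode of the generic address space ...  */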
7765
7766 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7767
7768 tree
7769 build_reference_type_for_mode (tree to_type, machine_mode mode,
7770 bool can_alias_all)
7771 {
7772 tree t;
7773
7774 if (to_type == error_mark_node)
7775 return error_mark_node;
7776
7777 /* If the pointed-to type has the may_alias attribute set, force
7778 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7779 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7780 can_alias_all = true;
7781
7782 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7783 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7784 In that case, return that type without regard to the rest of our
7785 operands.
7786
7787 ??? This is a kludge, but consistent with the way this function has
7788 always operated and there doesn't seem to be a good way to avoid this
7789 at the moment. */
7790 if (TYPE_REFERENCE_TO (to_type) != 0
7791 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7792 return TYPE_REFERENCE_TO (to_type);
7793
7794 /* First, if we already have a type for references to TO_TYPE and it's
7795 the proper mode, use it. */
7796 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7797 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7798 return t;
7799
7800 t = make_node (REFERENCE_TYPE);
7801
7802 TREE_TYPE (t) = to_type;
7803 SET_TYPE_MODE (t, mode);
7804 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7805 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7806 TYPE_REFERENCE_TO (to_type) = t;
7807
7808 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7809 SET_TYPE_STRUCTURAL_EQUALITY (t);
7810 else if (TYPE_CANONICAL (to_type) != to_type)
7811 TYPE_CANONICAL (t)
7812 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7813 mode, false);
7814
7815 layout_type (t);
7816
7817 return t;
7818 }
7819
7820
7821 /* Build the node for the type of references-to-TO_TYPE by default
7822 in ptr_mode. */
7823
7824 tree
7825 build_reference_type (tree to_type)
7826 {
7827 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7828 : TYPE_ADDR_SPACE (to_type);
7829 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7830 return build_reference_type_for_mode (to_type, pointer_mode, false);
7831 }
7832
7833 #define MAX_INT_CACHED_PREC \
7834 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7835 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7836
7837 /* Builds a signed or unsigned integer type of precision PRECISION.
7838 Used for C bitfields whose precision does not match that of
7839 built-in target types. */
7840 tree
7841 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7842 int unsignedp)
7843 {
7844 tree itype, ret;
7845
7846 if (unsignedp)
7847 unsignedp = MAX_INT_CACHED_PREC + 1;
7848
7849 if (precision <= MAX_INT_CACHED_PREC)
7850 {
7851 itype = nonstandard_integer_type_cache[precision + unsignedp];
7852 if (itype)
7853 return itype;
7854 }
7855
7856 itype = make_node (INTEGER_TYPE);
7857 TYPE_PRECISION (itype) = precision;
7858
7859 if (unsignedp)
7860 fixup_unsigned_type (itype);
7861 else
7862 fixup_signed_type (itype);
7863
7864 ret = itype;
7865 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7866 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7867 if (precision <= MAX_INT_CACHED_PREC)
7868 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7869
7870 return ret;
7871 }
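/* Usage sketch (illustrative): a 24-bit unsigned type, as needed for a
   C bit-field whose width matches no built-in target type, is obtained
   with

     tree u24 = build_nonstandard_integer_type (24, 1);

   Precisions up to MAX_INT_CACHED_PREC are cached, so a second call
   with the same arguments returns the same node.  */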
7872
7873 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7874 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7875 is true, reuse such a type that has already been constructed. */
7876
7877 static tree
7878 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7879 {
7880 tree itype = make_node (INTEGER_TYPE);
7881 inchash::hash hstate;
7882
7883 TREE_TYPE (itype) = type;
7884
7885 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7886 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7887
7888 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7889 SET_TYPE_MODE (itype, TYPE_MODE (type));
7890 TYPE_SIZE (itype) = TYPE_SIZE (type);
7891 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7892 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7893 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7894
7895 if (!shared)
7896 return itype;
7897
7898 if ((TYPE_MIN_VALUE (itype)
7899 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7900 || (TYPE_MAX_VALUE (itype)
7901 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7902 {
7903 /* Since we cannot reliably merge this type, we need to compare it using
7904 structural equality checks. */
7905 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7906 return itype;
7907 }
7908
7909 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7910 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7911 hstate.merge_hash (TYPE_HASH (type));
7912 itype = type_hash_canon (hstate.end (), itype);
7913
7914 return itype;
7915 }
7916
7917 /* Wrapper around build_range_type_1 with SHARED set to true. */
7918
7919 tree
7920 build_range_type (tree type, tree lowval, tree highval)
7921 {
7922 return build_range_type_1 (type, lowval, highval, true);
7923 }
7924
7925 /* Wrapper around build_range_type_1 with SHARED set to false. */
7926
7927 tree
7928 build_nonshared_range_type (tree type, tree lowval, tree highval)
7929 {
7930 return build_range_type_1 (type, lowval, highval, false);
7931 }
7932
7933 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7934 MAXVAL should be the maximum value in the domain
7935 (one less than the length of the array).
7936
7937 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7938 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7939 The limit exists because the result is a signed type and we don't handle
7940 sizes that use more than one HOST_WIDE_INT. */
7941
7942 tree
7943 build_index_type (tree maxval)
7944 {
7945 return build_range_type (sizetype, size_zero_node, maxval);
7946 }
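/* Usage sketch (illustrative): the TYPE_DOMAIN of a 10-element array
   runs from 0 to 9 and is therefore created with

     tree domain = build_index_type (size_int (9));

   which is shorthand for
   build_range_type (sizetype, size_zero_node, size_int (9)).  */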
7947
7948 /* Return true if the debug information for TYPE, a subtype, should be emitted
7949 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7950 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7951 debug info and doesn't reflect the source code. */
7952
7953 bool
7954 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7955 {
7956 tree base_type = TREE_TYPE (type), low, high;
7957
7958 /* Subrange types have a base type which is an integral type. */
7959 if (!INTEGRAL_TYPE_P (base_type))
7960 return false;
7961
7962 /* Get the real bounds of the subtype. */
7963 if (lang_hooks.types.get_subrange_bounds)
7964 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7965 else
7966 {
7967 low = TYPE_MIN_VALUE (type);
7968 high = TYPE_MAX_VALUE (type);
7969 }
7970
7971 /* If the type and its base type have the same representation and the same
7972 name, then the type is not a subrange but a copy of the base type. */
7973 if ((TREE_CODE (base_type) == INTEGER_TYPE
7974 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7975 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7976 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7977 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7978 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7979 return false;
7980
7981 if (lowval)
7982 *lowval = low;
7983 if (highval)
7984 *highval = high;
7985 return true;
7986 }
7987
7988 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7989 and number of elements specified by the range of values of INDEX_TYPE.
7990 If SHARED is true, reuse such a type that has already been constructed. */
7991
7992 static tree
7993 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7994 {
7995 tree t;
7996
7997 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7998 {
7999 error ("arrays of functions are not meaningful");
8000 elt_type = integer_type_node;
8001 }
8002
8003 t = make_node (ARRAY_TYPE);
8004 TREE_TYPE (t) = elt_type;
8005 TYPE_DOMAIN (t) = index_type;
8006 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8007 layout_type (t);
8008
8009 /* If the element type is incomplete at this point we get marked for
8010 structural equality. Do not record these types in the canonical
8011 type hashtable. */
8012 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8013 return t;
8014
8015 if (shared)
8016 {
8017 inchash::hash hstate;
8018 hstate.add_object (TYPE_HASH (elt_type));
8019 if (index_type)
8020 hstate.add_object (TYPE_HASH (index_type));
8021 t = type_hash_canon (hstate.end (), t);
8022 }
8023
8024 if (TYPE_CANONICAL (t) == t)
8025 {
8026 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8027 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8028 SET_TYPE_STRUCTURAL_EQUALITY (t);
8029 else if (TYPE_CANONICAL (elt_type) != elt_type
8030 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8031 TYPE_CANONICAL (t)
8032 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8033 index_type
8034 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8035 shared);
8036 }
8037
8038 return t;
8039 }
8040
8041 /* Wrapper around build_array_type_1 with SHARED set to true. */
8042
8043 tree
8044 build_array_type (tree elt_type, tree index_type)
8045 {
8046 return build_array_type_1 (elt_type, index_type, true);
8047 }
8048
8049 /* Wrapper around build_array_type_1 with SHARED set to false. */
8050
8051 tree
8052 build_nonshared_array_type (tree elt_type, tree index_type)
8053 {
8054 return build_array_type_1 (elt_type, index_type, false);
8055 }
8056
8057 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8058 sizetype. */
8059
8060 tree
8061 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8062 {
8063 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8064 }
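/* Usage sketch (illustrative): the type "int[10]" can be built either
   from an explicit domain or directly from the element count:

     tree a1 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));
     tree a2 = build_array_type_nelts (integer_type_node, 10);

   Since both describe the same shared type, the hash canonicalization
   above normally makes them return the same node.  */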
8065
8066 /* Strip ARRAY_TYPE wrappers from TYPE until a non-array element type
8067 is found, and return that element type. */
8068
8069 tree
8070 strip_array_types (tree type)
8071 {
8072 while (TREE_CODE (type) == ARRAY_TYPE)
8073 type = TREE_TYPE (type);
8074
8075 return type;
8076 }
8077
8078 /* Computes the canonical argument types from the argument type list
8079 ARGTYPES.
8080
8081 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8082 on entry to this function, or if any of the ARGTYPES are
8083 structural.
8084
8085 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8086 true on entry to this function, or if any of the ARGTYPES are
8087 non-canonical.
8088
8089 Returns a canonical argument list, which may be ARGTYPES when the
8090 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8091 true) or would not differ from ARGTYPES. */
8092
8093 static tree
8094 maybe_canonicalize_argtypes (tree argtypes,
8095 bool *any_structural_p,
8096 bool *any_noncanonical_p)
8097 {
8098 tree arg;
8099 bool any_noncanonical_argtypes_p = false;
8100
8101 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8102 {
8103 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8104 /* Fail gracefully by stating that the type is structural. */
8105 *any_structural_p = true;
8106 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8107 *any_structural_p = true;
8108 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8109 || TREE_PURPOSE (arg))
8110 /* If the argument has a default argument, we consider it
8111 non-canonical even though the type itself is canonical.
8112 That way, different variants of function and method types
8113 with default arguments will all point to the variant with
8114 no defaults as their canonical type. */
8115 any_noncanonical_argtypes_p = true;
8116 }
8117
8118 if (*any_structural_p)
8119 return argtypes;
8120
8121 if (any_noncanonical_argtypes_p)
8122 {
8123 /* Build the canonical list of argument types. */
8124 tree canon_argtypes = NULL_TREE;
8125 bool is_void = false;
8126
8127 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8128 {
8129 if (arg == void_list_node)
8130 is_void = true;
8131 else
8132 canon_argtypes = tree_cons (NULL_TREE,
8133 TYPE_CANONICAL (TREE_VALUE (arg)),
8134 canon_argtypes);
8135 }
8136
8137 canon_argtypes = nreverse (canon_argtypes);
8138 if (is_void)
8139 canon_argtypes = chainon (canon_argtypes, void_list_node);
8140
8141 /* There is a non-canonical type. */
8142 *any_noncanonical_p = true;
8143 return canon_argtypes;
8144 }
8145
8146 /* The canonical argument types are the same as ARGTYPES. */
8147 return argtypes;
8148 }
8149
8150 /* Construct, lay out and return
8151 the type of functions returning type VALUE_TYPE
8152 given arguments of types ARG_TYPES.
8153 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8154 are data type nodes for the arguments of the function.
8155 If such a type has already been constructed, reuse it. */
8156
8157 tree
8158 build_function_type (tree value_type, tree arg_types)
8159 {
8160 tree t;
8161 inchash::hash hstate;
8162 bool any_structural_p, any_noncanonical_p;
8163 tree canon_argtypes;
8164
8165 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8166 {
8167 error ("function return type cannot be function");
8168 value_type = integer_type_node;
8169 }
8170
8171 /* Make a node of the sort we want. */
8172 t = make_node (FUNCTION_TYPE);
8173 TREE_TYPE (t) = value_type;
8174 TYPE_ARG_TYPES (t) = arg_types;
8175
8176 /* If we already have such a type, use the old one. */
8177 hstate.add_object (TYPE_HASH (value_type));
8178 type_hash_list (arg_types, hstate);
8179 t = type_hash_canon (hstate.end (), t);
8180
8181 /* Set up the canonical type. */
8182 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8183 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8184 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8185 &any_structural_p,
8186 &any_noncanonical_p);
8187 if (any_structural_p)
8188 SET_TYPE_STRUCTURAL_EQUALITY (t);
8189 else if (any_noncanonical_p)
8190 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8191 canon_argtypes);
8192
8193 if (!COMPLETE_TYPE_P (t))
8194 layout_type (t);
8195 return t;
8196 }
8197
8198 /* Build a function type. The RETURN_TYPE is the type returned by the
8199 function. If VAARGS is set, no void_type_node is appended to the
8200 list. ARGP must always be terminated by a NULL_TREE. */
8201
8202 static tree
8203 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8204 {
8205 tree t, args, last;
8206
8207 t = va_arg (argp, tree);
8208 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8209 args = tree_cons (NULL_TREE, t, args);
8210
8211 if (vaargs)
8212 {
8213 last = args;
8214 if (args != NULL_TREE)
8215 args = nreverse (args);
8216 gcc_assert (last != void_list_node);
8217 }
8218 else if (args == NULL_TREE)
8219 args = void_list_node;
8220 else
8221 {
8222 last = args;
8223 args = nreverse (args);
8224 TREE_CHAIN (last) = void_list_node;
8225 }
8226 args = build_function_type (return_type, args);
8227
8228 return args;
8229 }
8230
8231 /* Build a function type. The RETURN_TYPE is the type returned by the
8232 function. If additional arguments are provided, they are
8233 additional argument types. The list of argument types must always
8234 be terminated by NULL_TREE. */
8235
8236 tree
8237 build_function_type_list (tree return_type, ...)
8238 {
8239 tree args;
8240 va_list p;
8241
8242 va_start (p, return_type);
8243 args = build_function_type_list_1 (false, return_type, p);
8244 va_end (p);
8245 return args;
8246 }
8247
8248 /* Build a variable argument function type. The RETURN_TYPE is the
8249 type returned by the function. If additional arguments are provided,
8250 they are additional argument types. The list of argument types must
8251 always be terminated by NULL_TREE. */
8252
8253 tree
8254 build_varargs_function_type_list (tree return_type, ...)
8255 {
8256 tree args;
8257 va_list p;
8258
8259 va_start (p, return_type);
8260 args = build_function_type_list_1 (true, return_type, p);
8261 va_end (p);
8262
8263 return args;
8264 }
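/* Usage sketch (illustrative): the type of "int f (double)" and of a
   varargs function "int g (void *, ...)" can be built as

     tree f_type = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);
     tree g_type = build_varargs_function_type_list (integer_type_node,
                                                     ptr_type_node,
                                                     NULL_TREE);

   The non-varargs variant terminates the argument list with
   void_list_node; the varargs variant deliberately leaves it open.  */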
8265
8266 /* Build a function type. RETURN_TYPE is the type returned by the
8267 function; VAARGS indicates whether the function takes varargs. The
8268 function takes N named arguments, the types of which are provided in
8269 ARG_TYPES. */
8270
8271 static tree
8272 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8273 tree *arg_types)
8274 {
8275 int i;
8276 tree t = vaargs ? NULL_TREE : void_list_node;
8277
8278 for (i = n - 1; i >= 0; i--)
8279 t = tree_cons (NULL_TREE, arg_types[i], t);
8280
8281 return build_function_type (return_type, t);
8282 }
8283
8284 /* Build a function type. RETURN_TYPE is the type returned by the
8285 function. The function takes N named arguments, the types of which
8286 are provided in ARG_TYPES. */
8287
8288 tree
8289 build_function_type_array (tree return_type, int n, tree *arg_types)
8290 {
8291 return build_function_type_array_1 (false, return_type, n, arg_types);
8292 }
8293
8294 /* Build a variable argument function type. RETURN_TYPE is the type
8295 returned by the function. The function takes N named arguments, the
8296 types of which are provided in ARG_TYPES. */
8297
8298 tree
8299 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8300 {
8301 return build_function_type_array_1 (true, return_type, n, arg_types);
8302 }
8303
8304 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8305 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8306 for the method. An implicit additional parameter (of type
8307 pointer-to-BASETYPE) is added to the ARGTYPES. */
8308
8309 tree
8310 build_method_type_directly (tree basetype,
8311 tree rettype,
8312 tree argtypes)
8313 {
8314 tree t;
8315 tree ptype;
8316 inchash::hash hstate;
8317 bool any_structural_p, any_noncanonical_p;
8318 tree canon_argtypes;
8319
8320 /* Make a node of the sort we want. */
8321 t = make_node (METHOD_TYPE);
8322
8323 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8324 TREE_TYPE (t) = rettype;
8325 ptype = build_pointer_type (basetype);
8326
8327 /* The actual arglist for this function includes a "hidden" argument
8328 which is "this". Put it into the list of argument types. */
8329 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8330 TYPE_ARG_TYPES (t) = argtypes;
8331
8332 /* If we already have such a type, use the old one. */
8333 hstate.add_object (TYPE_HASH (basetype));
8334 hstate.add_object (TYPE_HASH (rettype));
8335 type_hash_list (argtypes, hstate);
8336 t = type_hash_canon (hstate.end (), t);
8337
8338 /* Set up the canonical type. */
8339 any_structural_p
8340 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8341 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8342 any_noncanonical_p
8343 = (TYPE_CANONICAL (basetype) != basetype
8344 || TYPE_CANONICAL (rettype) != rettype);
8345 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8346 &any_structural_p,
8347 &any_noncanonical_p);
8348 if (any_structural_p)
8349 SET_TYPE_STRUCTURAL_EQUALITY (t);
8350 else if (any_noncanonical_p)
8351 TYPE_CANONICAL (t)
8352 = build_method_type_directly (TYPE_CANONICAL (basetype),
8353 TYPE_CANONICAL (rettype),
8354 canon_argtypes);
8355 if (!COMPLETE_TYPE_P (t))
8356 layout_type (t);
8357
8358 return t;
8359 }
8360
8361 /* Construct, lay out and return the type of methods belonging to class
8362 BASETYPE and whose arguments and values are described by TYPE.
8363 If that type exists already, reuse it.
8364 TYPE must be a FUNCTION_TYPE node. */
8365
8366 tree
8367 build_method_type (tree basetype, tree type)
8368 {
8369 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8370
8371 return build_method_type_directly (basetype,
8372 TREE_TYPE (type),
8373 TYPE_ARG_TYPES (type));
8374 }
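/* Usage sketch (illustrative; c_type stands for some RECORD_TYPE that
   is not defined here): given the FUNCTION_TYPE of "int (double)",

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);
     tree mtype = build_method_type (c_type, fntype);

   yields a METHOD_TYPE whose TYPE_ARG_TYPES starts with the implicit
   "this" parameter of type pointer-to-c_type, followed by the original
   argument list.  */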
8375
8376 /* Construct, lay out and return the type of offsets to a value
8377 of type TYPE, within an object of type BASETYPE.
8378 If a suitable offset type exists already, reuse it. */
8379
8380 tree
8381 build_offset_type (tree basetype, tree type)
8382 {
8383 tree t;
8384 inchash::hash hstate;
8385
8386 /* Make a node of the sort we want. */
8387 t = make_node (OFFSET_TYPE);
8388
8389 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8390 TREE_TYPE (t) = type;
8391
8392 /* If we already have such a type, use the old one. */
8393 hstate.add_object (TYPE_HASH (basetype));
8394 hstate.add_object (TYPE_HASH (type));
8395 t = type_hash_canon (hstate.end (), t);
8396
8397 if (!COMPLETE_TYPE_P (t))
8398 layout_type (t);
8399
8400 if (TYPE_CANONICAL (t) == t)
8401 {
8402 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8403 || TYPE_STRUCTURAL_EQUALITY_P (type))
8404 SET_TYPE_STRUCTURAL_EQUALITY (t);
8405 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8406 || TYPE_CANONICAL (type) != type)
8407 TYPE_CANONICAL (t)
8408 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8409 TYPE_CANONICAL (type));
8410 }
8411
8412 return t;
8413 }
8414
8415 /* Create a complex type whose components are COMPONENT_TYPE. */
8416
8417 tree
8418 build_complex_type (tree component_type)
8419 {
8420 tree t;
8421 inchash::hash hstate;
8422
8423 gcc_assert (INTEGRAL_TYPE_P (component_type)
8424 || SCALAR_FLOAT_TYPE_P (component_type)
8425 || FIXED_POINT_TYPE_P (component_type));
8426
8427 /* Make a node of the sort we want. */
8428 t = make_node (COMPLEX_TYPE);
8429
8430 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8431
8432 /* If we already have such a type, use the old one. */
8433 hstate.add_object (TYPE_HASH (component_type));
8434 t = type_hash_canon (hstate.end (), t);
8435
8436 if (!COMPLETE_TYPE_P (t))
8437 layout_type (t);
8438
8439 if (TYPE_CANONICAL (t) == t)
8440 {
8441 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8442 SET_TYPE_STRUCTURAL_EQUALITY (t);
8443 else if (TYPE_CANONICAL (component_type) != component_type)
8444 TYPE_CANONICAL (t)
8445 = build_complex_type (TYPE_CANONICAL (component_type));
8446 }
8447
8448 /* We need to create a name, since complex is a fundamental type. */
8449 if (! TYPE_NAME (t))
8450 {
8451 const char *name;
8452 if (component_type == char_type_node)
8453 name = "complex char";
8454 else if (component_type == signed_char_type_node)
8455 name = "complex signed char";
8456 else if (component_type == unsigned_char_type_node)
8457 name = "complex unsigned char";
8458 else if (component_type == short_integer_type_node)
8459 name = "complex short int";
8460 else if (component_type == short_unsigned_type_node)
8461 name = "complex short unsigned int";
8462 else if (component_type == integer_type_node)
8463 name = "complex int";
8464 else if (component_type == unsigned_type_node)
8465 name = "complex unsigned int";
8466 else if (component_type == long_integer_type_node)
8467 name = "complex long int";
8468 else if (component_type == long_unsigned_type_node)
8469 name = "complex long unsigned int";
8470 else if (component_type == long_long_integer_type_node)
8471 name = "complex long long int";
8472 else if (component_type == long_long_unsigned_type_node)
8473 name = "complex long long unsigned int";
8474 else
8475 name = 0;
8476
8477 if (name != 0)
8478 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8479 get_identifier (name), t);
8480 }
8481
8482 return build_qualified_type (t, TYPE_QUALS (component_type));
8483 }
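/* Usage sketch (illustrative): the counterpart of the C type
   "_Complex double" is built with

     tree cdbl = build_complex_type (double_type_node);

   Any qualifiers on the component type are propagated to the result
   through the build_qualified_type call above.  */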
8484
8485 /* If TYPE is a real or complex floating-point type and the target
8486 does not directly support arithmetic on TYPE then return the wider
8487 type to be used for arithmetic on TYPE. Otherwise, return
8488 NULL_TREE. */
8489
8490 tree
8491 excess_precision_type (tree type)
8492 {
8493 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8494 {
8495 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8496 switch (TREE_CODE (type))
8497 {
8498 case REAL_TYPE:
8499 switch (flt_eval_method)
8500 {
8501 case 1:
8502 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8503 return double_type_node;
8504 break;
8505 case 2:
8506 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8507 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8508 return long_double_type_node;
8509 break;
8510 default:
8511 gcc_unreachable ();
8512 }
8513 break;
8514 case COMPLEX_TYPE:
8515 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8516 return NULL_TREE;
8517 switch (flt_eval_method)
8518 {
8519 case 1:
8520 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8521 return complex_double_type_node;
8522 break;
8523 case 2:
8524 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8525 || (TYPE_MODE (TREE_TYPE (type))
8526 == TYPE_MODE (double_type_node)))
8527 return complex_long_double_type_node;
8528 break;
8529 default:
8530 gcc_unreachable ();
8531 }
8532 break;
8533 default:
8534 break;
8535 }
8536 }
8537 return NULL_TREE;
8538 }
8539 \f
8540 /* Return OP, stripped of any conversions to wider types as much as is safe.
8541 Converting the value back to OP's type makes a value equivalent to OP.
8542
8543 If FOR_TYPE is nonzero, we return a value which, if converted to
8544 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8545
8546 OP must have integer, real or enumeral type. Pointers are not allowed!
8547
8548 There are some cases where the obvious value we could return
8549 would regenerate to OP if converted to OP's type,
8550 but would not extend like OP to wider types.
8551 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8552 For example, if OP is (unsigned short)(signed char)-1,
8553 we avoid returning (signed char)-1 if FOR_TYPE is int,
8554 even though extending that to an unsigned short would regenerate OP,
8555 since the result of extending (signed char)-1 to (int)
8556 is different from (int) OP. */
8557
8558 tree
8559 get_unwidened (tree op, tree for_type)
8560 {
8561 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8562 tree type = TREE_TYPE (op);
8563 unsigned final_prec
8564 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8565 int uns
8566 = (for_type != 0 && for_type != type
8567 && final_prec > TYPE_PRECISION (type)
8568 && TYPE_UNSIGNED (type));
8569 tree win = op;
8570
8571 while (CONVERT_EXPR_P (op))
8572 {
8573 int bitschange;
8574
8575 /* TYPE_PRECISION on vector types has different meaning
8576 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8577 so avoid them here. */
8578 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8579 break;
8580
8581 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8582 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8583
8584 /* Truncations are many-one so cannot be removed,
8585 unless we are later going to truncate down even further. */
8586 if (bitschange < 0
8587 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8588 break;
8589
8590 /* See what's inside this conversion. If we decide to strip it,
8591 we will set WIN. */
8592 op = TREE_OPERAND (op, 0);
8593
8594 /* If we have not stripped any zero-extensions (uns is 0),
8595 we can strip any kind of extension.
8596 If we have previously stripped a zero-extension,
8597 only zero-extensions can safely be stripped.
8598 Any extension can be stripped if the bits it would produce
8599 are all going to be discarded later by truncating to FOR_TYPE. */
8600
8601 if (bitschange > 0)
8602 {
8603 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8604 win = op;
8605 /* TYPE_UNSIGNED says whether this is a zero-extension.
8606 Let's avoid computing it if it does not affect WIN
8607 and if UNS will not be needed again. */
8608 if ((uns
8609 || CONVERT_EXPR_P (op))
8610 && TYPE_UNSIGNED (TREE_TYPE (op)))
8611 {
8612 uns = 1;
8613 win = op;
8614 }
8615 }
8616 }
8617
8618 /* If we finally reach a constant see if it fits in for_type and
8619 in that case convert it. */
8620 if (for_type
8621 && TREE_CODE (win) == INTEGER_CST
8622 && TREE_TYPE (win) != for_type
8623 && int_fits_type_p (win, for_type))
8624 win = fold_convert (for_type, win);
8625
8626 return win;
8627 }
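/* Usage sketch (illustrative; op and s stand for trees not defined
   here, where op is the NOP_EXPR "(int) s" and s is a variable of type
   short): the widening conversion can be stripped, so

     tree narrowed = get_unwidened (op, NULL_TREE);

   returns "s" itself, since converting the short value back to int
   regenerates OP exactly.  */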
8628 \f
8629 /* Return OP or a simpler expression for a narrower value
8630 which can be sign-extended or zero-extended to give back OP.
8631 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8632 or 0 if the value should be sign-extended. */
8633
8634 tree
8635 get_narrower (tree op, int *unsignedp_ptr)
8636 {
8637 int uns = 0;
8638 int first = 1;
8639 tree win = op;
8640 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8641
8642 while (TREE_CODE (op) == NOP_EXPR)
8643 {
8644 int bitschange
8645 = (TYPE_PRECISION (TREE_TYPE (op))
8646 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8647
8648 /* Truncations are many-one so cannot be removed. */
8649 if (bitschange < 0)
8650 break;
8651
8652 /* See what's inside this conversion. If we decide to strip it,
8653 we will set WIN. */
8654
8655 if (bitschange > 0)
8656 {
8657 op = TREE_OPERAND (op, 0);
8658 /* An extension: the outermost one can be stripped,
8659 but remember whether it is zero or sign extension. */
8660 if (first)
8661 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8662 /* Otherwise, if a sign extension has been stripped,
8663 only sign extensions can now be stripped;
8664 if a zero extension has been stripped, only zero-extensions. */
8665 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8666 break;
8667 first = 0;
8668 }
8669 else /* bitschange == 0 */
8670 {
8671 /* A change in nominal type can always be stripped, but we must
8672 preserve the unsignedness. */
8673 if (first)
8674 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8675 first = 0;
8676 op = TREE_OPERAND (op, 0);
8677 /* Keep trying to narrow, but don't assign op to win if it
8678 would turn an integral type into something else. */
8679 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8680 continue;
8681 }
8682
8683 win = op;
8684 }
8685
8686 if (TREE_CODE (op) == COMPONENT_REF
8687 /* Since type_for_size always gives an integer type. */
8688 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8689 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8690 /* Ensure field is laid out already. */
8691 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8692 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8693 {
8694 unsigned HOST_WIDE_INT innerprec
8695 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8696 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8697 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8698 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8699
8700 /* We can get this structure field in a narrower type that fits it,
8701 but the resulting extension to its nominal type (a fullword type)
8702 must satisfy the same conditions as for other extensions.
8703
8704 Do this only for fields that are aligned (not bit-fields),
8705 because when bit-field insns will be used there is no
8706 advantage in doing this. */
8707
8708 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8709 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8710 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8711 && type != 0)
8712 {
8713 if (first)
8714 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8715 win = fold_convert (type, op);
8716 }
8717 }
8718
8719 *unsignedp_ptr = uns;
8720 return win;
8721 }
8722 \f
8723 /* Returns true if integer constant C has a value that is permissible
8724 for type TYPE (an INTEGER_TYPE). */
8725
8726 bool
8727 int_fits_type_p (const_tree c, const_tree type)
8728 {
8729 tree type_low_bound, type_high_bound;
8730 bool ok_for_low_bound, ok_for_high_bound;
8731 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8732
8733 retry:
8734 type_low_bound = TYPE_MIN_VALUE (type);
8735 type_high_bound = TYPE_MAX_VALUE (type);
8736
8737 /* If at least one bound of the type is a constant integer, we can check
8738 ourselves and maybe make a decision. If no such decision is possible, but
8739 this type is a subtype, try checking against that. Otherwise, use
8740 fits_to_tree_p, which checks against the precision.
8741
8742 Compute the status for each possibly constant bound, and return
8743 immediately if we see that the constant does not satisfy one of them.
8744 Use ok_for_xxx_bound to record whether the corresponding bound is a
8745 known constant that the constant is known to satisfy. */
8746
8747 /* Check if c >= type_low_bound. */
8748 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8749 {
8750 if (tree_int_cst_lt (c, type_low_bound))
8751 return false;
8752 ok_for_low_bound = true;
8753 }
8754 else
8755 ok_for_low_bound = false;
8756
8757 /* Check if c <= type_high_bound. */
8758 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8759 {
8760 if (tree_int_cst_lt (type_high_bound, c))
8761 return false;
8762 ok_for_high_bound = true;
8763 }
8764 else
8765 ok_for_high_bound = false;
8766
8767 /* If the constant fits both bounds, the result is known. */
8768 if (ok_for_low_bound && ok_for_high_bound)
8769 return true;
8770
8771 /* Perform some generic filtering which may allow making a decision
8772 even if the bounds are not constant. First, negative integers
8773 never fit in unsigned types. */
8774 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8775 return false;
8776
8777 /* Second, narrower types always fit in wider ones. */
8778 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8779 return true;
8780
8781 /* Third, unsigned integers with top bit set never fit signed types. */
8782 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8783 {
8784 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8785 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8786 {
8787 /* When a tree_cst is converted to a wide-int, the precision
8788 is taken from the type. However, if the precision of the
8789 mode underneath the type is smaller than that, it is
8790 possible that the value will not fit. The test below
8791 fails if any bit is set between the sign bit of the
8792 underlying mode and the top bit of the type. */
8793 if (wi::ne_p (wi::zext (c, prec - 1), c))
8794 return false;
8795 }
8796 else if (wi::neg_p (c))
8797 return false;
8798 }
8799
8800 /* If we haven't been able to decide at this point, there is nothing more
8801 we can check ourselves here. Look at the base type if we have one and it
8802 has the same precision. */
8803 if (TREE_CODE (type) == INTEGER_TYPE
8804 && TREE_TYPE (type) != 0
8805 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8806 {
8807 type = TREE_TYPE (type);
8808 goto retry;
8809 }
8810
8811 /* Or to fits_to_tree_p, if nothing else. */
8812 return wi::fits_to_tree_p (c, type);
8813 }
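/* Usage sketch (illustrative): deciding whether the constant 300 fits
   in "unsigned char" (assuming the usual 8-bit type):

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   Here 300 exceeds TYPE_MAX_VALUE of unsigned char, so the high-bound
   check above makes the function return false.  */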
8814
8815 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8816 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8817 represented (assuming two's-complement arithmetic) within the bit
8818 precision of the type are returned instead. */
8819
8820 void
8821 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8822 {
8823 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8824 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8825 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8826 else
8827 {
8828 if (TYPE_UNSIGNED (type))
8829 mpz_set_ui (min, 0);
8830 else
8831 {
8832 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8833 wi::to_mpz (mn, min, SIGNED);
8834 }
8835 }
8836
8837 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8838 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8839 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8840 else
8841 {
8842 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8843 wi::to_mpz (mn, max, TYPE_SIGN (type));
8844 }
8845 }
8846
8847 /* Return true if VAR is an automatic variable defined in function FN. */
8848
8849 bool
8850 auto_var_in_fn_p (const_tree var, const_tree fn)
8851 {
8852 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8853 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8854 || TREE_CODE (var) == PARM_DECL)
8855 && ! TREE_STATIC (var))
8856 || TREE_CODE (var) == LABEL_DECL
8857 || TREE_CODE (var) == RESULT_DECL));
8858 }
8859
8860 /* Subprogram of following function. Called by walk_tree.
8861
8862 Return *TP if it is an automatic variable or parameter of the
8863 function passed in as DATA. */
8864
8865 static tree
8866 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8867 {
8868 tree fn = (tree) data;
8869
8870 if (TYPE_P (*tp))
8871 *walk_subtrees = 0;
8872
8873 else if (DECL_P (*tp)
8874 && auto_var_in_fn_p (*tp, fn))
8875 return *tp;
8876
8877 return NULL_TREE;
8878 }
8879
8880 /* Returns true if T is, contains, or refers to a type with variable
8881 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8882 arguments, but not the return type. If FN is nonzero, only return
8883 true if a modifier of the type or position of FN is a variable or
8884 parameter inside FN.
8885
8886 This concept is more general than that of C99 'variably modified types':
8887 in C99, a struct type is never variably modified because a VLA may not
8888 appear as a structure member. However, in GNU C, code like:
8889
8890 struct S { int i[f()]; };
8891
8892 is valid, and other languages may define similar constructs. */
8893
8894 bool
8895 variably_modified_type_p (tree type, tree fn)
8896 {
8897 tree t;
8898
8899 /* Test if T is either variable (if FN is zero) or an expression containing
8900 a variable in FN. If TYPE isn't gimplified, return true also if
8901 gimplify_one_sizepos would gimplify the expression into a local
8902 variable. */
8903 #define RETURN_TRUE_IF_VAR(T) \
8904 do { tree _t = (T); \
8905 if (_t != NULL_TREE \
8906 && _t != error_mark_node \
8907 && TREE_CODE (_t) != INTEGER_CST \
8908 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8909 && (!fn \
8910 || (!TYPE_SIZES_GIMPLIFIED (type) \
8911 && !is_gimple_sizepos (_t)) \
8912 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8913 return true; } while (0)
8914
8915 if (type == error_mark_node)
8916 return false;
8917
8918 /* If TYPE itself has variable size, it is variably modified. */
8919 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8920 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8921
8922 switch (TREE_CODE (type))
8923 {
8924 case POINTER_TYPE:
8925 case REFERENCE_TYPE:
8926 case VECTOR_TYPE:
8927 if (variably_modified_type_p (TREE_TYPE (type), fn))
8928 return true;
8929 break;
8930
8931 case FUNCTION_TYPE:
8932 case METHOD_TYPE:
8933 /* If TYPE is a function type, it is variably modified if the
8934 return type is variably modified. */
8935 if (variably_modified_type_p (TREE_TYPE (type), fn))
8936 return true;
8937 break;
8938
8939 case INTEGER_TYPE:
8940 case REAL_TYPE:
8941 case FIXED_POINT_TYPE:
8942 case ENUMERAL_TYPE:
8943 case BOOLEAN_TYPE:
8944 /* Scalar types are variably modified if their end points
8945 aren't constant. */
8946 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8947 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8948 break;
8949
8950 case RECORD_TYPE:
8951 case UNION_TYPE:
8952 case QUAL_UNION_TYPE:
8953 /* We can't see if any of the fields are variably-modified by the
8954 definition we normally use, since that would produce infinite
8955 recursion via pointers. */
8956 /* This is variably modified if some field's type is. */
8957 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8958 if (TREE_CODE (t) == FIELD_DECL)
8959 {
8960 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8961 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8962 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8963
8964 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8965 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8966 }
8967 break;
8968
8969 case ARRAY_TYPE:
8970 /* Do not call ourselves to avoid infinite recursion. This is
8971 variably modified if the element type is. */
8972 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8973 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8974 break;
8975
8976 default:
8977 break;
8978 }
8979
8980 /* The current language may have other cases to check, but in general,
8981 all other types are not variably modified. */
8982 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8983
8984 #undef RETURN_TRUE_IF_VAR
8985 }
8986
8987 /* Given a DECL or TYPE, return the scope in which it was declared, or
8988 NULL_TREE if there is no containing scope. */
8989
8990 tree
8991 get_containing_scope (const_tree t)
8992 {
8993 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8994 }
8995
8996 /* Return the innermost context enclosing DECL that is
8997 a FUNCTION_DECL, or zero if none. */
8998
8999 tree
9000 decl_function_context (const_tree decl)
9001 {
9002 tree context;
9003
9004 if (TREE_CODE (decl) == ERROR_MARK)
9005 return 0;
9006
9007 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9008 where we look up the function at runtime. Such functions always take
9009 a first argument of type 'pointer to real context'.
9010
9011 C++ should really be fixed to use DECL_CONTEXT for the real context,
9012 and use something else for the "virtual context". */
9013 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9014 context
9015 = TYPE_MAIN_VARIANT
9016 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9017 else
9018 context = DECL_CONTEXT (decl);
9019
9020 while (context && TREE_CODE (context) != FUNCTION_DECL)
9021 {
9022 if (TREE_CODE (context) == BLOCK)
9023 context = BLOCK_SUPERCONTEXT (context);
9024 else
9025 context = get_containing_scope (context);
9026 }
9027
9028 return context;
9029 }
9030
9031 /* Return the innermost context enclosing DECL that is
9032 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9033 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9034
9035 tree
9036 decl_type_context (const_tree decl)
9037 {
9038 tree context = DECL_CONTEXT (decl);
9039
9040 while (context)
9041 switch (TREE_CODE (context))
9042 {
9043 case NAMESPACE_DECL:
9044 case TRANSLATION_UNIT_DECL:
9045 return NULL_TREE;
9046
9047 case RECORD_TYPE:
9048 case UNION_TYPE:
9049 case QUAL_UNION_TYPE:
9050 return context;
9051
9052 case TYPE_DECL:
9053 case FUNCTION_DECL:
9054 context = DECL_CONTEXT (context);
9055 break;
9056
9057 case BLOCK:
9058 context = BLOCK_SUPERCONTEXT (context);
9059 break;
9060
9061 default:
9062 gcc_unreachable ();
9063 }
9064
9065 return NULL_TREE;
9066 }
9067
9068 /* CALL is a CALL_EXPR. Return the declaration for the function
9069 called, or NULL_TREE if the called function cannot be
9070 determined. */
9071
9072 tree
9073 get_callee_fndecl (const_tree call)
9074 {
9075 tree addr;
9076
9077 if (call == error_mark_node)
9078 return error_mark_node;
9079
9080 /* It's invalid to call this function with anything but a
9081 CALL_EXPR. */
9082 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9083
9084 /* The first operand to the CALL is the address of the function
9085 called. */
9086 addr = CALL_EXPR_FN (call);
9087
9088 /* If there is no function, return early. */
9089 if (addr == NULL_TREE)
9090 return NULL_TREE;
9091
9092 STRIP_NOPS (addr);
9093
9094 /* If this is a readonly function pointer, extract its initial value. */
9095 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9096 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9097 && DECL_INITIAL (addr))
9098 addr = DECL_INITIAL (addr);
9099
9100 /* If the address is just `&f' for some function `f', then we know
9101 that `f' is being called. */
9102 if (TREE_CODE (addr) == ADDR_EXPR
9103 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9104 return TREE_OPERAND (addr, 0);
9105
9106 /* We couldn't figure out what was being called. */
9107 return NULL_TREE;
9108 }
9109
9110 /* Print debugging information about tree nodes generated during the compile,
9111 and any language-specific information. */
9112
9113 void
9114 dump_tree_statistics (void)
9115 {
9116 if (GATHER_STATISTICS)
9117 {
9118 int i;
9119 int total_nodes, total_bytes;
9120 fprintf (stderr, "Kind Nodes Bytes\n");
9121 fprintf (stderr, "---------------------------------------\n");
9122 total_nodes = total_bytes = 0;
9123 for (i = 0; i < (int) all_kinds; i++)
9124 {
9125 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9126 tree_node_counts[i], tree_node_sizes[i]);
9127 total_nodes += tree_node_counts[i];
9128 total_bytes += tree_node_sizes[i];
9129 }
9130 fprintf (stderr, "---------------------------------------\n");
9131 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9132 fprintf (stderr, "---------------------------------------\n");
9133 fprintf (stderr, "Code Nodes\n");
9134 fprintf (stderr, "----------------------------\n");
9135 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9136 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9137 tree_code_counts[i]);
9138 fprintf (stderr, "----------------------------\n");
9139 ssanames_print_statistics ();
9140 phinodes_print_statistics ();
9141 }
9142 else
9143 fprintf (stderr, "(No per-node statistics)\n");
9144
9145 print_type_hash_statistics ();
9146 print_debug_expr_statistics ();
9147 print_value_expr_statistics ();
9148 lang_hooks.print_statistics ();
9149 }
9150 \f
9151 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9152
9153 /* Generate a crc32 of a byte. */
9154
9155 static unsigned
9156 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9157 {
9158 unsigned ix;
9159
9160 for (ix = bits; ix--; value <<= 1)
9161 {
9162 unsigned feedback;
9163
9164 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9165 chksum <<= 1;
9166 chksum ^= feedback;
9167 }
9168 return chksum;
9169 }
9170
9171 /* Generate a crc32 of a 32-bit unsigned. */
9172
9173 unsigned
9174 crc32_unsigned (unsigned chksum, unsigned value)
9175 {
9176 return crc32_unsigned_bits (chksum, value, 32);
9177 }
9178
9179 /* Generate a crc32 of a byte. */
9180
9181 unsigned
9182 crc32_byte (unsigned chksum, char byte)
9183 {
9184 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9185 }
9186
9187 /* Generate a crc32 of a string. */
9188
9189 unsigned
9190 crc32_string (unsigned chksum, const char *string)
9191 {
9192 do
9193 {
9194 chksum = crc32_byte (chksum, *string);
9195 }
9196 while (*string++);
9197 return chksum;
9198 }
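/* Usage sketch (illustrative; extra_word stands for a hypothetical
   value to mix in): checksums are accumulated by chaining the helpers,

     unsigned chk = crc32_string (0, main_input_filename);
     chk = crc32_unsigned (chk, extra_word);

   get_file_function_name below uses crc32_string in this way to mix a
   name into the generated symbol.  */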
9199
9200 /* P is a string that will be used in a symbol. Mask out any characters
9201 that are not valid in that context. */
9202
9203 void
9204 clean_symbol_name (char *p)
9205 {
9206 for (; *p; p++)
9207 if (! (ISALNUM (*p)
9208 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9209 || *p == '$'
9210 #endif
9211 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9212 || *p == '.'
9213 #endif
9214 ))
9215 *p = '_';
9216 }
9217
9218 /* Generate a name for a special-purpose function.
9219 The generated name may need to be unique across the whole link.
9220 Changes to this function may also require corresponding changes to
9221 xstrdup_mask_random.
9222 TYPE is some string to identify the purpose of this function to the
9223 linker or collect2; it must start with an uppercase letter,
9224 one of:
9225 I - for constructors
9226 D - for destructors
9227 N - for C++ anonymous namespaces
9228 F - for DWARF unwind frame information. */
9229
9230 tree
9231 get_file_function_name (const char *type)
9232 {
9233 char *buf;
9234 const char *p;
9235 char *q;
9236
9237 /* If we already have a name we know to be unique, just use that. */
9238 if (first_global_object_name)
9239 p = q = ASTRDUP (first_global_object_name);
9240 /* If the target is handling the constructors/destructors, they
9241 will be local to this file and the name is only necessary for
9242 debugging purposes.
9243 We also assign sub_I and sub_D suffixes to constructors called from
9244 the global static constructors. These are always local. */
9245 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9246 || (strncmp (type, "sub_", 4) == 0
9247 && (type[4] == 'I' || type[4] == 'D')))
9248 {
9249 const char *file = main_input_filename;
9250 if (! file)
9251 file = LOCATION_FILE (input_location);
9252 /* Just use the file's basename, because the full pathname
9253 might be quite long. */
9254 p = q = ASTRDUP (lbasename (file));
9255 }
9256 else
9257 {
9258 /* Otherwise, the name must be unique across the entire link.
9259 We don't have anything that we know to be unique to this translation
9260 unit, so use what we do have and throw in some randomness. */
9261 unsigned len;
9262 const char *name = weak_global_object_name;
9263 const char *file = main_input_filename;
9264
9265 if (! name)
9266 name = "";
9267 if (! file)
9268 file = LOCATION_FILE (input_location);
9269
9270 len = strlen (file);
9271 q = (char *) alloca (9 + 17 + len + 1);
9272 memcpy (q, file, len + 1);
9273
9274 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9275 crc32_string (0, name), get_random_seed (false));
9276
9277 p = q;
9278 }
9279
9280 clean_symbol_name (q);
9281 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9282 + strlen (type));
9283
9284 /* Set up the name of the file-level functions we may need.
9285 Use a global object (which is already required to be unique over
9286 the program) rather than the file name (which imposes extra
9287 constraints). */
9288 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9289
9290 return get_identifier (buf);
9291 }
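/* Usage sketch (illustrative): a request such as

     tree ctor_id = get_file_function_name ("I");

   produces an IDENTIFIER_NODE of the form "_GLOBAL__I_<name>" (see
   FILE_FUNCTION_FORMAT), where <name> is the cleaned-up unique string
   selected by the logic above.  */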
9292 \f
9293 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9294
9295 /* Complain that the tree code of NODE does not match the expected 0
9296 terminated list of trailing codes. The trailing code list can be
9297 empty, for a more vague error message. FILE, LINE, and FUNCTION
9298 are of the caller. */
9299
9300 void
9301 tree_check_failed (const_tree node, const char *file,
9302 int line, const char *function, ...)
9303 {
9304 va_list args;
9305 const char *buffer;
9306 unsigned length = 0;
9307 enum tree_code code;
9308
9309 va_start (args, function);
9310 while ((code = (enum tree_code) va_arg (args, int)))
9311 length += 4 + strlen (get_tree_code_name (code));
9312 va_end (args);
9313 if (length)
9314 {
9315 char *tmp;
9316 va_start (args, function);
9317 length += strlen ("expected ");
9318 buffer = tmp = (char *) alloca (length);
9319 length = 0;
9320 while ((code = (enum tree_code) va_arg (args, int)))
9321 {
9322 const char *prefix = length ? " or " : "expected ";
9323
9324 strcpy (tmp + length, prefix);
9325 length += strlen (prefix);
9326 strcpy (tmp + length, get_tree_code_name (code));
9327 length += strlen (get_tree_code_name (code));
9328 }
9329 va_end (args);
9330 }
9331 else
9332 buffer = "unexpected node";
9333
9334 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9335 buffer, get_tree_code_name (TREE_CODE (node)),
9336 function, trim_filename (file), line);
9337 }
9338
9339 /* Complain that the tree code of NODE matches one of the codes in the
9340 0-terminated list of disallowed trailing codes. FILE, LINE, and
9341 FUNCTION are of the caller. */
9342
9343 void
9344 tree_not_check_failed (const_tree node, const char *file,
9345 int line, const char *function, ...)
9346 {
9347 va_list args;
9348 char *buffer;
9349 unsigned length = 0;
9350 enum tree_code code;
9351
9352 va_start (args, function);
9353 while ((code = (enum tree_code) va_arg (args, int)))
9354 length += 4 + strlen (get_tree_code_name (code));
9355 va_end (args);
9356 va_start (args, function);
9357 buffer = (char *) alloca (length);
9358 length = 0;
9359 while ((code = (enum tree_code) va_arg (args, int)))
9360 {
9361 if (length)
9362 {
9363 strcpy (buffer + length, " or ");
9364 length += 4;
9365 }
9366 strcpy (buffer + length, get_tree_code_name (code));
9367 length += strlen (get_tree_code_name (code));
9368 }
9369 va_end (args);
9370
9371 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9372 buffer, get_tree_code_name (TREE_CODE (node)),
9373 function, trim_filename (file), line);
9374 }
9375
9376 /* Similar to tree_check_failed, except that we check for a class of tree
9377 code, given in CL. */
9378
9379 void
9380 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9381 const char *file, int line, const char *function)
9382 {
9383 internal_error
9384 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9385 TREE_CODE_CLASS_STRING (cl),
9386 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9387 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9388 }
9389
9390 /* Similar to tree_check_failed, except that instead of specifying a
9391 dozen codes, use the knowledge that they're all sequential. */
9392
9393 void
9394 tree_range_check_failed (const_tree node, const char *file, int line,
9395 const char *function, enum tree_code c1,
9396 enum tree_code c2)
9397 {
9398 char *buffer;
9399 unsigned length = 0;
9400 unsigned int c;
9401
9402 for (c = c1; c <= c2; ++c)
9403 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9404
9405 length += strlen ("expected ");
9406 buffer = (char *) alloca (length);
9407 length = 0;
9408
9409 for (c = c1; c <= c2; ++c)
9410 {
9411 const char *prefix = length ? " or " : "expected ";
9412
9413 strcpy (buffer + length, prefix);
9414 length += strlen (prefix);
9415 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9416 length += strlen (get_tree_code_name ((enum tree_code) c));
9417 }
9418
9419 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9420 buffer, get_tree_code_name (TREE_CODE (node)),
9421 function, trim_filename (file), line);
9422 }
9423
9424
9425 /* Similar to tree_check_failed, except that we check that a tree does
9426 not belong to the tree code class given in CL. */
9427
9428 void
9429 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9430 const char *file, int line, const char *function)
9431 {
9432 internal_error
9433 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9434 TREE_CODE_CLASS_STRING (cl),
9435 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9436 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9437 }
9438
9439
9440 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9441
9442 void
9443 omp_clause_check_failed (const_tree node, const char *file, int line,
9444 const char *function, enum omp_clause_code code)
9445 {
9446 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9447 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9448 function, trim_filename (file), line);
9449 }
9450
9451
9452 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9453
9454 void
9455 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9456 const char *function, enum omp_clause_code c1,
9457 enum omp_clause_code c2)
9458 {
9459 char *buffer;
9460 unsigned length = 0;
9461 unsigned int c;
9462
9463 for (c = c1; c <= c2; ++c)
9464 length += 4 + strlen (omp_clause_code_name[c]);
9465
9466 length += strlen ("expected ");
9467 buffer = (char *) alloca (length);
9468 length = 0;
9469
9470 for (c = c1; c <= c2; ++c)
9471 {
9472 const char *prefix = length ? " or " : "expected ";
9473
9474 strcpy (buffer + length, prefix);
9475 length += strlen (prefix);
9476 strcpy (buffer + length, omp_clause_code_name[c]);
9477 length += strlen (omp_clause_code_name[c]);
9478 }
9479
9480 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9481 buffer, omp_clause_code_name[TREE_CODE (node)],
9482 function, trim_filename (file), line);
9483 }
9484
9485
9486 #undef DEFTREESTRUCT
9487 #define DEFTREESTRUCT(VAL, NAME) NAME,
9488
9489 static const char *ts_enum_names[] = {
9490 #include "treestruct.def"
9491 };
9492 #undef DEFTREESTRUCT
9493
9494 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9495
9496 /* Similar to tree_class_check_failed, except that we check for
9497 whether CODE contains the tree structure identified by EN. */
9498
9499 void
9500 tree_contains_struct_check_failed (const_tree node,
9501 const enum tree_node_structure_enum en,
9502 const char *file, int line,
9503 const char *function)
9504 {
9505 internal_error
9506 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9507 TS_ENUM_NAME (en),
9508 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9509 }
9510
9511
9512 /* Similar to above, except that the check is for the bounds of the
9513 (dynamically sized) element vector of a tree_int_cst. */
9514
9515 void
9516 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9517 const char *function)
9518 {
9519 internal_error
9520 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9521 idx + 1, len, function, trim_filename (file), line);
9522 }
9523
9524 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9525 (dynamically sized) vector. */
9526
9527 void
9528 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9529 const char *function)
9530 {
9531 internal_error
9532 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9533 idx + 1, len, function, trim_filename (file), line);
9534 }
9535
9536 /* Similar to above, except that the check is for the bounds of the operand
9537 vector of an expression node EXP. */
9538
9539 void
9540 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9541 int line, const char *function)
9542 {
9543 enum tree_code code = TREE_CODE (exp);
9544 internal_error
9545 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9546 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9547 function, trim_filename (file), line);
9548 }
9549
9550 /* Similar to above, except that the check is for the number of
9551 operands of an OMP_CLAUSE node. */
9552
9553 void
9554 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9555 int line, const char *function)
9556 {
9557 internal_error
9558 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9559 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9560 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9561 trim_filename (file), line);
9562 }
9563 #endif /* ENABLE_TREE_CHECKING */
9564 \f
9565 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9566 and mapped to the machine mode MODE. Initialize its fields and build
9567 the information necessary for debugging output. */
9568
9569 static tree
9570 make_vector_type (tree innertype, int nunits, machine_mode mode)
9571 {
9572 tree t;
9573 inchash::hash hstate;
9574
9575 t = make_node (VECTOR_TYPE);
9576 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9577 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9578 SET_TYPE_MODE (t, mode);
9579
9580 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9581 SET_TYPE_STRUCTURAL_EQUALITY (t);
9582 else if (TYPE_CANONICAL (innertype) != innertype
9583 || mode != VOIDmode)
9584 TYPE_CANONICAL (t)
9585 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9586
9587 layout_type (t);
9588
9589 hstate.add_wide_int (VECTOR_TYPE);
9590 hstate.add_wide_int (nunits);
9591 hstate.add_wide_int (mode);
9592 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9593 t = type_hash_canon (hstate.end (), t);
9594
9595 /* We have built a main variant, based on the main variant of the
9596 inner type. Use it to build the variant we return. */
9597 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9598 && TREE_TYPE (t) != innertype)
9599 return build_type_attribute_qual_variant (t,
9600 TYPE_ATTRIBUTES (innertype),
9601 TYPE_QUALS (innertype));
9602
9603 return t;
9604 }
9605
9606 static tree
9607 make_or_reuse_type (unsigned size, int unsignedp)
9608 {
9609 int i;
9610
9611 if (size == INT_TYPE_SIZE)
9612 return unsignedp ? unsigned_type_node : integer_type_node;
9613 if (size == CHAR_TYPE_SIZE)
9614 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9615 if (size == SHORT_TYPE_SIZE)
9616 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9617 if (size == LONG_TYPE_SIZE)
9618 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9619 if (size == LONG_LONG_TYPE_SIZE)
9620 return (unsignedp ? long_long_unsigned_type_node
9621 : long_long_integer_type_node);
9622
9623 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9624 if (size == int_n_data[i].bitsize
9625 && int_n_enabled_p[i])
9626 return (unsignedp ? int_n_trees[i].unsigned_type
9627 : int_n_trees[i].signed_type);
9628
9629 if (unsignedp)
9630 return make_unsigned_type (size);
9631 else
9632 return make_signed_type (size);
9633 }
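
/* For instance, on a typical LP64 target where LONG_TYPE_SIZE is 64,

     tree t = make_or_reuse_type (64, 1);

   simply returns long_unsigned_type_node rather than creating a fresh
   64-bit unsigned type; only sizes with no matching C type or enabled
   __intN entry fall through to make_signed_type/make_unsigned_type. */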
9634
9635 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9636
9637 static tree
9638 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9639 {
9640 if (satp)
9641 {
9642 if (size == SHORT_FRACT_TYPE_SIZE)
9643 return unsignedp ? sat_unsigned_short_fract_type_node
9644 : sat_short_fract_type_node;
9645 if (size == FRACT_TYPE_SIZE)
9646 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9647 if (size == LONG_FRACT_TYPE_SIZE)
9648 return unsignedp ? sat_unsigned_long_fract_type_node
9649 : sat_long_fract_type_node;
9650 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9651 return unsignedp ? sat_unsigned_long_long_fract_type_node
9652 : sat_long_long_fract_type_node;
9653 }
9654 else
9655 {
9656 if (size == SHORT_FRACT_TYPE_SIZE)
9657 return unsignedp ? unsigned_short_fract_type_node
9658 : short_fract_type_node;
9659 if (size == FRACT_TYPE_SIZE)
9660 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9661 if (size == LONG_FRACT_TYPE_SIZE)
9662 return unsignedp ? unsigned_long_fract_type_node
9663 : long_fract_type_node;
9664 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9665 return unsignedp ? unsigned_long_long_fract_type_node
9666 : long_long_fract_type_node;
9667 }
9668
9669 return make_fract_type (size, unsignedp, satp);
9670 }
9671
9672 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9673
9674 static tree
9675 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9676 {
9677 if (satp)
9678 {
9679 if (size == SHORT_ACCUM_TYPE_SIZE)
9680 return unsignedp ? sat_unsigned_short_accum_type_node
9681 : sat_short_accum_type_node;
9682 if (size == ACCUM_TYPE_SIZE)
9683 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9684 if (size == LONG_ACCUM_TYPE_SIZE)
9685 return unsignedp ? sat_unsigned_long_accum_type_node
9686 : sat_long_accum_type_node;
9687 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9688 return unsignedp ? sat_unsigned_long_long_accum_type_node
9689 : sat_long_long_accum_type_node;
9690 }
9691 else
9692 {
9693 if (size == SHORT_ACCUM_TYPE_SIZE)
9694 return unsignedp ? unsigned_short_accum_type_node
9695 : short_accum_type_node;
9696 if (size == ACCUM_TYPE_SIZE)
9697 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9698 if (size == LONG_ACCUM_TYPE_SIZE)
9699 return unsignedp ? unsigned_long_accum_type_node
9700 : long_accum_type_node;
9701 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9702 return unsignedp ? unsigned_long_long_accum_type_node
9703 : long_long_accum_type_node;
9704 }
9705
9706 return make_accum_type (size, unsignedp, satp);
9707 }
9708
9709
9710 /* Create an atomic variant node for TYPE. This routine is called
9711 during initialization of data types to create the 5 basic atomic
9712 types. The generic build_variant_type function requires these to
9713 already be set up in order to function properly, so cannot be
9714 called from there. If ALIGN is non-zero, then ensure alignment is
9715 overridden to this value. */
9716
9717 static tree
9718 build_atomic_base (tree type, unsigned int align)
9719 {
9720 tree t;
9721
9722 /* Make sure it's not already registered. */
9723 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9724 return t;
9725
9726 t = build_variant_type_copy (type);
9727 set_type_quals (t, TYPE_QUAL_ATOMIC);
9728
9729 if (align)
9730 TYPE_ALIGN (t) = align;
9731
9732 return t;
9733 }
9734
9735 /* Create nodes for all integer types (and error_mark_node) using the sizes
9736 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9737 SHORT_DOUBLE specifies whether double should be of the same precision
9738 as float. */
9739
9740 void
9741 build_common_tree_nodes (bool signed_char, bool short_double)
9742 {
9743 int i;
9744
9745 error_mark_node = make_node (ERROR_MARK);
9746 TREE_TYPE (error_mark_node) = error_mark_node;
9747
9748 initialize_sizetypes ();
9749
9750 /* Define both `signed char' and `unsigned char'. */
9751 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9752 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9753 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9754 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9755
9756 /* Define `char', which is like either `signed char' or `unsigned char'
9757 but not the same as either. */
9758 char_type_node
9759 = (signed_char
9760 ? make_signed_type (CHAR_TYPE_SIZE)
9761 : make_unsigned_type (CHAR_TYPE_SIZE));
9762 TYPE_STRING_FLAG (char_type_node) = 1;
9763
9764 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9765 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9766 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9767 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9768 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9769 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9770 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9771 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9772
9773 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9774 {
9775 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9776 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9777 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9778 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9779
9780 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9781 && int_n_enabled_p[i])
9782 {
9783 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9784 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9785 }
9786 }
9787
9788 /* Define a boolean type. This type only represents boolean values but
9789 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9790 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9791 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9792 TYPE_PRECISION (boolean_type_node) = 1;
9793 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9794
9795 /* Define what type to use for size_t. */
9796 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9797 size_type_node = unsigned_type_node;
9798 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9799 size_type_node = long_unsigned_type_node;
9800 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9801 size_type_node = long_long_unsigned_type_node;
9802 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9803 size_type_node = short_unsigned_type_node;
9804 else
9805 {
9806 int i;
9807
9808 size_type_node = NULL_TREE;
9809 for (i = 0; i < NUM_INT_N_ENTS; i++)
9810 if (int_n_enabled_p[i])
9811 {
9812 char name[50];
9813 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9814
9815 if (strcmp (name, SIZE_TYPE) == 0)
9816 {
9817 size_type_node = int_n_trees[i].unsigned_type;
9818 }
9819 }
9820 if (size_type_node == NULL_TREE)
9821 gcc_unreachable ();
9822 }
9823
9824 /* Fill in the rest of the sized types. Reuse existing type nodes
9825 when possible. */
9826 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9827 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9828 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9829 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9830 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9831
9832 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9833 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9834 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9835 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9836 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9837
9838 /* Don't call build_qualified_type for atomics. That routine does
9839 special processing for atomics, and until they are initialized
9840 it's better not to make that call.
9841
9842 Check to see if there is a target override for atomic types. */
9843
9844 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9845 targetm.atomic_align_for_mode (QImode));
9846 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9847 targetm.atomic_align_for_mode (HImode));
9848 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9849 targetm.atomic_align_for_mode (SImode));
9850 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9851 targetm.atomic_align_for_mode (DImode));
9852 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9853 targetm.atomic_align_for_mode (TImode));
9854
9855 access_public_node = get_identifier ("public");
9856 access_protected_node = get_identifier ("protected");
9857 access_private_node = get_identifier ("private");
9858
9859 /* Define these next since types below may use them. */
9860 integer_zero_node = build_int_cst (integer_type_node, 0);
9861 integer_one_node = build_int_cst (integer_type_node, 1);
9862 integer_three_node = build_int_cst (integer_type_node, 3);
9863 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9864
9865 size_zero_node = size_int (0);
9866 size_one_node = size_int (1);
9867 bitsize_zero_node = bitsize_int (0);
9868 bitsize_one_node = bitsize_int (1);
9869 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9870
9871 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9872 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9873
9874 void_type_node = make_node (VOID_TYPE);
9875 layout_type (void_type_node);
9876
9877 pointer_bounds_type_node = targetm.chkp_bound_type ();
9878
9879 /* We are not going to have real types in C with less than byte alignment,
9880 so we might as well not have any types that claim to have it. */
9881 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9882 TYPE_USER_ALIGN (void_type_node) = 0;
9883
9884 void_node = make_node (VOID_CST);
9885 TREE_TYPE (void_node) = void_type_node;
9886
9887 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9888 layout_type (TREE_TYPE (null_pointer_node));
9889
9890 ptr_type_node = build_pointer_type (void_type_node);
9891 const_ptr_type_node
9892 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9893 fileptr_type_node = ptr_type_node;
9894
9895 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9896
9897 float_type_node = make_node (REAL_TYPE);
9898 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9899 layout_type (float_type_node);
9900
9901 double_type_node = make_node (REAL_TYPE);
9902 if (short_double)
9903 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9904 else
9905 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9906 layout_type (double_type_node);
9907
9908 long_double_type_node = make_node (REAL_TYPE);
9909 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9910 layout_type (long_double_type_node);
9911
9912 float_ptr_type_node = build_pointer_type (float_type_node);
9913 double_ptr_type_node = build_pointer_type (double_type_node);
9914 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9915 integer_ptr_type_node = build_pointer_type (integer_type_node);
9916
9917 /* Fixed size integer types. */
9918 uint16_type_node = make_or_reuse_type (16, 1);
9919 uint32_type_node = make_or_reuse_type (32, 1);
9920 uint64_type_node = make_or_reuse_type (64, 1);
9921
9922 /* Decimal float types. */
9923 dfloat32_type_node = make_node (REAL_TYPE);
9924 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9925 layout_type (dfloat32_type_node);
9926 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9927 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9928
9929 dfloat64_type_node = make_node (REAL_TYPE);
9930 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9931 layout_type (dfloat64_type_node);
9932 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9933 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9934
9935 dfloat128_type_node = make_node (REAL_TYPE);
9936 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9937 layout_type (dfloat128_type_node);
9938 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9939 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9940
9941 complex_integer_type_node = build_complex_type (integer_type_node);
9942 complex_float_type_node = build_complex_type (float_type_node);
9943 complex_double_type_node = build_complex_type (double_type_node);
9944 complex_long_double_type_node = build_complex_type (long_double_type_node);
9945
9946 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9947 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9948 sat_ ## KIND ## _type_node = \
9949 make_sat_signed_ ## KIND ## _type (SIZE); \
9950 sat_unsigned_ ## KIND ## _type_node = \
9951 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9952 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9953 unsigned_ ## KIND ## _type_node = \
9954 make_unsigned_ ## KIND ## _type (SIZE);
9955
9956 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9957 sat_ ## WIDTH ## KIND ## _type_node = \
9958 make_sat_signed_ ## KIND ## _type (SIZE); \
9959 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9960 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9961 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9962 unsigned_ ## WIDTH ## KIND ## _type_node = \
9963 make_unsigned_ ## KIND ## _type (SIZE);
9964
9965 /* Make fixed-point type nodes based on four different widths. */
9966 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9967 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9968 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9969 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9970 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9971
9972 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9973 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9974 NAME ## _type_node = \
9975 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9976 u ## NAME ## _type_node = \
9977 make_or_reuse_unsigned_ ## KIND ## _type \
9978 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9979 sat_ ## NAME ## _type_node = \
9980 make_or_reuse_sat_signed_ ## KIND ## _type \
9981 (GET_MODE_BITSIZE (MODE ## mode)); \
9982 sat_u ## NAME ## _type_node = \
9983 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9984 (GET_MODE_BITSIZE (U ## MODE ## mode));
9985
9986 /* Fixed-point type and mode nodes. */
9987 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9988 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9989 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9990 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9991 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9992 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9993 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9994 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9995 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9996 MAKE_FIXED_MODE_NODE (accum, da, DA)
9997 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9998
9999 {
10000 tree t = targetm.build_builtin_va_list ();
10001
10002 /* Many back-ends define record types without setting TYPE_NAME.
10003 If we copied the record type here, we'd keep the original
10004 record type without a name. This breaks name mangling. So,
10005 don't copy record types and let c_common_nodes_and_builtins()
10006 declare the type to be __builtin_va_list. */
10007 if (TREE_CODE (t) != RECORD_TYPE)
10008 t = build_variant_type_copy (t);
10009
10010 va_list_type_node = t;
10011 }
10012 }
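
/* A sketch of the expected use: a front end calls the routine above once,
   early in its initialization, roughly as the C-family front ends do with
   their command-line driven flags:

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   and then, once it has registered the builtins it cares about, follows up
   with build_common_builtin_nodes (defined below). */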
10013
10014 /* Modify DECL for given flags.
10015 TM_PURE attribute is set only on types, so the function will modify
10016 DECL's type when ECF_TM_PURE is used. */
10017
10018 void
10019 set_call_expr_flags (tree decl, int flags)
10020 {
10021 if (flags & ECF_NOTHROW)
10022 TREE_NOTHROW (decl) = 1;
10023 if (flags & ECF_CONST)
10024 TREE_READONLY (decl) = 1;
10025 if (flags & ECF_PURE)
10026 DECL_PURE_P (decl) = 1;
10027 if (flags & ECF_LOOPING_CONST_OR_PURE)
10028 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10029 if (flags & ECF_NOVOPS)
10030 DECL_IS_NOVOPS (decl) = 1;
10031 if (flags & ECF_NORETURN)
10032 TREE_THIS_VOLATILE (decl) = 1;
10033 if (flags & ECF_MALLOC)
10034 DECL_IS_MALLOC (decl) = 1;
10035 if (flags & ECF_RETURNS_TWICE)
10036 DECL_IS_RETURNS_TWICE (decl) = 1;
10037 if (flags & ECF_LEAF)
10038 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10039 NULL, DECL_ATTRIBUTES (decl));
10040 if ((flags & ECF_TM_PURE) && flag_tm)
10041 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10042 /* Looping const or pure is implied by noreturn.
10043 There is currently no way to declare looping const or looping pure alone. */
10044 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10045 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10046 }
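
/* A minimal sketch of typical use, assuming DECL is a FUNCTION_DECL created
   elsewhere:

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   marks the function as const, non-throwing and leaf, i.e. it sets
   TREE_READONLY and TREE_NOTHROW and attaches the "leaf" attribute as
   described above. */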
10047
10048
10049 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10050
10051 static void
10052 local_define_builtin (const char *name, tree type, enum built_in_function code,
10053 const char *library_name, int ecf_flags)
10054 {
10055 tree decl;
10056
10057 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10058 library_name, NULL_TREE);
10059 set_call_expr_flags (decl, ecf_flags);
10060
10061 set_builtin_decl (code, decl, true);
10062 }
10063
10064 /* Call this function after instantiating all builtins that the language
10065 front end cares about. This will build the rest of the builtins
10066 and internal functions that are relied upon by the tree optimizers and
10067 the middle-end. */
10068
10069 void
10070 build_common_builtin_nodes (void)
10071 {
10072 tree tmp, ftype;
10073 int ecf_flags;
10074
10075 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10076 {
10077 ftype = build_function_type (void_type_node, void_list_node);
10078 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10079 "__builtin_unreachable",
10080 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10081 | ECF_CONST);
10082 }
10083
10084 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10085 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10086 {
10087 ftype = build_function_type_list (ptr_type_node,
10088 ptr_type_node, const_ptr_type_node,
10089 size_type_node, NULL_TREE);
10090
10091 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10092 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10093 "memcpy", ECF_NOTHROW | ECF_LEAF);
10094 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10095 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10096 "memmove", ECF_NOTHROW | ECF_LEAF);
10097 }
10098
10099 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10100 {
10101 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10102 const_ptr_type_node, size_type_node,
10103 NULL_TREE);
10104 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10105 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10106 }
10107
10108 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10109 {
10110 ftype = build_function_type_list (ptr_type_node,
10111 ptr_type_node, integer_type_node,
10112 size_type_node, NULL_TREE);
10113 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10114 "memset", ECF_NOTHROW | ECF_LEAF);
10115 }
10116
10117 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10118 {
10119 ftype = build_function_type_list (ptr_type_node,
10120 size_type_node, NULL_TREE);
10121 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10122 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10123 }
10124
10125 ftype = build_function_type_list (ptr_type_node, size_type_node,
10126 size_type_node, NULL_TREE);
10127 local_define_builtin ("__builtin_alloca_with_align", ftype,
10128 BUILT_IN_ALLOCA_WITH_ALIGN,
10129 "__builtin_alloca_with_align",
10130 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10131
10132 /* If we're checking the stack, `alloca' can throw. */
10133 if (flag_stack_check)
10134 {
10135 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10136 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10137 }
10138
10139 ftype = build_function_type_list (void_type_node,
10140 ptr_type_node, ptr_type_node,
10141 ptr_type_node, NULL_TREE);
10142 local_define_builtin ("__builtin_init_trampoline", ftype,
10143 BUILT_IN_INIT_TRAMPOLINE,
10144 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10145 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10146 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10147 "__builtin_init_heap_trampoline",
10148 ECF_NOTHROW | ECF_LEAF);
10149
10150 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10151 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10152 BUILT_IN_ADJUST_TRAMPOLINE,
10153 "__builtin_adjust_trampoline",
10154 ECF_CONST | ECF_NOTHROW);
10155
10156 ftype = build_function_type_list (void_type_node,
10157 ptr_type_node, ptr_type_node, NULL_TREE);
10158 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10159 BUILT_IN_NONLOCAL_GOTO,
10160 "__builtin_nonlocal_goto",
10161 ECF_NORETURN | ECF_NOTHROW);
10162
10163 ftype = build_function_type_list (void_type_node,
10164 ptr_type_node, ptr_type_node, NULL_TREE);
10165 local_define_builtin ("__builtin_setjmp_setup", ftype,
10166 BUILT_IN_SETJMP_SETUP,
10167 "__builtin_setjmp_setup", ECF_NOTHROW);
10168
10169 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10170 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10171 BUILT_IN_SETJMP_RECEIVER,
10172 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10173
10174 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10175 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10176 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10177
10178 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10179 local_define_builtin ("__builtin_stack_restore", ftype,
10180 BUILT_IN_STACK_RESTORE,
10181 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10182
10183 /* If there's a possibility that we might use the ARM EABI, build the
10184 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10185 if (targetm.arm_eabi_unwinder)
10186 {
10187 ftype = build_function_type_list (void_type_node, NULL_TREE);
10188 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10189 BUILT_IN_CXA_END_CLEANUP,
10190 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10191 }
10192
10193 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10194 local_define_builtin ("__builtin_unwind_resume", ftype,
10195 BUILT_IN_UNWIND_RESUME,
10196 ((targetm_common.except_unwind_info (&global_options)
10197 == UI_SJLJ)
10198 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10199 ECF_NORETURN);
10200
10201 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10202 {
10203 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10204 NULL_TREE);
10205 local_define_builtin ("__builtin_return_address", ftype,
10206 BUILT_IN_RETURN_ADDRESS,
10207 "__builtin_return_address",
10208 ECF_NOTHROW);
10209 }
10210
10211 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10212 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10213 {
10214 ftype = build_function_type_list (void_type_node, ptr_type_node,
10215 ptr_type_node, NULL_TREE);
10216 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10217 local_define_builtin ("__cyg_profile_func_enter", ftype,
10218 BUILT_IN_PROFILE_FUNC_ENTER,
10219 "__cyg_profile_func_enter", 0);
10220 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10221 local_define_builtin ("__cyg_profile_func_exit", ftype,
10222 BUILT_IN_PROFILE_FUNC_EXIT,
10223 "__cyg_profile_func_exit", 0);
10224 }
10225
10226 /* The exception object and filter values from the runtime. The argument
10227 must be zero before exception lowering, i.e. from the front end. After
10228 exception lowering, it will be the region number for the exception
10229 landing pad. These functions are PURE instead of CONST to prevent
10230 them from being hoisted past the exception edge that will initialize
10231 its value in the landing pad. */
10232 ftype = build_function_type_list (ptr_type_node,
10233 integer_type_node, NULL_TREE);
10234 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10235 /* Only use TM_PURE if we have TM language support. */
10236 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10237 ecf_flags |= ECF_TM_PURE;
10238 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10239 "__builtin_eh_pointer", ecf_flags);
10240
10241 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10242 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10243 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10244 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10245
10246 ftype = build_function_type_list (void_type_node,
10247 integer_type_node, integer_type_node,
10248 NULL_TREE);
10249 local_define_builtin ("__builtin_eh_copy_values", ftype,
10250 BUILT_IN_EH_COPY_VALUES,
10251 "__builtin_eh_copy_values", ECF_NOTHROW);
10252
10253 /* Complex multiplication and division. These are handled as builtins
10254 rather than optabs because emit_library_call_value doesn't support
10255 complex. Further, we can do slightly better with folding these
10256 beasties if the real and complex parts of the arguments are separate. */
10257 {
10258 int mode;
10259
10260 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10261 {
10262 char mode_name_buf[4], *q;
10263 const char *p;
10264 enum built_in_function mcode, dcode;
10265 tree type, inner_type;
10266 const char *prefix = "__";
10267
10268 if (targetm.libfunc_gnu_prefix)
10269 prefix = "__gnu_";
10270
10271 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10272 if (type == NULL)
10273 continue;
10274 inner_type = TREE_TYPE (type);
10275
10276 ftype = build_function_type_list (type, inner_type, inner_type,
10277 inner_type, inner_type, NULL_TREE);
10278
10279 mcode = ((enum built_in_function)
10280 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10281 dcode = ((enum built_in_function)
10282 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10283
10284 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10285 *q = TOLOWER (*p);
10286 *q = '\0';
10287
10288 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10289 NULL);
10290 local_define_builtin (built_in_names[mcode], ftype, mcode,
10291 built_in_names[mcode],
10292 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10293
10294 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10295 NULL);
10296 local_define_builtin (built_in_names[dcode], ftype, dcode,
10297 built_in_names[dcode],
10298 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10299 }
10300 }
10301
10302 init_internal_fns ();
10303 }
10304
10305 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10306 better way.
10307
10308 If we requested a pointer to a vector, build up the pointers that
10309 we stripped off while looking for the inner type. Similarly for
10310 return values from functions.
10311
10312 The argument TYPE is the top of the chain, and BOTTOM is the
10313 new type which we will point to. */
10314
10315 tree
10316 reconstruct_complex_type (tree type, tree bottom)
10317 {
10318 tree inner, outer;
10319
10320 if (TREE_CODE (type) == POINTER_TYPE)
10321 {
10322 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10323 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10324 TYPE_REF_CAN_ALIAS_ALL (type));
10325 }
10326 else if (TREE_CODE (type) == REFERENCE_TYPE)
10327 {
10328 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10329 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10330 TYPE_REF_CAN_ALIAS_ALL (type));
10331 }
10332 else if (TREE_CODE (type) == ARRAY_TYPE)
10333 {
10334 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10335 outer = build_array_type (inner, TYPE_DOMAIN (type));
10336 }
10337 else if (TREE_CODE (type) == FUNCTION_TYPE)
10338 {
10339 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10340 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10341 }
10342 else if (TREE_CODE (type) == METHOD_TYPE)
10343 {
10344 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10345 /* The build_method_type_directly () routine prepends 'this' to the
10346 argument list, so we must compensate by getting rid of it. */
10347 outer
10348 = build_method_type_directly
10349 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10350 inner,
10351 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10352 }
10353 else if (TREE_CODE (type) == OFFSET_TYPE)
10354 {
10355 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10356 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10357 }
10358 else
10359 return bottom;
10360
10361 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10362 TYPE_QUALS (type));
10363 }
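
/* For illustration, assuming V4SF_TYPE is a vector-of-float type obtained
   elsewhere:

     tree t = reconstruct_complex_type
       (build_pointer_type (build_pointer_type (float_type_node)),
        v4sf_type);

   yields a pointer to a pointer to V4SF_TYPE, with the attributes and
   qualifiers of each intermediate pointer type preserved. */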
10364
10365 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10366 the inner type. */
10367 tree
10368 build_vector_type_for_mode (tree innertype, machine_mode mode)
10369 {
10370 int nunits;
10371
10372 switch (GET_MODE_CLASS (mode))
10373 {
10374 case MODE_VECTOR_INT:
10375 case MODE_VECTOR_FLOAT:
10376 case MODE_VECTOR_FRACT:
10377 case MODE_VECTOR_UFRACT:
10378 case MODE_VECTOR_ACCUM:
10379 case MODE_VECTOR_UACCUM:
10380 nunits = GET_MODE_NUNITS (mode);
10381 break;
10382
10383 case MODE_INT:
10384 /* Check that there are no leftover bits. */
10385 gcc_assert (GET_MODE_BITSIZE (mode)
10386 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10387
10388 nunits = GET_MODE_BITSIZE (mode)
10389 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10390 break;
10391
10392 default:
10393 gcc_unreachable ();
10394 }
10395
10396 return make_vector_type (innertype, nunits, mode);
10397 }
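
/* For example, in the MODE_INT case above a 128-bit integer mode paired
   with intSI_type_node (32 bits) as the inner type yields 128 / 32 == 4
   units, whereas the MODE_VECTOR_* cases take the unit count directly
   from GET_MODE_NUNITS. */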
10398
10399 /* Similarly, but takes the inner type and number of units, which must be
10400 a power of two. */
10401
10402 tree
10403 build_vector_type (tree innertype, int nunits)
10404 {
10405 return make_vector_type (innertype, nunits, VOIDmode);
10406 }
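
/* A short sketch of typical use:

     tree v4si = build_vector_type (intSI_type_node, 4);

   builds (or reuses, via the type hash) a four-element vector of 32-bit
   integers; layout_type then assigns it the corresponding vector mode if
   the target provides one. */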
10407
10408 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10409
10410 tree
10411 build_opaque_vector_type (tree innertype, int nunits)
10412 {
10413 tree t = make_vector_type (innertype, nunits, VOIDmode);
10414 tree cand;
10415 /* We always build the non-opaque variant before the opaque one,
10416 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10417 cand = TYPE_NEXT_VARIANT (t);
10418 if (cand
10419 && TYPE_VECTOR_OPAQUE (cand)
10420 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10421 return cand;
10422 /* Otherwise build a variant type and make sure to queue it after
10423 the non-opaque type. */
10424 cand = build_distinct_type_copy (t);
10425 TYPE_VECTOR_OPAQUE (cand) = true;
10426 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10427 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10428 TYPE_NEXT_VARIANT (t) = cand;
10429 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10430 return cand;
10431 }
10432
10433
10434 /* Given an initializer INIT, return TRUE if INIT is zero or some
10435 aggregate of zeros. Otherwise return FALSE. */
10436 bool
10437 initializer_zerop (const_tree init)
10438 {
10439 tree elt;
10440
10441 STRIP_NOPS (init);
10442
10443 switch (TREE_CODE (init))
10444 {
10445 case INTEGER_CST:
10446 return integer_zerop (init);
10447
10448 case REAL_CST:
10449 /* ??? Note that this is not correct for C4X float formats. There,
10450 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10451 negative exponent. */
10452 return real_zerop (init)
10453 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10454
10455 case FIXED_CST:
10456 return fixed_zerop (init);
10457
10458 case COMPLEX_CST:
10459 return integer_zerop (init)
10460 || (real_zerop (init)
10461 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10462 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10463
10464 case VECTOR_CST:
10465 {
10466 unsigned i;
10467 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10468 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10469 return false;
10470 return true;
10471 }
10472
10473 case CONSTRUCTOR:
10474 {
10475 unsigned HOST_WIDE_INT idx;
10476
10477 if (TREE_CLOBBER_P (init))
10478 return false;
10479 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10480 if (!initializer_zerop (elt))
10481 return false;
10482 return true;
10483 }
10484
10485 case STRING_CST:
10486 {
10487 int i;
10488
10489 /* We need to loop through all elements to handle cases like
10490 "\0" and "\0foobar". */
10491 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10492 if (TREE_STRING_POINTER (init)[i] != '\0')
10493 return false;
10494
10495 return true;
10496 }
10497
10498 default:
10499 return false;
10500 }
10501 }
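
/* For instance, the CONSTRUCTOR for `int x[3] = { 0, 0, 0 }' and the
   STRING_CST "\0\0" both count as zero initializers, whereas a REAL_CST
   of -0.0 does not, because its bit pattern is not all zeros. */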
10502
10503 /* Check whether VEC consists entirely of equal elements and whether
10504 the number of elements corresponds to the type of VEC.
10505 The function returns the first element of the vector,
10506 or NULL_TREE if the vector is not uniform. */
10507 tree
10508 uniform_vector_p (const_tree vec)
10509 {
10510 tree first, t;
10511 unsigned i;
10512
10513 if (vec == NULL_TREE)
10514 return NULL_TREE;
10515
10516 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10517
10518 if (TREE_CODE (vec) == VECTOR_CST)
10519 {
10520 first = VECTOR_CST_ELT (vec, 0);
10521 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10522 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10523 return NULL_TREE;
10524
10525 return first;
10526 }
10527
10528 else if (TREE_CODE (vec) == CONSTRUCTOR)
10529 {
10530 first = error_mark_node;
10531
10532 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10533 {
10534 if (i == 0)
10535 {
10536 first = t;
10537 continue;
10538 }
10539 if (!operand_equal_p (first, t, 0))
10540 return NULL_TREE;
10541 }
10542 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10543 return NULL_TREE;
10544
10545 return first;
10546 }
10547
10548 return NULL_TREE;
10549 }
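
/* For example, the VECTOR_CST { 7, 7, 7, 7 } is uniform and the INTEGER_CST
   7 is returned, while { 1, 2, 3, 4 } yields NULL_TREE.  A CONSTRUCTOR is
   treated the same way, provided it supplies a value for every subpart of
   the vector type. */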
10550
10551 /* Build an empty statement at location LOC. */
10552
10553 tree
10554 build_empty_stmt (location_t loc)
10555 {
10556 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10557 SET_EXPR_LOCATION (t, loc);
10558 return t;
10559 }
10560
10561
10562 /* Build an OpenMP clause with code CODE. LOC is the location of the
10563 clause. */
10564
10565 tree
10566 build_omp_clause (location_t loc, enum omp_clause_code code)
10567 {
10568 tree t;
10569 int size, length;
10570
10571 length = omp_clause_num_ops[code];
10572 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10573
10574 record_node_allocation_statistics (OMP_CLAUSE, size);
10575
10576 t = (tree) ggc_internal_alloc (size);
10577 memset (t, 0, size);
10578 TREE_SET_CODE (t, OMP_CLAUSE);
10579 OMP_CLAUSE_SET_CODE (t, code);
10580 OMP_CLAUSE_LOCATION (t) = loc;
10581
10582 return t;
10583 }
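
/* A minimal usage sketch, assuming LOC, DECL and CLAUSES come from the
   caller:

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = decl;
     OMP_CLAUSE_CHAIN (c) = clauses;
     clauses = c;

   chains a new `private (decl)' clause onto an existing clause list. */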
10584
10585 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10586 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10587 Except for the CODE and operand count field, other storage for the
10588 object is initialized to zeros. */
10589
10590 tree
10591 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10592 {
10593 tree t;
10594 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10595
10596 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10597 gcc_assert (len >= 1);
10598
10599 record_node_allocation_statistics (code, length);
10600
10601 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10602
10603 TREE_SET_CODE (t, code);
10604
10605 /* Can't use TREE_OPERAND to store the length because if checking is
10606 enabled, it will try to check the length before we store it. :-P */
10607 t->exp.operands[0] = build_int_cst (sizetype, len);
10608
10609 return t;
10610 }
10611
10612 /* Helper function for build_call_* functions; build a CALL_EXPR with
10613 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10614 the argument slots. */
10615
10616 static tree
10617 build_call_1 (tree return_type, tree fn, int nargs)
10618 {
10619 tree t;
10620
10621 t = build_vl_exp (CALL_EXPR, nargs + 3);
10622 TREE_TYPE (t) = return_type;
10623 CALL_EXPR_FN (t) = fn;
10624 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10625
10626 return t;
10627 }
10628
10629 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10630 FN and a null static chain slot. NARGS is the number of call arguments
10631 which are specified as "..." arguments. */
10632
10633 tree
10634 build_call_nary (tree return_type, tree fn, int nargs, ...)
10635 {
10636 tree ret;
10637 va_list args;
10638 va_start (args, nargs);
10639 ret = build_call_valist (return_type, fn, nargs, args);
10640 va_end (args);
10641 return ret;
10642 }
10643
10644 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10645 FN and a null static chain slot. NARGS is the number of call arguments
10646 which are specified as a va_list ARGS. */
10647
10648 tree
10649 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10650 {
10651 tree t;
10652 int i;
10653
10654 t = build_call_1 (return_type, fn, nargs);
10655 for (i = 0; i < nargs; i++)
10656 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10657 process_call_operands (t);
10658 return t;
10659 }
10660
10661 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10662 FN and a null static chain slot. NARGS is the number of call arguments
10663 which are specified as a tree array ARGS. */
10664
10665 tree
10666 build_call_array_loc (location_t loc, tree return_type, tree fn,
10667 int nargs, const tree *args)
10668 {
10669 tree t;
10670 int i;
10671
10672 t = build_call_1 (return_type, fn, nargs);
10673 for (i = 0; i < nargs; i++)
10674 CALL_EXPR_ARG (t, i) = args[i];
10675 process_call_operands (t);
10676 SET_EXPR_LOCATION (t, loc);
10677 return t;
10678 }
10679
10680 /* Like build_call_array, but takes a vec. */
10681
10682 tree
10683 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10684 {
10685 tree ret, t;
10686 unsigned int ix;
10687
10688 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10689 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10690 CALL_EXPR_ARG (ret, ix) = t;
10691 process_call_operands (ret);
10692 return ret;
10693 }
10694
10695 /* Conveniently construct a function call expression. FNDECL names the
10696 function to be called and N arguments are passed in the array
10697 ARGARRAY. */
10698
10699 tree
10700 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10701 {
10702 tree fntype = TREE_TYPE (fndecl);
10703 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10704
10705 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10706 }
10707
10708 /* Conveniently construct a function call expression. FNDECL names the
10709 function to be called and the arguments are passed in the vector
10710 VEC. */
10711
10712 tree
10713 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10714 {
10715 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10716 vec_safe_address (vec));
10717 }
10718
10719
10720 /* Conveniently construct a function call expression. FNDECL names the
10721 function to be called, N is the number of arguments, and the "..."
10722 parameters are the argument expressions. */
10723
10724 tree
10725 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10726 {
10727 va_list ap;
10728 tree *argarray = XALLOCAVEC (tree, n);
10729 int i;
10730
10731 va_start (ap, n);
10732 for (i = 0; i < n; i++)
10733 argarray[i] = va_arg (ap, tree);
10734 va_end (ap);
10735 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10736 }
10737
10738 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10739 varargs macros aren't supported by all bootstrap compilers. */
10740
10741 tree
10742 build_call_expr (tree fndecl, int n, ...)
10743 {
10744 va_list ap;
10745 tree *argarray = XALLOCAVEC (tree, n);
10746 int i;
10747
10748 va_start (ap, n);
10749 for (i = 0; i < n; i++)
10750 argarray[i] = va_arg (ap, tree);
10751 va_end (ap);
10752 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10753 }
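
/* A sketch of typical use, assuming DST, SRC and NBYTES are GENERIC trees
   that the caller has already built:

     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMCPY),
                                  3, dst, src, nbytes);

   builds `memcpy (dst, src, nbytes)' as a CALL_EXPR which can then be
   folded or gimplified in the usual way. */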
10754
10755 /* Build an internal call expression. This is just like a CALL_EXPR, except
10756 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10757 internal function call. */
10758
10759 tree
10760 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10761 tree type, int n, ...)
10762 {
10763 va_list ap;
10764 int i;
10765
10766 tree fn = build_call_1 (type, NULL_TREE, n);
10767 va_start (ap, n);
10768 for (i = 0; i < n; i++)
10769 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10770 va_end (ap);
10771 SET_EXPR_LOCATION (fn, loc);
10772 CALL_EXPR_IFN (fn) = ifn;
10773 return fn;
10774 }
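
/* For illustration, assuming LOC is available and OP0 and OP1 are
   integer-typed operands built elsewhere, an overflow-checking addition for
   the undefined-behavior sanitizer could be represented as:

     tree t = build_call_expr_internal_loc (loc, IFN_UBSAN_CHECK_ADD,
                                            TREE_TYPE (op0), 2, op0, op1);

   i.e. a CALL_EXPR whose CALL_EXPR_FN is NULL and whose CALL_EXPR_IFN
   identifies the internal function. */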
10775
10776 /* Create a new constant string literal and return a char* pointer to it.
10777 The STRING_CST value is the LEN characters at STR. */
10778 tree
10779 build_string_literal (int len, const char *str)
10780 {
10781 tree t, elem, index, type;
10782
10783 t = build_string (len, str);
10784 elem = build_type_variant (char_type_node, 1, 0);
10785 index = build_index_type (size_int (len - 1));
10786 type = build_array_type (elem, index);
10787 TREE_TYPE (t) = type;
10788 TREE_CONSTANT (t) = 1;
10789 TREE_READONLY (t) = 1;
10790 TREE_STATIC (t) = 1;
10791
10792 type = build_pointer_type (elem);
10793 t = build1 (ADDR_EXPR, type,
10794 build4 (ARRAY_REF, elem,
10795 t, integer_zero_node, NULL_TREE, NULL_TREE));
10796 return t;
10797 }
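
/* A short usage sketch:

     tree fmt = build_string_literal (strlen ("%d\n") + 1, "%d\n");

   yields the address of a static, read-only char array holding "%d\n",
   suitable for example as the format argument of a generated printf
   call. */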
10798
10799
10800
10801 /* Return true if T (assumed to be a DECL) must be assigned a memory
10802 location. */
10803
10804 bool
10805 needs_to_live_in_memory (const_tree t)
10806 {
10807 return (TREE_ADDRESSABLE (t)
10808 || is_global_var (t)
10809 || (TREE_CODE (t) == RESULT_DECL
10810 && !DECL_BY_REFERENCE (t)
10811 && aggregate_value_p (t, current_function_decl)));
10812 }
10813
10814 /* Return the value of the integer constant X, sign-extended. */
10815
10816 HOST_WIDE_INT
10817 int_cst_value (const_tree x)
10818 {
10819 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10820 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10821
10822 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10823 gcc_assert (cst_and_fits_in_hwi (x));
10824
10825 if (bits < HOST_BITS_PER_WIDE_INT)
10826 {
10827 bool negative = ((val >> (bits - 1)) & 1) != 0;
10828 if (negative)
10829 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10830 else
10831 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10832 }
10833
10834 return val;
10835 }
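
/* A worked instance of the sign extension above: for an 8-bit type whose
   low bits are 0xf0, BITS is 8 and bit 7 is set, so the value is widened
   to ...fff0 and the function returns -16; for 0x70 the high bits are
   cleared instead and the result is 112. */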
10836
10837 /* If TYPE is an integral or pointer type, return an integer type with
10838 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10839 if TYPE is already an integer type of signedness UNSIGNEDP. */
10840
10841 tree
10842 signed_or_unsigned_type_for (int unsignedp, tree type)
10843 {
10844 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10845 return type;
10846
10847 if (TREE_CODE (type) == VECTOR_TYPE)
10848 {
10849 tree inner = TREE_TYPE (type);
10850 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10851 if (!inner2)
10852 return NULL_TREE;
10853 if (inner == inner2)
10854 return type;
10855 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10856 }
10857
10858 if (!INTEGRAL_TYPE_P (type)
10859 && !POINTER_TYPE_P (type)
10860 && TREE_CODE (type) != OFFSET_TYPE)
10861 return NULL_TREE;
10862
10863 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10864 }
10865
10866 /* If TYPE is an integral or pointer type, return an integer type with
10867 the same precision which is unsigned, or itself if TYPE is already an
10868 unsigned integer type. */
10869
10870 tree
10871 unsigned_type_for (tree type)
10872 {
10873 return signed_or_unsigned_type_for (1, type);
10874 }
10875
10876 /* If TYPE is an integral or pointer type, return an integer type with
10877 the same precision which is signed, or itself if TYPE is already a
10878 signed integer type. */
10879
10880 tree
10881 signed_type_for (tree type)
10882 {
10883 return signed_or_unsigned_type_for (0, type);
10884 }
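
/* For example, applying unsigned_type_for to a 32-bit signed integer type
   yields an unsigned integer type of the same 32-bit precision, and
   signed_type_for performs the inverse; for vector types the conversion is
   applied to the element type while the number of subparts is kept. */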
10885
10886 /* If TYPE is a vector type, return a signed integer vector type with the
10887 same width and number of subparts. Otherwise return boolean_type_node. */
10888
10889 tree
10890 truth_type_for (tree type)
10891 {
10892 if (TREE_CODE (type) == VECTOR_TYPE)
10893 {
10894 tree elem = lang_hooks.types.type_for_size
10895 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10896 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10897 }
10898 else
10899 return boolean_type_node;
10900 }
10901
10902 /* Returns the largest value obtainable by casting something in INNER type to
10903 OUTER type. */
10904
10905 tree
10906 upper_bound_in_type (tree outer, tree inner)
10907 {
10908 unsigned int det = 0;
10909 unsigned oprec = TYPE_PRECISION (outer);
10910 unsigned iprec = TYPE_PRECISION (inner);
10911 unsigned prec;
10912
10913 /* Compute a unique number for every combination. */
10914 det |= (oprec > iprec) ? 4 : 0;
10915 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10916 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10917
10918 /* Determine the exponent to use. */
10919 switch (det)
10920 {
10921 case 0:
10922 case 1:
10923 /* oprec <= iprec, outer: signed, inner: don't care. */
10924 prec = oprec - 1;
10925 break;
10926 case 2:
10927 case 3:
10928 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10929 prec = oprec;
10930 break;
10931 case 4:
10932 /* oprec > iprec, outer: signed, inner: signed. */
10933 prec = iprec - 1;
10934 break;
10935 case 5:
10936 /* oprec > iprec, outer: signed, inner: unsigned. */
10937 prec = iprec;
10938 break;
10939 case 6:
10940 /* oprec > iprec, outer: unsigned, inner: signed. */
10941 prec = oprec;
10942 break;
10943 case 7:
10944 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10945 prec = iprec;
10946 break;
10947 default:
10948 gcc_unreachable ();
10949 }
10950
10951 return wide_int_to_tree (outer,
10952 wi::mask (prec, false, TYPE_PRECISION (outer)));
10953 }
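
/* A worked instance: for OUTER an unsigned 16-bit type and INNER a signed
   8-bit type, oprec > iprec and only the outer type is unsigned, so DET is
   6 and PREC is 16; the result is 0xffff, which is indeed what casting
   (signed char) -1 to the wider unsigned type produces. */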
10954
10955 /* Returns the smallest value obtainable by casting something in INNER type to
10956 OUTER type. */
10957
10958 tree
10959 lower_bound_in_type (tree outer, tree inner)
10960 {
10961 unsigned oprec = TYPE_PRECISION (outer);
10962 unsigned iprec = TYPE_PRECISION (inner);
10963
10964 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10965 and obtain 0. */
10966 if (TYPE_UNSIGNED (outer)
10967 /* If we are widening something of an unsigned type, OUTER type
10968 contains all values of INNER type. In particular, both INNER
10969 and OUTER types have zero in common. */
10970 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10971 return build_int_cst (outer, 0);
10972 else
10973 {
10974 /* If we are widening a signed type to another signed type, we
10975 want to obtain -2^(iprec-1). If we are keeping the
10976 precision or narrowing to a signed type, we want to obtain
10977 -2^(oprec-1). */
10978 unsigned prec = oprec > iprec ? iprec : oprec;
10979 return wide_int_to_tree (outer,
10980 wi::mask (prec - 1, true,
10981 TYPE_PRECISION (outer)));
10982 }
10983 }
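
/* A worked instance: for OUTER a signed 16-bit type and INNER a signed
   8-bit type, the zero case does not apply, PREC is min (oprec, iprec) == 8
   and the result is -128, the smallest value a signed 8-bit quantity can
   carry into the wider signed type. */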
10984
10985 /* Return nonzero if two operands that are suitable for PHI nodes are
10986 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10987 SSA_NAME or invariant. Note that this is strictly an optimization.
10988 That is, callers of this function can directly call operand_equal_p
10989 and get the same result, only slower. */
10990
10991 int
10992 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10993 {
10994 if (arg0 == arg1)
10995 return 1;
10996 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10997 return 0;
10998 return operand_equal_p (arg0, arg1, 0);
10999 }
11000
11001 /* Returns the number of trailing zeros in the binary representation of X. */
11002
11003 tree
11004 num_ending_zeros (const_tree x)
11005 {
11006 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11007 }
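
/* For example, for X equal to 40 (binary 101000) the result is the
   INTEGER_CST 3, expressed in the type of X. */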
11008
11009
11010 #define WALK_SUBTREE(NODE) \
11011 do \
11012 { \
11013 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11014 if (result) \
11015 return result; \
11016 } \
11017 while (0)
11018
11019 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11020 to be walked whenever a type is seen in the tree. The rest of the operands
11021 and the return value are as for walk_tree. */
11022
11023 static tree
11024 walk_type_fields (tree type, walk_tree_fn func, void *data,
11025 hash_set<tree> *pset, walk_tree_lh lh)
11026 {
11027 tree result = NULL_TREE;
11028
11029 switch (TREE_CODE (type))
11030 {
11031 case POINTER_TYPE:
11032 case REFERENCE_TYPE:
11033 case VECTOR_TYPE:
11034 /* We have to worry about mutually recursive pointers. These can't
11035 be written in C. They can in Ada. It's pathological, but
11036 there's an ACATS test (c38102a) that checks it. Deal with this
11037 by checking if we're pointing to another pointer, that one
11038 points to another pointer, that one does too, and we have no htab.
11039 If so, get a hash table. We check three levels deep to avoid
11040 the cost of the hash table if we don't need one. */
11041 if (POINTER_TYPE_P (TREE_TYPE (type))
11042 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11043 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11044 && !pset)
11045 {
11046 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11047 func, data);
11048 if (result)
11049 return result;
11050
11051 break;
11052 }
11053
11054 /* ... fall through ... */
11055
11056 case COMPLEX_TYPE:
11057 WALK_SUBTREE (TREE_TYPE (type));
11058 break;
11059
11060 case METHOD_TYPE:
11061 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11062
11063 /* Fall through. */
11064
11065 case FUNCTION_TYPE:
11066 WALK_SUBTREE (TREE_TYPE (type));
11067 {
11068 tree arg;
11069
11070 /* We never want to walk into default arguments. */
11071 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11072 WALK_SUBTREE (TREE_VALUE (arg));
11073 }
11074 break;
11075
11076 case ARRAY_TYPE:
11077 /* Don't follow this node's type if it is a pointer, for fear that
11078 we'll have infinite recursion. If we have a PSET, then we
11079 need not fear. */
11080 if (pset
11081 || (!POINTER_TYPE_P (TREE_TYPE (type))
11082 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11083 WALK_SUBTREE (TREE_TYPE (type));
11084 WALK_SUBTREE (TYPE_DOMAIN (type));
11085 break;
11086
11087 case OFFSET_TYPE:
11088 WALK_SUBTREE (TREE_TYPE (type));
11089 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11090 break;
11091
11092 default:
11093 break;
11094 }
11095
11096 return NULL_TREE;
11097 }
11098
11099 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11100 called with the DATA and the address of each sub-tree. If FUNC returns a
11101 non-NULL value, the traversal is stopped, and the value returned by FUNC
11102 is returned. If PSET is non-NULL it is used to record the nodes visited,
11103 and to avoid visiting a node more than once. */
11104
11105 tree
11106 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11107 hash_set<tree> *pset, walk_tree_lh lh)
11108 {
11109 enum tree_code code;
11110 int walk_subtrees;
11111 tree result;
11112
11113 #define WALK_SUBTREE_TAIL(NODE) \
11114 do \
11115 { \
11116 tp = & (NODE); \
11117 goto tail_recurse; \
11118 } \
11119 while (0)
11120
11121 tail_recurse:
11122 /* Skip empty subtrees. */
11123 if (!*tp)
11124 return NULL_TREE;
11125
11126 /* Don't walk the same tree twice, if the user has requested
11127 that we avoid doing so. */
11128 if (pset && pset->add (*tp))
11129 return NULL_TREE;
11130
11131 /* Call the function. */
11132 walk_subtrees = 1;
11133 result = (*func) (tp, &walk_subtrees, data);
11134
11135 /* If we found something, return it. */
11136 if (result)
11137 return result;
11138
11139 code = TREE_CODE (*tp);
11140
11141 /* Even if we didn't, FUNC may have decided that there was nothing
11142 interesting below this point in the tree. */
11143 if (!walk_subtrees)
11144 {
11145 /* But we still need to check our siblings. */
11146 if (code == TREE_LIST)
11147 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11148 else if (code == OMP_CLAUSE)
11149 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11150 else
11151 return NULL_TREE;
11152 }
11153
11154 if (lh)
11155 {
11156 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11157 if (result || !walk_subtrees)
11158 return result;
11159 }
11160
11161 switch (code)
11162 {
11163 case ERROR_MARK:
11164 case IDENTIFIER_NODE:
11165 case INTEGER_CST:
11166 case REAL_CST:
11167 case FIXED_CST:
11168 case VECTOR_CST:
11169 case STRING_CST:
11170 case BLOCK:
11171 case PLACEHOLDER_EXPR:
11172 case SSA_NAME:
11173 case FIELD_DECL:
11174 case RESULT_DECL:
11175 /* None of these have subtrees other than those already walked
11176 above. */
11177 break;
11178
11179 case TREE_LIST:
11180 WALK_SUBTREE (TREE_VALUE (*tp));
11181 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11182 break;
11183
11184 case TREE_VEC:
11185 {
11186 int len = TREE_VEC_LENGTH (*tp);
11187
11188 if (len == 0)
11189 break;
11190
11191 /* Walk all elements but the first. */
11192 while (--len)
11193 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11194
11195 /* Now walk the first one as a tail call. */
11196 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11197 }
11198
11199 case COMPLEX_CST:
11200 WALK_SUBTREE (TREE_REALPART (*tp));
11201 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11202
11203 case CONSTRUCTOR:
11204 {
11205 unsigned HOST_WIDE_INT idx;
11206 constructor_elt *ce;
11207
11208 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11209 idx++)
11210 WALK_SUBTREE (ce->value);
11211 }
11212 break;
11213
11214 case SAVE_EXPR:
11215 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11216
11217 case BIND_EXPR:
11218 {
11219 tree decl;
11220 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11221 {
11222 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11223 into declarations that are just mentioned, rather than
11224 declared; they don't really belong to this part of the tree.
11225 And, we can see cycles: the initializer for a declaration
11226 can refer to the declaration itself. */
11227 WALK_SUBTREE (DECL_INITIAL (decl));
11228 WALK_SUBTREE (DECL_SIZE (decl));
11229 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11230 }
11231 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11232 }
11233
11234 case STATEMENT_LIST:
11235 {
11236 tree_stmt_iterator i;
11237 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11238 WALK_SUBTREE (*tsi_stmt_ptr (i));
11239 }
11240 break;
11241
11242 case OMP_CLAUSE:
11243 switch (OMP_CLAUSE_CODE (*tp))
11244 {
11245 case OMP_CLAUSE_GANG:
11246 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11247 /* FALLTHRU */
11248
11249 case OMP_CLAUSE_DEVICE_RESIDENT:
11250 case OMP_CLAUSE_USE_DEVICE:
11251 case OMP_CLAUSE_ASYNC:
11252 case OMP_CLAUSE_WAIT:
11253 case OMP_CLAUSE_WORKER:
11254 case OMP_CLAUSE_VECTOR:
11255 case OMP_CLAUSE_NUM_GANGS:
11256 case OMP_CLAUSE_NUM_WORKERS:
11257 case OMP_CLAUSE_VECTOR_LENGTH:
11258 case OMP_CLAUSE_PRIVATE:
11259 case OMP_CLAUSE_SHARED:
11260 case OMP_CLAUSE_FIRSTPRIVATE:
11261 case OMP_CLAUSE_COPYIN:
11262 case OMP_CLAUSE_COPYPRIVATE:
11263 case OMP_CLAUSE_FINAL:
11264 case OMP_CLAUSE_IF:
11265 case OMP_CLAUSE_NUM_THREADS:
11266 case OMP_CLAUSE_SCHEDULE:
11267 case OMP_CLAUSE_UNIFORM:
11268 case OMP_CLAUSE_DEPEND:
11269 case OMP_CLAUSE_NUM_TEAMS:
11270 case OMP_CLAUSE_THREAD_LIMIT:
11271 case OMP_CLAUSE_DEVICE:
11272 case OMP_CLAUSE_DIST_SCHEDULE:
11273 case OMP_CLAUSE_SAFELEN:
11274 case OMP_CLAUSE_SIMDLEN:
11275 case OMP_CLAUSE__LOOPTEMP_:
11276 case OMP_CLAUSE__SIMDUID_:
11277 case OMP_CLAUSE__CILK_FOR_COUNT_:
11278 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11279 /* FALLTHRU */
11280
11281 case OMP_CLAUSE_INDEPENDENT:
11282 case OMP_CLAUSE_NOWAIT:
11283 case OMP_CLAUSE_ORDERED:
11284 case OMP_CLAUSE_DEFAULT:
11285 case OMP_CLAUSE_UNTIED:
11286 case OMP_CLAUSE_MERGEABLE:
11287 case OMP_CLAUSE_PROC_BIND:
11288 case OMP_CLAUSE_INBRANCH:
11289 case OMP_CLAUSE_NOTINBRANCH:
11290 case OMP_CLAUSE_FOR:
11291 case OMP_CLAUSE_PARALLEL:
11292 case OMP_CLAUSE_SECTIONS:
11293 case OMP_CLAUSE_TASKGROUP:
11294 case OMP_CLAUSE_AUTO:
11295 case OMP_CLAUSE_SEQ:
11296 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11297
11298 case OMP_CLAUSE_LASTPRIVATE:
11299 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11300 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11301 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11302
11303 case OMP_CLAUSE_COLLAPSE:
11304 {
11305 int i;
11306 for (i = 0; i < 3; i++)
11307 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11308 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11309 }
11310
11311 case OMP_CLAUSE_LINEAR:
11312 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11313 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11314 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11315 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11316
11317 case OMP_CLAUSE_ALIGNED:
11318 case OMP_CLAUSE_FROM:
11319 case OMP_CLAUSE_TO:
11320 case OMP_CLAUSE_MAP:
11321 case OMP_CLAUSE__CACHE_:
11322 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11323 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11324 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11325
11326 case OMP_CLAUSE_REDUCTION:
11327 {
11328 int i;
11329 for (i = 0; i < 4; i++)
11330 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11331 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11332 }
11333
11334 default:
11335 gcc_unreachable ();
11336 }
11337 break;
11338
11339 case TARGET_EXPR:
11340 {
11341 int i, len;
11342
11343 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11344 But, we only want to walk once. */
11345 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11346 for (i = 0; i < len; ++i)
11347 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11348 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11349 }
11350
11351 case DECL_EXPR:
11352 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11353 defining. We only want to walk into these fields of a type in this
11354 case and not in the general case of a mere reference to the type.
11355
11356 The criterion is as follows: if the field can be an expression, it
11357 must be walked only here. This should be in keeping with the fields
11358 that are directly gimplified in gimplify_type_sizes in order for the
11359 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11360 variable-sized types.
11361
11362 Note that DECLs get walked as part of processing the BIND_EXPR. */
11363 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11364 {
11365 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11366 if (TREE_CODE (*type_p) == ERROR_MARK)
11367 return NULL_TREE;
11368
11369 /* Call the function for the type. See if it returns anything or
11370 doesn't want us to continue. If we are to continue, walk both
11371 the normal fields and those for the declaration case. */
11372 result = (*func) (type_p, &walk_subtrees, data);
11373 if (result || !walk_subtrees)
11374 return result;
11375
11376 /* But do not walk a pointed-to type since it may itself need to
11377 be walked in the declaration case if it isn't anonymous. */
11378 if (!POINTER_TYPE_P (*type_p))
11379 {
11380 result = walk_type_fields (*type_p, func, data, pset, lh);
11381 if (result)
11382 return result;
11383 }
11384
11385 /* If this is a record type, also walk the fields. */
11386 if (RECORD_OR_UNION_TYPE_P (*type_p))
11387 {
11388 tree field;
11389
11390 for (field = TYPE_FIELDS (*type_p); field;
11391 field = DECL_CHAIN (field))
11392 {
11393 /* We'd like to look at the type of the field, but we can
11394 easily get infinite recursion. So assume it's pointed
11395 to elsewhere in the tree. Also, ignore things that
11396 aren't fields. */
11397 if (TREE_CODE (field) != FIELD_DECL)
11398 continue;
11399
11400 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11401 WALK_SUBTREE (DECL_SIZE (field));
11402 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11403 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11404 WALK_SUBTREE (DECL_QUALIFIER (field));
11405 }
11406 }
11407
11408 /* Same for scalar types. */
11409 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11410 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11411 || TREE_CODE (*type_p) == INTEGER_TYPE
11412 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11413 || TREE_CODE (*type_p) == REAL_TYPE)
11414 {
11415 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11416 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11417 }
11418
11419 WALK_SUBTREE (TYPE_SIZE (*type_p));
11420 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11421 }
11422 /* FALLTHRU */
11423
11424 default:
11425 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11426 {
11427 int i, len;
11428
11429 /* Walk over all the sub-trees of this operand. */
11430 len = TREE_OPERAND_LENGTH (*tp);
11431
11432 /* Go through the subtrees. We need to do this in forward order so
11433 that the scope of a FOR_EXPR is handled properly. */
11434 if (len)
11435 {
11436 for (i = 0; i < len - 1; ++i)
11437 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11438 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11439 }
11440 }
11441 /* If this is a type, walk the needed fields in the type. */
11442 else if (TYPE_P (*tp))
11443 return walk_type_fields (*tp, func, data, pset, lh);
11444 break;
11445 }
11446
11447 /* We didn't find what we were looking for. */
11448 return NULL_TREE;
11449
11450 #undef WALK_SUBTREE_TAIL
11451 }
11452 #undef WALK_SUBTREE
11453
11454 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11455
11456 tree
11457 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11458 walk_tree_lh lh)
11459 {
11460 tree result;
11461
11462 hash_set<tree> pset;
11463 result = walk_tree_1 (tp, func, data, &pset, lh);
11464 return result;
11465 }
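
/* Illustrative usage sketch (the callback and the walked expression below
   are hypothetical, not part of this file): a walk_tree_fn that counts
   CALL_EXPR nodes could be written as

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

   and invoked as

     int n = 0;
     walk_tree (&expr, count_calls_r, &n, NULL);

   Returning NULL_TREE from the callback means "keep walking"; returning a
   non-NULL tree stops the walk and becomes the return value of walk_tree.
   Setting *walk_subtrees to 0 tells the walker not to descend below the
   current node.  */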
11466
11467
11468 tree
11469 tree_block (tree t)
11470 {
11471 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11472
11473 if (IS_EXPR_CODE_CLASS (c))
11474 return LOCATION_BLOCK (t->exp.locus);
11475 gcc_unreachable ();
11476 return NULL;
11477 }
11478
11479 void
11480 tree_set_block (tree t, tree b)
11481 {
11482 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11483
11484 if (IS_EXPR_CODE_CLASS (c))
11485 {
11486 if (b)
11487 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11488 else
11489 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11490 }
11491 else
11492 gcc_unreachable ();
11493 }
11494
11495 /* Create a nameless artificial label and put it in the current
11496 function context. The label has a location of LOC. Returns the
11497 newly created label. */
11498
11499 tree
11500 create_artificial_label (location_t loc)
11501 {
11502 tree lab = build_decl (loc,
11503 LABEL_DECL, NULL_TREE, void_type_node);
11504
11505 DECL_ARTIFICIAL (lab) = 1;
11506 DECL_IGNORED_P (lab) = 1;
11507 DECL_CONTEXT (lab) = current_function_decl;
11508 return lab;
11509 }
11510
11511 /* Given a tree, try to return a useful variable name that we can use
11512 to prefix a temporary that is being assigned the value of the tree.
11513    I.e. given <temp> = &A, return A.  */
11514
11515 const char *
11516 get_name (tree t)
11517 {
11518 tree stripped_decl;
11519
11520 stripped_decl = t;
11521 STRIP_NOPS (stripped_decl);
11522 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11523 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11524 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11525 {
11526 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11527 if (!name)
11528 return NULL;
11529 return IDENTIFIER_POINTER (name);
11530 }
11531 else
11532 {
11533 switch (TREE_CODE (stripped_decl))
11534 {
11535 case ADDR_EXPR:
11536 return get_name (TREE_OPERAND (stripped_decl, 0));
11537 default:
11538 return NULL;
11539 }
11540 }
11541 }
11542
11543 /* Return true if FNTYPE has a variable argument list.  */
11544
11545 bool
11546 stdarg_p (const_tree fntype)
11547 {
11548 function_args_iterator args_iter;
11549 tree n = NULL_TREE, t;
11550
11551 if (!fntype)
11552 return false;
11553
11554 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11555 {
11556 n = t;
11557 }
11558
11559 return n != NULL_TREE && n != void_type_node;
11560 }
11561
11562 /* Return true if FNTYPE has a prototype.  */
11563
11564 bool
11565 prototype_p (tree fntype)
11566 {
11567 tree t;
11568
11569 gcc_assert (fntype != NULL_TREE);
11570
11571 t = TYPE_ARG_TYPES (fntype);
11572 return (t != NULL_TREE);
11573 }
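
/* For illustration (assuming the usual C front end representation of
   TYPE_ARG_TYPES): for "int f (int, ...)" stdarg_p is true because the
   argument list does not end in void_type_node; for "int g (void)" it is
   false; and for an unprototyped "int h ()" TYPE_ARG_TYPES is NULL, so
   both prototype_p and stdarg_p are false.  */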
11574
11575 /* If BLOCK is inlined from an __attribute__((__artificial__))
11576    routine, return a pointer to the location from which it has been
11577    called.  */
11578 location_t *
11579 block_nonartificial_location (tree block)
11580 {
11581 location_t *ret = NULL;
11582
11583 while (block && TREE_CODE (block) == BLOCK
11584 && BLOCK_ABSTRACT_ORIGIN (block))
11585 {
11586 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11587
11588 while (TREE_CODE (ao) == BLOCK
11589 && BLOCK_ABSTRACT_ORIGIN (ao)
11590 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11591 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11592
11593 if (TREE_CODE (ao) == FUNCTION_DECL)
11594 {
11595 /* If AO is an artificial inline, point RET to the
11596 call site locus at which it has been inlined and continue
11597 the loop, in case AO's caller is also an artificial
11598 inline. */
11599 if (DECL_DECLARED_INLINE_P (ao)
11600 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11601 ret = &BLOCK_SOURCE_LOCATION (block);
11602 else
11603 break;
11604 }
11605 else if (TREE_CODE (ao) != BLOCK)
11606 break;
11607
11608 block = BLOCK_SUPERCONTEXT (block);
11609 }
11610 return ret;
11611 }
11612
11613
11614 /* If EXP is inlined from an __attribute__((__artificial__))
11615 function, return the location of the original call expression. */
11616
11617 location_t
11618 tree_nonartificial_location (tree exp)
11619 {
11620 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11621
11622 if (loc)
11623 return *loc;
11624 else
11625 return EXPR_LOCATION (exp);
11626 }
11627
11628
11629 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11630 nodes. */
11631
11632 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
11633
11634 hashval_t
11635 cl_option_hasher::hash (tree x)
11636 {
11637 const_tree const t = x;
11638 const char *p;
11639 size_t i;
11640 size_t len = 0;
11641 hashval_t hash = 0;
11642
11643 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11644 {
11645 p = (const char *)TREE_OPTIMIZATION (t);
11646 len = sizeof (struct cl_optimization);
11647 }
11648
11649 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11650 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11651
11652 else
11653 gcc_unreachable ();
11654
11655   /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11656      something else.  */
11657 for (i = 0; i < len; i++)
11658 if (p[i])
11659 hash = (hash << 4) ^ ((i << 2) | p[i]);
11660
11661 return hash;
11662 }
11663
11664 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11665    TARGET_OPTION_NODE) is the same as that given by Y, which is of the
11666    same kind.  */
11667
11668 bool
11669 cl_option_hasher::equal (tree x, tree y)
11670 {
11671 const_tree const xt = x;
11672 const_tree const yt = y;
11673 const char *xp;
11674 const char *yp;
11675 size_t len;
11676
11677 if (TREE_CODE (xt) != TREE_CODE (yt))
11678 return 0;
11679
11680 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11681 {
11682 xp = (const char *)TREE_OPTIMIZATION (xt);
11683 yp = (const char *)TREE_OPTIMIZATION (yt);
11684 len = sizeof (struct cl_optimization);
11685 }
11686
11687 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11688 {
11689 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11690 TREE_TARGET_OPTION (yt));
11691 }
11692
11693 else
11694 gcc_unreachable ();
11695
11696 return (memcmp (xp, yp, len) == 0);
11697 }
11698
11699 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11700
11701 tree
11702 build_optimization_node (struct gcc_options *opts)
11703 {
11704 tree t;
11705
11706 /* Use the cache of optimization nodes. */
11707
11708 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11709 opts);
11710
11711 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11712 t = *slot;
11713 if (!t)
11714 {
11715 /* Insert this one into the hash table. */
11716 t = cl_optimization_node;
11717 *slot = t;
11718
11719 /* Make a new node for next time round. */
11720 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11721 }
11722
11723 return t;
11724 }
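
/* A minimal usage sketch (global_options is the usual source of the
   current option state; any other struct gcc_options instance works too):

     tree opt_node = build_optimization_node (&global_options);

   Because identical option sets share a single cached OPTIMIZATION_NODE,
   pointer comparison of the returned nodes is enough to tell whether two
   option states differ.  */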
11725
11726 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11727
11728 tree
11729 build_target_option_node (struct gcc_options *opts)
11730 {
11731 tree t;
11732
11733 /* Use the cache of optimization nodes. */
11734
11735 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11736 opts);
11737
11738 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11739 t = *slot;
11740 if (!t)
11741 {
11742 /* Insert this one into the hash table. */
11743 t = cl_target_option_node;
11744 *slot = t;
11745
11746 /* Make a new node for next time round. */
11747 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11748 }
11749
11750 return t;
11751 }
11752
11753 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11754 so that they aren't saved during PCH writing. */
11755
11756 void
11757 prepare_target_option_nodes_for_pch (void)
11758 {
11759 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11760 for (; iter != cl_option_hash_table->end (); ++iter)
11761 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11762 TREE_TARGET_GLOBALS (*iter) = NULL;
11763 }
11764
11765 /* Determine the "ultimate origin" of a block. The block may be an inlined
11766 instance of an inlined instance of a block which is local to an inline
11767    function, so we have to trace all the way back through the origin chain
11768 to find out what sort of node actually served as the original seed for the
11769 given block. */
11770
11771 tree
11772 block_ultimate_origin (const_tree block)
11773 {
11774 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11775
11776 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11777 we're trying to output the abstract instance of this function. */
11778 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11779 return NULL_TREE;
11780
11781 if (immediate_origin == NULL_TREE)
11782 return NULL_TREE;
11783 else
11784 {
11785 tree ret_val;
11786 tree lookahead = immediate_origin;
11787
11788 do
11789 {
11790 ret_val = lookahead;
11791 lookahead = (TREE_CODE (ret_val) == BLOCK
11792 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11793 }
11794 while (lookahead != NULL && lookahead != ret_val);
11795
11796 /* The block's abstract origin chain may not be the *ultimate* origin of
11797 the block. It could lead to a DECL that has an abstract origin set.
11798 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11799 will give us if it has one). Note that DECL's abstract origins are
11800 supposed to be the most distant ancestor (or so decl_ultimate_origin
11801 claims), so we don't need to loop following the DECL origins. */
11802 if (DECL_P (ret_val))
11803 return DECL_ORIGIN (ret_val);
11804
11805 return ret_val;
11806 }
11807 }
11808
11809 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11810 no instruction. */
11811
11812 bool
11813 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11814 {
11815   /* Use precision rather than machine mode when we can, which gives
11816 the correct answer even for submode (bit-field) types. */
11817 if ((INTEGRAL_TYPE_P (outer_type)
11818 || POINTER_TYPE_P (outer_type)
11819 || TREE_CODE (outer_type) == OFFSET_TYPE)
11820 && (INTEGRAL_TYPE_P (inner_type)
11821 || POINTER_TYPE_P (inner_type)
11822 || TREE_CODE (inner_type) == OFFSET_TYPE))
11823 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11824
11825 /* Otherwise fall back on comparing machine modes (e.g. for
11826 aggregate types, floats). */
11827 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11828 }
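
/* For example (assuming a typical ILP32 or LP64 target): a conversion
   between "int" and "unsigned int" is a nop because both have the same
   TYPE_PRECISION, whereas converting "int" to "long" on an LP64 target
   changes the precision from 32 to 64 bits and therefore is not.  */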
11829
11830 /* Return true iff conversion in EXP generates no instruction. Mark
11831 it inline so that we fully inline into the stripping functions even
11832 though we have two uses of this function. */
11833
11834 static inline bool
11835 tree_nop_conversion (const_tree exp)
11836 {
11837 tree outer_type, inner_type;
11838
11839 if (!CONVERT_EXPR_P (exp)
11840 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11841 return false;
11842 if (TREE_OPERAND (exp, 0) == error_mark_node)
11843 return false;
11844
11845 outer_type = TREE_TYPE (exp);
11846 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11847
11848 if (!inner_type)
11849 return false;
11850
11851 return tree_nop_conversion_p (outer_type, inner_type);
11852 }
11853
11854 /* Return true iff conversion in EXP generates no instruction. Don't
11855 consider conversions changing the signedness. */
11856
11857 static bool
11858 tree_sign_nop_conversion (const_tree exp)
11859 {
11860 tree outer_type, inner_type;
11861
11862 if (!tree_nop_conversion (exp))
11863 return false;
11864
11865 outer_type = TREE_TYPE (exp);
11866 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11867
11868 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11869 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11870 }
11871
11872 /* Strip conversions from EXP according to tree_nop_conversion and
11873 return the resulting expression. */
11874
11875 tree
11876 tree_strip_nop_conversions (tree exp)
11877 {
11878 while (tree_nop_conversion (exp))
11879 exp = TREE_OPERAND (exp, 0);
11880 return exp;
11881 }
11882
11883 /* Strip conversions from EXP according to tree_sign_nop_conversion
11884 and return the resulting expression. */
11885
11886 tree
11887 tree_strip_sign_nop_conversions (tree exp)
11888 {
11889 while (tree_sign_nop_conversion (exp))
11890 exp = TREE_OPERAND (exp, 0);
11891 return exp;
11892 }
11893
11894 /* Avoid any floating point extensions from EXP. */
11895 tree
11896 strip_float_extensions (tree exp)
11897 {
11898 tree sub, expt, subt;
11899
11900   /* For a floating point constant, look up the narrowest type that can hold
11901      it properly and handle it like (type)(narrowest_type)constant.
11902      This way we can optimize for instance a=a*2.0 where "a" is float
11903      but 2.0 is a double constant.  */
11904 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11905 {
11906 REAL_VALUE_TYPE orig;
11907 tree type = NULL;
11908
11909 orig = TREE_REAL_CST (exp);
11910 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11911 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11912 type = float_type_node;
11913 else if (TYPE_PRECISION (TREE_TYPE (exp))
11914 > TYPE_PRECISION (double_type_node)
11915 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11916 type = double_type_node;
11917 if (type)
11918 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11919 }
11920
11921 if (!CONVERT_EXPR_P (exp))
11922 return exp;
11923
11924 sub = TREE_OPERAND (exp, 0);
11925 subt = TREE_TYPE (sub);
11926 expt = TREE_TYPE (exp);
11927
11928 if (!FLOAT_TYPE_P (subt))
11929 return exp;
11930
11931 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11932 return exp;
11933
11934 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11935 return exp;
11936
11937 return strip_float_extensions (sub);
11938 }
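
/* For example: given (double) f where f has type float, the float operand
   f itself is returned; and a REAL_CST like 2.0 of type double that is
   exactly representable as float is rebuilt as the equivalent float
   constant, so an expression such as a = a * 2.0 with float "a" can be
   evaluated entirely in single precision.  */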
11939
11940 /* Strip out all handled components that produce invariant
11941 offsets. */
11942
11943 const_tree
11944 strip_invariant_refs (const_tree op)
11945 {
11946 while (handled_component_p (op))
11947 {
11948 switch (TREE_CODE (op))
11949 {
11950 case ARRAY_REF:
11951 case ARRAY_RANGE_REF:
11952 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11953 || TREE_OPERAND (op, 2) != NULL_TREE
11954 || TREE_OPERAND (op, 3) != NULL_TREE)
11955 return NULL;
11956 break;
11957
11958 case COMPONENT_REF:
11959 if (TREE_OPERAND (op, 2) != NULL_TREE)
11960 return NULL;
11961 break;
11962
11963 default:;
11964 }
11965 op = TREE_OPERAND (op, 0);
11966 }
11967
11968 return op;
11969 }
11970
11971 static GTY(()) tree gcc_eh_personality_decl;
11972
11973 /* Return the GCC personality function decl. */
11974
11975 tree
11976 lhd_gcc_personality (void)
11977 {
11978 if (!gcc_eh_personality_decl)
11979 gcc_eh_personality_decl = build_personality_function ("gcc");
11980 return gcc_eh_personality_decl;
11981 }
11982
11983 /* TARGET is a call target of a GIMPLE call statement
11984    (obtained by gimple_call_fn).  Return true if it is an
11985    OBJ_TYPE_REF representing a virtual call to a C++ method.
11986    (As opposed to an OBJ_TYPE_REF representing ObjC calls
11987    through a cast, where the middle-end devirtualization machinery
11988    can't apply.)  */
11989
11990 bool
11991 virtual_method_call_p (tree target)
11992 {
11993 if (TREE_CODE (target) != OBJ_TYPE_REF)
11994 return false;
11995 tree t = TREE_TYPE (target);
11996 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11997 t = TREE_TYPE (t);
11998 if (TREE_CODE (t) == FUNCTION_TYPE)
11999 return false;
12000 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12001 /* If we do not have BINFO associated, it means that type was built
12002 without devirtualization enabled. Do not consider this a virtual
12003 call. */
12004 if (!TYPE_BINFO (obj_type_ref_class (target)))
12005 return false;
12006 return true;
12007 }
12008
12009 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12010
12011 tree
12012 obj_type_ref_class (tree ref)
12013 {
12014 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12015 ref = TREE_TYPE (ref);
12016 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12017 ref = TREE_TYPE (ref);
12018   /* We look for the type THIS points to.  ObjC also builds
12019      OBJ_TYPE_REF with non-method calls; their first parameter
12020      ID, however, also corresponds to the class type.  */
12021 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12022 || TREE_CODE (ref) == FUNCTION_TYPE);
12023 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12024 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12025 return TREE_TYPE (ref);
12026 }
12027
12028 /* Return true if T is in an anonymous namespace.  */
12029
12030 bool
12031 type_in_anonymous_namespace_p (const_tree t)
12032 {
12033 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
12034    builtin types; those have CONTEXT NULL.  */
12035 if (!TYPE_CONTEXT (t))
12036 return false;
12037 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
12038 }
12039
12040 /* Look up a sub-BINFO of BINFO that is of TYPE and located at offset POS.  */
12041
12042 static tree
12043 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12044 {
12045 unsigned int i;
12046 tree base_binfo, b;
12047
12048 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12049 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12050 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12051 return base_binfo;
12052 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12053 return b;
12054 return NULL;
12055 }
12056
12057 /* Try to find a base info of BINFO that would have its field decl at offset
12058 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12059    found, return it, otherwise return NULL_TREE.  */
12060
12061 tree
12062 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12063 {
12064 tree type = BINFO_TYPE (binfo);
12065
12066 while (true)
12067 {
12068 HOST_WIDE_INT pos, size;
12069 tree fld;
12070 int i;
12071
12072 if (types_same_for_odr (type, expected_type))
12073 return binfo;
12074 if (offset < 0)
12075 return NULL_TREE;
12076
12077 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12078 {
12079 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12080 continue;
12081
12082 pos = int_bit_position (fld);
12083 size = tree_to_uhwi (DECL_SIZE (fld));
12084 if (pos <= offset && (pos + size) > offset)
12085 break;
12086 }
12087 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12088 return NULL_TREE;
12089
12090 /* Offset 0 indicates the primary base, whose vtable contents are
12091 represented in the binfo for the derived class. */
12092 else if (offset != 0)
12093 {
12094 tree found_binfo = NULL, base_binfo;
12095 /* Offsets in BINFO are in bytes relative to the whole structure
12096 while POS is in bits relative to the containing field. */
12097 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12098 / BITS_PER_UNIT);
12099
12100 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12101 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12102 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12103 {
12104 found_binfo = base_binfo;
12105 break;
12106 }
12107 if (found_binfo)
12108 binfo = found_binfo;
12109 else
12110 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12111 binfo_offset);
12112 }
12113
12114 type = TREE_TYPE (fld);
12115 offset -= pos;
12116 }
12117 }
12118
12119 /* Returns true if X is a typedef decl. */
12120
12121 bool
12122 is_typedef_decl (tree x)
12123 {
12124 return (x && TREE_CODE (x) == TYPE_DECL
12125 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12126 }
12127
12128 /* Returns true iff TYPE is a type variant created for a typedef. */
12129
12130 bool
12131 typedef_variant_p (tree type)
12132 {
12133 return is_typedef_decl (TYPE_NAME (type));
12134 }
12135
12136 /* Warn about a use of an identifier which was marked deprecated. */
12137 void
12138 warn_deprecated_use (tree node, tree attr)
12139 {
12140 const char *msg;
12141
12142 if (node == 0 || !warn_deprecated_decl)
12143 return;
12144
12145 if (!attr)
12146 {
12147 if (DECL_P (node))
12148 attr = DECL_ATTRIBUTES (node);
12149 else if (TYPE_P (node))
12150 {
12151 tree decl = TYPE_STUB_DECL (node);
12152 if (decl)
12153 attr = lookup_attribute ("deprecated",
12154 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12155 }
12156 }
12157
12158 if (attr)
12159 attr = lookup_attribute ("deprecated", attr);
12160
12161 if (attr)
12162 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12163 else
12164 msg = NULL;
12165
12166 bool w;
12167 if (DECL_P (node))
12168 {
12169 if (msg)
12170 w = warning (OPT_Wdeprecated_declarations,
12171 "%qD is deprecated: %s", node, msg);
12172 else
12173 w = warning (OPT_Wdeprecated_declarations,
12174 "%qD is deprecated", node);
12175 if (w)
12176 inform (DECL_SOURCE_LOCATION (node), "declared here");
12177 }
12178 else if (TYPE_P (node))
12179 {
12180 tree what = NULL_TREE;
12181 tree decl = TYPE_STUB_DECL (node);
12182
12183 if (TYPE_NAME (node))
12184 {
12185 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12186 what = TYPE_NAME (node);
12187 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12188 && DECL_NAME (TYPE_NAME (node)))
12189 what = DECL_NAME (TYPE_NAME (node));
12190 }
12191
12192 if (decl)
12193 {
12194 if (what)
12195 {
12196 if (msg)
12197 w = warning (OPT_Wdeprecated_declarations,
12198 "%qE is deprecated: %s", what, msg);
12199 else
12200 w = warning (OPT_Wdeprecated_declarations,
12201 "%qE is deprecated", what);
12202 }
12203 else
12204 {
12205 if (msg)
12206 w = warning (OPT_Wdeprecated_declarations,
12207 "type is deprecated: %s", msg);
12208 else
12209 w = warning (OPT_Wdeprecated_declarations,
12210 "type is deprecated");
12211 }
12212 if (w)
12213 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12214 }
12215 else
12216 {
12217 if (what)
12218 {
12219 if (msg)
12220 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12221 what, msg);
12222 else
12223 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12224 }
12225 else
12226 {
12227 if (msg)
12228 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12229 msg);
12230 else
12231 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12232 }
12233 }
12234 }
12235 }
12236
12237 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12238 somewhere in it. */
12239
12240 bool
12241 contains_bitfld_component_ref_p (const_tree ref)
12242 {
12243 while (handled_component_p (ref))
12244 {
12245 if (TREE_CODE (ref) == COMPONENT_REF
12246 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12247 return true;
12248 ref = TREE_OPERAND (ref, 0);
12249 }
12250
12251 return false;
12252 }
12253
12254 /* Try to determine whether a TRY_CATCH expression can fall through.
12255 This is a subroutine of block_may_fallthru. */
12256
12257 static bool
12258 try_catch_may_fallthru (const_tree stmt)
12259 {
12260 tree_stmt_iterator i;
12261
12262 /* If the TRY block can fall through, the whole TRY_CATCH can
12263 fall through. */
12264 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12265 return true;
12266
12267 i = tsi_start (TREE_OPERAND (stmt, 1));
12268 switch (TREE_CODE (tsi_stmt (i)))
12269 {
12270 case CATCH_EXPR:
12271 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12272 catch expression and a body. The whole TRY_CATCH may fall
12273 through iff any of the catch bodies falls through. */
12274 for (; !tsi_end_p (i); tsi_next (&i))
12275 {
12276 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12277 return true;
12278 }
12279 return false;
12280
12281 case EH_FILTER_EXPR:
12282 /* The exception filter expression only matters if there is an
12283 exception. If the exception does not match EH_FILTER_TYPES,
12284 we will execute EH_FILTER_FAILURE, and we will fall through
12285 if that falls through. If the exception does match
12286 EH_FILTER_TYPES, the stack unwinder will continue up the
12287 stack, so we will not fall through. We don't know whether we
12288 will throw an exception which matches EH_FILTER_TYPES or not,
12289 so we just ignore EH_FILTER_TYPES and assume that we might
12290 throw an exception which doesn't match. */
12291 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12292
12293 default:
12294 /* This case represents statements to be executed when an
12295 exception occurs. Those statements are implicitly followed
12296 by a RESX statement to resume execution after the exception.
12297 So in this case the TRY_CATCH never falls through. */
12298 return false;
12299 }
12300 }
12301
12302 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12303 need not be 100% accurate; simply be conservative and return true if we
12304 don't know. This is used only to avoid stupidly generating extra code.
12305 If we're wrong, we'll just delete the extra code later. */
12306
12307 bool
12308 block_may_fallthru (const_tree block)
12309 {
12310 /* This CONST_CAST is okay because expr_last returns its argument
12311 unmodified and we assign it to a const_tree. */
12312 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12313
12314 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12315 {
12316 case GOTO_EXPR:
12317 case RETURN_EXPR:
12318 /* Easy cases. If the last statement of the block implies
12319 control transfer, then we can't fall through. */
12320 return false;
12321
12322 case SWITCH_EXPR:
12323 /* If SWITCH_LABELS is set, this is lowered, and represents a
12324 	 branch to a selected label and hence cannot fall through.
12325 Otherwise SWITCH_BODY is set, and the switch can fall
12326 through. */
12327 return SWITCH_LABELS (stmt) == NULL_TREE;
12328
12329 case COND_EXPR:
12330 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12331 return true;
12332 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12333
12334 case BIND_EXPR:
12335 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12336
12337 case TRY_CATCH_EXPR:
12338 return try_catch_may_fallthru (stmt);
12339
12340 case TRY_FINALLY_EXPR:
12341 /* The finally clause is always executed after the try clause,
12342 so if it does not fall through, then the try-finally will not
12343 fall through. Otherwise, if the try clause does not fall
12344 through, then when the finally clause falls through it will
12345 resume execution wherever the try clause was going. So the
12346 whole try-finally will only fall through if both the try
12347 clause and the finally clause fall through. */
12348 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12349 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12350
12351 case MODIFY_EXPR:
12352 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12353 stmt = TREE_OPERAND (stmt, 1);
12354 else
12355 return true;
12356 /* FALLTHRU */
12357
12358 case CALL_EXPR:
12359 /* Functions that do not return do not fall through. */
12360 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12361
12362 case CLEANUP_POINT_EXPR:
12363 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12364
12365 case TARGET_EXPR:
12366 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12367
12368 case ERROR_MARK:
12369 return true;
12370
12371 default:
12372 return lang_hooks.block_may_fallthru (stmt);
12373 }
12374 }
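
/* Two simple illustrations of the above: a statement list whose last
   statement is a RETURN_EXPR cannot fall through, so false is returned;
   a list ending in a plain CALL_EXPR falls through unless the callee is
   marked noreturn (ECF_NORETURN), e.g. a call to abort ().  */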
12375
12376 /* True if we are using EH to handle cleanups. */
12377 static bool using_eh_for_cleanups_flag = false;
12378
12379 /* This routine is called from front ends to indicate eh should be used for
12380 cleanups. */
12381 void
12382 using_eh_for_cleanups (void)
12383 {
12384 using_eh_for_cleanups_flag = true;
12385 }
12386
12387 /* Query whether EH is used for cleanups. */
12388 bool
12389 using_eh_for_cleanups_p (void)
12390 {
12391 return using_eh_for_cleanups_flag;
12392 }
12393
12394 /* Wrapper for tree_code_name to ensure that the tree code is valid.  */
12395 const char *
12396 get_tree_code_name (enum tree_code code)
12397 {
12398 const char *invalid = "<invalid tree code>";
12399
12400 if (code >= MAX_TREE_CODES)
12401 return invalid;
12402
12403 return tree_code_name[code];
12404 }
12405
12406 /* Drops the TREE_OVERFLOW flag from T. */
12407
12408 tree
12409 drop_tree_overflow (tree t)
12410 {
12411 gcc_checking_assert (TREE_OVERFLOW (t));
12412
12413 /* For tree codes with a sharing machinery re-build the result. */
12414 if (TREE_CODE (t) == INTEGER_CST)
12415 return wide_int_to_tree (TREE_TYPE (t), t);
12416
12417 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12418 and drop the flag. */
12419 t = copy_node (t);
12420 TREE_OVERFLOW (t) = 0;
12421 return t;
12422 }
12423
12424 /* Given a memory reference expression T, return its base address.
12425 The base address of a memory reference expression is the main
12426 object being referenced. For instance, the base address for
12427 'array[i].fld[j]' is 'array'. You can think of this as stripping
12428 away the offset part from a memory address.
12429
12430 This function calls handled_component_p to strip away all the inner
12431 parts of the memory reference until it reaches the base object. */
12432
12433 tree
12434 get_base_address (tree t)
12435 {
12436 while (handled_component_p (t))
12437 t = TREE_OPERAND (t, 0);
12438
12439 if ((TREE_CODE (t) == MEM_REF
12440 || TREE_CODE (t) == TARGET_MEM_REF)
12441 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12442 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12443
12444 /* ??? Either the alias oracle or all callers need to properly deal
12445 with WITH_SIZE_EXPRs before we can look through those. */
12446 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12447 return NULL_TREE;
12448
12449 return t;
12450 }
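
/* For instance, a MEM_REF or TARGET_MEM_REF whose address operand is &a
   (an ADDR_EXPR of a declaration) has base address "a" itself, while for
   a WITH_SIZE_EXPR no base is returned (NULL_TREE), per the comment
   above.  */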
12451
12452 /* Return the machine mode of T. For vectors, returns the mode of the
12453 inner type. The main use case is to feed the result to HONOR_NANS,
12454 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12455
12456 machine_mode
12457 element_mode (const_tree t)
12458 {
12459 if (!TYPE_P (t))
12460 t = TREE_TYPE (t);
12461 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12462 t = TREE_TYPE (t);
12463 return TYPE_MODE (t);
12464 }
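
/* For example (mode names as on a typical target): for a V4SF vector type
   or a complex float type this returns SFmode rather than the vector or
   complex mode, which is what HONOR_NANS needs; for a plain double it
   simply returns DFmode.  */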
12465
12466 /* Verify that the basic properties of T match TV and thus T can be a variant
12467    of TV.  TV should be the more specific variant (i.e. the main variant).  */
12468
12469 static bool
12470 verify_type_variant (const_tree t, tree tv)
12471 {
12472 if (TREE_CODE (t) != TREE_CODE (tv))
12473 {
12474 error ("type variant has different TREE_CODE");
12475 debug_tree (tv);
12476 return false;
12477 }
12478 if (COMPLETE_TYPE_P (t) && TYPE_SIZE (t) != TYPE_SIZE (tv))
12479 {
12480 error ("type variant has different TYPE_SIZE");
12481 debug_tree (tv);
12482 error ("type variant's TYPE_SIZE");
12483 debug_tree (TYPE_SIZE (tv));
12484 error ("type's TYPE_SIZE");
12485 debug_tree (TYPE_SIZE (t));
12486 return false;
12487 }
12488 if (COMPLETE_TYPE_P (t)
12489 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12490       /* FIXME: ideally we should compare pointer equality, but the Java FE
12491 	 produces variants where the size is an INTEGER_CST of a different
12492 	 type (int wrt size_type) during the libjava build.  */
12493 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12494 {
12495 error ("type variant has different TYPE_SIZE_UNIT");
12496 debug_tree (tv);
12497 error ("type variant's TYPE_SIZE_UNIT");
12498 debug_tree (TYPE_SIZE_UNIT (tv));
12499 error ("type's TYPE_SIZE_UNIT");
12500 debug_tree (TYPE_SIZE_UNIT (t));
12501 return false;
12502 }
12503   /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12504      and dangles the pointer from time to time.  */
12505 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12506 && (!TYPE_VFIELD (tv) || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12507 {
12508 error ("type variant has different TYPE_VFIELD");
12509 debug_tree (tv);
12510 return false;
12511 }
12512 if (((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12513 || TREE_CODE (t) == INTEGER_TYPE
12514 || TREE_CODE (t) == BOOLEAN_TYPE
12515 || TREE_CODE (t) == REAL_TYPE
12516 || TREE_CODE (t) == FIXED_POINT_TYPE)
12517 && (TYPE_MAX_VALUE (t) != TYPE_MAX_VALUE (tv)
12518 || TYPE_MIN_VALUE (t) != TYPE_MIN_VALUE (tv)))
12519 {
12520 error ("type variant has different TYPE_MAX_VALUE or TYPE_MIN_VALUE");
12521 debug_tree (tv);
12522 return false;
12523 }
12524 if (TREE_CODE (t) == METHOD_TYPE
12525 && TYPE_METHOD_BASETYPE (t) != TYPE_METHOD_BASETYPE (tv))
12526 {
12527 error ("type variant has different TYPE_METHOD_BASETYPE");
12528 debug_tree (tv);
12529 return false;
12530 }
12531   /* FIXME: this check triggers during the libstdc++ build, which is a bug.
12532 It affects non-LTO debug output only, because free_lang_data clears
12533 this anyway. */
12534 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0
12535 && TYPE_METHODS (t) != TYPE_METHODS (tv))
12536 {
12537 error ("type variant has different TYPE_METHODS");
12538 debug_tree (tv);
12539 return false;
12540 }
12541 if (TREE_CODE (t) == OFFSET_TYPE
12542 && TYPE_OFFSET_BASETYPE (t) != TYPE_OFFSET_BASETYPE (tv))
12543 {
12544 error ("type variant has different TYPE_OFFSET_BASETYPE");
12545 debug_tree (tv);
12546 return false;
12547 }
12548 if (TREE_CODE (t) == ARRAY_TYPE
12549 && TYPE_ARRAY_MAX_SIZE (t) != TYPE_ARRAY_MAX_SIZE (tv))
12550 {
12551 error ("type variant has different TYPE_ARRAY_MAX_SIZE");
12552 debug_tree (tv);
12553 return false;
12554 }
12555 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12556      or even the type's main variant.  This is needed to make the bootstrap
12557      pass, and the bug seems new in GCC 5.
12558      The C++ FE should be updated to make this consistent, and we should check
12559      that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12560      is a match with the main variant.
12561
12562      Also disable the check for Java for now because of a parser hack that
12563      first builds a dummy BINFO and then sometimes replaces it by the real
12564      BINFO in some of the copies.  */
12565 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12566 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12567   /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12568      Since there is no cheap way to tell a C++ type from a Java type w/o LTO,
12569      do the checking at LTO time only.  */
12570 && (in_lto_p && odr_type_p (t)))
12571 {
12572 error ("type variant has different TYPE_BINFO");
12573 debug_tree (tv);
12574 error ("type variant's TYPE_BINFO");
12575 debug_tree (TYPE_BINFO (tv));
12576 error ("type's TYPE_BINFO");
12577 debug_tree (TYPE_BINFO (t));
12578 return false;
12579 }
12580 return true;
12581 }
12582
12583 /* Verify type T. */
12584
12585 void
12586 verify_type (const_tree t)
12587 {
12588 bool error_found = false;
12589 tree mv = TYPE_MAIN_VARIANT (t);
12590 if (!mv)
12591 {
12592 error ("Main variant is not defined");
12593 error_found = true;
12594 }
12595 else if (mv != TYPE_MAIN_VARIANT (mv))
12596 {
12597 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
12598 debug_tree (mv);
12599 error_found = true;
12600 }
12601 else if (t != mv && !verify_type_variant (t, mv))
12602 error_found = true;
12603
12604 /* Check various uses of TYPE_MINVAL. */
12605 if (RECORD_OR_UNION_TYPE_P (t))
12606 {
12607       /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12608 	 and dangles the pointer from time to time.  */
12609 if (TYPE_VFIELD (t)
12610 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
12611 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
12612 {
12613 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
12614 debug_tree (TYPE_VFIELD (t));
12615 error_found = true;
12616 }
12617 }
12618 else if (TREE_CODE (t) == POINTER_TYPE)
12619 {
12620 if (TYPE_NEXT_PTR_TO (t)
12621 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
12622 {
12623 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
12624 debug_tree (TYPE_NEXT_PTR_TO (t));
12625 error_found = true;
12626 }
12627 }
12628 else if (TREE_CODE (t) == REFERENCE_TYPE)
12629 {
12630 if (TYPE_NEXT_REF_TO (t)
12631 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
12632 {
12633 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
12634 debug_tree (TYPE_NEXT_REF_TO (t));
12635 error_found = true;
12636 }
12637 }
12638 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
12639 || TREE_CODE (t) == FIXED_POINT_TYPE)
12640 {
12641 /* FIXME: The following check should pass:
12642 useless_type_conversion_p (const_cast <tree> (t),
12643 	 TREE_TYPE (TYPE_MIN_VALUE (t)))
12644 but does not for C sizetypes in LTO. */
12645 }
12646 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
12647 else if (TYPE_MINVAL (t)
12648 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
12649 || in_lto_p))
12650 {
12651 error ("TYPE_MINVAL non-NULL");
12652 debug_tree (TYPE_MINVAL (t));
12653 error_found = true;
12654 }
12655
12656 /* Check various uses of TYPE_MAXVAL. */
12657 if (RECORD_OR_UNION_TYPE_P (t))
12658 {
12659 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
12660 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL)
12661 {
12662 error ("TYPE_METHODS is not FUNCTION_DECL nor TEMPLATE_DECL");
12663 debug_tree (TYPE_METHODS (t));
12664 error_found = true;
12665 }
12666 }
12667 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
12668 {
12669 if (TYPE_METHOD_BASETYPE (t)
12670 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
12671 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
12672 {
12673 error ("TYPE_METHOD_BASETYPE is not record nor union");
12674 debug_tree (TYPE_METHOD_BASETYPE (t));
12675 error_found = true;
12676 }
12677 }
12678 else if (TREE_CODE (t) == OFFSET_TYPE)
12679 {
12680 if (TYPE_OFFSET_BASETYPE (t)
12681 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
12682 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
12683 {
12684 error ("TYPE_OFFSET_BASETYPE is not record nor union");
12685 debug_tree (TYPE_OFFSET_BASETYPE (t));
12686 error_found = true;
12687 }
12688 }
12689 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
12690 || TREE_CODE (t) == FIXED_POINT_TYPE)
12691 {
12692 /* FIXME: The following check should pass:
12693 useless_type_conversion_p (const_cast <tree> (t),
12694 	 TREE_TYPE (TYPE_MAX_VALUE (t)))
12695 but does not for C sizetypes in LTO. */
12696 }
12697 else if (TREE_CODE (t) == ARRAY_TYPE)
12698 {
12699 if (TYPE_ARRAY_MAX_SIZE (t)
12700 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
12701 {
12702 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
12703 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
12704 error_found = true;
12705 }
12706 }
12707 else if (TYPE_MAXVAL (t))
12708 {
12709 error ("TYPE_MAXVAL non-NULL");
12710 debug_tree (TYPE_MAXVAL (t));
12711 error_found = true;
12712 }
12713
12714 /* Check various uses of TYPE_BINFO. */
12715 if (RECORD_OR_UNION_TYPE_P (t))
12716 {
12717 if (!TYPE_BINFO (t))
12718 ;
12719 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
12720 {
12721 error ("TYPE_BINFO is not TREE_BINFO");
12722 debug_tree (TYPE_BINFO (t));
12723 error_found = true;
12724 }
12725 /* FIXME: Java builds invalid empty binfos that do not have
12726 TREE_TYPE set. */
12727 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
12728 {
12729 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
12730 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
12731 error_found = true;
12732 }
12733 }
12734 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
12735 {
12736 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
12737 debug_tree (TYPE_LANG_SLOT_1 (t));
12738 error_found = true;
12739 }
12740
12741 /* Check various uses of TYPE_VALUES_RAW. */
12742 if (TREE_CODE (t) == ENUMERAL_TYPE)
12743 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
12744 {
12745 tree value = TREE_VALUE (l);
12746 tree name = TREE_PURPOSE (l);
12747
12748       /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE
12749 	 uses a CONST_DECL of ENUMERAL_TYPE.  */
12750 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
12751 {
12752 error ("Enum value is not CONST_DECL or INTEGER_CST");
12753 debug_tree (value);
12754 debug_tree (name);
12755 error_found = true;
12756 }
12757 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
12758 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
12759 {
12760 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
12761 debug_tree (value);
12762 debug_tree (name);
12763 error_found = true;
12764 }
12765 if (TREE_CODE (name) != IDENTIFIER_NODE)
12766 {
12767 error ("Enum value name is not IDENTIFIER_NODE");
12768 debug_tree (value);
12769 debug_tree (name);
12770 error_found = true;
12771 }
12772 }
12773 else if (TREE_CODE (t) == ARRAY_TYPE)
12774 {
12775 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
12776 {
12777 error ("Array TYPE_DOMAIN is not integer type");
12778 debug_tree (TYPE_DOMAIN (t));
12779 error_found = true;
12780 }
12781 }
12782 else if (RECORD_OR_UNION_TYPE_P (t))
12783 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
12784 {
12785 /* TODO: verify properties of decls. */
12786 if (TREE_CODE (fld) == FIELD_DECL)
12787 ;
12788 else if (TREE_CODE (fld) == TYPE_DECL)
12789 ;
12790 else if (TREE_CODE (fld) == CONST_DECL)
12791 ;
12792 else if (TREE_CODE (fld) == VAR_DECL)
12793 ;
12794 else if (TREE_CODE (fld) == TEMPLATE_DECL)
12795 ;
12796 else if (TREE_CODE (fld) == USING_DECL)
12797 ;
12798 else
12799 {
12800 error ("Wrong tree in TYPE_FIELDS list");
12801 debug_tree (fld);
12802 error_found = true;
12803 }
12804 }
12805 else if (TREE_CODE (t) == INTEGER_TYPE
12806 || TREE_CODE (t) == BOOLEAN_TYPE
12807 || TREE_CODE (t) == OFFSET_TYPE
12808 || TREE_CODE (t) == REFERENCE_TYPE
12809 || TREE_CODE (t) == NULLPTR_TYPE
12810 || TREE_CODE (t) == POINTER_TYPE)
12811 {
12812 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
12813 {
12814 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
12815 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
12816 error_found = true;
12817 }
12818 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
12819 {
12820 error ("TYPE_CACHED_VALUES is not TREE_VEC");
12821 debug_tree (TYPE_CACHED_VALUES (t));
12822 error_found = true;
12823 }
12824       /* Verify just enough of the cache to ensure that no one copied it to a
12825 	 new type.  All copying should go through copy_node, which should clear it.  */
12826 else if (TYPE_CACHED_VALUES_P (t))
12827 {
12828 int i;
12829 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
12830 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
12831 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
12832 {
12833 error ("wrong TYPE_CACHED_VALUES entry");
12834 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
12835 error_found = true;
12836 break;
12837 }
12838 }
12839 }
12840 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
12841 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
12842 {
12843 /* C++ FE uses TREE_PURPOSE to store initial values. */
12844 if (TREE_PURPOSE (l) && in_lto_p)
12845 {
12846 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
12847 debug_tree (l);
12848 error_found = true;
12849 }
12850 if (!TYPE_P (TREE_VALUE (l)))
12851 {
12852 error ("Wrong entry in TYPE_ARG_TYPES list");
12853 debug_tree (l);
12854 error_found = true;
12855 }
12856 }
12857 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
12858 {
12859 error ("TYPE_VALUES_RAW field is non-NULL");
12860 debug_tree (TYPE_VALUES_RAW (t));
12861 error_found = true;
12862 }
12863 if (TREE_CODE (t) != INTEGER_TYPE
12864 && TREE_CODE (t) != BOOLEAN_TYPE
12865 && TREE_CODE (t) != OFFSET_TYPE
12866 && TREE_CODE (t) != REFERENCE_TYPE
12867 && TREE_CODE (t) != NULLPTR_TYPE
12868 && TREE_CODE (t) != POINTER_TYPE
12869 && TYPE_CACHED_VALUES_P (t))
12870 {
12871       error ("TYPE_CACHED_VALUES_P is set while it should not be");
12872 error_found = true;
12873 }
12874
12875
12876 if (error_found)
12877 {
12878 debug_tree (const_cast <tree> (t));
12879 internal_error ("verify_type failed");
12880 }
12881 }
12882
12883 #include "gt-tree.h"