1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "alias.h"
36 #include "symtab.h"
37 #include "tree.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "calls.h"
41 #include "attribs.h"
42 #include "varasm.h"
43 #include "tm_p.h"
44 #include "hard-reg-set.h"
45 #include "function.h"
46 #include "obstack.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "filenames.h"
49 #include "output.h"
50 #include "target.h"
51 #include "common/common-target.h"
52 #include "langhooks.h"
53 #include "tree-inline.h"
54 #include "tree-iterator.h"
55 #include "predict.h"
56 #include "dominance.h"
57 #include "cfg.h"
58 #include "basic-block.h"
59 #include "bitmap.h"
60 #include "tree-ssa-alias.h"
61 #include "internal-fn.h"
62 #include "gimple-expr.h"
63 #include "gimple.h"
64 #include "gimple-iterator.h"
65 #include "gimplify.h"
66 #include "gimple-ssa.h"
67 #include "plugin-api.h"
68 #include "ipa-ref.h"
69 #include "cgraph.h"
70 #include "tree-phinodes.h"
71 #include "stringpool.h"
72 #include "tree-ssanames.h"
73 #include "rtl.h"
74 #include "insn-config.h"
75 #include "expmed.h"
76 #include "dojump.h"
77 #include "explow.h"
78 #include "emit-rtl.h"
79 #include "stmt.h"
80 #include "expr.h"
81 #include "tree-dfa.h"
82 #include "params.h"
83 #include "tree-pass.h"
84 #include "langhooks-def.h"
85 #include "diagnostic.h"
86 #include "tree-diagnostic.h"
87 #include "tree-pretty-print.h"
88 #include "except.h"
89 #include "debug.h"
90 #include "intl.h"
91 #include "builtins.h"
92 #include "print-tree.h"
93 #include "ipa-utils.h"
94
95 /* Tree code classes. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
98 #define END_OF_BASE_TREE_CODES tcc_exceptional,
99
100 const enum tree_code_class tree_code_type[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Table indexed by tree code giving number of expression
108 operands beyond the fixed part of the node structure.
109 Not used for types or decls. */
110
111 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
112 #define END_OF_BASE_TREE_CODES 0,
113
114 const unsigned char tree_code_length[] = {
115 #include "all-tree.def"
116 };
117
118 #undef DEFTREECODE
119 #undef END_OF_BASE_TREE_CODES
120
121 /* Names of tree components.
122 Used for printing out the tree and error messages. */
123 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
124 #define END_OF_BASE_TREE_CODES "@dummy",
125
126 static const char *const tree_code_name[] = {
127 #include "all-tree.def"
128 };
129
130 #undef DEFTREECODE
131 #undef END_OF_BASE_TREE_CODES
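
/* Illustrative note (editor's sketch, not in the original source): a single
   tree.def entry drives all three tables above.  For example the entry

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   expands, via the macro definitions above, into

     tree_code_type[PLUS_EXPR]   == tcc_binary
     tree_code_length[PLUS_EXPR] == 2
     tree_code_name[PLUS_EXPR]   == "plus_expr"

   so the tables cannot get out of sync with all-tree.def.  */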
132
133 /* Each tree code class has an associated string representation.
134 These must correspond to the tree_code_class entries. */
135
136 const char *const tree_code_class_strings[] =
137 {
138 "exceptional",
139 "constant",
140 "type",
141 "declaration",
142 "reference",
143 "comparison",
144 "unary",
145 "binary",
146 "statement",
147 "vl_exp",
148 "expression"
149 };
150
151 /* obstack.[ch] explicitly declined to prototype this. */
152 extern int _obstack_allocated_p (struct obstack *h, void *obj);
153
154 /* Statistics-gathering stuff. */
155
156 static int tree_code_counts[MAX_TREE_CODES];
157 int tree_node_counts[(int) all_kinds];
158 int tree_node_sizes[(int) all_kinds];
159
160 /* Keep in sync with tree.h:enum tree_node_kind. */
161 static const char * const tree_node_kind_names[] = {
162 "decls",
163 "types",
164 "blocks",
165 "stmts",
166 "refs",
167 "exprs",
168 "constants",
169 "identifiers",
170 "vecs",
171 "binfos",
172 "ssa names",
173 "constructors",
174 "random kinds",
175 "lang_decl kinds",
176 "lang_type kinds",
177 "omp clauses",
178 };
179
180 /* Unique id for next decl created. */
181 static GTY(()) int next_decl_uid;
182 /* Unique id for next type created. */
183 static GTY(()) int next_type_uid = 1;
184 /* Unique id for next debug decl created. Use negative numbers
185 to catch erroneous uses. */
186 static GTY(()) int next_debug_decl_uid;
187
188 /* Since we cannot rehash a type after it is in the table, we have to
189 keep the hash code. */
190
191 struct GTY((for_user)) type_hash {
192 unsigned long hash;
193 tree type;
194 };
195
196 /* Initial size of the hash table (rounded to next prime). */
197 #define TYPE_HASH_INITIAL_SIZE 1000
198
199 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
200 {
201 static hashval_t hash (type_hash *t) { return t->hash; }
202 static bool equal (type_hash *a, type_hash *b);
203
204 static void
205 handle_cache_entry (type_hash *&t)
206 {
207 extern void gt_ggc_mx (type_hash *&);
208 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
209 return;
210 else if (ggc_marked_p (t->type))
211 gt_ggc_mx (t);
212 else
213 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
214 }
215 };
216
217 /* Now here is the hash table. When recording a type, it is added to
218 the slot whose index is the hash code. Note that the hash table is
219 used for several kinds of types (function types, array types and
220 array index range types, for now). While all these live in the
221 same table, they are completely independent, and the hash code is
222 computed differently for each of these. */
223
224 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
225
226 /* Hash table and temporary node for larger integer const values. */
227 static GTY (()) tree int_cst_node;
228
229 struct int_cst_hasher : ggc_cache_hasher<tree>
230 {
231 static hashval_t hash (tree t);
232 static bool equal (tree x, tree y);
233 };
234
235 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
236
237 /* Hash table for optimization flags and target option flags. Use the same
238 hash table for both sets of options. Nodes for building the current
239 optimization and target option nodes. The assumption is most of the time
240 the options created will already be in the hash table, so we avoid
241 allocating and freeing up a node repeatedly. */
242 static GTY (()) tree cl_optimization_node;
243 static GTY (()) tree cl_target_option_node;
244
245 struct cl_option_hasher : ggc_cache_hasher<tree>
246 {
247 static hashval_t hash (tree t);
248 static bool equal (tree x, tree y);
249 };
250
251 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
252
253 /* General tree->tree mapping structure for use in hash tables. */
254
255
256 static GTY ((cache))
257 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
258
259 static GTY ((cache))
260 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
261
262 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
263 {
264 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
265
266 static bool
267 equal (tree_vec_map *a, tree_vec_map *b)
268 {
269 return a->base.from == b->base.from;
270 }
271
272 static void
273 handle_cache_entry (tree_vec_map *&m)
274 {
275 extern void gt_ggc_mx (tree_vec_map *&);
276 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
277 return;
278 else if (ggc_marked_p (m->base.from))
279 gt_ggc_mx (m);
280 else
281 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
282 }
283 };
284
285 static GTY ((cache))
286 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
287
288 static void set_type_quals (tree, int);
289 static void print_type_hash_statistics (void);
290 static void print_debug_expr_statistics (void);
291 static void print_value_expr_statistics (void);
292 static void type_hash_list (const_tree, inchash::hash &);
293 static void attribute_hash_list (const_tree, inchash::hash &);
294
295 tree global_trees[TI_MAX];
296 tree integer_types[itk_none];
297
298 bool int_n_enabled_p[NUM_INT_N_ENTS];
299 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
300
301 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
302
303 /* Number of operands for each OpenMP clause. */
304 unsigned const char omp_clause_num_ops[] =
305 {
306 0, /* OMP_CLAUSE_ERROR */
307 1, /* OMP_CLAUSE_PRIVATE */
308 1, /* OMP_CLAUSE_SHARED */
309 1, /* OMP_CLAUSE_FIRSTPRIVATE */
310 2, /* OMP_CLAUSE_LASTPRIVATE */
311 4, /* OMP_CLAUSE_REDUCTION */
312 1, /* OMP_CLAUSE_COPYIN */
313 1, /* OMP_CLAUSE_COPYPRIVATE */
314 3, /* OMP_CLAUSE_LINEAR */
315 2, /* OMP_CLAUSE_ALIGNED */
316 1, /* OMP_CLAUSE_DEPEND */
317 1, /* OMP_CLAUSE_UNIFORM */
318 2, /* OMP_CLAUSE_FROM */
319 2, /* OMP_CLAUSE_TO */
320 2, /* OMP_CLAUSE_MAP */
321 2, /* OMP_CLAUSE__CACHE_ */
322 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
323 1, /* OMP_CLAUSE_USE_DEVICE */
324 2, /* OMP_CLAUSE_GANG */
325 1, /* OMP_CLAUSE_ASYNC */
326 1, /* OMP_CLAUSE_WAIT */
327 0, /* OMP_CLAUSE_AUTO */
328 0, /* OMP_CLAUSE_SEQ */
329 1, /* OMP_CLAUSE__LOOPTEMP_ */
330 1, /* OMP_CLAUSE_IF */
331 1, /* OMP_CLAUSE_NUM_THREADS */
332 1, /* OMP_CLAUSE_SCHEDULE */
333 0, /* OMP_CLAUSE_NOWAIT */
334 0, /* OMP_CLAUSE_ORDERED */
335 0, /* OMP_CLAUSE_DEFAULT */
336 3, /* OMP_CLAUSE_COLLAPSE */
337 0, /* OMP_CLAUSE_UNTIED */
338 1, /* OMP_CLAUSE_FINAL */
339 0, /* OMP_CLAUSE_MERGEABLE */
340 1, /* OMP_CLAUSE_DEVICE */
341 1, /* OMP_CLAUSE_DIST_SCHEDULE */
342 0, /* OMP_CLAUSE_INBRANCH */
343 0, /* OMP_CLAUSE_NOTINBRANCH */
344 1, /* OMP_CLAUSE_NUM_TEAMS */
345 1, /* OMP_CLAUSE_THREAD_LIMIT */
346 0, /* OMP_CLAUSE_PROC_BIND */
347 1, /* OMP_CLAUSE_SAFELEN */
348 1, /* OMP_CLAUSE_SIMDLEN */
349 0, /* OMP_CLAUSE_FOR */
350 0, /* OMP_CLAUSE_PARALLEL */
351 0, /* OMP_CLAUSE_SECTIONS */
352 0, /* OMP_CLAUSE_TASKGROUP */
353 1, /* OMP_CLAUSE__SIMDUID_ */
354 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
355 0, /* OMP_CLAUSE_INDEPENDENT */
356 1, /* OMP_CLAUSE_WORKER */
357 1, /* OMP_CLAUSE_VECTOR */
358 1, /* OMP_CLAUSE_NUM_GANGS */
359 1, /* OMP_CLAUSE_NUM_WORKERS */
360 1, /* OMP_CLAUSE_VECTOR_LENGTH */
361 };
362
363 const char * const omp_clause_code_name[] =
364 {
365 "error_clause",
366 "private",
367 "shared",
368 "firstprivate",
369 "lastprivate",
370 "reduction",
371 "copyin",
372 "copyprivate",
373 "linear",
374 "aligned",
375 "depend",
376 "uniform",
377 "from",
378 "to",
379 "map",
380 "_cache_",
381 "device_resident",
382 "use_device",
383 "gang",
384 "async",
385 "wait",
386 "auto",
387 "seq",
388 "_looptemp_",
389 "if",
390 "num_threads",
391 "schedule",
392 "nowait",
393 "ordered",
394 "default",
395 "collapse",
396 "untied",
397 "final",
398 "mergeable",
399 "device",
400 "dist_schedule",
401 "inbranch",
402 "notinbranch",
403 "num_teams",
404 "thread_limit",
405 "proc_bind",
406 "safelen",
407 "simdlen",
408 "for",
409 "parallel",
410 "sections",
411 "taskgroup",
412 "_simduid_",
413 "_Cilk_for_count_",
414 "independent",
415 "worker",
416 "vector",
417 "num_gangs",
418 "num_workers",
419 "vector_length"
420 };
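
/* Illustrative note (editor's sketch, not in the original source): both
   tables are indexed by enum omp_clause_code, so for instance

     omp_clause_num_ops[OMP_CLAUSE_MAP]   == 2
     omp_clause_code_name[OMP_CLAUSE_MAP] == "map"

   and tree_size below uses omp_clause_num_ops to size variable-length
   OMP_CLAUSE nodes.  */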
421
422
423 /* Return the tree node structure used by tree code CODE. */
424
425 static inline enum tree_node_structure_enum
426 tree_node_structure_for_code (enum tree_code code)
427 {
428 switch (TREE_CODE_CLASS (code))
429 {
430 case tcc_declaration:
431 {
432 switch (code)
433 {
434 case FIELD_DECL:
435 return TS_FIELD_DECL;
436 case PARM_DECL:
437 return TS_PARM_DECL;
438 case VAR_DECL:
439 return TS_VAR_DECL;
440 case LABEL_DECL:
441 return TS_LABEL_DECL;
442 case RESULT_DECL:
443 return TS_RESULT_DECL;
444 case DEBUG_EXPR_DECL:
445 return TS_DECL_WRTL;
446 case CONST_DECL:
447 return TS_CONST_DECL;
448 case TYPE_DECL:
449 return TS_TYPE_DECL;
450 case FUNCTION_DECL:
451 return TS_FUNCTION_DECL;
452 case TRANSLATION_UNIT_DECL:
453 return TS_TRANSLATION_UNIT_DECL;
454 default:
455 return TS_DECL_NON_COMMON;
456 }
457 }
458 case tcc_type:
459 return TS_TYPE_NON_COMMON;
460 case tcc_reference:
461 case tcc_comparison:
462 case tcc_unary:
463 case tcc_binary:
464 case tcc_expression:
465 case tcc_statement:
466 case tcc_vl_exp:
467 return TS_EXP;
468 default: /* tcc_constant and tcc_exceptional */
469 break;
470 }
471 switch (code)
472 {
473 /* tcc_constant cases. */
474 case VOID_CST: return TS_TYPED;
475 case INTEGER_CST: return TS_INT_CST;
476 case REAL_CST: return TS_REAL_CST;
477 case FIXED_CST: return TS_FIXED_CST;
478 case COMPLEX_CST: return TS_COMPLEX;
479 case VECTOR_CST: return TS_VECTOR;
480 case STRING_CST: return TS_STRING;
481 /* tcc_exceptional cases. */
482 case ERROR_MARK: return TS_COMMON;
483 case IDENTIFIER_NODE: return TS_IDENTIFIER;
484 case TREE_LIST: return TS_LIST;
485 case TREE_VEC: return TS_VEC;
486 case SSA_NAME: return TS_SSA_NAME;
487 case PLACEHOLDER_EXPR: return TS_COMMON;
488 case STATEMENT_LIST: return TS_STATEMENT_LIST;
489 case BLOCK: return TS_BLOCK;
490 case CONSTRUCTOR: return TS_CONSTRUCTOR;
491 case TREE_BINFO: return TS_BINFO;
492 case OMP_CLAUSE: return TS_OMP_CLAUSE;
493 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
494 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
495
496 default:
497 gcc_unreachable ();
498 }
499 }
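
/* Examples (editor's sketch, not in the original source), following the
   switch above:

     tree_node_structure_for_code (VAR_DECL)    == TS_VAR_DECL
     tree_node_structure_for_code (PLUS_EXPR)   == TS_EXP
     tree_node_structure_for_code (INTEGER_CST) == TS_INT_CST

   These values seed tree_contains_struct in
   initialize_tree_contains_struct below.  */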
500
501
502 /* Initialize tree_contains_struct to describe the hierarchy of tree
503 nodes. */
504
505 static void
506 initialize_tree_contains_struct (void)
507 {
508 unsigned i;
509
510 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
511 {
512 enum tree_code code;
513 enum tree_node_structure_enum ts_code;
514
515 code = (enum tree_code) i;
516 ts_code = tree_node_structure_for_code (code);
517
518 /* Mark the TS structure itself. */
519 tree_contains_struct[code][ts_code] = 1;
520
521 /* Mark all the structures that TS is derived from. */
522 switch (ts_code)
523 {
524 case TS_TYPED:
525 case TS_BLOCK:
526 MARK_TS_BASE (code);
527 break;
528
529 case TS_COMMON:
530 case TS_INT_CST:
531 case TS_REAL_CST:
532 case TS_FIXED_CST:
533 case TS_VECTOR:
534 case TS_STRING:
535 case TS_COMPLEX:
536 case TS_SSA_NAME:
537 case TS_CONSTRUCTOR:
538 case TS_EXP:
539 case TS_STATEMENT_LIST:
540 MARK_TS_TYPED (code);
541 break;
542
543 case TS_IDENTIFIER:
544 case TS_DECL_MINIMAL:
545 case TS_TYPE_COMMON:
546 case TS_LIST:
547 case TS_VEC:
548 case TS_BINFO:
549 case TS_OMP_CLAUSE:
550 case TS_OPTIMIZATION:
551 case TS_TARGET_OPTION:
552 MARK_TS_COMMON (code);
553 break;
554
555 case TS_TYPE_WITH_LANG_SPECIFIC:
556 MARK_TS_TYPE_COMMON (code);
557 break;
558
559 case TS_TYPE_NON_COMMON:
560 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
561 break;
562
563 case TS_DECL_COMMON:
564 MARK_TS_DECL_MINIMAL (code);
565 break;
566
567 case TS_DECL_WRTL:
568 case TS_CONST_DECL:
569 MARK_TS_DECL_COMMON (code);
570 break;
571
572 case TS_DECL_NON_COMMON:
573 MARK_TS_DECL_WITH_VIS (code);
574 break;
575
576 case TS_DECL_WITH_VIS:
577 case TS_PARM_DECL:
578 case TS_LABEL_DECL:
579 case TS_RESULT_DECL:
580 MARK_TS_DECL_WRTL (code);
581 break;
582
583 case TS_FIELD_DECL:
584 MARK_TS_DECL_COMMON (code);
585 break;
586
587 case TS_VAR_DECL:
588 MARK_TS_DECL_WITH_VIS (code);
589 break;
590
591 case TS_TYPE_DECL:
592 case TS_FUNCTION_DECL:
593 MARK_TS_DECL_NON_COMMON (code);
594 break;
595
596 case TS_TRANSLATION_UNIT_DECL:
597 MARK_TS_DECL_COMMON (code);
598 break;
599
600 default:
601 gcc_unreachable ();
602 }
603 }
604
605 /* Basic consistency checks for attributes used in fold. */
606 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
607 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
608 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
609 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
610 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
611 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
612 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
613 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
614 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
615 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
616 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
617 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
618 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
619 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
620 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
621 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
622 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
623 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
624 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
625 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
627 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
629 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
630 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
631 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
632 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
633 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
634 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
635 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
636 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
637 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
639 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
641 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
642 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
643 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
645 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
646 }
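
/* Worked example (editor's sketch, not in the original source): for
   VAR_DECL the marking above walks the whole derivation chain

     TS_VAR_DECL -> TS_DECL_WITH_VIS -> TS_DECL_WRTL -> TS_DECL_COMMON
                 -> TS_DECL_MINIMAL -> TS_COMMON -> TS_TYPED -> TS_BASE

   so checks such as CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_COMMON) hold,
   as the asserts above verify.  */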
647
648
649 /* Init tree.c. */
650
651 void
652 init_ttree (void)
653 {
654 /* Initialize the hash table of types. */
655 type_hash_table
656 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
657
658 debug_expr_for_decl
659 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
660
661 value_expr_for_decl
662 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
663
664 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
665
666 int_cst_node = make_int_cst (1, 1);
667
668 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
669
670 cl_optimization_node = make_node (OPTIMIZATION_NODE);
671 cl_target_option_node = make_node (TARGET_OPTION_NODE);
672
673 /* Initialize the tree_contains_struct array. */
674 initialize_tree_contains_struct ();
675 lang_hooks.init_ts ();
676 }
677
678 \f
679 /* The name of the object as the assembler will see it (but before any
680 translations made by ASM_OUTPUT_LABELREF). Often this is the same
681 as DECL_NAME. It is an IDENTIFIER_NODE. */
682 tree
683 decl_assembler_name (tree decl)
684 {
685 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
686 lang_hooks.set_decl_assembler_name (decl);
687 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
688 }
689
690 /* When the target supports COMDAT groups, this indicates which group the
691 DECL is associated with. This can be either an IDENTIFIER_NODE or a
692 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
693 tree
694 decl_comdat_group (const_tree node)
695 {
696 struct symtab_node *snode = symtab_node::get (node);
697 if (!snode)
698 return NULL;
699 return snode->get_comdat_group ();
700 }
701
702 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
703 tree
704 decl_comdat_group_id (const_tree node)
705 {
706 struct symtab_node *snode = symtab_node::get (node);
707 if (!snode)
708 return NULL;
709 return snode->get_comdat_group_id ();
710 }
711
712 /* When the target supports named sections, return the name of the section
713 NODE is placed in, or NULL if it is in no section. */
714 const char *
715 decl_section_name (const_tree node)
716 {
717 struct symtab_node *snode = symtab_node::get (node);
718 if (!snode)
719 return NULL;
720 return snode->get_section ();
721 }
722
723 /* Set the section name of NODE to the string VALUE, or clear it
724 if VALUE is NULL. */
725 void
726 set_decl_section_name (tree node, const char *value)
727 {
728 struct symtab_node *snode;
729
730 if (value == NULL)
731 {
732 snode = symtab_node::get (node);
733 if (!snode)
734 return;
735 }
736 else if (TREE_CODE (node) == VAR_DECL)
737 snode = varpool_node::get_create (node);
738 else
739 snode = cgraph_node::get_create (node);
740 snode->set_section (value);
741 }
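
/* Usage sketch (editor's addition, illustrative only; DECL is hypothetical):

     set_decl_section_name (decl, ".my_section");   place DECL in .my_section
     set_decl_section_name (decl, NULL);            drop any explicit section

   A NULL VALUE only touches decls that already have a symtab node.  */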
742
743 /* Return TLS model of a variable NODE. */
744 enum tls_model
745 decl_tls_model (const_tree node)
746 {
747 struct varpool_node *snode = varpool_node::get (node);
748 if (!snode)
749 return TLS_MODEL_NONE;
750 return snode->tls_model;
751 }
752
753 /* Set TLS model of variable NODE to MODEL. */
754 void
755 set_decl_tls_model (tree node, enum tls_model model)
756 {
757 struct varpool_node *vnode;
758
759 if (model == TLS_MODEL_NONE)
760 {
761 vnode = varpool_node::get (node);
762 if (!vnode)
763 return;
764 }
765 else
766 vnode = varpool_node::get_create (node);
767 vnode->tls_model = model;
768 }
769
770 /* Compute the number of bytes occupied by a tree with code CODE.
771 This function cannot be used for nodes that have variable sizes,
772 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
773 size_t
774 tree_code_size (enum tree_code code)
775 {
776 switch (TREE_CODE_CLASS (code))
777 {
778 case tcc_declaration: /* A decl node */
779 {
780 switch (code)
781 {
782 case FIELD_DECL:
783 return sizeof (struct tree_field_decl);
784 case PARM_DECL:
785 return sizeof (struct tree_parm_decl);
786 case VAR_DECL:
787 return sizeof (struct tree_var_decl);
788 case LABEL_DECL:
789 return sizeof (struct tree_label_decl);
790 case RESULT_DECL:
791 return sizeof (struct tree_result_decl);
792 case CONST_DECL:
793 return sizeof (struct tree_const_decl);
794 case TYPE_DECL:
795 return sizeof (struct tree_type_decl);
796 case FUNCTION_DECL:
797 return sizeof (struct tree_function_decl);
798 case DEBUG_EXPR_DECL:
799 return sizeof (struct tree_decl_with_rtl);
800 case TRANSLATION_UNIT_DECL:
801 return sizeof (struct tree_translation_unit_decl);
802 case NAMESPACE_DECL:
803 case IMPORTED_DECL:
804 case NAMELIST_DECL:
805 return sizeof (struct tree_decl_non_common);
806 default:
807 return lang_hooks.tree_size (code);
808 }
809 }
810
811 case tcc_type: /* a type node */
812 return sizeof (struct tree_type_non_common);
813
814 case tcc_reference: /* a reference */
815 case tcc_expression: /* an expression */
816 case tcc_statement: /* an expression with side effects */
817 case tcc_comparison: /* a comparison expression */
818 case tcc_unary: /* a unary arithmetic expression */
819 case tcc_binary: /* a binary arithmetic expression */
820 return (sizeof (struct tree_exp)
821 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
822
823 case tcc_constant: /* a constant */
824 switch (code)
825 {
826 case VOID_CST: return sizeof (struct tree_typed);
827 case INTEGER_CST: gcc_unreachable ();
828 case REAL_CST: return sizeof (struct tree_real_cst);
829 case FIXED_CST: return sizeof (struct tree_fixed_cst);
830 case COMPLEX_CST: return sizeof (struct tree_complex);
831 case VECTOR_CST: return sizeof (struct tree_vector);
832 case STRING_CST: gcc_unreachable ();
833 default:
834 return lang_hooks.tree_size (code);
835 }
836
837 case tcc_exceptional: /* something random, like an identifier. */
838 switch (code)
839 {
840 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
841 case TREE_LIST: return sizeof (struct tree_list);
842
843 case ERROR_MARK:
844 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
845
846 case TREE_VEC:
847 case OMP_CLAUSE: gcc_unreachable ();
848
849 case SSA_NAME: return sizeof (struct tree_ssa_name);
850
851 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
852 case BLOCK: return sizeof (struct tree_block);
853 case CONSTRUCTOR: return sizeof (struct tree_constructor);
854 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
855 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
856
857 default:
858 return lang_hooks.tree_size (code);
859 }
860
861 default:
862 gcc_unreachable ();
863 }
864 }
865
866 /* Compute the number of bytes occupied by NODE. This routine only
867 looks at TREE_CODE, except for those nodes that have variable sizes. */
868 size_t
869 tree_size (const_tree node)
870 {
871 const enum tree_code code = TREE_CODE (node);
872 switch (code)
873 {
874 case INTEGER_CST:
875 return (sizeof (struct tree_int_cst)
876 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
877
878 case TREE_BINFO:
879 return (offsetof (struct tree_binfo, base_binfos)
880 + vec<tree, va_gc>
881 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
882
883 case TREE_VEC:
884 return (sizeof (struct tree_vec)
885 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
886
887 case VECTOR_CST:
888 return (sizeof (struct tree_vector)
889 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
890
891 case STRING_CST:
892 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
893
894 case OMP_CLAUSE:
895 return (sizeof (struct tree_omp_clause)
896 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
897 * sizeof (tree));
898
899 default:
900 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
901 return (sizeof (struct tree_exp)
902 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
903 else
904 return tree_code_size (code);
905 }
906 }
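
/* Worked example (editor's sketch, not in the original source): a TREE_VEC
   with TREE_VEC_LENGTH of 3 occupies

     sizeof (struct tree_vec) + (3 - 1) * sizeof (tree)

   bytes, i.e. the fixed part plus two extra embedded operand slots, since
   one slot is already counted in struct tree_vec.  */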
907
908 /* Record interesting allocation statistics for a tree node with CODE
909 and LENGTH. */
910
911 static void
912 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
913 size_t length ATTRIBUTE_UNUSED)
914 {
915 enum tree_code_class type = TREE_CODE_CLASS (code);
916 tree_node_kind kind;
917
918 if (!GATHER_STATISTICS)
919 return;
920
921 switch (type)
922 {
923 case tcc_declaration: /* A decl node */
924 kind = d_kind;
925 break;
926
927 case tcc_type: /* a type node */
928 kind = t_kind;
929 break;
930
931 case tcc_statement: /* an expression with side effects */
932 kind = s_kind;
933 break;
934
935 case tcc_reference: /* a reference */
936 kind = r_kind;
937 break;
938
939 case tcc_expression: /* an expression */
940 case tcc_comparison: /* a comparison expression */
941 case tcc_unary: /* a unary arithmetic expression */
942 case tcc_binary: /* a binary arithmetic expression */
943 kind = e_kind;
944 break;
945
946 case tcc_constant: /* a constant */
947 kind = c_kind;
948 break;
949
950 case tcc_exceptional: /* something random, like an identifier. */
951 switch (code)
952 {
953 case IDENTIFIER_NODE:
954 kind = id_kind;
955 break;
956
957 case TREE_VEC:
958 kind = vec_kind;
959 break;
960
961 case TREE_BINFO:
962 kind = binfo_kind;
963 break;
964
965 case SSA_NAME:
966 kind = ssa_name_kind;
967 break;
968
969 case BLOCK:
970 kind = b_kind;
971 break;
972
973 case CONSTRUCTOR:
974 kind = constr_kind;
975 break;
976
977 case OMP_CLAUSE:
978 kind = omp_clause_kind;
979 break;
980
981 default:
982 kind = x_kind;
983 break;
984 }
985 break;
986
987 case tcc_vl_exp:
988 kind = e_kind;
989 break;
990
991 default:
992 gcc_unreachable ();
993 }
994
995 tree_code_counts[(int) code]++;
996 tree_node_counts[(int) kind]++;
997 tree_node_sizes[(int) kind] += length;
998 }
999
1000 /* Allocate and return a new UID from the DECL_UID namespace. */
1001
1002 int
1003 allocate_decl_uid (void)
1004 {
1005 return next_decl_uid++;
1006 }
1007
1008 /* Return a newly allocated node of code CODE. For decl and type
1009 nodes, some other fields are initialized. The rest of the node is
1010 initialized to zero. This function cannot be used for TREE_VEC,
1011 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1012 tree_code_size.
1013
1014 Achoo! I got a code in the node. */
1015
1016 tree
1017 make_node_stat (enum tree_code code MEM_STAT_DECL)
1018 {
1019 tree t;
1020 enum tree_code_class type = TREE_CODE_CLASS (code);
1021 size_t length = tree_code_size (code);
1022
1023 record_node_allocation_statistics (code, length);
1024
1025 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1026 TREE_SET_CODE (t, code);
1027
1028 switch (type)
1029 {
1030 case tcc_statement:
1031 TREE_SIDE_EFFECTS (t) = 1;
1032 break;
1033
1034 case tcc_declaration:
1035 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1036 {
1037 if (code == FUNCTION_DECL)
1038 {
1039 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1040 DECL_MODE (t) = FUNCTION_MODE;
1041 }
1042 else
1043 DECL_ALIGN (t) = 1;
1044 }
1045 DECL_SOURCE_LOCATION (t) = input_location;
1046 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1047 DECL_UID (t) = --next_debug_decl_uid;
1048 else
1049 {
1050 DECL_UID (t) = allocate_decl_uid ();
1051 SET_DECL_PT_UID (t, -1);
1052 }
1053 if (TREE_CODE (t) == LABEL_DECL)
1054 LABEL_DECL_UID (t) = -1;
1055
1056 break;
1057
1058 case tcc_type:
1059 TYPE_UID (t) = next_type_uid++;
1060 TYPE_ALIGN (t) = BITS_PER_UNIT;
1061 TYPE_USER_ALIGN (t) = 0;
1062 TYPE_MAIN_VARIANT (t) = t;
1063 TYPE_CANONICAL (t) = t;
1064
1065 /* Default to no attributes for type, but let target change that. */
1066 TYPE_ATTRIBUTES (t) = NULL_TREE;
1067 targetm.set_default_type_attributes (t);
1068
1069 /* We have not yet computed the alias set for this type. */
1070 TYPE_ALIAS_SET (t) = -1;
1071 break;
1072
1073 case tcc_constant:
1074 TREE_CONSTANT (t) = 1;
1075 break;
1076
1077 case tcc_expression:
1078 switch (code)
1079 {
1080 case INIT_EXPR:
1081 case MODIFY_EXPR:
1082 case VA_ARG_EXPR:
1083 case PREDECREMENT_EXPR:
1084 case PREINCREMENT_EXPR:
1085 case POSTDECREMENT_EXPR:
1086 case POSTINCREMENT_EXPR:
1087 /* All of these have side-effects, no matter what their
1088 operands are. */
1089 TREE_SIDE_EFFECTS (t) = 1;
1090 break;
1091
1092 default:
1093 break;
1094 }
1095 break;
1096
1097 default:
1098 /* Other classes need no special treatment. */
1099 break;
1100 }
1101
1102 return t;
1103 }
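
/* Usage sketch (editor's addition, illustrative only): most callers use the
   make_node wrapper rather than make_node_stat directly, e.g.

     tree blk = make_node (BLOCK);

   Variable-sized codes must instead go through their dedicated
   constructors: make_tree_vec, make_int_cst and build_omp_clause.  */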
1104 \f
1105 /* Return a new node with the same contents as NODE except that its
1106 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1107
1108 tree
1109 copy_node_stat (tree node MEM_STAT_DECL)
1110 {
1111 tree t;
1112 enum tree_code code = TREE_CODE (node);
1113 size_t length;
1114
1115 gcc_assert (code != STATEMENT_LIST);
1116
1117 length = tree_size (node);
1118 record_node_allocation_statistics (code, length);
1119 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1120 memcpy (t, node, length);
1121
1122 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1123 TREE_CHAIN (t) = 0;
1124 TREE_ASM_WRITTEN (t) = 0;
1125 TREE_VISITED (t) = 0;
1126
1127 if (TREE_CODE_CLASS (code) == tcc_declaration)
1128 {
1129 if (code == DEBUG_EXPR_DECL)
1130 DECL_UID (t) = --next_debug_decl_uid;
1131 else
1132 {
1133 DECL_UID (t) = allocate_decl_uid ();
1134 if (DECL_PT_UID_SET_P (node))
1135 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1136 }
1137 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1138 && DECL_HAS_VALUE_EXPR_P (node))
1139 {
1140 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1141 DECL_HAS_VALUE_EXPR_P (t) = 1;
1142 }
1143 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1144 if (TREE_CODE (node) == VAR_DECL)
1145 {
1146 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1147 t->decl_with_vis.symtab_node = NULL;
1148 }
1149 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1150 {
1151 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1152 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1153 }
1154 if (TREE_CODE (node) == FUNCTION_DECL)
1155 {
1156 DECL_STRUCT_FUNCTION (t) = NULL;
1157 t->decl_with_vis.symtab_node = NULL;
1158 }
1159 }
1160 else if (TREE_CODE_CLASS (code) == tcc_type)
1161 {
1162 TYPE_UID (t) = next_type_uid++;
1163 /* The following is so that the debug code for
1164 the copy is different from the original type.
1165 The two statements usually duplicate each other
1166 (because they clear fields of the same union),
1167 but the optimizer should catch that. */
1168 TYPE_SYMTAB_POINTER (t) = 0;
1169 TYPE_SYMTAB_ADDRESS (t) = 0;
1170
1171 /* Do not copy the values cache. */
1172 if (TYPE_CACHED_VALUES_P (t))
1173 {
1174 TYPE_CACHED_VALUES_P (t) = 0;
1175 TYPE_CACHED_VALUES (t) = NULL_TREE;
1176 }
1177 }
1178
1179 return t;
1180 }
1181
1182 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1183 For example, this can copy a list made of TREE_LIST nodes. */
1184
1185 tree
1186 copy_list (tree list)
1187 {
1188 tree head;
1189 tree prev, next;
1190
1191 if (list == 0)
1192 return 0;
1193
1194 head = prev = copy_node (list);
1195 next = TREE_CHAIN (list);
1196 while (next)
1197 {
1198 TREE_CHAIN (prev) = copy_node (next);
1199 prev = TREE_CHAIN (prev);
1200 next = TREE_CHAIN (next);
1201 }
1202 return head;
1203 }
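
/* Usage sketch (editor's addition, illustrative only): copying a two-element
   TREE_LIST chain,

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
                            tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
     tree copy = copy_list (orig);

   The copy has fresh TREE_LIST nodes but shares the TREE_VALUEs, so
   rechaining COPY does not disturb ORIG.  */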
1204
1205 \f
1206 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1207 INTEGER_CST with value CST and type TYPE. */
1208
1209 static unsigned int
1210 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1211 {
1212 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1213 /* We need an extra zero HWI if CST is an unsigned integer with its
1214 upper bit set, and if CST occupies a whole number of HWIs. */
1215 if (TYPE_UNSIGNED (type)
1216 && wi::neg_p (cst)
1217 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1218 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1219 return cst.get_len ();
1220 }
1221
1222 /* Return a new INTEGER_CST with value CST and type TYPE. */
1223
1224 static tree
1225 build_new_int_cst (tree type, const wide_int &cst)
1226 {
1227 unsigned int len = cst.get_len ();
1228 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1229 tree nt = make_int_cst (len, ext_len);
1230
1231 if (len < ext_len)
1232 {
1233 --ext_len;
1234 TREE_INT_CST_ELT (nt, ext_len) = 0;
1235 for (unsigned int i = len; i < ext_len; ++i)
1236 TREE_INT_CST_ELT (nt, i) = -1;
1237 }
1238 else if (TYPE_UNSIGNED (type)
1239 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1240 {
1241 len--;
1242 TREE_INT_CST_ELT (nt, len)
1243 = zext_hwi (cst.elt (len),
1244 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1245 }
1246
1247 for (unsigned int i = 0; i < len; i++)
1248 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1249 TREE_TYPE (nt) = type;
1250 return nt;
1251 }
1252
1253 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1254
1255 tree
1256 build_int_cst (tree type, HOST_WIDE_INT low)
1257 {
1258 /* Support legacy code. */
1259 if (!type)
1260 type = integer_type_node;
1261
1262 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1263 }
1264
1265 tree
1266 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1267 {
1268 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1269 }
1270
1271 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1272
1273 tree
1274 build_int_cst_type (tree type, HOST_WIDE_INT low)
1275 {
1276 gcc_assert (type);
1277 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1278 }
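
/* Usage sketch (editor's addition, illustrative only):

     tree four = build_int_cst (integer_type_node, 4);
     tree sz   = build_int_cstu (size_type_node, 16);
     tree neg  = build_int_cst_type (ssizetype, -1);

   All three routes end up in wide_int_to_tree below, so equal values of the
   same type share a single INTEGER_CST node.  */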
1279
1280 /* Construct a tree of type TYPE with the value given by CST. The signedness
1281 of CST is assumed to be the same as the signedness of TYPE. */
1282
1283 tree
1284 double_int_to_tree (tree type, double_int cst)
1285 {
1286 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1287 }
1288
1289 /* We force the wide_int CST to the range of the type TYPE by sign or
1290 zero extending it. OVERFLOWABLE indicates if we are interested in
1291 overflow of the value; when it is > 0 we are only interested in
1292 signed overflow, and when it is < 0 we are interested in any
1293 overflow. OVERFLOWED indicates whether overflow has already
1294 occurred. We force the value to be within the range of TYPE (by
1295 setting to 0 or 1 all the bits outside the type's range). We set
1296 TREE_OVERFLOW on the result if
1297 OVERFLOWED is nonzero,
1298 or OVERFLOWABLE is > 0 and signed overflow occurs,
1299 or OVERFLOWABLE is < 0 and any overflow occurs.
1300 We return a new tree node for the extended wide_int. The node
1301 is shared if no overflow flags are set. */
1302
1303
1304 tree
1305 force_fit_type (tree type, const wide_int_ref &cst,
1306 int overflowable, bool overflowed)
1307 {
1308 signop sign = TYPE_SIGN (type);
1309
1310 /* If we need to set overflow flags, return a new unshared node. */
1311 if (overflowed || !wi::fits_to_tree_p (cst, type))
1312 {
1313 if (overflowed
1314 || overflowable < 0
1315 || (overflowable > 0 && sign == SIGNED))
1316 {
1317 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1318 tree t = build_new_int_cst (type, tmp);
1319 TREE_OVERFLOW (t) = 1;
1320 return t;
1321 }
1322 }
1323
1324 /* Else build a shared node. */
1325 return wide_int_to_tree (type, cst);
1326 }
1327
1328 /* These are the hash table functions for the hash table of shared
1329 INTEGER_CST nodes. */
1330
1331 /* Return the hash code code X, an INTEGER_CST. */
1332
1333 hashval_t
1334 int_cst_hasher::hash (tree x)
1335 {
1336 const_tree const t = x;
1337 hashval_t code = TYPE_UID (TREE_TYPE (t));
1338 int i;
1339
1340 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1341 code ^= TREE_INT_CST_ELT (t, i);
1342
1343 return code;
1344 }
1345
1346 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1347 is the same as that given by Y, also an INTEGER_CST tree node. */
1348
1349 bool
1350 int_cst_hasher::equal (tree x, tree y)
1351 {
1352 const_tree const xt = x;
1353 const_tree const yt = y;
1354
1355 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1356 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1357 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1358 return false;
1359
1360 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1361 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1362 return false;
1363
1364 return true;
1365 }
1366
1367 /* Create an INT_CST node of TYPE and value CST.
1368 The returned node is always shared. For small integers we use a
1369 per-type vector cache, for larger ones we use a single hash table.
1370 The value is extended from its precision according to the sign of
1371 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1372 the upper bits and ensures that hashing and value equality based
1373 upon the underlying HOST_WIDE_INTs works without masking. */
1374
1375 tree
1376 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1377 {
1378 tree t;
1379 int ix = -1;
1380 int limit = 0;
1381
1382 gcc_assert (type);
1383 unsigned int prec = TYPE_PRECISION (type);
1384 signop sgn = TYPE_SIGN (type);
1385
1386 /* Verify that everything is canonical. */
1387 int l = pcst.get_len ();
1388 if (l > 1)
1389 {
1390 if (pcst.elt (l - 1) == 0)
1391 gcc_checking_assert (pcst.elt (l - 2) < 0);
1392 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1393 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1394 }
1395
1396 wide_int cst = wide_int::from (pcst, prec, sgn);
1397 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1398
1399 if (ext_len == 1)
1400 {
1401 /* We just need to store a single HOST_WIDE_INT. */
1402 HOST_WIDE_INT hwi;
1403 if (TYPE_UNSIGNED (type))
1404 hwi = cst.to_uhwi ();
1405 else
1406 hwi = cst.to_shwi ();
1407
1408 switch (TREE_CODE (type))
1409 {
1410 case NULLPTR_TYPE:
1411 gcc_assert (hwi == 0);
1412 /* Fallthru. */
1413
1414 case POINTER_TYPE:
1415 case REFERENCE_TYPE:
1416 case POINTER_BOUNDS_TYPE:
1417 /* Cache NULL pointer and zero bounds. */
1418 if (hwi == 0)
1419 {
1420 limit = 1;
1421 ix = 0;
1422 }
1423 break;
1424
1425 case BOOLEAN_TYPE:
1426 /* Cache false or true. */
1427 limit = 2;
1428 if (hwi < 2)
1429 ix = hwi;
1430 break;
1431
1432 case INTEGER_TYPE:
1433 case OFFSET_TYPE:
1434 if (TYPE_SIGN (type) == UNSIGNED)
1435 {
1436 /* Cache [0, N). */
1437 limit = INTEGER_SHARE_LIMIT;
1438 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1439 ix = hwi;
1440 }
1441 else
1442 {
1443 /* Cache [-1, N). */
1444 limit = INTEGER_SHARE_LIMIT + 1;
1445 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1446 ix = hwi + 1;
1447 }
1448 break;
1449
1450 case ENUMERAL_TYPE:
1451 break;
1452
1453 default:
1454 gcc_unreachable ();
1455 }
1456
1457 if (ix >= 0)
1458 {
1459 /* Look for it in the type's vector of small shared ints. */
1460 if (!TYPE_CACHED_VALUES_P (type))
1461 {
1462 TYPE_CACHED_VALUES_P (type) = 1;
1463 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1464 }
1465
1466 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1467 if (t)
1468 /* Make sure no one is clobbering the shared constant. */
1469 gcc_checking_assert (TREE_TYPE (t) == type
1470 && TREE_INT_CST_NUNITS (t) == 1
1471 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1472 && TREE_INT_CST_EXT_NUNITS (t) == 1
1473 && TREE_INT_CST_ELT (t, 0) == hwi);
1474 else
1475 {
1476 /* Create a new shared int. */
1477 t = build_new_int_cst (type, cst);
1478 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1479 }
1480 }
1481 else
1482 {
1483 /* Use the cache of larger shared ints, using int_cst_node as
1484 a temporary. */
1485
1486 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1487 TREE_TYPE (int_cst_node) = type;
1488
1489 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1490 t = *slot;
1491 if (!t)
1492 {
1493 /* Insert this one into the hash table. */
1494 t = int_cst_node;
1495 *slot = t;
1496 /* Make a new node for next time round. */
1497 int_cst_node = make_int_cst (1, 1);
1498 }
1499 }
1500 }
1501 else
1502 {
1503 /* The value either hashes properly or we drop it on the floor
1504 for the gc to take care of. There will not be enough of them
1505 to worry about. */
1506
1507 tree nt = build_new_int_cst (type, cst);
1508 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1509 t = *slot;
1510 if (!t)
1511 {
1512 /* Insert this one into the hash table. */
1513 t = nt;
1514 *slot = t;
1515 }
1516 }
1517
1518 return t;
1519 }
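
/* For example (editor's sketch, not in the original source), small values
   come back from the per-type cache:

     tree a = build_int_cst (integer_type_node, 1);
     tree b = build_int_cst (integer_type_node, 1);

   Here A and B are the same INTEGER_CST node (pointer-equal), while values
   outside the cached range are shared through int_cst_hash_table instead.  */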
1520
1521 void
1522 cache_integer_cst (tree t)
1523 {
1524 tree type = TREE_TYPE (t);
1525 int ix = -1;
1526 int limit = 0;
1527 int prec = TYPE_PRECISION (type);
1528
1529 gcc_assert (!TREE_OVERFLOW (t));
1530
1531 switch (TREE_CODE (type))
1532 {
1533 case NULLPTR_TYPE:
1534 gcc_assert (integer_zerop (t));
1535 /* Fallthru. */
1536
1537 case POINTER_TYPE:
1538 case REFERENCE_TYPE:
1539 /* Cache NULL pointer. */
1540 if (integer_zerop (t))
1541 {
1542 limit = 1;
1543 ix = 0;
1544 }
1545 break;
1546
1547 case BOOLEAN_TYPE:
1548 /* Cache false or true. */
1549 limit = 2;
1550 if (wi::ltu_p (t, 2))
1551 ix = TREE_INT_CST_ELT (t, 0);
1552 break;
1553
1554 case INTEGER_TYPE:
1555 case OFFSET_TYPE:
1556 if (TYPE_UNSIGNED (type))
1557 {
1558 /* Cache 0..N */
1559 limit = INTEGER_SHARE_LIMIT;
1560
1561 /* This is a little hokey, but if the prec is smaller than
1562 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1563 obvious test will not get the correct answer. */
1564 if (prec < HOST_BITS_PER_WIDE_INT)
1565 {
1566 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1567 ix = tree_to_uhwi (t);
1568 }
1569 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1570 ix = tree_to_uhwi (t);
1571 }
1572 else
1573 {
1574 /* Cache -1..N */
1575 limit = INTEGER_SHARE_LIMIT + 1;
1576
1577 if (integer_minus_onep (t))
1578 ix = 0;
1579 else if (!wi::neg_p (t))
1580 {
1581 if (prec < HOST_BITS_PER_WIDE_INT)
1582 {
1583 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1584 ix = tree_to_shwi (t) + 1;
1585 }
1586 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1587 ix = tree_to_shwi (t) + 1;
1588 }
1589 }
1590 break;
1591
1592 case ENUMERAL_TYPE:
1593 break;
1594
1595 default:
1596 gcc_unreachable ();
1597 }
1598
1599 if (ix >= 0)
1600 {
1601 /* Look for it in the type's vector of small shared ints. */
1602 if (!TYPE_CACHED_VALUES_P (type))
1603 {
1604 TYPE_CACHED_VALUES_P (type) = 1;
1605 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1606 }
1607
1608 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1609 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1610 }
1611 else
1612 {
1613 /* Use the cache of larger shared ints. */
1614 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1615 /* If there is already an entry for the number verify it's the
1616 same. */
1617 if (*slot)
1618 gcc_assert (wi::eq_p (tree (*slot), t));
1619 else
1620 /* Otherwise insert this one into the hash table. */
1621 *slot = t;
1622 }
1623 }
1624
1625
1626 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1627 and the rest are zeros. */
1628
1629 tree
1630 build_low_bits_mask (tree type, unsigned bits)
1631 {
1632 gcc_assert (bits <= TYPE_PRECISION (type));
1633
1634 return wide_int_to_tree (type, wi::mask (bits, false,
1635 TYPE_PRECISION (type)));
1636 }
1637
1638 /* Check that X is an integer constant that can be expressed in an (unsigned)
1639 HOST_WIDE_INT without loss of precision. */
1640
1641 bool
1642 cst_and_fits_in_hwi (const_tree x)
1643 {
1644 if (TREE_CODE (x) != INTEGER_CST)
1645 return false;
1646
1647 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1648 return false;
1649
1650 return TREE_INT_CST_NUNITS (x) == 1;
1651 }
1652
1653 /* Build a newly constructed VECTOR_CST node of length LEN. */
1654
1655 tree
1656 make_vector_stat (unsigned len MEM_STAT_DECL)
1657 {
1658 tree t;
1659 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1660
1661 record_node_allocation_statistics (VECTOR_CST, length);
1662
1663 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1664
1665 TREE_SET_CODE (t, VECTOR_CST);
1666 TREE_CONSTANT (t) = 1;
1667
1668 return t;
1669 }
1670
1671 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1672 are in a list pointed to by VALS. */
1673
1674 tree
1675 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1676 {
1677 int over = 0;
1678 unsigned cnt = 0;
1679 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1680 TREE_TYPE (v) = type;
1681
1682 /* Iterate through elements and check for overflow. */
1683 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1684 {
1685 tree value = vals[cnt];
1686
1687 VECTOR_CST_ELT (v, cnt) = value;
1688
1689 /* Don't crash if we get an address constant. */
1690 if (!CONSTANT_CLASS_P (value))
1691 continue;
1692
1693 over |= TREE_OVERFLOW (value);
1694 }
1695
1696 TREE_OVERFLOW (v) = over;
1697 return v;
1698 }
1699
1700 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1701 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1702
1703 tree
1704 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1705 {
1706 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1707 unsigned HOST_WIDE_INT idx;
1708 tree value;
1709
1710 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1711 vec[idx] = value;
1712 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1713 vec[idx] = build_zero_cst (TREE_TYPE (type));
1714
1715 return build_vector (type, vec);
1716 }
1717
1718 /* Build a vector of type VECTYPE where all the elements are SCs. */
1719 tree
1720 build_vector_from_val (tree vectype, tree sc)
1721 {
1722 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1723
1724 if (sc == error_mark_node)
1725 return sc;
1726
1727 /* Verify that the vector type is suitable for SC. Note that there
1728 is some inconsistency in the type-system with respect to restrict
1729 qualifications of pointers. Vector types always have a main-variant
1730 element type and the qualification is applied to the vector-type.
1731 So TREE_TYPE (vector-type) does not return a properly qualified
1732 vector element-type. */
1733 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1734 TREE_TYPE (vectype)));
1735
1736 if (CONSTANT_CLASS_P (sc))
1737 {
1738 tree *v = XALLOCAVEC (tree, nunits);
1739 for (i = 0; i < nunits; ++i)
1740 v[i] = sc;
1741 return build_vector (vectype, v);
1742 }
1743 else
1744 {
1745 vec<constructor_elt, va_gc> *v;
1746 vec_alloc (v, nunits);
1747 for (i = 0; i < nunits; ++i)
1748 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1749 return build_constructor (vectype, v);
1750 }
1751 }
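
/* Usage sketch (editor's addition, illustrative only; VECTYPE is a
   hypothetical vector type):

     tree ones = build_vector_from_val (vectype,
                                        build_one_cst (TREE_TYPE (vectype)));

   Since the element is a constant this takes the VECTOR_CST branch above;
   a non-constant SC would instead produce a CONSTRUCTOR.  */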
1752
1753 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1754 are in the vec pointed to by VALS. */
1755 tree
1756 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1757 {
1758 tree c = make_node (CONSTRUCTOR);
1759 unsigned int i;
1760 constructor_elt *elt;
1761 bool constant_p = true;
1762 bool side_effects_p = false;
1763
1764 TREE_TYPE (c) = type;
1765 CONSTRUCTOR_ELTS (c) = vals;
1766
1767 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1768 {
1769 /* Mostly ctors will have elts that don't have side-effects, so
1770 the usual case is to scan all the elements. Hence a single
1771 loop for both const and side effects, rather than one loop
1772 each (with early outs). */
1773 if (!TREE_CONSTANT (elt->value))
1774 constant_p = false;
1775 if (TREE_SIDE_EFFECTS (elt->value))
1776 side_effects_p = true;
1777 }
1778
1779 TREE_SIDE_EFFECTS (c) = side_effects_p;
1780 TREE_CONSTANT (c) = constant_p;
1781
1782 return c;
1783 }
1784
1785 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1786 INDEX and VALUE. */
1787 tree
1788 build_constructor_single (tree type, tree index, tree value)
1789 {
1790 vec<constructor_elt, va_gc> *v;
1791 constructor_elt elt = {index, value};
1792
1793 vec_alloc (v, 1);
1794 v->quick_push (elt);
1795
1796 return build_constructor (type, v);
1797 }
1798
1799
1800 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1801 are in a list pointed to by VALS. */
1802 tree
1803 build_constructor_from_list (tree type, tree vals)
1804 {
1805 tree t;
1806 vec<constructor_elt, va_gc> *v = NULL;
1807
1808 if (vals)
1809 {
1810 vec_alloc (v, list_length (vals));
1811 for (t = vals; t; t = TREE_CHAIN (t))
1812 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1813 }
1814
1815 return build_constructor (type, v);
1816 }
1817
1818 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1819 of elements, provided as index/value pairs. */
1820
1821 tree
1822 build_constructor_va (tree type, int nelts, ...)
1823 {
1824 vec<constructor_elt, va_gc> *v = NULL;
1825 va_list p;
1826
1827 va_start (p, nelts);
1828 vec_alloc (v, nelts);
1829 while (nelts--)
1830 {
1831 tree index = va_arg (p, tree);
1832 tree value = va_arg (p, tree);
1833 CONSTRUCTOR_APPEND_ELT (v, index, value);
1834 }
1835 va_end (p);
1836 return build_constructor (type, v);
1837 }
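
/* Usage sketch (editor's addition, illustrative only; ARRAY_TYPE_NODE is a
   hypothetical two-element array type):

     tree ctor = build_constructor_va (array_type_node, 2,
                                       size_zero_node, integer_zero_node,
                                       size_one_node, integer_one_node);

   Each index/value pair becomes one constructor_elt, giving the
   initializer { [0] = 0, [1] = 1 }.  */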
1838
1839 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1840
1841 tree
1842 build_fixed (tree type, FIXED_VALUE_TYPE f)
1843 {
1844 tree v;
1845 FIXED_VALUE_TYPE *fp;
1846
1847 v = make_node (FIXED_CST);
1848 fp = ggc_alloc<fixed_value> ();
1849 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1850
1851 TREE_TYPE (v) = type;
1852 TREE_FIXED_CST_PTR (v) = fp;
1853 return v;
1854 }
1855
1856 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1857
1858 tree
1859 build_real (tree type, REAL_VALUE_TYPE d)
1860 {
1861 tree v;
1862 REAL_VALUE_TYPE *dp;
1863 int overflow = 0;
1864
1865 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1866 Consider doing it via real_convert now. */
1867
1868 v = make_node (REAL_CST);
1869 dp = ggc_alloc<real_value> ();
1870 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1871
1872 TREE_TYPE (v) = type;
1873 TREE_REAL_CST_PTR (v) = dp;
1874 TREE_OVERFLOW (v) = overflow;
1875 return v;
1876 }
1877
1878 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1879 node I, converted according to the mode of TYPE. */
1880
1881 REAL_VALUE_TYPE
1882 real_value_from_int_cst (const_tree type, const_tree i)
1883 {
1884 REAL_VALUE_TYPE d;
1885
1886 /* Clear all bits of the real value type so that we can later do
1887 bitwise comparisons to see if two values are the same. */
1888 memset (&d, 0, sizeof d);
1889
1890 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1891 TYPE_SIGN (TREE_TYPE (i)));
1892 return d;
1893 }
1894
1895 /* Given a tree representing an integer constant I, return a tree
1896 representing the same value as a floating-point constant of type TYPE. */
1897
1898 tree
1899 build_real_from_int_cst (tree type, const_tree i)
1900 {
1901 tree v;
1902 int overflow = TREE_OVERFLOW (i);
1903
1904 v = build_real (type, real_value_from_int_cst (type, i));
1905
1906 TREE_OVERFLOW (v) |= overflow;
1907 return v;
1908 }
1909
1910 /* Return a newly constructed STRING_CST node whose value is
1911 the LEN characters at STR.
1912 Note that for a C string literal, LEN should include the trailing NUL.
1913 The TREE_TYPE is not initialized. */
1914
1915 tree
1916 build_string (int len, const char *str)
1917 {
1918 tree s;
1919 size_t length;
1920
1921 /* Do not waste bytes provided by padding of struct tree_string. */
1922 length = len + offsetof (struct tree_string, str) + 1;
1923
1924 record_node_allocation_statistics (STRING_CST, length);
1925
1926 s = (tree) ggc_internal_alloc (length);
1927
1928 memset (s, 0, sizeof (struct tree_typed));
1929 TREE_SET_CODE (s, STRING_CST);
1930 TREE_CONSTANT (s) = 1;
1931 TREE_STRING_LENGTH (s) = len;
1932 memcpy (s->string.str, str, len);
1933 s->string.str[len] = '\0';
1934
1935 return s;
1936 }
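
/* Usage sketch (editor's addition, illustrative only): for the C literal
   "hi" the length passed in includes the trailing NUL,

     tree s = build_string (3, "hi");

   so TREE_STRING_LENGTH (s) == 3; the caller must still set TREE_TYPE (s).  */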
1937
1938 /* Return a newly constructed COMPLEX_CST node whose value is
1939 specified by the real and imaginary parts REAL and IMAG.
1940 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1941 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1942
1943 tree
1944 build_complex (tree type, tree real, tree imag)
1945 {
1946 tree t = make_node (COMPLEX_CST);
1947
1948 TREE_REALPART (t) = real;
1949 TREE_IMAGPART (t) = imag;
1950 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1951 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1952 return t;
1953 }
1954
1955 /* Return a constant of arithmetic type TYPE which is the
1956 multiplicative identity of the set TYPE. */
1957
1958 tree
1959 build_one_cst (tree type)
1960 {
1961 switch (TREE_CODE (type))
1962 {
1963 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1964 case POINTER_TYPE: case REFERENCE_TYPE:
1965 case OFFSET_TYPE:
1966 return build_int_cst (type, 1);
1967
1968 case REAL_TYPE:
1969 return build_real (type, dconst1);
1970
1971 case FIXED_POINT_TYPE:
1972 /* We can only generate 1 for accum types. */
1973 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1974 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1975
1976 case VECTOR_TYPE:
1977 {
1978 tree scalar = build_one_cst (TREE_TYPE (type));
1979
1980 return build_vector_from_val (type, scalar);
1981 }
1982
1983 case COMPLEX_TYPE:
1984 return build_complex (type,
1985 build_one_cst (TREE_TYPE (type)),
1986 build_zero_cst (TREE_TYPE (type)));
1987
1988 default:
1989 gcc_unreachable ();
1990 }
1991 }
1992
1993 /* Return an integer of type TYPE containing all 1's in as much precision as
1994 it contains, or a complex or vector whose subparts are such integers. */
1995
1996 tree
1997 build_all_ones_cst (tree type)
1998 {
1999 if (TREE_CODE (type) == COMPLEX_TYPE)
2000 {
2001 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2002 return build_complex (type, scalar, scalar);
2003 }
2004 else
2005 return build_minus_one_cst (type);
2006 }
2007
2008 /* Return a constant of arithmetic type TYPE which is the
2009 opposite of the multiplicative identity of the set TYPE. */
2010
2011 tree
2012 build_minus_one_cst (tree type)
2013 {
2014 switch (TREE_CODE (type))
2015 {
2016 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2017 case POINTER_TYPE: case REFERENCE_TYPE:
2018 case OFFSET_TYPE:
2019 return build_int_cst (type, -1);
2020
2021 case REAL_TYPE:
2022 return build_real (type, dconstm1);
2023
2024 case FIXED_POINT_TYPE:
2025 /* We can only generate 1 for accum types. */
2026 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2027 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2028 TYPE_MODE (type)));
2029
2030 case VECTOR_TYPE:
2031 {
2032 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2033
2034 return build_vector_from_val (type, scalar);
2035 }
2036
2037 case COMPLEX_TYPE:
2038 return build_complex (type,
2039 build_minus_one_cst (TREE_TYPE (type)),
2040 build_zero_cst (TREE_TYPE (type)));
2041
2042 default:
2043 gcc_unreachable ();
2044 }
2045 }
2046
2047 /* Build a zero constant of type TYPE. This is used by constructor folding
2048 and thus the constant should be represented in memory by
2049 zero(es). */
2050
2051 tree
2052 build_zero_cst (tree type)
2053 {
2054 switch (TREE_CODE (type))
2055 {
2056 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2057 case POINTER_TYPE: case REFERENCE_TYPE:
2058 case OFFSET_TYPE: case NULLPTR_TYPE:
2059 return build_int_cst (type, 0);
2060
2061 case REAL_TYPE:
2062 return build_real (type, dconst0);
2063
2064 case FIXED_POINT_TYPE:
2065 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2066
2067 case VECTOR_TYPE:
2068 {
2069 tree scalar = build_zero_cst (TREE_TYPE (type));
2070
2071 return build_vector_from_val (type, scalar);
2072 }
2073
2074 case COMPLEX_TYPE:
2075 {
2076 tree zero = build_zero_cst (TREE_TYPE (type));
2077
2078 return build_complex (type, zero, zero);
2079 }
2080
2081 default:
2082 if (!AGGREGATE_TYPE_P (type))
2083 return fold_convert (type, integer_zero_node);
2084 return build_constructor (type, NULL);
2085 }
2086 }
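
/* Illustrative sketch of the constant builders above, assuming the
   standard global type nodes (not compiled here):

     tree i1 = build_one_cst (integer_type_node);         -> 1
     tree im1 = build_minus_one_cst (integer_type_node);  -> -1
     tree r0 = build_zero_cst (double_type_node);         -> 0.0
     tree v4 = build_vector_type (integer_type_node, 4);
     tree v1 = build_one_cst (v4);                        -> { 1, 1, 1, 1 }

   For vector and complex types each builder recurses on the element
   type and then assembles the aggregate constant from that scalar.  */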
2087
2088
2089 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2090
2091 tree
2092 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2093 {
2094 tree t;
2095 size_t length = (offsetof (struct tree_binfo, base_binfos)
2096 + vec<tree, va_gc>::embedded_size (base_binfos));
2097
2098 record_node_allocation_statistics (TREE_BINFO, length);
2099
2100 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2101
2102 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2103
2104 TREE_SET_CODE (t, TREE_BINFO);
2105
2106 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2107
2108 return t;
2109 }
2110
2111 /* Create a CASE_LABEL_EXPR tree node and return it. */
2112
2113 tree
2114 build_case_label (tree low_value, tree high_value, tree label_decl)
2115 {
2116 tree t = make_node (CASE_LABEL_EXPR);
2117
2118 TREE_TYPE (t) = void_type_node;
2119 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2120
2121 CASE_LOW (t) = low_value;
2122 CASE_HIGH (t) = high_value;
2123 CASE_LABEL (t) = label_decl;
2124 CASE_CHAIN (t) = NULL_TREE;
2125
2126 return t;
2127 }
2128
2129 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2130 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2131 The latter determines the length of the HOST_WIDE_INT vector. */
2132
2133 tree
2134 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2135 {
2136 tree t;
2137 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2138 + sizeof (struct tree_int_cst));
2139
2140 gcc_assert (len);
2141 record_node_allocation_statistics (INTEGER_CST, length);
2142
2143 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2144
2145 TREE_SET_CODE (t, INTEGER_CST);
2146 TREE_INT_CST_NUNITS (t) = len;
2147 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2148 /* to_offset can only be applied to trees that are offset_int-sized
2149 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2150 must be exactly the precision of offset_int and so LEN is correct. */
2151 if (ext_len <= OFFSET_INT_ELTS)
2152 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2153 else
2154 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2155
2156 TREE_CONSTANT (t) = 1;
2157
2158 return t;
2159 }
2160
2161 /* Build a newly constructed TREE_VEC node of length LEN. */
2162
2163 tree
2164 make_tree_vec_stat (int len MEM_STAT_DECL)
2165 {
2166 tree t;
2167 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2168
2169 record_node_allocation_statistics (TREE_VEC, length);
2170
2171 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2172
2173 TREE_SET_CODE (t, TREE_VEC);
2174 TREE_VEC_LENGTH (t) = len;
2175
2176 return t;
2177 }
2178
2179 /* Grow a TREE_VEC node to new length LEN. */
2180
2181 tree
2182 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2183 {
2184 gcc_assert (TREE_CODE (v) == TREE_VEC);
2185
2186 int oldlen = TREE_VEC_LENGTH (v);
2187 gcc_assert (len > oldlen);
2188
2189 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2190 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2191
2192 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2193
2194 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2195
2196 TREE_VEC_LENGTH (v) = len;
2197
2198 return v;
2199 }
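
/* Sketch of typical TREE_VEC usage (illustrative; grow_tree_vec is
   assumed here to be the allocation-statistics wrapper macro for
   grow_tree_vec_stat declared in tree.h, and the stored values are
   arbitrary):

     tree vec = make_tree_vec (2);
     TREE_VEC_ELT (vec, 0) = integer_zero_node;
     TREE_VEC_ELT (vec, 1) = integer_one_node;
     vec = grow_tree_vec (vec, 4);

   Growing may move the node (ggc_realloc), so callers must use the
   returned pointer and should not assume the new slots are
   initialized.  */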
2200 \f
2201 /* Return 1 if EXPR is the integer constant zero, a complex constant
2202 of zero, or a vector constant all of whose elements are zero. */
2203
2204 int
2205 integer_zerop (const_tree expr)
2206 {
2207 STRIP_NOPS (expr);
2208
2209 switch (TREE_CODE (expr))
2210 {
2211 case INTEGER_CST:
2212 return wi::eq_p (expr, 0);
2213 case COMPLEX_CST:
2214 return (integer_zerop (TREE_REALPART (expr))
2215 && integer_zerop (TREE_IMAGPART (expr)));
2216 case VECTOR_CST:
2217 {
2218 unsigned i;
2219 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2220 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2221 return false;
2222 return true;
2223 }
2224 default:
2225 return false;
2226 }
2227 }
2228
2229 /* Return 1 if EXPR is the integer constant one, the corresponding
2230 complex constant, or a vector constant all of whose elements are one. */
2231
2232 int
2233 integer_onep (const_tree expr)
2234 {
2235 STRIP_NOPS (expr);
2236
2237 switch (TREE_CODE (expr))
2238 {
2239 case INTEGER_CST:
2240 return wi::eq_p (wi::to_widest (expr), 1);
2241 case COMPLEX_CST:
2242 return (integer_onep (TREE_REALPART (expr))
2243 && integer_zerop (TREE_IMAGPART (expr)));
2244 case VECTOR_CST:
2245 {
2246 unsigned i;
2247 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2248 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2249 return false;
2250 return true;
2251 }
2252 default:
2253 return false;
2254 }
2255 }
2256
2257 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2258 return 1 if every piece is the integer constant one. */
2259
2260 int
2261 integer_each_onep (const_tree expr)
2262 {
2263 STRIP_NOPS (expr);
2264
2265 if (TREE_CODE (expr) == COMPLEX_CST)
2266 return (integer_onep (TREE_REALPART (expr))
2267 && integer_onep (TREE_IMAGPART (expr)));
2268 else
2269 return integer_onep (expr);
2270 }
2271
2272 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2273 it contains, or a complex or vector whose subparts are such integers. */
2274
2275 int
2276 integer_all_onesp (const_tree expr)
2277 {
2278 STRIP_NOPS (expr);
2279
2280 if (TREE_CODE (expr) == COMPLEX_CST
2281 && integer_all_onesp (TREE_REALPART (expr))
2282 && integer_all_onesp (TREE_IMAGPART (expr)))
2283 return 1;
2284
2285 else if (TREE_CODE (expr) == VECTOR_CST)
2286 {
2287 unsigned i;
2288 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2289 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2290 return 0;
2291 return 1;
2292 }
2293
2294 else if (TREE_CODE (expr) != INTEGER_CST)
2295 return 0;
2296
2297 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2298 }
2299
2300 /* Return 1 if EXPR is the integer constant minus one. */
2301
2302 int
2303 integer_minus_onep (const_tree expr)
2304 {
2305 STRIP_NOPS (expr);
2306
2307 if (TREE_CODE (expr) == COMPLEX_CST)
2308 return (integer_all_onesp (TREE_REALPART (expr))
2309 && integer_zerop (TREE_IMAGPART (expr)));
2310 else
2311 return integer_all_onesp (expr);
2312 }
2313
2314 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2315 one bit on). */
2316
2317 int
2318 integer_pow2p (const_tree expr)
2319 {
2320 STRIP_NOPS (expr);
2321
2322 if (TREE_CODE (expr) == COMPLEX_CST
2323 && integer_pow2p (TREE_REALPART (expr))
2324 && integer_zerop (TREE_IMAGPART (expr)))
2325 return 1;
2326
2327 if (TREE_CODE (expr) != INTEGER_CST)
2328 return 0;
2329
2330 return wi::popcount (expr) == 1;
2331 }
2332
2333 /* Return 1 if EXPR is an integer constant other than zero or a
2334 complex constant other than zero. */
2335
2336 int
2337 integer_nonzerop (const_tree expr)
2338 {
2339 STRIP_NOPS (expr);
2340
2341 return ((TREE_CODE (expr) == INTEGER_CST
2342 && !wi::eq_p (expr, 0))
2343 || (TREE_CODE (expr) == COMPLEX_CST
2344 && (integer_nonzerop (TREE_REALPART (expr))
2345 || integer_nonzerop (TREE_IMAGPART (expr)))));
2346 }
2347
2348 /* Return 1 if EXPR is the integer constant one. For a vector,
2349 return 1 if every element is the integer constant minus one
2350 (representing the value TRUE). */
2351
2352 int
2353 integer_truep (const_tree expr)
2354 {
2355 STRIP_NOPS (expr);
2356
2357 if (TREE_CODE (expr) == VECTOR_CST)
2358 return integer_all_onesp (expr);
2359 return integer_onep (expr);
2360 }
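
/* Illustrative sketch of the integer predicates above (the operand is
   an arbitrary example constant):

     tree four = build_int_cst (integer_type_node, 4);
     integer_zerop (four)   -> 0
     integer_onep (four)    -> 0
     integer_pow2p (four)   -> 1  (exactly one bit set)

   All of these predicates STRIP_NOPS first, so a no-op cast of a
   constant is treated like the constant itself.  */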
2361
2362 /* Return 1 if EXPR is the fixed-point constant zero. */
2363
2364 int
2365 fixed_zerop (const_tree expr)
2366 {
2367 return (TREE_CODE (expr) == FIXED_CST
2368 && TREE_FIXED_CST (expr).data.is_zero ());
2369 }
2370
2371 /* Return the base-2 logarithm (the exponent) of a tree node known to
2372 be a power of two. */
2373
2374 int
2375 tree_log2 (const_tree expr)
2376 {
2377 STRIP_NOPS (expr);
2378
2379 if (TREE_CODE (expr) == COMPLEX_CST)
2380 return tree_log2 (TREE_REALPART (expr));
2381
2382 return wi::exact_log2 (expr);
2383 }
2384
2385 /* Similar, but return the largest integer Y such that 2 ** Y is less
2386 than or equal to EXPR. */
2387
2388 int
2389 tree_floor_log2 (const_tree expr)
2390 {
2391 STRIP_NOPS (expr);
2392
2393 if (TREE_CODE (expr) == COMPLEX_CST)
2394 return tree_log2 (TREE_REALPART (expr));
2395
2396 return wi::floor_log2 (expr);
2397 }
2398
2399 /* Return number of known trailing zero bits in EXPR, or, if the value of
2400 EXPR is known to be zero, the precision of its type. */
2401
2402 unsigned int
2403 tree_ctz (const_tree expr)
2404 {
2405 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2406 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2407 return 0;
2408
2409 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2410 switch (TREE_CODE (expr))
2411 {
2412 case INTEGER_CST:
2413 ret1 = wi::ctz (expr);
2414 return MIN (ret1, prec);
2415 case SSA_NAME:
2416 ret1 = wi::ctz (get_nonzero_bits (expr));
2417 return MIN (ret1, prec);
2418 case PLUS_EXPR:
2419 case MINUS_EXPR:
2420 case BIT_IOR_EXPR:
2421 case BIT_XOR_EXPR:
2422 case MIN_EXPR:
2423 case MAX_EXPR:
2424 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2425 if (ret1 == 0)
2426 return ret1;
2427 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2428 return MIN (ret1, ret2);
2429 case POINTER_PLUS_EXPR:
2430 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2431 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2432 /* The second operand is sizetype, which could in theory be
2433 wider than the pointer's precision. Make sure we never
2434 return more than prec. */
2435 ret2 = MIN (ret2, prec);
2436 return MIN (ret1, ret2);
2437 case BIT_AND_EXPR:
2438 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2439 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2440 return MAX (ret1, ret2);
2441 case MULT_EXPR:
2442 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2443 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2444 return MIN (ret1 + ret2, prec);
2445 case LSHIFT_EXPR:
2446 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2447 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2448 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2449 {
2450 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2451 return MIN (ret1 + ret2, prec);
2452 }
2453 return ret1;
2454 case RSHIFT_EXPR:
2455 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2456 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2457 {
2458 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2459 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2460 if (ret1 > ret2)
2461 return ret1 - ret2;
2462 }
2463 return 0;
2464 case TRUNC_DIV_EXPR:
2465 case CEIL_DIV_EXPR:
2466 case FLOOR_DIV_EXPR:
2467 case ROUND_DIV_EXPR:
2468 case EXACT_DIV_EXPR:
2469 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2470 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2471 {
2472 int l = tree_log2 (TREE_OPERAND (expr, 1));
2473 if (l >= 0)
2474 {
2475 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2476 ret2 = l;
2477 if (ret1 > ret2)
2478 return ret1 - ret2;
2479 }
2480 }
2481 return 0;
2482 CASE_CONVERT:
2483 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2484 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2485 ret1 = prec;
2486 return MIN (ret1, prec);
2487 case SAVE_EXPR:
2488 return tree_ctz (TREE_OPERAND (expr, 0));
2489 case COND_EXPR:
2490 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2491 if (ret1 == 0)
2492 return 0;
2493 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2494 return MIN (ret1, ret2);
2495 case COMPOUND_EXPR:
2496 return tree_ctz (TREE_OPERAND (expr, 1));
2497 case ADDR_EXPR:
2498 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2499 if (ret1 > BITS_PER_UNIT)
2500 {
2501 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2502 return MIN (ret1, prec);
2503 }
2504 return 0;
2505 default:
2506 return 0;
2507 }
2508 }
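
/* Informal examples of what tree_ctz can derive (X stands for an
   expression whose trailing zero bits are unknown; the trees are
   written as source expressions for brevity):

     tree_ctz of the constant 24    -> 3
     tree_ctz of X * 8              -> tree_ctz (X) + 3, capped at prec
     tree_ctz of X & 16             -> at least 4 (MAX of the operands)

   Except for constants, the result is a conservative lower bound on the
   number of trailing zero bits, never an overestimate.  */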
2509
2510 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2511 decimal float constants, so don't return 1 for them. */
2512
2513 int
2514 real_zerop (const_tree expr)
2515 {
2516 STRIP_NOPS (expr);
2517
2518 switch (TREE_CODE (expr))
2519 {
2520 case REAL_CST:
2521 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2522 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2523 case COMPLEX_CST:
2524 return real_zerop (TREE_REALPART (expr))
2525 && real_zerop (TREE_IMAGPART (expr));
2526 case VECTOR_CST:
2527 {
2528 unsigned i;
2529 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2530 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2531 return false;
2532 return true;
2533 }
2534 default:
2535 return false;
2536 }
2537 }
2538
2539 /* Return 1 if EXPR is the real constant one in real or complex form.
2540 Trailing zeroes matter for decimal float constants, so don't return
2541 1 for them. */
2542
2543 int
2544 real_onep (const_tree expr)
2545 {
2546 STRIP_NOPS (expr);
2547
2548 switch (TREE_CODE (expr))
2549 {
2550 case REAL_CST:
2551 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2552 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2553 case COMPLEX_CST:
2554 return real_onep (TREE_REALPART (expr))
2555 && real_zerop (TREE_IMAGPART (expr));
2556 case VECTOR_CST:
2557 {
2558 unsigned i;
2559 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2560 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2561 return false;
2562 return true;
2563 }
2564 default:
2565 return false;
2566 }
2567 }
2568
2569 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2570 matter for decimal float constants, so don't return 1 for them. */
2571
2572 int
2573 real_minus_onep (const_tree expr)
2574 {
2575 STRIP_NOPS (expr);
2576
2577 switch (TREE_CODE (expr))
2578 {
2579 case REAL_CST:
2580 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2581 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2582 case COMPLEX_CST:
2583 return real_minus_onep (TREE_REALPART (expr))
2584 && real_zerop (TREE_IMAGPART (expr));
2585 case VECTOR_CST:
2586 {
2587 unsigned i;
2588 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2589 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2590 return false;
2591 return true;
2592 }
2593 default:
2594 return false;
2595 }
2596 }
2597
2598 /* Nonzero if EXP is a constant or a cast of a constant. */
2599
2600 int
2601 really_constant_p (const_tree exp)
2602 {
2603 /* This is not quite the same as STRIP_NOPS. It does more. */
2604 while (CONVERT_EXPR_P (exp)
2605 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2606 exp = TREE_OPERAND (exp, 0);
2607 return TREE_CONSTANT (exp);
2608 }
2609 \f
2610 /* Return first list element whose TREE_VALUE is ELEM.
2611 Return 0 if ELEM is not in LIST. */
2612
2613 tree
2614 value_member (tree elem, tree list)
2615 {
2616 while (list)
2617 {
2618 if (elem == TREE_VALUE (list))
2619 return list;
2620 list = TREE_CHAIN (list);
2621 }
2622 return NULL_TREE;
2623 }
2624
2625 /* Return first list element whose TREE_PURPOSE is ELEM.
2626 Return 0 if ELEM is not in LIST. */
2627
2628 tree
2629 purpose_member (const_tree elem, tree list)
2630 {
2631 while (list)
2632 {
2633 if (elem == TREE_PURPOSE (list))
2634 return list;
2635 list = TREE_CHAIN (list);
2636 }
2637 return NULL_TREE;
2638 }
2639
2640 /* Return true if ELEM is in V. */
2641
2642 bool
2643 vec_member (const_tree elem, vec<tree, va_gc> *v)
2644 {
2645 unsigned ix;
2646 tree t;
2647 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2648 if (elem == t)
2649 return true;
2650 return false;
2651 }
2652
2653 /* Return element number IDX (zero-origin) of chain CHAIN, or
2654 NULL_TREE if the chain has fewer than IDX + 1 elements. */
2655
2656 tree
2657 chain_index (int idx, tree chain)
2658 {
2659 for (; chain && idx > 0; --idx)
2660 chain = TREE_CHAIN (chain);
2661 return chain;
2662 }
2663
2664 /* Return nonzero if ELEM is part of the chain CHAIN. */
2665
2666 int
2667 chain_member (const_tree elem, const_tree chain)
2668 {
2669 while (chain)
2670 {
2671 if (elem == chain)
2672 return 1;
2673 chain = DECL_CHAIN (chain);
2674 }
2675
2676 return 0;
2677 }
2678
2679 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2680 We expect a null pointer to mark the end of the chain.
2681 This is the Lisp primitive `length'. */
2682
2683 int
2684 list_length (const_tree t)
2685 {
2686 const_tree p = t;
2687 #ifdef ENABLE_TREE_CHECKING
2688 const_tree q = t;
2689 #endif
2690 int len = 0;
2691
2692 while (p)
2693 {
2694 p = TREE_CHAIN (p);
2695 #ifdef ENABLE_TREE_CHECKING
2696 if (len % 2)
2697 q = TREE_CHAIN (q);
2698 gcc_assert (p != q);
2699 #endif
2700 len++;
2701 }
2702
2703 return len;
2704 }
2705
2706 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2707 UNION_TYPE TYPE, or NULL_TREE if none. */
2708
2709 tree
2710 first_field (const_tree type)
2711 {
2712 tree t = TYPE_FIELDS (type);
2713 while (t && TREE_CODE (t) != FIELD_DECL)
2714 t = TREE_CHAIN (t);
2715 return t;
2716 }
2717
2718 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2719 by modifying the last node in chain 1 to point to chain 2.
2720 This is the Lisp primitive `nconc'. */
2721
2722 tree
2723 chainon (tree op1, tree op2)
2724 {
2725 tree t1;
2726
2727 if (!op1)
2728 return op2;
2729 if (!op2)
2730 return op1;
2731
2732 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2733 continue;
2734 TREE_CHAIN (t1) = op2;
2735
2736 #ifdef ENABLE_TREE_CHECKING
2737 {
2738 tree t2;
2739 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2740 gcc_assert (t2 != t1);
2741 }
2742 #endif
2743
2744 return op1;
2745 }
2746
2747 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2748
2749 tree
2750 tree_last (tree chain)
2751 {
2752 tree next;
2753 if (chain)
2754 while ((next = TREE_CHAIN (chain)))
2755 chain = next;
2756 return chain;
2757 }
2758
2759 /* Reverse the order of elements in the chain T,
2760 and return the new head of the chain (old last element). */
2761
2762 tree
2763 nreverse (tree t)
2764 {
2765 tree prev = 0, decl, next;
2766 for (decl = t; decl; decl = next)
2767 {
2768 /* We shouldn't be using this function to reverse BLOCK chains; we
2769 have blocks_nreverse for that. */
2770 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2771 next = TREE_CHAIN (decl);
2772 TREE_CHAIN (decl) = prev;
2773 prev = decl;
2774 }
2775 return prev;
2776 }
2777 \f
2778 /* Return a newly created TREE_LIST node whose
2779 purpose and value fields are PARM and VALUE. */
2780
2781 tree
2782 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2783 {
2784 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2785 TREE_PURPOSE (t) = parm;
2786 TREE_VALUE (t) = value;
2787 return t;
2788 }
2789
2790 /* Build a chain of TREE_LIST nodes from a vector. */
2791
2792 tree
2793 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2794 {
2795 tree ret = NULL_TREE;
2796 tree *pp = &ret;
2797 unsigned int i;
2798 tree t;
2799 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2800 {
2801 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2802 pp = &TREE_CHAIN (*pp);
2803 }
2804 return ret;
2805 }
2806
2807 /* Return a newly created TREE_LIST node whose
2808 purpose and value fields are PURPOSE and VALUE
2809 and whose TREE_CHAIN is CHAIN. */
2810
2811 tree
2812 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2813 {
2814 tree node;
2815
2816 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2817 memset (node, 0, sizeof (struct tree_common));
2818
2819 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2820
2821 TREE_SET_CODE (node, TREE_LIST);
2822 TREE_CHAIN (node) = chain;
2823 TREE_PURPOSE (node) = purpose;
2824 TREE_VALUE (node) = value;
2825 return node;
2826 }
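
/* Sketch of typical TREE_LIST construction with the helpers above and
   the chain utilities earlier in this file (illustrative only; the
   values stored are arbitrary):

     tree l = NULL_TREE;
     l = tree_cons (NULL_TREE, integer_zero_node, l);
     l = tree_cons (NULL_TREE, integer_one_node, l);
     l = nreverse (l);                    (restores insertion order)
     int n = list_length (l);             (n is now 2)
     l = chainon (l, build_tree_list (NULL_TREE, size_zero_node));

   TREE_PURPOSE is left NULL_TREE here; front ends often use it as a key
   paired with TREE_VALUE.  */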
2827
2828 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2829 trees. */
2830
2831 vec<tree, va_gc> *
2832 ctor_to_vec (tree ctor)
2833 {
2834 vec<tree, va_gc> *vec;
2835 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2836 unsigned int ix;
2837 tree val;
2838
2839 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2840 vec->quick_push (val);
2841
2842 return vec;
2843 }
2844 \f
2845 /* Return the size nominally occupied by an object of type TYPE
2846 when it resides in memory. The value is measured in units of bytes,
2847 and its data type is that normally used for type sizes
2848 (which is the first type created by make_signed_type or
2849 make_unsigned_type). */
2850
2851 tree
2852 size_in_bytes (const_tree type)
2853 {
2854 tree t;
2855
2856 if (type == error_mark_node)
2857 return integer_zero_node;
2858
2859 type = TYPE_MAIN_VARIANT (type);
2860 t = TYPE_SIZE_UNIT (type);
2861
2862 if (t == 0)
2863 {
2864 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2865 return size_zero_node;
2866 }
2867
2868 return t;
2869 }
2870
2871 /* Return the size of TYPE (in bytes) as a wide integer
2872 or return -1 if the size can vary or is larger than an integer. */
2873
2874 HOST_WIDE_INT
2875 int_size_in_bytes (const_tree type)
2876 {
2877 tree t;
2878
2879 if (type == error_mark_node)
2880 return 0;
2881
2882 type = TYPE_MAIN_VARIANT (type);
2883 t = TYPE_SIZE_UNIT (type);
2884
2885 if (t && tree_fits_uhwi_p (t))
2886 return TREE_INT_CST_LOW (t);
2887 else
2888 return -1;
2889 }
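
/* Illustrative comparison of the two size queries above, assuming a
   target where int is 4 bytes and VLA is some variably sized type:

     size_in_bytes (integer_type_node)      -> sizetype constant 4
     int_size_in_bytes (integer_type_node)  -> 4 (a HOST_WIDE_INT)
     int_size_in_bytes (VLA)                -> -1, not a compile-time
                                               constant

   size_in_bytes never returns -1; for an incomplete type it reports the
   error through the language hook and returns size_zero_node.  */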
2890
2891 /* Return the maximum size of TYPE (in bytes) as a wide integer
2892 or return -1 if the size can vary or is larger than an integer. */
2893
2894 HOST_WIDE_INT
2895 max_int_size_in_bytes (const_tree type)
2896 {
2897 HOST_WIDE_INT size = -1;
2898 tree size_tree;
2899
2900 /* If this is an array type, check for a possible MAX_SIZE attached. */
2901
2902 if (TREE_CODE (type) == ARRAY_TYPE)
2903 {
2904 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2905
2906 if (size_tree && tree_fits_uhwi_p (size_tree))
2907 size = tree_to_uhwi (size_tree);
2908 }
2909
2910 /* If we still haven't been able to get a size, see if the language
2911 can compute a maximum size. */
2912
2913 if (size == -1)
2914 {
2915 size_tree = lang_hooks.types.max_size (type);
2916
2917 if (size_tree && tree_fits_uhwi_p (size_tree))
2918 size = tree_to_uhwi (size_tree);
2919 }
2920
2921 return size;
2922 }
2923 \f
2924 /* Return the bit position of FIELD, in bits from the start of the record.
2925 This is a tree of type bitsizetype. */
2926
2927 tree
2928 bit_position (const_tree field)
2929 {
2930 return bit_from_pos (DECL_FIELD_OFFSET (field),
2931 DECL_FIELD_BIT_OFFSET (field));
2932 }
2933 \f
2934 /* Return the byte position of FIELD, in bytes from the start of the record.
2935 This is a tree of type sizetype. */
2936
2937 tree
2938 byte_position (const_tree field)
2939 {
2940 return byte_from_pos (DECL_FIELD_OFFSET (field),
2941 DECL_FIELD_BIT_OFFSET (field));
2942 }
2943
2944 /* Likewise, but return as an integer. It must be representable in
2945 that way (since it could be a signed value, we don't have the
2946 option of returning -1 like int_size_in_bytes can). */
2947
2948 HOST_WIDE_INT
2949 int_byte_position (const_tree field)
2950 {
2951 return tree_to_shwi (byte_position (field));
2952 }
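
/* Sketch of the field-position accessors, for a FIELD_DECL F that the
   target's layout places 8 bytes into its record (the numbers are an
   example, not a guarantee):

     bit_position (F)        -> bitsizetype constant 64
     byte_position (F)       -> sizetype constant 8
     int_byte_position (F)   -> 8 as a HOST_WIDE_INT

   int_byte_position relies on tree_to_shwi, which asserts that the
   position fits in a signed host-wide integer.  */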
2953 \f
2954 /* Return the strictest alignment, in bits, that T is known to have. */
2955
2956 unsigned int
2957 expr_align (const_tree t)
2958 {
2959 unsigned int align0, align1;
2960
2961 switch (TREE_CODE (t))
2962 {
2963 CASE_CONVERT: case NON_LVALUE_EXPR:
2964 /* If we have conversions, we know that the alignment of the
2965 object must meet each of the alignments of the types. */
2966 align0 = expr_align (TREE_OPERAND (t, 0));
2967 align1 = TYPE_ALIGN (TREE_TYPE (t));
2968 return MAX (align0, align1);
2969
2970 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2971 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2972 case CLEANUP_POINT_EXPR:
2973 /* These don't change the alignment of an object. */
2974 return expr_align (TREE_OPERAND (t, 0));
2975
2976 case COND_EXPR:
2977 /* The best we can do is say that the alignment is the least aligned
2978 of the two arms. */
2979 align0 = expr_align (TREE_OPERAND (t, 1));
2980 align1 = expr_align (TREE_OPERAND (t, 2));
2981 return MIN (align0, align1);
2982
2983 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2984 meaningfully, it's always 1. */
2985 case LABEL_DECL: case CONST_DECL:
2986 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2987 case FUNCTION_DECL:
2988 gcc_assert (DECL_ALIGN (t) != 0);
2989 return DECL_ALIGN (t);
2990
2991 default:
2992 break;
2993 }
2994
2995 /* Otherwise take the alignment from that of the type. */
2996 return TYPE_ALIGN (TREE_TYPE (t));
2997 }
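
/* Informal examples of expr_align (the numbers depend on the target
   ABI and are only illustrative):

     a VAR_DECL of type double    -> its DECL_ALIGN, e.g. 64 bits
     a COND_EXPR                  -> MIN of the alignments of its arms
     a conversion such as (T) x   -> MAX of the operand's alignment and
                                     TYPE_ALIGN of T

   The value is in bits and is a conservative guarantee rather than the
   exact runtime alignment.  */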
2998 \f
2999 /* Return, as a tree node, the number of elements for TYPE (which is an
3000 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3001
3002 tree
3003 array_type_nelts (const_tree type)
3004 {
3005 tree index_type, min, max;
3006
3007 /* If they did it with unspecified bounds, then we should have already
3008 given an error about it before we got here. */
3009 if (! TYPE_DOMAIN (type))
3010 return error_mark_node;
3011
3012 index_type = TYPE_DOMAIN (type);
3013 min = TYPE_MIN_VALUE (index_type);
3014 max = TYPE_MAX_VALUE (index_type);
3015
3016 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3017 if (!max)
3018 return error_mark_node;
3019
3020 return (integer_zerop (min)
3021 ? max
3022 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3023 }
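
/* Example: for an array type declared as `int a[10]', whose domain is
   0 .. 9, array_type_nelts returns the tree for 9 (the maximum index),
   because the minimum is zero; for a domain such as 1 .. 10 it returns
   the folded difference 10 - 1.  Callers that want the element count
   itself must add one.  */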
3024 \f
3025 /* If arg is static -- a reference to an object in static storage -- then
3026 return the object. This is not the same as the C meaning of `static'.
3027 If arg isn't static, return NULL. */
3028
3029 tree
3030 staticp (tree arg)
3031 {
3032 switch (TREE_CODE (arg))
3033 {
3034 case FUNCTION_DECL:
3035 /* Nested functions are static, even though taking their address will
3036 involve a trampoline as we unnest the nested function and create
3037 the trampoline on the tree level. */
3038 return arg;
3039
3040 case VAR_DECL:
3041 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3042 && ! DECL_THREAD_LOCAL_P (arg)
3043 && ! DECL_DLLIMPORT_P (arg)
3044 ? arg : NULL);
3045
3046 case CONST_DECL:
3047 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3048 ? arg : NULL);
3049
3050 case CONSTRUCTOR:
3051 return TREE_STATIC (arg) ? arg : NULL;
3052
3053 case LABEL_DECL:
3054 case STRING_CST:
3055 return arg;
3056
3057 case COMPONENT_REF:
3058 /* If the thing being referenced is not a field, then it is
3059 something language specific. */
3060 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3061
3062 /* If we are referencing a bitfield, we can't evaluate an
3063 ADDR_EXPR at compile time and so it isn't a constant. */
3064 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3065 return NULL;
3066
3067 return staticp (TREE_OPERAND (arg, 0));
3068
3069 case BIT_FIELD_REF:
3070 return NULL;
3071
3072 case INDIRECT_REF:
3073 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3074
3075 case ARRAY_REF:
3076 case ARRAY_RANGE_REF:
3077 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3078 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3079 return staticp (TREE_OPERAND (arg, 0));
3080 else
3081 return NULL;
3082
3083 case COMPOUND_LITERAL_EXPR:
3084 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3085
3086 default:
3087 return NULL;
3088 }
3089 }
3090
3091 \f
3092
3093
3094 /* Return whether OP is a DECL whose address is function-invariant. */
3095
3096 bool
3097 decl_address_invariant_p (const_tree op)
3098 {
3099 /* The conditions below are slightly less strict than those in
3100 staticp. */
3101
3102 switch (TREE_CODE (op))
3103 {
3104 case PARM_DECL:
3105 case RESULT_DECL:
3106 case LABEL_DECL:
3107 case FUNCTION_DECL:
3108 return true;
3109
3110 case VAR_DECL:
3111 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3112 || DECL_THREAD_LOCAL_P (op)
3113 || DECL_CONTEXT (op) == current_function_decl
3114 || decl_function_context (op) == current_function_decl)
3115 return true;
3116 break;
3117
3118 case CONST_DECL:
3119 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3120 || decl_function_context (op) == current_function_decl)
3121 return true;
3122 break;
3123
3124 default:
3125 break;
3126 }
3127
3128 return false;
3129 }
3130
3131 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3132
3133 bool
3134 decl_address_ip_invariant_p (const_tree op)
3135 {
3136 /* The conditions below are slightly less strict than those in
3137 staticp. */
3138
3139 switch (TREE_CODE (op))
3140 {
3141 case LABEL_DECL:
3142 case FUNCTION_DECL:
3143 case STRING_CST:
3144 return true;
3145
3146 case VAR_DECL:
3147 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3148 && !DECL_DLLIMPORT_P (op))
3149 || DECL_THREAD_LOCAL_P (op))
3150 return true;
3151 break;
3152
3153 case CONST_DECL:
3154 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3155 return true;
3156 break;
3157
3158 default:
3159 break;
3160 }
3161
3162 return false;
3163 }
3164
3165
3166 /* Return true if T is function-invariant (internal function, does
3167 not handle arithmetic; that's handled in skip_simple_arithmetic and
3168 tree_invariant_p). */
3169
3170 static bool tree_invariant_p (tree t);
3171
3172 static bool
3173 tree_invariant_p_1 (tree t)
3174 {
3175 tree op;
3176
3177 if (TREE_CONSTANT (t)
3178 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3179 return true;
3180
3181 switch (TREE_CODE (t))
3182 {
3183 case SAVE_EXPR:
3184 return true;
3185
3186 case ADDR_EXPR:
3187 op = TREE_OPERAND (t, 0);
3188 while (handled_component_p (op))
3189 {
3190 switch (TREE_CODE (op))
3191 {
3192 case ARRAY_REF:
3193 case ARRAY_RANGE_REF:
3194 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3195 || TREE_OPERAND (op, 2) != NULL_TREE
3196 || TREE_OPERAND (op, 3) != NULL_TREE)
3197 return false;
3198 break;
3199
3200 case COMPONENT_REF:
3201 if (TREE_OPERAND (op, 2) != NULL_TREE)
3202 return false;
3203 break;
3204
3205 default:;
3206 }
3207 op = TREE_OPERAND (op, 0);
3208 }
3209
3210 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3211
3212 default:
3213 break;
3214 }
3215
3216 return false;
3217 }
3218
3219 /* Return true if T is function-invariant. */
3220
3221 static bool
3222 tree_invariant_p (tree t)
3223 {
3224 tree inner = skip_simple_arithmetic (t);
3225 return tree_invariant_p_1 (inner);
3226 }
3227
3228 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3229 Do this to any expression which may be used in more than one place,
3230 but must be evaluated only once.
3231
3232 Normally, expand_expr would reevaluate the expression each time.
3233 Calling save_expr produces something that is evaluated and recorded
3234 the first time expand_expr is called on it. Subsequent calls to
3235 expand_expr just reuse the recorded value.
3236
3237 The call to expand_expr that generates code that actually computes
3238 the value is the first call *at compile time*. Subsequent calls
3239 *at compile time* generate code to use the saved value.
3240 This produces correct result provided that *at run time* control
3241 always flows through the insns made by the first expand_expr
3242 before reaching the other places where the save_expr was evaluated.
3243 You, the caller of save_expr, must make sure this is so.
3244
3245 Constants, and certain read-only nodes, are returned with no
3246 SAVE_EXPR because that is safe. Expressions containing placeholders
3247 are not touched; see tree.def for an explanation of what these
3248 are used for. */
3249
3250 tree
3251 save_expr (tree expr)
3252 {
3253 tree t = fold (expr);
3254 tree inner;
3255
3256 /* If the tree evaluates to a constant, then we don't want to hide that
3257 fact (i.e. this allows further folding, and direct checks for constants).
3258 However, a read-only object that has side effects cannot be bypassed.
3259 Since it is no problem to reevaluate literals, we just return the
3260 literal node. */
3261 inner = skip_simple_arithmetic (t);
3262 if (TREE_CODE (inner) == ERROR_MARK)
3263 return inner;
3264
3265 if (tree_invariant_p_1 (inner))
3266 return t;
3267
3268 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3269 it means that the size or offset of some field of an object depends on
3270 the value within another field.
3271
3272 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3273 and some variable since it would then need to be both evaluated once and
3274 evaluated more than once. Front-ends must assure this case cannot
3275 happen by surrounding any such subexpressions in their own SAVE_EXPR
3276 and forcing evaluation at the proper time. */
3277 if (contains_placeholder_p (inner))
3278 return t;
3279
3280 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3281 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3282
3283 /* This expression might be placed ahead of a jump to ensure that the
3284 value was computed on both sides of the jump. So make sure it isn't
3285 eliminated as dead. */
3286 TREE_SIDE_EFFECTS (t) = 1;
3287 return t;
3288 }
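
/* Minimal sketch of the intended use of save_expr (illustrative; EXP is
   any tree a caller wants to reference several times but evaluate only
   once):

     tree s = save_expr (exp);
     tree square_plus = fold_build2 (PLUS_EXPR, TREE_TYPE (s),
                                     fold_build2 (MULT_EXPR,
                                                  TREE_TYPE (s), s, s),
                                     s);

   The resulting tree computes exp*exp + exp while emitting the code for
   EXP once.  If EXP is already invariant (a constant, a SAVE_EXPR,
   certain read-only nodes), the folded expression is returned without a
   SAVE_EXPR wrapper, so it is cheap to call save_expr unconditionally.  */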
3289
3290 /* Look inside EXPR into any simple arithmetic operations. Return the
3291 outermost non-arithmetic or non-invariant node. */
3292
3293 tree
3294 skip_simple_arithmetic (tree expr)
3295 {
3296 /* We don't care about whether this can be used as an lvalue in this
3297 context. */
3298 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3299 expr = TREE_OPERAND (expr, 0);
3300
3301 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3302 a constant, it will be more efficient to not make another SAVE_EXPR since
3303 it will allow better simplification and GCSE will be able to merge the
3304 computations if they actually occur. */
3305 while (true)
3306 {
3307 if (UNARY_CLASS_P (expr))
3308 expr = TREE_OPERAND (expr, 0);
3309 else if (BINARY_CLASS_P (expr))
3310 {
3311 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3312 expr = TREE_OPERAND (expr, 0);
3313 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3314 expr = TREE_OPERAND (expr, 1);
3315 else
3316 break;
3317 }
3318 else
3319 break;
3320 }
3321
3322 return expr;
3323 }
3324
3325 /* Look inside EXPR into simple arithmetic operations involving constants.
3326 Return the outermost non-arithmetic or non-constant node. */
3327
3328 tree
3329 skip_simple_constant_arithmetic (tree expr)
3330 {
3331 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3332 expr = TREE_OPERAND (expr, 0);
3333
3334 while (true)
3335 {
3336 if (UNARY_CLASS_P (expr))
3337 expr = TREE_OPERAND (expr, 0);
3338 else if (BINARY_CLASS_P (expr))
3339 {
3340 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3341 expr = TREE_OPERAND (expr, 0);
3342 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3343 expr = TREE_OPERAND (expr, 1);
3344 else
3345 break;
3346 }
3347 else
3348 break;
3349 }
3350
3351 return expr;
3352 }
3353
3354 /* Return which tree structure is used by T. */
3355
3356 enum tree_node_structure_enum
3357 tree_node_structure (const_tree t)
3358 {
3359 const enum tree_code code = TREE_CODE (t);
3360 return tree_node_structure_for_code (code);
3361 }
3362
3363 /* Set various status flags when building a CALL_EXPR object T. */
3364
3365 static void
3366 process_call_operands (tree t)
3367 {
3368 bool side_effects = TREE_SIDE_EFFECTS (t);
3369 bool read_only = false;
3370 int i = call_expr_flags (t);
3371
3372 /* Calls have side-effects, except those to const or pure functions. */
3373 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3374 side_effects = true;
3375 /* Propagate TREE_READONLY of arguments for const functions. */
3376 if (i & ECF_CONST)
3377 read_only = true;
3378
3379 if (!side_effects || read_only)
3380 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3381 {
3382 tree op = TREE_OPERAND (t, i);
3383 if (op && TREE_SIDE_EFFECTS (op))
3384 side_effects = true;
3385 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3386 read_only = false;
3387 }
3388
3389 TREE_SIDE_EFFECTS (t) = side_effects;
3390 TREE_READONLY (t) = read_only;
3391 }
3392 \f
3393 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3394 size or offset that depends on a field within a record. */
3395
3396 bool
3397 contains_placeholder_p (const_tree exp)
3398 {
3399 enum tree_code code;
3400
3401 if (!exp)
3402 return 0;
3403
3404 code = TREE_CODE (exp);
3405 if (code == PLACEHOLDER_EXPR)
3406 return 1;
3407
3408 switch (TREE_CODE_CLASS (code))
3409 {
3410 case tcc_reference:
3411 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3412 position computations since they will be converted into a
3413 WITH_RECORD_EXPR involving the reference, which we assume
3414 here will be valid. */
3415 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3416
3417 case tcc_exceptional:
3418 if (code == TREE_LIST)
3419 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3420 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3421 break;
3422
3423 case tcc_unary:
3424 case tcc_binary:
3425 case tcc_comparison:
3426 case tcc_expression:
3427 switch (code)
3428 {
3429 case COMPOUND_EXPR:
3430 /* Ignoring the first operand isn't quite right, but works best. */
3431 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3432
3433 case COND_EXPR:
3434 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3435 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3436 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3437
3438 case SAVE_EXPR:
3439 /* The save_expr function never wraps anything containing
3440 a PLACEHOLDER_EXPR. */
3441 return 0;
3442
3443 default:
3444 break;
3445 }
3446
3447 switch (TREE_CODE_LENGTH (code))
3448 {
3449 case 1:
3450 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3451 case 2:
3452 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3453 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3454 default:
3455 return 0;
3456 }
3457
3458 case tcc_vl_exp:
3459 switch (code)
3460 {
3461 case CALL_EXPR:
3462 {
3463 const_tree arg;
3464 const_call_expr_arg_iterator iter;
3465 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3466 if (CONTAINS_PLACEHOLDER_P (arg))
3467 return 1;
3468 return 0;
3469 }
3470 default:
3471 return 0;
3472 }
3473
3474 default:
3475 return 0;
3476 }
3477 return 0;
3478 }
3479
3480 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3481 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3482 field positions. */
3483
3484 static bool
3485 type_contains_placeholder_1 (const_tree type)
3486 {
3487 /* If the size contains a placeholder or the parent type (the component type
3488 in the case of arrays) involves a placeholder, this type does. */
3489 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3490 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3491 || (!POINTER_TYPE_P (type)
3492 && TREE_TYPE (type)
3493 && type_contains_placeholder_p (TREE_TYPE (type))))
3494 return true;
3495
3496 /* Now do type-specific checks. Note that the last part of the check above
3497 greatly limits what we have to do below. */
3498 switch (TREE_CODE (type))
3499 {
3500 case VOID_TYPE:
3501 case POINTER_BOUNDS_TYPE:
3502 case COMPLEX_TYPE:
3503 case ENUMERAL_TYPE:
3504 case BOOLEAN_TYPE:
3505 case POINTER_TYPE:
3506 case OFFSET_TYPE:
3507 case REFERENCE_TYPE:
3508 case METHOD_TYPE:
3509 case FUNCTION_TYPE:
3510 case VECTOR_TYPE:
3511 case NULLPTR_TYPE:
3512 return false;
3513
3514 case INTEGER_TYPE:
3515 case REAL_TYPE:
3516 case FIXED_POINT_TYPE:
3517 /* Here we just check the bounds. */
3518 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3519 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3520
3521 case ARRAY_TYPE:
3522 /* We have already checked the component type above, so just check the
3523 domain type. */
3524 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3525
3526 case RECORD_TYPE:
3527 case UNION_TYPE:
3528 case QUAL_UNION_TYPE:
3529 {
3530 tree field;
3531
3532 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3533 if (TREE_CODE (field) == FIELD_DECL
3534 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3535 || (TREE_CODE (type) == QUAL_UNION_TYPE
3536 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3537 || type_contains_placeholder_p (TREE_TYPE (field))))
3538 return true;
3539
3540 return false;
3541 }
3542
3543 default:
3544 gcc_unreachable ();
3545 }
3546 }
3547
3548 /* Wrapper around above function used to cache its result. */
3549
3550 bool
3551 type_contains_placeholder_p (tree type)
3552 {
3553 bool result;
3554
3555 /* If the contains_placeholder_bits field has been initialized,
3556 then we know the answer. */
3557 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3558 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3559
3560 /* Indicate that we've seen this type node, and the answer is false.
3561 This is what we want to return if we run into recursion via fields. */
3562 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3563
3564 /* Compute the real value. */
3565 result = type_contains_placeholder_1 (type);
3566
3567 /* Store the real value. */
3568 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3569
3570 return result;
3571 }
3572 \f
3573 /* Push tree EXP onto vector QUEUE if it is not already present. */
3574
3575 static void
3576 push_without_duplicates (tree exp, vec<tree> *queue)
3577 {
3578 unsigned int i;
3579 tree iter;
3580
3581 FOR_EACH_VEC_ELT (*queue, i, iter)
3582 if (simple_cst_equal (iter, exp) == 1)
3583 break;
3584
3585 if (!iter)
3586 queue->safe_push (exp);
3587 }
3588
3589 /* Given a tree EXP, find all occurrences of references to fields
3590 in a PLACEHOLDER_EXPR and place them in vector REFS without
3591 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3592 we assume here that EXP contains only arithmetic expressions
3593 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3594 argument list. */
3595
3596 void
3597 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3598 {
3599 enum tree_code code = TREE_CODE (exp);
3600 tree inner;
3601 int i;
3602
3603 /* We handle TREE_LIST and COMPONENT_REF separately. */
3604 if (code == TREE_LIST)
3605 {
3606 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3607 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3608 }
3609 else if (code == COMPONENT_REF)
3610 {
3611 for (inner = TREE_OPERAND (exp, 0);
3612 REFERENCE_CLASS_P (inner);
3613 inner = TREE_OPERAND (inner, 0))
3614 ;
3615
3616 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3617 push_without_duplicates (exp, refs);
3618 else
3619 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3620 }
3621 else
3622 switch (TREE_CODE_CLASS (code))
3623 {
3624 case tcc_constant:
3625 break;
3626
3627 case tcc_declaration:
3628 /* Variables allocated to static storage can stay. */
3629 if (!TREE_STATIC (exp))
3630 push_without_duplicates (exp, refs);
3631 break;
3632
3633 case tcc_expression:
3634 /* This is the pattern built in ada/make_aligning_type. */
3635 if (code == ADDR_EXPR
3636 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3637 {
3638 push_without_duplicates (exp, refs);
3639 break;
3640 }
3641
3642 /* Fall through... */
3643
3644 case tcc_exceptional:
3645 case tcc_unary:
3646 case tcc_binary:
3647 case tcc_comparison:
3648 case tcc_reference:
3649 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3650 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3651 break;
3652
3653 case tcc_vl_exp:
3654 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3655 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3656 break;
3657
3658 default:
3659 gcc_unreachable ();
3660 }
3661 }
3662
3663 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3664 return a tree with all occurrences of references to F in a
3665 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3666 CONST_DECLs. Note that we assume here that EXP contains only
3667 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3668 occurring only in their argument list. */
3669
3670 tree
3671 substitute_in_expr (tree exp, tree f, tree r)
3672 {
3673 enum tree_code code = TREE_CODE (exp);
3674 tree op0, op1, op2, op3;
3675 tree new_tree;
3676
3677 /* We handle TREE_LIST and COMPONENT_REF separately. */
3678 if (code == TREE_LIST)
3679 {
3680 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3681 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3682 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3683 return exp;
3684
3685 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3686 }
3687 else if (code == COMPONENT_REF)
3688 {
3689 tree inner;
3690
3691 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3692 and it is the right field, replace it with R. */
3693 for (inner = TREE_OPERAND (exp, 0);
3694 REFERENCE_CLASS_P (inner);
3695 inner = TREE_OPERAND (inner, 0))
3696 ;
3697
3698 /* The field. */
3699 op1 = TREE_OPERAND (exp, 1);
3700
3701 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3702 return r;
3703
3704 /* If this expression hasn't been completed yet, leave it alone. */
3705 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3706 return exp;
3707
3708 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3709 if (op0 == TREE_OPERAND (exp, 0))
3710 return exp;
3711
3712 new_tree
3713 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3714 }
3715 else
3716 switch (TREE_CODE_CLASS (code))
3717 {
3718 case tcc_constant:
3719 return exp;
3720
3721 case tcc_declaration:
3722 if (exp == f)
3723 return r;
3724 else
3725 return exp;
3726
3727 case tcc_expression:
3728 if (exp == f)
3729 return r;
3730
3731 /* Fall through... */
3732
3733 case tcc_exceptional:
3734 case tcc_unary:
3735 case tcc_binary:
3736 case tcc_comparison:
3737 case tcc_reference:
3738 switch (TREE_CODE_LENGTH (code))
3739 {
3740 case 0:
3741 return exp;
3742
3743 case 1:
3744 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3745 if (op0 == TREE_OPERAND (exp, 0))
3746 return exp;
3747
3748 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3749 break;
3750
3751 case 2:
3752 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3753 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3754
3755 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3756 return exp;
3757
3758 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3759 break;
3760
3761 case 3:
3762 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3763 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3764 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3765
3766 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3767 && op2 == TREE_OPERAND (exp, 2))
3768 return exp;
3769
3770 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3771 break;
3772
3773 case 4:
3774 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3775 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3776 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3777 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3778
3779 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3780 && op2 == TREE_OPERAND (exp, 2)
3781 && op3 == TREE_OPERAND (exp, 3))
3782 return exp;
3783
3784 new_tree
3785 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3786 break;
3787
3788 default:
3789 gcc_unreachable ();
3790 }
3791 break;
3792
3793 case tcc_vl_exp:
3794 {
3795 int i;
3796
3797 new_tree = NULL_TREE;
3798
3799 /* If we are trying to replace F with a constant, inline back
3800 functions which do nothing else than computing a value from
3801 the arguments they are passed. This makes it possible to
3802 fold partially or entirely the replacement expression. */
3803 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3804 {
3805 tree t = maybe_inline_call_in_expr (exp);
3806 if (t)
3807 return SUBSTITUTE_IN_EXPR (t, f, r);
3808 }
3809
3810 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3811 {
3812 tree op = TREE_OPERAND (exp, i);
3813 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3814 if (new_op != op)
3815 {
3816 if (!new_tree)
3817 new_tree = copy_node (exp);
3818 TREE_OPERAND (new_tree, i) = new_op;
3819 }
3820 }
3821
3822 if (new_tree)
3823 {
3824 new_tree = fold (new_tree);
3825 if (TREE_CODE (new_tree) == CALL_EXPR)
3826 process_call_operands (new_tree);
3827 }
3828 else
3829 return exp;
3830 }
3831 break;
3832
3833 default:
3834 gcc_unreachable ();
3835 }
3836
3837 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3838
3839 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3840 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3841
3842 return new_tree;
3843 }
3844
3845 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3846 for it within OBJ, a tree that is an object or a chain of references. */
3847
3848 tree
3849 substitute_placeholder_in_expr (tree exp, tree obj)
3850 {
3851 enum tree_code code = TREE_CODE (exp);
3852 tree op0, op1, op2, op3;
3853 tree new_tree;
3854
3855 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3856 in the chain of OBJ. */
3857 if (code == PLACEHOLDER_EXPR)
3858 {
3859 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3860 tree elt;
3861
3862 for (elt = obj; elt != 0;
3863 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3864 || TREE_CODE (elt) == COND_EXPR)
3865 ? TREE_OPERAND (elt, 1)
3866 : (REFERENCE_CLASS_P (elt)
3867 || UNARY_CLASS_P (elt)
3868 || BINARY_CLASS_P (elt)
3869 || VL_EXP_CLASS_P (elt)
3870 || EXPRESSION_CLASS_P (elt))
3871 ? TREE_OPERAND (elt, 0) : 0))
3872 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3873 return elt;
3874
3875 for (elt = obj; elt != 0;
3876 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3877 || TREE_CODE (elt) == COND_EXPR)
3878 ? TREE_OPERAND (elt, 1)
3879 : (REFERENCE_CLASS_P (elt)
3880 || UNARY_CLASS_P (elt)
3881 || BINARY_CLASS_P (elt)
3882 || VL_EXP_CLASS_P (elt)
3883 || EXPRESSION_CLASS_P (elt))
3884 ? TREE_OPERAND (elt, 0) : 0))
3885 if (POINTER_TYPE_P (TREE_TYPE (elt))
3886 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3887 == need_type))
3888 return fold_build1 (INDIRECT_REF, need_type, elt);
3889
3890 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3891 survives until RTL generation, there will be an error. */
3892 return exp;
3893 }
3894
3895 /* TREE_LIST is special because we need to look at TREE_VALUE
3896 and TREE_CHAIN, not TREE_OPERANDS. */
3897 else if (code == TREE_LIST)
3898 {
3899 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3900 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3901 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3902 return exp;
3903
3904 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3905 }
3906 else
3907 switch (TREE_CODE_CLASS (code))
3908 {
3909 case tcc_constant:
3910 case tcc_declaration:
3911 return exp;
3912
3913 case tcc_exceptional:
3914 case tcc_unary:
3915 case tcc_binary:
3916 case tcc_comparison:
3917 case tcc_expression:
3918 case tcc_reference:
3919 case tcc_statement:
3920 switch (TREE_CODE_LENGTH (code))
3921 {
3922 case 0:
3923 return exp;
3924
3925 case 1:
3926 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3927 if (op0 == TREE_OPERAND (exp, 0))
3928 return exp;
3929
3930 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3931 break;
3932
3933 case 2:
3934 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3935 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3936
3937 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3938 return exp;
3939
3940 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3941 break;
3942
3943 case 3:
3944 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3945 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3946 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3947
3948 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3949 && op2 == TREE_OPERAND (exp, 2))
3950 return exp;
3951
3952 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3953 break;
3954
3955 case 4:
3956 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3957 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3958 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3959 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3960
3961 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3962 && op2 == TREE_OPERAND (exp, 2)
3963 && op3 == TREE_OPERAND (exp, 3))
3964 return exp;
3965
3966 new_tree
3967 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3968 break;
3969
3970 default:
3971 gcc_unreachable ();
3972 }
3973 break;
3974
3975 case tcc_vl_exp:
3976 {
3977 int i;
3978
3979 new_tree = NULL_TREE;
3980
3981 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3982 {
3983 tree op = TREE_OPERAND (exp, i);
3984 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3985 if (new_op != op)
3986 {
3987 if (!new_tree)
3988 new_tree = copy_node (exp);
3989 TREE_OPERAND (new_tree, i) = new_op;
3990 }
3991 }
3992
3993 if (new_tree)
3994 {
3995 new_tree = fold (new_tree);
3996 if (TREE_CODE (new_tree) == CALL_EXPR)
3997 process_call_operands (new_tree);
3998 }
3999 else
4000 return exp;
4001 }
4002 break;
4003
4004 default:
4005 gcc_unreachable ();
4006 }
4007
4008 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4009
4010 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4011 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4012
4013 return new_tree;
4014 }
4015 \f
4016
4017 /* Subroutine of stabilize_reference; this is called for subtrees of
4018 references. Any expression with side-effects must be put in a SAVE_EXPR
4019 to ensure that it is only evaluated once.
4020
4021 We don't put SAVE_EXPR nodes around everything, because assigning very
4022 simple expressions to temporaries causes us to miss good opportunities
4023 for optimizations. Among other things, the opportunity to fold in the
4024 addition of a constant into an addressing mode often gets lost, e.g.
4025 "y[i+1] += x;". In general, we take the approach that we should not make
4026 an assignment unless we are forced into it - i.e., that any non-side effect
4027 operator should be allowed, and that cse should take care of coalescing
4028 multiple utterances of the same expression should that prove fruitful. */
4029
4030 static tree
4031 stabilize_reference_1 (tree e)
4032 {
4033 tree result;
4034 enum tree_code code = TREE_CODE (e);
4035
4036 /* We cannot ignore const expressions because such an expression might be
4037 a reference to a const array whose index contains side effects. But we
4038 can ignore things that are actually constant or that have already been
4039 handled by this function. */
4040
4041 if (tree_invariant_p (e))
4042 return e;
4043
4044 switch (TREE_CODE_CLASS (code))
4045 {
4046 case tcc_exceptional:
4047 case tcc_type:
4048 case tcc_declaration:
4049 case tcc_comparison:
4050 case tcc_statement:
4051 case tcc_expression:
4052 case tcc_reference:
4053 case tcc_vl_exp:
4054 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4055 so that it will only be evaluated once. */
4056 /* The reference (r) and comparison (<) classes could be handled as
4057 below, but it is generally faster to only evaluate them once. */
4058 if (TREE_SIDE_EFFECTS (e))
4059 return save_expr (e);
4060 return e;
4061
4062 case tcc_constant:
4063 /* Constants need no processing. In fact, we should never reach
4064 here. */
4065 return e;
4066
4067 case tcc_binary:
4068 /* Division is slow and tends to be compiled with jumps,
4069 especially the division by powers of 2 that is often
4070 found inside of an array reference. So do it just once. */
4071 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4072 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4073 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4074 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4075 return save_expr (e);
4076 /* Recursively stabilize each operand. */
4077 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4078 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4079 break;
4080
4081 case tcc_unary:
4082 /* Recursively stabilize each operand. */
4083 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4084 break;
4085
4086 default:
4087 gcc_unreachable ();
4088 }
4089
4090 TREE_TYPE (result) = TREE_TYPE (e);
4091 TREE_READONLY (result) = TREE_READONLY (e);
4092 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4093 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4094
4095 return result;
4096 }
4097
4098 /* Stabilize a reference so that we can use it any number of times
4099 without causing its operands to be evaluated more than once.
4100 Returns the stabilized reference. This works by means of save_expr,
4101 so see the caveats in the comments about save_expr.
4102
4103 Also allows conversion expressions whose operands are references.
4104 Any other kind of expression is returned unchanged. */
4105
4106 tree
4107 stabilize_reference (tree ref)
4108 {
4109 tree result;
4110 enum tree_code code = TREE_CODE (ref);
4111
4112 switch (code)
4113 {
4114 case VAR_DECL:
4115 case PARM_DECL:
4116 case RESULT_DECL:
4117 /* No action is needed in this case. */
4118 return ref;
4119
4120 CASE_CONVERT:
4121 case FLOAT_EXPR:
4122 case FIX_TRUNC_EXPR:
4123 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4124 break;
4125
4126 case INDIRECT_REF:
4127 result = build_nt (INDIRECT_REF,
4128 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4129 break;
4130
4131 case COMPONENT_REF:
4132 result = build_nt (COMPONENT_REF,
4133 stabilize_reference (TREE_OPERAND (ref, 0)),
4134 TREE_OPERAND (ref, 1), NULL_TREE);
4135 break;
4136
4137 case BIT_FIELD_REF:
4138 result = build_nt (BIT_FIELD_REF,
4139 stabilize_reference (TREE_OPERAND (ref, 0)),
4140 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4141 break;
4142
4143 case ARRAY_REF:
4144 result = build_nt (ARRAY_REF,
4145 stabilize_reference (TREE_OPERAND (ref, 0)),
4146 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4147 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4148 break;
4149
4150 case ARRAY_RANGE_REF:
4151 result = build_nt (ARRAY_RANGE_REF,
4152 stabilize_reference (TREE_OPERAND (ref, 0)),
4153 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4154 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4155 break;
4156
4157 case COMPOUND_EXPR:
4158 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4159 it wouldn't be ignored. This matters when dealing with
4160 volatiles. */
4161 return stabilize_reference_1 (ref);
4162
4163 /* If arg isn't a kind of lvalue we recognize, make no change.
4164 Caller should recognize the error for an invalid lvalue. */
4165 default:
4166 return ref;
4167
4168 case ERROR_MARK:
4169 return error_mark_node;
4170 }
4171
4172 TREE_TYPE (result) = TREE_TYPE (ref);
4173 TREE_READONLY (result) = TREE_READONLY (ref);
4174 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4175 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4176
4177 return result;
4178 }
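/* Illustrative sketch, not used by this file: a front end expanding
   "a[f ()] += 1" needs to read and write the same element while calling
   f () only once.  With hypothetical trees A (the array) and CALL (the
   call to f), it could do

     tree ref = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (a)), a, call,
			NULL_TREE, NULL_TREE);
     tree stable = stabilize_reference (ref);

   and then use STABLE for both the load and the store; the side-effecting
   index ends up wrapped in a SAVE_EXPR by stabilize_reference_1 above.  */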
4179 \f
4180 /* Low-level constructors for expressions. */
4181
4182 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
4183 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4184
4185 void
4186 recompute_tree_invariant_for_addr_expr (tree t)
4187 {
4188 tree node;
4189 bool tc = true, se = false;
4190
4191 /* We started out assuming this address is both invariant and constant, and
4192 that it does not have side effects. Now go down any handled components and see if
4193 any of them involve offsets that are either non-constant or non-invariant.
4194 Also check for side-effects.
4195
4196 ??? Note that this code makes no attempt to deal with the case where
4197 taking the address of something causes a copy due to misalignment. */
4198
4199 #define UPDATE_FLAGS(NODE) \
4200 do { tree _node = (NODE); \
4201 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4202 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4203
4204 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4205 node = TREE_OPERAND (node, 0))
4206 {
4207 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4208 array reference (probably made temporarily by the G++ front end),
4209 so ignore all the operands. */
4210 if ((TREE_CODE (node) == ARRAY_REF
4211 || TREE_CODE (node) == ARRAY_RANGE_REF)
4212 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4213 {
4214 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4215 if (TREE_OPERAND (node, 2))
4216 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4217 if (TREE_OPERAND (node, 3))
4218 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4219 }
4220 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4221 FIELD_DECL, apparently. The G++ front end can put something else
4222 there, at least temporarily. */
4223 else if (TREE_CODE (node) == COMPONENT_REF
4224 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4225 {
4226 if (TREE_OPERAND (node, 2))
4227 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4228 }
4229 }
4230
4231 node = lang_hooks.expr_to_decl (node, &tc, &se);
4232
4233 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4234 the address, since &(*a)->b is a form of addition. If it's a constant, the
4235 address is constant too. If it's a decl, its address is constant if the
4236 decl is static. Everything else is not constant and, furthermore,
4237 taking the address of a volatile variable is not volatile. */
4238 if (TREE_CODE (node) == INDIRECT_REF
4239 || TREE_CODE (node) == MEM_REF)
4240 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4241 else if (CONSTANT_CLASS_P (node))
4242 ;
4243 else if (DECL_P (node))
4244 tc &= (staticp (node) != NULL_TREE);
4245 else
4246 {
4247 tc = false;
4248 se |= TREE_SIDE_EFFECTS (node);
4249 }
4250
4251
4252 TREE_CONSTANT (t) = tc;
4253 TREE_SIDE_EFFECTS (t) = se;
4254 #undef UPDATE_FLAGS
4255 }
4256
4257 /* Build an expression of code CODE, data type TYPE, and operands as
4258 specified. Expressions and reference nodes can be created this way.
4259 Constants, decls, types and misc nodes cannot be.
4260
4261 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4262 enough for all extant tree codes. */
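/* A minimal sketch of how these functions are typically used (X is a
   hypothetical int-typed tree, not part of this file): building the
   expression "x + 1" would be

     tree sum = build2 (PLUS_EXPR, integer_type_node, x,
			build_int_cst (integer_type_node, 1));

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of the result are
   derived from the operands by the PROCESS_ARG logic below.  */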
4263
4264 tree
4265 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4266 {
4267 tree t;
4268
4269 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4270
4271 t = make_node_stat (code PASS_MEM_STAT);
4272 TREE_TYPE (t) = tt;
4273
4274 return t;
4275 }
4276
4277 tree
4278 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4279 {
4280 int length = sizeof (struct tree_exp);
4281 tree t;
4282
4283 record_node_allocation_statistics (code, length);
4284
4285 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4286
4287 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4288
4289 memset (t, 0, sizeof (struct tree_common));
4290
4291 TREE_SET_CODE (t, code);
4292
4293 TREE_TYPE (t) = type;
4294 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4295 TREE_OPERAND (t, 0) = node;
4296 if (node && !TYPE_P (node))
4297 {
4298 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4299 TREE_READONLY (t) = TREE_READONLY (node);
4300 }
4301
4302 if (TREE_CODE_CLASS (code) == tcc_statement)
4303 TREE_SIDE_EFFECTS (t) = 1;
4304 else switch (code)
4305 {
4306 case VA_ARG_EXPR:
4307 /* All of these have side-effects, no matter what their
4308 operands are. */
4309 TREE_SIDE_EFFECTS (t) = 1;
4310 TREE_READONLY (t) = 0;
4311 break;
4312
4313 case INDIRECT_REF:
4314 /* Whether a dereference is readonly has nothing to do with whether
4315 its operand is readonly. */
4316 TREE_READONLY (t) = 0;
4317 break;
4318
4319 case ADDR_EXPR:
4320 if (node)
4321 recompute_tree_invariant_for_addr_expr (t);
4322 break;
4323
4324 default:
4325 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4326 && node && !TYPE_P (node)
4327 && TREE_CONSTANT (node))
4328 TREE_CONSTANT (t) = 1;
4329 if (TREE_CODE_CLASS (code) == tcc_reference
4330 && node && TREE_THIS_VOLATILE (node))
4331 TREE_THIS_VOLATILE (t) = 1;
4332 break;
4333 }
4334
4335 return t;
4336 }
4337
4338 #define PROCESS_ARG(N) \
4339 do { \
4340 TREE_OPERAND (t, N) = arg##N; \
4341 if (arg##N &&!TYPE_P (arg##N)) \
4342 { \
4343 if (TREE_SIDE_EFFECTS (arg##N)) \
4344 side_effects = 1; \
4345 if (!TREE_READONLY (arg##N) \
4346 && !CONSTANT_CLASS_P (arg##N)) \
4347 (void) (read_only = 0); \
4348 if (!TREE_CONSTANT (arg##N)) \
4349 (void) (constant = 0); \
4350 } \
4351 } while (0)
4352
4353 tree
4354 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4355 {
4356 bool constant, read_only, side_effects;
4357 tree t;
4358
4359 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4360
4361 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4362 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4363 /* When sizetype precision doesn't match that of pointers
4364 we need to be able to build explicit extensions or truncations
4365 of the offset argument. */
4366 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4367 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4368 && TREE_CODE (arg1) == INTEGER_CST);
4369
4370 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4371 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4372 && ptrofftype_p (TREE_TYPE (arg1)));
4373
4374 t = make_node_stat (code PASS_MEM_STAT);
4375 TREE_TYPE (t) = tt;
4376
4377 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4378 result based on those same flags for the arguments. But if the
4379 arguments aren't really even `tree' expressions, we shouldn't be trying
4380 to do this. */
4381
4382 /* Expressions without side effects may be constant if their
4383 arguments are as well. */
4384 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4385 || TREE_CODE_CLASS (code) == tcc_binary);
4386 read_only = 1;
4387 side_effects = TREE_SIDE_EFFECTS (t);
4388
4389 PROCESS_ARG (0);
4390 PROCESS_ARG (1);
4391
4392 TREE_SIDE_EFFECTS (t) = side_effects;
4393 if (code == MEM_REF)
4394 {
4395 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4396 {
4397 tree o = TREE_OPERAND (arg0, 0);
4398 TREE_READONLY (t) = TREE_READONLY (o);
4399 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4400 }
4401 }
4402 else
4403 {
4404 TREE_READONLY (t) = read_only;
4405 TREE_CONSTANT (t) = constant;
4406 TREE_THIS_VOLATILE (t)
4407 = (TREE_CODE_CLASS (code) == tcc_reference
4408 && arg0 && TREE_THIS_VOLATILE (arg0));
4409 }
4410
4411 return t;
4412 }
4413
4414
4415 tree
4416 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4417 tree arg2 MEM_STAT_DECL)
4418 {
4419 bool constant, read_only, side_effects;
4420 tree t;
4421
4422 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4423 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4424
4425 t = make_node_stat (code PASS_MEM_STAT);
4426 TREE_TYPE (t) = tt;
4427
4428 read_only = 1;
4429
4430 /* As a special exception, if COND_EXPR has NULL branches, we
4431 assume that it is a gimple statement and always consider
4432 it to have side effects. */
4433 if (code == COND_EXPR
4434 && tt == void_type_node
4435 && arg1 == NULL_TREE
4436 && arg2 == NULL_TREE)
4437 side_effects = true;
4438 else
4439 side_effects = TREE_SIDE_EFFECTS (t);
4440
4441 PROCESS_ARG (0);
4442 PROCESS_ARG (1);
4443 PROCESS_ARG (2);
4444
4445 if (code == COND_EXPR)
4446 TREE_READONLY (t) = read_only;
4447
4448 TREE_SIDE_EFFECTS (t) = side_effects;
4449 TREE_THIS_VOLATILE (t)
4450 = (TREE_CODE_CLASS (code) == tcc_reference
4451 && arg0 && TREE_THIS_VOLATILE (arg0));
4452
4453 return t;
4454 }
4455
4456 tree
4457 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4458 tree arg2, tree arg3 MEM_STAT_DECL)
4459 {
4460 bool constant, read_only, side_effects;
4461 tree t;
4462
4463 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4464
4465 t = make_node_stat (code PASS_MEM_STAT);
4466 TREE_TYPE (t) = tt;
4467
4468 side_effects = TREE_SIDE_EFFECTS (t);
4469
4470 PROCESS_ARG (0);
4471 PROCESS_ARG (1);
4472 PROCESS_ARG (2);
4473 PROCESS_ARG (3);
4474
4475 TREE_SIDE_EFFECTS (t) = side_effects;
4476 TREE_THIS_VOLATILE (t)
4477 = (TREE_CODE_CLASS (code) == tcc_reference
4478 && arg0 && TREE_THIS_VOLATILE (arg0));
4479
4480 return t;
4481 }
4482
4483 tree
4484 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4485 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4486 {
4487 bool constant, read_only, side_effects;
4488 tree t;
4489
4490 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4491
4492 t = make_node_stat (code PASS_MEM_STAT);
4493 TREE_TYPE (t) = tt;
4494
4495 side_effects = TREE_SIDE_EFFECTS (t);
4496
4497 PROCESS_ARG (0);
4498 PROCESS_ARG (1);
4499 PROCESS_ARG (2);
4500 PROCESS_ARG (3);
4501 PROCESS_ARG (4);
4502
4503 TREE_SIDE_EFFECTS (t) = side_effects;
4504 if (code == TARGET_MEM_REF)
4505 {
4506 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4507 {
4508 tree o = TREE_OPERAND (arg0, 0);
4509 TREE_READONLY (t) = TREE_READONLY (o);
4510 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4511 }
4512 }
4513 else
4514 TREE_THIS_VOLATILE (t)
4515 = (TREE_CODE_CLASS (code) == tcc_reference
4516 && arg0 && TREE_THIS_VOLATILE (arg0));
4517
4518 return t;
4519 }
4520
4521 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4522 on the pointer PTR. */
4523
4524 tree
4525 build_simple_mem_ref_loc (location_t loc, tree ptr)
4526 {
4527 HOST_WIDE_INT offset = 0;
4528 tree ptype = TREE_TYPE (ptr);
4529 tree tem;
4530 /* For convenience allow addresses that collapse to a simple base
4531 and offset. */
4532 if (TREE_CODE (ptr) == ADDR_EXPR
4533 && (handled_component_p (TREE_OPERAND (ptr, 0))
4534 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4535 {
4536 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4537 gcc_assert (ptr);
4538 ptr = build_fold_addr_expr (ptr);
4539 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4540 }
4541 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4542 ptr, build_int_cst (ptype, offset));
4543 SET_EXPR_LOCATION (tem, loc);
4544 return tem;
4545 }
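/* Hypothetical usage (PTR stands for some "int *" valued tree): the
   dereference *PTR can be built as

     tree deref = build_simple_mem_ref (ptr);

   where build_simple_mem_ref is the UNKNOWN_LOCATION wrapper around this
   function; the result is a MEM_REF with a zero (or folded-in) offset.  */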
4546
4547 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4548
4549 offset_int
4550 mem_ref_offset (const_tree t)
4551 {
4552 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4553 }
4554
4555 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4556 offsetted by OFFSET units. */
4557
4558 tree
4559 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4560 {
4561 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4562 build_fold_addr_expr (base),
4563 build_int_cst (ptr_type_node, offset));
4564 tree addr = build1 (ADDR_EXPR, type, ref);
4565 recompute_tree_invariant_for_addr_expr (addr);
4566 return addr;
4567 }
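/* Sketch of a possible call (BASE is a hypothetical static VAR_DECL of
   type "char[8]"): taking the address of its fourth element as "char *":

     tree addr = build_invariant_address (build_pointer_type (char_type_node),
					  base, 3);

   The returned ADDR_EXPR has TREE_CONSTANT recomputed by
   recompute_tree_invariant_for_addr_expr.  */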
4568
4569 /* Similar to the buildN functions above, but don't specify the TREE_TYPE
4570 and leave the TREE_SIDE_EFFECTS as 0.
4571 It is permissible for arguments to be null,
4572 or even garbage, if their values do not matter. */
4573
4574 tree
4575 build_nt (enum tree_code code, ...)
4576 {
4577 tree t;
4578 int length;
4579 int i;
4580 va_list p;
4581
4582 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4583
4584 va_start (p, code);
4585
4586 t = make_node (code);
4587 length = TREE_CODE_LENGTH (code);
4588
4589 for (i = 0; i < length; i++)
4590 TREE_OPERAND (t, i) = va_arg (p, tree);
4591
4592 va_end (p);
4593 return t;
4594 }
4595
4596 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4597 tree vec. */
4598
4599 tree
4600 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4601 {
4602 tree ret, t;
4603 unsigned int ix;
4604
4605 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4606 CALL_EXPR_FN (ret) = fn;
4607 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4608 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4609 CALL_EXPR_ARG (ret, ix) = t;
4610 return ret;
4611 }
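/* Illustrative sketch with hypothetical FN_ADDR/ARG0/ARG1 trees: collect
   the arguments in a GC-allocated vector and build the call unfolded:

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg0);
     vec_safe_push (args, arg1);
     tree call = build_nt_call_vec (fn_addr, args);

   The first three operands of a CALL_EXPR hold the operand count, the
   callee and the static chain; the arguments follow them.  */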
4612 \f
4613 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4614 We do NOT enter this node in any sort of symbol table.
4615
4616 LOC is the location of the decl.
4617
4618 layout_decl is used to set up the decl's storage layout.
4619 Other slots are initialized to 0 or null pointers. */
4620
4621 tree
4622 build_decl_stat (location_t loc, enum tree_code code, tree name,
4623 tree type MEM_STAT_DECL)
4624 {
4625 tree t;
4626
4627 t = make_node_stat (code PASS_MEM_STAT);
4628 DECL_SOURCE_LOCATION (t) = loc;
4629
4630 /* if (type == error_mark_node)
4631 type = integer_type_node; */
4632 /* That is not done, deliberately, so that having error_mark_node
4633 as the type can suppress useless errors in the use of this variable. */
4634
4635 DECL_NAME (t) = name;
4636 TREE_TYPE (t) = type;
4637
4638 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4639 layout_decl (t, 0);
4640
4641 return t;
4642 }
4643
4644 /* Builds and returns a function declaration with name NAME and type TYPE. */
4645
4646 tree
4647 build_fn_decl (const char *name, tree type)
4648 {
4649 tree id = get_identifier (name);
4650 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4651
4652 DECL_EXTERNAL (decl) = 1;
4653 TREE_PUBLIC (decl) = 1;
4654 DECL_ARTIFICIAL (decl) = 1;
4655 TREE_NOTHROW (decl) = 1;
4656
4657 return decl;
4658 }
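/* Hedged example (the name and signature are made up): declaring an
   external function "int my_helper (int)" for the middle end to call:

     tree fntype = build_function_type_list (integer_type_node,
					     integer_type_node, NULL_TREE);
     tree decl = build_fn_decl ("my_helper", fntype);

   The decl comes back external, public, artificial and nothrow; callers
   that need different flags must reset them afterwards.  */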
4659
4660 vec<tree, va_gc> *all_translation_units;
4661
4662 /* Builds a new translation-unit decl with name NAME, queues it in the
4663 global list of translation-unit decls and returns it. */
4664
4665 tree
4666 build_translation_unit_decl (tree name)
4667 {
4668 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4669 name, NULL_TREE);
4670 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4671 vec_safe_push (all_translation_units, tu);
4672 return tu;
4673 }
4674
4675 \f
4676 /* BLOCK nodes are used to represent the structure of binding contours
4677 and declarations, once those contours have been exited and their contents
4678 compiled. This information is used for outputting debugging info. */
4679
4680 tree
4681 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4682 {
4683 tree block = make_node (BLOCK);
4684
4685 BLOCK_VARS (block) = vars;
4686 BLOCK_SUBBLOCKS (block) = subblocks;
4687 BLOCK_SUPERCONTEXT (block) = supercontext;
4688 BLOCK_CHAIN (block) = chain;
4689 return block;
4690 }
4691
4692 \f
4693 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4694
4695 LOC is the location to use in tree T. */
4696
4697 void
4698 protected_set_expr_location (tree t, location_t loc)
4699 {
4700 if (CAN_HAVE_LOCATION_P (t))
4701 SET_EXPR_LOCATION (t, loc);
4702 }
4703 \f
4704 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4705 is ATTRIBUTE. */
4706
4707 tree
4708 build_decl_attribute_variant (tree ddecl, tree attribute)
4709 {
4710 DECL_ATTRIBUTES (ddecl) = attribute;
4711 return ddecl;
4712 }
4713
4714 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4715 is ATTRIBUTE and its qualifiers are QUALS.
4716
4717 Record such modified types already made so we don't make duplicates. */
4718
4719 tree
4720 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4721 {
4722 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4723 {
4724 inchash::hash hstate;
4725 tree ntype;
4726 int i;
4727 tree t;
4728 enum tree_code code = TREE_CODE (ttype);
4729
4730 /* Building a distinct copy of a tagged type is inappropriate; it
4731 causes breakage in code that expects there to be a one-to-one
4732 relationship between a struct and its fields.
4733 build_duplicate_type is another solution (as used in
4734 handle_transparent_union_attribute), but that doesn't play well
4735 with the stronger C++ type identity model. */
4736 if (TREE_CODE (ttype) == RECORD_TYPE
4737 || TREE_CODE (ttype) == UNION_TYPE
4738 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4739 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4740 {
4741 warning (OPT_Wattributes,
4742 "ignoring attributes applied to %qT after definition",
4743 TYPE_MAIN_VARIANT (ttype));
4744 return build_qualified_type (ttype, quals);
4745 }
4746
4747 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4748 ntype = build_distinct_type_copy (ttype);
4749
4750 TYPE_ATTRIBUTES (ntype) = attribute;
4751
4752 hstate.add_int (code);
4753 if (TREE_TYPE (ntype))
4754 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4755 attribute_hash_list (attribute, hstate);
4756
4757 switch (TREE_CODE (ntype))
4758 {
4759 case FUNCTION_TYPE:
4760 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4761 break;
4762 case ARRAY_TYPE:
4763 if (TYPE_DOMAIN (ntype))
4764 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4765 break;
4766 case INTEGER_TYPE:
4767 t = TYPE_MAX_VALUE (ntype);
4768 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4769 hstate.add_object (TREE_INT_CST_ELT (t, i));
4770 break;
4771 case REAL_TYPE:
4772 case FIXED_POINT_TYPE:
4773 {
4774 unsigned int precision = TYPE_PRECISION (ntype);
4775 hstate.add_object (precision);
4776 }
4777 break;
4778 default:
4779 break;
4780 }
4781
4782 ntype = type_hash_canon (hstate.end(), ntype);
4783
4784 /* If the target-dependent attributes make NTYPE different from
4785 its canonical type, we will need to use structural equality
4786 checks for this type. */
4787 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4788 || !comp_type_attributes (ntype, ttype))
4789 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4790 else if (TYPE_CANONICAL (ntype) == ntype)
4791 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4792
4793 ttype = build_qualified_type (ntype, quals);
4794 }
4795 else if (TYPE_QUALS (ttype) != quals)
4796 ttype = build_qualified_type (ttype, quals);
4797
4798 return ttype;
4799 }
4800
4801 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4802 the same. */
4803
4804 static bool
4805 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4806 {
4807 tree cl1, cl2;
4808 for (cl1 = clauses1, cl2 = clauses2;
4809 cl1 && cl2;
4810 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4811 {
4812 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4813 return false;
4814 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4815 {
4816 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4817 OMP_CLAUSE_DECL (cl2)) != 1)
4818 return false;
4819 }
4820 switch (OMP_CLAUSE_CODE (cl1))
4821 {
4822 case OMP_CLAUSE_ALIGNED:
4823 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4824 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4825 return false;
4826 break;
4827 case OMP_CLAUSE_LINEAR:
4828 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4829 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4830 return false;
4831 break;
4832 case OMP_CLAUSE_SIMDLEN:
4833 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4834 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4835 return false;
4836 default:
4837 break;
4838 }
4839 }
4840 return true;
4841 }
4842
4843 /* Compare two constructor-element-type constants. Return true if the
4844 lists are known to be equal; otherwise return false. */
4845
4846 static bool
4847 simple_cst_list_equal (const_tree l1, const_tree l2)
4848 {
4849 while (l1 != NULL_TREE && l2 != NULL_TREE)
4850 {
4851 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4852 return false;
4853
4854 l1 = TREE_CHAIN (l1);
4855 l2 = TREE_CHAIN (l2);
4856 }
4857
4858 return l1 == l2;
4859 }
4860
4861 /* Compare two identifier nodes representing attributes. Either one may
4862 be in wrapped __ATTR__ form. Return true if they are the same, false
4863 otherwise. */
4864
4865 static bool
4866 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4867 {
4868 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4869 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4870 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4871
4872 /* Identifiers can be compared directly for equality. */
4873 if (attr1 == attr2)
4874 return true;
4875
4876 /* If they are not equal, one may still be in the form
4877 'text' while the other one is in the form '__text__'. TODO:
4878 If we were storing attributes in normalized 'text' form, then
4879 this could all go away and we could take full advantage of
4880 the fact that we're comparing identifiers. :-) */
4881 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4882 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4883
4884 if (attr2_len == attr1_len + 4)
4885 {
4886 const char *p = IDENTIFIER_POINTER (attr2);
4887 const char *q = IDENTIFIER_POINTER (attr1);
4888 if (p[0] == '_' && p[1] == '_'
4889 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4890 && strncmp (q, p + 2, attr1_len) == 0)
4891 return true;
4892 }
4893 else if (attr2_len + 4 == attr1_len)
4894 {
4895 const char *p = IDENTIFIER_POINTER (attr2);
4896 const char *q = IDENTIFIER_POINTER (attr1);
4897 if (q[0] == '_' && q[1] == '_'
4898 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4899 && strncmp (q + 2, p, attr2_len) == 0)
4900 return true;
4901 }
4902
4903 return false;
4904 }
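/* For example, cmp_attrib_identifiers (get_identifier ("aligned"),
   get_identifier ("__aligned__")) is true, while comparing "aligned"
   against "__packed__" is false.  */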
4905
4906 /* Compare two attributes for their value identity. Return true if the
4907 attribute values are known to be equal; otherwise return false. */
4908
4909 bool
4910 attribute_value_equal (const_tree attr1, const_tree attr2)
4911 {
4912 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4913 return true;
4914
4915 if (TREE_VALUE (attr1) != NULL_TREE
4916 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4917 && TREE_VALUE (attr2) != NULL_TREE
4918 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4919 {
4920 /* Handle attribute format. */
4921 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4922 {
4923 attr1 = TREE_VALUE (attr1);
4924 attr2 = TREE_VALUE (attr2);
4925 /* Compare the archetypes (printf/scanf/strftime/...). */
4926 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4927 TREE_VALUE (attr2)))
4928 return false;
4929 /* Archetypes are the same. Compare the rest. */
4930 return (simple_cst_list_equal (TREE_CHAIN (attr1),
4931 TREE_CHAIN (attr2)) == 1);
4932 }
4933 return (simple_cst_list_equal (TREE_VALUE (attr1),
4934 TREE_VALUE (attr2)) == 1);
4935 }
4936
4937 if ((flag_openmp || flag_openmp_simd)
4938 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4939 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4940 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4941 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4942 TREE_VALUE (attr2));
4943
4944 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4945 }
4946
4947 /* Return 0 if the attributes for two types are incompatible, 1 if they
4948 are compatible, and 2 if they are nearly compatible (which causes a
4949 warning to be generated). */
4950 int
4951 comp_type_attributes (const_tree type1, const_tree type2)
4952 {
4953 const_tree a1 = TYPE_ATTRIBUTES (type1);
4954 const_tree a2 = TYPE_ATTRIBUTES (type2);
4955 const_tree a;
4956
4957 if (a1 == a2)
4958 return 1;
4959 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4960 {
4961 const struct attribute_spec *as;
4962 const_tree attr;
4963
4964 as = lookup_attribute_spec (get_attribute_name (a));
4965 if (!as || as->affects_type_identity == false)
4966 continue;
4967
4968 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4969 if (!attr || !attribute_value_equal (a, attr))
4970 break;
4971 }
4972 if (!a)
4973 {
4974 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4975 {
4976 const struct attribute_spec *as;
4977
4978 as = lookup_attribute_spec (get_attribute_name (a));
4979 if (!as || as->affects_type_identity == false)
4980 continue;
4981
4982 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4983 break;
4984 /* We don't need to compare trees again, as we did this
4985 already in the first loop. */
4986 }
4987 /* All the attributes affecting type identity are equal in both lists, so
4988 there is no need to call the target hook for comparison. */
4989 if (!a)
4990 return 1;
4991 }
4992 /* As some type combinations - like default calling-convention - might
4993 be compatible, we have to call the target hook to get the final result. */
4994 return targetm.comp_type_attributes (type1, type2);
4995 }
4996
4997 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4998 is ATTRIBUTE.
4999
5000 Record such modified types already made so we don't make duplicates. */
5001
5002 tree
5003 build_type_attribute_variant (tree ttype, tree attribute)
5004 {
5005 return build_type_attribute_qual_variant (ttype, attribute,
5006 TYPE_QUALS (ttype));
5007 }
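/* Possible usage sketch (attribute chosen only for illustration): adding
   a "may_alias" attribute while keeping the existing qualifiers:

     tree attrs = tree_cons (get_identifier ("may_alias"), NULL_TREE,
			     TYPE_ATTRIBUTES (type));
     tree variant = build_type_attribute_variant (type, attrs);

   For already-defined tagged types this degrades to a -Wattributes
   warning plus a plain qualified variant, as handled in
   build_type_attribute_qual_variant above.  */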
5008
5009
5010 /* Reset the expression *EXPR_P, a size or position.
5011
5012 ??? We could reset all non-constant sizes or positions. But it's cheap
5013 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5014
5015 We need to reset self-referential sizes or positions because they cannot
5016 be gimplified and thus can contain a CALL_EXPR after the gimplification
5017 is finished, which will run afoul of LTO streaming. And they need to be
5018 reset to something essentially dummy but not constant, so as to preserve
5019 the properties of the object they are attached to. */
5020
5021 static inline void
5022 free_lang_data_in_one_sizepos (tree *expr_p)
5023 {
5024 tree expr = *expr_p;
5025 if (CONTAINS_PLACEHOLDER_P (expr))
5026 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5027 }
5028
5029
5030 /* Reset all the fields in a binfo node BINFO. We only keep
5031 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5032
5033 static void
5034 free_lang_data_in_binfo (tree binfo)
5035 {
5036 unsigned i;
5037 tree t;
5038
5039 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5040
5041 BINFO_VIRTUALS (binfo) = NULL_TREE;
5042 BINFO_BASE_ACCESSES (binfo) = NULL;
5043 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5044 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5045
5046 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5047 free_lang_data_in_binfo (t);
5048 }
5049
5050
5051 /* Reset all language specific information still present in TYPE. */
5052
5053 static void
5054 free_lang_data_in_type (tree type)
5055 {
5056 gcc_assert (TYPE_P (type));
5057
5058 /* Give the FE a chance to remove its own data first. */
5059 lang_hooks.free_lang_data (type);
5060
5061 TREE_LANG_FLAG_0 (type) = 0;
5062 TREE_LANG_FLAG_1 (type) = 0;
5063 TREE_LANG_FLAG_2 (type) = 0;
5064 TREE_LANG_FLAG_3 (type) = 0;
5065 TREE_LANG_FLAG_4 (type) = 0;
5066 TREE_LANG_FLAG_5 (type) = 0;
5067 TREE_LANG_FLAG_6 (type) = 0;
5068
5069 if (TREE_CODE (type) == FUNCTION_TYPE)
5070 {
5071 /* Remove the const and volatile qualifiers from arguments. The
5072 C++ front end removes them, but the C front end does not,
5073 leading to false ODR violation errors when merging two
5074 instances of the same function signature compiled by
5075 different front ends. */
5076 tree p;
5077
5078 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5079 {
5080 tree arg_type = TREE_VALUE (p);
5081
5082 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5083 {
5084 int quals = TYPE_QUALS (arg_type)
5085 & ~TYPE_QUAL_CONST
5086 & ~TYPE_QUAL_VOLATILE;
5087 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5088 free_lang_data_in_type (TREE_VALUE (p));
5089 }
5090 /* C++ FE uses TREE_PURPOSE to store initial values. */
5091 TREE_PURPOSE (p) = NULL;
5092 }
5093 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5094 TYPE_MINVAL (type) = NULL;
5095 }
5096 if (TREE_CODE (type) == METHOD_TYPE)
5097 {
5098 tree p;
5099
5100 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5101 {
5102 /* C++ FE uses TREE_PURPOSE to store initial values. */
5103 TREE_PURPOSE (p) = NULL;
5104 }
5105 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5106 TYPE_MINVAL (type) = NULL;
5107 }
5108
5109 /* Remove members that are not actually FIELD_DECLs from the field
5110 list of an aggregate. These occur in C++. */
5111 if (RECORD_OR_UNION_TYPE_P (type))
5112 {
5113 tree prev, member;
5114
5115 /* Note that TYPE_FIELDS can be shared across distinct
5116 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5117 to be removed, we cannot set its TREE_CHAIN to NULL.
5118 Otherwise, we would not be able to find all the other fields
5119 in the other instances of this TREE_TYPE.
5120
5121 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5122 prev = NULL_TREE;
5123 member = TYPE_FIELDS (type);
5124 while (member)
5125 {
5126 if (TREE_CODE (member) == FIELD_DECL
5127 || TREE_CODE (member) == TYPE_DECL)
5128 {
5129 if (prev)
5130 TREE_CHAIN (prev) = member;
5131 else
5132 TYPE_FIELDS (type) = member;
5133 prev = member;
5134 }
5135
5136 member = TREE_CHAIN (member);
5137 }
5138
5139 if (prev)
5140 TREE_CHAIN (prev) = NULL_TREE;
5141 else
5142 TYPE_FIELDS (type) = NULL_TREE;
5143
5144 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5145 and dangles the pointer from time to time. */
5146 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5147 TYPE_VFIELD (type) = NULL_TREE;
5148
5149 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5150 to enable ODR warnings about different method lists, doing so
5151 seems to increase the size of the streamed LTO data impractically.
5152 Keep the information about whether TYPE_METHODS was non-NULL; this
5153 is used by function.c and the pretty printers. */
5154 if (TYPE_METHODS (type))
5155 TYPE_METHODS (type) = error_mark_node;
5156 if (TYPE_BINFO (type))
5157 {
5158 free_lang_data_in_binfo (TYPE_BINFO (type));
5159 /* We need to preserve the link to the bases and the virtual table for
5160 all polymorphic types to keep the devirtualization machinery working.
5161 Debug output cares only about the bases, but we also output the
5162 virtual table pointers so that merging -fdevirtualize and
5163 -fno-devirtualize units is easier. */
5164 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5165 || !flag_devirtualize)
5166 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5167 && !BINFO_VTABLE (TYPE_BINFO (type)))
5168 || debug_info_level != DINFO_LEVEL_NONE))
5169 TYPE_BINFO (type) = NULL;
5170 }
5171 }
5172 else
5173 {
5174 /* For non-aggregate types, clear out the language slot (which
5175 overloads TYPE_BINFO). */
5176 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5177
5178 if (INTEGRAL_TYPE_P (type)
5179 || SCALAR_FLOAT_TYPE_P (type)
5180 || FIXED_POINT_TYPE_P (type))
5181 {
5182 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5183 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5184 }
5185 }
5186
5187 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5188 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5189
5190 if (TYPE_CONTEXT (type)
5191 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5192 {
5193 tree ctx = TYPE_CONTEXT (type);
5194 do
5195 {
5196 ctx = BLOCK_SUPERCONTEXT (ctx);
5197 }
5198 while (ctx && TREE_CODE (ctx) == BLOCK);
5199 TYPE_CONTEXT (type) = ctx;
5200 }
5201 }
5202
5203
5204 /* Return true if DECL may need an assembler name to be set. */
5205
5206 static inline bool
5207 need_assembler_name_p (tree decl)
5208 {
5209 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5210 Rule merging. This makes type_odr_p return true on those types during
5211 LTO, and by comparing the mangled names we can tell which types are
5212 intended to be equivalent across compilation units.
5213
5214 We do not store names of types for which type_in_anonymous_namespace_p is true.
5215
5216 Record, union and enumeration types have linkage that allows us
5217 to check type_in_anonymous_namespace_p. We do not mangle compound types
5218 that can always be compared structurally.
5219
5220 Similarly for builtin types, we compare properties of their main variant.
5221 A special case is integer types, where mangling does distinguish
5222 char/signed char/unsigned char etc. Storing names for these allows
5223 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5224 See cp/mangle.c:write_builtin_type for details. */
5225
5226 if (flag_lto_odr_type_mering
5227 && TREE_CODE (decl) == TYPE_DECL
5228 && DECL_NAME (decl)
5229 && decl == TYPE_NAME (TREE_TYPE (decl))
5230 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5231 && (type_with_linkage_p (TREE_TYPE (decl))
5232 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5233 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5234 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5235 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5236 if (TREE_CODE (decl) != FUNCTION_DECL
5237 && TREE_CODE (decl) != VAR_DECL)
5238 return false;
5239
5240 /* If DECL already has its assembler name set, it does not need a
5241 new one. */
5242 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5243 || DECL_ASSEMBLER_NAME_SET_P (decl))
5244 return false;
5245
5246 /* Abstract decls do not need an assembler name. */
5247 if (DECL_ABSTRACT_P (decl))
5248 return false;
5249
5250 /* For VAR_DECLs, only static, public and external symbols need an
5251 assembler name. */
5252 if (TREE_CODE (decl) == VAR_DECL
5253 && !TREE_STATIC (decl)
5254 && !TREE_PUBLIC (decl)
5255 && !DECL_EXTERNAL (decl))
5256 return false;
5257
5258 if (TREE_CODE (decl) == FUNCTION_DECL)
5259 {
5260 /* Do not set assembler name on builtins. Allow RTL expansion to
5261 decide whether to expand inline or via a regular call. */
5262 if (DECL_BUILT_IN (decl)
5263 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5264 return false;
5265
5266 /* Functions represented in the callgraph need an assembler name. */
5267 if (cgraph_node::get (decl) != NULL)
5268 return true;
5269
5270 /* Unused and not public functions don't need an assembler name. */
5271 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5272 return false;
5273 }
5274
5275 return true;
5276 }
5277
5278
5279 /* Reset all language specific information still present in symbol
5280 DECL. */
5281
5282 static void
5283 free_lang_data_in_decl (tree decl)
5284 {
5285 gcc_assert (DECL_P (decl));
5286
5287 /* Give the FE a chance to remove its own data first. */
5288 lang_hooks.free_lang_data (decl);
5289
5290 TREE_LANG_FLAG_0 (decl) = 0;
5291 TREE_LANG_FLAG_1 (decl) = 0;
5292 TREE_LANG_FLAG_2 (decl) = 0;
5293 TREE_LANG_FLAG_3 (decl) = 0;
5294 TREE_LANG_FLAG_4 (decl) = 0;
5295 TREE_LANG_FLAG_5 (decl) = 0;
5296 TREE_LANG_FLAG_6 (decl) = 0;
5297
5298 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5299 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5300 if (TREE_CODE (decl) == FIELD_DECL)
5301 {
5302 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5303 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5304 DECL_QUALIFIER (decl) = NULL_TREE;
5305 }
5306
5307 if (TREE_CODE (decl) == FUNCTION_DECL)
5308 {
5309 struct cgraph_node *node;
5310 if (!(node = cgraph_node::get (decl))
5311 || (!node->definition && !node->clones))
5312 {
5313 if (node)
5314 node->release_body ();
5315 else
5316 {
5317 release_function_body (decl);
5318 DECL_ARGUMENTS (decl) = NULL;
5319 DECL_RESULT (decl) = NULL;
5320 DECL_INITIAL (decl) = error_mark_node;
5321 }
5322 }
5323 if (gimple_has_body_p (decl))
5324 {
5325 tree t;
5326
5327 /* If DECL has a gimple body, then the context for its
5328 arguments must be DECL. Otherwise, it doesn't really
5329 matter, as we will not be emitting any code for DECL. In
5330 general, there may be other instances of DECL created by
5331 the front end and since PARM_DECLs are generally shared,
5332 their DECL_CONTEXT changes as the replicas of DECL are
5333 created. The only time where DECL_CONTEXT is important
5334 is for the FUNCTION_DECLs that have a gimple body (since
5335 the PARM_DECL will be used in the function's body). */
5336 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5337 DECL_CONTEXT (t) = decl;
5338 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5339 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5340 = target_option_default_node;
5341 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5342 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5343 = optimization_default_node;
5344 }
5345
5346 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5347 At this point, it is not needed anymore. */
5348 DECL_SAVED_TREE (decl) = NULL_TREE;
5349
5350 /* Clear the abstract origin if it refers to a method. Otherwise
5351 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5352 origin will not be output correctly. */
5353 if (DECL_ABSTRACT_ORIGIN (decl)
5354 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5355 && RECORD_OR_UNION_TYPE_P
5356 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5357 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5358
5359 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5360 DECL_VINDEX referring to itself into a vtable slot number as it
5361 should. Happens with functions that are copied and then forgotten
5362 about. Just clear it, it won't matter anymore. */
5363 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5364 DECL_VINDEX (decl) = NULL_TREE;
5365 }
5366 else if (TREE_CODE (decl) == VAR_DECL)
5367 {
5368 if ((DECL_EXTERNAL (decl)
5369 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5370 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5371 DECL_INITIAL (decl) = NULL_TREE;
5372 }
5373 else if (TREE_CODE (decl) == TYPE_DECL
5374 || TREE_CODE (decl) == FIELD_DECL)
5375 DECL_INITIAL (decl) = NULL_TREE;
5376 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5377 && DECL_INITIAL (decl)
5378 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5379 {
5380 /* Strip builtins from the translation-unit BLOCK. We still have targets
5381 without builtin_decl_explicit support, and builtins are also shared
5382 nodes, so we can't use TREE_CHAIN in multiple lists. */
5383 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5384 while (*nextp)
5385 {
5386 tree var = *nextp;
5387 if (TREE_CODE (var) == FUNCTION_DECL
5388 && DECL_BUILT_IN (var))
5389 *nextp = TREE_CHAIN (var);
5390 else
5391 nextp = &TREE_CHAIN (var);
5392 }
5393 }
5394 }
5395
5396
5397 /* Data used when collecting DECLs and TYPEs for language data removal. */
5398
5399 struct free_lang_data_d
5400 {
5401 /* Worklist to avoid excessive recursion. */
5402 vec<tree> worklist;
5403
5404 /* Set of traversed objects. Used to avoid duplicate visits. */
5405 hash_set<tree> *pset;
5406
5407 /* Array of symbols to process with free_lang_data_in_decl. */
5408 vec<tree> decls;
5409
5410 /* Array of types to process with free_lang_data_in_type. */
5411 vec<tree> types;
5412 };
5413
5414
5415 /* Save all language fields needed to generate proper debug information
5416 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5417
5418 static void
5419 save_debug_info_for_decl (tree t)
5420 {
5421 /*struct saved_debug_info_d *sdi;*/
5422
5423 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5424
5425 /* FIXME. Partial implementation for saving debug info removed. */
5426 }
5427
5428
5429 /* Save all language fields needed to generate proper debug information
5430 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5431
5432 static void
5433 save_debug_info_for_type (tree t)
5434 {
5435 /*struct saved_debug_info_d *sdi;*/
5436
5437 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5438
5439 /* FIXME. Partial implementation for saving debug info removed. */
5440 }
5441
5442
5443 /* Add type or decl T to one of the lists of tree nodes that need their
5444 language data removed. The lists are held inside FLD. */
5445
5446 static void
5447 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5448 {
5449 if (DECL_P (t))
5450 {
5451 fld->decls.safe_push (t);
5452 if (debug_info_level > DINFO_LEVEL_TERSE)
5453 save_debug_info_for_decl (t);
5454 }
5455 else if (TYPE_P (t))
5456 {
5457 fld->types.safe_push (t);
5458 if (debug_info_level > DINFO_LEVEL_TERSE)
5459 save_debug_info_for_type (t);
5460 }
5461 else
5462 gcc_unreachable ();
5463 }
5464
5465 /* Push tree node T into FLD->WORKLIST. */
5466
5467 static inline void
5468 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5469 {
5470 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5471 fld->worklist.safe_push ((t));
5472 }
5473
5474
5475 /* Operand callback helper for free_lang_data_in_node. *TP is the
5476 subtree operand being considered. */
5477
5478 static tree
5479 find_decls_types_r (tree *tp, int *ws, void *data)
5480 {
5481 tree t = *tp;
5482 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5483
5484 if (TREE_CODE (t) == TREE_LIST)
5485 return NULL_TREE;
5486
5487 /* Language specific nodes will be removed, so there is no need
5488 to gather anything under them. */
5489 if (is_lang_specific (t))
5490 {
5491 *ws = 0;
5492 return NULL_TREE;
5493 }
5494
5495 if (DECL_P (t))
5496 {
5497 /* Note that walk_tree does not traverse every possible field in
5498 decls, so we have to do our own traversals here. */
5499 add_tree_to_fld_list (t, fld);
5500
5501 fld_worklist_push (DECL_NAME (t), fld);
5502 fld_worklist_push (DECL_CONTEXT (t), fld);
5503 fld_worklist_push (DECL_SIZE (t), fld);
5504 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5505
5506 /* We are going to remove everything under DECL_INITIAL for
5507 TYPE_DECLs. No point walking them. */
5508 if (TREE_CODE (t) != TYPE_DECL)
5509 fld_worklist_push (DECL_INITIAL (t), fld);
5510
5511 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5512 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5513
5514 if (TREE_CODE (t) == FUNCTION_DECL)
5515 {
5516 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5517 fld_worklist_push (DECL_RESULT (t), fld);
5518 }
5519 else if (TREE_CODE (t) == TYPE_DECL)
5520 {
5521 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5522 }
5523 else if (TREE_CODE (t) == FIELD_DECL)
5524 {
5525 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5526 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5527 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5528 fld_worklist_push (DECL_FCONTEXT (t), fld);
5529 }
5530
5531 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5532 && DECL_HAS_VALUE_EXPR_P (t))
5533 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5534
5535 if (TREE_CODE (t) != FIELD_DECL
5536 && TREE_CODE (t) != TYPE_DECL)
5537 fld_worklist_push (TREE_CHAIN (t), fld);
5538 *ws = 0;
5539 }
5540 else if (TYPE_P (t))
5541 {
5542 /* Note that walk_tree does not traverse every possible field in
5543 types, so we have to do our own traversals here. */
5544 add_tree_to_fld_list (t, fld);
5545
5546 if (!RECORD_OR_UNION_TYPE_P (t))
5547 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5548 fld_worklist_push (TYPE_SIZE (t), fld);
5549 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5550 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5551 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5552 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5553 fld_worklist_push (TYPE_NAME (t), fld);
5554 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5555 them and thus do not want to reach unused pointer types
5556 this way. */
5557 if (!POINTER_TYPE_P (t))
5558 fld_worklist_push (TYPE_MINVAL (t), fld);
5559 if (!RECORD_OR_UNION_TYPE_P (t))
5560 fld_worklist_push (TYPE_MAXVAL (t), fld);
5561 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5562 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5563 do not want to reach unused variants this way. */
5564 if (TYPE_CONTEXT (t))
5565 {
5566 tree ctx = TYPE_CONTEXT (t);
5567 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5568 So push that instead. */
5569 while (ctx && TREE_CODE (ctx) == BLOCK)
5570 ctx = BLOCK_SUPERCONTEXT (ctx);
5571 fld_worklist_push (ctx, fld);
5572 }
5573 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5574 want to reach unused types this way. */
5575
5576 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5577 {
5578 unsigned i;
5579 tree tem;
5580 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5581 fld_worklist_push (TREE_TYPE (tem), fld);
5582 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5583 if (tem
5584 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5585 && TREE_CODE (tem) == TREE_LIST)
5586 do
5587 {
5588 fld_worklist_push (TREE_VALUE (tem), fld);
5589 tem = TREE_CHAIN (tem);
5590 }
5591 while (tem);
5592 }
5593 if (RECORD_OR_UNION_TYPE_P (t))
5594 {
5595 tree tem;
5596 /* Push all TYPE_FIELDS - interesting and non-interesting things
5597 can be interleaved there. */
5598 tem = TYPE_FIELDS (t);
5599 while (tem)
5600 {
5601 if (TREE_CODE (tem) == FIELD_DECL
5602 || TREE_CODE (tem) == TYPE_DECL)
5603 fld_worklist_push (tem, fld);
5604 tem = TREE_CHAIN (tem);
5605 }
5606 }
5607
5608 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5609 *ws = 0;
5610 }
5611 else if (TREE_CODE (t) == BLOCK)
5612 {
5613 tree tem;
5614 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5615 fld_worklist_push (tem, fld);
5616 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5617 fld_worklist_push (tem, fld);
5618 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5619 }
5620
5621 if (TREE_CODE (t) != IDENTIFIER_NODE
5622 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5623 fld_worklist_push (TREE_TYPE (t), fld);
5624
5625 return NULL_TREE;
5626 }
5627
5628
5629 /* Find decls and types in T. */
5630
5631 static void
5632 find_decls_types (tree t, struct free_lang_data_d *fld)
5633 {
5634 while (1)
5635 {
5636 if (!fld->pset->contains (t))
5637 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5638 if (fld->worklist.is_empty ())
5639 break;
5640 t = fld->worklist.pop ();
5641 }
5642 }
5643
5644 /* Translate all the types in LIST into the corresponding runtime
5645 types. */
5646
5647 static tree
5648 get_eh_types_for_runtime (tree list)
5649 {
5650 tree head, prev;
5651
5652 if (list == NULL_TREE)
5653 return NULL_TREE;
5654
5655 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5656 prev = head;
5657 list = TREE_CHAIN (list);
5658 while (list)
5659 {
5660 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5661 TREE_CHAIN (prev) = n;
5662 prev = TREE_CHAIN (prev);
5663 list = TREE_CHAIN (list);
5664 }
5665
5666 return head;
5667 }
5668
5669
5670 /* Find decls and types referenced in EH region R and store them in
5671 FLD->DECLS and FLD->TYPES. */
5672
5673 static void
5674 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5675 {
5676 switch (r->type)
5677 {
5678 case ERT_CLEANUP:
5679 break;
5680
5681 case ERT_TRY:
5682 {
5683 eh_catch c;
5684
5685 /* The types referenced in each catch must first be changed to the
5686 EH types used at runtime. This removes references to FE types
5687 in the region. */
5688 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5689 {
5690 c->type_list = get_eh_types_for_runtime (c->type_list);
5691 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5692 }
5693 }
5694 break;
5695
5696 case ERT_ALLOWED_EXCEPTIONS:
5697 r->u.allowed.type_list
5698 = get_eh_types_for_runtime (r->u.allowed.type_list);
5699 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5700 break;
5701
5702 case ERT_MUST_NOT_THROW:
5703 walk_tree (&r->u.must_not_throw.failure_decl,
5704 find_decls_types_r, fld, fld->pset);
5705 break;
5706 }
5707 }
5708
5709
5710 /* Find decls and types referenced in cgraph node N and store them in
5711 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5712 look for *every* kind of DECL and TYPE node reachable from N,
5713 including those embedded inside types and decls (i.e., TYPE_DECLs,
5714 NAMESPACE_DECLs, etc). */
5715
5716 static void
5717 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5718 {
5719 basic_block bb;
5720 struct function *fn;
5721 unsigned ix;
5722 tree t;
5723
5724 find_decls_types (n->decl, fld);
5725
5726 if (!gimple_has_body_p (n->decl))
5727 return;
5728
5729 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5730
5731 fn = DECL_STRUCT_FUNCTION (n->decl);
5732
5733 /* Traverse locals. */
5734 FOR_EACH_LOCAL_DECL (fn, ix, t)
5735 find_decls_types (t, fld);
5736
5737 /* Traverse EH regions in FN. */
5738 {
5739 eh_region r;
5740 FOR_ALL_EH_REGION_FN (r, fn)
5741 find_decls_types_in_eh_region (r, fld);
5742 }
5743
5744 /* Traverse every statement in FN. */
5745 FOR_EACH_BB_FN (bb, fn)
5746 {
5747 gphi_iterator psi;
5748 gimple_stmt_iterator si;
5749 unsigned i;
5750
5751 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5752 {
5753 gphi *phi = psi.phi ();
5754
5755 for (i = 0; i < gimple_phi_num_args (phi); i++)
5756 {
5757 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5758 find_decls_types (*arg_p, fld);
5759 }
5760 }
5761
5762 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5763 {
5764 gimple stmt = gsi_stmt (si);
5765
5766 if (is_gimple_call (stmt))
5767 find_decls_types (gimple_call_fntype (stmt), fld);
5768
5769 for (i = 0; i < gimple_num_ops (stmt); i++)
5770 {
5771 tree arg = gimple_op (stmt, i);
5772 find_decls_types (arg, fld);
5773 }
5774 }
5775 }
5776 }
5777
5778
5779 /* Find decls and types referenced in varpool node N and store them in
5780 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5781 look for *every* kind of DECL and TYPE node reachable from N,
5782 including those embedded inside types and decls (i.e., TYPE_DECLs,
5783 NAMESPACE_DECLs, etc). */
5784
5785 static void
5786 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5787 {
5788 find_decls_types (v->decl, fld);
5789 }
5790
5791 /* If T needs an assembler name, have one created for it. */
5792
5793 void
5794 assign_assembler_name_if_neeeded (tree t)
5795 {
5796 if (need_assembler_name_p (t))
5797 {
5798 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5799 diagnostics that use input_location to show locus
5800 information. The problem here is that, at this point,
5801 input_location is generally anchored to the end of the file
5802 (since the parser is long gone), so we don't have a good
5803 position to pin it to.
5804
5805 To alleviate this problem, this uses the location of T's
5806 declaration. Examples of this are
5807 testsuite/g++.dg/template/cond2.C and
5808 testsuite/g++.dg/template/pr35240.C. */
5809 location_t saved_location = input_location;
5810 input_location = DECL_SOURCE_LOCATION (t);
5811
5812 decl_assembler_name (t);
5813
5814 input_location = saved_location;
5815 }
5816 }
5817
5818
5819 /* Free language specific information for every operand and expression
5820 in every node of the call graph. This process operates in three stages:
5821
5822 1- Every callgraph node and varpool node is traversed looking for
5823 decls and types embedded in them. This is a more exhaustive
5824 search than that done by find_referenced_vars, because it will
5825 also collect individual fields, decls embedded in types, etc.
5826
5827 2- All the decls found are sent to free_lang_data_in_decl.
5828
5829 3- All the types found are sent to free_lang_data_in_type.
5830
5831 The ordering between decls and types is important because
5832 free_lang_data_in_decl sets assembler names, which includes
5833 mangling. So types cannot be freed up until assembler names have
5834 been set up. */
5835
5836 static void
5837 free_lang_data_in_cgraph (void)
5838 {
5839 struct cgraph_node *n;
5840 varpool_node *v;
5841 struct free_lang_data_d fld;
5842 tree t;
5843 unsigned i;
5844 alias_pair *p;
5845
5846 /* Initialize sets and arrays to store referenced decls and types. */
5847 fld.pset = new hash_set<tree>;
5848 fld.worklist.create (0);
5849 fld.decls.create (100);
5850 fld.types.create (100);
5851
5852 /* Find decls and types in the body of every function in the callgraph. */
5853 FOR_EACH_FUNCTION (n)
5854 find_decls_types_in_node (n, &fld);
5855
5856 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5857 find_decls_types (p->decl, &fld);
5858
5859 /* Find decls and types in every varpool symbol. */
5860 FOR_EACH_VARIABLE (v)
5861 find_decls_types_in_var (v, &fld);
5862
5863 /* Set the assembler name on every decl found. We need to do this
5864 now because free_lang_data_in_decl will invalidate data needed
5865 for mangling. This breaks mangling on interdependent decls. */
5866 FOR_EACH_VEC_ELT (fld.decls, i, t)
5867 assign_assembler_name_if_neeeded (t);
5868
5869 /* Traverse every decl found freeing its language data. */
5870 FOR_EACH_VEC_ELT (fld.decls, i, t)
5871 free_lang_data_in_decl (t);
5872
5873 /* Traverse every type found freeing its language data. */
5874 FOR_EACH_VEC_ELT (fld.types, i, t)
5875 free_lang_data_in_type (t);
5876 #ifdef ENABLE_CHECKING
5877 FOR_EACH_VEC_ELT (fld.types, i, t)
5878 verify_type (t);
5879 #endif
5880
5881 delete fld.pset;
5882 fld.worklist.release ();
5883 fld.decls.release ();
5884 fld.types.release ();
5885 }
5886
5887
5888 /* Free resources used by the frontend that are no longer needed once it is done. */
5889
5890 static unsigned
5891 free_lang_data (void)
5892 {
5893 unsigned i;
5894
5895 /* If we are the LTO frontend we have freed lang-specific data already. */
5896 if (in_lto_p
5897 || (!flag_generate_lto && !flag_generate_offload))
5898 return 0;
5899
5900 /* Allocate and assign alias sets to the standard integer types
5901 while the slots are still set up the way the frontends generated them. */
5902 for (i = 0; i < itk_none; ++i)
5903 if (integer_types[i])
5904 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5905
5906 /* Traverse the IL resetting language specific information for
5907 operands, expressions, etc. */
5908 free_lang_data_in_cgraph ();
5909
5910 /* Create gimple variants for common types. */
5911 ptrdiff_type_node = integer_type_node;
5912 fileptr_type_node = ptr_type_node;
5913
5914 /* Reset some langhooks. Do not reset types_compatible_p, it may
5915 still be used indirectly via the get_alias_set langhook. */
5916 lang_hooks.dwarf_name = lhd_dwarf_name;
5917 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5918 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5919
5920 /* We do not want the default decl_assembler_name implementation,
5921 rather if we have fixed everything we want a wrapper around it
5922 asserting that all non-local symbols already got their assembler
5923 name and only produce assembler names for local symbols. Or rather
5924 make sure we never call decl_assembler_name on local symbols and
5925 devise a separate, middle-end private scheme for it. */
5926
5927 /* Reset diagnostic machinery. */
5928 tree_diagnostics_defaults (global_dc);
5929
5930 return 0;
5931 }
5932
5933
5934 namespace {
5935
5936 const pass_data pass_data_ipa_free_lang_data =
5937 {
5938 SIMPLE_IPA_PASS, /* type */
5939 "*free_lang_data", /* name */
5940 OPTGROUP_NONE, /* optinfo_flags */
5941 TV_IPA_FREE_LANG_DATA, /* tv_id */
5942 0, /* properties_required */
5943 0, /* properties_provided */
5944 0, /* properties_destroyed */
5945 0, /* todo_flags_start */
5946 0, /* todo_flags_finish */
5947 };
5948
5949 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5950 {
5951 public:
5952 pass_ipa_free_lang_data (gcc::context *ctxt)
5953 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5954 {}
5955
5956 /* opt_pass methods: */
5957 virtual unsigned int execute (function *) { return free_lang_data (); }
5958
5959 }; // class pass_ipa_free_lang_data
5960
5961 } // anon namespace
5962
5963 simple_ipa_opt_pass *
5964 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5965 {
5966 return new pass_ipa_free_lang_data (ctxt);
5967 }
5968
5969 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5970 ATTR_NAME. Also used internally by remove_attribute(). */
5971 bool
5972 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5973 {
5974 size_t ident_len = IDENTIFIER_LENGTH (ident);
5975
5976 if (ident_len == attr_len)
5977 {
5978 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5979 return true;
5980 }
5981 else if (ident_len == attr_len + 4)
5982 {
5983 /* There is the possibility that ATTR is 'text' and IDENT is
5984 '__text__'. */
5985 const char *p = IDENTIFIER_POINTER (ident);
5986 if (p[0] == '_' && p[1] == '_'
5987 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5988 && strncmp (attr_name, p + 2, attr_len) == 0)
5989 return true;
5990 }
5991
5992 return false;
5993 }
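
/* For instance, both private_is_attribute_p ("packed", 6,
   get_identifier ("packed")) and private_is_attribute_p ("packed", 6,
   get_identifier ("__packed__")) return true: the second identifier is
   exactly four characters longer and matches once the leading and
   trailing "__" are skipped.  */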
5994
5995 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5996 of ATTR_NAME, and LIST is not NULL_TREE. */
5997 tree
5998 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5999 {
6000 while (list)
6001 {
6002 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6003
6004 if (ident_len == attr_len)
6005 {
6006 if (!strcmp (attr_name,
6007 IDENTIFIER_POINTER (get_attribute_name (list))))
6008 break;
6009 }
6010 /* TODO: If we made sure that attributes were stored in the
6011 canonical form without '__...__' (ie, as in 'text' as opposed
6012 to '__text__') then we could avoid the following case. */
6013 else if (ident_len == attr_len + 4)
6014 {
6015 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6016 if (p[0] == '_' && p[1] == '_'
6017 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6018 && strncmp (attr_name, p + 2, attr_len) == 0)
6019 break;
6020 }
6021 list = TREE_CHAIN (list);
6022 }
6023
6024 return list;
6025 }
6026
6027 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6028 return a pointer to the first list element whose attribute name
6029 starts with ATTR_NAME, or NULL_TREE if there is none. ATTR_NAME
6030 must be in the form 'text' (not '__text__'). */
6031
6032 tree
6033 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6034 tree list)
6035 {
6036 while (list)
6037 {
6038 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6039
6040 if (attr_len > ident_len)
6041 {
6042 list = TREE_CHAIN (list);
6043 continue;
6044 }
6045
6046 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6047
6048 if (strncmp (attr_name, p, attr_len) == 0)
6049 break;
6050
6051 /* TODO: If we made sure that attributes were stored in the
6052 canonical form without '__...__' (ie, as in 'text' as opposed
6053 to '__text__') then we could avoid the following case. */
6054 if (p[0] == '_' && p[1] == '_'
6055 && strncmp (attr_name, p + 2, attr_len) == 0)
6056 break;
6057
6058 list = TREE_CHAIN (list);
6059 }
6060
6061 return list;
6062 }
6063
6064
6065 /* A variant of lookup_attribute() that can be used with an identifier
6066 as the first argument, and where the identifier can be either
6067 'text' or '__text__'.
6068
6069 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6070 return a pointer to the attribute's list element if the attribute
6071 is part of the list, or NULL_TREE if not found. If the attribute
6072 appears more than once, this only returns the first occurrence; the
6073 TREE_CHAIN of the return value should be passed back in if further
6074 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6075 can be in the form 'text' or '__text__'. */
6076 static tree
6077 lookup_ident_attribute (tree attr_identifier, tree list)
6078 {
6079 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6080
6081 while (list)
6082 {
6083 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6084 == IDENTIFIER_NODE);
6085
6086 if (cmp_attrib_identifiers (attr_identifier,
6087 get_attribute_name (list)))
6088 /* Found it. */
6089 break;
6090 list = TREE_CHAIN (list);
6091 }
6092
6093 return list;
6094 }
6095
6096 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6097 modified list. */
6098
6099 tree
6100 remove_attribute (const char *attr_name, tree list)
6101 {
6102 tree *p;
6103 size_t attr_len = strlen (attr_name);
6104
6105 gcc_checking_assert (attr_name[0] != '_');
6106
6107 for (p = &list; *p; )
6108 {
6109 tree l = *p;
6110 /* TODO: If we were storing attributes in normalized form, here
6111 we could use a simple strcmp(). */
6112 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6113 *p = TREE_CHAIN (l);
6114 else
6115 p = &TREE_CHAIN (l);
6116 }
6117
6118 return list;
6119 }
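
/* For example, given a DECL_ATTRIBUTES list built from
   __attribute__ ((dllimport, aligned (8))), the call
   remove_attribute ("dllimport", list) returns a list holding only the
   "aligned" attribute; merge_dllimport_decl_attributes below uses
   exactly this to drop an obsolete dllimport.  */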
6120
6121 /* Return an attribute list that is the union of a1 and a2. */
6122
6123 tree
6124 merge_attributes (tree a1, tree a2)
6125 {
6126 tree attributes;
6127
6128 /* Either one unset? Take the set one. */
6129
6130 if ((attributes = a1) == 0)
6131 attributes = a2;
6132
6133 /* One that completely contains the other? Take it. */
6134
6135 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6136 {
6137 if (attribute_list_contained (a2, a1))
6138 attributes = a2;
6139 else
6140 {
6141 /* Pick the longest list, and add the other list's entries not already in it. */
6142
6143 if (list_length (a1) < list_length (a2))
6144 attributes = a2, a2 = a1;
6145
6146 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6147 {
6148 tree a;
6149 for (a = lookup_ident_attribute (get_attribute_name (a2),
6150 attributes);
6151 a != NULL_TREE && !attribute_value_equal (a, a2);
6152 a = lookup_ident_attribute (get_attribute_name (a2),
6153 TREE_CHAIN (a)))
6154 ;
6155 if (a == NULL_TREE)
6156 {
6157 a1 = copy_node (a2);
6158 TREE_CHAIN (a1) = attributes;
6159 attributes = a1;
6160 }
6161 }
6162 }
6163 }
6164 return attributes;
6165 }
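
/* As an illustration, merging the lists for __attribute__ ((packed))
   and __attribute__ ((packed, aligned (4))) yields the latter list
   unchanged: the shorter list is fully contained in the longer one,
   so the longer one is simply returned.  */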
6166
6167 /* Given types T1 and T2, merge their attributes and return
6168 the result. */
6169
6170 tree
6171 merge_type_attributes (tree t1, tree t2)
6172 {
6173 return merge_attributes (TYPE_ATTRIBUTES (t1),
6174 TYPE_ATTRIBUTES (t2));
6175 }
6176
6177 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6178 the result. */
6179
6180 tree
6181 merge_decl_attributes (tree olddecl, tree newdecl)
6182 {
6183 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6184 DECL_ATTRIBUTES (newdecl));
6185 }
6186
6187 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6188
6189 /* Specialization of merge_decl_attributes for various Windows targets.
6190
6191 This handles the following situation:
6192
6193 __declspec (dllimport) int foo;
6194 int foo;
6195
6196 The second instance of `foo' nullifies the dllimport. */
6197
6198 tree
6199 merge_dllimport_decl_attributes (tree old, tree new_tree)
6200 {
6201 tree a;
6202 int delete_dllimport_p = 1;
6203
6204 /* What we need to do here is remove from `old' dllimport if it doesn't
6205 appear in `new'. dllimport behaves like extern: if a declaration is
6206 marked dllimport and a definition appears later, then the object
6207 is not dllimport'd. We also remove a `new' dllimport if the old list
6208 contains dllexport: dllexport always overrides dllimport, regardless
6209 of the order of declaration. */
6210 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6211 delete_dllimport_p = 0;
6212 else if (DECL_DLLIMPORT_P (new_tree)
6213 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6214 {
6215 DECL_DLLIMPORT_P (new_tree) = 0;
6216 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6217 "dllimport ignored", new_tree);
6218 }
6219 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6220 {
6221 /* Warn about overriding a symbol that has already been used, e.g.:
6222 extern int __attribute__ ((dllimport)) foo;
6223 int* bar () {return &foo;}
6224 int foo;
6225 */
6226 if (TREE_USED (old))
6227 {
6228 warning (0, "%q+D redeclared without dllimport attribute "
6229 "after being referenced with dll linkage", new_tree);
6230 /* If we have used a variable's address with dllimport linkage,
6231 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6232 decl may already have had TREE_CONSTANT computed.
6233 We still remove the attribute so that assembler code refers
6234 to '&foo' rather than '_imp__foo'. */
6235 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6236 DECL_DLLIMPORT_P (new_tree) = 1;
6237 }
6238
6239 /* Let an inline definition silently override the external reference,
6240 but otherwise warn about attribute inconsistency. */
6241 else if (TREE_CODE (new_tree) == VAR_DECL
6242 || !DECL_DECLARED_INLINE_P (new_tree))
6243 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6244 "previous dllimport ignored", new_tree);
6245 }
6246 else
6247 delete_dllimport_p = 0;
6248
6249 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6250
6251 if (delete_dllimport_p)
6252 a = remove_attribute ("dllimport", a);
6253
6254 return a;
6255 }
6256
6257 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6258 struct attribute_spec.handler. */
6259
6260 tree
6261 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6262 bool *no_add_attrs)
6263 {
6264 tree node = *pnode;
6265 bool is_dllimport;
6266
6267 /* These attributes may apply to structure and union types being created,
6268 but otherwise should pass to the declaration involved. */
6269 if (!DECL_P (node))
6270 {
6271 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6272 | (int) ATTR_FLAG_ARRAY_NEXT))
6273 {
6274 *no_add_attrs = true;
6275 return tree_cons (name, args, NULL_TREE);
6276 }
6277 if (TREE_CODE (node) == RECORD_TYPE
6278 || TREE_CODE (node) == UNION_TYPE)
6279 {
6280 node = TYPE_NAME (node);
6281 if (!node)
6282 return NULL_TREE;
6283 }
6284 else
6285 {
6286 warning (OPT_Wattributes, "%qE attribute ignored",
6287 name);
6288 *no_add_attrs = true;
6289 return NULL_TREE;
6290 }
6291 }
6292
6293 if (TREE_CODE (node) != FUNCTION_DECL
6294 && TREE_CODE (node) != VAR_DECL
6295 && TREE_CODE (node) != TYPE_DECL)
6296 {
6297 *no_add_attrs = true;
6298 warning (OPT_Wattributes, "%qE attribute ignored",
6299 name);
6300 return NULL_TREE;
6301 }
6302
6303 if (TREE_CODE (node) == TYPE_DECL
6304 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6305 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6306 {
6307 *no_add_attrs = true;
6308 warning (OPT_Wattributes, "%qE attribute ignored",
6309 name);
6310 return NULL_TREE;
6311 }
6312
6313 is_dllimport = is_attribute_p ("dllimport", name);
6314
6315 /* Report error on dllimport ambiguities seen now before they cause
6316 any damage. */
6317 if (is_dllimport)
6318 {
6319 /* Honor any target-specific overrides. */
6320 if (!targetm.valid_dllimport_attribute_p (node))
6321 *no_add_attrs = true;
6322
6323 else if (TREE_CODE (node) == FUNCTION_DECL
6324 && DECL_DECLARED_INLINE_P (node))
6325 {
6326 warning (OPT_Wattributes, "inline function %q+D declared as "
6327 " dllimport: attribute ignored", node);
6328 *no_add_attrs = true;
6329 }
6330 /* Like MS, treat definition of dllimported variables and
6331 non-inlined functions on declaration as syntax errors. */
6332 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6333 {
6334 error ("function %q+D definition is marked dllimport", node);
6335 *no_add_attrs = true;
6336 }
6337
6338 else if (TREE_CODE (node) == VAR_DECL)
6339 {
6340 if (DECL_INITIAL (node))
6341 {
6342 error ("variable %q+D definition is marked dllimport",
6343 node);
6344 *no_add_attrs = true;
6345 }
6346
6347 /* `extern' needn't be specified with dllimport.
6348 Specify `extern' now and hope for the best. Sigh. */
6349 DECL_EXTERNAL (node) = 1;
6350 /* Also, implicitly give global scope to dllimport'd variables
6351 declared within a function, unless declared static. */
6352 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6353 TREE_PUBLIC (node) = 1;
6354 }
6355
6356 if (*no_add_attrs == false)
6357 DECL_DLLIMPORT_P (node) = 1;
6358 }
6359 else if (TREE_CODE (node) == FUNCTION_DECL
6360 && DECL_DECLARED_INLINE_P (node)
6361 && flag_keep_inline_dllexport)
6362 /* An exported function, even if inline, must be emitted. */
6363 DECL_EXTERNAL (node) = 0;
6364
6365 /* Report error if symbol is not accessible at global scope. */
6366 if (!TREE_PUBLIC (node)
6367 && (TREE_CODE (node) == VAR_DECL
6368 || TREE_CODE (node) == FUNCTION_DECL))
6369 {
6370 error ("external linkage required for symbol %q+D because of "
6371 "%qE attribute", node, name);
6372 *no_add_attrs = true;
6373 }
6374
6375 /* A dllexport'd entity must have default visibility so that other
6376 program units (shared libraries or the main executable) can see
6377 it. A dllimport'd entity must have default visibility so that
6378 the linker knows that undefined references within this program
6379 unit can be resolved by the dynamic linker. */
6380 if (!*no_add_attrs)
6381 {
6382 if (DECL_VISIBILITY_SPECIFIED (node)
6383 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6384 error ("%qE implies default visibility, but %qD has already "
6385 "been declared with a different visibility",
6386 name, node);
6387 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6388 DECL_VISIBILITY_SPECIFIED (node) = 1;
6389 }
6390
6391 return NULL_TREE;
6392 }
6393
6394 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6395 \f
6396 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6397 of the various TYPE_QUAL values. */
6398
6399 static void
6400 set_type_quals (tree type, int type_quals)
6401 {
6402 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6403 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6404 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6405 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6406 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6407 }
6408
6409 /* Returns true iff unqualified CAND and BASE are equivalent. */
6410
6411 bool
6412 check_base_type (const_tree cand, const_tree base)
6413 {
6414 return (TYPE_NAME (cand) == TYPE_NAME (base)
6415 /* Apparently this is needed for Objective-C. */
6416 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6417 /* Check alignment. */
6418 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6419 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6420 TYPE_ATTRIBUTES (base)));
6421 }
6422
6423 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6424
6425 bool
6426 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6427 {
6428 return (TYPE_QUALS (cand) == type_quals
6429 && check_base_type (cand, base));
6430 }
6431
6432 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6433
6434 static bool
6435 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6436 {
6437 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6438 && TYPE_NAME (cand) == TYPE_NAME (base)
6439 /* Apparently this is needed for Objective-C. */
6440 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6441 /* Check alignment. */
6442 && TYPE_ALIGN (cand) == align
6443 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6444 TYPE_ATTRIBUTES (base)));
6445 }
6446
6447 /* If TYPE matches the size of one of the built-in atomic types, return
6448 that core atomic type; otherwise return NULL_TREE. */
6449
6450 static tree
6451 find_atomic_core_type (tree type)
6452 {
6453 tree base_atomic_type;
6454
6455 /* Only handle complete types. */
6456 if (TYPE_SIZE (type) == NULL_TREE)
6457 return NULL_TREE;
6458
6459 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6460 switch (type_size)
6461 {
6462 case 8:
6463 base_atomic_type = atomicQI_type_node;
6464 break;
6465
6466 case 16:
6467 base_atomic_type = atomicHI_type_node;
6468 break;
6469
6470 case 32:
6471 base_atomic_type = atomicSI_type_node;
6472 break;
6473
6474 case 64:
6475 base_atomic_type = atomicDI_type_node;
6476 break;
6477
6478 case 128:
6479 base_atomic_type = atomicTI_type_node;
6480 break;
6481
6482 default:
6483 base_atomic_type = NULL_TREE;
6484 }
6485
6486 return base_atomic_type;
6487 }
6488
6489 /* Return a version of the TYPE, qualified as indicated by the
6490 TYPE_QUALS, if one exists. If no qualified version exists yet,
6491 return NULL_TREE. */
6492
6493 tree
6494 get_qualified_type (tree type, int type_quals)
6495 {
6496 tree t;
6497
6498 if (TYPE_QUALS (type) == type_quals)
6499 return type;
6500
6501 /* Search the chain of variants to see if there is already one there just
6502 like the one we need to have. If so, use that existing one. We must
6503 preserve the TYPE_NAME, since there is code that depends on this. */
6504 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6505 if (check_qualified_type (t, type, type_quals))
6506 return t;
6507
6508 return NULL_TREE;
6509 }
6510
6511 /* Like get_qualified_type, but creates the type if it does not
6512 exist. This function never returns NULL_TREE. */
6513
6514 tree
6515 build_qualified_type (tree type, int type_quals)
6516 {
6517 tree t;
6518
6519 /* See if we already have the appropriate qualified variant. */
6520 t = get_qualified_type (type, type_quals);
6521
6522 /* If not, build it. */
6523 if (!t)
6524 {
6525 t = build_variant_type_copy (type);
6526 set_type_quals (t, type_quals);
6527
6528 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6529 {
6530 /* See if this object can map to a basic atomic type. */
6531 tree atomic_type = find_atomic_core_type (type);
6532 if (atomic_type)
6533 {
6534 /* Ensure the alignment of this type is compatible with
6535 the required alignment of the atomic type. */
6536 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6537 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6538 }
6539 }
6540
6541 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6542 /* Propagate structural equality. */
6543 SET_TYPE_STRUCTURAL_EQUALITY (t);
6544 else if (TYPE_CANONICAL (type) != type)
6545 /* Build the underlying canonical type, since it is different
6546 from TYPE. */
6547 {
6548 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6549 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6550 }
6551 else
6552 /* T is its own canonical type. */
6553 TYPE_CANONICAL (t) = t;
6554
6555 }
6556
6557 return t;
6558 }
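
/* Typical use: build_qualified_type (integer_type_node,
   TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE) returns the variant of "int"
   that is both const and volatile, building it and chaining it onto
   the main variant the first time, and reusing the cached variant on
   every later request.  */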
6559
6560 /* Create a variant of TYPE with alignment ALIGN. */
6561
6562 tree
6563 build_aligned_type (tree type, unsigned int align)
6564 {
6565 tree t;
6566
6567 if (TYPE_PACKED (type)
6568 || TYPE_ALIGN (type) == align)
6569 return type;
6570
6571 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6572 if (check_aligned_type (t, type, align))
6573 return t;
6574
6575 t = build_variant_type_copy (type);
6576 TYPE_ALIGN (t) = align;
6577
6578 return t;
6579 }
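
/* For instance, build_aligned_type (char_type_node, 32) yields a
   variant of "char" whose TYPE_ALIGN is 32 bits, reusing an existing
   variant with that alignment if one is already on the chain, and
   returning the input type untouched when it is packed or already has
   the requested alignment.  */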
6580
6581 /* Create a new distinct copy of TYPE. The new type is made its own
6582 MAIN_VARIANT. If TYPE requires structural equality checks, the
6583 resulting type requires structural equality checks; otherwise, its
6584 TYPE_CANONICAL points to itself. */
6585
6586 tree
6587 build_distinct_type_copy (tree type)
6588 {
6589 tree t = copy_node (type);
6590
6591 TYPE_POINTER_TO (t) = 0;
6592 TYPE_REFERENCE_TO (t) = 0;
6593
6594 /* Set the canonical type either to a new equivalence class, or
6595 propagate the need for structural equality checks. */
6596 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6597 SET_TYPE_STRUCTURAL_EQUALITY (t);
6598 else
6599 TYPE_CANONICAL (t) = t;
6600
6601 /* Make it its own variant. */
6602 TYPE_MAIN_VARIANT (t) = t;
6603 TYPE_NEXT_VARIANT (t) = 0;
6604
6605 /* We do not record methods in type copies nor variants
6606 so we do not need to keep them up to date when new method
6607 is inserted. */
6608 if (RECORD_OR_UNION_TYPE_P (t))
6609 TYPE_METHODS (t) = NULL_TREE;
6610
6611 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6612 whose TREE_TYPE is not t. This can also happen in the Ada
6613 frontend when using subtypes. */
6614
6615 return t;
6616 }
6617
6618 /* Create a new variant of TYPE, equivalent but distinct. This is so
6619 the caller can modify it. TYPE_CANONICAL for the return type will
6620 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6621 are considered equal by the language itself (or that both types
6622 require structural equality checks). */
6623
6624 tree
6625 build_variant_type_copy (tree type)
6626 {
6627 tree t, m = TYPE_MAIN_VARIANT (type);
6628
6629 t = build_distinct_type_copy (type);
6630
6631 /* Since we're building a variant, assume that it is a non-semantic
6632 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6633 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6634
6635 /* Add the new type to the chain of variants of TYPE. */
6636 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6637 TYPE_NEXT_VARIANT (m) = t;
6638 TYPE_MAIN_VARIANT (t) = m;
6639
6640 return t;
6641 }
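
/* To contrast the two copies above: build_distinct_type_copy starts a
   fresh main variant (and, unless structural equality is required, a
   fresh canonical type), so the result is a semantically separate
   type; build_variant_type_copy keeps the original TYPE_MAIN_VARIANT
   and TYPE_CANONICAL, so the result is just another spelling of the
   same type, which is what build_qualified_type and build_aligned_type
   rely on.  */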
6642 \f
6643 /* Return true if the FROM trees in both tree maps are equal. */
6644
6645 int
6646 tree_map_base_eq (const void *va, const void *vb)
6647 {
6648 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6649 *const b = (const struct tree_map_base *) vb;
6650 return (a->from == b->from);
6651 }
6652
6653 /* Hash a from tree in a tree_map_base. */
6654
6655 unsigned int
6656 tree_map_base_hash (const void *item)
6657 {
6658 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6659 }
6660
6661 /* Return true if this tree map structure is marked for garbage collection
6662 purposes. We simply return true if the from tree is marked, so that this
6663 structure goes away when the from tree goes away. */
6664
6665 int
6666 tree_map_base_marked_p (const void *p)
6667 {
6668 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6669 }
6670
6671 /* Hash a from tree in a tree_map. */
6672
6673 unsigned int
6674 tree_map_hash (const void *item)
6675 {
6676 return (((const struct tree_map *) item)->hash);
6677 }
6678
6679 /* Hash a from tree in a tree_decl_map. */
6680
6681 unsigned int
6682 tree_decl_map_hash (const void *item)
6683 {
6684 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6685 }
6686
6687 /* Return the initialization priority for DECL. */
6688
6689 priority_type
6690 decl_init_priority_lookup (tree decl)
6691 {
6692 symtab_node *snode = symtab_node::get (decl);
6693
6694 if (!snode)
6695 return DEFAULT_INIT_PRIORITY;
6696 return
6697 snode->get_init_priority ();
6698 }
6699
6700 /* Return the finalization priority for DECL. */
6701
6702 priority_type
6703 decl_fini_priority_lookup (tree decl)
6704 {
6705 cgraph_node *node = cgraph_node::get (decl);
6706
6707 if (!node)
6708 return DEFAULT_INIT_PRIORITY;
6709 return
6710 node->get_fini_priority ();
6711 }
6712
6713 /* Set the initialization priority for DECL to PRIORITY. */
6714
6715 void
6716 decl_init_priority_insert (tree decl, priority_type priority)
6717 {
6718 struct symtab_node *snode;
6719
6720 if (priority == DEFAULT_INIT_PRIORITY)
6721 {
6722 snode = symtab_node::get (decl);
6723 if (!snode)
6724 return;
6725 }
6726 else if (TREE_CODE (decl) == VAR_DECL)
6727 snode = varpool_node::get_create (decl);
6728 else
6729 snode = cgraph_node::get_create (decl);
6730 snode->set_init_priority (priority);
6731 }
6732
6733 /* Set the finalization priority for DECL to PRIORITY. */
6734
6735 void
6736 decl_fini_priority_insert (tree decl, priority_type priority)
6737 {
6738 struct cgraph_node *node;
6739
6740 if (priority == DEFAULT_INIT_PRIORITY)
6741 {
6742 node = cgraph_node::get (decl);
6743 if (!node)
6744 return;
6745 }
6746 else
6747 node = cgraph_node::get_create (decl);
6748 node->set_fini_priority (priority);
6749 }
6750
6751 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6752
6753 static void
6754 print_debug_expr_statistics (void)
6755 {
6756 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6757 (long) debug_expr_for_decl->size (),
6758 (long) debug_expr_for_decl->elements (),
6759 debug_expr_for_decl->collisions ());
6760 }
6761
6762 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6763
6764 static void
6765 print_value_expr_statistics (void)
6766 {
6767 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6768 (long) value_expr_for_decl->size (),
6769 (long) value_expr_for_decl->elements (),
6770 value_expr_for_decl->collisions ());
6771 }
6772
6773 /* Lookup a debug expression for FROM, and return it if we find one. */
6774
6775 tree
6776 decl_debug_expr_lookup (tree from)
6777 {
6778 struct tree_decl_map *h, in;
6779 in.base.from = from;
6780
6781 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6782 if (h)
6783 return h->to;
6784 return NULL_TREE;
6785 }
6786
6787 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6788
6789 void
6790 decl_debug_expr_insert (tree from, tree to)
6791 {
6792 struct tree_decl_map *h;
6793
6794 h = ggc_alloc<tree_decl_map> ();
6795 h->base.from = from;
6796 h->to = to;
6797 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6798 }
6799
6800 /* Lookup a value expression for FROM, and return it if we find one. */
6801
6802 tree
6803 decl_value_expr_lookup (tree from)
6804 {
6805 struct tree_decl_map *h, in;
6806 in.base.from = from;
6807
6808 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6809 if (h)
6810 return h->to;
6811 return NULL_TREE;
6812 }
6813
6814 /* Insert a mapping FROM->TO in the value expression hashtable. */
6815
6816 void
6817 decl_value_expr_insert (tree from, tree to)
6818 {
6819 struct tree_decl_map *h;
6820
6821 h = ggc_alloc<tree_decl_map> ();
6822 h->base.from = from;
6823 h->to = to;
6824 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6825 }
6826
6827 /* Lookup a vector of debug arguments for FROM, and return it if we
6828 find one. */
6829
6830 vec<tree, va_gc> **
6831 decl_debug_args_lookup (tree from)
6832 {
6833 struct tree_vec_map *h, in;
6834
6835 if (!DECL_HAS_DEBUG_ARGS_P (from))
6836 return NULL;
6837 gcc_checking_assert (debug_args_for_decl != NULL);
6838 in.base.from = from;
6839 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6840 if (h)
6841 return &h->to;
6842 return NULL;
6843 }
6844
6845 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6846 arguments hashtable. */
6847
6848 vec<tree, va_gc> **
6849 decl_debug_args_insert (tree from)
6850 {
6851 struct tree_vec_map *h;
6852 tree_vec_map **loc;
6853
6854 if (DECL_HAS_DEBUG_ARGS_P (from))
6855 return decl_debug_args_lookup (from);
6856 if (debug_args_for_decl == NULL)
6857 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6858 h = ggc_alloc<tree_vec_map> ();
6859 h->base.from = from;
6860 h->to = NULL;
6861 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6862 *loc = h;
6863 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6864 return &h->to;
6865 }
6866
6867 /* Hashing of types so that we don't make duplicates.
6868 The entry point is `type_hash_canon'. */
6869
6870 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6871 with types in the TREE_VALUE slots), by adding the hash codes
6872 of the individual types. */
6873
6874 static void
6875 type_hash_list (const_tree list, inchash::hash &hstate)
6876 {
6877 const_tree tail;
6878
6879 for (tail = list; tail; tail = TREE_CHAIN (tail))
6880 if (TREE_VALUE (tail) != error_mark_node)
6881 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6882 }
6883
6884 /* These are the Hashtable callback functions. */
6885
6886 /* Returns true iff the types are equivalent. */
6887
6888 bool
6889 type_cache_hasher::equal (type_hash *a, type_hash *b)
6890 {
6891 /* First test the things that are the same for all types. */
6892 if (a->hash != b->hash
6893 || TREE_CODE (a->type) != TREE_CODE (b->type)
6894 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6895 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6896 TYPE_ATTRIBUTES (b->type))
6897 || (TREE_CODE (a->type) != COMPLEX_TYPE
6898 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6899 return 0;
6900
6901 /* Be careful about comparing arrays before and after the element type
6902 has been completed; don't compare TYPE_ALIGN unless both types are
6903 complete. */
6904 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6905 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6906 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6907 return 0;
6908
6909 switch (TREE_CODE (a->type))
6910 {
6911 case VOID_TYPE:
6912 case COMPLEX_TYPE:
6913 case POINTER_TYPE:
6914 case REFERENCE_TYPE:
6915 case NULLPTR_TYPE:
6916 return 1;
6917
6918 case VECTOR_TYPE:
6919 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6920
6921 case ENUMERAL_TYPE:
6922 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6923 && !(TYPE_VALUES (a->type)
6924 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6925 && TYPE_VALUES (b->type)
6926 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6927 && type_list_equal (TYPE_VALUES (a->type),
6928 TYPE_VALUES (b->type))))
6929 return 0;
6930
6931 /* ... fall through ... */
6932
6933 case INTEGER_TYPE:
6934 case REAL_TYPE:
6935 case BOOLEAN_TYPE:
6936 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6937 return false;
6938 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6939 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6940 TYPE_MAX_VALUE (b->type)))
6941 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6942 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6943 TYPE_MIN_VALUE (b->type))));
6944
6945 case FIXED_POINT_TYPE:
6946 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6947
6948 case OFFSET_TYPE:
6949 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6950
6951 case METHOD_TYPE:
6952 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6953 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6954 || (TYPE_ARG_TYPES (a->type)
6955 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6956 && TYPE_ARG_TYPES (b->type)
6957 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6958 && type_list_equal (TYPE_ARG_TYPES (a->type),
6959 TYPE_ARG_TYPES (b->type)))))
6960 break;
6961 return 0;
6962 case ARRAY_TYPE:
6963 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6964
6965 case RECORD_TYPE:
6966 case UNION_TYPE:
6967 case QUAL_UNION_TYPE:
6968 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6969 || (TYPE_FIELDS (a->type)
6970 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6971 && TYPE_FIELDS (b->type)
6972 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6973 && type_list_equal (TYPE_FIELDS (a->type),
6974 TYPE_FIELDS (b->type))));
6975
6976 case FUNCTION_TYPE:
6977 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6978 || (TYPE_ARG_TYPES (a->type)
6979 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6980 && TYPE_ARG_TYPES (b->type)
6981 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6982 && type_list_equal (TYPE_ARG_TYPES (a->type),
6983 TYPE_ARG_TYPES (b->type))))
6984 break;
6985 return 0;
6986
6987 default:
6988 return 0;
6989 }
6990
6991 if (lang_hooks.types.type_hash_eq != NULL)
6992 return lang_hooks.types.type_hash_eq (a->type, b->type);
6993
6994 return 1;
6995 }
6996
6997 /* Given TYPE, and HASHCODE its hash code, return the canonical
6998 object for an identical type if one already exists.
6999 Otherwise, return TYPE, and record it as the canonical object.
7000
7001 To use this function, first create a type of the sort you want.
7002 Then compute its hash code from the fields of the type that
7003 make it different from other similar types.
7004 Then call this function and use the value. */
7005
7006 tree
7007 type_hash_canon (unsigned int hashcode, tree type)
7008 {
7009 type_hash in;
7010 type_hash **loc;
7011
7012 /* The hash table only contains main variants, so ensure that's what we're
7013 being passed. */
7014 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7015
7016 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7017 must call that routine before comparing TYPE_ALIGNs. */
7018 layout_type (type);
7019
7020 in.hash = hashcode;
7021 in.type = type;
7022
7023 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7024 if (*loc)
7025 {
7026 tree t1 = ((type_hash *) *loc)->type;
7027 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7028 if (GATHER_STATISTICS)
7029 {
7030 tree_code_counts[(int) TREE_CODE (type)]--;
7031 tree_node_counts[(int) t_kind]--;
7032 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7033 }
7034 return t1;
7035 }
7036 else
7037 {
7038 struct type_hash *h;
7039
7040 h = ggc_alloc<type_hash> ();
7041 h->hash = hashcode;
7042 h->type = type;
7043 *loc = h;
7044
7045 return type;
7046 }
7047 }
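
/* A sketch of the intended calling sequence; FOO_TYPE and elt_type are
   placeholders, and the fields that feed the hash vary by type code:

     tree t = make_node (FOO_TYPE);
     TREE_TYPE (t) = elt_type;
     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (elt_type));
     t = type_hash_canon (hstate.end (), t);

   If an identical type was interned earlier, the freshly built node is
   dropped and the existing canonical node is returned instead.  */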
7048
7049 static void
7050 print_type_hash_statistics (void)
7051 {
7052 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7053 (long) type_hash_table->size (),
7054 (long) type_hash_table->elements (),
7055 type_hash_table->collisions ());
7056 }
7057
7058 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7059 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7060 by adding the hash codes of the individual attributes. */
7061
7062 static void
7063 attribute_hash_list (const_tree list, inchash::hash &hstate)
7064 {
7065 const_tree tail;
7066
7067 for (tail = list; tail; tail = TREE_CHAIN (tail))
7068 /* ??? Do we want to add in TREE_VALUE too? */
7069 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7070 }
7071
7072 /* Given two lists of attributes, return true if list l2 is
7073 equivalent to l1. */
7074
7075 int
7076 attribute_list_equal (const_tree l1, const_tree l2)
7077 {
7078 if (l1 == l2)
7079 return 1;
7080
7081 return attribute_list_contained (l1, l2)
7082 && attribute_list_contained (l2, l1);
7083 }
7084
7085 /* Given two lists of attributes, return true if list L2 is
7086 completely contained within L1. */
7087 /* ??? This would be faster if attribute names were stored in a canonicalized
7088 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7089 must be used to show these elements are equivalent (which they are). */
7090 /* ??? It's not clear that attributes with arguments will always be handled
7091 correctly. */
7092
7093 int
7094 attribute_list_contained (const_tree l1, const_tree l2)
7095 {
7096 const_tree t1, t2;
7097
7098 /* First check the obvious, maybe the lists are identical. */
7099 if (l1 == l2)
7100 return 1;
7101
7102 /* Maybe the lists are similar. */
7103 for (t1 = l1, t2 = l2;
7104 t1 != 0 && t2 != 0
7105 && get_attribute_name (t1) == get_attribute_name (t2)
7106 && TREE_VALUE (t1) == TREE_VALUE (t2);
7107 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7108 ;
7109
7110 /* Maybe the lists are equal. */
7111 if (t1 == 0 && t2 == 0)
7112 return 1;
7113
7114 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7115 {
7116 const_tree attr;
7117 /* This CONST_CAST is okay because lookup_attribute does not
7118 modify its argument and the return value is assigned to a
7119 const_tree. */
7120 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7121 CONST_CAST_TREE (l1));
7122 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7123 attr = lookup_ident_attribute (get_attribute_name (t2),
7124 TREE_CHAIN (attr)))
7125 ;
7126
7127 if (attr == NULL_TREE)
7128 return 0;
7129 }
7130
7131 return 1;
7132 }
7133
7134 /* Given two lists of types
7135 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7136 return 1 if the lists contain the same types in the same order.
7137 Also, the TREE_PURPOSEs must match. */
7138
7139 int
7140 type_list_equal (const_tree l1, const_tree l2)
7141 {
7142 const_tree t1, t2;
7143
7144 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7145 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7146 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7147 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7148 && (TREE_TYPE (TREE_PURPOSE (t1))
7149 == TREE_TYPE (TREE_PURPOSE (t2))))))
7150 return 0;
7151
7152 return t1 == t2;
7153 }
7154
7155 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7156 given by TYPE. If the argument list accepts variable arguments,
7157 then this function counts only the ordinary arguments. */
7158
7159 int
7160 type_num_arguments (const_tree type)
7161 {
7162 int i = 0;
7163 tree t;
7164
7165 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7166 /* If the function does not take a variable number of arguments,
7167 the last element in the list will have type `void'. */
7168 if (VOID_TYPE_P (TREE_VALUE (t)))
7169 break;
7170 else
7171 ++i;
7172
7173 return i;
7174 }
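
/* For example, for the FUNCTION_TYPE of "int f (int, char, ...)" this
   returns 2: the trailing ellipsis means the argument list has no
   terminating void node, and only the two named parameters are
   counted.  */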
7175
7176 /* Nonzero if integer constants T1 and T2
7177 represent the same constant value. */
7178
7179 int
7180 tree_int_cst_equal (const_tree t1, const_tree t2)
7181 {
7182 if (t1 == t2)
7183 return 1;
7184
7185 if (t1 == 0 || t2 == 0)
7186 return 0;
7187
7188 if (TREE_CODE (t1) == INTEGER_CST
7189 && TREE_CODE (t2) == INTEGER_CST
7190 && wi::to_widest (t1) == wi::to_widest (t2))
7191 return 1;
7192
7193 return 0;
7194 }
7195
7196 /* Return true if T is an INTEGER_CST whose numerical value (extended
7197 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7198
7199 bool
7200 tree_fits_shwi_p (const_tree t)
7201 {
7202 return (t != NULL_TREE
7203 && TREE_CODE (t) == INTEGER_CST
7204 && wi::fits_shwi_p (wi::to_widest (t)));
7205 }
7206
7207 /* Return true if T is an INTEGER_CST whose numerical value (extended
7208 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7209
7210 bool
7211 tree_fits_uhwi_p (const_tree t)
7212 {
7213 return (t != NULL_TREE
7214 && TREE_CODE (t) == INTEGER_CST
7215 && wi::fits_uhwi_p (wi::to_widest (t)));
7216 }
7217
7218 /* T is an INTEGER_CST whose numerical value (extended according to
7219 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7220 HOST_WIDE_INT. */
7221
7222 HOST_WIDE_INT
7223 tree_to_shwi (const_tree t)
7224 {
7225 gcc_assert (tree_fits_shwi_p (t));
7226 return TREE_INT_CST_LOW (t);
7227 }
7228
7229 /* T is an INTEGER_CST whose numerical value (extended according to
7230 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7231 HOST_WIDE_INT. */
7232
7233 unsigned HOST_WIDE_INT
7234 tree_to_uhwi (const_tree t)
7235 {
7236 gcc_assert (tree_fits_uhwi_p (t));
7237 return TREE_INT_CST_LOW (t);
7238 }
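
/* The tree_fits_*hwi_p predicates above are meant to guard these
   accessors, e.g. (size_tree and bytes being placeholders):

     if (tree_fits_uhwi_p (size_tree))
       bytes = tree_to_uhwi (size_tree);

   Calling tree_to_uhwi on a value that does not fit trips the
   assertion rather than silently truncating.  */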
7239
7240 /* Return the most significant (sign) bit of T. */
7241
7242 int
7243 tree_int_cst_sign_bit (const_tree t)
7244 {
7245 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7246
7247 return wi::extract_uhwi (t, bitno, 1);
7248 }
7249
7250 /* Return an indication of the sign of the integer constant T.
7251 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7252 Note that -1 will never be returned if T's type is unsigned. */
7253
7254 int
7255 tree_int_cst_sgn (const_tree t)
7256 {
7257 if (wi::eq_p (t, 0))
7258 return 0;
7259 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7260 return 1;
7261 else if (wi::neg_p (t))
7262 return -1;
7263 else
7264 return 1;
7265 }
7266
7267 /* Return the minimum number of bits needed to represent VALUE in a
7268 signed or unsigned type; SGN says which. */
7269
7270 unsigned int
7271 tree_int_cst_min_precision (tree value, signop sgn)
7272 {
7273 /* If the value is negative, compute its negative minus 1. The latter
7274 adjustment is because the absolute value of the largest negative value
7275 is one larger than the largest positive value. This is equivalent to
7276 a bit-wise negation, so use that operation instead. */
7277
7278 if (tree_int_cst_sgn (value) < 0)
7279 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7280
7281 /* Return the number of bits needed, taking into account the fact
7282 that we need one more bit for a signed than unsigned type.
7283 If VALUE is 0 or -1, the minimum precision is 1 no matter
7284 whether SGN is SIGNED or UNSIGNED. */
7285
7286 if (integer_zerop (value))
7287 return 1;
7288 else
7289 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7290 }
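
/* Worked examples: for VALUE 5, tree_floor_log2 returns 2, so the
   result is 3 bits unsigned and 4 bits signed; for VALUE -3, the
   bit-wise negation is 2, giving 1 + 1 + 1 = 3 bits signed, the
   narrowest signed width (range -4..3) that can represent -3.  */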
7291
7292 /* Return truthvalue of whether T1 is the same tree structure as T2.
7293 Return 1 if they are the same.
7294 Return 0 if they are understandably different.
7295 Return -1 if either contains tree structure not understood by
7296 this function. */
7297
7298 int
7299 simple_cst_equal (const_tree t1, const_tree t2)
7300 {
7301 enum tree_code code1, code2;
7302 int cmp;
7303 int i;
7304
7305 if (t1 == t2)
7306 return 1;
7307 if (t1 == 0 || t2 == 0)
7308 return 0;
7309
7310 code1 = TREE_CODE (t1);
7311 code2 = TREE_CODE (t2);
7312
7313 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7314 {
7315 if (CONVERT_EXPR_CODE_P (code2)
7316 || code2 == NON_LVALUE_EXPR)
7317 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7318 else
7319 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7320 }
7321
7322 else if (CONVERT_EXPR_CODE_P (code2)
7323 || code2 == NON_LVALUE_EXPR)
7324 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7325
7326 if (code1 != code2)
7327 return 0;
7328
7329 switch (code1)
7330 {
7331 case INTEGER_CST:
7332 return wi::to_widest (t1) == wi::to_widest (t2);
7333
7334 case REAL_CST:
7335 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7336
7337 case FIXED_CST:
7338 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7339
7340 case STRING_CST:
7341 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7342 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7343 TREE_STRING_LENGTH (t1)));
7344
7345 case CONSTRUCTOR:
7346 {
7347 unsigned HOST_WIDE_INT idx;
7348 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7349 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7350
7351 if (vec_safe_length (v1) != vec_safe_length (v2))
7352 return false;
7353
7354 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7355 /* ??? Should we handle also fields here? */
7356 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7357 return false;
7358 return true;
7359 }
7360
7361 case SAVE_EXPR:
7362 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7363
7364 case CALL_EXPR:
7365 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7366 if (cmp <= 0)
7367 return cmp;
7368 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7369 return 0;
7370 {
7371 const_tree arg1, arg2;
7372 const_call_expr_arg_iterator iter1, iter2;
7373 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7374 arg2 = first_const_call_expr_arg (t2, &iter2);
7375 arg1 && arg2;
7376 arg1 = next_const_call_expr_arg (&iter1),
7377 arg2 = next_const_call_expr_arg (&iter2))
7378 {
7379 cmp = simple_cst_equal (arg1, arg2);
7380 if (cmp <= 0)
7381 return cmp;
7382 }
7383 return arg1 == arg2;
7384 }
7385
7386 case TARGET_EXPR:
7387 /* Special case: if either target is an unallocated VAR_DECL,
7388 it means that it's going to be unified with whatever the
7389 TARGET_EXPR is really supposed to initialize, so treat it
7390 as being equivalent to anything. */
7391 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7392 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7393 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7394 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7395 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7396 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7397 cmp = 1;
7398 else
7399 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7400
7401 if (cmp <= 0)
7402 return cmp;
7403
7404 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7405
7406 case WITH_CLEANUP_EXPR:
7407 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7408 if (cmp <= 0)
7409 return cmp;
7410
7411 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7412
7413 case COMPONENT_REF:
7414 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7415 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7416
7417 return 0;
7418
7419 case VAR_DECL:
7420 case PARM_DECL:
7421 case CONST_DECL:
7422 case FUNCTION_DECL:
7423 return 0;
7424
7425 default:
7426 break;
7427 }
7428
7429 /* This general rule works for most tree codes. All exceptions should be
7430 handled above. If this is a language-specific tree code, we can't
7431 trust what might be in the operand, so say we don't know
7432 the situation. */
7433 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7434 return -1;
7435
7436 switch (TREE_CODE_CLASS (code1))
7437 {
7438 case tcc_unary:
7439 case tcc_binary:
7440 case tcc_comparison:
7441 case tcc_expression:
7442 case tcc_reference:
7443 case tcc_statement:
7444 cmp = 1;
7445 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7446 {
7447 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7448 if (cmp <= 0)
7449 return cmp;
7450 }
7451
7452 return cmp;
7453
7454 default:
7455 return -1;
7456 }
7457 }
7458
7459 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7460 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7461 than U, respectively. */
7462
7463 int
7464 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7465 {
7466 if (tree_int_cst_sgn (t) < 0)
7467 return -1;
7468 else if (!tree_fits_uhwi_p (t))
7469 return 1;
7470 else if (TREE_INT_CST_LOW (t) == u)
7471 return 0;
7472 else if (TREE_INT_CST_LOW (t) < u)
7473 return -1;
7474 else
7475 return 1;
7476 }
7477
7478 /* Return true if SIZE represents a constant size that is in bounds of
7479 what the middle-end and the backend accepts (covering not more than
7480 half of the address-space). */
7481
7482 bool
7483 valid_constant_size_p (const_tree size)
7484 {
7485 if (! tree_fits_uhwi_p (size)
7486 || TREE_OVERFLOW (size)
7487 || tree_int_cst_sign_bit (size) != 0)
7488 return false;
7489 return true;
7490 }
7491
7492 /* Return the precision of the type, or for a complex or vector type the
7493 precision of the type of its elements. */
7494
7495 unsigned int
7496 element_precision (const_tree type)
7497 {
7498 enum tree_code code = TREE_CODE (type);
7499 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7500 type = TREE_TYPE (type);
7501
7502 return TYPE_PRECISION (type);
7503 }
7504
7505 /* Return true if CODE represents an associative tree code. Otherwise
7506 return false. */
7507 bool
7508 associative_tree_code (enum tree_code code)
7509 {
7510 switch (code)
7511 {
7512 case BIT_IOR_EXPR:
7513 case BIT_AND_EXPR:
7514 case BIT_XOR_EXPR:
7515 case PLUS_EXPR:
7516 case MULT_EXPR:
7517 case MIN_EXPR:
7518 case MAX_EXPR:
7519 return true;
7520
7521 default:
7522 break;
7523 }
7524 return false;
7525 }
7526
7527 /* Return true if CODE represents a commutative tree code. Otherwise
7528 return false. */
7529 bool
7530 commutative_tree_code (enum tree_code code)
7531 {
7532 switch (code)
7533 {
7534 case PLUS_EXPR:
7535 case MULT_EXPR:
7536 case MULT_HIGHPART_EXPR:
7537 case MIN_EXPR:
7538 case MAX_EXPR:
7539 case BIT_IOR_EXPR:
7540 case BIT_XOR_EXPR:
7541 case BIT_AND_EXPR:
7542 case NE_EXPR:
7543 case EQ_EXPR:
7544 case UNORDERED_EXPR:
7545 case ORDERED_EXPR:
7546 case UNEQ_EXPR:
7547 case LTGT_EXPR:
7548 case TRUTH_AND_EXPR:
7549 case TRUTH_XOR_EXPR:
7550 case TRUTH_OR_EXPR:
7551 case WIDEN_MULT_EXPR:
7552 case VEC_WIDEN_MULT_HI_EXPR:
7553 case VEC_WIDEN_MULT_LO_EXPR:
7554 case VEC_WIDEN_MULT_EVEN_EXPR:
7555 case VEC_WIDEN_MULT_ODD_EXPR:
7556 return true;
7557
7558 default:
7559 break;
7560 }
7561 return false;
7562 }
7563
7564 /* Return true if CODE represents a ternary tree code for which the
7565 first two operands are commutative. Otherwise return false. */
7566 bool
7567 commutative_ternary_tree_code (enum tree_code code)
7568 {
7569 switch (code)
7570 {
7571 case WIDEN_MULT_PLUS_EXPR:
7572 case WIDEN_MULT_MINUS_EXPR:
7573 case DOT_PROD_EXPR:
7574 case FMA_EXPR:
7575 return true;
7576
7577 default:
7578 break;
7579 }
7580 return false;
7581 }
7582
7583 namespace inchash
7584 {
7585
7586 /* Generate a hash value for an expression. This can be used iteratively
7587 by passing a previous result as the HSTATE argument.
7588
7589 This function is intended to produce the same hash for expressions which
7590 would compare equal using operand_equal_p. */
7591 void
7592 add_expr (const_tree t, inchash::hash &hstate)
7593 {
7594 int i;
7595 enum tree_code code;
7596 enum tree_code_class tclass;
7597
7598 if (t == NULL_TREE)
7599 {
7600 hstate.merge_hash (0);
7601 return;
7602 }
7603
7604 code = TREE_CODE (t);
7605
7606 switch (code)
7607 {
7608 /* Alas, constants aren't shared, so we can't rely on pointer
7609 identity. */
7610 case VOID_CST:
7611 hstate.merge_hash (0);
7612 return;
7613 case INTEGER_CST:
7614 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7615 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7616 return;
7617 case REAL_CST:
7618 {
7619 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7620 hstate.merge_hash (val2);
7621 return;
7622 }
7623 case FIXED_CST:
7624 {
7625 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7626 hstate.merge_hash (val2);
7627 return;
7628 }
7629 case STRING_CST:
7630 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7631 return;
7632 case COMPLEX_CST:
7633 inchash::add_expr (TREE_REALPART (t), hstate);
7634 inchash::add_expr (TREE_IMAGPART (t), hstate);
7635 return;
7636 case VECTOR_CST:
7637 {
7638 unsigned i;
7639 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7640 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7641 return;
7642 }
7643 case SSA_NAME:
7644 /* We can just compare by pointer. */
7645 hstate.add_wide_int (SSA_NAME_VERSION (t));
7646 return;
7647 case PLACEHOLDER_EXPR:
7648 /* The node itself doesn't matter. */
7649 return;
7650 case TREE_LIST:
7651 /* A list of expressions, for a CALL_EXPR or as the elements of a
7652 VECTOR_CST. */
7653 for (; t; t = TREE_CHAIN (t))
7654 inchash::add_expr (TREE_VALUE (t), hstate);
7655 return;
7656 case CONSTRUCTOR:
7657 {
7658 unsigned HOST_WIDE_INT idx;
7659 tree field, value;
7660 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7661 {
7662 inchash::add_expr (field, hstate);
7663 inchash::add_expr (value, hstate);
7664 }
7665 return;
7666 }
7667 case FUNCTION_DECL:
7668 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7669 Otherwise nodes that compare equal according to operand_equal_p might
7670 get different hash codes. However, don't do this for machine specific
7671 or front end builtins, since the function code is overloaded in those
7672 cases. */
7673 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7674 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7675 {
7676 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7677 code = TREE_CODE (t);
7678 }
7679 /* FALL THROUGH */
7680 default:
7681 tclass = TREE_CODE_CLASS (code);
7682
7683 if (tclass == tcc_declaration)
7684 {
7685 /* DECLs have a unique ID. */
7686 hstate.add_wide_int (DECL_UID (t));
7687 }
7688 else
7689 {
7690 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7691
7692 hstate.add_object (code);
7693
7694 /* Don't hash the type, that can lead to having nodes which
7695 compare equal according to operand_equal_p, but which
7696 have different hash codes. */
7697 if (CONVERT_EXPR_CODE_P (code)
7698 || code == NON_LVALUE_EXPR)
7699 {
7700 /* Make sure to include signedness in the hash computation. */
7701 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7702 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7703 }
7704
7705 else if (commutative_tree_code (code))
7706 {
7707 /* It's a commutative expression. We want to hash it the same
7708 however it appears. We do this by first hashing both operands
7709 and then rehashing based on the order of their independent
7710 hashes. */
7711 inchash::hash one, two;
7712 inchash::add_expr (TREE_OPERAND (t, 0), one);
7713 inchash::add_expr (TREE_OPERAND (t, 1), two);
7714 hstate.add_commutative (one, two);
7715 }
7716 else
7717 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7718 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7719 }
7720 return;
7721 }
7722 }
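
/* Because commutative operands are combined with add_commutative, an
   expression such as "a + b" hashes to the same value as "b + a",
   matching the fact that operand_equal_p treats the two as equal.  */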
7723
7724 }
7725
7726 /* Constructors for pointer, array and function types.
7727 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7728 constructed by language-dependent code, not here.) */
7729
7730 /* Construct, lay out and return the type of pointers to TO_TYPE with
7731 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7732 reference all of memory. If such a type has already been
7733 constructed, reuse it. */
7734
7735 tree
7736 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7737 bool can_alias_all)
7738 {
7739 tree t;
7740 bool could_alias = can_alias_all;
7741
7742 if (to_type == error_mark_node)
7743 return error_mark_node;
7744
7745 /* If the pointed-to type has the may_alias attribute set, force
7746 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7747 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7748 can_alias_all = true;
7749
7750 /* In some cases, languages will have things that aren't a POINTER_TYPE
7751 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7752 In that case, return that type without regard to the rest of our
7753 operands.
7754
7755 ??? This is a kludge, but consistent with the way this function has
7756 always operated and there doesn't seem to be a good way to avoid this
7757 at the moment. */
7758 if (TYPE_POINTER_TO (to_type) != 0
7759 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7760 return TYPE_POINTER_TO (to_type);
7761
7762 /* First, if we already have a type for pointers to TO_TYPE and it's
7763 the proper mode, use it. */
7764 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7765 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7766 return t;
7767
7768 t = make_node (POINTER_TYPE);
7769
7770 TREE_TYPE (t) = to_type;
7771 SET_TYPE_MODE (t, mode);
7772 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7773 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7774 TYPE_POINTER_TO (to_type) = t;
7775
7776 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7777 SET_TYPE_STRUCTURAL_EQUALITY (t);
7778 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7779 TYPE_CANONICAL (t)
7780 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7781 mode, false);
7782
7783 /* Lay out the type. This function has many callers that are concerned
7784 with expression-construction, and this simplifies them all. */
7785 layout_type (t);
7786
7787 return t;
7788 }
7789
7790 /* By default build pointers in ptr_mode. */
7791
7792 tree
7793 build_pointer_type (tree to_type)
7794 {
7795 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7796 : TYPE_ADDR_SPACE (to_type);
7797 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7798 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7799 }
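
/* For example, a front end asking for the type `int *' might simply write

     tree int_ptr = build_pointer_type (integer_type_node);

   which forwards to build_pointer_type_for_mode with the pointer mode of
   TO_TYPE's address space (here the generic one) and CAN_ALIAS_ALL false;
   a second identical request returns the cached node found on the
   TYPE_NEXT_PTR_TO chain walked in build_pointer_type_for_mode above.  */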
7800
7801 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7802
7803 tree
7804 build_reference_type_for_mode (tree to_type, machine_mode mode,
7805 bool can_alias_all)
7806 {
7807 tree t;
7808 bool could_alias = can_alias_all;
7809
7810 if (to_type == error_mark_node)
7811 return error_mark_node;
7812
7813 /* If the pointed-to type has the may_alias attribute set, force
7814 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7815 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7816 can_alias_all = true;
7817
7818 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7819 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7820 In that case, return that type without regard to the rest of our
7821 operands.
7822
7823 ??? This is a kludge, but consistent with the way this function has
7824 always operated and there doesn't seem to be a good way to avoid this
7825 at the moment. */
7826 if (TYPE_REFERENCE_TO (to_type) != 0
7827 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7828 return TYPE_REFERENCE_TO (to_type);
7829
7830 /* First, if we already have a type for pointers to TO_TYPE and it's
7831 the proper mode, use it. */
7832 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7833 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7834 return t;
7835
7836 t = make_node (REFERENCE_TYPE);
7837
7838 TREE_TYPE (t) = to_type;
7839 SET_TYPE_MODE (t, mode);
7840 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7841 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7842 TYPE_REFERENCE_TO (to_type) = t;
7843
7844 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7845 SET_TYPE_STRUCTURAL_EQUALITY (t);
7846 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7847 TYPE_CANONICAL (t)
7848 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7849 mode, false);
7850
7851 layout_type (t);
7852
7853 return t;
7854 }
7855
7856
7857 /* Build the node for the type of references-to-TO_TYPE by default
7858 in ptr_mode. */
7859
7860 tree
7861 build_reference_type (tree to_type)
7862 {
7863 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7864 : TYPE_ADDR_SPACE (to_type);
7865 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7866 return build_reference_type_for_mode (to_type, pointer_mode, false);
7867 }
7868
7869 #define MAX_INT_CACHED_PREC \
7870 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7871 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7872
7873 /* Builds a signed or unsigned integer type of precision PRECISION.
7874 Used for C bitfields whose precision does not match that of
7875 built-in target types. */
7876 tree
7877 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7878 int unsignedp)
7879 {
7880 tree itype, ret;
7881
7882 if (unsignedp)
7883 unsignedp = MAX_INT_CACHED_PREC + 1;
7884
7885 if (precision <= MAX_INT_CACHED_PREC)
7886 {
7887 itype = nonstandard_integer_type_cache[precision + unsignedp];
7888 if (itype)
7889 return itype;
7890 }
7891
7892 itype = make_node (INTEGER_TYPE);
7893 TYPE_PRECISION (itype) = precision;
7894
7895 if (unsignedp)
7896 fixup_unsigned_type (itype);
7897 else
7898 fixup_signed_type (itype);
7899
7900 ret = itype;
7901 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7902 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7903 if (precision <= MAX_INT_CACHED_PREC)
7904 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7905
7906 return ret;
7907 }
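
/* For instance, a C front end laying out a 24-bit unsigned bit-field
   might request

     tree uint24 = build_nonstandard_integer_type (24, 1);

   The result is an unsigned INTEGER_TYPE with TYPE_PRECISION 24; since
   24 <= MAX_INT_CACHED_PREC, a second identical request returns the
   cached node.  */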
7908
7909 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7910 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7911 is true, reuse such a type that has already been constructed. */
7912
7913 static tree
7914 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7915 {
7916 tree itype = make_node (INTEGER_TYPE);
7917 inchash::hash hstate;
7918
7919 TREE_TYPE (itype) = type;
7920
7921 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7922 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7923
7924 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7925 SET_TYPE_MODE (itype, TYPE_MODE (type));
7926 TYPE_SIZE (itype) = TYPE_SIZE (type);
7927 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7928 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7929 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7930
7931 if (!shared)
7932 return itype;
7933
7934 if ((TYPE_MIN_VALUE (itype)
7935 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7936 || (TYPE_MAX_VALUE (itype)
7937 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7938 {
7939 /* Since we cannot reliably merge this type, we need to compare it using
7940 structural equality checks. */
7941 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7942 return itype;
7943 }
7944
7945 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7946 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7947 hstate.merge_hash (TYPE_HASH (type));
7948 itype = type_hash_canon (hstate.end (), itype);
7949
7950 return itype;
7951 }
7952
7953 /* Wrapper around build_range_type_1 with SHARED set to true. */
7954
7955 tree
7956 build_range_type (tree type, tree lowval, tree highval)
7957 {
7958 return build_range_type_1 (type, lowval, highval, true);
7959 }
7960
7961 /* Wrapper around build_range_type_1 with SHARED set to false. */
7962
7963 tree
7964 build_nonshared_range_type (tree type, tree lowval, tree highval)
7965 {
7966 return build_range_type_1 (type, lowval, highval, false);
7967 }
7968
7969 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7970 MAXVAL should be the maximum value in the domain
7971 (one less than the length of the array).
7972
7973 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7974 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7975 The limit exists because the result is a signed type and we don't handle
7976 sizes that use more than one HOST_WIDE_INT. */
7977
7978 tree
7979 build_index_type (tree maxval)
7980 {
7981 return build_range_type (sizetype, size_zero_node, maxval);
7982 }
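
/* For example, the TYPE_DOMAIN of a ten-element array would typically be
   obtained with

     tree domain = build_index_type (size_int (9));

   i.e. a sizetype-based range covering [0, 9].  */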
7983
7984 /* Return true if the debug information for TYPE, a subtype, should be emitted
7985 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7986 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7987 debug info and doesn't reflect the source code. */
7988
7989 bool
7990 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7991 {
7992 tree base_type = TREE_TYPE (type), low, high;
7993
7994 /* Subrange types have a base type which is an integral type. */
7995 if (!INTEGRAL_TYPE_P (base_type))
7996 return false;
7997
7998 /* Get the real bounds of the subtype. */
7999 if (lang_hooks.types.get_subrange_bounds)
8000 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8001 else
8002 {
8003 low = TYPE_MIN_VALUE (type);
8004 high = TYPE_MAX_VALUE (type);
8005 }
8006
8007 /* If the type and its base type have the same representation and the same
8008 name, then the type is not a subrange but a copy of the base type. */
8009 if ((TREE_CODE (base_type) == INTEGER_TYPE
8010 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8011 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8012 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8013 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8014 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8015 return false;
8016
8017 if (lowval)
8018 *lowval = low;
8019 if (highval)
8020 *highval = high;
8021 return true;
8022 }
8023
8024 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8025 and number of elements specified by the range of values of INDEX_TYPE.
8026 If SHARED is true, reuse such a type that has already been constructed. */
8027
8028 static tree
8029 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8030 {
8031 tree t;
8032
8033 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8034 {
8035 error ("arrays of functions are not meaningful");
8036 elt_type = integer_type_node;
8037 }
8038
8039 t = make_node (ARRAY_TYPE);
8040 TREE_TYPE (t) = elt_type;
8041 TYPE_DOMAIN (t) = index_type;
8042 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8043 layout_type (t);
8044
8045 /* If the element type is incomplete at this point we get marked for
8046 structural equality. Do not record these types in the canonical
8047 type hashtable. */
8048 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8049 return t;
8050
8051 if (shared)
8052 {
8053 inchash::hash hstate;
8054 hstate.add_object (TYPE_HASH (elt_type));
8055 if (index_type)
8056 hstate.add_object (TYPE_HASH (index_type));
8057 t = type_hash_canon (hstate.end (), t);
8058 }
8059
8060 if (TYPE_CANONICAL (t) == t)
8061 {
8062 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8063 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8064 SET_TYPE_STRUCTURAL_EQUALITY (t);
8065 else if (TYPE_CANONICAL (elt_type) != elt_type
8066 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8067 TYPE_CANONICAL (t)
8068 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8069 index_type
8070 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8071 shared);
8072 }
8073
8074 return t;
8075 }
8076
8077 /* Wrapper around build_array_type_1 with SHARED set to true. */
8078
8079 tree
8080 build_array_type (tree elt_type, tree index_type)
8081 {
8082 return build_array_type_1 (elt_type, index_type, true);
8083 }
8084
8085 /* Wrapper around build_array_type_1 with SHARED set to false. */
8086
8087 tree
8088 build_nonshared_array_type (tree elt_type, tree index_type)
8089 {
8090 return build_array_type_1 (elt_type, index_type, false);
8091 }
8092
8093 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8094 sizetype. */
8095
8096 tree
8097 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8098 {
8099 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8100 }
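
/* For example,

     tree int16_arr = build_array_type_nelts (integer_type_node, 16);

   is shorthand for
   build_array_type (integer_type_node, build_index_type (size_int (15))),
   i.e. the type `int[16]'.  */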
8101
8102 /* Strips ARRAY_TYPEs from TYPE, examining the element type at each
8103 level, until a non-array element type is found; returns that type. */
8104
8105 tree
8106 strip_array_types (tree type)
8107 {
8108 while (TREE_CODE (type) == ARRAY_TYPE)
8109 type = TREE_TYPE (type);
8110
8111 return type;
8112 }
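
/* For example,

     strip_array_types (build_array_type_nelts
                          (build_array_type_nelts (integer_type_node, 4), 3))

   (the type `int[3][4]') yields integer_type_node; a non-array type is
   returned unchanged.  */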
8113
8114 /* Computes the canonical argument types from the argument type list
8115 ARGTYPES.
8116
8117 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8118 on entry to this function, or if any of the ARGTYPES are
8119 structural.
8120
8121 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8122 true on entry to this function, or if any of the ARGTYPES are
8123 non-canonical.
8124
8125 Returns a canonical argument list, which may be ARGTYPES when the
8126 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8127 true) or would not differ from ARGTYPES. */
8128
8129 static tree
8130 maybe_canonicalize_argtypes (tree argtypes,
8131 bool *any_structural_p,
8132 bool *any_noncanonical_p)
8133 {
8134 tree arg;
8135 bool any_noncanonical_argtypes_p = false;
8136
8137 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8138 {
8139 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8140 /* Fail gracefully by stating that the type is structural. */
8141 *any_structural_p = true;
8142 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8143 *any_structural_p = true;
8144 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8145 || TREE_PURPOSE (arg))
8146 /* If the argument has a default argument, we consider it
8147 non-canonical even though the type itself is canonical.
8148 That way, different variants of function and method types
8149 with default arguments will all point to the variant with
8150 no defaults as their canonical type. */
8151 any_noncanonical_argtypes_p = true;
8152 }
8153
8154 if (*any_structural_p)
8155 return argtypes;
8156
8157 if (any_noncanonical_argtypes_p)
8158 {
8159 /* Build the canonical list of argument types. */
8160 tree canon_argtypes = NULL_TREE;
8161 bool is_void = false;
8162
8163 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8164 {
8165 if (arg == void_list_node)
8166 is_void = true;
8167 else
8168 canon_argtypes = tree_cons (NULL_TREE,
8169 TYPE_CANONICAL (TREE_VALUE (arg)),
8170 canon_argtypes);
8171 }
8172
8173 canon_argtypes = nreverse (canon_argtypes);
8174 if (is_void)
8175 canon_argtypes = chainon (canon_argtypes, void_list_node);
8176
8177 /* There is a non-canonical type. */
8178 *any_noncanonical_p = true;
8179 return canon_argtypes;
8180 }
8181
8182 /* The canonical argument types are the same as ARGTYPES. */
8183 return argtypes;
8184 }
8185
8186 /* Construct, lay out and return
8187 the type of functions returning type VALUE_TYPE
8188 given arguments of types ARG_TYPES.
8189 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8190 are data type nodes for the arguments of the function.
8191 If such a type has already been constructed, reuse it. */
8192
8193 tree
8194 build_function_type (tree value_type, tree arg_types)
8195 {
8196 tree t;
8197 inchash::hash hstate;
8198 bool any_structural_p, any_noncanonical_p;
8199 tree canon_argtypes;
8200
8201 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8202 {
8203 error ("function return type cannot be function");
8204 value_type = integer_type_node;
8205 }
8206
8207 /* Make a node of the sort we want. */
8208 t = make_node (FUNCTION_TYPE);
8209 TREE_TYPE (t) = value_type;
8210 TYPE_ARG_TYPES (t) = arg_types;
8211
8212 /* If we already have such a type, use the old one. */
8213 hstate.add_object (TYPE_HASH (value_type));
8214 type_hash_list (arg_types, hstate);
8215 t = type_hash_canon (hstate.end (), t);
8216
8217 /* Set up the canonical type. */
8218 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8219 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8220 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8221 &any_structural_p,
8222 &any_noncanonical_p);
8223 if (any_structural_p)
8224 SET_TYPE_STRUCTURAL_EQUALITY (t);
8225 else if (any_noncanonical_p)
8226 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8227 canon_argtypes);
8228
8229 if (!COMPLETE_TYPE_P (t))
8230 layout_type (t);
8231 return t;
8232 }
8233
8234 /* Build a function type. The RETURN_TYPE is the type returned by the
8235 function. If VAARGS is set, no void_type_node is appended to
8236 the list. ARGP must always be terminated by a NULL_TREE. */
8237
8238 static tree
8239 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8240 {
8241 tree t, args, last;
8242
8243 t = va_arg (argp, tree);
8244 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8245 args = tree_cons (NULL_TREE, t, args);
8246
8247 if (vaargs)
8248 {
8249 last = args;
8250 if (args != NULL_TREE)
8251 args = nreverse (args);
8252 gcc_assert (last != void_list_node);
8253 }
8254 else if (args == NULL_TREE)
8255 args = void_list_node;
8256 else
8257 {
8258 last = args;
8259 args = nreverse (args);
8260 TREE_CHAIN (last) = void_list_node;
8261 }
8262 args = build_function_type (return_type, args);
8263
8264 return args;
8265 }
8266
8267 /* Build a function type. The RETURN_TYPE is the type returned by the
8268 function. If additional arguments are provided, they are
8269 additional argument types. The list of argument types must always
8270 be terminated by NULL_TREE. */
8271
8272 tree
8273 build_function_type_list (tree return_type, ...)
8274 {
8275 tree args;
8276 va_list p;
8277
8278 va_start (p, return_type);
8279 args = build_function_type_list_1 (false, return_type, p);
8280 va_end (p);
8281 return args;
8282 }
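
/* For example, the type of a function such as `int f (double, char *)'
   might be built with

     tree fn_type
       = build_function_type_list (integer_type_node, double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   The argument list is terminated with void_list_node internally, so the
   resulting FUNCTION_TYPE is not a varargs type.  */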
8283
8284 /* Build a variable argument function type. The RETURN_TYPE is the
8285 type returned by the function. If additional arguments are provided,
8286 they are additional argument types. The list of argument types must
8287 always be terminated by NULL_TREE. */
8288
8289 tree
8290 build_varargs_function_type_list (tree return_type, ...)
8291 {
8292 tree args;
8293 va_list p;
8294
8295 va_start (p, return_type);
8296 args = build_function_type_list_1 (true, return_type, p);
8297 va_end (p);
8298
8299 return args;
8300 }
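
/* For example, a printf-like type `int (char *, ...)' might be built with

     tree vfn_type
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           NULL_TREE);

   Here no trailing void_list_node is appended, which is what marks the
   type as taking a variable number of arguments.  */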
8301
8302 /* Build a function type. RETURN_TYPE is the type returned by the
8303 function; VAARGS indicates whether the function takes varargs. The
8304 function takes N named arguments, the types of which are provided in
8305 ARG_TYPES. */
8306
8307 static tree
8308 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8309 tree *arg_types)
8310 {
8311 int i;
8312 tree t = vaargs ? NULL_TREE : void_list_node;
8313
8314 for (i = n - 1; i >= 0; i--)
8315 t = tree_cons (NULL_TREE, arg_types[i], t);
8316
8317 return build_function_type (return_type, t);
8318 }
8319
8320 /* Build a function type. RETURN_TYPE is the type returned by the
8321 function. The function takes N named arguments, the types of which
8322 are provided in ARG_TYPES. */
8323
8324 tree
8325 build_function_type_array (tree return_type, int n, tree *arg_types)
8326 {
8327 return build_function_type_array_1 (false, return_type, n, arg_types);
8328 }
8329
8330 /* Build a variable argument function type. RETURN_TYPE is the type
8331 returned by the function. The function takes N named arguments, the
8332 types of which are provided in ARG_TYPES. */
8333
8334 tree
8335 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8336 {
8337 return build_function_type_array_1 (true, return_type, n, arg_types);
8338 }
8339
8340 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8341 and ARGTYPES (a TREE_LIST) are the return type and argument types
8342 for the method. An implicit additional parameter (of type
8343 pointer-to-BASETYPE) is added to the ARGTYPES. */
8344
8345 tree
8346 build_method_type_directly (tree basetype,
8347 tree rettype,
8348 tree argtypes)
8349 {
8350 tree t;
8351 tree ptype;
8352 inchash::hash hstate;
8353 bool any_structural_p, any_noncanonical_p;
8354 tree canon_argtypes;
8355
8356 /* Make a node of the sort we want. */
8357 t = make_node (METHOD_TYPE);
8358
8359 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8360 TREE_TYPE (t) = rettype;
8361 ptype = build_pointer_type (basetype);
8362
8363 /* The actual arglist for this function includes a "hidden" argument
8364 which is "this". Put it into the list of argument types. */
8365 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8366 TYPE_ARG_TYPES (t) = argtypes;
8367
8368 /* If we already have such a type, use the old one. */
8369 hstate.add_object (TYPE_HASH (basetype));
8370 hstate.add_object (TYPE_HASH (rettype));
8371 type_hash_list (argtypes, hstate);
8372 t = type_hash_canon (hstate.end (), t);
8373
8374 /* Set up the canonical type. */
8375 any_structural_p
8376 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8377 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8378 any_noncanonical_p
8379 = (TYPE_CANONICAL (basetype) != basetype
8380 || TYPE_CANONICAL (rettype) != rettype);
8381 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8382 &any_structural_p,
8383 &any_noncanonical_p);
8384 if (any_structural_p)
8385 SET_TYPE_STRUCTURAL_EQUALITY (t);
8386 else if (any_noncanonical_p)
8387 TYPE_CANONICAL (t)
8388 = build_method_type_directly (TYPE_CANONICAL (basetype),
8389 TYPE_CANONICAL (rettype),
8390 canon_argtypes);
8391 if (!COMPLETE_TYPE_P (t))
8392 layout_type (t);
8393
8394 return t;
8395 }
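
/* As an illustration, for a hypothetical RECORD_TYPE `klass', the type of
   a method `int klass::f (double)' could be built with

     tree argtypes = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree mtype = build_method_type_directly (klass, integer_type_node,
                                              argtypes);

   The implicit `this' argument of type pointer-to-klass is prepended to
   ARGTYPES by the function itself.  */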
8396
8397 /* Construct, lay out and return the type of methods belonging to class
8398 BASETYPE and whose arguments and values are described by TYPE.
8399 If that type exists already, reuse it.
8400 TYPE must be a FUNCTION_TYPE node. */
8401
8402 tree
8403 build_method_type (tree basetype, tree type)
8404 {
8405 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8406
8407 return build_method_type_directly (basetype,
8408 TREE_TYPE (type),
8409 TYPE_ARG_TYPES (type));
8410 }
8411
8412 /* Construct, lay out and return the type of offsets to a value
8413 of type TYPE, within an object of type BASETYPE.
8414 If a suitable offset type exists already, reuse it. */
8415
8416 tree
8417 build_offset_type (tree basetype, tree type)
8418 {
8419 tree t;
8420 inchash::hash hstate;
8421
8422 /* Make a node of the sort we want. */
8423 t = make_node (OFFSET_TYPE);
8424
8425 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8426 TREE_TYPE (t) = type;
8427
8428 /* If we already have such a type, use the old one. */
8429 hstate.add_object (TYPE_HASH (basetype));
8430 hstate.add_object (TYPE_HASH (type));
8431 t = type_hash_canon (hstate.end (), t);
8432
8433 if (!COMPLETE_TYPE_P (t))
8434 layout_type (t);
8435
8436 if (TYPE_CANONICAL (t) == t)
8437 {
8438 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8439 || TYPE_STRUCTURAL_EQUALITY_P (type))
8440 SET_TYPE_STRUCTURAL_EQUALITY (t);
8441 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8442 || TYPE_CANONICAL (type) != type)
8443 TYPE_CANONICAL (t)
8444 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8445 TYPE_CANONICAL (type));
8446 }
8447
8448 return t;
8449 }
8450
8451 /* Create a complex type whose components are COMPONENT_TYPE. */
8452
8453 tree
8454 build_complex_type (tree component_type)
8455 {
8456 tree t;
8457 inchash::hash hstate;
8458
8459 gcc_assert (INTEGRAL_TYPE_P (component_type)
8460 || SCALAR_FLOAT_TYPE_P (component_type)
8461 || FIXED_POINT_TYPE_P (component_type));
8462
8463 /* Make a node of the sort we want. */
8464 t = make_node (COMPLEX_TYPE);
8465
8466 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8467
8468 /* If we already have such a type, use the old one. */
8469 hstate.add_object (TYPE_HASH (component_type));
8470 t = type_hash_canon (hstate.end (), t);
8471
8472 if (!COMPLETE_TYPE_P (t))
8473 layout_type (t);
8474
8475 if (TYPE_CANONICAL (t) == t)
8476 {
8477 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8478 SET_TYPE_STRUCTURAL_EQUALITY (t);
8479 else if (TYPE_CANONICAL (component_type) != component_type)
8480 TYPE_CANONICAL (t)
8481 = build_complex_type (TYPE_CANONICAL (component_type));
8482 }
8483
8484 /* We need to create a name, since complex is a fundamental type. */
8485 if (! TYPE_NAME (t))
8486 {
8487 const char *name;
8488 if (component_type == char_type_node)
8489 name = "complex char";
8490 else if (component_type == signed_char_type_node)
8491 name = "complex signed char";
8492 else if (component_type == unsigned_char_type_node)
8493 name = "complex unsigned char";
8494 else if (component_type == short_integer_type_node)
8495 name = "complex short int";
8496 else if (component_type == short_unsigned_type_node)
8497 name = "complex short unsigned int";
8498 else if (component_type == integer_type_node)
8499 name = "complex int";
8500 else if (component_type == unsigned_type_node)
8501 name = "complex unsigned int";
8502 else if (component_type == long_integer_type_node)
8503 name = "complex long int";
8504 else if (component_type == long_unsigned_type_node)
8505 name = "complex long unsigned int";
8506 else if (component_type == long_long_integer_type_node)
8507 name = "complex long long int";
8508 else if (component_type == long_long_unsigned_type_node)
8509 name = "complex long long unsigned int";
8510 else
8511 name = 0;
8512
8513 if (name != 0)
8514 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8515 get_identifier (name), t);
8516 }
8517
8518 return build_qualified_type (t, TYPE_QUALS (component_type));
8519 }
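
/* For example, build_complex_type (double_type_node) returns a
   COMPLEX_TYPE whose TREE_TYPE is double_type_node; since such a node is
   built during compiler initialization, type_hash_canon normally hands
   back the pre-built complex_double_type_node.  */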
8520
8521 /* If TYPE is a real or complex floating-point type and the target
8522 does not directly support arithmetic on TYPE then return the wider
8523 type to be used for arithmetic on TYPE. Otherwise, return
8524 NULL_TREE. */
8525
8526 tree
8527 excess_precision_type (tree type)
8528 {
8529 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8530 {
8531 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8532 switch (TREE_CODE (type))
8533 {
8534 case REAL_TYPE:
8535 switch (flt_eval_method)
8536 {
8537 case 1:
8538 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8539 return double_type_node;
8540 break;
8541 case 2:
8542 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8543 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8544 return long_double_type_node;
8545 break;
8546 default:
8547 gcc_unreachable ();
8548 }
8549 break;
8550 case COMPLEX_TYPE:
8551 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8552 return NULL_TREE;
8553 switch (flt_eval_method)
8554 {
8555 case 1:
8556 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8557 return complex_double_type_node;
8558 break;
8559 case 2:
8560 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8561 || (TYPE_MODE (TREE_TYPE (type))
8562 == TYPE_MODE (double_type_node)))
8563 return complex_long_double_type_node;
8564 break;
8565 default:
8566 gcc_unreachable ();
8567 }
8568 break;
8569 default:
8570 break;
8571 }
8572 }
8573 return NULL_TREE;
8574 }
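
/* For instance, with -fexcess-precision=standard (so flag_excess_precision
   is not EXCESS_PRECISION_FAST) on a target whose TARGET_FLT_EVAL_METHOD
   is 2 (x87-style evaluation),

     excess_precision_type (float_type_node)

   returns long_double_type_node, while applying it to
   long_double_type_node itself returns NULL_TREE.  */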
8575 \f
8576 /* Return OP, stripped of any conversions to wider types as much as is safe.
8577 Converting the value back to OP's type makes a value equivalent to OP.
8578
8579 If FOR_TYPE is nonzero, we return a value which, if converted to
8580 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8581
8582 OP must have integer, real or enumeral type. Pointers are not allowed!
8583
8584 There are some cases where the obvious value we could return
8585 would regenerate to OP if converted to OP's type,
8586 but would not extend like OP to wider types.
8587 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8588 For example, if OP is (unsigned short)(signed char)-1,
8589 we avoid returning (signed char)-1 if FOR_TYPE is int,
8590 even though extending that to an unsigned short would regenerate OP,
8591 since the result of extending (signed char)-1 to (int)
8592 is different from (int) OP. */
8593
8594 tree
8595 get_unwidened (tree op, tree for_type)
8596 {
8597 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8598 tree type = TREE_TYPE (op);
8599 unsigned final_prec
8600 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8601 int uns
8602 = (for_type != 0 && for_type != type
8603 && final_prec > TYPE_PRECISION (type)
8604 && TYPE_UNSIGNED (type));
8605 tree win = op;
8606
8607 while (CONVERT_EXPR_P (op))
8608 {
8609 int bitschange;
8610
8611 /* TYPE_PRECISION on vector types has different meaning
8612 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8613 so avoid them here. */
8614 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8615 break;
8616
8617 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8618 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8619
8620 /* Truncations are many-one so cannot be removed,
8621 unless we are later going to truncate down even further. */
8622 if (bitschange < 0
8623 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8624 break;
8625
8626 /* See what's inside this conversion. If we decide to strip it,
8627 we will set WIN. */
8628 op = TREE_OPERAND (op, 0);
8629
8630 /* If we have not stripped any zero-extensions (uns is 0),
8631 we can strip any kind of extension.
8632 If we have previously stripped a zero-extension,
8633 only zero-extensions can safely be stripped.
8634 Any extension can be stripped if the bits it would produce
8635 are all going to be discarded later by truncating to FOR_TYPE. */
8636
8637 if (bitschange > 0)
8638 {
8639 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8640 win = op;
8641 /* TYPE_UNSIGNED says whether this is a zero-extension.
8642 Let's avoid computing it if it does not affect WIN
8643 and if UNS will not be needed again. */
8644 if ((uns
8645 || CONVERT_EXPR_P (op))
8646 && TYPE_UNSIGNED (TREE_TYPE (op)))
8647 {
8648 uns = 1;
8649 win = op;
8650 }
8651 }
8652 }
8653
8654 /* If we finally reach a constant see if it fits in for_type and
8655 in that case convert it. */
8656 if (for_type
8657 && TREE_CODE (win) == INTEGER_CST
8658 && TREE_TYPE (win) != for_type
8659 && int_fits_type_p (win, for_type))
8660 win = fold_convert (for_type, win);
8661
8662 return win;
8663 }
8664 \f
8665 /* Return OP or a simpler expression for a narrower value
8666 which can be sign-extended or zero-extended to give back OP.
8667 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8668 or 0 if the value should be sign-extended. */
8669
8670 tree
8671 get_narrower (tree op, int *unsignedp_ptr)
8672 {
8673 int uns = 0;
8674 int first = 1;
8675 tree win = op;
8676 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8677
8678 while (TREE_CODE (op) == NOP_EXPR)
8679 {
8680 int bitschange
8681 = (TYPE_PRECISION (TREE_TYPE (op))
8682 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8683
8684 /* Truncations are many-one so cannot be removed. */
8685 if (bitschange < 0)
8686 break;
8687
8688 /* See what's inside this conversion. If we decide to strip it,
8689 we will set WIN. */
8690
8691 if (bitschange > 0)
8692 {
8693 op = TREE_OPERAND (op, 0);
8694 /* An extension: the outermost one can be stripped,
8695 but remember whether it is zero or sign extension. */
8696 if (first)
8697 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8698 /* Otherwise, if a sign extension has been stripped,
8699 only sign extensions can now be stripped;
8700 if a zero extension has been stripped, only zero-extensions. */
8701 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8702 break;
8703 first = 0;
8704 }
8705 else /* bitschange == 0 */
8706 {
8707 /* A change in nominal type can always be stripped, but we must
8708 preserve the unsignedness. */
8709 if (first)
8710 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8711 first = 0;
8712 op = TREE_OPERAND (op, 0);
8713 /* Keep trying to narrow, but don't assign op to win if it
8714 would turn an integral type into something else. */
8715 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8716 continue;
8717 }
8718
8719 win = op;
8720 }
8721
8722 if (TREE_CODE (op) == COMPONENT_REF
8723 /* Since type_for_size always gives an integer type. */
8724 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8725 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8726 /* Ensure field is laid out already. */
8727 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8728 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8729 {
8730 unsigned HOST_WIDE_INT innerprec
8731 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8732 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8733 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8734 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8735
8736 /* We can get this structure field in a narrower type that fits it,
8737 but the resulting extension to its nominal type (a fullword type)
8738 must satisfy the same conditions as for other extensions.
8739
8740 Do this only for fields that are aligned (not bit-fields),
8741 because when bit-field insns will be used there is no
8742 advantage in doing this. */
8743
8744 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8745 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8746 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8747 && type != 0)
8748 {
8749 if (first)
8750 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8751 win = fold_convert (type, op);
8752 }
8753 }
8754
8755 *unsignedp_ptr = uns;
8756 return win;
8757 }
8758 \f
8759 /* Returns true if integer constant C has a value that is permissible
8760 for type TYPE (an INTEGER_TYPE). */
8761
8762 bool
8763 int_fits_type_p (const_tree c, const_tree type)
8764 {
8765 tree type_low_bound, type_high_bound;
8766 bool ok_for_low_bound, ok_for_high_bound;
8767 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8768
8769 retry:
8770 type_low_bound = TYPE_MIN_VALUE (type);
8771 type_high_bound = TYPE_MAX_VALUE (type);
8772
8773 /* If at least one bound of the type is a constant integer, we can check
8774 ourselves and maybe make a decision. If no such decision is possible, but
8775 this type is a subtype, try checking against that. Otherwise, use
8776 fits_to_tree_p, which checks against the precision.
8777
8778 Compute the status for each possibly constant bound, returning false as
8779 soon as one is known not to be satisfied. Use ok_for_xxx_bound to record
8780 whether the corresponding bound is an INTEGER_CST that the constant is
8781 known to satisfy. */
8782
8783 /* Check if c >= type_low_bound. */
8784 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8785 {
8786 if (tree_int_cst_lt (c, type_low_bound))
8787 return false;
8788 ok_for_low_bound = true;
8789 }
8790 else
8791 ok_for_low_bound = false;
8792
8793 /* Check if c <= type_high_bound. */
8794 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8795 {
8796 if (tree_int_cst_lt (type_high_bound, c))
8797 return false;
8798 ok_for_high_bound = true;
8799 }
8800 else
8801 ok_for_high_bound = false;
8802
8803 /* If the constant fits both bounds, the result is known. */
8804 if (ok_for_low_bound && ok_for_high_bound)
8805 return true;
8806
8807 /* Perform some generic filtering which may allow making a decision
8808 even if the bounds are not constant. First, negative integers
8809 never fit in unsigned types. */
8810 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8811 return false;
8812
8813 /* Second, narrower types always fit in wider ones. */
8814 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8815 return true;
8816
8817 /* Third, unsigned integers with top bit set never fit signed types. */
8818 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8819 {
8820 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8821 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8822 {
8823 /* When a tree_cst is converted to a wide-int, the precision
8824 is taken from the type. However, if the precision of the
8825 mode underneath the type is smaller than that, it is
8826 possible that the value will not fit. The test below
8827 fails if any bit is set between the sign bit of the
8828 underlying mode and the top bit of the type. */
8829 if (wi::ne_p (wi::zext (c, prec - 1), c))
8830 return false;
8831 }
8832 else if (wi::neg_p (c))
8833 return false;
8834 }
8835
8836 /* If we haven't been able to decide at this point, there is nothing more we
8837 can check ourselves here. Look at the base type if we have one and it
8838 has the same precision. */
8839 if (TREE_CODE (type) == INTEGER_TYPE
8840 && TREE_TYPE (type) != 0
8841 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8842 {
8843 type = TREE_TYPE (type);
8844 goto retry;
8845 }
8846
8847 /* Or to fits_to_tree_p, if nothing else. */
8848 return wi::fits_to_tree_p (c, type);
8849 }
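
/* A couple of concrete cases, assuming 8-bit chars on the target:

     int_fits_type_p (build_int_cst (integer_type_node, 255),
                      unsigned_char_type_node)

   is true, since 255 lies within [0, 255], whereas the same call with -1
   instead of 255 is false: a negative value of signed type never fits an
   unsigned type.  */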
8850
8851 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8852 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8853 represented (assuming two's-complement arithmetic) within the bit
8854 precision of the type are returned instead. */
8855
8856 void
8857 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8858 {
8859 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8860 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8861 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8862 else
8863 {
8864 if (TYPE_UNSIGNED (type))
8865 mpz_set_ui (min, 0);
8866 else
8867 {
8868 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8869 wi::to_mpz (mn, min, SIGNED);
8870 }
8871 }
8872
8873 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8874 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8875 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8876 else
8877 {
8878 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8879 wi::to_mpz (mn, max, TYPE_SIGN (type));
8880 }
8881 }
8882
8883 /* Return true if VAR is an automatic variable defined in function FN. */
8884
8885 bool
8886 auto_var_in_fn_p (const_tree var, const_tree fn)
8887 {
8888 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8889 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8890 || TREE_CODE (var) == PARM_DECL)
8891 && ! TREE_STATIC (var))
8892 || TREE_CODE (var) == LABEL_DECL
8893 || TREE_CODE (var) == RESULT_DECL));
8894 }
8895
8896 /* Subprogram of following function. Called by walk_tree.
8897
8898 Return *TP if it is an automatic variable or parameter of the
8899 function passed in as DATA. */
8900
8901 static tree
8902 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8903 {
8904 tree fn = (tree) data;
8905
8906 if (TYPE_P (*tp))
8907 *walk_subtrees = 0;
8908
8909 else if (DECL_P (*tp)
8910 && auto_var_in_fn_p (*tp, fn))
8911 return *tp;
8912
8913 return NULL_TREE;
8914 }
8915
8916 /* Returns true if T is, contains, or refers to a type with variable
8917 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8918 arguments, but not the return type. If FN is nonzero, only return
8919 true if a modifier of the type or position of FN is a variable or
8920 parameter inside FN.
8921
8922 This concept is more general than that of C99 'variably modified types':
8923 in C99, a struct type is never variably modified because a VLA may not
8924 appear as a structure member. However, in GNU C, code like:
8925
8926 struct S { int i[f()]; };
8927
8928 is valid, and other languages may define similar constructs. */
8929
8930 bool
8931 variably_modified_type_p (tree type, tree fn)
8932 {
8933 tree t;
8934
8935 /* Test if T is either variable (if FN is zero) or an expression containing
8936 a variable in FN. If TYPE isn't gimplified, return true also if
8937 gimplify_one_sizepos would gimplify the expression into a local
8938 variable. */
8939 #define RETURN_TRUE_IF_VAR(T) \
8940 do { tree _t = (T); \
8941 if (_t != NULL_TREE \
8942 && _t != error_mark_node \
8943 && TREE_CODE (_t) != INTEGER_CST \
8944 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8945 && (!fn \
8946 || (!TYPE_SIZES_GIMPLIFIED (type) \
8947 && !is_gimple_sizepos (_t)) \
8948 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8949 return true; } while (0)
8950
8951 if (type == error_mark_node)
8952 return false;
8953
8954 /* If TYPE itself has variable size, it is variably modified. */
8955 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8956 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8957
8958 switch (TREE_CODE (type))
8959 {
8960 case POINTER_TYPE:
8961 case REFERENCE_TYPE:
8962 case VECTOR_TYPE:
8963 if (variably_modified_type_p (TREE_TYPE (type), fn))
8964 return true;
8965 break;
8966
8967 case FUNCTION_TYPE:
8968 case METHOD_TYPE:
8969 /* If TYPE is a function type, it is variably modified if the
8970 return type is variably modified. */
8971 if (variably_modified_type_p (TREE_TYPE (type), fn))
8972 return true;
8973 break;
8974
8975 case INTEGER_TYPE:
8976 case REAL_TYPE:
8977 case FIXED_POINT_TYPE:
8978 case ENUMERAL_TYPE:
8979 case BOOLEAN_TYPE:
8980 /* Scalar types are variably modified if their end points
8981 aren't constant. */
8982 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8983 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8984 break;
8985
8986 case RECORD_TYPE:
8987 case UNION_TYPE:
8988 case QUAL_UNION_TYPE:
8989 /* We can't see if any of the fields are variably-modified by the
8990 definition we normally use, since that would produce infinite
8991 recursion via pointers. */
8992 /* This is variably modified if some field's type is. */
8993 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8994 if (TREE_CODE (t) == FIELD_DECL)
8995 {
8996 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8997 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8998 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8999
9000 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9001 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9002 }
9003 break;
9004
9005 case ARRAY_TYPE:
9006 /* Do not call ourselves to avoid infinite recursion. This is
9007 variably modified if the element type is. */
9008 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9009 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9010 break;
9011
9012 default:
9013 break;
9014 }
9015
9016 /* The current language may have other cases to check, but in general,
9017 all other types are not variably modified. */
9018 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9019
9020 #undef RETURN_TRUE_IF_VAR
9021 }
9022
9023 /* Given a DECL or TYPE, return the scope in which it was declared, or
9024 NULL_TREE if there is no containing scope. */
9025
9026 tree
9027 get_containing_scope (const_tree t)
9028 {
9029 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9030 }
9031
9032 /* Return the innermost context enclosing DECL that is
9033 a FUNCTION_DECL, or zero if none. */
9034
9035 tree
9036 decl_function_context (const_tree decl)
9037 {
9038 tree context;
9039
9040 if (TREE_CODE (decl) == ERROR_MARK)
9041 return 0;
9042
9043 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9044 where we look up the function at runtime. Such functions always take
9045 a first argument of type 'pointer to real context'.
9046
9047 C++ should really be fixed to use DECL_CONTEXT for the real context,
9048 and use something else for the "virtual context". */
9049 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9050 context
9051 = TYPE_MAIN_VARIANT
9052 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9053 else
9054 context = DECL_CONTEXT (decl);
9055
9056 while (context && TREE_CODE (context) != FUNCTION_DECL)
9057 {
9058 if (TREE_CODE (context) == BLOCK)
9059 context = BLOCK_SUPERCONTEXT (context);
9060 else
9061 context = get_containing_scope (context);
9062 }
9063
9064 return context;
9065 }
9066
9067 /* Return the innermost context enclosing DECL that is
9068 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9069 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9070
9071 tree
9072 decl_type_context (const_tree decl)
9073 {
9074 tree context = DECL_CONTEXT (decl);
9075
9076 while (context)
9077 switch (TREE_CODE (context))
9078 {
9079 case NAMESPACE_DECL:
9080 case TRANSLATION_UNIT_DECL:
9081 return NULL_TREE;
9082
9083 case RECORD_TYPE:
9084 case UNION_TYPE:
9085 case QUAL_UNION_TYPE:
9086 return context;
9087
9088 case TYPE_DECL:
9089 case FUNCTION_DECL:
9090 context = DECL_CONTEXT (context);
9091 break;
9092
9093 case BLOCK:
9094 context = BLOCK_SUPERCONTEXT (context);
9095 break;
9096
9097 default:
9098 gcc_unreachable ();
9099 }
9100
9101 return NULL_TREE;
9102 }
9103
9104 /* CALL is a CALL_EXPR. Return the declaration for the function
9105 called, or NULL_TREE if the called function cannot be
9106 determined. */
9107
9108 tree
9109 get_callee_fndecl (const_tree call)
9110 {
9111 tree addr;
9112
9113 if (call == error_mark_node)
9114 return error_mark_node;
9115
9116 /* It's invalid to call this function with anything but a
9117 CALL_EXPR. */
9118 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9119
9120 /* The first operand to the CALL is the address of the function
9121 called. */
9122 addr = CALL_EXPR_FN (call);
9123
9124 /* If there is no function, return early. */
9125 if (addr == NULL_TREE)
9126 return NULL_TREE;
9127
9128 STRIP_NOPS (addr);
9129
9130 /* If this is a readonly function pointer, extract its initial value. */
9131 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9132 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9133 && DECL_INITIAL (addr))
9134 addr = DECL_INITIAL (addr);
9135
9136 /* If the address is just `&f' for some function `f', then we know
9137 that `f' is being called. */
9138 if (TREE_CODE (addr) == ADDR_EXPR
9139 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9140 return TREE_OPERAND (addr, 0);
9141
9142 /* We couldn't figure out what was being called. */
9143 return NULL_TREE;
9144 }
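
/* For example, for the C call `foo (1)' the CALL_EXPR_FN operand is
   `&foo', an ADDR_EXPR of the FUNCTION_DECL, so this returns that
   FUNCTION_DECL; for a call through a non-constant function pointer it
   returns NULL_TREE.  */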
9145
9146 #define TREE_MEM_USAGE_SPACES 40
9147
9148 /* Print debugging information about tree nodes generated during the compile,
9149 and any language-specific information. */
9150
9151 void
9152 dump_tree_statistics (void)
9153 {
9154 if (GATHER_STATISTICS)
9155 {
9156 int i;
9157 int total_nodes, total_bytes;
9158 fprintf (stderr, "\nKind Nodes Bytes\n");
9159 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9160 total_nodes = total_bytes = 0;
9161 for (i = 0; i < (int) all_kinds; i++)
9162 {
9163 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9164 tree_node_counts[i], tree_node_sizes[i]);
9165 total_nodes += tree_node_counts[i];
9166 total_bytes += tree_node_sizes[i];
9167 }
9168 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9169 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9170 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9171 fprintf (stderr, "Code Nodes\n");
9172 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9173 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9174 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9175 tree_code_counts[i]);
9176 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9177 fprintf (stderr, "\n");
9178 ssanames_print_statistics ();
9179 fprintf (stderr, "\n");
9180 phinodes_print_statistics ();
9181 fprintf (stderr, "\n");
9182 }
9183 else
9184 fprintf (stderr, "(No per-node statistics)\n");
9185
9186 print_type_hash_statistics ();
9187 print_debug_expr_statistics ();
9188 print_value_expr_statistics ();
9189 lang_hooks.print_statistics ();
9190 }
9191 \f
9192 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9193
9194 /* Generate a crc32 of a byte. */
9195
9196 static unsigned
9197 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9198 {
9199 unsigned ix;
9200
9201 for (ix = bits; ix--; value <<= 1)
9202 {
9203 unsigned feedback;
9204
9205 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9206 chksum <<= 1;
9207 chksum ^= feedback;
9208 }
9209 return chksum;
9210 }
9211
9212 /* Generate a crc32 of a 32-bit unsigned. */
9213
9214 unsigned
9215 crc32_unsigned (unsigned chksum, unsigned value)
9216 {
9217 return crc32_unsigned_bits (chksum, value, 32);
9218 }
9219
9220 /* Generate a crc32 of a byte. */
9221
9222 unsigned
9223 crc32_byte (unsigned chksum, char byte)
9224 {
9225 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9226 }
9227
9228 /* Generate a crc32 of a string. */
9229
9230 unsigned
9231 crc32_string (unsigned chksum, const char *string)
9232 {
9233 do
9234 {
9235 chksum = crc32_byte (chksum, *string);
9236 }
9237 while (*string++);
9238 return chksum;
9239 }
9240
9241 /* P is a string that will be used in a symbol. Mask out any characters
9242 that are not valid in that context. */
9243
9244 void
9245 clean_symbol_name (char *p)
9246 {
9247 for (; *p; p++)
9248 if (! (ISALNUM (*p)
9249 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9250 || *p == '$'
9251 #endif
9252 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9253 || *p == '.'
9254 #endif
9255 ))
9256 *p = '_';
9257 }
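
/* For example, clean_symbol_name applied to a writable copy of
   "foo-bar.c" yields "foo_bar.c" on hosts that allow `.' in labels, and
   "foo_bar_c" where NO_DOT_IN_LABEL is defined.  */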
9258
9259 /* Generate a name for a special-purpose function.
9260 The generated name may need to be unique across the whole link.
9261 Changes to this function may also require corresponding changes to
9262 xstrdup_mask_random.
9263 TYPE is some string to identify the purpose of this function to the
9264 linker or collect2; it must start with an uppercase letter,
9265 one of:
9266 I - for constructors
9267 D - for destructors
9268 N - for C++ anonymous namespaces
9269 F - for DWARF unwind frame information. */
9270
9271 tree
9272 get_file_function_name (const char *type)
9273 {
9274 char *buf;
9275 const char *p;
9276 char *q;
9277
9278 /* If we already have a name we know to be unique, just use that. */
9279 if (first_global_object_name)
9280 p = q = ASTRDUP (first_global_object_name);
9281 /* If the target is handling the constructors/destructors, they
9282 will be local to this file and the name is only necessary for
9283 debugging purposes.
9284 We also assign sub_I and sub_D suffixes to constructors called from
9285 the global static constructors. These are always local. */
9286 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9287 || (strncmp (type, "sub_", 4) == 0
9288 && (type[4] == 'I' || type[4] == 'D')))
9289 {
9290 const char *file = main_input_filename;
9291 if (! file)
9292 file = LOCATION_FILE (input_location);
9293 /* Just use the file's basename, because the full pathname
9294 might be quite long. */
9295 p = q = ASTRDUP (lbasename (file));
9296 }
9297 else
9298 {
9299 /* Otherwise, the name must be unique across the entire link.
9300 We don't have anything that we know to be unique to this translation
9301 unit, so use what we do have and throw in some randomness. */
9302 unsigned len;
9303 const char *name = weak_global_object_name;
9304 const char *file = main_input_filename;
9305
9306 if (! name)
9307 name = "";
9308 if (! file)
9309 file = LOCATION_FILE (input_location);
9310
9311 len = strlen (file);
9312 q = (char *) alloca (9 + 17 + len + 1);
9313 memcpy (q, file, len + 1);
9314
9315 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9316 crc32_string (0, name), get_random_seed (false));
9317
9318 p = q;
9319 }
9320
9321 clean_symbol_name (q);
9322 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9323 + strlen (type));
9324
9325 /* Set up the name of the file-level functions we may need.
9326 Use a global object (which is already required to be unique over
9327 the program) rather than the file name (which imposes extra
9328 constraints). */
9329 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9330
9331 return get_identifier (buf);
9332 }
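
/* For example, when first_global_object_name is "main",
   get_file_function_name ("I") yields the identifier "_GLOBAL__I_main",
   e.g. the name used for a file-level static-constructor function.  */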
9333 \f
9334 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9335
9336 /* Complain that the tree code of NODE does not match the expected 0
9337 terminated list of trailing codes. The trailing code list can be
9338 empty, for a more vague error message. FILE, LINE, and FUNCTION
9339 are of the caller. */
9340
9341 void
9342 tree_check_failed (const_tree node, const char *file,
9343 int line, const char *function, ...)
9344 {
9345 va_list args;
9346 const char *buffer;
9347 unsigned length = 0;
9348 enum tree_code code;
9349
9350 va_start (args, function);
9351 while ((code = (enum tree_code) va_arg (args, int)))
9352 length += 4 + strlen (get_tree_code_name (code));
9353 va_end (args);
9354 if (length)
9355 {
9356 char *tmp;
9357 va_start (args, function);
9358 length += strlen ("expected ");
9359 buffer = tmp = (char *) alloca (length);
9360 length = 0;
9361 while ((code = (enum tree_code) va_arg (args, int)))
9362 {
9363 const char *prefix = length ? " or " : "expected ";
9364
9365 strcpy (tmp + length, prefix);
9366 length += strlen (prefix);
9367 strcpy (tmp + length, get_tree_code_name (code));
9368 length += strlen (get_tree_code_name (code));
9369 }
9370 va_end (args);
9371 }
9372 else
9373 buffer = "unexpected node";
9374
9375 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9376 buffer, get_tree_code_name (TREE_CODE (node)),
9377 function, trim_filename (file), line);
9378 }
9379
9380 /* Complain that the tree code of NODE matches one of the codes in the
9381 0-terminated trailing list, when it should not. FILE, LINE, and
9382 FUNCTION are of the caller. */
9383
9384 void
9385 tree_not_check_failed (const_tree node, const char *file,
9386 int line, const char *function, ...)
9387 {
9388 va_list args;
9389 char *buffer;
9390 unsigned length = 0;
9391 enum tree_code code;
9392
9393 va_start (args, function);
9394 while ((code = (enum tree_code) va_arg (args, int)))
9395 length += 4 + strlen (get_tree_code_name (code));
9396 va_end (args);
9397 va_start (args, function);
9398 buffer = (char *) alloca (length);
9399 length = 0;
9400 while ((code = (enum tree_code) va_arg (args, int)))
9401 {
9402 if (length)
9403 {
9404 strcpy (buffer + length, " or ");
9405 length += 4;
9406 }
9407 strcpy (buffer + length, get_tree_code_name (code));
9408 length += strlen (get_tree_code_name (code));
9409 }
9410 va_end (args);
9411
9412 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9413 buffer, get_tree_code_name (TREE_CODE (node)),
9414 function, trim_filename (file), line);
9415 }
9416
9417 /* Similar to tree_check_failed, except that we check for a class of tree
9418 code, given in CL. */
9419
9420 void
9421 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9422 const char *file, int line, const char *function)
9423 {
9424 internal_error
9425 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9426 TREE_CODE_CLASS_STRING (cl),
9427 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9428 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9429 }
9430
9431 /* Similar to tree_check_failed, except that instead of specifying a
9432 dozen codes, use the knowledge that they're all sequential. */
9433
9434 void
9435 tree_range_check_failed (const_tree node, const char *file, int line,
9436 const char *function, enum tree_code c1,
9437 enum tree_code c2)
9438 {
9439 char *buffer;
9440 unsigned length = 0;
9441 unsigned int c;
9442
9443 for (c = c1; c <= c2; ++c)
9444 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9445
9446 length += strlen ("expected ");
9447 buffer = (char *) alloca (length);
9448 length = 0;
9449
9450 for (c = c1; c <= c2; ++c)
9451 {
9452 const char *prefix = length ? " or " : "expected ";
9453
9454 strcpy (buffer + length, prefix);
9455 length += strlen (prefix);
9456 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9457 length += strlen (get_tree_code_name ((enum tree_code) c));
9458 }
9459
9460 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9461 buffer, get_tree_code_name (TREE_CODE (node)),
9462 function, trim_filename (file), line);
9463 }
9464
9465
9466 /* Similar to tree_check_failed, except that we check that a tree does
9467 not have the specified code, given in CL. */
9468
9469 void
9470 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9471 const char *file, int line, const char *function)
9472 {
9473 internal_error
9474 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9475 TREE_CODE_CLASS_STRING (cl),
9476 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9477 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9478 }
9479
9480
9481 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9482
9483 void
9484 omp_clause_check_failed (const_tree node, const char *file, int line,
9485 const char *function, enum omp_clause_code code)
9486 {
9487 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9488 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9489 function, trim_filename (file), line);
9490 }
9491
9492
9493 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9494
9495 void
9496 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9497 const char *function, enum omp_clause_code c1,
9498 enum omp_clause_code c2)
9499 {
9500 char *buffer;
9501 unsigned length = 0;
9502 unsigned int c;
9503
9504 for (c = c1; c <= c2; ++c)
9505 length += 4 + strlen (omp_clause_code_name[c]);
9506
9507 length += strlen ("expected ");
9508 buffer = (char *) alloca (length);
9509 length = 0;
9510
9511 for (c = c1; c <= c2; ++c)
9512 {
9513 const char *prefix = length ? " or " : "expected ";
9514
9515 strcpy (buffer + length, prefix);
9516 length += strlen (prefix);
9517 strcpy (buffer + length, omp_clause_code_name[c]);
9518 length += strlen (omp_clause_code_name[c]);
9519 }
9520
9521 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9522 buffer, omp_clause_code_name[TREE_CODE (node)],
9523 function, trim_filename (file), line);
9524 }
9525
9526
9527 #undef DEFTREESTRUCT
9528 #define DEFTREESTRUCT(VAL, NAME) NAME,
9529
9530 static const char *ts_enum_names[] = {
9531 #include "treestruct.def"
9532 };
9533 #undef DEFTREESTRUCT
9534
9535 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9536
9537 /* Similar to tree_class_check_failed, except that we check
9538 whether CODE contains the tree structure identified by EN. */
9539
9540 void
9541 tree_contains_struct_check_failed (const_tree node,
9542 const enum tree_node_structure_enum en,
9543 const char *file, int line,
9544 const char *function)
9545 {
9546 internal_error
9547 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9548 TS_ENUM_NAME (en),
9549 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9550 }
9551
9552
9553 /* Similar to above, except that the check is for the bounds of a
9554 TREE_INT_CST's (dynamically sized) vector of elements. */
9555
9556 void
9557 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9558 const char *function)
9559 {
9560 internal_error
9561 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9562 idx + 1, len, function, trim_filename (file), line);
9563 }
9564
9565 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9566 (dynamically sized) vector. */
9567
9568 void
9569 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9570 const char *function)
9571 {
9572 internal_error
9573 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9574 idx + 1, len, function, trim_filename (file), line);
9575 }
9576
9577 /* Similar to above, except that the check is for the bounds of the operand
9578 vector of an expression node EXP. */
9579
9580 void
9581 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9582 int line, const char *function)
9583 {
9584 enum tree_code code = TREE_CODE (exp);
9585 internal_error
9586 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9587 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9588 function, trim_filename (file), line);
9589 }
9590
9591 /* Similar to above, except that the check is for the number of
9592 operands of an OMP_CLAUSE node. */
9593
9594 void
9595 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9596 int line, const char *function)
9597 {
9598 internal_error
9599 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9600 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9601 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9602 trim_filename (file), line);
9603 }
9604 #endif /* ENABLE_TREE_CHECKING */
9605 \f
9606 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9607 and mapped to the machine mode MODE. Initialize its fields and build
9608 the information necessary for debugging output. */
9609
9610 static tree
9611 make_vector_type (tree innertype, int nunits, machine_mode mode)
9612 {
9613 tree t;
9614 inchash::hash hstate;
9615
9616 t = make_node (VECTOR_TYPE);
9617 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9618 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9619 SET_TYPE_MODE (t, mode);
9620
9621 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9622 SET_TYPE_STRUCTURAL_EQUALITY (t);
9623 else if (TYPE_CANONICAL (innertype) != innertype
9624 || mode != VOIDmode)
9625 TYPE_CANONICAL (t)
9626 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9627
9628 layout_type (t);
9629
9630 hstate.add_wide_int (VECTOR_TYPE);
9631 hstate.add_wide_int (nunits);
9632 hstate.add_wide_int (mode);
9633 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9634 t = type_hash_canon (hstate.end (), t);
9635
9636 /* We have built a main variant, based on the main variant of the
9637 inner type. Use it to build the variant we return. */
9638 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9639 && TREE_TYPE (t) != innertype)
9640 return build_type_attribute_qual_variant (t,
9641 TYPE_ATTRIBUTES (innertype),
9642 TYPE_QUALS (innertype));
9643
9644 return t;
9645 }
9646
9647 static tree
9648 make_or_reuse_type (unsigned size, int unsignedp)
9649 {
9650 int i;
9651
9652 if (size == INT_TYPE_SIZE)
9653 return unsignedp ? unsigned_type_node : integer_type_node;
9654 if (size == CHAR_TYPE_SIZE)
9655 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9656 if (size == SHORT_TYPE_SIZE)
9657 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9658 if (size == LONG_TYPE_SIZE)
9659 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9660 if (size == LONG_LONG_TYPE_SIZE)
9661 return (unsignedp ? long_long_unsigned_type_node
9662 : long_long_integer_type_node);
9663
9664 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9665 if (size == int_n_data[i].bitsize
9666 && int_n_enabled_p[i])
9667 return (unsignedp ? int_n_trees[i].unsigned_type
9668 : int_n_trees[i].signed_type);
9669
9670 if (unsignedp)
9671 return make_unsigned_type (size);
9672 else
9673 return make_signed_type (size);
9674 }
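
/* For example (illustrative): on a target where INT_TYPE_SIZE is 32,
   make_or_reuse_type (32, 1) simply returns unsigned_type_node, while a
   request for a width that matches none of the standard C sizes and none of
   the target's enabled __intN widths, say make_or_reuse_type (24, 0), falls
   through to make_signed_type and creates a fresh 24-bit type node.  */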
9675
9676 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9677
9678 static tree
9679 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9680 {
9681 if (satp)
9682 {
9683 if (size == SHORT_FRACT_TYPE_SIZE)
9684 return unsignedp ? sat_unsigned_short_fract_type_node
9685 : sat_short_fract_type_node;
9686 if (size == FRACT_TYPE_SIZE)
9687 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9688 if (size == LONG_FRACT_TYPE_SIZE)
9689 return unsignedp ? sat_unsigned_long_fract_type_node
9690 : sat_long_fract_type_node;
9691 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9692 return unsignedp ? sat_unsigned_long_long_fract_type_node
9693 : sat_long_long_fract_type_node;
9694 }
9695 else
9696 {
9697 if (size == SHORT_FRACT_TYPE_SIZE)
9698 return unsignedp ? unsigned_short_fract_type_node
9699 : short_fract_type_node;
9700 if (size == FRACT_TYPE_SIZE)
9701 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9702 if (size == LONG_FRACT_TYPE_SIZE)
9703 return unsignedp ? unsigned_long_fract_type_node
9704 : long_fract_type_node;
9705 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9706 return unsignedp ? unsigned_long_long_fract_type_node
9707 : long_long_fract_type_node;
9708 }
9709
9710 return make_fract_type (size, unsignedp, satp);
9711 }
9712
9713 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9714
9715 static tree
9716 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9717 {
9718 if (satp)
9719 {
9720 if (size == SHORT_ACCUM_TYPE_SIZE)
9721 return unsignedp ? sat_unsigned_short_accum_type_node
9722 : sat_short_accum_type_node;
9723 if (size == ACCUM_TYPE_SIZE)
9724 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9725 if (size == LONG_ACCUM_TYPE_SIZE)
9726 return unsignedp ? sat_unsigned_long_accum_type_node
9727 : sat_long_accum_type_node;
9728 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9729 return unsignedp ? sat_unsigned_long_long_accum_type_node
9730 : sat_long_long_accum_type_node;
9731 }
9732 else
9733 {
9734 if (size == SHORT_ACCUM_TYPE_SIZE)
9735 return unsignedp ? unsigned_short_accum_type_node
9736 : short_accum_type_node;
9737 if (size == ACCUM_TYPE_SIZE)
9738 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9739 if (size == LONG_ACCUM_TYPE_SIZE)
9740 return unsignedp ? unsigned_long_accum_type_node
9741 : long_accum_type_node;
9742 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9743 return unsignedp ? unsigned_long_long_accum_type_node
9744 : long_long_accum_type_node;
9745 }
9746
9747 return make_accum_type (size, unsignedp, satp);
9748 }
9749
9750
9751 /* Create an atomic variant node for TYPE. This routine is called
9752 during initialization of data types to create the 5 basic atomic
9753 types. The generic build_variant_type function requires these to
9754 already be set up in order to function properly, so cannot be
9755 called from there. If ALIGN is non-zero, then ensure alignment is
9756 overridden to this value. */
9757
9758 static tree
9759 build_atomic_base (tree type, unsigned int align)
9760 {
9761 tree t;
9762
9763 /* Make sure it's not already registered. */
9764 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9765 return t;
9766
9767 t = build_variant_type_copy (type);
9768 set_type_quals (t, TYPE_QUAL_ATOMIC);
9769
9770 if (align)
9771 TYPE_ALIGN (t) = align;
9772
9773 return t;
9774 }
9775
9776 /* Create nodes for all integer types (and error_mark_node) using the sizes
9777 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9778 SHORT_DOUBLE specifies whether double should be of the same precision
9779 as float. */
9780
9781 void
9782 build_common_tree_nodes (bool signed_char, bool short_double)
9783 {
9784 int i;
9785
9786 error_mark_node = make_node (ERROR_MARK);
9787 TREE_TYPE (error_mark_node) = error_mark_node;
9788
9789 initialize_sizetypes ();
9790
9791 /* Define both `signed char' and `unsigned char'. */
9792 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9793 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9794 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9795 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9796
9797 /* Define `char', which is like either `signed char' or `unsigned char'
9798 but not the same as either. */
9799 char_type_node
9800 = (signed_char
9801 ? make_signed_type (CHAR_TYPE_SIZE)
9802 : make_unsigned_type (CHAR_TYPE_SIZE));
9803 TYPE_STRING_FLAG (char_type_node) = 1;
9804
9805 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9806 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9807 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9808 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9809 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9810 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9811 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9812 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9813
9814 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9815 {
9816 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9817 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9818 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9819 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9820
9821 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9822 && int_n_enabled_p[i])
9823 {
9824 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9825 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9826 }
9827 }
9828
9829 /* Define a boolean type. This type only represents boolean values but
9830 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9831 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9832 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9833 TYPE_PRECISION (boolean_type_node) = 1;
9834 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9835
9836 /* Define what type to use for size_t. */
9837 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9838 size_type_node = unsigned_type_node;
9839 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9840 size_type_node = long_unsigned_type_node;
9841 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9842 size_type_node = long_long_unsigned_type_node;
9843 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9844 size_type_node = short_unsigned_type_node;
9845 else
9846 {
9847 int i;
9848
9849 size_type_node = NULL_TREE;
9850 for (i = 0; i < NUM_INT_N_ENTS; i++)
9851 if (int_n_enabled_p[i])
9852 {
9853 char name[50];
9854 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9855
9856 if (strcmp (name, SIZE_TYPE) == 0)
9857 {
9858 size_type_node = int_n_trees[i].unsigned_type;
9859 }
9860 }
9861 if (size_type_node == NULL_TREE)
9862 gcc_unreachable ();
9863 }
9864
9865 /* Fill in the rest of the sized types. Reuse existing type nodes
9866 when possible. */
9867 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9868 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9869 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9870 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9871 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9872
9873 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9874 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9875 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9876 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9877 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9878
9879 /* Don't call build_qualified_type for atomics. That routine does
9880 special processing for atomics, and until they are initialized
9881 it's better not to make that call.
9882
9883 Check to see if there is a target override for atomic types. */
9884
9885 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9886 targetm.atomic_align_for_mode (QImode));
9887 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9888 targetm.atomic_align_for_mode (HImode));
9889 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9890 targetm.atomic_align_for_mode (SImode));
9891 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9892 targetm.atomic_align_for_mode (DImode));
9893 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9894 targetm.atomic_align_for_mode (TImode));
9895
9896 access_public_node = get_identifier ("public");
9897 access_protected_node = get_identifier ("protected");
9898 access_private_node = get_identifier ("private");
9899
9900 /* Define these next since types below may use them. */
9901 integer_zero_node = build_int_cst (integer_type_node, 0);
9902 integer_one_node = build_int_cst (integer_type_node, 1);
9903 integer_three_node = build_int_cst (integer_type_node, 3);
9904 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9905
9906 size_zero_node = size_int (0);
9907 size_one_node = size_int (1);
9908 bitsize_zero_node = bitsize_int (0);
9909 bitsize_one_node = bitsize_int (1);
9910 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9911
9912 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9913 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9914
9915 void_type_node = make_node (VOID_TYPE);
9916 layout_type (void_type_node);
9917
9918 pointer_bounds_type_node = targetm.chkp_bound_type ();
9919
9920 /* We are not going to have real types in C with less than byte alignment,
9921 so we might as well not have any types that claim to have it. */
9922 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9923 TYPE_USER_ALIGN (void_type_node) = 0;
9924
9925 void_node = make_node (VOID_CST);
9926 TREE_TYPE (void_node) = void_type_node;
9927
9928 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9929 layout_type (TREE_TYPE (null_pointer_node));
9930
9931 ptr_type_node = build_pointer_type (void_type_node);
9932 const_ptr_type_node
9933 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9934 fileptr_type_node = ptr_type_node;
9935
9936 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9937
9938 float_type_node = make_node (REAL_TYPE);
9939 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9940 layout_type (float_type_node);
9941
9942 double_type_node = make_node (REAL_TYPE);
9943 if (short_double)
9944 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9945 else
9946 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9947 layout_type (double_type_node);
9948
9949 long_double_type_node = make_node (REAL_TYPE);
9950 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9951 layout_type (long_double_type_node);
9952
9953 float_ptr_type_node = build_pointer_type (float_type_node);
9954 double_ptr_type_node = build_pointer_type (double_type_node);
9955 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9956 integer_ptr_type_node = build_pointer_type (integer_type_node);
9957
9958 /* Fixed size integer types. */
9959 uint16_type_node = make_or_reuse_type (16, 1);
9960 uint32_type_node = make_or_reuse_type (32, 1);
9961 uint64_type_node = make_or_reuse_type (64, 1);
9962
9963 /* Decimal float types. */
9964 dfloat32_type_node = make_node (REAL_TYPE);
9965 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9966 layout_type (dfloat32_type_node);
9967 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9968 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9969
9970 dfloat64_type_node = make_node (REAL_TYPE);
9971 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9972 layout_type (dfloat64_type_node);
9973 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9974 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9975
9976 dfloat128_type_node = make_node (REAL_TYPE);
9977 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9978 layout_type (dfloat128_type_node);
9979 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9980 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9981
9982 complex_integer_type_node = build_complex_type (integer_type_node);
9983 complex_float_type_node = build_complex_type (float_type_node);
9984 complex_double_type_node = build_complex_type (double_type_node);
9985 complex_long_double_type_node = build_complex_type (long_double_type_node);
9986
9987 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9988 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9989 sat_ ## KIND ## _type_node = \
9990 make_sat_signed_ ## KIND ## _type (SIZE); \
9991 sat_unsigned_ ## KIND ## _type_node = \
9992 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9993 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9994 unsigned_ ## KIND ## _type_node = \
9995 make_unsigned_ ## KIND ## _type (SIZE);
9996
9997 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9998 sat_ ## WIDTH ## KIND ## _type_node = \
9999 make_sat_signed_ ## KIND ## _type (SIZE); \
10000 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10001 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10002 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10003 unsigned_ ## WIDTH ## KIND ## _type_node = \
10004 make_unsigned_ ## KIND ## _type (SIZE);
10005
10006 /* Make fixed-point type nodes based on four different widths. */
10007 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10008 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10009 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10010 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10011 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10012
10013 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10014 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10015 NAME ## _type_node = \
10016 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10017 u ## NAME ## _type_node = \
10018 make_or_reuse_unsigned_ ## KIND ## _type \
10019 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10020 sat_ ## NAME ## _type_node = \
10021 make_or_reuse_sat_signed_ ## KIND ## _type \
10022 (GET_MODE_BITSIZE (MODE ## mode)); \
10023 sat_u ## NAME ## _type_node = \
10024 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10025 (GET_MODE_BITSIZE (U ## MODE ## mode));
10026
10027 /* Fixed-point type and mode nodes. */
10028 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10029 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10030 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10031 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10032 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10033 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10034 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10035 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10036 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10037 MAKE_FIXED_MODE_NODE (accum, da, DA)
10038 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10039
10040 {
10041 tree t = targetm.build_builtin_va_list ();
10042
10043 /* Many back-ends define record types without setting TYPE_NAME.
10044 If we copied the record type here, we'd keep the original
10045 record type without a name. This breaks name mangling. So,
10046 don't copy record types and let c_common_nodes_and_builtins()
10047 declare the type to be __builtin_va_list. */
10048 if (TREE_CODE (t) != RECORD_TYPE)
10049 t = build_variant_type_copy (t);
10050
10051 va_list_type_node = t;
10052 }
10053 }
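
/* Illustrative sketch (the exact call site is an assumption, not shown in
   this file): a front end invokes this once during initialization, passing
   its own notion of char signedness and double precision, e.g.

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   after which nodes such as integer_type_node, size_type_node and
   boolean_type_node are ready for use.  */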
10054
10055 /* Modify DECL for given flags.
10056 TM_PURE attribute is set only on types, so the function will modify
10057 DECL's type when ECF_TM_PURE is used. */
10058
10059 void
10060 set_call_expr_flags (tree decl, int flags)
10061 {
10062 if (flags & ECF_NOTHROW)
10063 TREE_NOTHROW (decl) = 1;
10064 if (flags & ECF_CONST)
10065 TREE_READONLY (decl) = 1;
10066 if (flags & ECF_PURE)
10067 DECL_PURE_P (decl) = 1;
10068 if (flags & ECF_LOOPING_CONST_OR_PURE)
10069 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10070 if (flags & ECF_NOVOPS)
10071 DECL_IS_NOVOPS (decl) = 1;
10072 if (flags & ECF_NORETURN)
10073 TREE_THIS_VOLATILE (decl) = 1;
10074 if (flags & ECF_MALLOC)
10075 DECL_IS_MALLOC (decl) = 1;
10076 if (flags & ECF_RETURNS_TWICE)
10077 DECL_IS_RETURNS_TWICE (decl) = 1;
10078 if (flags & ECF_LEAF)
10079 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10080 NULL, DECL_ATTRIBUTES (decl));
10081 if ((flags & ECF_TM_PURE) && flag_tm)
10082 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10083 /* Looping const or pure is implied by noreturn.
10084 There is currently no way to declare looping const or looping pure alone. */
10085 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10086 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10087 }
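
/* For example, local_define_builtin below uses this to mark builtins; a
   caller with an existing FUNCTION_DECL (here DECL, an assumed pre-existing
   declaration) could do the same directly:

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);

   which sets TREE_NOTHROW and attaches the "leaf" attribute.  */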
10088
10089
10090 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10091
10092 static void
10093 local_define_builtin (const char *name, tree type, enum built_in_function code,
10094 const char *library_name, int ecf_flags)
10095 {
10096 tree decl;
10097
10098 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10099 library_name, NULL_TREE);
10100 set_call_expr_flags (decl, ecf_flags);
10101
10102 set_builtin_decl (code, decl, true);
10103 }
10104
10105 /* Call this function after instantiating all builtins that the language
10106 front end cares about. This will build the rest of the builtins
10107 and internal functions that are relied upon by the tree optimizers and
10108 the middle-end. */
10109
10110 void
10111 build_common_builtin_nodes (void)
10112 {
10113 tree tmp, ftype;
10114 int ecf_flags;
10115
10116 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10117 {
10118 ftype = build_function_type (void_type_node, void_list_node);
10119 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10120 "__builtin_unreachable",
10121 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10122 | ECF_CONST);
10123 }
10124
10125 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10126 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10127 {
10128 ftype = build_function_type_list (ptr_type_node,
10129 ptr_type_node, const_ptr_type_node,
10130 size_type_node, NULL_TREE);
10131
10132 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10133 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10134 "memcpy", ECF_NOTHROW | ECF_LEAF);
10135 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10136 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10137 "memmove", ECF_NOTHROW | ECF_LEAF);
10138 }
10139
10140 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10141 {
10142 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10143 const_ptr_type_node, size_type_node,
10144 NULL_TREE);
10145 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10146 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10147 }
10148
10149 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10150 {
10151 ftype = build_function_type_list (ptr_type_node,
10152 ptr_type_node, integer_type_node,
10153 size_type_node, NULL_TREE);
10154 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10155 "memset", ECF_NOTHROW | ECF_LEAF);
10156 }
10157
10158 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10159 {
10160 ftype = build_function_type_list (ptr_type_node,
10161 size_type_node, NULL_TREE);
10162 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10163 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10164 }
10165
10166 ftype = build_function_type_list (ptr_type_node, size_type_node,
10167 size_type_node, NULL_TREE);
10168 local_define_builtin ("__builtin_alloca_with_align", ftype,
10169 BUILT_IN_ALLOCA_WITH_ALIGN,
10170 "__builtin_alloca_with_align",
10171 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10172
10173 /* If we're checking the stack, `alloca' can throw. */
10174 if (flag_stack_check)
10175 {
10176 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10177 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10178 }
10179
10180 ftype = build_function_type_list (void_type_node,
10181 ptr_type_node, ptr_type_node,
10182 ptr_type_node, NULL_TREE);
10183 local_define_builtin ("__builtin_init_trampoline", ftype,
10184 BUILT_IN_INIT_TRAMPOLINE,
10185 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10186 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10187 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10188 "__builtin_init_heap_trampoline",
10189 ECF_NOTHROW | ECF_LEAF);
10190
10191 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10192 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10193 BUILT_IN_ADJUST_TRAMPOLINE,
10194 "__builtin_adjust_trampoline",
10195 ECF_CONST | ECF_NOTHROW);
10196
10197 ftype = build_function_type_list (void_type_node,
10198 ptr_type_node, ptr_type_node, NULL_TREE);
10199 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10200 BUILT_IN_NONLOCAL_GOTO,
10201 "__builtin_nonlocal_goto",
10202 ECF_NORETURN | ECF_NOTHROW);
10203
10204 ftype = build_function_type_list (void_type_node,
10205 ptr_type_node, ptr_type_node, NULL_TREE);
10206 local_define_builtin ("__builtin_setjmp_setup", ftype,
10207 BUILT_IN_SETJMP_SETUP,
10208 "__builtin_setjmp_setup", ECF_NOTHROW);
10209
10210 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10211 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10212 BUILT_IN_SETJMP_RECEIVER,
10213 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10214
10215 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10216 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10217 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10218
10219 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10220 local_define_builtin ("__builtin_stack_restore", ftype,
10221 BUILT_IN_STACK_RESTORE,
10222 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10223
10224 /* If there's a possibility that we might use the ARM EABI, build the
10225 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10226 if (targetm.arm_eabi_unwinder)
10227 {
10228 ftype = build_function_type_list (void_type_node, NULL_TREE);
10229 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10230 BUILT_IN_CXA_END_CLEANUP,
10231 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10232 }
10233
10234 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10235 local_define_builtin ("__builtin_unwind_resume", ftype,
10236 BUILT_IN_UNWIND_RESUME,
10237 ((targetm_common.except_unwind_info (&global_options)
10238 == UI_SJLJ)
10239 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10240 ECF_NORETURN);
10241
10242 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10243 {
10244 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10245 NULL_TREE);
10246 local_define_builtin ("__builtin_return_address", ftype,
10247 BUILT_IN_RETURN_ADDRESS,
10248 "__builtin_return_address",
10249 ECF_NOTHROW);
10250 }
10251
10252 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10253 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10254 {
10255 ftype = build_function_type_list (void_type_node, ptr_type_node,
10256 ptr_type_node, NULL_TREE);
10257 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10258 local_define_builtin ("__cyg_profile_func_enter", ftype,
10259 BUILT_IN_PROFILE_FUNC_ENTER,
10260 "__cyg_profile_func_enter", 0);
10261 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10262 local_define_builtin ("__cyg_profile_func_exit", ftype,
10263 BUILT_IN_PROFILE_FUNC_EXIT,
10264 "__cyg_profile_func_exit", 0);
10265 }
10266
10267 /* The exception object and filter values from the runtime. The argument
10268 must be zero before exception lowering, i.e. from the front end. After
10269 exception lowering, it will be the region number for the exception
10270 landing pad. These functions are PURE instead of CONST to prevent
10271 them from being hoisted past the exception edge that will initialize
10272 its value in the landing pad. */
10273 ftype = build_function_type_list (ptr_type_node,
10274 integer_type_node, NULL_TREE);
10275 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10276 /* Only use TM_PURE if we have TM language support. */
10277 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10278 ecf_flags |= ECF_TM_PURE;
10279 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10280 "__builtin_eh_pointer", ecf_flags);
10281
10282 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10283 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10284 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10285 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10286
10287 ftype = build_function_type_list (void_type_node,
10288 integer_type_node, integer_type_node,
10289 NULL_TREE);
10290 local_define_builtin ("__builtin_eh_copy_values", ftype,
10291 BUILT_IN_EH_COPY_VALUES,
10292 "__builtin_eh_copy_values", ECF_NOTHROW);
10293
10294 /* Complex multiplication and division. These are handled as builtins
10295 rather than optabs because emit_library_call_value doesn't support
10296 complex. Further, we can do slightly better with folding these
10297 beasties if the real and imaginary parts of the arguments are separate. */
10298 {
10299 int mode;
10300
10301 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10302 {
10303 char mode_name_buf[4], *q;
10304 const char *p;
10305 enum built_in_function mcode, dcode;
10306 tree type, inner_type;
10307 const char *prefix = "__";
10308
10309 if (targetm.libfunc_gnu_prefix)
10310 prefix = "__gnu_";
10311
10312 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10313 if (type == NULL)
10314 continue;
10315 inner_type = TREE_TYPE (type);
10316
10317 ftype = build_function_type_list (type, inner_type, inner_type,
10318 inner_type, inner_type, NULL_TREE);
10319
10320 mcode = ((enum built_in_function)
10321 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10322 dcode = ((enum built_in_function)
10323 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10324
10325 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10326 *q = TOLOWER (*p);
10327 *q = '\0';
10328
10329 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10330 NULL);
10331 local_define_builtin (built_in_names[mcode], ftype, mcode,
10332 built_in_names[mcode],
10333 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10334
10335 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10336 NULL);
10337 local_define_builtin (built_in_names[dcode], ftype, dcode,
10338 built_in_names[dcode],
10339 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10340 }
10341 }
10342
10343 init_internal_fns ();
10344 }
10345
10346 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10347 better way.
10348
10349 If we requested a pointer to a vector, build up the pointers that
10350 we stripped off while looking for the inner type. Similarly for
10351 return values from functions.
10352
10353 The argument TYPE is the top of the chain, and BOTTOM is the
10354 new type which we will point to. */
10355
10356 tree
10357 reconstruct_complex_type (tree type, tree bottom)
10358 {
10359 tree inner, outer;
10360
10361 if (TREE_CODE (type) == POINTER_TYPE)
10362 {
10363 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10364 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10365 TYPE_REF_CAN_ALIAS_ALL (type));
10366 }
10367 else if (TREE_CODE (type) == REFERENCE_TYPE)
10368 {
10369 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10370 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10371 TYPE_REF_CAN_ALIAS_ALL (type));
10372 }
10373 else if (TREE_CODE (type) == ARRAY_TYPE)
10374 {
10375 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10376 outer = build_array_type (inner, TYPE_DOMAIN (type));
10377 }
10378 else if (TREE_CODE (type) == FUNCTION_TYPE)
10379 {
10380 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10381 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10382 }
10383 else if (TREE_CODE (type) == METHOD_TYPE)
10384 {
10385 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10386 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10387 so we must compensate by getting rid of it. */
10388 outer
10389 = build_method_type_directly
10390 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10391 inner,
10392 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10393 }
10394 else if (TREE_CODE (type) == OFFSET_TYPE)
10395 {
10396 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10397 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10398 }
10399 else
10400 return bottom;
10401
10402 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10403 TYPE_QUALS (type));
10404 }
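
/* Illustrative sketch (the local names are assumptions): rebuilding a
   pointer chain around a vector type:

     tree v4sf = build_vector_type (float_type_node, 4);
     tree p = reconstruct_complex_type (float_ptr_type_node, v4sf);

   Here P is a POINTER_TYPE whose TREE_TYPE is the four-element float vector,
   with the qualifiers and attributes of the original pointer type
   preserved.  */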
10405
10406 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10407 the inner type. */
10408 tree
10409 build_vector_type_for_mode (tree innertype, machine_mode mode)
10410 {
10411 int nunits;
10412
10413 switch (GET_MODE_CLASS (mode))
10414 {
10415 case MODE_VECTOR_INT:
10416 case MODE_VECTOR_FLOAT:
10417 case MODE_VECTOR_FRACT:
10418 case MODE_VECTOR_UFRACT:
10419 case MODE_VECTOR_ACCUM:
10420 case MODE_VECTOR_UACCUM:
10421 nunits = GET_MODE_NUNITS (mode);
10422 break;
10423
10424 case MODE_INT:
10425 /* Check that there are no leftover bits. */
10426 gcc_assert (GET_MODE_BITSIZE (mode)
10427 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10428
10429 nunits = GET_MODE_BITSIZE (mode)
10430 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10431 break;
10432
10433 default:
10434 gcc_unreachable ();
10435 }
10436
10437 return make_vector_type (innertype, nunits, mode);
10438 }
10439
10440 /* Similarly, but takes the inner type and number of units, which must be
10441 a power of two. */
10442
10443 tree
10444 build_vector_type (tree innertype, int nunits)
10445 {
10446 return make_vector_type (innertype, nunits, VOIDmode);
10447 }
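
/* For example (illustrative):

     tree v4si = build_vector_type (intSI_type_node, 4);

   builds a 4 x SImode vector type; the machine mode is passed as VOIDmode
   here and chosen by layout_type inside make_vector_type.  */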
10448
10449 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10450
10451 tree
10452 build_opaque_vector_type (tree innertype, int nunits)
10453 {
10454 tree t = make_vector_type (innertype, nunits, VOIDmode);
10455 tree cand;
10456 /* We always build the non-opaque variant before the opaque one,
10457 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10458 cand = TYPE_NEXT_VARIANT (t);
10459 if (cand
10460 && TYPE_VECTOR_OPAQUE (cand)
10461 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10462 return cand;
10463 /* Otherwise build a variant type and make sure to queue it after
10464 the non-opaque type. */
10465 cand = build_distinct_type_copy (t);
10466 TYPE_VECTOR_OPAQUE (cand) = true;
10467 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10468 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10469 TYPE_NEXT_VARIANT (t) = cand;
10470 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10471 return cand;
10472 }
10473
10474
10475 /* Given an initializer INIT, return TRUE if INIT is zero or some
10476 aggregate of zeros. Otherwise return FALSE. */
10477 bool
10478 initializer_zerop (const_tree init)
10479 {
10480 tree elt;
10481
10482 STRIP_NOPS (init);
10483
10484 switch (TREE_CODE (init))
10485 {
10486 case INTEGER_CST:
10487 return integer_zerop (init);
10488
10489 case REAL_CST:
10490 /* ??? Note that this is not correct for C4X float formats. There,
10491 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10492 negative exponent. */
10493 return real_zerop (init)
10494 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10495
10496 case FIXED_CST:
10497 return fixed_zerop (init);
10498
10499 case COMPLEX_CST:
10500 return integer_zerop (init)
10501 || (real_zerop (init)
10502 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10503 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10504
10505 case VECTOR_CST:
10506 {
10507 unsigned i;
10508 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10509 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10510 return false;
10511 return true;
10512 }
10513
10514 case CONSTRUCTOR:
10515 {
10516 unsigned HOST_WIDE_INT idx;
10517
10518 if (TREE_CLOBBER_P (init))
10519 return false;
10520 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10521 if (!initializer_zerop (elt))
10522 return false;
10523 return true;
10524 }
10525
10526 case STRING_CST:
10527 {
10528 int i;
10529
10530 /* We need to loop through all elements to handle cases like
10531 "\0" and "\0foobar". */
10532 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10533 if (TREE_STRING_POINTER (init)[i] != '\0')
10534 return false;
10535
10536 return true;
10537 }
10538
10539 default:
10540 return false;
10541 }
10542 }
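
/* For example (illustrative):

     initializer_zerop (build_int_cst (integer_type_node, 0))   -> true
     initializer_zerop (build_real (double_type_node, dconst0)) -> true
     initializer_zerop (build_int_cst (integer_type_node, 5))   -> false

   A CONSTRUCTOR is a zero initializer only if all of its elements are, and
   a clobber is never treated as zero.  */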
10543
10544 /* Check if vector VEC consists entirely of equal elements and
10545 that the number of elements corresponds to the type of VEC.
10546 The function returns the first element of the vector,
10547 or NULL_TREE if the vector is not uniform. */
10548 tree
10549 uniform_vector_p (const_tree vec)
10550 {
10551 tree first, t;
10552 unsigned i;
10553
10554 if (vec == NULL_TREE)
10555 return NULL_TREE;
10556
10557 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10558
10559 if (TREE_CODE (vec) == VECTOR_CST)
10560 {
10561 first = VECTOR_CST_ELT (vec, 0);
10562 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10563 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10564 return NULL_TREE;
10565
10566 return first;
10567 }
10568
10569 else if (TREE_CODE (vec) == CONSTRUCTOR)
10570 {
10571 first = error_mark_node;
10572
10573 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10574 {
10575 if (i == 0)
10576 {
10577 first = t;
10578 continue;
10579 }
10580 if (!operand_equal_p (first, t, 0))
10581 return NULL_TREE;
10582 }
10583 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10584 return NULL_TREE;
10585
10586 return first;
10587 }
10588
10589 return NULL_TREE;
10590 }
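
/* Illustrative sketch (the local names are assumptions):

     tree inner = intSI_type_node;
     tree v4si = build_vector_type (inner, 4);
     tree vec = build_vector_from_val (v4si, build_int_cst (inner, 7));

   uniform_vector_p (vec) returns the element node for 7, whereas a
   VECTOR_CST or CONSTRUCTOR with differing elements yields NULL_TREE.  */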
10591
10592 /* Build an empty statement at location LOC. */
10593
10594 tree
10595 build_empty_stmt (location_t loc)
10596 {
10597 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10598 SET_EXPR_LOCATION (t, loc);
10599 return t;
10600 }
10601
10602
10603 /* Build an OpenMP clause with code CODE. LOC is the location of the
10604 clause. */
10605
10606 tree
10607 build_omp_clause (location_t loc, enum omp_clause_code code)
10608 {
10609 tree t;
10610 int size, length;
10611
10612 length = omp_clause_num_ops[code];
10613 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10614
10615 record_node_allocation_statistics (OMP_CLAUSE, size);
10616
10617 t = (tree) ggc_internal_alloc (size);
10618 memset (t, 0, size);
10619 TREE_SET_CODE (t, OMP_CLAUSE);
10620 OMP_CLAUSE_SET_CODE (t, code);
10621 OMP_CLAUSE_LOCATION (t) = loc;
10622
10623 return t;
10624 }
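
/* Illustrative sketch (DECL and CLAUSES are assumed, pre-existing trees):
   a front end typically fills in the clause operands and chains the new
   clause onto a clause list after building it:

     tree c = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = decl;
     OMP_CLAUSE_CHAIN (c) = clauses;
     clauses = c;
*/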
10625
10626 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10627 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10628 Except for the CODE and operand count field, other storage for the
10629 object is initialized to zeros. */
10630
10631 tree
10632 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10633 {
10634 tree t;
10635 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10636
10637 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10638 gcc_assert (len >= 1);
10639
10640 record_node_allocation_statistics (code, length);
10641
10642 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10643
10644 TREE_SET_CODE (t, code);
10645
10646 /* Can't use TREE_OPERAND to store the length because if checking is
10647 enabled, it will try to check the length before we store it. :-P */
10648 t->exp.operands[0] = build_int_cst (sizetype, len);
10649
10650 return t;
10651 }
10652
10653 /* Helper function for build_call_* functions; build a CALL_EXPR with
10654 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10655 the argument slots. */
10656
10657 static tree
10658 build_call_1 (tree return_type, tree fn, int nargs)
10659 {
10660 tree t;
10661
10662 t = build_vl_exp (CALL_EXPR, nargs + 3);
10663 TREE_TYPE (t) = return_type;
10664 CALL_EXPR_FN (t) = fn;
10665 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10666
10667 return t;
10668 }
10669
10670 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10671 FN and a null static chain slot. NARGS is the number of call arguments
10672 which are specified as "..." arguments. */
10673
10674 tree
10675 build_call_nary (tree return_type, tree fn, int nargs, ...)
10676 {
10677 tree ret;
10678 va_list args;
10679 va_start (args, nargs);
10680 ret = build_call_valist (return_type, fn, nargs, args);
10681 va_end (args);
10682 return ret;
10683 }
10684
10685 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10686 FN and a null static chain slot. NARGS is the number of call arguments
10687 which are specified as a va_list ARGS. */
10688
10689 tree
10690 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10691 {
10692 tree t;
10693 int i;
10694
10695 t = build_call_1 (return_type, fn, nargs);
10696 for (i = 0; i < nargs; i++)
10697 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10698 process_call_operands (t);
10699 return t;
10700 }
10701
10702 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10703 FN and a null static chain slot. NARGS is the number of call arguments
10704 which are specified as a tree array ARGS. */
10705
10706 tree
10707 build_call_array_loc (location_t loc, tree return_type, tree fn,
10708 int nargs, const tree *args)
10709 {
10710 tree t;
10711 int i;
10712
10713 t = build_call_1 (return_type, fn, nargs);
10714 for (i = 0; i < nargs; i++)
10715 CALL_EXPR_ARG (t, i) = args[i];
10716 process_call_operands (t);
10717 SET_EXPR_LOCATION (t, loc);
10718 return t;
10719 }
10720
10721 /* Like build_call_array, but takes a vec. */
10722
10723 tree
10724 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10725 {
10726 tree ret, t;
10727 unsigned int ix;
10728
10729 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10730 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10731 CALL_EXPR_ARG (ret, ix) = t;
10732 process_call_operands (ret);
10733 return ret;
10734 }
10735
10736 /* Conveniently construct a function call expression. FNDECL names the
10737 function to be called and N arguments are passed in the array
10738 ARGARRAY. */
10739
10740 tree
10741 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10742 {
10743 tree fntype = TREE_TYPE (fndecl);
10744 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10745
10746 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10747 }
10748
10749 /* Conveniently construct a function call expression. FNDECL names the
10750 function to be called and the arguments are passed in the vector
10751 VEC. */
10752
10753 tree
10754 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10755 {
10756 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10757 vec_safe_address (vec));
10758 }
10759
10760
10761 /* Conveniently construct a function call expression. FNDECL names the
10762 function to be called, N is the number of arguments, and the "..."
10763 parameters are the argument expressions. */
10764
10765 tree
10766 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10767 {
10768 va_list ap;
10769 tree *argarray = XALLOCAVEC (tree, n);
10770 int i;
10771
10772 va_start (ap, n);
10773 for (i = 0; i < n; i++)
10774 argarray[i] = va_arg (ap, tree);
10775 va_end (ap);
10776 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10777 }
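
/* Illustrative sketch (DST, VAL and NBYTES are assumed trees of pointer,
   integer and size type): building a call to memset through this wrapper:

     tree fn = builtin_decl_explicit (BUILT_IN_MEMSET);
     tree call = build_call_expr_loc (loc, fn, 3, dst, val, nbytes);

   The result is a (possibly folded) CALL_EXPR carrying the given source
   location.  */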
10778
10779 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10780 varargs macros aren't supported by all bootstrap compilers. */
10781
10782 tree
10783 build_call_expr (tree fndecl, int n, ...)
10784 {
10785 va_list ap;
10786 tree *argarray = XALLOCAVEC (tree, n);
10787 int i;
10788
10789 va_start (ap, n);
10790 for (i = 0; i < n; i++)
10791 argarray[i] = va_arg (ap, tree);
10792 va_end (ap);
10793 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10794 }
10795
10796 /* Build an internal call expression. This is just like CALL_EXPR, except
10797 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10798 internal function call. */
10799
10800 tree
10801 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10802 tree type, int n, ...)
10803 {
10804 va_list ap;
10805 int i;
10806
10807 tree fn = build_call_1 (type, NULL_TREE, n);
10808 va_start (ap, n);
10809 for (i = 0; i < n; i++)
10810 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10811 va_end (ap);
10812 SET_EXPR_LOCATION (fn, loc);
10813 CALL_EXPR_IFN (fn) = ifn;
10814 return fn;
10815 }
10816
10817 /* Create a new constant string literal and return a char* pointer to it.
10818 The STRING_CST value is the LEN characters at STR. */
10819 tree
10820 build_string_literal (int len, const char *str)
10821 {
10822 tree t, elem, index, type;
10823
10824 t = build_string (len, str);
10825 elem = build_type_variant (char_type_node, 1, 0);
10826 index = build_index_type (size_int (len - 1));
10827 type = build_array_type (elem, index);
10828 TREE_TYPE (t) = type;
10829 TREE_CONSTANT (t) = 1;
10830 TREE_READONLY (t) = 1;
10831 TREE_STATIC (t) = 1;
10832
10833 type = build_pointer_type (elem);
10834 t = build1 (ADDR_EXPR, type,
10835 build4 (ARRAY_REF, elem,
10836 t, integer_zero_node, NULL_TREE, NULL_TREE));
10837 return t;
10838 }
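
/* For example (illustrative), lowering code that emits calls to printf-like
   functions can build the format argument this way:

     const char *fmt = "%d\n";
     tree fmt_tree = build_string_literal (strlen (fmt) + 1, fmt);

   The +1 keeps the terminating NUL as part of the STRING_CST.  */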
10839
10840
10841
10842 /* Return true if T (assumed to be a DECL) must be assigned a memory
10843 location. */
10844
10845 bool
10846 needs_to_live_in_memory (const_tree t)
10847 {
10848 return (TREE_ADDRESSABLE (t)
10849 || is_global_var (t)
10850 || (TREE_CODE (t) == RESULT_DECL
10851 && !DECL_BY_REFERENCE (t)
10852 && aggregate_value_p (t, current_function_decl)));
10853 }
10854
10855 /* Return the value of the constant X, sign-extended. */
10856
10857 HOST_WIDE_INT
10858 int_cst_value (const_tree x)
10859 {
10860 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10861 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10862
10863 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10864 gcc_assert (cst_and_fits_in_hwi (x));
10865
10866 if (bits < HOST_BITS_PER_WIDE_INT)
10867 {
10868 bool negative = ((val >> (bits - 1)) & 1) != 0;
10869 if (negative)
10870 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10871 else
10872 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10873 }
10874
10875 return val;
10876 }
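
/* For example (illustrative): with an 8-bit signed char,

     int_cst_value (build_int_cst (signed_char_type_node, -1))

   yields -1 rather than 255, because the low-order bits are sign-extended
   according to the precision of the constant's type.  */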
10877
10878 /* If TYPE is an integral or pointer type, return an integer type with
10879 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10880 if TYPE is already an integer type of signedness UNSIGNEDP. */
10881
10882 tree
10883 signed_or_unsigned_type_for (int unsignedp, tree type)
10884 {
10885 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10886 return type;
10887
10888 if (TREE_CODE (type) == VECTOR_TYPE)
10889 {
10890 tree inner = TREE_TYPE (type);
10891 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10892 if (!inner2)
10893 return NULL_TREE;
10894 if (inner == inner2)
10895 return type;
10896 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10897 }
10898
10899 if (!INTEGRAL_TYPE_P (type)
10900 && !POINTER_TYPE_P (type)
10901 && TREE_CODE (type) != OFFSET_TYPE)
10902 return NULL_TREE;
10903
10904 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10905 }
10906
10907 /* If TYPE is an integral or pointer type, return an integer type with
10908 the same precision which is unsigned, or itself if TYPE is already an
10909 unsigned integer type. */
10910
10911 tree
10912 unsigned_type_for (tree type)
10913 {
10914 return signed_or_unsigned_type_for (1, type);
10915 }
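
/* For example (illustrative):

     tree u = unsigned_type_for (long_integer_type_node);

   gives an unsigned integer type whose TYPE_PRECISION equals
   LONG_TYPE_SIZE; for a vector type the transformation is applied to the
   element type instead.  */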
10916
10917 /* If TYPE is an integral or pointer type, return an integer type with
10918 the same precision which is signed, or itself if TYPE is already a
10919 signed integer type. */
10920
10921 tree
10922 signed_type_for (tree type)
10923 {
10924 return signed_or_unsigned_type_for (0, type);
10925 }
10926
10927 /* If TYPE is a vector type, return a signed integer vector type with the
10928 same width and number of subparts. Otherwise return boolean_type_node. */
10929
10930 tree
10931 truth_type_for (tree type)
10932 {
10933 if (TREE_CODE (type) == VECTOR_TYPE)
10934 {
10935 tree elem = lang_hooks.types.type_for_size
10936 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10937 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10938 }
10939 else
10940 return boolean_type_node;
10941 }
10942
10943 /* Returns the largest value obtainable by casting something in INNER type to
10944 OUTER type. */
10945
10946 tree
10947 upper_bound_in_type (tree outer, tree inner)
10948 {
10949 unsigned int det = 0;
10950 unsigned oprec = TYPE_PRECISION (outer);
10951 unsigned iprec = TYPE_PRECISION (inner);
10952 unsigned prec;
10953
10954 /* Compute a unique number for every combination. */
10955 det |= (oprec > iprec) ? 4 : 0;
10956 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10957 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10958
10959 /* Determine the exponent to use. */
10960 switch (det)
10961 {
10962 case 0:
10963 case 1:
10964 /* oprec <= iprec, outer: signed, inner: don't care. */
10965 prec = oprec - 1;
10966 break;
10967 case 2:
10968 case 3:
10969 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10970 prec = oprec;
10971 break;
10972 case 4:
10973 /* oprec > iprec, outer: signed, inner: signed. */
10974 prec = iprec - 1;
10975 break;
10976 case 5:
10977 /* oprec > iprec, outer: signed, inner: unsigned. */
10978 prec = iprec;
10979 break;
10980 case 6:
10981 /* oprec > iprec, outer: unsigned, inner: signed. */
10982 prec = oprec;
10983 break;
10984 case 7:
10985 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10986 prec = iprec;
10987 break;
10988 default:
10989 gcc_unreachable ();
10990 }
10991
10992 return wide_int_to_tree (outer,
10993 wi::mask (prec, false, TYPE_PRECISION (outer)));
10994 }
10995
10996 /* Returns the smallest value obtainable by casting something in INNER type to
10997 OUTER type. */
10998
10999 tree
11000 lower_bound_in_type (tree outer, tree inner)
11001 {
11002 unsigned oprec = TYPE_PRECISION (outer);
11003 unsigned iprec = TYPE_PRECISION (inner);
11004
11005 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11006 and obtain 0. */
11007 if (TYPE_UNSIGNED (outer)
11008 /* If we are widening something of an unsigned type, OUTER type
11009 contains all values of INNER type. In particular, both INNER
11010 and OUTER types have zero in common. */
11011 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11012 return build_int_cst (outer, 0);
11013 else
11014 {
11015 /* If we are widening a signed type to another signed type, we
11016 want to obtain -2^^(iprec-1). If we are keeping the
11017 precision or narrowing to a signed type, we want to obtain
11018 -2^(oprec-1). */
11019 unsigned prec = oprec > iprec ? iprec : oprec;
11020 return wide_int_to_tree (outer,
11021 wi::mask (prec - 1, true,
11022 TYPE_PRECISION (outer)));
11023 }
11024 }
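
/* Worked example (illustrative, assuming an 8-bit char and a 16-bit short):
   casting an unsigned short to a signed char can produce any value in
   [-128, 127], and indeed

     upper_bound_in_type (signed_char_type_node, short_unsigned_type_node)

   is 127 (det == 1, so prec == oprec - 1 == 7), while the corresponding
   lower_bound_in_type call returns -128 (the signed, non-widening case,
   prec == oprec == 8).  */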
11025
11026 /* Return nonzero if two operands that are suitable for PHI nodes are
11027 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11028 SSA_NAME or invariant. Note that this is strictly an optimization.
11029 That is, callers of this function can directly call operand_equal_p
11030 and get the same result, only slower. */
11031
11032 int
11033 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11034 {
11035 if (arg0 == arg1)
11036 return 1;
11037 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11038 return 0;
11039 return operand_equal_p (arg0, arg1, 0);
11040 }
11041
11042 /* Returns the number of trailing zeros in the binary representation of X. */
11043
11044 tree
11045 num_ending_zeros (const_tree x)
11046 {
11047 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11048 }
11049
11050
11051 #define WALK_SUBTREE(NODE) \
11052 do \
11053 { \
11054 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11055 if (result) \
11056 return result; \
11057 } \
11058 while (0)
11059
11060 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11061 be walked whenever a type is seen in the tree. The rest of the operands and
11062 the return value are as for walk_tree. */
11063
11064 static tree
11065 walk_type_fields (tree type, walk_tree_fn func, void *data,
11066 hash_set<tree> *pset, walk_tree_lh lh)
11067 {
11068 tree result = NULL_TREE;
11069
11070 switch (TREE_CODE (type))
11071 {
11072 case POINTER_TYPE:
11073 case REFERENCE_TYPE:
11074 case VECTOR_TYPE:
11075 /* We have to worry about mutually recursive pointers. These can't
11076 be written in C. They can in Ada. It's pathological, but
11077 there's an ACATS test (c38102a) that checks it. Deal with this
11078 by checking if we're pointing to another pointer, that one
11079 points to another pointer, that one does too, and we have no htab.
11080 If so, get a hash table. We check three levels deep to avoid
11081 the cost of the hash table if we don't need one. */
11082 if (POINTER_TYPE_P (TREE_TYPE (type))
11083 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11084 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11085 && !pset)
11086 {
11087 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11088 func, data);
11089 if (result)
11090 return result;
11091
11092 break;
11093 }
11094
11095 /* ... fall through ... */
11096
11097 case COMPLEX_TYPE:
11098 WALK_SUBTREE (TREE_TYPE (type));
11099 break;
11100
11101 case METHOD_TYPE:
11102 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11103
11104 /* Fall through. */
11105
11106 case FUNCTION_TYPE:
11107 WALK_SUBTREE (TREE_TYPE (type));
11108 {
11109 tree arg;
11110
11111 /* We never want to walk into default arguments. */
11112 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11113 WALK_SUBTREE (TREE_VALUE (arg));
11114 }
11115 break;
11116
11117 case ARRAY_TYPE:
11118 /* Don't follow this node's type if it is a pointer, for fear that
11119 we'll have infinite recursion. If we have a PSET, then we
11120 need not fear. */
11121 if (pset
11122 || (!POINTER_TYPE_P (TREE_TYPE (type))
11123 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11124 WALK_SUBTREE (TREE_TYPE (type));
11125 WALK_SUBTREE (TYPE_DOMAIN (type));
11126 break;
11127
11128 case OFFSET_TYPE:
11129 WALK_SUBTREE (TREE_TYPE (type));
11130 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11131 break;
11132
11133 default:
11134 break;
11135 }
11136
11137 return NULL_TREE;
11138 }
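
/* Illustrative sketch (the callback and its caller are assumptions, not part
   of this file): a typical walk_tree_fn inspects each node and returns
   NULL_TREE to keep walking, or a non-NULL tree to stop the walk:

     static tree
     count_decls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (DECL_P (*tp))
         ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree_without_duplicates (&body, count_decls_r, &n);
*/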
11139
11140 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11141 called with the DATA and the address of each sub-tree. If FUNC returns a
11142 non-NULL value, the traversal is stopped, and the value returned by FUNC
11143 is returned. If PSET is non-NULL it is used to record the nodes visited,
11144 and to avoid visiting a node more than once. */
11145
11146 tree
11147 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11148 hash_set<tree> *pset, walk_tree_lh lh)
11149 {
11150 enum tree_code code;
11151 int walk_subtrees;
11152 tree result;
11153
11154 #define WALK_SUBTREE_TAIL(NODE) \
11155 do \
11156 { \
11157 tp = & (NODE); \
11158 goto tail_recurse; \
11159 } \
11160 while (0)
11161
11162 tail_recurse:
11163 /* Skip empty subtrees. */
11164 if (!*tp)
11165 return NULL_TREE;
11166
11167 /* Don't walk the same tree twice, if the user has requested
11168 that we avoid doing so. */
11169 if (pset && pset->add (*tp))
11170 return NULL_TREE;
11171
11172 /* Call the function. */
11173 walk_subtrees = 1;
11174 result = (*func) (tp, &walk_subtrees, data);
11175
11176 /* If we found something, return it. */
11177 if (result)
11178 return result;
11179
11180 code = TREE_CODE (*tp);
11181
11182 /* Even if we didn't, FUNC may have decided that there was nothing
11183 interesting below this point in the tree. */
11184 if (!walk_subtrees)
11185 {
11186 /* But we still need to check our siblings. */
11187 if (code == TREE_LIST)
11188 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11189 else if (code == OMP_CLAUSE)
11190 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11191 else
11192 return NULL_TREE;
11193 }
11194
11195 if (lh)
11196 {
11197 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11198 if (result || !walk_subtrees)
11199 return result;
11200 }
11201
11202 switch (code)
11203 {
11204 case ERROR_MARK:
11205 case IDENTIFIER_NODE:
11206 case INTEGER_CST:
11207 case REAL_CST:
11208 case FIXED_CST:
11209 case VECTOR_CST:
11210 case STRING_CST:
11211 case BLOCK:
11212 case PLACEHOLDER_EXPR:
11213 case SSA_NAME:
11214 case FIELD_DECL:
11215 case RESULT_DECL:
11216 /* None of these have subtrees other than those already walked
11217 above. */
11218 break;
11219
11220 case TREE_LIST:
11221 WALK_SUBTREE (TREE_VALUE (*tp));
11222 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11223 break;
11224
11225 case TREE_VEC:
11226 {
11227 int len = TREE_VEC_LENGTH (*tp);
11228
11229 if (len == 0)
11230 break;
11231
11232 /* Walk all elements but the first. */
11233 while (--len)
11234 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11235
11236 /* Now walk the first one as a tail call. */
11237 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11238 }
11239
11240 case COMPLEX_CST:
11241 WALK_SUBTREE (TREE_REALPART (*tp));
11242 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11243
11244 case CONSTRUCTOR:
11245 {
11246 unsigned HOST_WIDE_INT idx;
11247 constructor_elt *ce;
11248
11249 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11250 idx++)
11251 WALK_SUBTREE (ce->value);
11252 }
11253 break;
11254
11255 case SAVE_EXPR:
11256 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11257
11258 case BIND_EXPR:
11259 {
11260 tree decl;
11261 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11262 {
11263 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11264 into declarations that are just mentioned, rather than
11265 declared; they don't really belong to this part of the tree.
11266 And, we can see cycles: the initializer for a declaration
11267 can refer to the declaration itself. */
11268 WALK_SUBTREE (DECL_INITIAL (decl));
11269 WALK_SUBTREE (DECL_SIZE (decl));
11270 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11271 }
11272 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11273 }
11274
11275 case STATEMENT_LIST:
11276 {
11277 tree_stmt_iterator i;
11278 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11279 WALK_SUBTREE (*tsi_stmt_ptr (i));
11280 }
11281 break;
11282
11283 case OMP_CLAUSE:
11284 switch (OMP_CLAUSE_CODE (*tp))
11285 {
11286 case OMP_CLAUSE_GANG:
11287 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11288 /* FALLTHRU */
11289
11290 case OMP_CLAUSE_DEVICE_RESIDENT:
11291 case OMP_CLAUSE_USE_DEVICE:
11292 case OMP_CLAUSE_ASYNC:
11293 case OMP_CLAUSE_WAIT:
11294 case OMP_CLAUSE_WORKER:
11295 case OMP_CLAUSE_VECTOR:
11296 case OMP_CLAUSE_NUM_GANGS:
11297 case OMP_CLAUSE_NUM_WORKERS:
11298 case OMP_CLAUSE_VECTOR_LENGTH:
11299 case OMP_CLAUSE_PRIVATE:
11300 case OMP_CLAUSE_SHARED:
11301 case OMP_CLAUSE_FIRSTPRIVATE:
11302 case OMP_CLAUSE_COPYIN:
11303 case OMP_CLAUSE_COPYPRIVATE:
11304 case OMP_CLAUSE_FINAL:
11305 case OMP_CLAUSE_IF:
11306 case OMP_CLAUSE_NUM_THREADS:
11307 case OMP_CLAUSE_SCHEDULE:
11308 case OMP_CLAUSE_UNIFORM:
11309 case OMP_CLAUSE_DEPEND:
11310 case OMP_CLAUSE_NUM_TEAMS:
11311 case OMP_CLAUSE_THREAD_LIMIT:
11312 case OMP_CLAUSE_DEVICE:
11313 case OMP_CLAUSE_DIST_SCHEDULE:
11314 case OMP_CLAUSE_SAFELEN:
11315 case OMP_CLAUSE_SIMDLEN:
11316 case OMP_CLAUSE__LOOPTEMP_:
11317 case OMP_CLAUSE__SIMDUID_:
11318 case OMP_CLAUSE__CILK_FOR_COUNT_:
11319 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11320 /* FALLTHRU */
11321
11322 case OMP_CLAUSE_INDEPENDENT:
11323 case OMP_CLAUSE_NOWAIT:
11324 case OMP_CLAUSE_ORDERED:
11325 case OMP_CLAUSE_DEFAULT:
11326 case OMP_CLAUSE_UNTIED:
11327 case OMP_CLAUSE_MERGEABLE:
11328 case OMP_CLAUSE_PROC_BIND:
11329 case OMP_CLAUSE_INBRANCH:
11330 case OMP_CLAUSE_NOTINBRANCH:
11331 case OMP_CLAUSE_FOR:
11332 case OMP_CLAUSE_PARALLEL:
11333 case OMP_CLAUSE_SECTIONS:
11334 case OMP_CLAUSE_TASKGROUP:
11335 case OMP_CLAUSE_AUTO:
11336 case OMP_CLAUSE_SEQ:
11337 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11338
11339 case OMP_CLAUSE_LASTPRIVATE:
11340 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11341 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11342 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11343
11344 case OMP_CLAUSE_COLLAPSE:
11345 {
11346 int i;
11347 for (i = 0; i < 3; i++)
11348 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11349 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11350 }
11351
11352 case OMP_CLAUSE_LINEAR:
11353 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11354 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11355 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11356 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11357
11358 case OMP_CLAUSE_ALIGNED:
11359 case OMP_CLAUSE_FROM:
11360 case OMP_CLAUSE_TO:
11361 case OMP_CLAUSE_MAP:
11362 case OMP_CLAUSE__CACHE_:
11363 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11364 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11365 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11366
11367 case OMP_CLAUSE_REDUCTION:
11368 {
11369 int i;
11370 for (i = 0; i < 4; i++)
11371 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11372 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11373 }
11374
11375 default:
11376 gcc_unreachable ();
11377 }
11378 break;
11379
11380 case TARGET_EXPR:
11381 {
11382 int i, len;
11383
11384 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11385 But, we only want to walk once. */
11386 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11387 for (i = 0; i < len; ++i)
11388 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11389 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11390 }
11391
11392 case DECL_EXPR:
11393 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11394 defining. We only want to walk into these fields of a type in this
11395 case and not in the general case of a mere reference to the type.
11396
11397 The criterion is as follows: if the field can be an expression, it
11398 must be walked only here. This should be in keeping with the fields
11399 that are directly gimplified in gimplify_type_sizes in order for the
11400 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11401 variable-sized types.
11402
11403 Note that DECLs get walked as part of processing the BIND_EXPR. */
11404 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11405 {
11406 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11407 if (TREE_CODE (*type_p) == ERROR_MARK)
11408 return NULL_TREE;
11409
11410 /* Call the function for the type. See if it returns anything or
11411 doesn't want us to continue. If we are to continue, walk both
11412 the normal fields and those for the declaration case. */
11413 result = (*func) (type_p, &walk_subtrees, data);
11414 if (result || !walk_subtrees)
11415 return result;
11416
11417 /* But do not walk a pointed-to type since it may itself need to
11418 be walked in the declaration case if it isn't anonymous. */
11419 if (!POINTER_TYPE_P (*type_p))
11420 {
11421 result = walk_type_fields (*type_p, func, data, pset, lh);
11422 if (result)
11423 return result;
11424 }
11425
11426 /* If this is a record type, also walk the fields. */
11427 if (RECORD_OR_UNION_TYPE_P (*type_p))
11428 {
11429 tree field;
11430
11431 for (field = TYPE_FIELDS (*type_p); field;
11432 field = DECL_CHAIN (field))
11433 {
11434 /* We'd like to look at the type of the field, but we can
11435 easily get infinite recursion. So assume it's pointed
11436 to elsewhere in the tree. Also, ignore things that
11437 aren't fields. */
11438 if (TREE_CODE (field) != FIELD_DECL)
11439 continue;
11440
11441 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11442 WALK_SUBTREE (DECL_SIZE (field));
11443 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11444 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11445 WALK_SUBTREE (DECL_QUALIFIER (field));
11446 }
11447 }
11448
11449 /* Same for scalar types. */
11450 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11451 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11452 || TREE_CODE (*type_p) == INTEGER_TYPE
11453 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11454 || TREE_CODE (*type_p) == REAL_TYPE)
11455 {
11456 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11457 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11458 }
11459
11460 WALK_SUBTREE (TYPE_SIZE (*type_p));
11461 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11462 }
11463 /* FALLTHRU */
11464
11465 default:
11466 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11467 {
11468 int i, len;
11469
11470 /* Walk over all the sub-trees of this operand. */
11471 len = TREE_OPERAND_LENGTH (*tp);
11472
11473 /* Go through the subtrees. We need to do this in forward order so
11474 that the scope of a FOR_EXPR is handled properly. */
11475 if (len)
11476 {
11477 for (i = 0; i < len - 1; ++i)
11478 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11479 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11480 }
11481 }
11482 /* If this is a type, walk the needed fields in the type. */
11483 else if (TYPE_P (*tp))
11484 return walk_type_fields (*tp, func, data, pset, lh);
11485 break;
11486 }
11487
11488 /* We didn't find what we were looking for. */
11489 return NULL_TREE;
11490
11491 #undef WALK_SUBTREE_TAIL
11492 }
11493 #undef WALK_SUBTREE
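
/* Illustrative callback sketch (not part of the original source): a
   walk_tree_fn that stops the traversal at the first VAR_DECL it sees.
   The helper names and the walked expression are hypothetical; walk_tree
   is the tree.h macro wrapping walk_tree_1.  */
#if 0
static tree
find_var_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*tp) == VAR_DECL)
    return *tp;			/* A non-NULL result stops the walk.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;		/* Don't descend into types.  */
  return NULL_TREE;
}

static tree
find_first_var (tree expr)
{
  return walk_tree (&expr, find_var_r, NULL, NULL);
}
#endif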
11494
11495 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11496
11497 tree
11498 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11499 walk_tree_lh lh)
11500 {
11501 tree result;
11502
11503 hash_set<tree> pset;
11504 result = walk_tree_1 (tp, func, data, &pset, lh);
11505 return result;
11506 }
11507
11508
11509 tree
11510 tree_block (tree t)
11511 {
11512 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11513
11514 if (IS_EXPR_CODE_CLASS (c))
11515 return LOCATION_BLOCK (t->exp.locus);
11516 gcc_unreachable ();
11517 return NULL;
11518 }
11519
11520 void
11521 tree_set_block (tree t, tree b)
11522 {
11523 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11524
11525 if (IS_EXPR_CODE_CLASS (c))
11526 {
11527 if (b)
11528 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11529 else
11530 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11531 }
11532 else
11533 gcc_unreachable ();
11534 }
11535
11536 /* Create a nameless artificial label and put it in the current
11537 function context. The label has a location of LOC. Returns the
11538 newly created label. */
11539
11540 tree
11541 create_artificial_label (location_t loc)
11542 {
11543 tree lab = build_decl (loc,
11544 LABEL_DECL, NULL_TREE, void_type_node);
11545
11546 DECL_ARTIFICIAL (lab) = 1;
11547 DECL_IGNORED_P (lab) = 1;
11548 DECL_CONTEXT (lab) = current_function_decl;
11549 return lab;
11550 }
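
/* Illustrative usage sketch (not part of the original source): lowering
   passes typically create such labels with UNKNOWN_LOCATION when no source
   location applies.  The helper name is hypothetical.  */
#if 0
static tree
make_dispatch_label (void)
{
  return create_artificial_label (UNKNOWN_LOCATION);
}
#endif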
11551
11552 /* Given a tree, try to return a useful variable name that we can use
11553 to prefix a temporary that is being assigned the value of the tree.
11554 I.E. given <temp> = &A, return A. */
11555
11556 const char *
11557 get_name (tree t)
11558 {
11559 tree stripped_decl;
11560
11561 stripped_decl = t;
11562 STRIP_NOPS (stripped_decl);
11563 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11564 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11565 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11566 {
11567 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11568 if (!name)
11569 return NULL;
11570 return IDENTIFIER_POINTER (name);
11571 }
11572 else
11573 {
11574 switch (TREE_CODE (stripped_decl))
11575 {
11576 case ADDR_EXPR:
11577 return get_name (TREE_OPERAND (stripped_decl, 0));
11578 default:
11579 return NULL;
11580 }
11581 }
11582 }
11583
11584 /* Return true if FNTYPE has a variable argument list.  */
11585
11586 bool
11587 stdarg_p (const_tree fntype)
11588 {
11589 function_args_iterator args_iter;
11590 tree n = NULL_TREE, t;
11591
11592 if (!fntype)
11593 return false;
11594
11595 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11596 {
11597 n = t;
11598 }
11599
11600 return n != NULL_TREE && n != void_type_node;
11601 }
11602
11603 /* Return true if FNTYPE has a prototype.  */
11604
11605 bool
11606 prototype_p (const_tree fntype)
11607 {
11608 tree t;
11609
11610 gcc_assert (fntype != NULL_TREE);
11611
11612 t = TYPE_ARG_TYPES (fntype);
11613 return (t != NULL_TREE);
11614 }
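
/* Illustrative sketch (not part of the original source): given a
   FUNCTION_DECL, these predicates are queried on its type.  For a C
   declaration "int f ();" TYPE_ARG_TYPES is NULL so prototype_p is false;
   for "int printf (const char *, ...);" stdarg_p is true.  The helper and
   its parameter are hypothetical.  */
#if 0
static void
describe_signature (tree fndecl)
{
  tree fntype = TREE_TYPE (fndecl);
  bool has_prototype = prototype_p (fntype);
  bool is_variadic = stdarg_p (fntype);
  (void) has_prototype;
  (void) is_variadic;
}
#endif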
11615
11616 /* If BLOCK is inlined from an __attribute__((__artificial__))
11617    routine, return a pointer to the location from which it has been
11618    called.  */
11619 location_t *
11620 block_nonartificial_location (tree block)
11621 {
11622 location_t *ret = NULL;
11623
11624 while (block && TREE_CODE (block) == BLOCK
11625 && BLOCK_ABSTRACT_ORIGIN (block))
11626 {
11627 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11628
11629 while (TREE_CODE (ao) == BLOCK
11630 && BLOCK_ABSTRACT_ORIGIN (ao)
11631 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11632 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11633
11634 if (TREE_CODE (ao) == FUNCTION_DECL)
11635 {
11636 /* If AO is an artificial inline, point RET to the
11637 call site locus at which it has been inlined and continue
11638 the loop, in case AO's caller is also an artificial
11639 inline. */
11640 if (DECL_DECLARED_INLINE_P (ao)
11641 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11642 ret = &BLOCK_SOURCE_LOCATION (block);
11643 else
11644 break;
11645 }
11646 else if (TREE_CODE (ao) != BLOCK)
11647 break;
11648
11649 block = BLOCK_SUPERCONTEXT (block);
11650 }
11651 return ret;
11652 }
11653
11654
11655 /* If EXP is inlined from an __attribute__((__artificial__))
11656 function, return the location of the original call expression. */
11657
11658 location_t
11659 tree_nonartificial_location (tree exp)
11660 {
11661 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11662
11663 if (loc)
11664 return *loc;
11665 else
11666 return EXPR_LOCATION (exp);
11667 }
11668
11669
11670 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11671    nodes.  */
11672
11673 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
11674
11675 hashval_t
11676 cl_option_hasher::hash (tree x)
11677 {
11678 const_tree const t = x;
11679 const char *p;
11680 size_t i;
11681 size_t len = 0;
11682 hashval_t hash = 0;
11683
11684 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11685 {
11686 p = (const char *)TREE_OPTIMIZATION (t);
11687 len = sizeof (struct cl_optimization);
11688 }
11689
11690 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11691 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11692
11693 else
11694 gcc_unreachable ();
11695
11696 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11697 something else. */
11698 for (i = 0; i < len; i++)
11699 if (p[i])
11700 hash = (hash << 4) ^ ((i << 2) | p[i]);
11701
11702 return hash;
11703 }
11704
11705 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11706    TARGET_OPTION_NODE tree) is the same as that represented by Y, which is
11707    a node of the same kind.  */
11708
11709 bool
11710 cl_option_hasher::equal (tree x, tree y)
11711 {
11712 const_tree const xt = x;
11713 const_tree const yt = y;
11714 const char *xp;
11715 const char *yp;
11716 size_t len;
11717
11718 if (TREE_CODE (xt) != TREE_CODE (yt))
11719 return 0;
11720
11721 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11722 {
11723 xp = (const char *)TREE_OPTIMIZATION (xt);
11724 yp = (const char *)TREE_OPTIMIZATION (yt);
11725 len = sizeof (struct cl_optimization);
11726 }
11727
11728 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11729 {
11730 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11731 TREE_TARGET_OPTION (yt));
11732 }
11733
11734 else
11735 gcc_unreachable ();
11736
11737 return (memcmp (xp, yp, len) == 0);
11738 }
11739
11740 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11741
11742 tree
11743 build_optimization_node (struct gcc_options *opts)
11744 {
11745 tree t;
11746
11747 /* Use the cache of optimization nodes. */
11748
11749 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11750 opts);
11751
11752 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11753 t = *slot;
11754 if (!t)
11755 {
11756 /* Insert this one into the hash table. */
11757 t = cl_optimization_node;
11758 *slot = t;
11759
11760 /* Make a new node for next time round. */
11761 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11762 }
11763
11764 return t;
11765 }
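
/* Illustrative usage sketch (not part of the original source): callers
   usually snapshot the current global options, e.g. when handling
   attribute ((optimize)); passing &global_options is the common case.
   The helper name is hypothetical.  */
#if 0
static tree
snapshot_current_optimization_options (void)
{
  return build_optimization_node (&global_options);
}
#endif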
11766
11767 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11768
11769 tree
11770 build_target_option_node (struct gcc_options *opts)
11771 {
11772 tree t;
11773
11774   /* Use the cache of target option nodes.  */
11775
11776 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11777 opts);
11778
11779 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11780 t = *slot;
11781 if (!t)
11782 {
11783 /* Insert this one into the hash table. */
11784 t = cl_target_option_node;
11785 *slot = t;
11786
11787 /* Make a new node for next time round. */
11788 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11789 }
11790
11791 return t;
11792 }
11793
11794 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11795 so that they aren't saved during PCH writing. */
11796
11797 void
11798 prepare_target_option_nodes_for_pch (void)
11799 {
11800 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11801 for (; iter != cl_option_hash_table->end (); ++iter)
11802 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11803 TREE_TARGET_GLOBALS (*iter) = NULL;
11804 }
11805
11806 /* Determine the "ultimate origin" of a block. The block may be an inlined
11807 instance of an inlined instance of a block which is local to an inline
11808 function, so we have to trace all of the way back through the origin chain
11809 to find out what sort of node actually served as the original seed for the
11810 given block. */
11811
11812 tree
11813 block_ultimate_origin (const_tree block)
11814 {
11815 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11816
11817 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11818 we're trying to output the abstract instance of this function. */
11819 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11820 return NULL_TREE;
11821
11822 if (immediate_origin == NULL_TREE)
11823 return NULL_TREE;
11824 else
11825 {
11826 tree ret_val;
11827 tree lookahead = immediate_origin;
11828
11829 do
11830 {
11831 ret_val = lookahead;
11832 lookahead = (TREE_CODE (ret_val) == BLOCK
11833 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11834 }
11835 while (lookahead != NULL && lookahead != ret_val);
11836
11837 /* The block's abstract origin chain may not be the *ultimate* origin of
11838 the block. It could lead to a DECL that has an abstract origin set.
11839 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11840 will give us if it has one). Note that DECL's abstract origins are
11841 supposed to be the most distant ancestor (or so decl_ultimate_origin
11842 claims), so we don't need to loop following the DECL origins. */
11843 if (DECL_P (ret_val))
11844 return DECL_ORIGIN (ret_val);
11845
11846 return ret_val;
11847 }
11848 }
11849
11850 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11851 no instruction. */
11852
11853 bool
11854 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11855 {
11856   /* Use precision rather than machine mode when we can, which gives
11857      the correct answer even for submode (bit-field) types.  */
11858 if ((INTEGRAL_TYPE_P (outer_type)
11859 || POINTER_TYPE_P (outer_type)
11860 || TREE_CODE (outer_type) == OFFSET_TYPE)
11861 && (INTEGRAL_TYPE_P (inner_type)
11862 || POINTER_TYPE_P (inner_type)
11863 || TREE_CODE (inner_type) == OFFSET_TYPE))
11864 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11865
11866 /* Otherwise fall back on comparing machine modes (e.g. for
11867 aggregate types, floats). */
11868 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11869 }
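
/* Illustrative sketch (not part of the original source): on typical targets
   int and unsigned int have the same precision, so converting between them
   is a no-op, while converting int to a wider long long is not.  The helper
   name is hypothetical and the assertions assume such a target.  */
#if 0
static void
nop_conversion_examples (void)
{
  gcc_assert (tree_nop_conversion_p (unsigned_type_node, integer_type_node));
  if (TYPE_PRECISION (long_long_integer_type_node)
      > TYPE_PRECISION (integer_type_node))
    gcc_assert (!tree_nop_conversion_p (long_long_integer_type_node,
					integer_type_node));
}
#endif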
11870
11871 /* Return true iff conversion in EXP generates no instruction. Mark
11872 it inline so that we fully inline into the stripping functions even
11873 though we have two uses of this function. */
11874
11875 static inline bool
11876 tree_nop_conversion (const_tree exp)
11877 {
11878 tree outer_type, inner_type;
11879
11880 if (!CONVERT_EXPR_P (exp)
11881 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11882 return false;
11883 if (TREE_OPERAND (exp, 0) == error_mark_node)
11884 return false;
11885
11886 outer_type = TREE_TYPE (exp);
11887 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11888
11889 if (!inner_type)
11890 return false;
11891
11892 return tree_nop_conversion_p (outer_type, inner_type);
11893 }
11894
11895 /* Return true iff conversion in EXP generates no instruction. Don't
11896 consider conversions changing the signedness. */
11897
11898 static bool
11899 tree_sign_nop_conversion (const_tree exp)
11900 {
11901 tree outer_type, inner_type;
11902
11903 if (!tree_nop_conversion (exp))
11904 return false;
11905
11906 outer_type = TREE_TYPE (exp);
11907 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11908
11909 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11910 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11911 }
11912
11913 /* Strip conversions from EXP according to tree_nop_conversion and
11914 return the resulting expression. */
11915
11916 tree
11917 tree_strip_nop_conversions (tree exp)
11918 {
11919 while (tree_nop_conversion (exp))
11920 exp = TREE_OPERAND (exp, 0);
11921 return exp;
11922 }
11923
11924 /* Strip conversions from EXP according to tree_sign_nop_conversion
11925 and return the resulting expression. */
11926
11927 tree
11928 tree_strip_sign_nop_conversions (tree exp)
11929 {
11930 while (tree_sign_nop_conversion (exp))
11931 exp = TREE_OPERAND (exp, 0);
11932 return exp;
11933 }
11934
11935 /* Strip any floating point extensions from EXP and return the result.  */
11936 tree
11937 strip_float_extensions (tree exp)
11938 {
11939 tree sub, expt, subt;
11940
11941   /* For a floating point constant, look up the narrowest type that can hold
11942      it properly and handle it like (type)(narrowest_type)constant.
11943      This way we can optimize for instance a=a*2.0 where "a" is float
11944      but 2.0 is a double constant.  */
11945 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11946 {
11947 REAL_VALUE_TYPE orig;
11948 tree type = NULL;
11949
11950 orig = TREE_REAL_CST (exp);
11951 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11952 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11953 type = float_type_node;
11954 else if (TYPE_PRECISION (TREE_TYPE (exp))
11955 > TYPE_PRECISION (double_type_node)
11956 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11957 type = double_type_node;
11958 if (type)
11959 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11960 }
11961
11962 if (!CONVERT_EXPR_P (exp))
11963 return exp;
11964
11965 sub = TREE_OPERAND (exp, 0);
11966 subt = TREE_TYPE (sub);
11967 expt = TREE_TYPE (exp);
11968
11969 if (!FLOAT_TYPE_P (subt))
11970 return exp;
11971
11972 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11973 return exp;
11974
11975 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11976 return exp;
11977
11978 return strip_float_extensions (sub);
11979 }
11980
11981 /* Strip out all handled components that produce invariant
11982 offsets. */
11983
11984 const_tree
11985 strip_invariant_refs (const_tree op)
11986 {
11987 while (handled_component_p (op))
11988 {
11989 switch (TREE_CODE (op))
11990 {
11991 case ARRAY_REF:
11992 case ARRAY_RANGE_REF:
11993 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11994 || TREE_OPERAND (op, 2) != NULL_TREE
11995 || TREE_OPERAND (op, 3) != NULL_TREE)
11996 return NULL;
11997 break;
11998
11999 case COMPONENT_REF:
12000 if (TREE_OPERAND (op, 2) != NULL_TREE)
12001 return NULL;
12002 break;
12003
12004 default:;
12005 }
12006 op = TREE_OPERAND (op, 0);
12007 }
12008
12009 return op;
12010 }
12011
12012 static GTY(()) tree gcc_eh_personality_decl;
12013
12014 /* Return the GCC personality function decl. */
12015
12016 tree
12017 lhd_gcc_personality (void)
12018 {
12019 if (!gcc_eh_personality_decl)
12020 gcc_eh_personality_decl = build_personality_function ("gcc");
12021 return gcc_eh_personality_decl;
12022 }
12023
12024 /* TARGET is the call target of a GIMPLE call statement
12025    (obtained by gimple_call_fn).  Return true if it is an
12026    OBJ_TYPE_REF representing a virtual call of a C++ method.
12027    (As opposed to an OBJ_TYPE_REF representing objc calls
12028    through a cast, where the middle-end devirtualization machinery
12029    can't apply.)  */
12030
12031 bool
12032 virtual_method_call_p (const_tree target)
12033 {
12034 if (TREE_CODE (target) != OBJ_TYPE_REF)
12035 return false;
12036 tree t = TREE_TYPE (target);
12037 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12038 t = TREE_TYPE (t);
12039 if (TREE_CODE (t) == FUNCTION_TYPE)
12040 return false;
12041 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12042   /* If we do not have a BINFO associated, it means that the type was built
12043      without devirtualization enabled.  Do not consider this a virtual
12044      call.  */
12045 if (!TYPE_BINFO (obj_type_ref_class (target)))
12046 return false;
12047 return true;
12048 }
12049
12050 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12051
12052 tree
12053 obj_type_ref_class (const_tree ref)
12054 {
12055 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12056 ref = TREE_TYPE (ref);
12057 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12058 ref = TREE_TYPE (ref);
12059   /* We look for the type THIS points to.  ObjC also builds
12060      OBJ_TYPE_REF for non-method calls; their first parameter
12061      ID, however, also corresponds to the class type.  */
12062 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12063 || TREE_CODE (ref) == FUNCTION_TYPE);
12064 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12065 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12066 return TREE_TYPE (ref);
12067 }
12068
12069 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12070
12071 static tree
12072 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12073 {
12074 unsigned int i;
12075 tree base_binfo, b;
12076
12077 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12078 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12079 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12080 return base_binfo;
12081 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12082 return b;
12083 return NULL;
12084 }
12085
12086 /* Try to find a base info of BINFO that would have its field decl at offset
12087    OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
12088    found, return it; otherwise return NULL_TREE.  */
12089
12090 tree
12091 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12092 {
12093 tree type = BINFO_TYPE (binfo);
12094
12095 while (true)
12096 {
12097 HOST_WIDE_INT pos, size;
12098 tree fld;
12099 int i;
12100
12101 if (types_same_for_odr (type, expected_type))
12102 return binfo;
12103 if (offset < 0)
12104 return NULL_TREE;
12105
12106 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12107 {
12108 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12109 continue;
12110
12111 pos = int_bit_position (fld);
12112 size = tree_to_uhwi (DECL_SIZE (fld));
12113 if (pos <= offset && (pos + size) > offset)
12114 break;
12115 }
12116 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12117 return NULL_TREE;
12118
12119 /* Offset 0 indicates the primary base, whose vtable contents are
12120 represented in the binfo for the derived class. */
12121 else if (offset != 0)
12122 {
12123 tree found_binfo = NULL, base_binfo;
12124 /* Offsets in BINFO are in bytes relative to the whole structure
12125 while POS is in bits relative to the containing field. */
12126 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12127 / BITS_PER_UNIT);
12128
12129 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12130 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12131 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12132 {
12133 found_binfo = base_binfo;
12134 break;
12135 }
12136 if (found_binfo)
12137 binfo = found_binfo;
12138 else
12139 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12140 binfo_offset);
12141 }
12142
12143 type = TREE_TYPE (fld);
12144 offset -= pos;
12145 }
12146 }
12147
12148 /* Returns true if X is a typedef decl. */
12149
12150 bool
12151 is_typedef_decl (const_tree x)
12152 {
12153 return (x && TREE_CODE (x) == TYPE_DECL
12154 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12155 }
12156
12157 /* Returns true iff TYPE is a type variant created for a typedef. */
12158
12159 bool
12160 typedef_variant_p (const_tree type)
12161 {
12162 return is_typedef_decl (TYPE_NAME (type));
12163 }
12164
12165 /* Warn about a use of an identifier which was marked deprecated. */
12166 void
12167 warn_deprecated_use (tree node, tree attr)
12168 {
12169 const char *msg;
12170
12171 if (node == 0 || !warn_deprecated_decl)
12172 return;
12173
12174 if (!attr)
12175 {
12176 if (DECL_P (node))
12177 attr = DECL_ATTRIBUTES (node);
12178 else if (TYPE_P (node))
12179 {
12180 tree decl = TYPE_STUB_DECL (node);
12181 if (decl)
12182 attr = lookup_attribute ("deprecated",
12183 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12184 }
12185 }
12186
12187 if (attr)
12188 attr = lookup_attribute ("deprecated", attr);
12189
12190 if (attr)
12191 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12192 else
12193 msg = NULL;
12194
12195 bool w;
12196 if (DECL_P (node))
12197 {
12198 if (msg)
12199 w = warning (OPT_Wdeprecated_declarations,
12200 "%qD is deprecated: %s", node, msg);
12201 else
12202 w = warning (OPT_Wdeprecated_declarations,
12203 "%qD is deprecated", node);
12204 if (w)
12205 inform (DECL_SOURCE_LOCATION (node), "declared here");
12206 }
12207 else if (TYPE_P (node))
12208 {
12209 tree what = NULL_TREE;
12210 tree decl = TYPE_STUB_DECL (node);
12211
12212 if (TYPE_NAME (node))
12213 {
12214 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12215 what = TYPE_NAME (node);
12216 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12217 && DECL_NAME (TYPE_NAME (node)))
12218 what = DECL_NAME (TYPE_NAME (node));
12219 }
12220
12221 if (decl)
12222 {
12223 if (what)
12224 {
12225 if (msg)
12226 w = warning (OPT_Wdeprecated_declarations,
12227 "%qE is deprecated: %s", what, msg);
12228 else
12229 w = warning (OPT_Wdeprecated_declarations,
12230 "%qE is deprecated", what);
12231 }
12232 else
12233 {
12234 if (msg)
12235 w = warning (OPT_Wdeprecated_declarations,
12236 "type is deprecated: %s", msg);
12237 else
12238 w = warning (OPT_Wdeprecated_declarations,
12239 "type is deprecated");
12240 }
12241 if (w)
12242 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12243 }
12244 else
12245 {
12246 if (what)
12247 {
12248 if (msg)
12249 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12250 what, msg);
12251 else
12252 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12253 }
12254 else
12255 {
12256 if (msg)
12257 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12258 msg);
12259 else
12260 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12261 }
12262 }
12263 }
12264 }
12265
12266 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12267 somewhere in it. */
12268
12269 bool
12270 contains_bitfld_component_ref_p (const_tree ref)
12271 {
12272 while (handled_component_p (ref))
12273 {
12274 if (TREE_CODE (ref) == COMPONENT_REF
12275 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12276 return true;
12277 ref = TREE_OPERAND (ref, 0);
12278 }
12279
12280 return false;
12281 }
12282
12283 /* Try to determine whether a TRY_CATCH expression can fall through.
12284 This is a subroutine of block_may_fallthru. */
12285
12286 static bool
12287 try_catch_may_fallthru (const_tree stmt)
12288 {
12289 tree_stmt_iterator i;
12290
12291 /* If the TRY block can fall through, the whole TRY_CATCH can
12292 fall through. */
12293 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12294 return true;
12295
12296 i = tsi_start (TREE_OPERAND (stmt, 1));
12297 switch (TREE_CODE (tsi_stmt (i)))
12298 {
12299 case CATCH_EXPR:
12300 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12301 catch expression and a body. The whole TRY_CATCH may fall
12302 through iff any of the catch bodies falls through. */
12303 for (; !tsi_end_p (i); tsi_next (&i))
12304 {
12305 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12306 return true;
12307 }
12308 return false;
12309
12310 case EH_FILTER_EXPR:
12311 /* The exception filter expression only matters if there is an
12312 exception. If the exception does not match EH_FILTER_TYPES,
12313 we will execute EH_FILTER_FAILURE, and we will fall through
12314 if that falls through. If the exception does match
12315 EH_FILTER_TYPES, the stack unwinder will continue up the
12316 stack, so we will not fall through. We don't know whether we
12317 will throw an exception which matches EH_FILTER_TYPES or not,
12318 so we just ignore EH_FILTER_TYPES and assume that we might
12319 throw an exception which doesn't match. */
12320 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12321
12322 default:
12323 /* This case represents statements to be executed when an
12324 exception occurs. Those statements are implicitly followed
12325 by a RESX statement to resume execution after the exception.
12326 So in this case the TRY_CATCH never falls through. */
12327 return false;
12328 }
12329 }
12330
12331 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12332 need not be 100% accurate; simply be conservative and return true if we
12333 don't know. This is used only to avoid stupidly generating extra code.
12334 If we're wrong, we'll just delete the extra code later. */
12335
12336 bool
12337 block_may_fallthru (const_tree block)
12338 {
12339 /* This CONST_CAST is okay because expr_last returns its argument
12340 unmodified and we assign it to a const_tree. */
12341 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12342
12343 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12344 {
12345 case GOTO_EXPR:
12346 case RETURN_EXPR:
12347 /* Easy cases. If the last statement of the block implies
12348 control transfer, then we can't fall through. */
12349 return false;
12350
12351 case SWITCH_EXPR:
12352 /* If SWITCH_LABELS is set, this is lowered, and represents a
12353 branch to a selected label and hence can not fall through.
12354 Otherwise SWITCH_BODY is set, and the switch can fall
12355 through. */
12356 return SWITCH_LABELS (stmt) == NULL_TREE;
12357
12358 case COND_EXPR:
12359 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12360 return true;
12361 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12362
12363 case BIND_EXPR:
12364 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12365
12366 case TRY_CATCH_EXPR:
12367 return try_catch_may_fallthru (stmt);
12368
12369 case TRY_FINALLY_EXPR:
12370 /* The finally clause is always executed after the try clause,
12371 so if it does not fall through, then the try-finally will not
12372 fall through. Otherwise, if the try clause does not fall
12373 through, then when the finally clause falls through it will
12374 resume execution wherever the try clause was going. So the
12375 whole try-finally will only fall through if both the try
12376 clause and the finally clause fall through. */
12377 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12378 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12379
12380 case MODIFY_EXPR:
12381 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12382 stmt = TREE_OPERAND (stmt, 1);
12383 else
12384 return true;
12385 /* FALLTHRU */
12386
12387 case CALL_EXPR:
12388 /* Functions that do not return do not fall through. */
12389 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12390
12391 case CLEANUP_POINT_EXPR:
12392 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12393
12394 case TARGET_EXPR:
12395 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12396
12397 case ERROR_MARK:
12398 return true;
12399
12400 default:
12401 return lang_hooks.block_may_fallthru (stmt);
12402 }
12403 }
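
/* Illustrative sketch (not part of the original source): a block whose last
   statement transfers control, e.g. a GOTO_EXPR, cannot fall through,
   whereas an empty or ordinary expression block can.  The helper name and
   the label built here are purely for illustration.  */
#if 0
static void
block_may_fallthru_example (void)
{
  tree lab = create_artificial_label (UNKNOWN_LOCATION);
  tree jump = build1 (GOTO_EXPR, void_type_node, lab);
  gcc_assert (!block_may_fallthru (jump));
}
#endif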
12404
12405 /* True if we are using EH to handle cleanups. */
12406 static bool using_eh_for_cleanups_flag = false;
12407
12408 /* This routine is called from front ends to indicate eh should be used for
12409 cleanups. */
12410 void
12411 using_eh_for_cleanups (void)
12412 {
12413 using_eh_for_cleanups_flag = true;
12414 }
12415
12416 /* Query whether EH is used for cleanups. */
12417 bool
12418 using_eh_for_cleanups_p (void)
12419 {
12420 return using_eh_for_cleanups_flag;
12421 }
12422
12423 /* Wrapper for tree_code_name to ensure that the tree code is valid.  */
12424 const char *
12425 get_tree_code_name (enum tree_code code)
12426 {
12427 const char *invalid = "<invalid tree code>";
12428
12429 if (code >= MAX_TREE_CODES)
12430 return invalid;
12431
12432 return tree_code_name[code];
12433 }
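
/* Illustrative usage sketch (not part of the original source): mostly
   useful in debug output.  The helper name is hypothetical.  */
#if 0
static void
dump_code_name (const_tree t)
{
  fprintf (stderr, "%s\n", get_tree_code_name (TREE_CODE (t)));
}
#endif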
12434
12435 /* Drops the TREE_OVERFLOW flag from T. */
12436
12437 tree
12438 drop_tree_overflow (tree t)
12439 {
12440 gcc_checking_assert (TREE_OVERFLOW (t));
12441
12442 /* For tree codes with a sharing machinery re-build the result. */
12443 if (TREE_CODE (t) == INTEGER_CST)
12444 return wide_int_to_tree (TREE_TYPE (t), t);
12445
12446 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12447 and drop the flag. */
12448 t = copy_node (t);
12449 TREE_OVERFLOW (t) = 0;
12450 return t;
12451 }
12452
12453 /* Given a memory reference expression T, return its base address.
12454 The base address of a memory reference expression is the main
12455 object being referenced. For instance, the base address for
12456 'array[i].fld[j]' is 'array'. You can think of this as stripping
12457 away the offset part from a memory address.
12458
12459 This function calls handled_component_p to strip away all the inner
12460 parts of the memory reference until it reaches the base object. */
12461
12462 tree
12463 get_base_address (tree t)
12464 {
12465 while (handled_component_p (t))
12466 t = TREE_OPERAND (t, 0);
12467
12468 if ((TREE_CODE (t) == MEM_REF
12469 || TREE_CODE (t) == TARGET_MEM_REF)
12470 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12471 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12472
12473 /* ??? Either the alias oracle or all callers need to properly deal
12474 with WITH_SIZE_EXPRs before we can look through those. */
12475 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12476 return NULL_TREE;
12477
12478 return t;
12479 }
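
/* Illustrative sketch (not part of the original source): for a reference
   like array[i].fld the component and array references are stripped and the
   underlying declaration is returned.  The helper and its parameter are
   hypothetical.  */
#if 0
static tree
base_decl_of_reference (tree ref)
{
  tree base = get_base_address (ref);
  return (base && DECL_P (base)) ? base : NULL_TREE;
}
#endif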
12480
12481 /* Return a tree of sizetype representing the size, in bytes, of the element
12482 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12483
12484 tree
12485 array_ref_element_size (tree exp)
12486 {
12487 tree aligned_size = TREE_OPERAND (exp, 3);
12488 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12489 location_t loc = EXPR_LOCATION (exp);
12490
12491 /* If a size was specified in the ARRAY_REF, it's the size measured
12492 in alignment units of the element type. So multiply by that value. */
12493 if (aligned_size)
12494 {
12495 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12496 sizetype from another type of the same width and signedness. */
12497 if (TREE_TYPE (aligned_size) != sizetype)
12498 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12499 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12500 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12501 }
12502
12503 /* Otherwise, take the size from that of the element type. Substitute
12504 any PLACEHOLDER_EXPR that we have. */
12505 else
12506 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12507 }
12508
12509 /* Return a tree representing the lower bound of the array mentioned in
12510 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12511
12512 tree
12513 array_ref_low_bound (tree exp)
12514 {
12515 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12516
12517 /* If a lower bound is specified in EXP, use it. */
12518 if (TREE_OPERAND (exp, 2))
12519 return TREE_OPERAND (exp, 2);
12520
12521 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12522 substituting for a PLACEHOLDER_EXPR as needed. */
12523 if (domain_type && TYPE_MIN_VALUE (domain_type))
12524 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12525
12526 /* Otherwise, return a zero of the appropriate type. */
12527 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12528 }
12529
12530 /* Return a tree representing the upper bound of the array mentioned in
12531 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12532
12533 tree
12534 array_ref_up_bound (tree exp)
12535 {
12536 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12537
12538 /* If there is a domain type and it has an upper bound, use it, substituting
12539 for a PLACEHOLDER_EXPR as needed. */
12540 if (domain_type && TYPE_MAX_VALUE (domain_type))
12541 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12542
12543 /* Otherwise fail. */
12544 return NULL_TREE;
12545 }
12546
12547 /* Returns true if REF is an array reference to an array at the end of
12548 a structure. If this is the case, the array may be allocated larger
12549 than its upper bound implies. */
12550
12551 bool
12552 array_at_struct_end_p (tree ref)
12553 {
12554 if (TREE_CODE (ref) != ARRAY_REF
12555 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12556 return false;
12557
12558 while (handled_component_p (ref))
12559 {
12560 /* If the reference chain contains a component reference to a
12561 non-union type and there follows another field the reference
12562 is not at the end of a structure. */
12563 if (TREE_CODE (ref) == COMPONENT_REF
12564 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12565 {
12566 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12567 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12568 nextf = DECL_CHAIN (nextf);
12569 if (nextf)
12570 return false;
12571 }
12572
12573 ref = TREE_OPERAND (ref, 0);
12574 }
12575
12576 /* If the reference is based on a declared entity, the size of the array
12577 is constrained by its given domain. */
12578 if (DECL_P (ref))
12579 return false;
12580
12581 return true;
12582 }
12583
12584 /* Return a tree representing the offset, in bytes, of the field referenced
12585 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12586
12587 tree
12588 component_ref_field_offset (tree exp)
12589 {
12590 tree aligned_offset = TREE_OPERAND (exp, 2);
12591 tree field = TREE_OPERAND (exp, 1);
12592 location_t loc = EXPR_LOCATION (exp);
12593
12594 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12595 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12596 value. */
12597 if (aligned_offset)
12598 {
12599 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12600 sizetype from another type of the same width and signedness. */
12601 if (TREE_TYPE (aligned_offset) != sizetype)
12602 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12603 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12604 size_int (DECL_OFFSET_ALIGN (field)
12605 / BITS_PER_UNIT));
12606 }
12607
12608 /* Otherwise, take the offset from that of the field. Substitute
12609 any PLACEHOLDER_EXPR that we have. */
12610 else
12611 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12612 }
12613
12614 /* Return the machine mode of T. For vectors, returns the mode of the
12615 inner type. The main use case is to feed the result to HONOR_NANS,
12616 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12617
12618 machine_mode
12619 element_mode (const_tree t)
12620 {
12621 if (!TYPE_P (t))
12622 t = TREE_TYPE (t);
12623 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12624 t = TREE_TYPE (t);
12625 return TYPE_MODE (t);
12626 }
12627
12628
12629 /* Verify that basic properties of T match TV and thus T can be a variant of
12630    TV.  TV should be the more specific variant (i.e. the main variant).  */
12631
12632 static bool
12633 verify_type_variant (const_tree t, tree tv)
12634 {
12635 /* Type variant can differ by:
12636
12637 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12638 ENCODE_QUAL_ADDR_SPACE.
12639 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12640 in this case some values may not be set in the variant types
12641 (see TYPE_COMPLETE_P checks).
12642      - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12643      - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12644      - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12645      - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12646      - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P;
12647        this is necessary to make it possible to merge types from different TUs
12648      - arrays, pointers and references may have TREE_TYPE that is a variant
12649        of the TREE_TYPE of their main variants.
12650      - aggregates may have a new TYPE_FIELDS list that lists variants of
12651        the main variant's TYPE_FIELDS.
12652      - vector types may differ by TYPE_VECTOR_OPAQUE
12653      - TYPE_METHODS is always NULL for variant types and maintained for
12654        the main variant only.
12655 */
12656
12657 /* Convenience macro for matching individual fields. */
12658 #define verify_variant_match(flag) \
12659 do { \
12660 if (flag (tv) != flag (t)) \
12661 { \
12662 error ("type variant differs by " #flag "."); \
12663 debug_tree (tv); \
12664 return false; \
12665 } \
12666 } while (false)
12667
12668 /* tree_base checks. */
12669
12670 verify_variant_match (TREE_CODE);
12671 /* FIXME: Ada builds non-artificial variants of artificial types. */
12672 if (TYPE_ARTIFICIAL (tv) && 0)
12673 verify_variant_match (TYPE_ARTIFICIAL);
12674 if (POINTER_TYPE_P (tv))
12675 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12676   /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds.  */
12677 verify_variant_match (TYPE_UNSIGNED);
12678 verify_variant_match (TYPE_ALIGN_OK);
12679 verify_variant_match (TYPE_PACKED);
12680 if (TREE_CODE (t) == REFERENCE_TYPE)
12681 verify_variant_match (TYPE_REF_IS_RVALUE);
12682 verify_variant_match (TYPE_SATURATING);
12683   /* FIXME: This check triggers during the libstdc++ build.  */
12684 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12685 verify_variant_match (TYPE_FINAL_P);
12686
12687 /* tree_type_common checks. */
12688
12689 if (COMPLETE_TYPE_P (t))
12690 {
12691 verify_variant_match (TYPE_SIZE);
12692 verify_variant_match (TYPE_MODE);
12693 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12694 	  /* FIXME: ideally we should compare for pointer equality, but the Java FE
12695 	     produces variants where the size is an INTEGER_CST of a different type
12696 	     (int wrt size_type) during the libjava build.  */
12697 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12698 {
12699 error ("type variant has different TYPE_SIZE_UNIT");
12700 debug_tree (tv);
12701 error ("type variant's TYPE_SIZE_UNIT");
12702 debug_tree (TYPE_SIZE_UNIT (tv));
12703 error ("type's TYPE_SIZE_UNIT");
12704 debug_tree (TYPE_SIZE_UNIT (t));
12705 return false;
12706 }
12707 }
12708 verify_variant_match (TYPE_PRECISION);
12709 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12710 if (RECORD_OR_UNION_TYPE_P (t))
12711 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12712 else if (TREE_CODE (t) == ARRAY_TYPE)
12713 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12714   /* During LTO we merge variant lists from different translation units
12715      that may differ by TYPE_CONTEXT, which in turn may point
12716      to a TRANSLATION_UNIT_DECL.
12717      Ada also builds variants of types with different TYPE_CONTEXT.   */
12718 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12719 verify_variant_match (TYPE_CONTEXT);
12720 verify_variant_match (TYPE_STRING_FLAG);
12721 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12722 verify_variant_match (TYPE_ALIAS_SET);
12723
12724 /* tree_type_non_common checks. */
12725
12726   /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12727      and dangles the pointer from time to time.  */
12728 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12729 && (in_lto_p || !TYPE_VFIELD (tv)
12730 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12731 {
12732 error ("type variant has different TYPE_VFIELD");
12733 debug_tree (tv);
12734 return false;
12735 }
12736 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12737 || TREE_CODE (t) == INTEGER_TYPE
12738 || TREE_CODE (t) == BOOLEAN_TYPE
12739 || TREE_CODE (t) == REAL_TYPE
12740 || TREE_CODE (t) == FIXED_POINT_TYPE)
12741 {
12742 verify_variant_match (TYPE_MAX_VALUE);
12743 verify_variant_match (TYPE_MIN_VALUE);
12744 }
12745 if (TREE_CODE (t) == METHOD_TYPE)
12746 verify_variant_match (TYPE_METHOD_BASETYPE);
12747 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12748 {
12749 error ("type variant has TYPE_METHODS");
12750 debug_tree (tv);
12751 return false;
12752 }
12753 if (TREE_CODE (t) == OFFSET_TYPE)
12754 verify_variant_match (TYPE_OFFSET_BASETYPE);
12755 if (TREE_CODE (t) == ARRAY_TYPE)
12756 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12757   /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12758      or even a type's main variant.  This is needed to make bootstrap pass,
12759      and the bug seems new in GCC 5.
12760      The C++ FE should be updated to make this consistent and we should check
12761      that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and that otherwise
12762      there is a match with the main variant.
12763 
12764      Also disable the check for Java for now because of a parser hack that
12765      builds first a dummy BINFO and then sometimes replaces it by the real
12766      BINFO in some of the copies.  */
12767 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12768 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12769 	 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12770 	    Since there is no cheap way to tell a C++ type from a Java type w/o
12771 	    LTO, do the checking at LTO time only.  */
12772 && (in_lto_p && odr_type_p (t)))
12773 {
12774 error ("type variant has different TYPE_BINFO");
12775 debug_tree (tv);
12776 error ("type variant's TYPE_BINFO");
12777 debug_tree (TYPE_BINFO (tv));
12778 error ("type's TYPE_BINFO");
12779 debug_tree (TYPE_BINFO (t));
12780 return false;
12781 }
12782
12783 /* Check various uses of TYPE_VALUES_RAW. */
12784 if (TREE_CODE (t) == ENUMERAL_TYPE)
12785 verify_variant_match (TYPE_VALUES);
12786 else if (TREE_CODE (t) == ARRAY_TYPE)
12787 verify_variant_match (TYPE_DOMAIN);
12788 /* Permit incomplete variants of complete type. While FEs may complete
12789 all variants, this does not happen for C++ templates in all cases. */
12790 else if (RECORD_OR_UNION_TYPE_P (t)
12791 && COMPLETE_TYPE_P (t)
12792 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12793 {
12794 tree f1, f2;
12795
12796       /* Fortran builds qualified variants as new records with items of
12797 	 qualified type.  Verify that they look the same.  */
12798 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12799 f1 && f2;
12800 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12801 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12802 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12803 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12804 	        /* FIXME: gfc_nonrestricted_type builds all types as variants
12805 		   with the exception of pointer types.  It deeply copies the type,
12806 		   which means that we may end up with a variant type
12807 		   referring to a non-variant pointer.  We may change it to
12808 		   produce types as variants, too, like
12809 		   objc_get_protocol_qualified_type does.  */
12810 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12811 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12812 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12813 break;
12814 if (f1 || f2)
12815 {
12816 error ("type variant has different TYPE_FIELDS");
12817 debug_tree (tv);
12818 error ("first mismatch is field");
12819 debug_tree (f1);
12820 error ("and field");
12821 debug_tree (f2);
12822 return false;
12823 }
12824 }
12825 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12826 verify_variant_match (TYPE_ARG_TYPES);
12827   /* For C++ the qualified variant of an array type is really an array type
12828      of the qualified TREE_TYPE.
12829      ObjC builds variants of pointer types where the pointed-to type is a
12830      variant, too, in objc_get_protocol_qualified_type.  */
12831 if (TREE_TYPE (t) != TREE_TYPE (tv)
12832 && ((TREE_CODE (t) != ARRAY_TYPE
12833 && !POINTER_TYPE_P (t))
12834 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12835 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12836 {
12837 error ("type variant has different TREE_TYPE");
12838 debug_tree (tv);
12839 error ("type variant's TREE_TYPE");
12840 debug_tree (TREE_TYPE (tv));
12841 error ("type's TREE_TYPE");
12842 debug_tree (TREE_TYPE (t));
12843 return false;
12844 }
12845 if (type_with_alias_set_p (t)
12846 && !gimple_canonical_types_compatible_p (t, tv, false))
12847 {
12848       error ("type is not compatible with its variant");
12849 debug_tree (tv);
12850 error ("type variant's TREE_TYPE");
12851 debug_tree (TREE_TYPE (tv));
12852 error ("type's TREE_TYPE");
12853 debug_tree (TREE_TYPE (t));
12854 return false;
12855 }
12856 return true;
12857 #undef verify_variant_match
12858 }
12859
12860
12861 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12862 the middle-end types_compatible_p function. It needs to avoid
12863 claiming types are different for types that should be treated
12864 the same with respect to TBAA. Canonical types are also used
12865 for IL consistency checks via the useless_type_conversion_p
12866 predicate which does not handle all type kinds itself but falls
12867 back to pointer-comparison of TYPE_CANONICAL for aggregates
12868 for example. */
12869
12870 /* Return true iff T1 and T2 are structurally identical for what
12871 TBAA is concerned.
12872 This function is used both by lto.c canonical type merging and by the
12873 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
12874 that have TYPE_CANONICAL defined and assume them equivalent. */
12875
12876 bool
12877 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
12878 bool trust_type_canonical)
12879 {
12880   /* Type variants should be the same as the main variant.  When not doing sanity
12881      checking to verify this fact, go to the main variants and save some work.  */
12882 if (trust_type_canonical)
12883 {
12884 t1 = TYPE_MAIN_VARIANT (t1);
12885 t2 = TYPE_MAIN_VARIANT (t2);
12886 }
12887
12888 /* Check first for the obvious case of pointer identity. */
12889 if (t1 == t2)
12890 return true;
12891
12892 /* Check that we have two types to compare. */
12893 if (t1 == NULL_TREE || t2 == NULL_TREE)
12894 return false;
12895
12896   /* We consider complete types to be always compatible with incomplete types.
12897      This does not make sense for canonical type calculation, and thus we
12898      need to ensure that we are never called on incomplete types here.
12899
12900 FIXME: For more correctness the function probably should have three modes
12901      1) mode assuming that types are complete, matching their structure
12902 2) mode allowing incomplete types but producing equivalence classes
12903 and thus ignoring all info from complete types
12904 3) mode allowing incomplete types to match complete but checking
12905 compatibility between complete types.
12906
12907 1 and 2 can be used for canonical type calculation. 3 is the real
12908      definition of type compatibility that can be used e.g. for warnings during
12909 declaration merging. */
12910
12911 gcc_assert (!trust_type_canonical
12912 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
12913 /* If the types have been previously registered and found equal
12914 they still are. */
12915 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
12916 && trust_type_canonical)
12917 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
12918
12919 /* Can't be the same type if the types don't have the same code. */
12920 if (tree_code_for_canonical_type_merging (TREE_CODE (t1))
12921 != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
12922 return false;
12923
12924 /* Qualifiers do not matter for canonical type comparison purposes. */
12925
12926 /* Void types and nullptr types are always the same. */
12927 if (TREE_CODE (t1) == VOID_TYPE
12928 || TREE_CODE (t1) == NULLPTR_TYPE)
12929 return true;
12930
12931 /* Can't be the same type if they have different modes. */
12932 if (TYPE_MODE (t1) != TYPE_MODE (t2))
12933 return false;
12934
12935 /* Non-aggregate types can be handled cheaply. */
12936 if (INTEGRAL_TYPE_P (t1)
12937 || SCALAR_FLOAT_TYPE_P (t1)
12938 || FIXED_POINT_TYPE_P (t1)
12939 || TREE_CODE (t1) == VECTOR_TYPE
12940 || TREE_CODE (t1) == COMPLEX_TYPE
12941 || TREE_CODE (t1) == OFFSET_TYPE
12942 || POINTER_TYPE_P (t1))
12943 {
12944 /* Can't be the same type if they have different sign or precision. */
12945 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
12946 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
12947 return false;
12948
12949 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
12950 interoperable with "signed char". Unless all frontends are revisited
12951 to agree on these types, we must ignore the flag completely. */
12952
12953 /* The Fortran standard defines a C_PTR type that is compatible with every
12954 C pointer. For this reason we need to glob all pointers into one.
12955 Still, pointers in different address spaces are not compatible. */
12956 if (POINTER_TYPE_P (t1))
12957 {
12958 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
12959 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
12960 return false;
12961 }
12962
12963 /* Tail-recurse to components. */
12964 if (TREE_CODE (t1) == VECTOR_TYPE
12965 || TREE_CODE (t1) == COMPLEX_TYPE)
12966 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
12967 TREE_TYPE (t2),
12968 trust_type_canonical);
12969
12970 return true;
12971 }
12972
12973 /* Do type-specific comparisons. */
12974 switch (TREE_CODE (t1))
12975 {
12976 case ARRAY_TYPE:
12977 /* Array types are the same if the element types are the same and
12978 the number of elements is the same. */
12979 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
12980 trust_type_canonical)
12981 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
12982 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
12983 return false;
12984 else
12985 {
12986 tree i1 = TYPE_DOMAIN (t1);
12987 tree i2 = TYPE_DOMAIN (t2);
12988
12989 /* For an incomplete external array, the type domain can be
12990 NULL_TREE. Check this condition also. */
12991 if (i1 == NULL_TREE && i2 == NULL_TREE)
12992 return true;
12993 else if (i1 == NULL_TREE || i2 == NULL_TREE)
12994 return false;
12995 else
12996 {
12997 tree min1 = TYPE_MIN_VALUE (i1);
12998 tree min2 = TYPE_MIN_VALUE (i2);
12999 tree max1 = TYPE_MAX_VALUE (i1);
13000 tree max2 = TYPE_MAX_VALUE (i2);
13001
13002 /* The minimum/maximum values have to be the same. */
13003 if ((min1 == min2
13004 || (min1 && min2
13005 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13006 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13007 || operand_equal_p (min1, min2, 0))))
13008 && (max1 == max2
13009 || (max1 && max2
13010 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13011 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13012 || operand_equal_p (max1, max2, 0)))))
13013 return true;
13014 else
13015 return false;
13016 }
13017 }
13018
13019 case METHOD_TYPE:
13020 case FUNCTION_TYPE:
13021 /* Function types are the same if the return type and argument types
13022 are the same. */
13023 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13024 trust_type_canonical))
13025 return false;
13026
13027 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13028 return true;
13029 else
13030 {
13031 tree parms1, parms2;
13032
13033 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13034 parms1 && parms2;
13035 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13036 {
13037 if (!gimple_canonical_types_compatible_p
13038 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13039 trust_type_canonical))
13040 return false;
13041 }
13042
13043 if (parms1 || parms2)
13044 return false;
13045
13046 return true;
13047 }
13048
13049 case RECORD_TYPE:
13050 case UNION_TYPE:
13051 case QUAL_UNION_TYPE:
13052 {
13053 tree f1, f2;
13054
13055 /* For aggregate types, all the fields must be the same. */
13056 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13057 f1 || f2;
13058 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13059 {
13060 /* Skip non-fields. */
13061 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13062 f1 = TREE_CHAIN (f1);
13063 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13064 f2 = TREE_CHAIN (f2);
13065 if (!f1 || !f2)
13066 break;
13067 /* The fields must have the same name, offset and type. */
13068 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13069 || !gimple_compare_field_offset (f1, f2)
13070 || !gimple_canonical_types_compatible_p
13071 (TREE_TYPE (f1), TREE_TYPE (f2),
13072 trust_type_canonical))
13073 return false;
13074 }
13075
13076 /* If one aggregate has more fields than the other, they
13077 are not the same. */
13078 if (f1 || f2)
13079 return false;
13080
13081 return true;
13082 }
13083
13084 default:
13085 /* Consider all types with language-specific trees in them mutually
13086 compatible. This is executed only from verify_type and false
13087 positives can be tolerated. */
13088 gcc_assert (!in_lto_p);
13089 return true;
13090 }
13091 }
13092
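/* Usage sketch (illustrative; the calls below are not existing call sites):
   per the comment before gimple_canonical_types_compatible_p, the canonical
   type merging done by lto.c trusts previously computed TYPE_CANONICAL
   values, while the verifiers in this file request a full structural walk:

     // Merging-style query: short-circuit via TYPE_CANONICAL when set.
     bool merged_p = gimple_canonical_types_compatible_p (t1, t2, true);

     // Verifier-style query (see verify_type_variant and verify_type):
     // always compare the structure of the two types.
     bool compat_p = gimple_canonical_types_compatible_p (t1, t2, false);
   */
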
13093 /* Verify type T. */
13094
13095 void
13096 verify_type (const_tree t)
13097 {
13098 bool error_found = false;
13099 tree mv = TYPE_MAIN_VARIANT (t);
13100 if (!mv)
13101 {
13102 error ("Main variant is not defined");
13103 error_found = true;
13104 }
13105 else if (mv != TYPE_MAIN_VARIANT (mv))
13106 {
13107 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13108 debug_tree (mv);
13109 error_found = true;
13110 }
13111 else if (t != mv && !verify_type_variant (t, mv))
13112 error_found = true;
13113
13114 tree ct = TYPE_CANONICAL (t);
13115 if (!ct)
13116 ;
13117 else if (TYPE_CANONICAL (t) != ct)
13118 {
13119 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13120 debug_tree (ct);
13121 error_found = true;
13122 }
13123 /* Method and function types cannot be used to address memory and thus
13124 TYPE_CANONICAL really matters only for determining useless conversions.
13125
13126 FIXME: The C++ FE produces declarations of builtin functions that are not
13127 compatible with main variants. */
13128 else if (TREE_CODE (t) == FUNCTION_TYPE)
13129 ;
13130 else if (t != ct
13131 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13132 with variably sized arrays because their sizes are possibly
13133 gimplified to different variables. */
13134 && !variably_modified_type_p (ct, NULL)
13135 && !gimple_canonical_types_compatible_p (t, ct, false))
13136 {
13137 error ("TYPE_CANONICAL is not compatible");
13138 debug_tree (ct);
13139 error_found = true;
13140 }
13141
13142
13143 /* Check various uses of TYPE_MINVAL. */
13144 if (RECORD_OR_UNION_TYPE_P (t))
13145 {
13146 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13147 and dangles the pointer from time to time. */
13148 if (TYPE_VFIELD (t)
13149 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13150 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13151 {
13152 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13153 debug_tree (TYPE_VFIELD (t));
13154 error_found = true;
13155 }
13156 }
13157 else if (TREE_CODE (t) == POINTER_TYPE)
13158 {
13159 if (TYPE_NEXT_PTR_TO (t)
13160 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13161 {
13162 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13163 debug_tree (TYPE_NEXT_PTR_TO (t));
13164 error_found = true;
13165 }
13166 }
13167 else if (TREE_CODE (t) == REFERENCE_TYPE)
13168 {
13169 if (TYPE_NEXT_REF_TO (t)
13170 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13171 {
13172 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13173 debug_tree (TYPE_NEXT_REF_TO (t));
13174 error_found = true;
13175 }
13176 }
13177 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13178 || TREE_CODE (t) == FIXED_POINT_TYPE)
13179 {
13180 /* FIXME: The following check should pass:
13181 useless_type_conversion_p (const_cast <tree> (t),
13182 TREE_TYPE (TYPE_MIN_VALUE (t)))
13183 but does not for C sizetypes in LTO. */
13184 }
13185 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13186 else if (TYPE_MINVAL (t)
13187 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13188 || in_lto_p))
13189 {
13190 error ("TYPE_MINVAL non-NULL");
13191 debug_tree (TYPE_MINVAL (t));
13192 error_found = true;
13193 }
13194
13195 /* Check various uses of TYPE_MAXVAL. */
13196 if (RECORD_OR_UNION_TYPE_P (t))
13197 {
13198 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13199 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13200 && TYPE_METHODS (t) != error_mark_node)
13201 {
13202 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13203 debug_tree (TYPE_METHODS (t));
13204 error_found = true;
13205 }
13206 }
13207 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13208 {
13209 if (TYPE_METHOD_BASETYPE (t)
13210 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13211 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13212 {
13213 error ("TYPE_METHOD_BASETYPE is not record nor union");
13214 debug_tree (TYPE_METHOD_BASETYPE (t));
13215 error_found = true;
13216 }
13217 }
13218 else if (TREE_CODE (t) == OFFSET_TYPE)
13219 {
13220 if (TYPE_OFFSET_BASETYPE (t)
13221 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13222 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13223 {
13224 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13225 debug_tree (TYPE_OFFSET_BASETYPE (t));
13226 error_found = true;
13227 }
13228 }
13229 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13230 || TREE_CODE (t) == FIXED_POINT_TYPE)
13231 {
13232 /* FIXME: The following check should pass:
13233 useless_type_conversion_p (const_cast <tree> (t),
13234 TREE_TYPE (TYPE_MAX_VALUE (t)))
13235 but does not for C sizetypes in LTO. */
13236 }
13237 else if (TREE_CODE (t) == ARRAY_TYPE)
13238 {
13239 if (TYPE_ARRAY_MAX_SIZE (t)
13240 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13241 {
13242 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13243 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13244 error_found = true;
13245 }
13246 }
13247 else if (TYPE_MAXVAL (t))
13248 {
13249 error ("TYPE_MAXVAL non-NULL");
13250 debug_tree (TYPE_MAXVAL (t));
13251 error_found = true;
13252 }
13253
13254 /* Check various uses of TYPE_BINFO. */
13255 if (RECORD_OR_UNION_TYPE_P (t))
13256 {
13257 if (!TYPE_BINFO (t))
13258 ;
13259 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13260 {
13261 error ("TYPE_BINFO is not TREE_BINFO");
13262 debug_tree (TYPE_BINFO (t));
13263 error_found = true;
13264 }
13265 /* FIXME: Java builds invalid empty binfos that do not have
13266 TREE_TYPE set. */
13267 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13268 {
13269 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13270 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13271 error_found = true;
13272 }
13273 }
13274 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13275 {
13276 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13277 debug_tree (TYPE_LANG_SLOT_1 (t));
13278 error_found = true;
13279 }
13280
13281 /* Check various uses of TYPE_VALUES_RAW. */
13282 if (TREE_CODE (t) == ENUMERAL_TYPE)
13283 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13284 {
13285 tree value = TREE_VALUE (l);
13286 tree name = TREE_PURPOSE (l);
13287
13288 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13289 a CONST_DECL of ENUMERAL_TYPE. */
13290 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13291 {
13292 error ("Enum value is not CONST_DECL or INTEGER_CST");
13293 debug_tree (value);
13294 debug_tree (name);
13295 error_found = true;
13296 }
13297 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13298 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13299 {
13300 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13301 debug_tree (value);
13302 debug_tree (name);
13303 error_found = true;
13304 }
13305 if (TREE_CODE (name) != IDENTIFIER_NODE)
13306 {
13307 error ("Enum value name is not IDENTIFIER_NODE");
13308 debug_tree (value);
13309 debug_tree (name);
13310 error_found = true;
13311 }
13312 }
13313 else if (TREE_CODE (t) == ARRAY_TYPE)
13314 {
13315 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13316 {
13317 error ("Array TYPE_DOMAIN is not integer type");
13318 debug_tree (TYPE_DOMAIN (t));
13319 error_found = true;
13320 }
13321 }
13322 else if (RECORD_OR_UNION_TYPE_P (t))
13323 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13324 {
13325 /* TODO: verify properties of decls. */
13326 if (TREE_CODE (fld) == FIELD_DECL)
13327 ;
13328 else if (TREE_CODE (fld) == TYPE_DECL)
13329 ;
13330 else if (TREE_CODE (fld) == CONST_DECL)
13331 ;
13332 else if (TREE_CODE (fld) == VAR_DECL)
13333 ;
13334 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13335 ;
13336 else if (TREE_CODE (fld) == USING_DECL)
13337 ;
13338 else
13339 {
13340 error ("Wrong tree in TYPE_FIELDS list");
13341 debug_tree (fld);
13342 error_found = true;
13343 }
13344 }
13345 else if (TREE_CODE (t) == INTEGER_TYPE
13346 || TREE_CODE (t) == BOOLEAN_TYPE
13347 || TREE_CODE (t) == OFFSET_TYPE
13348 || TREE_CODE (t) == REFERENCE_TYPE
13349 || TREE_CODE (t) == NULLPTR_TYPE
13350 || TREE_CODE (t) == POINTER_TYPE)
13351 {
13352 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13353 {
13354 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13355 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13356 error_found = true;
13357 }
13358 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13359 {
13360 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13361 debug_tree (TYPE_CACHED_VALUES (t));
13362 error_found = true;
13363 }
13364 /* Verify just enough of the cache to ensure that no one copied it to a new
13365 type. All copying should go through copy_node, which should clear it. */
13366 else if (TYPE_CACHED_VALUES_P (t))
13367 {
13368 int i;
13369 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13370 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13371 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13372 {
13373 error ("wrong TYPE_CACHED_VALUES entry");
13374 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13375 error_found = true;
13376 break;
13377 }
13378 }
13379 }
13380 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13381 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13382 {
13383 /* C++ FE uses TREE_PURPOSE to store initial values. */
13384 if (TREE_PURPOSE (l) && in_lto_p)
13385 {
13386 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13387 debug_tree (l);
13388 error_found = true;
13389 }
13390 if (!TYPE_P (TREE_VALUE (l)))
13391 {
13392 error ("Wrong entry in TYPE_ARG_TYPES list");
13393 debug_tree (l);
13394 error_found = true;
13395 }
13396 }
13397 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13398 {
13399 error ("TYPE_VALUES_RAW field is non-NULL");
13400 debug_tree (TYPE_VALUES_RAW (t));
13401 error_found = true;
13402 }
13403 if (TREE_CODE (t) != INTEGER_TYPE
13404 && TREE_CODE (t) != BOOLEAN_TYPE
13405 && TREE_CODE (t) != OFFSET_TYPE
13406 && TREE_CODE (t) != REFERENCE_TYPE
13407 && TREE_CODE (t) != NULLPTR_TYPE
13408 && TREE_CODE (t) != POINTER_TYPE
13409 && TYPE_CACHED_VALUES_P (t))
13410 {
13411 error ("TYPE_CACHED_VALUES_P is set while it should not");
13412 error_found = true;
13413 }
13414 if (TYPE_STRING_FLAG (t)
13415 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13416 {
13417 error ("TYPE_STRING_FLAG is set on wrong type code");
13418 error_found = true;
13419 }
13420 else if (TYPE_STRING_FLAG (t))
13421 {
13422 const_tree b = t;
13423 if (TREE_CODE (b) == ARRAY_TYPE)
13424 b = TREE_TYPE (t);
13425 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
13426 which is 32 bits. */
13427 if (TREE_CODE (b) != INTEGER_TYPE)
13428 {
13429 error ("TYPE_STRING_FLAG is set on type that does not look like "
13430 "char nor array of chars");
13431 error_found = true;
13432 }
13433 }
13434
13435 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always the
13436 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13437 of a type. */
13438 if (TREE_CODE (t) == METHOD_TYPE
13439 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13440 {
13441 error ("TYPE_METHOD_BASETYPE is not main variant");
13442 error_found = true;
13443 }
13444
13445 if (error_found)
13446 {
13447 debug_tree (const_cast <tree> (t));
13448 internal_error ("verify_type failed");
13449 }
13450 }
13451
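/* Usage sketch (illustrative assumption; not an existing call site): checking
   code can invoke verify_type on any type node, for example

     verify_type (long_integer_type_node);

   which returns silently when all invariants above hold and otherwise dumps
   the offending tree and stops via internal_error ("verify_type failed"). */
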
13452 #include "gt-tree.h"