PR c/71171: Fix uninitialized source_range in c_parser_postfix_expression
gcc.git: gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but occasionally
28 calls language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64
65 /* Tree code classes. */
66
67 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
68 #define END_OF_BASE_TREE_CODES tcc_exceptional,
69
70 const enum tree_code_class tree_code_type[] = {
71 #include "all-tree.def"
72 };
73
74 #undef DEFTREECODE
75 #undef END_OF_BASE_TREE_CODES
76
77 /* Table indexed by tree code giving number of expression
78 operands beyond the fixed part of the node structure.
79 Not used for types or decls. */
80
81 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
82 #define END_OF_BASE_TREE_CODES 0,
83
84 const unsigned char tree_code_length[] = {
85 #include "all-tree.def"
86 };
87
88 #undef DEFTREECODE
89 #undef END_OF_BASE_TREE_CODES
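/* Illustrative example: a tree.def entry such as
   DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)
   contributes tcc_binary to tree_code_type[] and 2 to tree_code_length[]
   above, i.e. PLUS_EXPR is a binary expression with two operands. */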
90
91 /* Names of tree components.
92 Used for printing out the tree and error messages. */
93 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
94 #define END_OF_BASE_TREE_CODES "@dummy",
95
96 static const char *const tree_code_name[] = {
97 #include "all-tree.def"
98 };
99
100 #undef DEFTREECODE
101 #undef END_OF_BASE_TREE_CODES
102
103 /* Each tree code class has an associated string representation.
104 These must correspond to the tree_code_class entries. */
105
106 const char *const tree_code_class_strings[] =
107 {
108 "exceptional",
109 "constant",
110 "type",
111 "declaration",
112 "reference",
113 "comparison",
114 "unary",
115 "binary",
116 "statement",
117 "vl_exp",
118 "expression"
119 };
120
121 /* obstack.[ch] explicitly declined to prototype this. */
122 extern int _obstack_allocated_p (struct obstack *h, void *obj);
123
124 /* Statistics-gathering stuff. */
125
126 static int tree_code_counts[MAX_TREE_CODES];
127 int tree_node_counts[(int) all_kinds];
128 int tree_node_sizes[(int) all_kinds];
129
130 /* Keep in sync with tree.h:enum tree_node_kind. */
131 static const char * const tree_node_kind_names[] = {
132 "decls",
133 "types",
134 "blocks",
135 "stmts",
136 "refs",
137 "exprs",
138 "constants",
139 "identifiers",
140 "vecs",
141 "binfos",
142 "ssa names",
143 "constructors",
144 "random kinds",
145 "lang_decl kinds",
146 "lang_type kinds",
147 "omp clauses",
148 };
149
150 /* Unique id for next decl created. */
151 static GTY(()) int next_decl_uid;
152 /* Unique id for next type created. */
153 static GTY(()) int next_type_uid = 1;
154 /* Unique id for next debug decl created. Use negative numbers,
155 to catch erroneous uses. */
156 static GTY(()) int next_debug_decl_uid;
157
158 /* Since we cannot rehash a type after it is in the table, we have to
159 keep the hash code. */
160
161 struct GTY((for_user)) type_hash {
162 unsigned long hash;
163 tree type;
164 };
165
166 /* Initial size of the hash table (rounded to next prime). */
167 #define TYPE_HASH_INITIAL_SIZE 1000
168
169 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
170 {
171 static hashval_t hash (type_hash *t) { return t->hash; }
172 static bool equal (type_hash *a, type_hash *b);
173
174 static int
175 keep_cache_entry (type_hash *&t)
176 {
177 return ggc_marked_p (t->type);
178 }
179 };
180
181 /* Now here is the hash table. When recording a type, it is added to
182 the slot whose index is the hash code. Note that the hash table is
183 used for several kinds of types (function types, array types and
184 array index range types, for now). While all these live in the
185 same table, they are completely independent, and the hash code is
186 computed differently for each of these. */
187
188 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
189
190 /* Hash table and temporary node for larger integer const values. */
191 static GTY (()) tree int_cst_node;
192
193 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
194 {
195 static hashval_t hash (tree t);
196 static bool equal (tree x, tree y);
197 };
198
199 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
200
201 /* Hash table for optimization flags and target option flags. Use the same
202 hash table for both sets of options. Nodes for building the current
203 optimization and target option nodes. The assumption is most of the time
204 the options created will already be in the hash table, so we avoid
205 allocating and freeing up a node repeatedly. */
206 static GTY (()) tree cl_optimization_node;
207 static GTY (()) tree cl_target_option_node;
208
209 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
210 {
211 static hashval_t hash (tree t);
212 static bool equal (tree x, tree y);
213 };
214
215 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
216
217 /* General tree->tree mapping structure for use in hash tables. */
218
219
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
222
223 static GTY ((cache))
224 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
225
226 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
227 {
228 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
229
230 static bool
231 equal (tree_vec_map *a, tree_vec_map *b)
232 {
233 return a->base.from == b->base.from;
234 }
235
236 static int
237 keep_cache_entry (tree_vec_map *&m)
238 {
239 return ggc_marked_p (m->base.from);
240 }
241 };
242
243 static GTY ((cache))
244 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
245
246 static void set_type_quals (tree, int);
247 static void print_type_hash_statistics (void);
248 static void print_debug_expr_statistics (void);
249 static void print_value_expr_statistics (void);
250 static void type_hash_list (const_tree, inchash::hash &);
251 static void attribute_hash_list (const_tree, inchash::hash &);
252
253 tree global_trees[TI_MAX];
254 tree integer_types[itk_none];
255
256 bool int_n_enabled_p[NUM_INT_N_ENTS];
257 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
258
259 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
260
261 /* Number of operands for each OpenMP clause. */
262 unsigned const char omp_clause_num_ops[] =
263 {
264 0, /* OMP_CLAUSE_ERROR */
265 1, /* OMP_CLAUSE_PRIVATE */
266 1, /* OMP_CLAUSE_SHARED */
267 1, /* OMP_CLAUSE_FIRSTPRIVATE */
268 2, /* OMP_CLAUSE_LASTPRIVATE */
269 5, /* OMP_CLAUSE_REDUCTION */
270 1, /* OMP_CLAUSE_COPYIN */
271 1, /* OMP_CLAUSE_COPYPRIVATE */
272 3, /* OMP_CLAUSE_LINEAR */
273 2, /* OMP_CLAUSE_ALIGNED */
274 1, /* OMP_CLAUSE_DEPEND */
275 1, /* OMP_CLAUSE_UNIFORM */
276 1, /* OMP_CLAUSE_TO_DECLARE */
277 1, /* OMP_CLAUSE_LINK */
278 2, /* OMP_CLAUSE_FROM */
279 2, /* OMP_CLAUSE_TO */
280 2, /* OMP_CLAUSE_MAP */
281 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
282 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
283 2, /* OMP_CLAUSE__CACHE_ */
284 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
285 2, /* OMP_CLAUSE_GANG */
286 1, /* OMP_CLAUSE_ASYNC */
287 1, /* OMP_CLAUSE_WAIT */
288 0, /* OMP_CLAUSE_AUTO */
289 0, /* OMP_CLAUSE_SEQ */
290 1, /* OMP_CLAUSE__LOOPTEMP_ */
291 1, /* OMP_CLAUSE_IF */
292 1, /* OMP_CLAUSE_NUM_THREADS */
293 1, /* OMP_CLAUSE_SCHEDULE */
294 0, /* OMP_CLAUSE_NOWAIT */
295 1, /* OMP_CLAUSE_ORDERED */
296 0, /* OMP_CLAUSE_DEFAULT */
297 3, /* OMP_CLAUSE_COLLAPSE */
298 0, /* OMP_CLAUSE_UNTIED */
299 1, /* OMP_CLAUSE_FINAL */
300 0, /* OMP_CLAUSE_MERGEABLE */
301 1, /* OMP_CLAUSE_DEVICE */
302 1, /* OMP_CLAUSE_DIST_SCHEDULE */
303 0, /* OMP_CLAUSE_INBRANCH */
304 0, /* OMP_CLAUSE_NOTINBRANCH */
305 1, /* OMP_CLAUSE_NUM_TEAMS */
306 1, /* OMP_CLAUSE_THREAD_LIMIT */
307 0, /* OMP_CLAUSE_PROC_BIND */
308 1, /* OMP_CLAUSE_SAFELEN */
309 1, /* OMP_CLAUSE_SIMDLEN */
310 0, /* OMP_CLAUSE_FOR */
311 0, /* OMP_CLAUSE_PARALLEL */
312 0, /* OMP_CLAUSE_SECTIONS */
313 0, /* OMP_CLAUSE_TASKGROUP */
314 1, /* OMP_CLAUSE_PRIORITY */
315 1, /* OMP_CLAUSE_GRAINSIZE */
316 1, /* OMP_CLAUSE_NUM_TASKS */
317 0, /* OMP_CLAUSE_NOGROUP */
318 0, /* OMP_CLAUSE_THREADS */
319 0, /* OMP_CLAUSE_SIMD */
320 1, /* OMP_CLAUSE_HINT */
321 0, /* OMP_CLAUSE_DEFAULTMAP */
322 1, /* OMP_CLAUSE__SIMDUID_ */
323 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
324 0, /* OMP_CLAUSE_INDEPENDENT */
325 1, /* OMP_CLAUSE_WORKER */
326 1, /* OMP_CLAUSE_VECTOR */
327 1, /* OMP_CLAUSE_NUM_GANGS */
328 1, /* OMP_CLAUSE_NUM_WORKERS */
329 1, /* OMP_CLAUSE_VECTOR_LENGTH */
330 1, /* OMP_CLAUSE_TILE */
331 2, /* OMP_CLAUSE__GRIDDIM_ */
332 };
333
334 const char * const omp_clause_code_name[] =
335 {
336 "error_clause",
337 "private",
338 "shared",
339 "firstprivate",
340 "lastprivate",
341 "reduction",
342 "copyin",
343 "copyprivate",
344 "linear",
345 "aligned",
346 "depend",
347 "uniform",
348 "to",
349 "link",
350 "from",
351 "to",
352 "map",
353 "use_device_ptr",
354 "is_device_ptr",
355 "_cache_",
356 "device_resident",
357 "gang",
358 "async",
359 "wait",
360 "auto",
361 "seq",
362 "_looptemp_",
363 "if",
364 "num_threads",
365 "schedule",
366 "nowait",
367 "ordered",
368 "default",
369 "collapse",
370 "untied",
371 "final",
372 "mergeable",
373 "device",
374 "dist_schedule",
375 "inbranch",
376 "notinbranch",
377 "num_teams",
378 "thread_limit",
379 "proc_bind",
380 "safelen",
381 "simdlen",
382 "for",
383 "parallel",
384 "sections",
385 "taskgroup",
386 "priority",
387 "grainsize",
388 "num_tasks",
389 "nogroup",
390 "threads",
391 "simd",
392 "hint",
393 "defaultmap",
394 "_simduid_",
395 "_Cilk_for_count_",
396 "independent",
397 "worker",
398 "vector",
399 "num_gangs",
400 "num_workers",
401 "vector_length",
402 "tile",
403 "_griddim_"
404 };
405
406
407 /* Return the tree node structure used by tree code CODE. */
408
409 static inline enum tree_node_structure_enum
410 tree_node_structure_for_code (enum tree_code code)
411 {
412 switch (TREE_CODE_CLASS (code))
413 {
414 case tcc_declaration:
415 {
416 switch (code)
417 {
418 case FIELD_DECL:
419 return TS_FIELD_DECL;
420 case PARM_DECL:
421 return TS_PARM_DECL;
422 case VAR_DECL:
423 return TS_VAR_DECL;
424 case LABEL_DECL:
425 return TS_LABEL_DECL;
426 case RESULT_DECL:
427 return TS_RESULT_DECL;
428 case DEBUG_EXPR_DECL:
429 return TS_DECL_WRTL;
430 case CONST_DECL:
431 return TS_CONST_DECL;
432 case TYPE_DECL:
433 return TS_TYPE_DECL;
434 case FUNCTION_DECL:
435 return TS_FUNCTION_DECL;
436 case TRANSLATION_UNIT_DECL:
437 return TS_TRANSLATION_UNIT_DECL;
438 default:
439 return TS_DECL_NON_COMMON;
440 }
441 }
442 case tcc_type:
443 return TS_TYPE_NON_COMMON;
444 case tcc_reference:
445 case tcc_comparison:
446 case tcc_unary:
447 case tcc_binary:
448 case tcc_expression:
449 case tcc_statement:
450 case tcc_vl_exp:
451 return TS_EXP;
452 default: /* tcc_constant and tcc_exceptional */
453 break;
454 }
455 switch (code)
456 {
457 /* tcc_constant cases. */
458 case VOID_CST: return TS_TYPED;
459 case INTEGER_CST: return TS_INT_CST;
460 case REAL_CST: return TS_REAL_CST;
461 case FIXED_CST: return TS_FIXED_CST;
462 case COMPLEX_CST: return TS_COMPLEX;
463 case VECTOR_CST: return TS_VECTOR;
464 case STRING_CST: return TS_STRING;
465 /* tcc_exceptional cases. */
466 case ERROR_MARK: return TS_COMMON;
467 case IDENTIFIER_NODE: return TS_IDENTIFIER;
468 case TREE_LIST: return TS_LIST;
469 case TREE_VEC: return TS_VEC;
470 case SSA_NAME: return TS_SSA_NAME;
471 case PLACEHOLDER_EXPR: return TS_COMMON;
472 case STATEMENT_LIST: return TS_STATEMENT_LIST;
473 case BLOCK: return TS_BLOCK;
474 case CONSTRUCTOR: return TS_CONSTRUCTOR;
475 case TREE_BINFO: return TS_BINFO;
476 case OMP_CLAUSE: return TS_OMP_CLAUSE;
477 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
478 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
479
480 default:
481 gcc_unreachable ();
482 }
483 }
484
485
486 /* Initialize tree_contains_struct to describe the hierarchy of tree
487 nodes. */
488
489 static void
490 initialize_tree_contains_struct (void)
491 {
492 unsigned i;
493
494 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
495 {
496 enum tree_code code;
497 enum tree_node_structure_enum ts_code;
498
499 code = (enum tree_code) i;
500 ts_code = tree_node_structure_for_code (code);
501
502 /* Mark the TS structure itself. */
503 tree_contains_struct[code][ts_code] = 1;
504
505 /* Mark all the structures that TS is derived from. */
506 switch (ts_code)
507 {
508 case TS_TYPED:
509 case TS_BLOCK:
510 MARK_TS_BASE (code);
511 break;
512
513 case TS_COMMON:
514 case TS_INT_CST:
515 case TS_REAL_CST:
516 case TS_FIXED_CST:
517 case TS_VECTOR:
518 case TS_STRING:
519 case TS_COMPLEX:
520 case TS_SSA_NAME:
521 case TS_CONSTRUCTOR:
522 case TS_EXP:
523 case TS_STATEMENT_LIST:
524 MARK_TS_TYPED (code);
525 break;
526
527 case TS_IDENTIFIER:
528 case TS_DECL_MINIMAL:
529 case TS_TYPE_COMMON:
530 case TS_LIST:
531 case TS_VEC:
532 case TS_BINFO:
533 case TS_OMP_CLAUSE:
534 case TS_OPTIMIZATION:
535 case TS_TARGET_OPTION:
536 MARK_TS_COMMON (code);
537 break;
538
539 case TS_TYPE_WITH_LANG_SPECIFIC:
540 MARK_TS_TYPE_COMMON (code);
541 break;
542
543 case TS_TYPE_NON_COMMON:
544 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
545 break;
546
547 case TS_DECL_COMMON:
548 MARK_TS_DECL_MINIMAL (code);
549 break;
550
551 case TS_DECL_WRTL:
552 case TS_CONST_DECL:
553 MARK_TS_DECL_COMMON (code);
554 break;
555
556 case TS_DECL_NON_COMMON:
557 MARK_TS_DECL_WITH_VIS (code);
558 break;
559
560 case TS_DECL_WITH_VIS:
561 case TS_PARM_DECL:
562 case TS_LABEL_DECL:
563 case TS_RESULT_DECL:
564 MARK_TS_DECL_WRTL (code);
565 break;
566
567 case TS_FIELD_DECL:
568 MARK_TS_DECL_COMMON (code);
569 break;
570
571 case TS_VAR_DECL:
572 MARK_TS_DECL_WITH_VIS (code);
573 break;
574
575 case TS_TYPE_DECL:
576 case TS_FUNCTION_DECL:
577 MARK_TS_DECL_NON_COMMON (code);
578 break;
579
580 case TS_TRANSLATION_UNIT_DECL:
581 MARK_TS_DECL_COMMON (code);
582 break;
583
584 default:
585 gcc_unreachable ();
586 }
587 }
588
589 /* Basic consistency checks for attributes used in fold. */
590 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
591 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
592 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
593 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
594 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
601 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
602 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
603 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
606 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
607 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
608 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
615 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
616 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
617 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
618 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
619 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
620 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
621 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
622 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
623 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
624 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
625 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
626 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
627 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
629 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
630 }
631
632
633 /* Init tree.c. */
634
635 void
636 init_ttree (void)
637 {
638 /* Initialize the hash table of types. */
639 type_hash_table
640 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
641
642 debug_expr_for_decl
643 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
644
645 value_expr_for_decl
646 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
647
648 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
649
650 int_cst_node = make_int_cst (1, 1);
651
652 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
653
654 cl_optimization_node = make_node (OPTIMIZATION_NODE);
655 cl_target_option_node = make_node (TARGET_OPTION_NODE);
656
657 /* Initialize the tree_contains_struct array. */
658 initialize_tree_contains_struct ();
659 lang_hooks.init_ts ();
660 }
661
662 \f
663 /* The name of the object as the assembler will see it (but before any
664 translations made by ASM_OUTPUT_LABELREF). Often this is the same
665 as DECL_NAME. It is an IDENTIFIER_NODE. */
666 tree
667 decl_assembler_name (tree decl)
668 {
669 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
670 lang_hooks.set_decl_assembler_name (decl);
671 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
672 }
673
674 /* When the target supports COMDAT groups, this indicates which group the
675 DECL is associated with. This can be either an IDENTIFIER_NODE or a
676 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
677 tree
678 decl_comdat_group (const_tree node)
679 {
680 struct symtab_node *snode = symtab_node::get (node);
681 if (!snode)
682 return NULL;
683 return snode->get_comdat_group ();
684 }
685
686 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
687 tree
688 decl_comdat_group_id (const_tree node)
689 {
690 struct symtab_node *snode = symtab_node::get (node);
691 if (!snode)
692 return NULL;
693 return snode->get_comdat_group_id ();
694 }
695
696 /* When the target supports named sections, return the name of the section
697 NODE is placed in (as a string), or NULL if it is in no section. */
698 const char *
699 decl_section_name (const_tree node)
700 {
701 struct symtab_node *snode = symtab_node::get (node);
702 if (!snode)
703 return NULL;
704 return snode->get_section ();
705 }
706
707 /* Set the section name of NODE to the string VALUE, or clear it
708 when VALUE is NULL. */
709 void
710 set_decl_section_name (tree node, const char *value)
711 {
712 struct symtab_node *snode;
713
714 if (value == NULL)
715 {
716 snode = symtab_node::get (node);
717 if (!snode)
718 return;
719 }
720 else if (TREE_CODE (node) == VAR_DECL)
721 snode = varpool_node::get_create (node);
722 else
723 snode = cgraph_node::get_create (node);
724 snode->set_section (value);
725 }
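/* Illustrative usage: set_decl_section_name (decl, ".my_section"), with
   ".my_section" a hypothetical section name, creates the symtab node for
   DECL if needed and records the section, whereas passing NULL only
   clears the section of an already-existing symtab node. */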
726
727 /* Return the TLS model of variable NODE. */
728 enum tls_model
729 decl_tls_model (const_tree node)
730 {
731 struct varpool_node *snode = varpool_node::get (node);
732 if (!snode)
733 return TLS_MODEL_NONE;
734 return snode->tls_model;
735 }
736
737 /* Set TLS model of variable NODE to MODEL. */
738 void
739 set_decl_tls_model (tree node, enum tls_model model)
740 {
741 struct varpool_node *vnode;
742
743 if (model == TLS_MODEL_NONE)
744 {
745 vnode = varpool_node::get (node);
746 if (!vnode)
747 return;
748 }
749 else
750 vnode = varpool_node::get_create (node);
751 vnode->tls_model = model;
752 }
753
754 /* Compute the number of bytes occupied by a tree with code CODE.
755 This function cannot be used for nodes that have variable sizes,
756 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
757 size_t
758 tree_code_size (enum tree_code code)
759 {
760 switch (TREE_CODE_CLASS (code))
761 {
762 case tcc_declaration: /* A decl node */
763 {
764 switch (code)
765 {
766 case FIELD_DECL:
767 return sizeof (struct tree_field_decl);
768 case PARM_DECL:
769 return sizeof (struct tree_parm_decl);
770 case VAR_DECL:
771 return sizeof (struct tree_var_decl);
772 case LABEL_DECL:
773 return sizeof (struct tree_label_decl);
774 case RESULT_DECL:
775 return sizeof (struct tree_result_decl);
776 case CONST_DECL:
777 return sizeof (struct tree_const_decl);
778 case TYPE_DECL:
779 return sizeof (struct tree_type_decl);
780 case FUNCTION_DECL:
781 return sizeof (struct tree_function_decl);
782 case DEBUG_EXPR_DECL:
783 return sizeof (struct tree_decl_with_rtl);
784 case TRANSLATION_UNIT_DECL:
785 return sizeof (struct tree_translation_unit_decl);
786 case NAMESPACE_DECL:
787 case IMPORTED_DECL:
788 case NAMELIST_DECL:
789 return sizeof (struct tree_decl_non_common);
790 default:
791 return lang_hooks.tree_size (code);
792 }
793 }
794
795 case tcc_type: /* a type node */
796 return sizeof (struct tree_type_non_common);
797
798 case tcc_reference: /* a reference */
799 case tcc_expression: /* an expression */
800 case tcc_statement: /* an expression with side effects */
801 case tcc_comparison: /* a comparison expression */
802 case tcc_unary: /* a unary arithmetic expression */
803 case tcc_binary: /* a binary arithmetic expression */
804 return (sizeof (struct tree_exp)
805 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
806
807 case tcc_constant: /* a constant */
808 switch (code)
809 {
810 case VOID_CST: return sizeof (struct tree_typed);
811 case INTEGER_CST: gcc_unreachable ();
812 case REAL_CST: return sizeof (struct tree_real_cst);
813 case FIXED_CST: return sizeof (struct tree_fixed_cst);
814 case COMPLEX_CST: return sizeof (struct tree_complex);
815 case VECTOR_CST: return sizeof (struct tree_vector);
816 case STRING_CST: gcc_unreachable ();
817 default:
818 return lang_hooks.tree_size (code);
819 }
820
821 case tcc_exceptional: /* something random, like an identifier. */
822 switch (code)
823 {
824 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
825 case TREE_LIST: return sizeof (struct tree_list);
826
827 case ERROR_MARK:
828 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
829
830 case TREE_VEC:
831 case OMP_CLAUSE: gcc_unreachable ();
832
833 case SSA_NAME: return sizeof (struct tree_ssa_name);
834
835 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
836 case BLOCK: return sizeof (struct tree_block);
837 case CONSTRUCTOR: return sizeof (struct tree_constructor);
838 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
839 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
840
841 default:
842 return lang_hooks.tree_size (code);
843 }
844
845 default:
846 gcc_unreachable ();
847 }
848 }
849
850 /* Compute the number of bytes occupied by NODE. This routine only
851 looks at TREE_CODE, except for those nodes that have variable sizes. */
852 size_t
853 tree_size (const_tree node)
854 {
855 const enum tree_code code = TREE_CODE (node);
856 switch (code)
857 {
858 case INTEGER_CST:
859 return (sizeof (struct tree_int_cst)
860 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
861
862 case TREE_BINFO:
863 return (offsetof (struct tree_binfo, base_binfos)
864 + vec<tree, va_gc>
865 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
866
867 case TREE_VEC:
868 return (sizeof (struct tree_vec)
869 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
870
871 case VECTOR_CST:
872 return (sizeof (struct tree_vector)
873 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
874
875 case STRING_CST:
876 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
877
878 case OMP_CLAUSE:
879 return (sizeof (struct tree_omp_clause)
880 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
881 * sizeof (tree));
882
883 default:
884 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
885 return (sizeof (struct tree_exp)
886 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
887 else
888 return tree_code_size (code);
889 }
890 }
891
892 /* Record interesting allocation statistics for a tree node with CODE
893 and LENGTH. */
894
895 static void
896 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
897 size_t length ATTRIBUTE_UNUSED)
898 {
899 enum tree_code_class type = TREE_CODE_CLASS (code);
900 tree_node_kind kind;
901
902 if (!GATHER_STATISTICS)
903 return;
904
905 switch (type)
906 {
907 case tcc_declaration: /* A decl node */
908 kind = d_kind;
909 break;
910
911 case tcc_type: /* a type node */
912 kind = t_kind;
913 break;
914
915 case tcc_statement: /* an expression with side effects */
916 kind = s_kind;
917 break;
918
919 case tcc_reference: /* a reference */
920 kind = r_kind;
921 break;
922
923 case tcc_expression: /* an expression */
924 case tcc_comparison: /* a comparison expression */
925 case tcc_unary: /* a unary arithmetic expression */
926 case tcc_binary: /* a binary arithmetic expression */
927 kind = e_kind;
928 break;
929
930 case tcc_constant: /* a constant */
931 kind = c_kind;
932 break;
933
934 case tcc_exceptional: /* something random, like an identifier. */
935 switch (code)
936 {
937 case IDENTIFIER_NODE:
938 kind = id_kind;
939 break;
940
941 case TREE_VEC:
942 kind = vec_kind;
943 break;
944
945 case TREE_BINFO:
946 kind = binfo_kind;
947 break;
948
949 case SSA_NAME:
950 kind = ssa_name_kind;
951 break;
952
953 case BLOCK:
954 kind = b_kind;
955 break;
956
957 case CONSTRUCTOR:
958 kind = constr_kind;
959 break;
960
961 case OMP_CLAUSE:
962 kind = omp_clause_kind;
963 break;
964
965 default:
966 kind = x_kind;
967 break;
968 }
969 break;
970
971 case tcc_vl_exp:
972 kind = e_kind;
973 break;
974
975 default:
976 gcc_unreachable ();
977 }
978
979 tree_code_counts[(int) code]++;
980 tree_node_counts[(int) kind]++;
981 tree_node_sizes[(int) kind] += length;
982 }
983
984 /* Allocate and return a new UID from the DECL_UID namespace. */
985
986 int
987 allocate_decl_uid (void)
988 {
989 return next_decl_uid++;
990 }
991
992 /* Return a newly allocated node of code CODE. For decl and type
993 nodes, some other fields are initialized. The rest of the node is
994 initialized to zero. This function cannot be used for TREE_VEC,
995 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
996 tree_code_size.
997
998 Achoo! I got a code in the node. */
999
1000 tree
1001 make_node_stat (enum tree_code code MEM_STAT_DECL)
1002 {
1003 tree t;
1004 enum tree_code_class type = TREE_CODE_CLASS (code);
1005 size_t length = tree_code_size (code);
1006
1007 record_node_allocation_statistics (code, length);
1008
1009 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1010 TREE_SET_CODE (t, code);
1011
1012 switch (type)
1013 {
1014 case tcc_statement:
1015 TREE_SIDE_EFFECTS (t) = 1;
1016 break;
1017
1018 case tcc_declaration:
1019 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1020 {
1021 if (code == FUNCTION_DECL)
1022 {
1023 SET_DECL_ALIGN (t, FUNCTION_BOUNDARY);
1024 DECL_MODE (t) = FUNCTION_MODE;
1025 }
1026 else
1027 SET_DECL_ALIGN (t, 1);
1028 }
1029 DECL_SOURCE_LOCATION (t) = input_location;
1030 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1031 DECL_UID (t) = --next_debug_decl_uid;
1032 else
1033 {
1034 DECL_UID (t) = allocate_decl_uid ();
1035 SET_DECL_PT_UID (t, -1);
1036 }
1037 if (TREE_CODE (t) == LABEL_DECL)
1038 LABEL_DECL_UID (t) = -1;
1039
1040 break;
1041
1042 case tcc_type:
1043 TYPE_UID (t) = next_type_uid++;
1044 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1045 TYPE_USER_ALIGN (t) = 0;
1046 TYPE_MAIN_VARIANT (t) = t;
1047 TYPE_CANONICAL (t) = t;
1048
1049 /* Default to no attributes for type, but let target change that. */
1050 TYPE_ATTRIBUTES (t) = NULL_TREE;
1051 targetm.set_default_type_attributes (t);
1052
1053 /* We have not yet computed the alias set for this type. */
1054 TYPE_ALIAS_SET (t) = -1;
1055 break;
1056
1057 case tcc_constant:
1058 TREE_CONSTANT (t) = 1;
1059 break;
1060
1061 case tcc_expression:
1062 switch (code)
1063 {
1064 case INIT_EXPR:
1065 case MODIFY_EXPR:
1066 case VA_ARG_EXPR:
1067 case PREDECREMENT_EXPR:
1068 case PREINCREMENT_EXPR:
1069 case POSTDECREMENT_EXPR:
1070 case POSTINCREMENT_EXPR:
1071 /* All of these have side-effects, no matter what their
1072 operands are. */
1073 TREE_SIDE_EFFECTS (t) = 1;
1074 break;
1075
1076 default:
1077 break;
1078 }
1079 break;
1080
1081 case tcc_exceptional:
1082 switch (code)
1083 {
1084 case TARGET_OPTION_NODE:
1085 TREE_TARGET_OPTION(t)
1086 = ggc_cleared_alloc<struct cl_target_option> ();
1087 break;
1088
1089 case OPTIMIZATION_NODE:
1090 TREE_OPTIMIZATION (t)
1091 = ggc_cleared_alloc<struct cl_optimization> ();
1092 break;
1093
1094 default:
1095 break;
1096 }
1097 break;
1098
1099 default:
1100 /* Other classes need no special treatment. */
1101 break;
1102 }
1103
1104 return t;
1105 }
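/* Illustrative usage: make_node (FUNCTION_DECL) returns a zero-initialized
   FUNCTION_DECL whose DECL_ALIGN is FUNCTION_BOUNDARY, whose DECL_MODE is
   FUNCTION_MODE and which carries a fresh DECL_UID, per the
   tcc_declaration case above. */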
1106
1107 /* Free tree node. */
1108
1109 void
1110 free_node (tree node)
1111 {
1112 enum tree_code code = TREE_CODE (node);
1113 if (GATHER_STATISTICS)
1114 {
1115 tree_code_counts[(int) TREE_CODE (node)]--;
1116 tree_node_counts[(int) t_kind]--;
1117 tree_node_sizes[(int) t_kind] -= tree_size (node);
1118 }
1119 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1120 vec_free (CONSTRUCTOR_ELTS (node));
1121 else if (code == BLOCK)
1122 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1123 else if (code == TREE_BINFO)
1124 vec_free (BINFO_BASE_ACCESSES (node));
1125 ggc_free (node);
1126 }
1127 \f
1128 /* Return a new node with the same contents as NODE except that its
1129 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1130
1131 tree
1132 copy_node_stat (tree node MEM_STAT_DECL)
1133 {
1134 tree t;
1135 enum tree_code code = TREE_CODE (node);
1136 size_t length;
1137
1138 gcc_assert (code != STATEMENT_LIST);
1139
1140 length = tree_size (node);
1141 record_node_allocation_statistics (code, length);
1142 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1143 memcpy (t, node, length);
1144
1145 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1146 TREE_CHAIN (t) = 0;
1147 TREE_ASM_WRITTEN (t) = 0;
1148 TREE_VISITED (t) = 0;
1149
1150 if (TREE_CODE_CLASS (code) == tcc_declaration)
1151 {
1152 if (code == DEBUG_EXPR_DECL)
1153 DECL_UID (t) = --next_debug_decl_uid;
1154 else
1155 {
1156 DECL_UID (t) = allocate_decl_uid ();
1157 if (DECL_PT_UID_SET_P (node))
1158 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1159 }
1160 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1161 && DECL_HAS_VALUE_EXPR_P (node))
1162 {
1163 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1164 DECL_HAS_VALUE_EXPR_P (t) = 1;
1165 }
1166 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1167 if (TREE_CODE (node) == VAR_DECL)
1168 {
1169 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1170 t->decl_with_vis.symtab_node = NULL;
1171 }
1172 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1173 {
1174 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1175 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1176 }
1177 if (TREE_CODE (node) == FUNCTION_DECL)
1178 {
1179 DECL_STRUCT_FUNCTION (t) = NULL;
1180 t->decl_with_vis.symtab_node = NULL;
1181 }
1182 }
1183 else if (TREE_CODE_CLASS (code) == tcc_type)
1184 {
1185 TYPE_UID (t) = next_type_uid++;
1186 /* The following is so that the debug code for
1187 the copy is different from the original type.
1188 The two statements usually duplicate each other
1189 (because they clear fields of the same union),
1190 but the optimizer should catch that. */
1191 TYPE_SYMTAB_POINTER (t) = 0;
1192 TYPE_SYMTAB_ADDRESS (t) = 0;
1193
1194 /* Do not copy the values cache. */
1195 if (TYPE_CACHED_VALUES_P (t))
1196 {
1197 TYPE_CACHED_VALUES_P (t) = 0;
1198 TYPE_CACHED_VALUES (t) = NULL_TREE;
1199 }
1200 }
1201 else if (code == TARGET_OPTION_NODE)
1202 {
1203 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1204 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1205 sizeof (struct cl_target_option));
1206 }
1207 else if (code == OPTIMIZATION_NODE)
1208 {
1209 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1210 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1211 sizeof (struct cl_optimization));
1212 }
1213
1214 return t;
1215 }
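/* Illustrative note: copying a VAR_DECL with copy_node yields a node with
   a fresh DECL_UID, DECL_HAS_DEBUG_EXPR_P cleared and no associated
   symtab node, as set up in the tcc_declaration handling above. */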
1216
1217 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1218 For example, this can copy a list made of TREE_LIST nodes. */
1219
1220 tree
1221 copy_list (tree list)
1222 {
1223 tree head;
1224 tree prev, next;
1225
1226 if (list == 0)
1227 return 0;
1228
1229 head = prev = copy_node (list);
1230 next = TREE_CHAIN (list);
1231 while (next)
1232 {
1233 TREE_CHAIN (prev) = copy_node (next);
1234 prev = TREE_CHAIN (prev);
1235 next = TREE_CHAIN (next);
1236 }
1237 return head;
1238 }
1239
1240 \f
1241 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1242 INTEGER_CST with value CST and type TYPE. */
1243
1244 static unsigned int
1245 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1246 {
1247 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1248 /* We need extra HWIs if CST is an unsigned integer with its
1249 upper bit set. */
1250 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1251 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1252 return cst.get_len ();
1253 }
1254
1255 /* Return a new INTEGER_CST with value CST and type TYPE. */
1256
1257 static tree
1258 build_new_int_cst (tree type, const wide_int &cst)
1259 {
1260 unsigned int len = cst.get_len ();
1261 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1262 tree nt = make_int_cst (len, ext_len);
1263
1264 if (len < ext_len)
1265 {
1266 --ext_len;
1267 TREE_INT_CST_ELT (nt, ext_len)
1268 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1269 for (unsigned int i = len; i < ext_len; ++i)
1270 TREE_INT_CST_ELT (nt, i) = -1;
1271 }
1272 else if (TYPE_UNSIGNED (type)
1273 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1274 {
1275 len--;
1276 TREE_INT_CST_ELT (nt, len)
1277 = zext_hwi (cst.elt (len),
1278 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1279 }
1280
1281 for (unsigned int i = 0; i < len; i++)
1282 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1283 TREE_TYPE (nt) = type;
1284 return nt;
1285 }
1286
1287 /* Create an INT_CST node with value LOW sign-extended to TYPE. */
1288
1289 tree
1290 build_int_cst (tree type, HOST_WIDE_INT low)
1291 {
1292 /* Support legacy code. */
1293 if (!type)
1294 type = integer_type_node;
1295
1296 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1297 }
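/* Illustrative usage: build_int_cst (integer_type_node, 42) returns a
   shared INTEGER_CST of type int with value 42; sharing is handled by
   wide_int_to_tree below. */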
1298
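/* Create an INT_CST node of TYPE with the unsigned HOST_WIDE_INT
   value CST. */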
1299 tree
1300 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1301 {
1302 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1303 }
1304
1305 /* Create an INT_CST node with value LOW sign-extended to TYPE. */
1306
1307 tree
1308 build_int_cst_type (tree type, HOST_WIDE_INT low)
1309 {
1310 gcc_assert (type);
1311 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1312 }
1313
1314 /* Construct a tree of type TYPE with the value given by CST. Signedness
1315 of CST is assumed to be the same as the signedness of TYPE. */
1316
1317 tree
1318 double_int_to_tree (tree type, double_int cst)
1319 {
1320 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1321 }
1322
1323 /* We force the wide_int CST to the range of the type TYPE by sign or
1324 zero extending it. OVERFLOWABLE indicates if we are interested in
1325 overflow of the value, when >0 we are only interested in signed
1326 overflow, for <0 we are interested in any overflow. OVERFLOWED
1327 indicates whether overflow has already occurred. We force CST's
1328 value to be within the range of TYPE (by setting to 0 or 1 all
1329 the bits outside the type's range). We set TREE_OVERFLOW on the
1330 result if
1331 OVERFLOWED is nonzero,
1332 or OVERFLOWABLE is >0 and signed overflow occurs
1333 or OVERFLOWABLE is <0 and any overflow occurs
1334 We return a new tree node for the extended wide_int. The node
1335 is shared if no overflow flags are set. */
1336
1337
1338 tree
1339 force_fit_type (tree type, const wide_int_ref &cst,
1340 int overflowable, bool overflowed)
1341 {
1342 signop sign = TYPE_SIGN (type);
1343
1344 /* If we need to set overflow flags, return a new unshared node. */
1345 if (overflowed || !wi::fits_to_tree_p (cst, type))
1346 {
1347 if (overflowed
1348 || overflowable < 0
1349 || (overflowable > 0 && sign == SIGNED))
1350 {
1351 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1352 tree t = build_new_int_cst (type, tmp);
1353 TREE_OVERFLOW (t) = 1;
1354 return t;
1355 }
1356 }
1357
1358 /* Else build a shared node. */
1359 return wide_int_to_tree (type, cst);
1360 }
1361
1362 /* These are the hash table functions for the hash table of shared
1363 INTEGER_CST nodes. */
1364
1365 /* Return the hash code for X, an INTEGER_CST. */
1366
1367 hashval_t
1368 int_cst_hasher::hash (tree x)
1369 {
1370 const_tree const t = x;
1371 hashval_t code = TYPE_UID (TREE_TYPE (t));
1372 int i;
1373
1374 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1375 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1376
1377 return code;
1378 }
1379
1380 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1381 is the same as that represented by Y, also an INTEGER_CST. */
1382
1383 bool
1384 int_cst_hasher::equal (tree x, tree y)
1385 {
1386 const_tree const xt = x;
1387 const_tree const yt = y;
1388
1389 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1390 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1391 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1392 return false;
1393
1394 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1395 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1396 return false;
1397
1398 return true;
1399 }
1400
1401 /* Create an INT_CST node of TYPE and value CST.
1402 The returned node is always shared. For small integers we use a
1403 per-type vector cache, for larger ones we use a single hash table.
1404 The value is extended from its precision according to the sign of
1405 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1406 the upper bits and ensures that hashing and value equality based
1407 upon the underlying HOST_WIDE_INTs works without masking. */
1408
1409 tree
1410 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1411 {
1412 tree t;
1413 int ix = -1;
1414 int limit = 0;
1415
1416 gcc_assert (type);
1417 unsigned int prec = TYPE_PRECISION (type);
1418 signop sgn = TYPE_SIGN (type);
1419
1420 /* Verify that everything is canonical. */
1421 int l = pcst.get_len ();
1422 if (l > 1)
1423 {
1424 if (pcst.elt (l - 1) == 0)
1425 gcc_checking_assert (pcst.elt (l - 2) < 0);
1426 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1427 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1428 }
1429
1430 wide_int cst = wide_int::from (pcst, prec, sgn);
1431 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1432
1433 if (ext_len == 1)
1434 {
1435 /* We just need to store a single HOST_WIDE_INT. */
1436 HOST_WIDE_INT hwi;
1437 if (TYPE_UNSIGNED (type))
1438 hwi = cst.to_uhwi ();
1439 else
1440 hwi = cst.to_shwi ();
1441
1442 switch (TREE_CODE (type))
1443 {
1444 case NULLPTR_TYPE:
1445 gcc_assert (hwi == 0);
1446 /* Fallthru. */
1447
1448 case POINTER_TYPE:
1449 case REFERENCE_TYPE:
1450 case POINTER_BOUNDS_TYPE:
1451 /* Cache NULL pointer and zero bounds. */
1452 if (hwi == 0)
1453 {
1454 limit = 1;
1455 ix = 0;
1456 }
1457 break;
1458
1459 case BOOLEAN_TYPE:
1460 /* Cache false or true. */
1461 limit = 2;
1462 if (IN_RANGE (hwi, 0, 1))
1463 ix = hwi;
1464 break;
1465
1466 case INTEGER_TYPE:
1467 case OFFSET_TYPE:
1468 if (TYPE_SIGN (type) == UNSIGNED)
1469 {
1470 /* Cache [0, N). */
1471 limit = INTEGER_SHARE_LIMIT;
1472 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1473 ix = hwi;
1474 }
1475 else
1476 {
1477 /* Cache [-1, N). */
1478 limit = INTEGER_SHARE_LIMIT + 1;
1479 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1480 ix = hwi + 1;
1481 }
1482 break;
1483
1484 case ENUMERAL_TYPE:
1485 break;
1486
1487 default:
1488 gcc_unreachable ();
1489 }
1490
1491 if (ix >= 0)
1492 {
1493 /* Look for it in the type's vector of small shared ints. */
1494 if (!TYPE_CACHED_VALUES_P (type))
1495 {
1496 TYPE_CACHED_VALUES_P (type) = 1;
1497 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1498 }
1499
1500 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1501 if (t)
1502 /* Make sure no one is clobbering the shared constant. */
1503 gcc_checking_assert (TREE_TYPE (t) == type
1504 && TREE_INT_CST_NUNITS (t) == 1
1505 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1506 && TREE_INT_CST_EXT_NUNITS (t) == 1
1507 && TREE_INT_CST_ELT (t, 0) == hwi);
1508 else
1509 {
1510 /* Create a new shared int. */
1511 t = build_new_int_cst (type, cst);
1512 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1513 }
1514 }
1515 else
1516 {
1517 /* Use the cache of larger shared ints, using int_cst_node as
1518 a temporary. */
1519
1520 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1521 TREE_TYPE (int_cst_node) = type;
1522
1523 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1524 t = *slot;
1525 if (!t)
1526 {
1527 /* Insert this one into the hash table. */
1528 t = int_cst_node;
1529 *slot = t;
1530 /* Make a new node for next time round. */
1531 int_cst_node = make_int_cst (1, 1);
1532 }
1533 }
1534 }
1535 else
1536 {
1537 /* The value either hashes properly or we drop it on the floor
1538 for the gc to take care of. There will not be enough of them
1539 to worry about. */
1540
1541 tree nt = build_new_int_cst (type, cst);
1542 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1543 t = *slot;
1544 if (!t)
1545 {
1546 /* Insert this one into the hash table. */
1547 t = nt;
1548 *slot = t;
1549 }
1550 }
1551
1552 return t;
1553 }
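/* Illustrative consequence of the caching above: repeated calls such as
   build_int_cst (boolean_type_node, 1) return the same node, because
   BOOLEAN_TYPE caches the values 0 and 1 in TYPE_CACHED_VALUES; values
   outside a type's small-value cache are shared via int_cst_hash_table
   instead. */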
1554
1555 void
1556 cache_integer_cst (tree t)
1557 {
1558 tree type = TREE_TYPE (t);
1559 int ix = -1;
1560 int limit = 0;
1561 int prec = TYPE_PRECISION (type);
1562
1563 gcc_assert (!TREE_OVERFLOW (t));
1564
1565 switch (TREE_CODE (type))
1566 {
1567 case NULLPTR_TYPE:
1568 gcc_assert (integer_zerop (t));
1569 /* Fallthru. */
1570
1571 case POINTER_TYPE:
1572 case REFERENCE_TYPE:
1573 /* Cache NULL pointer. */
1574 if (integer_zerop (t))
1575 {
1576 limit = 1;
1577 ix = 0;
1578 }
1579 break;
1580
1581 case BOOLEAN_TYPE:
1582 /* Cache false or true. */
1583 limit = 2;
1584 if (wi::ltu_p (t, 2))
1585 ix = TREE_INT_CST_ELT (t, 0);
1586 break;
1587
1588 case INTEGER_TYPE:
1589 case OFFSET_TYPE:
1590 if (TYPE_UNSIGNED (type))
1591 {
1592 /* Cache 0..N */
1593 limit = INTEGER_SHARE_LIMIT;
1594
1595 /* This is a little hokey, but if the prec is smaller than
1596 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1597 obvious test will not get the correct answer. */
1598 if (prec < HOST_BITS_PER_WIDE_INT)
1599 {
1600 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1601 ix = tree_to_uhwi (t);
1602 }
1603 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1604 ix = tree_to_uhwi (t);
1605 }
1606 else
1607 {
1608 /* Cache -1..N */
1609 limit = INTEGER_SHARE_LIMIT + 1;
1610
1611 if (integer_minus_onep (t))
1612 ix = 0;
1613 else if (!wi::neg_p (t))
1614 {
1615 if (prec < HOST_BITS_PER_WIDE_INT)
1616 {
1617 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1618 ix = tree_to_shwi (t) + 1;
1619 }
1620 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1621 ix = tree_to_shwi (t) + 1;
1622 }
1623 }
1624 break;
1625
1626 case ENUMERAL_TYPE:
1627 break;
1628
1629 default:
1630 gcc_unreachable ();
1631 }
1632
1633 if (ix >= 0)
1634 {
1635 /* Look for it in the type's vector of small shared ints. */
1636 if (!TYPE_CACHED_VALUES_P (type))
1637 {
1638 TYPE_CACHED_VALUES_P (type) = 1;
1639 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1640 }
1641
1642 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1643 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1644 }
1645 else
1646 {
1647 /* Use the cache of larger shared ints. */
1648 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1649 /* If there is already an entry for the number verify it's the
1650 same. */
1651 if (*slot)
1652 gcc_assert (wi::eq_p (tree (*slot), t));
1653 else
1654 /* Otherwise insert this one into the hash table. */
1655 *slot = t;
1656 }
1657 }
1658
1659
1660 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1661 and the rest are zeros. */
1662
1663 tree
1664 build_low_bits_mask (tree type, unsigned bits)
1665 {
1666 gcc_assert (bits <= TYPE_PRECISION (type));
1667
1668 return wide_int_to_tree (type, wi::mask (bits, false,
1669 TYPE_PRECISION (type)));
1670 }
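/* For example, build_low_bits_mask (unsigned_type_node, 8) yields the
   constant 0xff of type unsigned int. */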
1671
1672 /* Checks that X is an integer constant that can be expressed in (unsigned)
1673 HOST_WIDE_INT without loss of precision. */
1674
1675 bool
1676 cst_and_fits_in_hwi (const_tree x)
1677 {
1678 return (TREE_CODE (x) == INTEGER_CST
1679 && TYPE_PRECISION (TREE_TYPE (x)) <= HOST_BITS_PER_WIDE_INT);
1680 }
1681
1682 /* Build a newly constructed VECTOR_CST node of length LEN. */
1683
1684 tree
1685 make_vector_stat (unsigned len MEM_STAT_DECL)
1686 {
1687 tree t;
1688 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1689
1690 record_node_allocation_statistics (VECTOR_CST, length);
1691
1692 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1693
1694 TREE_SET_CODE (t, VECTOR_CST);
1695 TREE_CONSTANT (t) = 1;
1696
1697 return t;
1698 }
1699
1700 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1701 are in a list pointed to by VALS. */
1702
1703 tree
1704 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1705 {
1706 int over = 0;
1707 unsigned cnt = 0;
1708 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1709 TREE_TYPE (v) = type;
1710
1711 /* Iterate through elements and check for overflow. */
1712 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1713 {
1714 tree value = vals[cnt];
1715
1716 VECTOR_CST_ELT (v, cnt) = value;
1717
1718 /* Don't crash if we get an address constant. */
1719 if (!CONSTANT_CLASS_P (value))
1720 continue;
1721
1722 over |= TREE_OVERFLOW (value);
1723 }
1724
1725 TREE_OVERFLOW (v) = over;
1726 return v;
1727 }
1728
1729 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1730 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1731
1732 tree
1733 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1734 {
1735 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1736 unsigned HOST_WIDE_INT idx, pos = 0;
1737 tree value;
1738
1739 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1740 {
1741 if (TREE_CODE (value) == VECTOR_CST)
1742 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1743 vec[pos++] = VECTOR_CST_ELT (value, i);
1744 else
1745 vec[pos++] = value;
1746 }
1747 while (pos < TYPE_VECTOR_SUBPARTS (type))
1748 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1749
1750 return build_vector (type, vec);
1751 }
1752
1753 /* Build a vector of type VECTYPE where every element is SC. */
1754 tree
1755 build_vector_from_val (tree vectype, tree sc)
1756 {
1757 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1758
1759 if (sc == error_mark_node)
1760 return sc;
1761
1762 /* Verify that the vector type is suitable for SC. Note that there
1763 is some inconsistency in the type-system with respect to restrict
1764 qualifications of pointers. Vector types always have a main-variant
1765 element type and the qualification is applied to the vector-type.
1766 So TREE_TYPE (vector-type) does not return a properly qualified
1767 vector element-type. */
1768 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1769 TREE_TYPE (vectype)));
1770
1771 if (CONSTANT_CLASS_P (sc))
1772 {
1773 tree *v = XALLOCAVEC (tree, nunits);
1774 for (i = 0; i < nunits; ++i)
1775 v[i] = sc;
1776 return build_vector (vectype, v);
1777 }
1778 else
1779 {
1780 vec<constructor_elt, va_gc> *v;
1781 vec_alloc (v, nunits);
1782 for (i = 0; i < nunits; ++i)
1783 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1784 return build_constructor (vectype, v);
1785 }
1786 }
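/* Illustrative note: if SC is a constant (say an INTEGER_CST), the result
   is a VECTOR_CST whose elements are all SC; otherwise (e.g. SC is an
   SSA_NAME) a CONSTRUCTOR holding one copy of SC per vector element is
   returned. */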
1787
1788 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1789 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1790
1791 void
1792 recompute_constructor_flags (tree c)
1793 {
1794 unsigned int i;
1795 tree val;
1796 bool constant_p = true;
1797 bool side_effects_p = false;
1798 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1799
1800 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1801 {
1802 /* Mostly ctors will have elts that don't have side-effects, so
1803 the usual case is to scan all the elements. Hence a single
1804 loop for both const and side effects, rather than one loop
1805 each (with early outs). */
1806 if (!TREE_CONSTANT (val))
1807 constant_p = false;
1808 if (TREE_SIDE_EFFECTS (val))
1809 side_effects_p = true;
1810 }
1811
1812 TREE_SIDE_EFFECTS (c) = side_effects_p;
1813 TREE_CONSTANT (c) = constant_p;
1814 }
1815
1816 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1817 CONSTRUCTOR C. */
1818
1819 void
1820 verify_constructor_flags (tree c)
1821 {
1822 unsigned int i;
1823 tree val;
1824 bool constant_p = TREE_CONSTANT (c);
1825 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1826 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1827
1828 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1829 {
1830 if (constant_p && !TREE_CONSTANT (val))
1831 internal_error ("non-constant element in constant CONSTRUCTOR");
1832 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1833 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1834 }
1835 }
1836
1837 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1838 are in the vec pointed to by VALS. */
1839 tree
1840 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1841 {
1842 tree c = make_node (CONSTRUCTOR);
1843
1844 TREE_TYPE (c) = type;
1845 CONSTRUCTOR_ELTS (c) = vals;
1846
1847 recompute_constructor_flags (c);
1848
1849 return c;
1850 }
1851
1852 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1853 INDEX and VALUE. */
1854 tree
1855 build_constructor_single (tree type, tree index, tree value)
1856 {
1857 vec<constructor_elt, va_gc> *v;
1858 constructor_elt elt = {index, value};
1859
1860 vec_alloc (v, 1);
1861 v->quick_push (elt);
1862
1863 return build_constructor (type, v);
1864 }
1865
1866
1867 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1868 are in a list pointed to by VALS. */
1869 tree
1870 build_constructor_from_list (tree type, tree vals)
1871 {
1872 tree t;
1873 vec<constructor_elt, va_gc> *v = NULL;
1874
1875 if (vals)
1876 {
1877 vec_alloc (v, list_length (vals));
1878 for (t = vals; t; t = TREE_CHAIN (t))
1879 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1880 }
1881
1882 return build_constructor (type, v);
1883 }
1884
1885 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1886 of elements, provided as index/value pairs. */
1887
1888 tree
1889 build_constructor_va (tree type, int nelts, ...)
1890 {
1891 vec<constructor_elt, va_gc> *v = NULL;
1892 va_list p;
1893
1894 va_start (p, nelts);
1895 vec_alloc (v, nelts);
1896 while (nelts--)
1897 {
1898 tree index = va_arg (p, tree);
1899 tree value = va_arg (p, tree);
1900 CONSTRUCTOR_APPEND_ELT (v, index, value);
1901 }
1902 va_end (p);
1903 return build_constructor (type, v);
1904 }
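/* Illustrative usage: build_constructor_va (type, 2, NULL_TREE, val0,
   NULL_TREE, val1), with val0/val1 standing for arbitrary value trees,
   builds a two-element CONSTRUCTOR whose entries carry no explicit
   index. */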
1905
1906 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1907
1908 tree
1909 build_fixed (tree type, FIXED_VALUE_TYPE f)
1910 {
1911 tree v;
1912 FIXED_VALUE_TYPE *fp;
1913
1914 v = make_node (FIXED_CST);
1915 fp = ggc_alloc<fixed_value> ();
1916 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1917
1918 TREE_TYPE (v) = type;
1919 TREE_FIXED_CST_PTR (v) = fp;
1920 return v;
1921 }
1922
1923 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1924
1925 tree
1926 build_real (tree type, REAL_VALUE_TYPE d)
1927 {
1928 tree v;
1929 REAL_VALUE_TYPE *dp;
1930 int overflow = 0;
1931
1932 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1933 Consider doing it via real_convert now. */
1934
1935 v = make_node (REAL_CST);
1936 dp = ggc_alloc<real_value> ();
1937 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1938
1939 TREE_TYPE (v) = type;
1940 TREE_REAL_CST_PTR (v) = dp;
1941 TREE_OVERFLOW (v) = overflow;
1942 return v;
1943 }
1944
1945 /* Like build_real, but first truncate D to the type. */
1946
1947 tree
1948 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1949 {
1950 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1951 }
1952
1953 /* Return a REAL_VALUE_TYPE holding the integer value of the
1954 INTEGER_CST node I, converted to the floating-point format of TYPE. */
1955
1956 REAL_VALUE_TYPE
1957 real_value_from_int_cst (const_tree type, const_tree i)
1958 {
1959 REAL_VALUE_TYPE d;
1960
1961 /* Clear all bits of the real value type so that we can later do
1962 bitwise comparisons to see if two values are the same. */
1963 memset (&d, 0, sizeof d);
1964
1965 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1966 TYPE_SIGN (TREE_TYPE (i)));
1967 return d;
1968 }
1969
1970 /* Given a tree representing an integer constant I, return a tree
1971 representing the same value as a floating-point constant of type TYPE. */
1972
1973 tree
1974 build_real_from_int_cst (tree type, const_tree i)
1975 {
1976 tree v;
1977 int overflow = TREE_OVERFLOW (i);
1978
1979 v = build_real (type, real_value_from_int_cst (type, i));
1980
1981 TREE_OVERFLOW (v) |= overflow;
1982 return v;
1983 }
1984
1985 /* Return a newly constructed STRING_CST node whose value is
1986 the LEN characters at STR.
1987 Note that for a C string literal, LEN should include the trailing NUL.
1988 The TREE_TYPE is not initialized. */
1989
1990 tree
1991 build_string (int len, const char *str)
1992 {
1993 tree s;
1994 size_t length;
1995
1996 /* Do not waste bytes provided by padding of struct tree_string. */
1997 length = len + offsetof (struct tree_string, str) + 1;
1998
1999 record_node_allocation_statistics (STRING_CST, length);
2000
2001 s = (tree) ggc_internal_alloc (length);
2002
2003 memset (s, 0, sizeof (struct tree_typed));
2004 TREE_SET_CODE (s, STRING_CST);
2005 TREE_CONSTANT (s) = 1;
2006 TREE_STRING_LENGTH (s) = len;
2007 memcpy (s->string.str, str, len);
2008 s->string.str[len] = '\0';
2009
2010 return s;
2011 }
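/* For example, the C string literal "abc" is represented by
   build_string (4, "abc"); LEN counts the trailing NUL and the caller is
   responsible for setting TREE_TYPE on the result. */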
2012
2013 /* Return a newly constructed COMPLEX_CST node whose value is
2014 specified by the real and imaginary parts REAL and IMAG.
2015 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2016 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2017
2018 tree
2019 build_complex (tree type, tree real, tree imag)
2020 {
2021 tree t = make_node (COMPLEX_CST);
2022
2023 TREE_REALPART (t) = real;
2024 TREE_IMAGPART (t) = imag;
2025 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2026 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2027 return t;
2028 }
2029
2030 /* Build a complex (inf +- 0i), such as for the result of cproj.
2031 TYPE is the complex tree type of the result. If NEG is true, the
2032 imaginary zero is negative. */
2033
2034 tree
2035 build_complex_inf (tree type, bool neg)
2036 {
2037 REAL_VALUE_TYPE rinf, rzero = dconst0;
2038
2039 real_inf (&rinf);
2040 rzero.sign = neg;
2041 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2042 build_real (TREE_TYPE (type), rzero));
2043 }
2044
2045 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2046 element is set to 1. In particular, this is 1 + i for complex types. */
2047
2048 tree
2049 build_each_one_cst (tree type)
2050 {
2051 if (TREE_CODE (type) == COMPLEX_TYPE)
2052 {
2053 tree scalar = build_one_cst (TREE_TYPE (type));
2054 return build_complex (type, scalar, scalar);
2055 }
2056 else
2057 return build_one_cst (type);
2058 }
2059
2060 /* Return a constant of arithmetic type TYPE which is the
2061 multiplicative identity of the set TYPE. */
2062
2063 tree
2064 build_one_cst (tree type)
2065 {
2066 switch (TREE_CODE (type))
2067 {
2068 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2069 case POINTER_TYPE: case REFERENCE_TYPE:
2070 case OFFSET_TYPE:
2071 return build_int_cst (type, 1);
2072
2073 case REAL_TYPE:
2074 return build_real (type, dconst1);
2075
2076 case FIXED_POINT_TYPE:
2077 /* We can only generate 1 for accum types. */
2078 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2079 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2080
2081 case VECTOR_TYPE:
2082 {
2083 tree scalar = build_one_cst (TREE_TYPE (type));
2084
2085 return build_vector_from_val (type, scalar);
2086 }
2087
2088 case COMPLEX_TYPE:
2089 return build_complex (type,
2090 build_one_cst (TREE_TYPE (type)),
2091 build_zero_cst (TREE_TYPE (type)));
2092
2093 default:
2094 gcc_unreachable ();
2095 }
2096 }
2097
2098 /* Return an integer of type TYPE containing all 1's in as much precision as
2099 it contains, or a complex or vector whose subparts are such integers. */
2100
2101 tree
2102 build_all_ones_cst (tree type)
2103 {
2104 if (TREE_CODE (type) == COMPLEX_TYPE)
2105 {
2106 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2107 return build_complex (type, scalar, scalar);
2108 }
2109 else
2110 return build_minus_one_cst (type);
2111 }
2112
2113 /* Return a constant of arithmetic type TYPE which is the
2114 opposite of the multiplicative identity of the set TYPE. */
2115
2116 tree
2117 build_minus_one_cst (tree type)
2118 {
2119 switch (TREE_CODE (type))
2120 {
2121 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2122 case POINTER_TYPE: case REFERENCE_TYPE:
2123 case OFFSET_TYPE:
2124 return build_int_cst (type, -1);
2125
2126 case REAL_TYPE:
2127 return build_real (type, dconstm1);
2128
2129 case FIXED_POINT_TYPE:
2130 /* We can only generate -1 for accum types. */
2131 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2132 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2133 TYPE_MODE (type)));
2134
2135 case VECTOR_TYPE:
2136 {
2137 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2138
2139 return build_vector_from_val (type, scalar);
2140 }
2141
2142 case COMPLEX_TYPE:
2143 return build_complex (type,
2144 build_minus_one_cst (TREE_TYPE (type)),
2145 build_zero_cst (TREE_TYPE (type)));
2146
2147 default:
2148 gcc_unreachable ();
2149 }
2150 }
2151
2152 /* Build 0 constant of type TYPE. This is used by constructor folding
2153 and thus the constant should be represented in memory by
2154 zero(es). */
2155
2156 tree
2157 build_zero_cst (tree type)
2158 {
2159 switch (TREE_CODE (type))
2160 {
2161 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2162 case POINTER_TYPE: case REFERENCE_TYPE:
2163 case OFFSET_TYPE: case NULLPTR_TYPE:
2164 return build_int_cst (type, 0);
2165
2166 case REAL_TYPE:
2167 return build_real (type, dconst0);
2168
2169 case FIXED_POINT_TYPE:
2170 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2171
2172 case VECTOR_TYPE:
2173 {
2174 tree scalar = build_zero_cst (TREE_TYPE (type));
2175
2176 return build_vector_from_val (type, scalar);
2177 }
2178
2179 case COMPLEX_TYPE:
2180 {
2181 tree zero = build_zero_cst (TREE_TYPE (type));
2182
2183 return build_complex (type, zero, zero);
2184 }
2185
2186 default:
2187 if (!AGGREGATE_TYPE_P (type))
2188 return fold_convert (type, integer_zero_node);
2189 return build_constructor (type, NULL);
2190 }
2191 }
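/* Editor's note (not part of the original source): the *_cst builders
   above handle scalar, complex and vector types uniformly.  A hedged
   sketch, assuming a 4-element SImode vector type built with
   build_vector_type (intSI_type_node, 4) and named v4si_type:

     tree ones  = build_one_cst (v4si_type);        yields {1, 1, 1, 1}
     tree zeros = build_zero_cst (v4si_type);       yields {0, 0, 0, 0}
     tree allon = build_all_ones_cst (v4si_type);   yields {-1, -1, -1, -1}
*/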
2192
2193
2194 /* Build a BINFO with LEN language slots. */
2195
2196 tree
2197 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2198 {
2199 tree t;
2200 size_t length = (offsetof (struct tree_binfo, base_binfos)
2201 + vec<tree, va_gc>::embedded_size (base_binfos));
2202
2203 record_node_allocation_statistics (TREE_BINFO, length);
2204
2205 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2206
2207 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2208
2209 TREE_SET_CODE (t, TREE_BINFO);
2210
2211 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2212
2213 return t;
2214 }
2215
2216 /* Create a CASE_LABEL_EXPR tree node and return it. */
2217
2218 tree
2219 build_case_label (tree low_value, tree high_value, tree label_decl)
2220 {
2221 tree t = make_node (CASE_LABEL_EXPR);
2222
2223 TREE_TYPE (t) = void_type_node;
2224 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2225
2226 CASE_LOW (t) = low_value;
2227 CASE_HIGH (t) = high_value;
2228 CASE_LABEL (t) = label_decl;
2229 CASE_CHAIN (t) = NULL_TREE;
2230
2231 return t;
2232 }
2233
2234 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2235 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2236 The latter determines the length of the HOST_WIDE_INT vector. */
2237
2238 tree
2239 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2240 {
2241 tree t;
2242 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2243 + sizeof (struct tree_int_cst));
2244
2245 gcc_assert (len);
2246 record_node_allocation_statistics (INTEGER_CST, length);
2247
2248 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2249
2250 TREE_SET_CODE (t, INTEGER_CST);
2251 TREE_INT_CST_NUNITS (t) = len;
2252 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2253 /* to_offset can only be applied to trees that are offset_int-sized
2254 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2255 must be exactly the precision of offset_int and so LEN is correct. */
2256 if (ext_len <= OFFSET_INT_ELTS)
2257 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2258 else
2259 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2260
2261 TREE_CONSTANT (t) = 1;
2262
2263 return t;
2264 }
2265
2266 /* Build a newly constructed TREE_VEC node of length LEN. */
2267
2268 tree
2269 make_tree_vec_stat (int len MEM_STAT_DECL)
2270 {
2271 tree t;
2272 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2273
2274 record_node_allocation_statistics (TREE_VEC, length);
2275
2276 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2277
2278 TREE_SET_CODE (t, TREE_VEC);
2279 TREE_VEC_LENGTH (t) = len;
2280
2281 return t;
2282 }
2283
2284 /* Grow a TREE_VEC node to new length LEN. */
2285
2286 tree
2287 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2288 {
2289 gcc_assert (TREE_CODE (v) == TREE_VEC);
2290
2291 int oldlen = TREE_VEC_LENGTH (v);
2292 gcc_assert (len > oldlen);
2293
2294 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2295 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2296
2297 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2298
2299 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2300
2301 TREE_VEC_LENGTH (v) = len;
2302
2303 return v;
2304 }
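/* Editor's note (not part of the original source): a hedged sketch of
   typical TREE_VEC usage through the make_tree_vec/grow_tree_vec
   wrappers around the *_stat functions above.  The element values are
   illustrative assumptions, and the caller is responsible for filling
   the slots added by grow_tree_vec.

     tree vec = make_tree_vec (2);
     TREE_VEC_ELT (vec, 0) = integer_zero_node;
     TREE_VEC_ELT (vec, 1) = integer_one_node;
     vec = grow_tree_vec (vec, 4);
     TREE_VEC_ELT (vec, 2) = integer_zero_node;
     TREE_VEC_ELT (vec, 3) = integer_one_node;
*/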
2305 \f
2306 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2307 fixed, and scalar, complex or vector. */
2308
2309 int
2310 zerop (const_tree expr)
2311 {
2312 return (integer_zerop (expr)
2313 || real_zerop (expr)
2314 || fixed_zerop (expr));
2315 }
2316
2317 /* Return 1 if EXPR is the integer constant zero or a complex constant
2318 of zero. */
2319
2320 int
2321 integer_zerop (const_tree expr)
2322 {
2323 switch (TREE_CODE (expr))
2324 {
2325 case INTEGER_CST:
2326 return wi::eq_p (expr, 0);
2327 case COMPLEX_CST:
2328 return (integer_zerop (TREE_REALPART (expr))
2329 && integer_zerop (TREE_IMAGPART (expr)));
2330 case VECTOR_CST:
2331 {
2332 unsigned i;
2333 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2334 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2335 return false;
2336 return true;
2337 }
2338 default:
2339 return false;
2340 }
2341 }
2342
2343 /* Return 1 if EXPR is the integer constant one or the corresponding
2344 complex constant. */
2345
2346 int
2347 integer_onep (const_tree expr)
2348 {
2349 switch (TREE_CODE (expr))
2350 {
2351 case INTEGER_CST:
2352 return wi::eq_p (wi::to_widest (expr), 1);
2353 case COMPLEX_CST:
2354 return (integer_onep (TREE_REALPART (expr))
2355 && integer_zerop (TREE_IMAGPART (expr)));
2356 case VECTOR_CST:
2357 {
2358 unsigned i;
2359 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2360 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2361 return false;
2362 return true;
2363 }
2364 default:
2365 return false;
2366 }
2367 }
2368
2369 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2370 return 1 if every piece is the integer constant one. */
2371
2372 int
2373 integer_each_onep (const_tree expr)
2374 {
2375 if (TREE_CODE (expr) == COMPLEX_CST)
2376 return (integer_onep (TREE_REALPART (expr))
2377 && integer_onep (TREE_IMAGPART (expr)));
2378 else
2379 return integer_onep (expr);
2380 }
2381
2382 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2383 it contains, or a complex or vector whose subparts are such integers. */
2384
2385 int
2386 integer_all_onesp (const_tree expr)
2387 {
2388 if (TREE_CODE (expr) == COMPLEX_CST
2389 && integer_all_onesp (TREE_REALPART (expr))
2390 && integer_all_onesp (TREE_IMAGPART (expr)))
2391 return 1;
2392
2393 else if (TREE_CODE (expr) == VECTOR_CST)
2394 {
2395 unsigned i;
2396 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2397 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2398 return 0;
2399 return 1;
2400 }
2401
2402 else if (TREE_CODE (expr) != INTEGER_CST)
2403 return 0;
2404
2405 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2406 }
2407
2408 /* Return 1 if EXPR is the integer constant minus one. */
2409
2410 int
2411 integer_minus_onep (const_tree expr)
2412 {
2413 if (TREE_CODE (expr) == COMPLEX_CST)
2414 return (integer_all_onesp (TREE_REALPART (expr))
2415 && integer_zerop (TREE_IMAGPART (expr)));
2416 else
2417 return integer_all_onesp (expr);
2418 }
2419
2420 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2421 one bit on). */
2422
2423 int
2424 integer_pow2p (const_tree expr)
2425 {
2426 if (TREE_CODE (expr) == COMPLEX_CST
2427 && integer_pow2p (TREE_REALPART (expr))
2428 && integer_zerop (TREE_IMAGPART (expr)))
2429 return 1;
2430
2431 if (TREE_CODE (expr) != INTEGER_CST)
2432 return 0;
2433
2434 return wi::popcount (expr) == 1;
2435 }
2436
2437 /* Return 1 if EXPR is an integer constant other than zero or a
2438 complex constant other than zero. */
2439
2440 int
2441 integer_nonzerop (const_tree expr)
2442 {
2443 return ((TREE_CODE (expr) == INTEGER_CST
2444 && !wi::eq_p (expr, 0))
2445 || (TREE_CODE (expr) == COMPLEX_CST
2446 && (integer_nonzerop (TREE_REALPART (expr))
2447 || integer_nonzerop (TREE_IMAGPART (expr)))));
2448 }
2449
2450 /* Return 1 if EXPR is the integer constant one. For vector,
2451 return 1 if every piece is the integer constant minus one
2452 (representing the value TRUE). */
2453
2454 int
2455 integer_truep (const_tree expr)
2456 {
2457 if (TREE_CODE (expr) == VECTOR_CST)
2458 return integer_all_onesp (expr);
2459 return integer_onep (expr);
2460 }
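/* Editor's note (not part of the original source): how the integer
   predicates above behave on a few illustrative constants.

     integer_zerop (build_int_cst (integer_type_node, 0))   returns 1
     integer_onep  (build_int_cst (integer_type_node, 1))   returns 1
     integer_pow2p (build_int_cst (integer_type_node, 8))   returns 1

   integer_truep is integer_onep for scalars, but for a vector constant
   it requires every element to be all-ones (the vector TRUE value).  */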
2461
2462 /* Return 1 if EXPR is the fixed-point constant zero. */
2463
2464 int
2465 fixed_zerop (const_tree expr)
2466 {
2467 return (TREE_CODE (expr) == FIXED_CST
2468 && TREE_FIXED_CST (expr).data.is_zero ());
2469 }
2470
2471 /* Return the power of two represented by a tree node known to be a
2472 power of two. */
2473
2474 int
2475 tree_log2 (const_tree expr)
2476 {
2477 if (TREE_CODE (expr) == COMPLEX_CST)
2478 return tree_log2 (TREE_REALPART (expr));
2479
2480 return wi::exact_log2 (expr);
2481 }
2482
2483 /* Similar, but return the largest integer Y such that 2 ** Y is less
2484 than or equal to EXPR. */
2485
2486 int
2487 tree_floor_log2 (const_tree expr)
2488 {
2489 if (TREE_CODE (expr) == COMPLEX_CST)
2490 return tree_log2 (TREE_REALPART (expr));
2491
2492 return wi::floor_log2 (expr);
2493 }
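/* Editor's note (not part of the original source): a worked example of
   the two log2 helpers above on illustrative constants.

     tree_log2       (build_int_cst (integer_type_node, 8))   == 3
     tree_floor_log2 (build_int_cst (integer_type_node, 10))  == 3

   since 2 ** 3 <= 10 < 2 ** 4; tree_log2 expects an exact power of two
   (wi::exact_log2 yields -1 otherwise).  */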
2494
2495 /* Return number of known trailing zero bits in EXPR, or, if the value of
2496 EXPR is known to be zero, the precision of its type. */
2497
2498 unsigned int
2499 tree_ctz (const_tree expr)
2500 {
2501 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2502 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2503 return 0;
2504
2505 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2506 switch (TREE_CODE (expr))
2507 {
2508 case INTEGER_CST:
2509 ret1 = wi::ctz (expr);
2510 return MIN (ret1, prec);
2511 case SSA_NAME:
2512 ret1 = wi::ctz (get_nonzero_bits (expr));
2513 return MIN (ret1, prec);
2514 case PLUS_EXPR:
2515 case MINUS_EXPR:
2516 case BIT_IOR_EXPR:
2517 case BIT_XOR_EXPR:
2518 case MIN_EXPR:
2519 case MAX_EXPR:
2520 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2521 if (ret1 == 0)
2522 return ret1;
2523 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2524 return MIN (ret1, ret2);
2525 case POINTER_PLUS_EXPR:
2526 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2527 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2528 /* Second operand is sizetype, which could be in theory
2529 wider than pointer's precision. Make sure we never
2530 return more than prec. */
2531 ret2 = MIN (ret2, prec);
2532 return MIN (ret1, ret2);
2533 case BIT_AND_EXPR:
2534 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2535 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2536 return MAX (ret1, ret2);
2537 case MULT_EXPR:
2538 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2539 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2540 return MIN (ret1 + ret2, prec);
2541 case LSHIFT_EXPR:
2542 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2543 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2544 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2545 {
2546 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2547 return MIN (ret1 + ret2, prec);
2548 }
2549 return ret1;
2550 case RSHIFT_EXPR:
2551 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2552 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2553 {
2554 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2555 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2556 if (ret1 > ret2)
2557 return ret1 - ret2;
2558 }
2559 return 0;
2560 case TRUNC_DIV_EXPR:
2561 case CEIL_DIV_EXPR:
2562 case FLOOR_DIV_EXPR:
2563 case ROUND_DIV_EXPR:
2564 case EXACT_DIV_EXPR:
2565 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2566 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2567 {
2568 int l = tree_log2 (TREE_OPERAND (expr, 1));
2569 if (l >= 0)
2570 {
2571 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2572 ret2 = l;
2573 if (ret1 > ret2)
2574 return ret1 - ret2;
2575 }
2576 }
2577 return 0;
2578 CASE_CONVERT:
2579 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2580 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2581 ret1 = prec;
2582 return MIN (ret1, prec);
2583 case SAVE_EXPR:
2584 return tree_ctz (TREE_OPERAND (expr, 0));
2585 case COND_EXPR:
2586 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2587 if (ret1 == 0)
2588 return 0;
2589 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2590 return MIN (ret1, ret2);
2591 case COMPOUND_EXPR:
2592 return tree_ctz (TREE_OPERAND (expr, 1));
2593 case ADDR_EXPR:
2594 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2595 if (ret1 > BITS_PER_UNIT)
2596 {
2597 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2598 return MIN (ret1, prec);
2599 }
2600 return 0;
2601 default:
2602 return 0;
2603 }
2604 }
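/* Editor's note (not part of the original source): how the tree_ctz
   rules above compose, on an assumed expression (x * 8) + 16 with a
   32-bit int x whose nonzero bits are unknown.

     tree_ctz (8)          = 3                (INTEGER_CST case)
     tree_ctz (x * 8)      = 0 + 3 = 3        (MULT_EXPR adds the counts)
     tree_ctz (16)         = 4
     tree_ctz ((x*8)+16)   = MIN (3, 4) = 3   (PLUS_EXPR takes the minimum)

   i.e. the whole expression is known to be a multiple of 8.  */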
2605
2606 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2607 decimal float constants, so don't return 1 for them. */
2608
2609 int
2610 real_zerop (const_tree expr)
2611 {
2612 switch (TREE_CODE (expr))
2613 {
2614 case REAL_CST:
2615 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2616 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2617 case COMPLEX_CST:
2618 return real_zerop (TREE_REALPART (expr))
2619 && real_zerop (TREE_IMAGPART (expr));
2620 case VECTOR_CST:
2621 {
2622 unsigned i;
2623 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2624 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2625 return false;
2626 return true;
2627 }
2628 default:
2629 return false;
2630 }
2631 }
2632
2633 /* Return 1 if EXPR is the real constant one in real or complex form.
2634 Trailing zeroes matter for decimal float constants, so don't return
2635 1 for them. */
2636
2637 int
2638 real_onep (const_tree expr)
2639 {
2640 switch (TREE_CODE (expr))
2641 {
2642 case REAL_CST:
2643 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2644 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2645 case COMPLEX_CST:
2646 return real_onep (TREE_REALPART (expr))
2647 && real_zerop (TREE_IMAGPART (expr));
2648 case VECTOR_CST:
2649 {
2650 unsigned i;
2651 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2652 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2653 return false;
2654 return true;
2655 }
2656 default:
2657 return false;
2658 }
2659 }
2660
2661 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2662 matter for decimal float constants, so don't return 1 for them. */
2663
2664 int
2665 real_minus_onep (const_tree expr)
2666 {
2667 switch (TREE_CODE (expr))
2668 {
2669 case REAL_CST:
2670 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2671 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2672 case COMPLEX_CST:
2673 return real_minus_onep (TREE_REALPART (expr))
2674 && real_zerop (TREE_IMAGPART (expr));
2675 case VECTOR_CST:
2676 {
2677 unsigned i;
2678 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2679 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2680 return false;
2681 return true;
2682 }
2683 default:
2684 return false;
2685 }
2686 }
2687
2688 /* Nonzero if EXP is a constant or a cast of a constant. */
2689
2690 int
2691 really_constant_p (const_tree exp)
2692 {
2693 /* This is not quite the same as STRIP_NOPS. It does more. */
2694 while (CONVERT_EXPR_P (exp)
2695 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2696 exp = TREE_OPERAND (exp, 0);
2697 return TREE_CONSTANT (exp);
2698 }
2699 \f
2700 /* Return first list element whose TREE_VALUE is ELEM.
2701 Return 0 if ELEM is not in LIST. */
2702
2703 tree
2704 value_member (tree elem, tree list)
2705 {
2706 while (list)
2707 {
2708 if (elem == TREE_VALUE (list))
2709 return list;
2710 list = TREE_CHAIN (list);
2711 }
2712 return NULL_TREE;
2713 }
2714
2715 /* Return first list element whose TREE_PURPOSE is ELEM.
2716 Return 0 if ELEM is not in LIST. */
2717
2718 tree
2719 purpose_member (const_tree elem, tree list)
2720 {
2721 while (list)
2722 {
2723 if (elem == TREE_PURPOSE (list))
2724 return list;
2725 list = TREE_CHAIN (list);
2726 }
2727 return NULL_TREE;
2728 }
2729
2730 /* Return true if ELEM is in V. */
2731
2732 bool
2733 vec_member (const_tree elem, vec<tree, va_gc> *v)
2734 {
2735 unsigned ix;
2736 tree t;
2737 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2738 if (elem == t)
2739 return true;
2740 return false;
2741 }
2742
2743 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2744 NULL_TREE. */
2745
2746 tree
2747 chain_index (int idx, tree chain)
2748 {
2749 for (; chain && idx > 0; --idx)
2750 chain = TREE_CHAIN (chain);
2751 return chain;
2752 }
2753
2754 /* Return nonzero if ELEM is part of the chain CHAIN. */
2755
2756 int
2757 chain_member (const_tree elem, const_tree chain)
2758 {
2759 while (chain)
2760 {
2761 if (elem == chain)
2762 return 1;
2763 chain = DECL_CHAIN (chain);
2764 }
2765
2766 return 0;
2767 }
2768
2769 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2770 We expect a null pointer to mark the end of the chain.
2771 This is the Lisp primitive `length'. */
2772
2773 int
2774 list_length (const_tree t)
2775 {
2776 const_tree p = t;
2777 #ifdef ENABLE_TREE_CHECKING
2778 const_tree q = t;
2779 #endif
2780 int len = 0;
2781
2782 while (p)
2783 {
2784 p = TREE_CHAIN (p);
2785 #ifdef ENABLE_TREE_CHECKING
2786 if (len % 2)
2787 q = TREE_CHAIN (q);
2788 gcc_assert (p != q);
2789 #endif
2790 len++;
2791 }
2792
2793 return len;
2794 }
2795
2796 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2797 UNION_TYPE TYPE, or NULL_TREE if none. */
2798
2799 tree
2800 first_field (const_tree type)
2801 {
2802 tree t = TYPE_FIELDS (type);
2803 while (t && TREE_CODE (t) != FIELD_DECL)
2804 t = TREE_CHAIN (t);
2805 return t;
2806 }
2807
2808 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2809 by modifying the last node in chain 1 to point to chain 2.
2810 This is the Lisp primitive `nconc'. */
2811
2812 tree
2813 chainon (tree op1, tree op2)
2814 {
2815 tree t1;
2816
2817 if (!op1)
2818 return op2;
2819 if (!op2)
2820 return op1;
2821
2822 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2823 continue;
2824 TREE_CHAIN (t1) = op2;
2825
2826 #ifdef ENABLE_TREE_CHECKING
2827 {
2828 tree t2;
2829 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2830 gcc_assert (t2 != t1);
2831 }
2832 #endif
2833
2834 return op1;
2835 }
2836
2837 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2838
2839 tree
2840 tree_last (tree chain)
2841 {
2842 tree next;
2843 if (chain)
2844 while ((next = TREE_CHAIN (chain)))
2845 chain = next;
2846 return chain;
2847 }
2848
2849 /* Reverse the order of elements in the chain T,
2850 and return the new head of the chain (old last element). */
2851
2852 tree
2853 nreverse (tree t)
2854 {
2855 tree prev = 0, decl, next;
2856 for (decl = t; decl; decl = next)
2857 {
2858 /* We shouldn't be using this function to reverse BLOCK chains; we
2859 have blocks_nreverse for that. */
2860 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2861 next = TREE_CHAIN (decl);
2862 TREE_CHAIN (decl) = prev;
2863 prev = decl;
2864 }
2865 return prev;
2866 }
2867 \f
2868 /* Return a newly created TREE_LIST node whose
2869 purpose and value fields are PARM and VALUE. */
2870
2871 tree
2872 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2873 {
2874 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2875 TREE_PURPOSE (t) = parm;
2876 TREE_VALUE (t) = value;
2877 return t;
2878 }
2879
2880 /* Build a chain of TREE_LIST nodes from a vector. */
2881
2882 tree
2883 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2884 {
2885 tree ret = NULL_TREE;
2886 tree *pp = &ret;
2887 unsigned int i;
2888 tree t;
2889 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2890 {
2891 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2892 pp = &TREE_CHAIN (*pp);
2893 }
2894 return ret;
2895 }
2896
2897 /* Return a newly created TREE_LIST node whose
2898 purpose and value fields are PURPOSE and VALUE
2899 and whose TREE_CHAIN is CHAIN. */
2900
2901 tree
2902 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2903 {
2904 tree node;
2905
2906 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2907 memset (node, 0, sizeof (struct tree_common));
2908
2909 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2910
2911 TREE_SET_CODE (node, TREE_LIST);
2912 TREE_CHAIN (node) = chain;
2913 TREE_PURPOSE (node) = purpose;
2914 TREE_VALUE (node) = value;
2915 return node;
2916 }
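/* Editor's note (not part of the original source): a hedged sketch of
   how the list primitives above combine; the payloads are illustrative
   assumptions.

     tree list = NULL_TREE;
     list = tree_cons (NULL_TREE, integer_zero_node, list);
     list = tree_cons (NULL_TREE, integer_one_node, list);
     list = nreverse (list);            back in insertion order
     gcc_assert (list_length (list) == 2);
     gcc_assert (value_member (integer_one_node, list) != NULL_TREE);
*/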
2917
2918 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2919 trees. */
2920
2921 vec<tree, va_gc> *
2922 ctor_to_vec (tree ctor)
2923 {
2924 vec<tree, va_gc> *vec;
2925 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2926 unsigned int ix;
2927 tree val;
2928
2929 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2930 vec->quick_push (val);
2931
2932 return vec;
2933 }
2934 \f
2935 /* Return the size nominally occupied by an object of type TYPE
2936 when it resides in memory. The value is measured in units of bytes,
2937 and its data type is that normally used for type sizes
2938 (which is the first type created by make_signed_type or
2939 make_unsigned_type). */
2940
2941 tree
2942 size_in_bytes_loc (location_t loc, const_tree type)
2943 {
2944 tree t;
2945
2946 if (type == error_mark_node)
2947 return integer_zero_node;
2948
2949 type = TYPE_MAIN_VARIANT (type);
2950 t = TYPE_SIZE_UNIT (type);
2951
2952 if (t == 0)
2953 {
2954 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2955 return size_zero_node;
2956 }
2957
2958 return t;
2959 }
2960
2961 /* Return the size of TYPE (in bytes) as a wide integer
2962 or return -1 if the size can vary or is larger than an integer. */
2963
2964 HOST_WIDE_INT
2965 int_size_in_bytes (const_tree type)
2966 {
2967 tree t;
2968
2969 if (type == error_mark_node)
2970 return 0;
2971
2972 type = TYPE_MAIN_VARIANT (type);
2973 t = TYPE_SIZE_UNIT (type);
2974
2975 if (t && tree_fits_uhwi_p (t))
2976 return TREE_INT_CST_LOW (t);
2977 else
2978 return -1;
2979 }
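/* Editor's note (not part of the original source): the size queries on
   a typical LP64 target, stated as an illustrative assumption.

     int_size_in_bytes (integer_type_node)        == 4
     int_size_in_bytes (double_type_node)         == 8
     int_size_in_bytes (a VLA or incomplete type) == -1

   size_in_bytes_loc instead diagnoses an incomplete type via the
   language hook and returns size_zero_node.  */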
2980
2981 /* Return the maximum size of TYPE (in bytes) as a wide integer
2982 or return -1 if the size can vary or is larger than an integer. */
2983
2984 HOST_WIDE_INT
2985 max_int_size_in_bytes (const_tree type)
2986 {
2987 HOST_WIDE_INT size = -1;
2988 tree size_tree;
2989
2990 /* If this is an array type, check for a possible MAX_SIZE attached. */
2991
2992 if (TREE_CODE (type) == ARRAY_TYPE)
2993 {
2994 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2995
2996 if (size_tree && tree_fits_uhwi_p (size_tree))
2997 size = tree_to_uhwi (size_tree);
2998 }
2999
3000 /* If we still haven't been able to get a size, see if the language
3001 can compute a maximum size. */
3002
3003 if (size == -1)
3004 {
3005 size_tree = lang_hooks.types.max_size (type);
3006
3007 if (size_tree && tree_fits_uhwi_p (size_tree))
3008 size = tree_to_uhwi (size_tree);
3009 }
3010
3011 return size;
3012 }
3013 \f
3014 /* Return the bit position of FIELD, in bits from the start of the record.
3015 This is a tree of type bitsizetype. */
3016
3017 tree
3018 bit_position (const_tree field)
3019 {
3020 return bit_from_pos (DECL_FIELD_OFFSET (field),
3021 DECL_FIELD_BIT_OFFSET (field));
3022 }
3023 \f
3024 /* Return the byte position of FIELD, in bytes from the start of the record.
3025 This is a tree of type sizetype. */
3026
3027 tree
3028 byte_position (const_tree field)
3029 {
3030 return byte_from_pos (DECL_FIELD_OFFSET (field),
3031 DECL_FIELD_BIT_OFFSET (field));
3032 }
3033
3034 /* Likewise, but return as an integer. It must be representable in
3035 that way (since it could be a signed value, we don't have the
3036 option of returning -1 like int_size_in_bytes can). */
3037
3038 HOST_WIDE_INT
3039 int_byte_position (const_tree field)
3040 {
3041 return tree_to_shwi (byte_position (field));
3042 }
3043 \f
3044 /* Return the strictest alignment, in bits, that T is known to have. */
3045
3046 unsigned int
3047 expr_align (const_tree t)
3048 {
3049 unsigned int align0, align1;
3050
3051 switch (TREE_CODE (t))
3052 {
3053 CASE_CONVERT: case NON_LVALUE_EXPR:
3054 /* If we have conversions, we know that the alignment of the
3055 object must meet each of the alignments of the types. */
3056 align0 = expr_align (TREE_OPERAND (t, 0));
3057 align1 = TYPE_ALIGN (TREE_TYPE (t));
3058 return MAX (align0, align1);
3059
3060 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3061 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3062 case CLEANUP_POINT_EXPR:
3063 /* These don't change the alignment of an object. */
3064 return expr_align (TREE_OPERAND (t, 0));
3065
3066 case COND_EXPR:
3067 /* The best we can do is say that the alignment is the least aligned
3068 of the two arms. */
3069 align0 = expr_align (TREE_OPERAND (t, 1));
3070 align1 = expr_align (TREE_OPERAND (t, 2));
3071 return MIN (align0, align1);
3072
3073 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3074 meaningfully, it's always 1. */
3075 case LABEL_DECL: case CONST_DECL:
3076 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3077 case FUNCTION_DECL:
3078 gcc_assert (DECL_ALIGN (t) != 0);
3079 return DECL_ALIGN (t);
3080
3081 default:
3082 break;
3083 }
3084
3085 /* Otherwise take the alignment from that of the type. */
3086 return TYPE_ALIGN (TREE_TYPE (t));
3087 }
3088 \f
3089 /* Return, as a tree node, the number of elements for TYPE (which is an
3090 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3091
3092 tree
3093 array_type_nelts (const_tree type)
3094 {
3095 tree index_type, min, max;
3096
3097 /* If they did it with unspecified bounds, then we should have already
3098 given an error about it before we got here. */
3099 if (! TYPE_DOMAIN (type))
3100 return error_mark_node;
3101
3102 index_type = TYPE_DOMAIN (type);
3103 min = TYPE_MIN_VALUE (index_type);
3104 max = TYPE_MAX_VALUE (index_type);
3105
3106 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3107 if (!max)
3108 return error_mark_node;
3109
3110 return (integer_zerop (min)
3111 ? max
3112 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3113 }
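/* Editor's note (not part of the original source): array_type_nelts
   returns the element count *minus one*, i.e. the upper bound of a
   zero-based domain.  A hedged sketch for a type equivalent to int[4]:

     tree t = build_array_type (integer_type_node,
                                build_index_type (size_int (3)));
     array_type_nelts (t) is the constant 3, meaning 4 elements.  */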
3114 \f
3115 /* If arg is static -- a reference to an object in static storage -- then
3116 return the object. This is not the same as the C meaning of `static'.
3117 If arg isn't static, return NULL. */
3118
3119 tree
3120 staticp (tree arg)
3121 {
3122 switch (TREE_CODE (arg))
3123 {
3124 case FUNCTION_DECL:
3125 /* Nested functions are static, even though taking their address will
3126 involve a trampoline as we unnest the nested function and create
3127 the trampoline on the tree level. */
3128 return arg;
3129
3130 case VAR_DECL:
3131 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3132 && ! DECL_THREAD_LOCAL_P (arg)
3133 && ! DECL_DLLIMPORT_P (arg)
3134 ? arg : NULL);
3135
3136 case CONST_DECL:
3137 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3138 ? arg : NULL);
3139
3140 case CONSTRUCTOR:
3141 return TREE_STATIC (arg) ? arg : NULL;
3142
3143 case LABEL_DECL:
3144 case STRING_CST:
3145 return arg;
3146
3147 case COMPONENT_REF:
3148 /* If the thing being referenced is not a field, then it is
3149 something language specific. */
3150 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3151
3152 /* If we are referencing a bitfield, we can't evaluate an
3153 ADDR_EXPR at compile time and so it isn't a constant. */
3154 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3155 return NULL;
3156
3157 return staticp (TREE_OPERAND (arg, 0));
3158
3159 case BIT_FIELD_REF:
3160 return NULL;
3161
3162 case INDIRECT_REF:
3163 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3164
3165 case ARRAY_REF:
3166 case ARRAY_RANGE_REF:
3167 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3168 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3169 return staticp (TREE_OPERAND (arg, 0));
3170 else
3171 return NULL;
3172
3173 case COMPOUND_LITERAL_EXPR:
3174 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3175
3176 default:
3177 return NULL;
3178 }
3179 }
3180
3181 \f
3182
3183
3184 /* Return whether OP is a DECL whose address is function-invariant. */
3185
3186 bool
3187 decl_address_invariant_p (const_tree op)
3188 {
3189 /* The conditions below are slightly less strict than those in
3190 staticp. */
3191
3192 switch (TREE_CODE (op))
3193 {
3194 case PARM_DECL:
3195 case RESULT_DECL:
3196 case LABEL_DECL:
3197 case FUNCTION_DECL:
3198 return true;
3199
3200 case VAR_DECL:
3201 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3202 || DECL_THREAD_LOCAL_P (op)
3203 || DECL_CONTEXT (op) == current_function_decl
3204 || decl_function_context (op) == current_function_decl)
3205 return true;
3206 break;
3207
3208 case CONST_DECL:
3209 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3210 || decl_function_context (op) == current_function_decl)
3211 return true;
3212 break;
3213
3214 default:
3215 break;
3216 }
3217
3218 return false;
3219 }
3220
3221 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3222
3223 bool
3224 decl_address_ip_invariant_p (const_tree op)
3225 {
3226 /* The conditions below are slightly less strict than those in
3227 staticp. */
3228
3229 switch (TREE_CODE (op))
3230 {
3231 case LABEL_DECL:
3232 case FUNCTION_DECL:
3233 case STRING_CST:
3234 return true;
3235
3236 case VAR_DECL:
3237 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3238 && !DECL_DLLIMPORT_P (op))
3239 || DECL_THREAD_LOCAL_P (op))
3240 return true;
3241 break;
3242
3243 case CONST_DECL:
3244 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3245 return true;
3246 break;
3247
3248 default:
3249 break;
3250 }
3251
3252 return false;
3253 }
3254
3255
3256 /* Return true if T is function-invariant (internal function, does
3257 not handle arithmetic; that's handled in skip_simple_arithmetic and
3258 tree_invariant_p). */
3259
3260 static bool
3261 tree_invariant_p_1 (tree t)
3262 {
3263 tree op;
3264
3265 if (TREE_CONSTANT (t)
3266 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3267 return true;
3268
3269 switch (TREE_CODE (t))
3270 {
3271 case SAVE_EXPR:
3272 return true;
3273
3274 case ADDR_EXPR:
3275 op = TREE_OPERAND (t, 0);
3276 while (handled_component_p (op))
3277 {
3278 switch (TREE_CODE (op))
3279 {
3280 case ARRAY_REF:
3281 case ARRAY_RANGE_REF:
3282 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3283 || TREE_OPERAND (op, 2) != NULL_TREE
3284 || TREE_OPERAND (op, 3) != NULL_TREE)
3285 return false;
3286 break;
3287
3288 case COMPONENT_REF:
3289 if (TREE_OPERAND (op, 2) != NULL_TREE)
3290 return false;
3291 break;
3292
3293 default:;
3294 }
3295 op = TREE_OPERAND (op, 0);
3296 }
3297
3298 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3299
3300 default:
3301 break;
3302 }
3303
3304 return false;
3305 }
3306
3307 /* Return true if T is function-invariant. */
3308
3309 bool
3310 tree_invariant_p (tree t)
3311 {
3312 tree inner = skip_simple_arithmetic (t);
3313 return tree_invariant_p_1 (inner);
3314 }
3315
3316 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3317 Do this to any expression which may be used in more than one place,
3318 but must be evaluated only once.
3319
3320 Normally, expand_expr would reevaluate the expression each time.
3321 Calling save_expr produces something that is evaluated and recorded
3322 the first time expand_expr is called on it. Subsequent calls to
3323 expand_expr just reuse the recorded value.
3324
3325 The call to expand_expr that generates code that actually computes
3326 the value is the first call *at compile time*. Subsequent calls
3327 *at compile time* generate code to use the saved value.
3328 This produces the correct result provided that *at run time* control
3329 always flows through the insns made by the first expand_expr
3330 before reaching the other places where the save_expr was evaluated.
3331 You, the caller of save_expr, must make sure this is so.
3332
3333 Constants, and certain read-only nodes, are returned with no
3334 SAVE_EXPR because that is safe. Expressions containing placeholders
3335 are not touched; see tree.def for an explanation of what these
3336 are used for. */
3337
3338 tree
3339 save_expr (tree expr)
3340 {
3341 tree t = fold (expr);
3342 tree inner;
3343
3344 /* If the tree evaluates to a constant, then we don't want to hide that
3345 fact (i.e. this allows further folding, and direct checks for constants).
3346 However, a read-only object that has side effects cannot be bypassed.
3347 Since it is no problem to reevaluate literals, we just return the
3348 literal node. */
3349 inner = skip_simple_arithmetic (t);
3350 if (TREE_CODE (inner) == ERROR_MARK)
3351 return inner;
3352
3353 if (tree_invariant_p_1 (inner))
3354 return t;
3355
3356 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3357 it means that the size or offset of some field of an object depends on
3358 the value within another field.
3359
3360 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3361 and some variable since it would then need to be both evaluated once and
3362 evaluated more than once. Front-ends must assure this case cannot
3363 happen by surrounding any such subexpressions in their own SAVE_EXPR
3364 and forcing evaluation at the proper time. */
3365 if (contains_placeholder_p (inner))
3366 return t;
3367
3368 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3369 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3370
3371 /* This expression might be placed ahead of a jump to ensure that the
3372 value was computed on both sides of the jump. So make sure it isn't
3373 eliminated as dead. */
3374 TREE_SIDE_EFFECTS (t) = 1;
3375 return t;
3376 }
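/* Editor's note (not part of the original source): the classic use of
   save_expr, wrapping a subexpression that a front end must reference
   twice but evaluate only once.  EXPR here is an assumed tree with
   side effects.

     tree saved = save_expr (expr);
     tree sum   = fold_build2 (PLUS_EXPR, TREE_TYPE (saved), saved, saved);

   Both operands of SUM share one SAVE_EXPR node, so EXPR is evaluated
   once at run time; an invariant EXPR comes back unwrapped.  */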
3377
3378 /* Look inside EXPR into any simple arithmetic operations. Return the
3379 outermost non-arithmetic or non-invariant node. */
3380
3381 tree
3382 skip_simple_arithmetic (tree expr)
3383 {
3384 /* We don't care about whether this can be used as an lvalue in this
3385 context. */
3386 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3387 expr = TREE_OPERAND (expr, 0);
3388
3389 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3390 a constant, it will be more efficient to not make another SAVE_EXPR since
3391 it will allow better simplification and GCSE will be able to merge the
3392 computations if they actually occur. */
3393 while (true)
3394 {
3395 if (UNARY_CLASS_P (expr))
3396 expr = TREE_OPERAND (expr, 0);
3397 else if (BINARY_CLASS_P (expr))
3398 {
3399 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3400 expr = TREE_OPERAND (expr, 0);
3401 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3402 expr = TREE_OPERAND (expr, 1);
3403 else
3404 break;
3405 }
3406 else
3407 break;
3408 }
3409
3410 return expr;
3411 }
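/* Editor's note (not part of the original source): for example, given
   an assumed tree for (s + 4) * 2 where s is a SAVE_EXPR,
   skip_simple_arithmetic walks through the binary nodes whose other
   operand is invariant and returns s itself, which is how save_expr
   above avoids wrapping such expressions in a second SAVE_EXPR.  */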
3412
3413 /* Look inside EXPR into simple arithmetic operations involving constants.
3414 Return the outermost non-arithmetic or non-constant node. */
3415
3416 tree
3417 skip_simple_constant_arithmetic (tree expr)
3418 {
3419 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3420 expr = TREE_OPERAND (expr, 0);
3421
3422 while (true)
3423 {
3424 if (UNARY_CLASS_P (expr))
3425 expr = TREE_OPERAND (expr, 0);
3426 else if (BINARY_CLASS_P (expr))
3427 {
3428 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3429 expr = TREE_OPERAND (expr, 0);
3430 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3431 expr = TREE_OPERAND (expr, 1);
3432 else
3433 break;
3434 }
3435 else
3436 break;
3437 }
3438
3439 return expr;
3440 }
3441
3442 /* Return which tree structure is used by T. */
3443
3444 enum tree_node_structure_enum
3445 tree_node_structure (const_tree t)
3446 {
3447 const enum tree_code code = TREE_CODE (t);
3448 return tree_node_structure_for_code (code);
3449 }
3450
3451 /* Set various status flags when building a CALL_EXPR object T. */
3452
3453 static void
3454 process_call_operands (tree t)
3455 {
3456 bool side_effects = TREE_SIDE_EFFECTS (t);
3457 bool read_only = false;
3458 int i = call_expr_flags (t);
3459
3460 /* Calls have side-effects, except those to const or pure functions. */
3461 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3462 side_effects = true;
3463 /* Propagate TREE_READONLY of arguments for const functions. */
3464 if (i & ECF_CONST)
3465 read_only = true;
3466
3467 if (!side_effects || read_only)
3468 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3469 {
3470 tree op = TREE_OPERAND (t, i);
3471 if (op && TREE_SIDE_EFFECTS (op))
3472 side_effects = true;
3473 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3474 read_only = false;
3475 }
3476
3477 TREE_SIDE_EFFECTS (t) = side_effects;
3478 TREE_READONLY (t) = read_only;
3479 }
3480 \f
3481 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3482 size or offset that depends on a field within a record. */
3483
3484 bool
3485 contains_placeholder_p (const_tree exp)
3486 {
3487 enum tree_code code;
3488
3489 if (!exp)
3490 return 0;
3491
3492 code = TREE_CODE (exp);
3493 if (code == PLACEHOLDER_EXPR)
3494 return 1;
3495
3496 switch (TREE_CODE_CLASS (code))
3497 {
3498 case tcc_reference:
3499 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3500 position computations since they will be converted into a
3501 WITH_RECORD_EXPR involving the reference, which we assume
3502 here will be valid. */
3503 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3504
3505 case tcc_exceptional:
3506 if (code == TREE_LIST)
3507 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3508 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3509 break;
3510
3511 case tcc_unary:
3512 case tcc_binary:
3513 case tcc_comparison:
3514 case tcc_expression:
3515 switch (code)
3516 {
3517 case COMPOUND_EXPR:
3518 /* Ignoring the first operand isn't quite right, but works best. */
3519 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3520
3521 case COND_EXPR:
3522 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3523 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3524 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3525
3526 case SAVE_EXPR:
3527 /* The save_expr function never wraps anything containing
3528 a PLACEHOLDER_EXPR. */
3529 return 0;
3530
3531 default:
3532 break;
3533 }
3534
3535 switch (TREE_CODE_LENGTH (code))
3536 {
3537 case 1:
3538 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3539 case 2:
3540 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3541 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3542 default:
3543 return 0;
3544 }
3545
3546 case tcc_vl_exp:
3547 switch (code)
3548 {
3549 case CALL_EXPR:
3550 {
3551 const_tree arg;
3552 const_call_expr_arg_iterator iter;
3553 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3554 if (CONTAINS_PLACEHOLDER_P (arg))
3555 return 1;
3556 return 0;
3557 }
3558 default:
3559 return 0;
3560 }
3561
3562 default:
3563 return 0;
3564 }
3565 return 0;
3566 }
3567
3568 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3569 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3570 field positions. */
3571
3572 static bool
3573 type_contains_placeholder_1 (const_tree type)
3574 {
3575 /* If the size contains a placeholder or the parent type (component type in
3576 the case of arrays) type involves a placeholder, this type does. */
3577 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3578 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3579 || (!POINTER_TYPE_P (type)
3580 && TREE_TYPE (type)
3581 && type_contains_placeholder_p (TREE_TYPE (type))))
3582 return true;
3583
3584 /* Now do type-specific checks. Note that the last part of the check above
3585 greatly limits what we have to do below. */
3586 switch (TREE_CODE (type))
3587 {
3588 case VOID_TYPE:
3589 case POINTER_BOUNDS_TYPE:
3590 case COMPLEX_TYPE:
3591 case ENUMERAL_TYPE:
3592 case BOOLEAN_TYPE:
3593 case POINTER_TYPE:
3594 case OFFSET_TYPE:
3595 case REFERENCE_TYPE:
3596 case METHOD_TYPE:
3597 case FUNCTION_TYPE:
3598 case VECTOR_TYPE:
3599 case NULLPTR_TYPE:
3600 return false;
3601
3602 case INTEGER_TYPE:
3603 case REAL_TYPE:
3604 case FIXED_POINT_TYPE:
3605 /* Here we just check the bounds. */
3606 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3607 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3608
3609 case ARRAY_TYPE:
3610 /* We have already checked the component type above, so just check
3611 the domain type. Flexible array members have a null domain. */
3612 return TYPE_DOMAIN (type) ?
3613 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3614
3615 case RECORD_TYPE:
3616 case UNION_TYPE:
3617 case QUAL_UNION_TYPE:
3618 {
3619 tree field;
3620
3621 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3622 if (TREE_CODE (field) == FIELD_DECL
3623 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3624 || (TREE_CODE (type) == QUAL_UNION_TYPE
3625 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3626 || type_contains_placeholder_p (TREE_TYPE (field))))
3627 return true;
3628
3629 return false;
3630 }
3631
3632 default:
3633 gcc_unreachable ();
3634 }
3635 }
3636
3637 /* Wrapper around above function used to cache its result. */
3638
3639 bool
3640 type_contains_placeholder_p (tree type)
3641 {
3642 bool result;
3643
3644 /* If the contains_placeholder_bits field has been initialized,
3645 then we know the answer. */
3646 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3647 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3648
3649 /* Indicate that we've seen this type node, and the answer is false.
3650 This is what we want to return if we run into recursion via fields. */
3651 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3652
3653 /* Compute the real value. */
3654 result = type_contains_placeholder_1 (type);
3655
3656 /* Store the real value. */
3657 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3658
3659 return result;
3660 }
3661 \f
3662 /* Push tree EXP onto vector QUEUE if it is not already present. */
3663
3664 static void
3665 push_without_duplicates (tree exp, vec<tree> *queue)
3666 {
3667 unsigned int i;
3668 tree iter;
3669
3670 FOR_EACH_VEC_ELT (*queue, i, iter)
3671 if (simple_cst_equal (iter, exp) == 1)
3672 break;
3673
3674 if (!iter)
3675 queue->safe_push (exp);
3676 }
3677
3678 /* Given a tree EXP, find all occurrences of references to fields
3679 in a PLACEHOLDER_EXPR and place them in vector REFS without
3680 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3681 we assume here that EXP contains only arithmetic expressions
3682 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3683 argument list. */
3684
3685 void
3686 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3687 {
3688 enum tree_code code = TREE_CODE (exp);
3689 tree inner;
3690 int i;
3691
3692 /* We handle TREE_LIST and COMPONENT_REF separately. */
3693 if (code == TREE_LIST)
3694 {
3695 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3696 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3697 }
3698 else if (code == COMPONENT_REF)
3699 {
3700 for (inner = TREE_OPERAND (exp, 0);
3701 REFERENCE_CLASS_P (inner);
3702 inner = TREE_OPERAND (inner, 0))
3703 ;
3704
3705 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3706 push_without_duplicates (exp, refs);
3707 else
3708 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3709 }
3710 else
3711 switch (TREE_CODE_CLASS (code))
3712 {
3713 case tcc_constant:
3714 break;
3715
3716 case tcc_declaration:
3717 /* Variables allocated to static storage can stay. */
3718 if (!TREE_STATIC (exp))
3719 push_without_duplicates (exp, refs);
3720 break;
3721
3722 case tcc_expression:
3723 /* This is the pattern built in ada/make_aligning_type. */
3724 if (code == ADDR_EXPR
3725 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3726 {
3727 push_without_duplicates (exp, refs);
3728 break;
3729 }
3730
3731 /* Fall through... */
3732
3733 case tcc_exceptional:
3734 case tcc_unary:
3735 case tcc_binary:
3736 case tcc_comparison:
3737 case tcc_reference:
3738 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3739 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3740 break;
3741
3742 case tcc_vl_exp:
3743 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3744 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3745 break;
3746
3747 default:
3748 gcc_unreachable ();
3749 }
3750 }
3751
3752 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3753 return a tree with all occurrences of references to F in a
3754 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3755 CONST_DECLs. Note that we assume here that EXP contains only
3756 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3757 occurring only in their argument list. */
3758
3759 tree
3760 substitute_in_expr (tree exp, tree f, tree r)
3761 {
3762 enum tree_code code = TREE_CODE (exp);
3763 tree op0, op1, op2, op3;
3764 tree new_tree;
3765
3766 /* We handle TREE_LIST and COMPONENT_REF separately. */
3767 if (code == TREE_LIST)
3768 {
3769 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3770 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3771 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3772 return exp;
3773
3774 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3775 }
3776 else if (code == COMPONENT_REF)
3777 {
3778 tree inner;
3779
3780 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3781 and it is the right field, replace it with R. */
3782 for (inner = TREE_OPERAND (exp, 0);
3783 REFERENCE_CLASS_P (inner);
3784 inner = TREE_OPERAND (inner, 0))
3785 ;
3786
3787 /* The field. */
3788 op1 = TREE_OPERAND (exp, 1);
3789
3790 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3791 return r;
3792
3793 /* If this expression hasn't been completed yet, leave it alone. */
3794 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3795 return exp;
3796
3797 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3798 if (op0 == TREE_OPERAND (exp, 0))
3799 return exp;
3800
3801 new_tree
3802 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3803 }
3804 else
3805 switch (TREE_CODE_CLASS (code))
3806 {
3807 case tcc_constant:
3808 return exp;
3809
3810 case tcc_declaration:
3811 if (exp == f)
3812 return r;
3813 else
3814 return exp;
3815
3816 case tcc_expression:
3817 if (exp == f)
3818 return r;
3819
3820 /* Fall through... */
3821
3822 case tcc_exceptional:
3823 case tcc_unary:
3824 case tcc_binary:
3825 case tcc_comparison:
3826 case tcc_reference:
3827 switch (TREE_CODE_LENGTH (code))
3828 {
3829 case 0:
3830 return exp;
3831
3832 case 1:
3833 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3834 if (op0 == TREE_OPERAND (exp, 0))
3835 return exp;
3836
3837 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3838 break;
3839
3840 case 2:
3841 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3842 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3843
3844 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3845 return exp;
3846
3847 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3848 break;
3849
3850 case 3:
3851 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3852 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3853 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3854
3855 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3856 && op2 == TREE_OPERAND (exp, 2))
3857 return exp;
3858
3859 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3860 break;
3861
3862 case 4:
3863 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3864 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3865 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3866 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3867
3868 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3869 && op2 == TREE_OPERAND (exp, 2)
3870 && op3 == TREE_OPERAND (exp, 3))
3871 return exp;
3872
3873 new_tree
3874 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3875 break;
3876
3877 default:
3878 gcc_unreachable ();
3879 }
3880 break;
3881
3882 case tcc_vl_exp:
3883 {
3884 int i;
3885
3886 new_tree = NULL_TREE;
3887
3888 /* If we are trying to replace F with a constant, inline back
3889 functions which do nothing else than computing a value from
3890 the arguments they are passed. This makes it possible to
3891 fold partially or entirely the replacement expression. */
3892 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3893 {
3894 tree t = maybe_inline_call_in_expr (exp);
3895 if (t)
3896 return SUBSTITUTE_IN_EXPR (t, f, r);
3897 }
3898
3899 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3900 {
3901 tree op = TREE_OPERAND (exp, i);
3902 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3903 if (new_op != op)
3904 {
3905 if (!new_tree)
3906 new_tree = copy_node (exp);
3907 TREE_OPERAND (new_tree, i) = new_op;
3908 }
3909 }
3910
3911 if (new_tree)
3912 {
3913 new_tree = fold (new_tree);
3914 if (TREE_CODE (new_tree) == CALL_EXPR)
3915 process_call_operands (new_tree);
3916 }
3917 else
3918 return exp;
3919 }
3920 break;
3921
3922 default:
3923 gcc_unreachable ();
3924 }
3925
3926 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3927
3928 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3929 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3930
3931 return new_tree;
3932 }
3933
3934 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3935 for it within OBJ, a tree that is an object or a chain of references. */
3936
3937 tree
3938 substitute_placeholder_in_expr (tree exp, tree obj)
3939 {
3940 enum tree_code code = TREE_CODE (exp);
3941 tree op0, op1, op2, op3;
3942 tree new_tree;
3943
3944 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3945 in the chain of OBJ. */
3946 if (code == PLACEHOLDER_EXPR)
3947 {
3948 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3949 tree elt;
3950
3951 for (elt = obj; elt != 0;
3952 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3953 || TREE_CODE (elt) == COND_EXPR)
3954 ? TREE_OPERAND (elt, 1)
3955 : (REFERENCE_CLASS_P (elt)
3956 || UNARY_CLASS_P (elt)
3957 || BINARY_CLASS_P (elt)
3958 || VL_EXP_CLASS_P (elt)
3959 || EXPRESSION_CLASS_P (elt))
3960 ? TREE_OPERAND (elt, 0) : 0))
3961 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3962 return elt;
3963
3964 for (elt = obj; elt != 0;
3965 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3966 || TREE_CODE (elt) == COND_EXPR)
3967 ? TREE_OPERAND (elt, 1)
3968 : (REFERENCE_CLASS_P (elt)
3969 || UNARY_CLASS_P (elt)
3970 || BINARY_CLASS_P (elt)
3971 || VL_EXP_CLASS_P (elt)
3972 || EXPRESSION_CLASS_P (elt))
3973 ? TREE_OPERAND (elt, 0) : 0))
3974 if (POINTER_TYPE_P (TREE_TYPE (elt))
3975 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3976 == need_type))
3977 return fold_build1 (INDIRECT_REF, need_type, elt);
3978
3979 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3980 survives until RTL generation, there will be an error. */
3981 return exp;
3982 }
3983
3984 /* TREE_LIST is special because we need to look at TREE_VALUE
3985 and TREE_CHAIN, not TREE_OPERANDS. */
3986 else if (code == TREE_LIST)
3987 {
3988 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3989 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3990 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3991 return exp;
3992
3993 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3994 }
3995 else
3996 switch (TREE_CODE_CLASS (code))
3997 {
3998 case tcc_constant:
3999 case tcc_declaration:
4000 return exp;
4001
4002 case tcc_exceptional:
4003 case tcc_unary:
4004 case tcc_binary:
4005 case tcc_comparison:
4006 case tcc_expression:
4007 case tcc_reference:
4008 case tcc_statement:
4009 switch (TREE_CODE_LENGTH (code))
4010 {
4011 case 0:
4012 return exp;
4013
4014 case 1:
4015 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4016 if (op0 == TREE_OPERAND (exp, 0))
4017 return exp;
4018
4019 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4020 break;
4021
4022 case 2:
4023 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4024 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4025
4026 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4027 return exp;
4028
4029 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4030 break;
4031
4032 case 3:
4033 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4034 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4035 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4036
4037 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4038 && op2 == TREE_OPERAND (exp, 2))
4039 return exp;
4040
4041 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4042 break;
4043
4044 case 4:
4045 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4046 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4047 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4048 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4049
4050 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4051 && op2 == TREE_OPERAND (exp, 2)
4052 && op3 == TREE_OPERAND (exp, 3))
4053 return exp;
4054
4055 new_tree
4056 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4057 break;
4058
4059 default:
4060 gcc_unreachable ();
4061 }
4062 break;
4063
4064 case tcc_vl_exp:
4065 {
4066 int i;
4067
4068 new_tree = NULL_TREE;
4069
4070 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4071 {
4072 tree op = TREE_OPERAND (exp, i);
4073 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4074 if (new_op != op)
4075 {
4076 if (!new_tree)
4077 new_tree = copy_node (exp);
4078 TREE_OPERAND (new_tree, i) = new_op;
4079 }
4080 }
4081
4082 if (new_tree)
4083 {
4084 new_tree = fold (new_tree);
4085 if (TREE_CODE (new_tree) == CALL_EXPR)
4086 process_call_operands (new_tree);
4087 }
4088 else
4089 return exp;
4090 }
4091 break;
4092
4093 default:
4094 gcc_unreachable ();
4095 }
4096
4097 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4098
4099 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4100 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4101
4102 return new_tree;
4103 }
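
/* A minimal usage sketch (hypothetical SIZE and OBJ trees, not part of GCC):
   given a self-referential size expression SIZE that mentions its record
   type through a PLACEHOLDER_EXPR, and an object OBJ of that type, the
   substitution is normally requested through the wrapper macro:

     tree size_for_obj = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, obj);

   When SIZE contains no PLACEHOLDER_EXPR the result is SIZE itself, so
   callers do not need to test CONTAINS_PLACEHOLDER_P first.  */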
4104 \f
4105
4106 /* Subroutine of stabilize_reference; this is called for subtrees of
4107 references. Any expression with side-effects must be put in a SAVE_EXPR
4108 to ensure that it is only evaluated once.
4109
4110 We don't put SAVE_EXPR nodes around everything, because assigning very
4111 simple expressions to temporaries causes us to miss good opportunities
4112 for optimizations. Among other things, the opportunity to fold in the
4113 addition of a constant into an addressing mode often gets lost, e.g.
4114 "y[i+1] += x;". In general, we take the approach that we should not make
4115 an assignment unless we are forced into it - i.e., that any non-side effect
4116 operator should be allowed, and that cse should take care of coalescing
4117 multiple utterances of the same expression should that prove fruitful. */
4118
4119 static tree
4120 stabilize_reference_1 (tree e)
4121 {
4122 tree result;
4123 enum tree_code code = TREE_CODE (e);
4124
4125 /* We cannot ignore const expressions because they might be references
4126 to a const array whose index contains side-effects. But we can
4127 ignore things that are actually constant or that have already been
4128 handled by this function. */
4129
4130 if (tree_invariant_p (e))
4131 return e;
4132
4133 switch (TREE_CODE_CLASS (code))
4134 {
4135 case tcc_exceptional:
4136 case tcc_type:
4137 case tcc_declaration:
4138 case tcc_comparison:
4139 case tcc_statement:
4140 case tcc_expression:
4141 case tcc_reference:
4142 case tcc_vl_exp:
4143 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4144 so that it will only be evaluated once. */
4145 /* The reference (r) and comparison (<) classes could be handled as
4146 below, but it is generally faster to only evaluate them once. */
4147 if (TREE_SIDE_EFFECTS (e))
4148 return save_expr (e);
4149 return e;
4150
4151 case tcc_constant:
4152 /* Constants need no processing. In fact, we should never reach
4153 here. */
4154 return e;
4155
4156 case tcc_binary:
4157 /* Division is slow and tends to be compiled with jumps,
4158 especially the division by powers of 2 that is often
4159 found inside of an array reference. So do it just once. */
4160 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4161 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4162 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4163 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4164 return save_expr (e);
4165 /* Recursively stabilize each operand. */
4166 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4167 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4168 break;
4169
4170 case tcc_unary:
4171 /* Recursively stabilize each operand. */
4172 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4173 break;
4174
4175 default:
4176 gcc_unreachable ();
4177 }
4178
4179 TREE_TYPE (result) = TREE_TYPE (e);
4180 TREE_READONLY (result) = TREE_READONLY (e);
4181 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4182 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4183
4184 return result;
4185 }
4186
4187 /* Stabilize a reference so that we can use it any number of times
4188 without causing its operands to be evaluated more than once.
4189 Returns the stabilized reference. This works by means of save_expr,
4190 so see the caveats in the comments about save_expr.
4191
4192 Also allows conversion expressions whose operands are references.
4193 Any other kind of expression is returned unchanged. */
4194
4195 tree
4196 stabilize_reference (tree ref)
4197 {
4198 tree result;
4199 enum tree_code code = TREE_CODE (ref);
4200
4201 switch (code)
4202 {
4203 case VAR_DECL:
4204 case PARM_DECL:
4205 case RESULT_DECL:
4206 /* No action is needed in this case. */
4207 return ref;
4208
4209 CASE_CONVERT:
4210 case FLOAT_EXPR:
4211 case FIX_TRUNC_EXPR:
4212 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4213 break;
4214
4215 case INDIRECT_REF:
4216 result = build_nt (INDIRECT_REF,
4217 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4218 break;
4219
4220 case COMPONENT_REF:
4221 result = build_nt (COMPONENT_REF,
4222 stabilize_reference (TREE_OPERAND (ref, 0)),
4223 TREE_OPERAND (ref, 1), NULL_TREE);
4224 break;
4225
4226 case BIT_FIELD_REF:
4227 result = build_nt (BIT_FIELD_REF,
4228 stabilize_reference (TREE_OPERAND (ref, 0)),
4229 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4230 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4231 break;
4232
4233 case ARRAY_REF:
4234 result = build_nt (ARRAY_REF,
4235 stabilize_reference (TREE_OPERAND (ref, 0)),
4236 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4237 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4238 break;
4239
4240 case ARRAY_RANGE_REF:
4241 result = build_nt (ARRAY_RANGE_REF,
4242 stabilize_reference (TREE_OPERAND (ref, 0)),
4243 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4244 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4245 break;
4246
4247 case COMPOUND_EXPR:
4248 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4249 it wouldn't be ignored. This matters when dealing with
4250 volatiles. */
4251 return stabilize_reference_1 (ref);
4252
4253 /* If arg isn't a kind of lvalue we recognize, make no change.
4254 Caller should recognize the error for an invalid lvalue. */
4255 default:
4256 return ref;
4257
4258 case ERROR_MARK:
4259 return error_mark_node;
4260 }
4261
4262 TREE_TYPE (result) = TREE_TYPE (ref);
4263 TREE_READONLY (result) = TREE_READONLY (ref);
4264 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4265 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4266
4267 return result;
4268 }
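
/* A minimal usage sketch (hypothetical LHS tree, not part of GCC): a front
   end expanding a compound assignment such as "a[i++] += 1" can stabilize
   the left-hand side once and reuse it for both the read and the write
   without evaluating the side-effecting index twice:

     tree ref = stabilize_reference (lhs);
     tree one = build_int_cst (TREE_TYPE (ref), 1);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (ref), ref, one);
     tree mod = build2 (MODIFY_EXPR, TREE_TYPE (ref), ref, sum);

   Here LHS is assumed to be an ARRAY_REF built elsewhere; the SAVE_EXPR
   that stabilize_reference_1 wraps around the index ensures the increment
   is evaluated only once.  */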
4269 \f
4270 /* Low-level constructors for expressions. */
4271
4272 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4273 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4274
4275 void
4276 recompute_tree_invariant_for_addr_expr (tree t)
4277 {
4278 tree node;
4279 bool tc = true, se = false;
4280
4281 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4282
4283 /* We started out assuming this address is both invariant and constant, and
4284 has no side effects. Now go down any handled components and see if
4285 any of them involve offsets that are either non-constant or non-invariant.
4286 Also check for side-effects.
4287
4288 ??? Note that this code makes no attempt to deal with the case where
4289 taking the address of something causes a copy due to misalignment. */
4290
4291 #define UPDATE_FLAGS(NODE) \
4292 do { tree _node = (NODE); \
4293 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4294 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4295
4296 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4297 node = TREE_OPERAND (node, 0))
4298 {
4299 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4300 array reference (probably made temporarily by the G++ front end),
4301 so ignore all the operands. */
4302 if ((TREE_CODE (node) == ARRAY_REF
4303 || TREE_CODE (node) == ARRAY_RANGE_REF)
4304 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4305 {
4306 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4307 if (TREE_OPERAND (node, 2))
4308 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4309 if (TREE_OPERAND (node, 3))
4310 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4311 }
4312 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4313 FIELD_DECL, apparently. The G++ front end can put something else
4314 there, at least temporarily. */
4315 else if (TREE_CODE (node) == COMPONENT_REF
4316 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4317 {
4318 if (TREE_OPERAND (node, 2))
4319 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4320 }
4321 }
4322
4323 node = lang_hooks.expr_to_decl (node, &tc, &se);
4324
4325 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4326 the address, since &(*a)->b is a form of addition. If it's a constant, the
4327 address is constant too. If it's a decl, its address is constant if the
4328 decl is static. Everything else is not constant and, furthermore,
4329 taking the address of a volatile variable is not volatile. */
4330 if (TREE_CODE (node) == INDIRECT_REF
4331 || TREE_CODE (node) == MEM_REF)
4332 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4333 else if (CONSTANT_CLASS_P (node))
4334 ;
4335 else if (DECL_P (node))
4336 tc &= (staticp (node) != NULL_TREE);
4337 else
4338 {
4339 tc = false;
4340 se |= TREE_SIDE_EFFECTS (node);
4341 }
4342
4343
4344 TREE_CONSTANT (t) = tc;
4345 TREE_SIDE_EFFECTS (t) = se;
4346 #undef UPDATE_FLAGS
4347 }
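
/* A minimal usage sketch (hypothetical ADDR and NEW_OPERAND trees, not part
   of GCC): after rewriting the operand of an existing ADDR_EXPR in place,
   the cached TREE_CONSTANT and TREE_SIDE_EFFECTS bits may be stale and can
   be recomputed explicitly:

     TREE_OPERAND (addr, 0) = new_operand;
     recompute_tree_invariant_for_addr_expr (addr);

   build1 already performs this recomputation when it creates an ADDR_EXPR,
   so the explicit call is only needed when an address expression is
   modified after the fact (see build_invariant_address below).  */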
4348
4349 /* Build an expression of code CODE, data type TYPE, and operands as
4350 specified. Expressions and reference nodes can be created this way.
4351 Constants, decls, types and misc nodes cannot be.
4352
4353 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4354 enough for all extant tree codes. */
4355
4356 tree
4357 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4358 {
4359 tree t;
4360
4361 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4362
4363 t = make_node_stat (code PASS_MEM_STAT);
4364 TREE_TYPE (t) = tt;
4365
4366 return t;
4367 }
4368
4369 tree
4370 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4371 {
4372 int length = sizeof (struct tree_exp);
4373 tree t;
4374
4375 record_node_allocation_statistics (code, length);
4376
4377 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4378
4379 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4380
4381 memset (t, 0, sizeof (struct tree_common));
4382
4383 TREE_SET_CODE (t, code);
4384
4385 TREE_TYPE (t) = type;
4386 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4387 TREE_OPERAND (t, 0) = node;
4388 if (node && !TYPE_P (node))
4389 {
4390 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4391 TREE_READONLY (t) = TREE_READONLY (node);
4392 }
4393
4394 if (TREE_CODE_CLASS (code) == tcc_statement)
4395 TREE_SIDE_EFFECTS (t) = 1;
4396 else switch (code)
4397 {
4398 case VA_ARG_EXPR:
4399 /* All of these have side-effects, no matter what their
4400 operands are. */
4401 TREE_SIDE_EFFECTS (t) = 1;
4402 TREE_READONLY (t) = 0;
4403 break;
4404
4405 case INDIRECT_REF:
4406 /* Whether a dereference is readonly has nothing to do with whether
4407 its operand is readonly. */
4408 TREE_READONLY (t) = 0;
4409 break;
4410
4411 case ADDR_EXPR:
4412 if (node)
4413 recompute_tree_invariant_for_addr_expr (t);
4414 break;
4415
4416 default:
4417 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4418 && node && !TYPE_P (node)
4419 && TREE_CONSTANT (node))
4420 TREE_CONSTANT (t) = 1;
4421 if (TREE_CODE_CLASS (code) == tcc_reference
4422 && node && TREE_THIS_VOLATILE (node))
4423 TREE_THIS_VOLATILE (t) = 1;
4424 break;
4425 }
4426
4427 return t;
4428 }
4429
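/* Helper for the buildN functions below.  Store ARG<N> as operand N of T
   and, when it is a non-type operand, fold its flags into the caller's
   local SIDE_EFFECTS, READ_ONLY and CONSTANT accumulators.  */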
4430 #define PROCESS_ARG(N) \
4431 do { \
4432 TREE_OPERAND (t, N) = arg##N; \
4433 if (arg##N && !TYPE_P (arg##N)) \
4434 { \
4435 if (TREE_SIDE_EFFECTS (arg##N)) \
4436 side_effects = 1; \
4437 if (!TREE_READONLY (arg##N) \
4438 && !CONSTANT_CLASS_P (arg##N)) \
4439 (void) (read_only = 0); \
4440 if (!TREE_CONSTANT (arg##N)) \
4441 (void) (constant = 0); \
4442 } \
4443 } while (0)
4444
4445 tree
4446 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4447 {
4448 bool constant, read_only, side_effects;
4449 tree t;
4450
4451 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4452
4453 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4454 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4455 /* When sizetype precision doesn't match that of pointers
4456 we need to be able to build explicit extensions or truncations
4457 of the offset argument. */
4458 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4459 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4460 && TREE_CODE (arg1) == INTEGER_CST);
4461
4462 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4463 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4464 && ptrofftype_p (TREE_TYPE (arg1)));
4465
4466 t = make_node_stat (code PASS_MEM_STAT);
4467 TREE_TYPE (t) = tt;
4468
4469 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4470 result based on those same flags for the arguments. But if the
4471 arguments aren't really even `tree' expressions, we shouldn't be trying
4472 to do this. */
4473
4474 /* Expressions without side effects may be constant if their
4475 arguments are as well. */
4476 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4477 || TREE_CODE_CLASS (code) == tcc_binary);
4478 read_only = 1;
4479 side_effects = TREE_SIDE_EFFECTS (t);
4480
4481 PROCESS_ARG (0);
4482 PROCESS_ARG (1);
4483
4484 TREE_SIDE_EFFECTS (t) = side_effects;
4485 if (code == MEM_REF)
4486 {
4487 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4488 {
4489 tree o = TREE_OPERAND (arg0, 0);
4490 TREE_READONLY (t) = TREE_READONLY (o);
4491 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4492 }
4493 }
4494 else
4495 {
4496 TREE_READONLY (t) = read_only;
4497 TREE_CONSTANT (t) = constant;
4498 TREE_THIS_VOLATILE (t)
4499 = (TREE_CODE_CLASS (code) == tcc_reference
4500 && arg0 && TREE_THIS_VOLATILE (arg0));
4501 }
4502
4503 return t;
4504 }
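
/* A minimal usage sketch (hypothetical operands, not part of GCC): building
   a binary expression with build2 propagates the flags computed above, so a
   sum of two INTEGER_CSTs comes back with TREE_CONSTANT set and without
   TREE_SIDE_EFFECTS:

     tree four = build_int_cst (integer_type_node, 4);
     tree five = build_int_cst (integer_type_node, 5);
     tree sum = build2 (PLUS_EXPR, integer_type_node, four, five);

   Note that build2 does no folding; callers that want the simplified
   constant 9 should use fold_build2 instead.  */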
4505
4506
4507 tree
4508 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4509 tree arg2 MEM_STAT_DECL)
4510 {
4511 bool constant, read_only, side_effects;
4512 tree t;
4513
4514 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4515 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4516
4517 t = make_node_stat (code PASS_MEM_STAT);
4518 TREE_TYPE (t) = tt;
4519
4520 read_only = 1;
4521
4522 /* As a special exception, if COND_EXPR has NULL branches, we
4523 assume that it is a gimple statement and always consider
4524 it to have side effects. */
4525 if (code == COND_EXPR
4526 && tt == void_type_node
4527 && arg1 == NULL_TREE
4528 && arg2 == NULL_TREE)
4529 side_effects = true;
4530 else
4531 side_effects = TREE_SIDE_EFFECTS (t);
4532
4533 PROCESS_ARG (0);
4534 PROCESS_ARG (1);
4535 PROCESS_ARG (2);
4536
4537 if (code == COND_EXPR)
4538 TREE_READONLY (t) = read_only;
4539
4540 TREE_SIDE_EFFECTS (t) = side_effects;
4541 TREE_THIS_VOLATILE (t)
4542 = (TREE_CODE_CLASS (code) == tcc_reference
4543 && arg0 && TREE_THIS_VOLATILE (arg0));
4544
4545 return t;
4546 }
4547
4548 tree
4549 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4550 tree arg2, tree arg3 MEM_STAT_DECL)
4551 {
4552 bool constant, read_only, side_effects;
4553 tree t;
4554
4555 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4556
4557 t = make_node_stat (code PASS_MEM_STAT);
4558 TREE_TYPE (t) = tt;
4559
4560 side_effects = TREE_SIDE_EFFECTS (t);
4561
4562 PROCESS_ARG (0);
4563 PROCESS_ARG (1);
4564 PROCESS_ARG (2);
4565 PROCESS_ARG (3);
4566
4567 TREE_SIDE_EFFECTS (t) = side_effects;
4568 TREE_THIS_VOLATILE (t)
4569 = (TREE_CODE_CLASS (code) == tcc_reference
4570 && arg0 && TREE_THIS_VOLATILE (arg0));
4571
4572 return t;
4573 }
4574
4575 tree
4576 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4577 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4578 {
4579 bool constant, read_only, side_effects;
4580 tree t;
4581
4582 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4583
4584 t = make_node_stat (code PASS_MEM_STAT);
4585 TREE_TYPE (t) = tt;
4586
4587 side_effects = TREE_SIDE_EFFECTS (t);
4588
4589 PROCESS_ARG (0);
4590 PROCESS_ARG (1);
4591 PROCESS_ARG (2);
4592 PROCESS_ARG (3);
4593 PROCESS_ARG (4);
4594
4595 TREE_SIDE_EFFECTS (t) = side_effects;
4596 if (code == TARGET_MEM_REF)
4597 {
4598 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4599 {
4600 tree o = TREE_OPERAND (arg0, 0);
4601 TREE_READONLY (t) = TREE_READONLY (o);
4602 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4603 }
4604 }
4605 else
4606 TREE_THIS_VOLATILE (t)
4607 = (TREE_CODE_CLASS (code) == tcc_reference
4608 && arg0 && TREE_THIS_VOLATILE (arg0));
4609
4610 return t;
4611 }
4612
4613 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4614 on the pointer PTR. */
4615
4616 tree
4617 build_simple_mem_ref_loc (location_t loc, tree ptr)
4618 {
4619 HOST_WIDE_INT offset = 0;
4620 tree ptype = TREE_TYPE (ptr);
4621 tree tem;
4622 /* For convenience allow addresses that collapse to a simple base
4623 and offset. */
4624 if (TREE_CODE (ptr) == ADDR_EXPR
4625 && (handled_component_p (TREE_OPERAND (ptr, 0))
4626 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4627 {
4628 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4629 gcc_assert (ptr);
4630 ptr = build_fold_addr_expr (ptr);
4631 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4632 }
4633 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4634 ptr, build_int_cst (ptype, offset));
4635 SET_EXPR_LOCATION (tem, loc);
4636 return tem;
4637 }
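
/* A minimal usage sketch (hypothetical PTR tree, not part of GCC): for a
   pointer decl or SSA name PTR of type "int *", the call

     tree deref = build_simple_mem_ref_loc (input_location, ptr);

   yields a MEM_REF of the pointed-to type with a zero offset, equivalent to
   build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)), ptr,
   build_int_cst (TREE_TYPE (ptr), 0)) when PTR is not an ADDR_EXPR wrapping
   a handled component or MEM_REF.  */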
4638
4639 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4640
4641 offset_int
4642 mem_ref_offset (const_tree t)
4643 {
4644 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4645 }
4646
4647 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4648 offsetted by OFFSET units. */
4649
4650 tree
4651 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4652 {
4653 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4654 build_fold_addr_expr (base),
4655 build_int_cst (ptr_type_node, offset));
4656 tree addr = build1 (ADDR_EXPR, type, ref);
4657 recompute_tree_invariant_for_addr_expr (addr);
4658 return addr;
4659 }
4660
4661 /* Similar except don't specify the TREE_TYPE
4662 and leave the TREE_SIDE_EFFECTS as 0.
4663 It is permissible for arguments to be null,
4664 or even garbage if their values do not matter. */
4665
4666 tree
4667 build_nt (enum tree_code code, ...)
4668 {
4669 tree t;
4670 int length;
4671 int i;
4672 va_list p;
4673
4674 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4675
4676 va_start (p, code);
4677
4678 t = make_node (code);
4679 length = TREE_CODE_LENGTH (code);
4680
4681 for (i = 0; i < length; i++)
4682 TREE_OPERAND (t, i) = va_arg (p, tree);
4683
4684 va_end (p);
4685 return t;
4686 }
4687
4688 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4689 tree vec. */
4690
4691 tree
4692 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4693 {
4694 tree ret, t;
4695 unsigned int ix;
4696
4697 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4698 CALL_EXPR_FN (ret) = fn;
4699 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4700 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4701 CALL_EXPR_ARG (ret, ix) = t;
4702 return ret;
4703 }
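
/* A minimal usage sketch (hypothetical FN_ADDR and argument trees, not part
   of GCC): collect the arguments into a GC-allocated vector and build the
   bare CALL_EXPR shell:

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg0);
     vec_safe_push (args, arg1);
     tree call = build_nt_call_vec (fn_addr, args);

   As with build_nt, no flag computation or folding is performed; FN_ADDR is
   assumed to be the ADDR_EXPR of the callee, built elsewhere.  */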
4704 \f
4705 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4706 We do NOT enter this node in any sort of symbol table.
4707
4708 LOC is the location of the decl.
4709
4710 layout_decl is used to set up the decl's storage layout.
4711 Other slots are initialized to 0 or null pointers. */
4712
4713 tree
4714 build_decl_stat (location_t loc, enum tree_code code, tree name,
4715 tree type MEM_STAT_DECL)
4716 {
4717 tree t;
4718
4719 t = make_node_stat (code PASS_MEM_STAT);
4720 DECL_SOURCE_LOCATION (t) = loc;
4721
4722 /* if (type == error_mark_node)
4723 type = integer_type_node; */
4724 /* That is not done, deliberately, so that having error_mark_node
4725 as the type can suppress useless errors in the use of this variable. */
4726
4727 DECL_NAME (t) = name;
4728 TREE_TYPE (t) = type;
4729
4730 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4731 layout_decl (t, 0);
4732
4733 return t;
4734 }
4735
4736 /* Builds and returns function declaration with NAME and TYPE. */
4737
4738 tree
4739 build_fn_decl (const char *name, tree type)
4740 {
4741 tree id = get_identifier (name);
4742 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4743
4744 DECL_EXTERNAL (decl) = 1;
4745 TREE_PUBLIC (decl) = 1;
4746 DECL_ARTIFICIAL (decl) = 1;
4747 TREE_NOTHROW (decl) = 1;
4748
4749 return decl;
4750 }
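
/* A minimal usage sketch (hypothetical "my_helper" function, not part of
   GCC): declare an external "int my_helper (int)" the way instrumentation
   entry points are typically introduced:

     tree fntype = build_function_type_list (integer_type_node,
                                             integer_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("my_helper", fntype);

   The resulting decl is external, public, artificial and nothrow, as set
   above; callers declaring a function that can throw must clear
   TREE_NOTHROW themselves.  */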
4751
4752 vec<tree, va_gc> *all_translation_units;
4753
4754 /* Builds a new translation-unit decl with name NAME, queues it in the
4755 global list of translation-unit decls and returns it. */
4756
4757 tree
4758 build_translation_unit_decl (tree name)
4759 {
4760 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4761 name, NULL_TREE);
4762 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4763 vec_safe_push (all_translation_units, tu);
4764 return tu;
4765 }
4766
4767 \f
4768 /* BLOCK nodes are used to represent the structure of binding contours
4769 and declarations, once those contours have been exited and their contents
4770 compiled. This information is used for outputting debugging info. */
4771
4772 tree
4773 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4774 {
4775 tree block = make_node (BLOCK);
4776
4777 BLOCK_VARS (block) = vars;
4778 BLOCK_SUBBLOCKS (block) = subblocks;
4779 BLOCK_SUPERCONTEXT (block) = supercontext;
4780 BLOCK_CHAIN (block) = chain;
4781 return block;
4782 }
4783
4784 \f
4785 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4786
4787 LOC is the location to use in tree T. */
4788
4789 void
4790 protected_set_expr_location (tree t, location_t loc)
4791 {
4792 if (CAN_HAVE_LOCATION_P (t))
4793 SET_EXPR_LOCATION (t, loc);
4794 }
4795 \f
4796 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4797 is ATTRIBUTE. */
4798
4799 tree
4800 build_decl_attribute_variant (tree ddecl, tree attribute)
4801 {
4802 DECL_ATTRIBUTES (ddecl) = attribute;
4803 return ddecl;
4804 }
4805
4806 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4807 is ATTRIBUTE and its qualifiers are QUALS.
4808
4809 Record such modified types already made so we don't make duplicates. */
4810
4811 tree
4812 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4813 {
4814 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4815 {
4816 inchash::hash hstate;
4817 tree ntype;
4818 int i;
4819 tree t;
4820 enum tree_code code = TREE_CODE (ttype);
4821
4822 /* Building a distinct copy of a tagged type is inappropriate; it
4823 causes breakage in code that expects there to be a one-to-one
4824 relationship between a struct and its fields.
4825 build_duplicate_type is another solution (as used in
4826 handle_transparent_union_attribute), but that doesn't play well
4827 with the stronger C++ type identity model. */
4828 if (TREE_CODE (ttype) == RECORD_TYPE
4829 || TREE_CODE (ttype) == UNION_TYPE
4830 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4831 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4832 {
4833 warning (OPT_Wattributes,
4834 "ignoring attributes applied to %qT after definition",
4835 TYPE_MAIN_VARIANT (ttype));
4836 return build_qualified_type (ttype, quals);
4837 }
4838
4839 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4840 ntype = build_distinct_type_copy (ttype);
4841
4842 TYPE_ATTRIBUTES (ntype) = attribute;
4843
4844 hstate.add_int (code);
4845 if (TREE_TYPE (ntype))
4846 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4847 attribute_hash_list (attribute, hstate);
4848
4849 switch (TREE_CODE (ntype))
4850 {
4851 case FUNCTION_TYPE:
4852 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4853 break;
4854 case ARRAY_TYPE:
4855 if (TYPE_DOMAIN (ntype))
4856 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4857 break;
4858 case INTEGER_TYPE:
4859 t = TYPE_MAX_VALUE (ntype);
4860 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4861 hstate.add_object (TREE_INT_CST_ELT (t, i));
4862 break;
4863 case REAL_TYPE:
4864 case FIXED_POINT_TYPE:
4865 {
4866 unsigned int precision = TYPE_PRECISION (ntype);
4867 hstate.add_object (precision);
4868 }
4869 break;
4870 default:
4871 break;
4872 }
4873
4874 ntype = type_hash_canon (hstate.end(), ntype);
4875
4876 /* If the target-dependent attributes make NTYPE different from
4877 its canonical type, we will need to use structural equality
4878 checks for this type. */
4879 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4880 || !comp_type_attributes (ntype, ttype))
4881 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4882 else if (TYPE_CANONICAL (ntype) == ntype)
4883 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4884
4885 ttype = build_qualified_type (ntype, quals);
4886 }
4887 else if (TYPE_QUALS (ttype) != quals)
4888 ttype = build_qualified_type (ttype, quals);
4889
4890 return ttype;
4891 }
4892
4893 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4894 the same. */
4895
4896 static bool
4897 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4898 {
4899 tree cl1, cl2;
4900 for (cl1 = clauses1, cl2 = clauses2;
4901 cl1 && cl2;
4902 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4903 {
4904 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4905 return false;
4906 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4907 {
4908 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4909 OMP_CLAUSE_DECL (cl2)) != 1)
4910 return false;
4911 }
4912 switch (OMP_CLAUSE_CODE (cl1))
4913 {
4914 case OMP_CLAUSE_ALIGNED:
4915 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4916 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4917 return false;
4918 break;
4919 case OMP_CLAUSE_LINEAR:
4920 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4921 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4922 return false;
4923 break;
4924 case OMP_CLAUSE_SIMDLEN:
4925 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4926 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4927 return false;
4928 default:
4929 break;
4930 }
4931 }
4932 return true;
4933 }
4934
4935 /* Compare two constructor-element-type constants. Return true if the lists
4936 are known to be equal; otherwise return false. */
4937
4938 static bool
4939 simple_cst_list_equal (const_tree l1, const_tree l2)
4940 {
4941 while (l1 != NULL_TREE && l2 != NULL_TREE)
4942 {
4943 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4944 return false;
4945
4946 l1 = TREE_CHAIN (l1);
4947 l2 = TREE_CHAIN (l2);
4948 }
4949
4950 return l1 == l2;
4951 }
4952
4953 /* Compare two identifier nodes representing attributes. Either one may
4954 be in wrapped __ATTR__ form. Return true if they are the same, false
4955 otherwise. */
4956
4957 static bool
4958 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4959 {
4960 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4961 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4962 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4963
4964 /* Identifiers can be compared directly for equality. */
4965 if (attr1 == attr2)
4966 return true;
4967
4968 /* If they are not equal, one may still be in the form
4969 'text' while the other is in the form '__text__'. TODO:
4970 If we were storing attributes in normalized 'text' form, then
4971 this could all go away and we could take full advantage of
4972 the fact that we're comparing identifiers. :-) */
4973 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4974 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4975
4976 if (attr2_len == attr1_len + 4)
4977 {
4978 const char *p = IDENTIFIER_POINTER (attr2);
4979 const char *q = IDENTIFIER_POINTER (attr1);
4980 if (p[0] == '_' && p[1] == '_'
4981 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4982 && strncmp (q, p + 2, attr1_len) == 0)
4983 return true;
4984 }
4985 else if (attr2_len + 4 == attr1_len)
4986 {
4987 const char *p = IDENTIFIER_POINTER (attr2);
4988 const char *q = IDENTIFIER_POINTER (attr1);
4989 if (q[0] == '_' && q[1] == '_'
4990 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4991 && strncmp (q + 2, p, attr2_len) == 0)
4992 return true;
4993 }
4994
4995 return false;
4996 }
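
/* For example, cmp_attrib_identifiers treats get_identifier ("format") and
   get_identifier ("__format__") as equal, but "format" and "__format" as
   different, since only the fully wrapped "__...__" spelling is accepted as
   an alias.  */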
4997
4998 /* Compare two attributes for their value identity. Return true if the
4999 attribute values are known to be equal; otherwise return false. */
5000
5001 bool
5002 attribute_value_equal (const_tree attr1, const_tree attr2)
5003 {
5004 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
5005 return true;
5006
5007 if (TREE_VALUE (attr1) != NULL_TREE
5008 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
5009 && TREE_VALUE (attr2) != NULL_TREE
5010 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
5011 {
5012 /* Handle attribute format. */
5013 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
5014 {
5015 attr1 = TREE_VALUE (attr1);
5016 attr2 = TREE_VALUE (attr2);
5017 /* Compare the archetypes (printf/scanf/strftime/...). */
5018 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5019 TREE_VALUE (attr2)))
5020 return false;
5021 /* Archetypes are the same. Compare the rest. */
5022 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5023 TREE_CHAIN (attr2)) == 1);
5024 }
5025 return (simple_cst_list_equal (TREE_VALUE (attr1),
5026 TREE_VALUE (attr2)) == 1);
5027 }
5028
5029 if ((flag_openmp || flag_openmp_simd)
5030 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5031 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5032 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5033 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5034 TREE_VALUE (attr2));
5035
5036 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5037 }
5038
5039 /* Return 0 if the attributes for two types are incompatible, 1 if they
5040 are compatible, and 2 if they are nearly compatible (which causes a
5041 warning to be generated). */
5042 int
5043 comp_type_attributes (const_tree type1, const_tree type2)
5044 {
5045 const_tree a1 = TYPE_ATTRIBUTES (type1);
5046 const_tree a2 = TYPE_ATTRIBUTES (type2);
5047 const_tree a;
5048
5049 if (a1 == a2)
5050 return 1;
5051 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5052 {
5053 const struct attribute_spec *as;
5054 const_tree attr;
5055
5056 as = lookup_attribute_spec (get_attribute_name (a));
5057 if (!as || as->affects_type_identity == false)
5058 continue;
5059
5060 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5061 if (!attr || !attribute_value_equal (a, attr))
5062 break;
5063 }
5064 if (!a)
5065 {
5066 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5067 {
5068 const struct attribute_spec *as;
5069
5070 as = lookup_attribute_spec (get_attribute_name (a));
5071 if (!as || as->affects_type_identity == false)
5072 continue;
5073
5074 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5075 break;
5076 /* We don't need to compare trees again, as we did this
5077 already in the first loop. */
5078 }
5079 /* All identity-affecting attributes are equal, so there
5080 is no need to call the target hook for comparison. */
5081 if (!a)
5082 return 1;
5083 }
5084 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5085 return 0;
5086 /* As some type combinations - like the default calling convention - might
5087 be compatible, we have to call the target hook to get the final result. */
5088 return targetm.comp_type_attributes (type1, type2);
5089 }
5090
5091 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5092 is ATTRIBUTE.
5093
5094 Record such modified types already made so we don't make duplicates. */
5095
5096 tree
5097 build_type_attribute_variant (tree ttype, tree attribute)
5098 {
5099 return build_type_attribute_qual_variant (ttype, attribute,
5100 TYPE_QUALS (ttype));
5101 }
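
/* A minimal usage sketch (hypothetical TYPE and attribute list, not part of
   GCC): attach an argument-less attribute to an existing type, reusing a
   previously built variant when one exists:

     tree attr = tree_cons (get_identifier ("may_alias"), NULL_TREE,
                            TYPE_ATTRIBUTES (type));
     tree newtype = build_type_attribute_variant (type, attr);

   Whether the attribute affects type identity is decided by its
   attribute_spec, as checked in comp_type_attributes above.  */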
5102
5103
5104 /* Reset the expression *EXPR_P, a size or position.
5105
5106 ??? We could reset all non-constant sizes or positions. But it's cheap
5107 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5108
5109 We need to reset self-referential sizes or positions because they cannot
5110 be gimplified and thus can contain a CALL_EXPR after the gimplification
5111 is finished, which will run afoul of LTO streaming. And they need to be
5112 reset to something essentially dummy but not constant, so as to preserve
5113 the properties of the object they are attached to. */
5114
5115 static inline void
5116 free_lang_data_in_one_sizepos (tree *expr_p)
5117 {
5118 tree expr = *expr_p;
5119 if (CONTAINS_PLACEHOLDER_P (expr))
5120 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5121 }
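
/* For example, a self-referential TYPE_SIZE (one that refers to the object
   being laid out through a PLACEHOLDER_EXPR, as with Ada's variable-sized
   records) is reset here to a bare

     build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr))

   which stays non-constant and self-referential but no longer carries any
   front-end expression that could trip up LTO streaming.  */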
5122
5123
5124 /* Reset all the fields in a binfo node BINFO. We only keep
5125 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5126
5127 static void
5128 free_lang_data_in_binfo (tree binfo)
5129 {
5130 unsigned i;
5131 tree t;
5132
5133 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5134
5135 BINFO_VIRTUALS (binfo) = NULL_TREE;
5136 BINFO_BASE_ACCESSES (binfo) = NULL;
5137 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5138 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5139
5140 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5141 free_lang_data_in_binfo (t);
5142 }
5143
5144
5145 /* Reset all language specific information still present in TYPE. */
5146
5147 static void
5148 free_lang_data_in_type (tree type)
5149 {
5150 gcc_assert (TYPE_P (type));
5151
5152 /* Give the FE a chance to remove its own data first. */
5153 lang_hooks.free_lang_data (type);
5154
5155 TREE_LANG_FLAG_0 (type) = 0;
5156 TREE_LANG_FLAG_1 (type) = 0;
5157 TREE_LANG_FLAG_2 (type) = 0;
5158 TREE_LANG_FLAG_3 (type) = 0;
5159 TREE_LANG_FLAG_4 (type) = 0;
5160 TREE_LANG_FLAG_5 (type) = 0;
5161 TREE_LANG_FLAG_6 (type) = 0;
5162
5163 if (TREE_CODE (type) == FUNCTION_TYPE)
5164 {
5165 /* Remove the const and volatile qualifiers from arguments. The
5166 C++ front end removes them, but the C front end does not,
5167 leading to false ODR violation errors when merging two
5168 instances of the same function signature compiled by
5169 different front ends. */
5170 tree p;
5171
5172 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5173 {
5174 tree arg_type = TREE_VALUE (p);
5175
5176 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5177 {
5178 int quals = TYPE_QUALS (arg_type)
5179 & ~TYPE_QUAL_CONST
5180 & ~TYPE_QUAL_VOLATILE;
5181 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5182 free_lang_data_in_type (TREE_VALUE (p));
5183 }
5184 /* C++ FE uses TREE_PURPOSE to store initial values. */
5185 TREE_PURPOSE (p) = NULL;
5186 }
5187 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5188 TYPE_MINVAL (type) = NULL;
5189 }
5190 if (TREE_CODE (type) == METHOD_TYPE)
5191 {
5192 tree p;
5193
5194 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5195 {
5196 /* C++ FE uses TREE_PURPOSE to store initial values. */
5197 TREE_PURPOSE (p) = NULL;
5198 }
5199 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5200 TYPE_MINVAL (type) = NULL;
5201 }
5202
5203 /* Remove members that are not actually FIELD_DECLs from the field
5204 list of an aggregate. These occur in C++. */
5205 if (RECORD_OR_UNION_TYPE_P (type))
5206 {
5207 tree prev, member;
5208
5209 /* Note that TYPE_FIELDS can be shared across distinct
5210 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5211 to be removed, we cannot set its TREE_CHAIN to NULL.
5212 Otherwise, we would not be able to find all the other fields
5213 in the other instances of this TREE_TYPE.
5214
5215 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5216 prev = NULL_TREE;
5217 member = TYPE_FIELDS (type);
5218 while (member)
5219 {
5220 if (TREE_CODE (member) == FIELD_DECL
5221 || (TREE_CODE (member) == TYPE_DECL
5222 && !DECL_IGNORED_P (member)
5223 && debug_info_level > DINFO_LEVEL_TERSE
5224 && !is_redundant_typedef (member)))
5225 {
5226 if (prev)
5227 TREE_CHAIN (prev) = member;
5228 else
5229 TYPE_FIELDS (type) = member;
5230 prev = member;
5231 }
5232
5233 member = TREE_CHAIN (member);
5234 }
5235
5236 if (prev)
5237 TREE_CHAIN (prev) = NULL_TREE;
5238 else
5239 TYPE_FIELDS (type) = NULL_TREE;
5240
5241 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5242 and dangles the pointer from time to time. */
5243 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5244 TYPE_VFIELD (type) = NULL_TREE;
5245
5246 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5247 to enable ODR warnings about different method lists, doing so
5248 would impractically increase the size of the streamed LTO data.
5249 Keep a record of whether TYPE_METHODS was non-NULL, since that is
5250 used by function.c and the pretty printers. */
5251 if (TYPE_METHODS (type))
5252 TYPE_METHODS (type) = error_mark_node;
5253 if (TYPE_BINFO (type))
5254 {
5255 free_lang_data_in_binfo (TYPE_BINFO (type));
5256 /* We need to preserve link to bases and virtual table for all
5257 polymorphic types to make devirtualization machinery working.
5258 Debug output cares only about bases, but output also
5259 virtual table pointers so merging of -fdevirtualize and
5260 -fno-devirtualize units is easier. */
5261 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5262 || !flag_devirtualize)
5263 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5264 && !BINFO_VTABLE (TYPE_BINFO (type)))
5265 || debug_info_level != DINFO_LEVEL_NONE))
5266 TYPE_BINFO (type) = NULL;
5267 }
5268 }
5269 else
5270 {
5271 /* For non-aggregate types, clear out the language slot (which
5272 overloads TYPE_BINFO). */
5273 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5274
5275 if (INTEGRAL_TYPE_P (type)
5276 || SCALAR_FLOAT_TYPE_P (type)
5277 || FIXED_POINT_TYPE_P (type))
5278 {
5279 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5280 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5281 }
5282 }
5283
5284 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5285 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5286
5287 if (TYPE_CONTEXT (type)
5288 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5289 {
5290 tree ctx = TYPE_CONTEXT (type);
5291 do
5292 {
5293 ctx = BLOCK_SUPERCONTEXT (ctx);
5294 }
5295 while (ctx && TREE_CODE (ctx) == BLOCK);
5296 TYPE_CONTEXT (type) = ctx;
5297 }
5298 }
5299
5300
5301 /* Return true if DECL may need an assembler name to be set. */
5302
5303 static inline bool
5304 need_assembler_name_p (tree decl)
5305 {
5306 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5307 Rule merging. This makes type_odr_p return true on those types during
5308 LTO and, by comparing the mangled names, we can tell which types are
5309 intended to be equivalent across compilation units.
5310
5311 We do not store names of types for which type_in_anonymous_namespace_p is true.
5312
5313 Record, union and enumeration types have linkage that allows us
5314 to check type_in_anonymous_namespace_p. We do not mangle compound types
5315 that can always be compared structurally.
5316
5317 Similarly for builtin types, we compare properties of their main variant.
5318 A special case is integer types, where mangling does distinguish
5319 char/signed char/unsigned char etc. Storing names for these allows
5320 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5321 See cp/mangle.c:write_builtin_type for details. */
5322
5323 if (flag_lto_odr_type_mering
5324 && TREE_CODE (decl) == TYPE_DECL
5325 && DECL_NAME (decl)
5326 && decl == TYPE_NAME (TREE_TYPE (decl))
5327 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5328 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5329 && (type_with_linkage_p (TREE_TYPE (decl))
5330 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5331 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5332 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5333 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5334 if (TREE_CODE (decl) != FUNCTION_DECL
5335 && TREE_CODE (decl) != VAR_DECL)
5336 return false;
5337
5338 /* If DECL already has its assembler name set, it does not need a
5339 new one. */
5340 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5341 || DECL_ASSEMBLER_NAME_SET_P (decl))
5342 return false;
5343
5344 /* Abstract decls do not need an assembler name. */
5345 if (DECL_ABSTRACT_P (decl))
5346 return false;
5347
5348 /* For VAR_DECLs, only static, public and external symbols need an
5349 assembler name. */
5350 if (TREE_CODE (decl) == VAR_DECL
5351 && !TREE_STATIC (decl)
5352 && !TREE_PUBLIC (decl)
5353 && !DECL_EXTERNAL (decl))
5354 return false;
5355
5356 if (TREE_CODE (decl) == FUNCTION_DECL)
5357 {
5358 /* Do not set assembler name on builtins. Allow RTL expansion to
5359 decide whether to expand inline or via a regular call. */
5360 if (DECL_BUILT_IN (decl)
5361 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5362 return false;
5363
5364 /* Functions represented in the callgraph need an assembler name. */
5365 if (cgraph_node::get (decl) != NULL)
5366 return true;
5367
5368 /* Unused and not public functions don't need an assembler name. */
5369 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5370 return false;
5371 }
5372
5373 return true;
5374 }
5375
5376
5377 /* Reset all language specific information still present in symbol
5378 DECL. */
5379
5380 static void
5381 free_lang_data_in_decl (tree decl)
5382 {
5383 gcc_assert (DECL_P (decl));
5384
5385 /* Give the FE a chance to remove its own data first. */
5386 lang_hooks.free_lang_data (decl);
5387
5388 TREE_LANG_FLAG_0 (decl) = 0;
5389 TREE_LANG_FLAG_1 (decl) = 0;
5390 TREE_LANG_FLAG_2 (decl) = 0;
5391 TREE_LANG_FLAG_3 (decl) = 0;
5392 TREE_LANG_FLAG_4 (decl) = 0;
5393 TREE_LANG_FLAG_5 (decl) = 0;
5394 TREE_LANG_FLAG_6 (decl) = 0;
5395
5396 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5397 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5398 if (TREE_CODE (decl) == FIELD_DECL)
5399 {
5400 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5401 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5402 DECL_QUALIFIER (decl) = NULL_TREE;
5403 }
5404
5405 if (TREE_CODE (decl) == FUNCTION_DECL)
5406 {
5407 struct cgraph_node *node;
5408 if (!(node = cgraph_node::get (decl))
5409 || (!node->definition && !node->clones))
5410 {
5411 if (node)
5412 node->release_body ();
5413 else
5414 {
5415 release_function_body (decl);
5416 DECL_ARGUMENTS (decl) = NULL;
5417 DECL_RESULT (decl) = NULL;
5418 DECL_INITIAL (decl) = error_mark_node;
5419 }
5420 }
5421 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5422 {
5423 tree t;
5424
5425 /* If DECL has a gimple body, then the context for its
5426 arguments must be DECL. Otherwise, it doesn't really
5427 matter, as we will not be emitting any code for DECL. In
5428 general, there may be other instances of DECL created by
5429 the front end and since PARM_DECLs are generally shared,
5430 their DECL_CONTEXT changes as the replicas of DECL are
5431 created. The only time where DECL_CONTEXT is important
5432 is for the FUNCTION_DECLs that have a gimple body (since
5433 the PARM_DECL will be used in the function's body). */
5434 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5435 DECL_CONTEXT (t) = decl;
5436 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5437 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5438 = target_option_default_node;
5439 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5440 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5441 = optimization_default_node;
5442 }
5443
5444 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5445 At this point, it is not needed anymore. */
5446 DECL_SAVED_TREE (decl) = NULL_TREE;
5447
5448 /* Clear the abstract origin if it refers to a method. Otherwise
5449 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5450 origin will not be output correctly. */
5451 if (DECL_ABSTRACT_ORIGIN (decl)
5452 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5453 && RECORD_OR_UNION_TYPE_P
5454 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5455 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5456
5457 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5458 DECL_VINDEX referring to itself into a vtable slot number as it
5459 should. Happens with functions that are copied and then forgotten
5460 about. Just clear it, it won't matter anymore. */
5461 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5462 DECL_VINDEX (decl) = NULL_TREE;
5463 }
5464 else if (TREE_CODE (decl) == VAR_DECL)
5465 {
5466 if ((DECL_EXTERNAL (decl)
5467 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5468 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5469 DECL_INITIAL (decl) = NULL_TREE;
5470 }
5471 else if (TREE_CODE (decl) == TYPE_DECL)
5472 {
5473 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5474 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5475 DECL_INITIAL (decl) = NULL_TREE;
5476 }
5477 else if (TREE_CODE (decl) == FIELD_DECL)
5478 DECL_INITIAL (decl) = NULL_TREE;
5479 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5480 && DECL_INITIAL (decl)
5481 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5482 {
5483 /* Strip builtins from the translation-unit BLOCK. We still have targets
5484 without builtin_decl_explicit support, and builtins are shared
5485 nodes, so we can't use their TREE_CHAIN in multiple lists. */
5486 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5487 while (*nextp)
5488 {
5489 tree var = *nextp;
5490 if (TREE_CODE (var) == FUNCTION_DECL
5491 && DECL_BUILT_IN (var))
5492 *nextp = TREE_CHAIN (var);
5493 else
5494 nextp = &TREE_CHAIN (var);
5495 }
5496 }
5497 }
5498
5499
5500 /* Data used when collecting DECLs and TYPEs for language data removal. */
5501
5502 struct free_lang_data_d
5503 {
5504 /* Worklist to avoid excessive recursion. */
5505 vec<tree> worklist;
5506
5507 /* Set of traversed objects. Used to avoid duplicate visits. */
5508 hash_set<tree> *pset;
5509
5510 /* Array of symbols to process with free_lang_data_in_decl. */
5511 vec<tree> decls;
5512
5513 /* Array of types to process with free_lang_data_in_type. */
5514 vec<tree> types;
5515 };
5516
5517
5518 /* Save all language fields needed to generate proper debug information
5519 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5520
5521 static void
5522 save_debug_info_for_decl (tree t)
5523 {
5524 /*struct saved_debug_info_d *sdi;*/
5525
5526 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5527
5528 /* FIXME. Partial implementation for saving debug info removed. */
5529 }
5530
5531
5532 /* Save all language fields needed to generate proper debug information
5533 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5534
5535 static void
5536 save_debug_info_for_type (tree t)
5537 {
5538 /*struct saved_debug_info_d *sdi;*/
5539
5540 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5541
5542 /* FIXME. Partial implementation for saving debug info removed. */
5543 }
5544
5545
5546 /* Add type or decl T to one of the list of tree nodes that need their
5547 language data removed. The lists are held inside FLD. */
5548
5549 static void
5550 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5551 {
5552 if (DECL_P (t))
5553 {
5554 fld->decls.safe_push (t);
5555 if (debug_info_level > DINFO_LEVEL_TERSE)
5556 save_debug_info_for_decl (t);
5557 }
5558 else if (TYPE_P (t))
5559 {
5560 fld->types.safe_push (t);
5561 if (debug_info_level > DINFO_LEVEL_TERSE)
5562 save_debug_info_for_type (t);
5563 }
5564 else
5565 gcc_unreachable ();
5566 }
5567
5568 /* Push tree node T into FLD->WORKLIST. */
5569
5570 static inline void
5571 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5572 {
5573 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5574 fld->worklist.safe_push ((t));
5575 }
5576
5577
5578 /* Operand callback helper for free_lang_data_in_node. *TP is the
5579 subtree operand being considered. */
5580
5581 static tree
5582 find_decls_types_r (tree *tp, int *ws, void *data)
5583 {
5584 tree t = *tp;
5585 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5586
5587 if (TREE_CODE (t) == TREE_LIST)
5588 return NULL_TREE;
5589
5590 /* Language specific nodes will be removed, so there is no need
5591 to gather anything under them. */
5592 if (is_lang_specific (t))
5593 {
5594 *ws = 0;
5595 return NULL_TREE;
5596 }
5597
5598 if (DECL_P (t))
5599 {
5600 /* Note that walk_tree does not traverse every possible field in
5601 decls, so we have to do our own traversals here. */
5602 add_tree_to_fld_list (t, fld);
5603
5604 fld_worklist_push (DECL_NAME (t), fld);
5605 fld_worklist_push (DECL_CONTEXT (t), fld);
5606 fld_worklist_push (DECL_SIZE (t), fld);
5607 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5608
5609 /* We are going to remove everything under DECL_INITIAL for
5610 TYPE_DECLs. No point walking them. */
5611 if (TREE_CODE (t) != TYPE_DECL)
5612 fld_worklist_push (DECL_INITIAL (t), fld);
5613
5614 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5615 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5616
5617 if (TREE_CODE (t) == FUNCTION_DECL)
5618 {
5619 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5620 fld_worklist_push (DECL_RESULT (t), fld);
5621 }
5622 else if (TREE_CODE (t) == TYPE_DECL)
5623 {
5624 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5625 }
5626 else if (TREE_CODE (t) == FIELD_DECL)
5627 {
5628 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5629 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5630 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5631 fld_worklist_push (DECL_FCONTEXT (t), fld);
5632 }
5633
5634 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5635 && DECL_HAS_VALUE_EXPR_P (t))
5636 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5637
5638 if (TREE_CODE (t) != FIELD_DECL
5639 && TREE_CODE (t) != TYPE_DECL)
5640 fld_worklist_push (TREE_CHAIN (t), fld);
5641 *ws = 0;
5642 }
5643 else if (TYPE_P (t))
5644 {
5645 /* Note that walk_tree does not traverse every possible field in
5646 types, so we have to do our own traversals here. */
5647 add_tree_to_fld_list (t, fld);
5648
5649 if (!RECORD_OR_UNION_TYPE_P (t))
5650 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5651 fld_worklist_push (TYPE_SIZE (t), fld);
5652 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5653 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5654 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5655 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5656 fld_worklist_push (TYPE_NAME (t), fld);
5657 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5658 them and thus do not want to reach unused pointer types
5659 this way. */
5660 if (!POINTER_TYPE_P (t))
5661 fld_worklist_push (TYPE_MINVAL (t), fld);
5662 if (!RECORD_OR_UNION_TYPE_P (t))
5663 fld_worklist_push (TYPE_MAXVAL (t), fld);
5664 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5665 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5666 do not want to reach unused variants this way. */
5667 if (TYPE_CONTEXT (t))
5668 {
5669 tree ctx = TYPE_CONTEXT (t);
5670 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5671 So push that instead. */
5672 while (ctx && TREE_CODE (ctx) == BLOCK)
5673 ctx = BLOCK_SUPERCONTEXT (ctx);
5674 fld_worklist_push (ctx, fld);
5675 }
5676 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5677 want to reach unused types this way. */
5678
5679 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5680 {
5681 unsigned i;
5682 tree tem;
5683 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5684 fld_worklist_push (TREE_TYPE (tem), fld);
5685 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5686 if (tem
5687 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5688 && TREE_CODE (tem) == TREE_LIST)
5689 do
5690 {
5691 fld_worklist_push (TREE_VALUE (tem), fld);
5692 tem = TREE_CHAIN (tem);
5693 }
5694 while (tem);
5695 }
5696 if (RECORD_OR_UNION_TYPE_P (t))
5697 {
5698 tree tem;
5699 /* Push all TYPE_FIELDS - interesting and non-interesting
5700 entries can be interleaved. */
5701 tem = TYPE_FIELDS (t);
5702 while (tem)
5703 {
5704 if (TREE_CODE (tem) == FIELD_DECL
5705 || (TREE_CODE (tem) == TYPE_DECL
5706 && !DECL_IGNORED_P (tem)
5707 && debug_info_level > DINFO_LEVEL_TERSE
5708 && !is_redundant_typedef (tem)))
5709 fld_worklist_push (tem, fld);
5710 tem = TREE_CHAIN (tem);
5711 }
5712 }
5713
5714 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5715 *ws = 0;
5716 }
5717 else if (TREE_CODE (t) == BLOCK)
5718 {
5719 tree tem;
5720 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5721 fld_worklist_push (tem, fld);
5722 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5723 fld_worklist_push (tem, fld);
5724 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5725 }
5726
5727 if (TREE_CODE (t) != IDENTIFIER_NODE
5728 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5729 fld_worklist_push (TREE_TYPE (t), fld);
5730
5731 return NULL_TREE;
5732 }
5733
5734
5735 /* Find decls and types in T. */
5736
5737 static void
5738 find_decls_types (tree t, struct free_lang_data_d *fld)
5739 {
5740 while (1)
5741 {
5742 if (!fld->pset->contains (t))
5743 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5744 if (fld->worklist.is_empty ())
5745 break;
5746 t = fld->worklist.pop ();
5747 }
5748 }
5749
5750 /* Translate all the types in LIST into the corresponding runtime
5751 types. */
5752
5753 static tree
5754 get_eh_types_for_runtime (tree list)
5755 {
5756 tree head, prev;
5757
5758 if (list == NULL_TREE)
5759 return NULL_TREE;
5760
5761 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5762 prev = head;
5763 list = TREE_CHAIN (list);
5764 while (list)
5765 {
5766 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5767 TREE_CHAIN (prev) = n;
5768 prev = TREE_CHAIN (prev);
5769 list = TREE_CHAIN (list);
5770 }
5771
5772 return head;
5773 }
5774
5775
5776 /* Find decls and types referenced in EH region R and store them in
5777 FLD->DECLS and FLD->TYPES. */
5778
5779 static void
5780 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5781 {
5782 switch (r->type)
5783 {
5784 case ERT_CLEANUP:
5785 break;
5786
5787 case ERT_TRY:
5788 {
5789 eh_catch c;
5790
5791 /* The types referenced in each catch must first be changed to the
5792 EH types used at runtime. This removes references to FE types
5793 in the region. */
5794 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5795 {
5796 c->type_list = get_eh_types_for_runtime (c->type_list);
5797 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5798 }
5799 }
5800 break;
5801
5802 case ERT_ALLOWED_EXCEPTIONS:
5803 r->u.allowed.type_list
5804 = get_eh_types_for_runtime (r->u.allowed.type_list);
5805 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5806 break;
5807
5808 case ERT_MUST_NOT_THROW:
5809 walk_tree (&r->u.must_not_throw.failure_decl,
5810 find_decls_types_r, fld, fld->pset);
5811 break;
5812 }
5813 }
5814
5815
5816 /* Find decls and types referenced in cgraph node N and store them in
5817 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5818 look for *every* kind of DECL and TYPE node reachable from N,
5819 including those embedded inside types and decls (i.e., TYPE_DECLs,
5820 NAMESPACE_DECLs, etc). */
5821
5822 static void
5823 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5824 {
5825 basic_block bb;
5826 struct function *fn;
5827 unsigned ix;
5828 tree t;
5829
5830 find_decls_types (n->decl, fld);
5831
5832 if (!gimple_has_body_p (n->decl))
5833 return;
5834
5835 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5836
5837 fn = DECL_STRUCT_FUNCTION (n->decl);
5838
5839 /* Traverse locals. */
5840 FOR_EACH_LOCAL_DECL (fn, ix, t)
5841 find_decls_types (t, fld);
5842
5843 /* Traverse EH regions in FN. */
5844 {
5845 eh_region r;
5846 FOR_ALL_EH_REGION_FN (r, fn)
5847 find_decls_types_in_eh_region (r, fld);
5848 }
5849
5850 /* Traverse every statement in FN. */
5851 FOR_EACH_BB_FN (bb, fn)
5852 {
5853 gphi_iterator psi;
5854 gimple_stmt_iterator si;
5855 unsigned i;
5856
5857 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5858 {
5859 gphi *phi = psi.phi ();
5860
5861 for (i = 0; i < gimple_phi_num_args (phi); i++)
5862 {
5863 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5864 find_decls_types (*arg_p, fld);
5865 }
5866 }
5867
5868 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5869 {
5870 gimple *stmt = gsi_stmt (si);
5871
5872 if (is_gimple_call (stmt))
5873 find_decls_types (gimple_call_fntype (stmt), fld);
5874
5875 for (i = 0; i < gimple_num_ops (stmt); i++)
5876 {
5877 tree arg = gimple_op (stmt, i);
5878 find_decls_types (arg, fld);
5879 }
5880 }
5881 }
5882 }
5883
5884
5885 /* Find decls and types referenced in varpool node N and store them in
5886 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5887 look for *every* kind of DECL and TYPE node reachable from N,
5888 including those embedded inside types and decls (i.e., TYPE_DECLs,
5889 NAMESPACE_DECLs, etc.). */
5890
5891 static void
5892 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5893 {
5894 find_decls_types (v->decl, fld);
5895 }
5896
5897 /* If T needs an assembler name, have one created for it. */
5898
5899 void
5900 assign_assembler_name_if_neeeded (tree t)
5901 {
5902 if (need_assembler_name_p (t))
5903 {
5904 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5905 diagnostics that use input_location to show locus
5906 information. The problem here is that, at this point,
5907 input_location is generally anchored to the end of the file
5908 (since the parser is long gone), so we don't have a good
5909 position to pin it to.
5910
5911 To alleviate this problem, this uses the location of T's
5912 declaration. Examples of this are
5913 testsuite/g++.dg/template/cond2.C and
5914 testsuite/g++.dg/template/pr35240.C. */
5915 location_t saved_location = input_location;
5916 input_location = DECL_SOURCE_LOCATION (t);
5917
5918 decl_assembler_name (t);
5919
5920 input_location = saved_location;
5921 }
5922 }
5923
5924
5925 /* Free language specific information for every operand and expression
5926 in every node of the call graph. This process operates in three stages:
5927
5928 1- Every callgraph node and varpool node is traversed looking for
5929 decls and types embedded in them. This is a more exhaustive
5930 search than that done by find_referenced_vars, because it will
5931 also collect individual fields, decls embedded in types, etc.
5932
5933 2- All the decls found are sent to free_lang_data_in_decl.
5934
5935 3- All the types found are sent to free_lang_data_in_type.
5936
5937 The ordering between decls and types is important because
5938 free_lang_data_in_decl sets assembler names, which includes
5939 mangling. So types cannot be freed up until assembler names have
5940 been set up. */
5941
5942 static void
5943 free_lang_data_in_cgraph (void)
5944 {
5945 struct cgraph_node *n;
5946 varpool_node *v;
5947 struct free_lang_data_d fld;
5948 tree t;
5949 unsigned i;
5950 alias_pair *p;
5951
5952 /* Initialize sets and arrays to store referenced decls and types. */
5953 fld.pset = new hash_set<tree>;
5954 fld.worklist.create (0);
5955 fld.decls.create (100);
5956 fld.types.create (100);
5957
5958 /* Find decls and types in the body of every function in the callgraph. */
5959 FOR_EACH_FUNCTION (n)
5960 find_decls_types_in_node (n, &fld);
5961
5962 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5963 find_decls_types (p->decl, &fld);
5964
5965 /* Find decls and types in every varpool symbol. */
5966 FOR_EACH_VARIABLE (v)
5967 find_decls_types_in_var (v, &fld);
5968
5969 /* Set the assembler name on every decl found. We need to do this
5970 now because free_lang_data_in_decl will invalidate data needed
5971 for mangling. This breaks mangling on interdependent decls. */
5972 FOR_EACH_VEC_ELT (fld.decls, i, t)
5973 assign_assembler_name_if_neeeded (t);
5974
5975 /* Traverse every decl found freeing its language data. */
5976 FOR_EACH_VEC_ELT (fld.decls, i, t)
5977 free_lang_data_in_decl (t);
5978
5979 /* Traverse every type found freeing its language data. */
5980 FOR_EACH_VEC_ELT (fld.types, i, t)
5981 free_lang_data_in_type (t);
5982 if (flag_checking)
5983 {
5984 FOR_EACH_VEC_ELT (fld.types, i, t)
5985 verify_type (t);
5986 }
5987
5988 delete fld.pset;
5989 fld.worklist.release ();
5990 fld.decls.release ();
5991 fld.types.release ();
5992 }
5993
5994
5995 /* Free resources that are used by the FE but are not needed once it is done. */
5996
5997 static unsigned
5998 free_lang_data (void)
5999 {
6000 unsigned i;
6001
6002 /* If we are the LTO frontend we have freed lang-specific data already. */
6003 if (in_lto_p
6004 || (!flag_generate_lto && !flag_generate_offload))
6005 return 0;
6006
6007 /* Allocate and assign alias sets to the standard integer types
6008 while the slots still hold the types as the front ends generated them. */
6009 for (i = 0; i < itk_none; ++i)
6010 if (integer_types[i])
6011 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6012
6013 /* Traverse the IL resetting language specific information for
6014 operands, expressions, etc. */
6015 free_lang_data_in_cgraph ();
6016
6017 /* Create gimple variants for common types. */
6018 ptrdiff_type_node = integer_type_node;
6019 fileptr_type_node = ptr_type_node;
6020
6021 /* Reset some langhooks. Do not reset types_compatible_p, it may
6022 still be used indirectly via the get_alias_set langhook. */
6023 lang_hooks.dwarf_name = lhd_dwarf_name;
6024 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6025 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6026
6027 /* We do not want the default decl_assembler_name implementation.
6028 Rather, once everything is fixed we want a wrapper around it that
6029 asserts all non-local symbols already got their assembler name and
6030 that only produces assembler names for local symbols. Or, better,
6031 make sure we never call decl_assembler_name on local symbols and
6032 devise a separate, middle-end private scheme for it. */
6033
6034 /* Reset diagnostic machinery. */
6035 tree_diagnostics_defaults (global_dc);
6036
6037 return 0;
6038 }
6039
6040
6041 namespace {
6042
6043 const pass_data pass_data_ipa_free_lang_data =
6044 {
6045 SIMPLE_IPA_PASS, /* type */
6046 "*free_lang_data", /* name */
6047 OPTGROUP_NONE, /* optinfo_flags */
6048 TV_IPA_FREE_LANG_DATA, /* tv_id */
6049 0, /* properties_required */
6050 0, /* properties_provided */
6051 0, /* properties_destroyed */
6052 0, /* todo_flags_start */
6053 0, /* todo_flags_finish */
6054 };
6055
6056 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6057 {
6058 public:
6059 pass_ipa_free_lang_data (gcc::context *ctxt)
6060 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6061 {}
6062
6063 /* opt_pass methods: */
6064 virtual unsigned int execute (function *) { return free_lang_data (); }
6065
6066 }; // class pass_ipa_free_lang_data
6067
6068 } // anon namespace
6069
6070 simple_ipa_opt_pass *
6071 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6072 {
6073 return new pass_ipa_free_lang_data (ctxt);
6074 }
6075
6076 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6077 ATTR_NAME. Also used internally by remove_attribute(). */
6078 bool
6079 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6080 {
6081 size_t ident_len = IDENTIFIER_LENGTH (ident);
6082
6083 if (ident_len == attr_len)
6084 {
6085 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6086 return true;
6087 }
6088 else if (ident_len == attr_len + 4)
6089 {
6090 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
6091 '__text__'. */
6092 const char *p = IDENTIFIER_POINTER (ident);
6093 if (p[0] == '_' && p[1] == '_'
6094 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6095 && strncmp (attr_name, p + 2, attr_len) == 0)
6096 return true;
6097 }
6098
6099 return false;
6100 }
6101
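/* For illustration only (sketch, not used by GCC): under the matching
   rules above, both of the following calls return true for an attribute
   spelled "packed" in the source:

     private_is_attribute_p ("packed", strlen ("packed"),
                             get_identifier ("packed"));
     private_is_attribute_p ("packed", strlen ("packed"),
                             get_identifier ("__packed__"));  */
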
6102 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6103 of ATTR_NAME, and LIST is not NULL_TREE. */
6104 tree
6105 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6106 {
6107 while (list)
6108 {
6109 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6110
6111 if (ident_len == attr_len)
6112 {
6113 if (!strcmp (attr_name,
6114 IDENTIFIER_POINTER (get_attribute_name (list))))
6115 break;
6116 }
6117 /* TODO: If we made sure that attributes were stored in the
6118 canonical form without '__...__' (i.e., as in 'text' as opposed
6119 to '__text__') then we could avoid the following case. */
6120 else if (ident_len == attr_len + 4)
6121 {
6122 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6123 if (p[0] == '_' && p[1] == '_'
6124 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6125 && strncmp (attr_name, p + 2, attr_len) == 0)
6126 break;
6127 }
6128 list = TREE_CHAIN (list);
6129 }
6130
6131 return list;
6132 }
6133
6134 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6135 return a pointer to the first list element whose attribute name
6136 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6137 '__text__'). */
6138
6139 tree
6140 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6141 tree list)
6142 {
6143 while (list)
6144 {
6145 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6146
6147 if (attr_len > ident_len)
6148 {
6149 list = TREE_CHAIN (list);
6150 continue;
6151 }
6152
6153 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6154
6155 if (strncmp (attr_name, p, attr_len) == 0)
6156 break;
6157
6158 /* TODO: If we made sure that attributes were stored in the
6159 canonical form without '__...__' (i.e., as in 'text' as opposed
6160 to '__text__') then we could avoid the following case. */
6161 if (p[0] == '_' && p[1] == '_'
6162 && strncmp (attr_name, p + 2, attr_len) == 0)
6163 break;
6164
6165 list = TREE_CHAIN (list);
6166 }
6167
6168 return list;
6169 }
6170
6171
6172 /* A variant of lookup_attribute() that can be used with an identifier
6173 as the first argument, and where the identifier can be either
6174 'text' or '__text__'.
6175
6176 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6177 return a pointer to the attribute's list element if the attribute
6178 is part of the list, or NULL_TREE if not found. If the attribute
6179 appears more than once, this only returns the first occurrence; the
6180 TREE_CHAIN of the return value should be passed back in if further
6181 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6182 can be in the form 'text' or '__text__'. */
6183 static tree
6184 lookup_ident_attribute (tree attr_identifier, tree list)
6185 {
6186 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6187
6188 while (list)
6189 {
6190 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6191 == IDENTIFIER_NODE);
6192
6193 if (cmp_attrib_identifiers (attr_identifier,
6194 get_attribute_name (list)))
6195 /* Found it. */
6196 break;
6197 list = TREE_CHAIN (list);
6198 }
6199
6200 return list;
6201 }
6202
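/* Illustrative sketch (not GCC code) of visiting every occurrence of an
   attribute via the function above; "foo", ATTRS and PROCESS are
   placeholders:

     for (tree a = lookup_ident_attribute (get_identifier ("foo"), attrs);
          a != NULL_TREE;
          a = lookup_ident_attribute (get_identifier ("foo"),
                                      TREE_CHAIN (a)))
       process (TREE_VALUE (a));

   A similar chained lookup appears in merge_attributes below.  */
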
6203 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6204 modified list. */
6205
6206 tree
6207 remove_attribute (const char *attr_name, tree list)
6208 {
6209 tree *p;
6210 size_t attr_len = strlen (attr_name);
6211
6212 gcc_checking_assert (attr_name[0] != '_');
6213
6214 for (p = &list; *p; )
6215 {
6216 tree l = *p;
6217 /* TODO: If we were storing attributes in normalized form, here
6218 we could use a simple strcmp(). */
6219 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6220 *p = TREE_CHAIN (l);
6221 else
6222 p = &TREE_CHAIN (l);
6223 }
6224
6225 return list;
6226 }
6227
6228 /* Return an attribute list that is the union of A1 and A2. */
6229
6230 tree
6231 merge_attributes (tree a1, tree a2)
6232 {
6233 tree attributes;
6234
6235 /* Either one unset? Take the set one. */
6236
6237 if ((attributes = a1) == 0)
6238 attributes = a2;
6239
6240 /* One that completely contains the other? Take it. */
6241
6242 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6243 {
6244 if (attribute_list_contained (a2, a1))
6245 attributes = a2;
6246 else
6247 {
6248 /* Pick the longest list, and hang on the other list. */
6249
6250 if (list_length (a1) < list_length (a2))
6251 attributes = a2, a2 = a1;
6252
6253 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6254 {
6255 tree a;
6256 for (a = lookup_ident_attribute (get_attribute_name (a2),
6257 attributes);
6258 a != NULL_TREE && !attribute_value_equal (a, a2);
6259 a = lookup_ident_attribute (get_attribute_name (a2),
6260 TREE_CHAIN (a)))
6261 ;
6262 if (a == NULL_TREE)
6263 {
6264 a1 = copy_node (a2);
6265 TREE_CHAIN (a1) = attributes;
6266 attributes = a1;
6267 }
6268 }
6269 }
6270 }
6271 return attributes;
6272 }
6273
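/* Example of the union computed above (illustrative): merging
   ((noreturn)) with ((noreturn, aligned (8))) yields a list with one
   "noreturn" and one "aligned (8)" entry, whereas merging ((aligned (4)))
   with ((aligned (8))) keeps both entries because their values differ.  */
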
6274 /* Given types T1 and T2, merge their attributes and return
6275 the result. */
6276
6277 tree
6278 merge_type_attributes (tree t1, tree t2)
6279 {
6280 return merge_attributes (TYPE_ATTRIBUTES (t1),
6281 TYPE_ATTRIBUTES (t2));
6282 }
6283
6284 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6285 the result. */
6286
6287 tree
6288 merge_decl_attributes (tree olddecl, tree newdecl)
6289 {
6290 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6291 DECL_ATTRIBUTES (newdecl));
6292 }
6293
6294 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6295
6296 /* Specialization of merge_decl_attributes for various Windows targets.
6297
6298 This handles the following situation:
6299
6300 __declspec (dllimport) int foo;
6301 int foo;
6302
6303 The second instance of `foo' nullifies the dllimport. */
6304
6305 tree
6306 merge_dllimport_decl_attributes (tree old, tree new_tree)
6307 {
6308 tree a;
6309 int delete_dllimport_p = 1;
6310
6311 /* What we need to do here is remove from `old' dllimport if it doesn't
6312 appear in `new'. dllimport behaves like extern: if a declaration is
6313 marked dllimport and a definition appears later, then the object
6314 is not dllimport'd. We also remove a `new' dllimport if the old list
6315 contains dllexport: dllexport always overrides dllimport, regardless
6316 of the order of declaration. */
6317 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6318 delete_dllimport_p = 0;
6319 else if (DECL_DLLIMPORT_P (new_tree)
6320 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6321 {
6322 DECL_DLLIMPORT_P (new_tree) = 0;
6323 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6324 "dllimport ignored", new_tree);
6325 }
6326 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6327 {
6328 /* Warn about overriding a symbol that has already been used, e.g.:
6329 extern int __attribute__ ((dllimport)) foo;
6330 int* bar () {return &foo;}
6331 int foo;
6332 */
6333 if (TREE_USED (old))
6334 {
6335 warning (0, "%q+D redeclared without dllimport attribute "
6336 "after being referenced with dll linkage", new_tree);
6337 /* If we have used a variable's address with dllimport linkage,
6338 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6339 decl may already have had TREE_CONSTANT computed.
6340 We still remove the attribute so that assembler code refers
6341 to '&foo' rather than '_imp__foo'. */
6342 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6343 DECL_DLLIMPORT_P (new_tree) = 1;
6344 }
6345
6346 /* Let an inline definition silently override the external reference,
6347 but otherwise warn about attribute inconsistency. */
6348 else if (TREE_CODE (new_tree) == VAR_DECL
6349 || !DECL_DECLARED_INLINE_P (new_tree))
6350 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6351 "previous dllimport ignored", new_tree);
6352 }
6353 else
6354 delete_dllimport_p = 0;
6355
6356 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6357
6358 if (delete_dllimport_p)
6359 a = remove_attribute ("dllimport", a);
6360
6361 return a;
6362 }
6363
6364 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6365 struct attribute_spec.handler. */
6366
6367 tree
6368 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6369 bool *no_add_attrs)
6370 {
6371 tree node = *pnode;
6372 bool is_dllimport;
6373
6374 /* These attributes may apply to structure and union types being created,
6375 but otherwise should pass to the declaration involved. */
6376 if (!DECL_P (node))
6377 {
6378 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6379 | (int) ATTR_FLAG_ARRAY_NEXT))
6380 {
6381 *no_add_attrs = true;
6382 return tree_cons (name, args, NULL_TREE);
6383 }
6384 if (TREE_CODE (node) == RECORD_TYPE
6385 || TREE_CODE (node) == UNION_TYPE)
6386 {
6387 node = TYPE_NAME (node);
6388 if (!node)
6389 return NULL_TREE;
6390 }
6391 else
6392 {
6393 warning (OPT_Wattributes, "%qE attribute ignored",
6394 name);
6395 *no_add_attrs = true;
6396 return NULL_TREE;
6397 }
6398 }
6399
6400 if (TREE_CODE (node) != FUNCTION_DECL
6401 && TREE_CODE (node) != VAR_DECL
6402 && TREE_CODE (node) != TYPE_DECL)
6403 {
6404 *no_add_attrs = true;
6405 warning (OPT_Wattributes, "%qE attribute ignored",
6406 name);
6407 return NULL_TREE;
6408 }
6409
6410 if (TREE_CODE (node) == TYPE_DECL
6411 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6412 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6413 {
6414 *no_add_attrs = true;
6415 warning (OPT_Wattributes, "%qE attribute ignored",
6416 name);
6417 return NULL_TREE;
6418 }
6419
6420 is_dllimport = is_attribute_p ("dllimport", name);
6421
6422 /* Report error on dllimport ambiguities seen now before they cause
6423 any damage. */
6424 if (is_dllimport)
6425 {
6426 /* Honor any target-specific overrides. */
6427 if (!targetm.valid_dllimport_attribute_p (node))
6428 *no_add_attrs = true;
6429
6430 else if (TREE_CODE (node) == FUNCTION_DECL
6431 && DECL_DECLARED_INLINE_P (node))
6432 {
6433 warning (OPT_Wattributes, "inline function %q+D declared as "
6434 "dllimport: attribute ignored", node);
6435 *no_add_attrs = true;
6436 }
6437 /* Like MS, treat definition of dllimported variables and
6438 non-inlined functions on declaration as syntax errors. */
6439 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6440 {
6441 error ("function %q+D definition is marked dllimport", node);
6442 *no_add_attrs = true;
6443 }
6444
6445 else if (TREE_CODE (node) == VAR_DECL)
6446 {
6447 if (DECL_INITIAL (node))
6448 {
6449 error ("variable %q+D definition is marked dllimport",
6450 node);
6451 *no_add_attrs = true;
6452 }
6453
6454 /* `extern' needn't be specified with dllimport.
6455 Specify `extern' now and hope for the best. Sigh. */
6456 DECL_EXTERNAL (node) = 1;
6457 /* Also, implicitly give global scope to dllimport'd variables
6458 declared within a function, unless they are declared static. */
6459 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6460 TREE_PUBLIC (node) = 1;
6461 }
6462
6463 if (*no_add_attrs == false)
6464 DECL_DLLIMPORT_P (node) = 1;
6465 }
6466 else if (TREE_CODE (node) == FUNCTION_DECL
6467 && DECL_DECLARED_INLINE_P (node)
6468 && flag_keep_inline_dllexport)
6469 /* An exported function, even if inline, must be emitted. */
6470 DECL_EXTERNAL (node) = 0;
6471
6472 /* Report error if symbol is not accessible at global scope. */
6473 if (!TREE_PUBLIC (node)
6474 && (TREE_CODE (node) == VAR_DECL
6475 || TREE_CODE (node) == FUNCTION_DECL))
6476 {
6477 error ("external linkage required for symbol %q+D because of "
6478 "%qE attribute", node, name);
6479 *no_add_attrs = true;
6480 }
6481
6482 /* A dllexport'd entity must have default visibility so that other
6483 program units (shared libraries or the main executable) can see
6484 it. A dllimport'd entity must have default visibility so that
6485 the linker knows that undefined references within this program
6486 unit can be resolved by the dynamic linker. */
6487 if (!*no_add_attrs)
6488 {
6489 if (DECL_VISIBILITY_SPECIFIED (node)
6490 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6491 error ("%qE implies default visibility, but %qD has already "
6492 "been declared with a different visibility",
6493 name, node);
6494 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6495 DECL_VISIBILITY_SPECIFIED (node) = 1;
6496 }
6497
6498 return NULL_TREE;
6499 }
6500
6501 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6502 \f
6503 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6504 of the various TYPE_QUAL values. */
6505
6506 static void
6507 set_type_quals (tree type, int type_quals)
6508 {
6509 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6510 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6511 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6512 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6513 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6514 }
6515
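/* The TYPE_QUALS bitmask decoded above is built by or-ing the individual
   qualifier flags, e.g. (illustrative) TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
   for a "const volatile" variant; an address space number is folded in
   with ENCODE_QUAL_ADDR_SPACE.  */
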
6516 /* Returns true iff unqualified CAND and BASE are equivalent. */
6517
6518 bool
6519 check_base_type (const_tree cand, const_tree base)
6520 {
6521 return (TYPE_NAME (cand) == TYPE_NAME (base)
6522 /* Apparently this is needed for Objective-C. */
6523 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6524 /* Check alignment. */
6525 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6526 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6527 TYPE_ATTRIBUTES (base)));
6528 }
6529
6530 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6531
6532 bool
6533 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6534 {
6535 return (TYPE_QUALS (cand) == type_quals
6536 && check_base_type (cand, base));
6537 }
6538
6539 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6540
6541 static bool
6542 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6543 {
6544 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6545 && TYPE_NAME (cand) == TYPE_NAME (base)
6546 /* Apparently this is needed for Objective-C. */
6547 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6548 /* Check alignment. */
6549 && TYPE_ALIGN (cand) == align
6550 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6551 TYPE_ATTRIBUTES (base)));
6552 }
6553
6554 /* Check whether TYPE matches the size of one of the built-in atomic
6555 types, and if so return that core atomic type. */
6556
6557 static tree
6558 find_atomic_core_type (tree type)
6559 {
6560 tree base_atomic_type;
6561
6562 /* Only handle complete types. */
6563 if (TYPE_SIZE (type) == NULL_TREE)
6564 return NULL_TREE;
6565
6566 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6567 switch (type_size)
6568 {
6569 case 8:
6570 base_atomic_type = atomicQI_type_node;
6571 break;
6572
6573 case 16:
6574 base_atomic_type = atomicHI_type_node;
6575 break;
6576
6577 case 32:
6578 base_atomic_type = atomicSI_type_node;
6579 break;
6580
6581 case 64:
6582 base_atomic_type = atomicDI_type_node;
6583 break;
6584
6585 case 128:
6586 base_atomic_type = atomicTI_type_node;
6587 break;
6588
6589 default:
6590 base_atomic_type = NULL_TREE;
6591 }
6592
6593 return base_atomic_type;
6594 }
6595
6596 /* Return a version of the TYPE, qualified as indicated by the
6597 TYPE_QUALS, if one exists. If no qualified version exists yet,
6598 return NULL_TREE. */
6599
6600 tree
6601 get_qualified_type (tree type, int type_quals)
6602 {
6603 tree t;
6604
6605 if (TYPE_QUALS (type) == type_quals)
6606 return type;
6607
6608 /* Search the chain of variants to see if there is already one there just
6609 like the one we need to have. If so, use that existing one. We must
6610 preserve the TYPE_NAME, since there is code that depends on this. */
6611 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6612 if (check_qualified_type (t, type, type_quals))
6613 return t;
6614
6615 return NULL_TREE;
6616 }
6617
6618 /* Like get_qualified_type, but creates the type if it does not
6619 exist. This function never returns NULL_TREE. */
6620
6621 tree
6622 build_qualified_type (tree type, int type_quals)
6623 {
6624 tree t;
6625
6626 /* See if we already have the appropriate qualified variant. */
6627 t = get_qualified_type (type, type_quals);
6628
6629 /* If not, build it. */
6630 if (!t)
6631 {
6632 t = build_variant_type_copy (type);
6633 set_type_quals (t, type_quals);
6634
6635 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6636 {
6637 /* See if this object can map to a basic atomic type. */
6638 tree atomic_type = find_atomic_core_type (type);
6639 if (atomic_type)
6640 {
6641 /* Ensure the alignment of this type is compatible with
6642 the required alignment of the atomic type. */
6643 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6644 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6645 }
6646 }
6647
6648 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6649 /* Propagate structural equality. */
6650 SET_TYPE_STRUCTURAL_EQUALITY (t);
6651 else if (TYPE_CANONICAL (type) != type)
6652 /* Build the underlying canonical type, since it is different
6653 from TYPE. */
6654 {
6655 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6656 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6657 }
6658 else
6659 /* T is its own canonical type. */
6660 TYPE_CANONICAL (t) = t;
6661
6662 }
6663
6664 return t;
6665 }
6666
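/* Example (illustrative sketch): obtaining a "const volatile" variant of
   TYPE, reusing a previously built one when it already exists on the
   variant chain:

     tree cv = build_qualified_type (type,
                                     TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   get_qualified_type above performs only the lookup step and returns
   NULL_TREE when no such variant has been built yet.  */
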
6667 /* Create a variant of TYPE with alignment ALIGN. */
6668
6669 tree
6670 build_aligned_type (tree type, unsigned int align)
6671 {
6672 tree t;
6673
6674 if (TYPE_PACKED (type)
6675 || TYPE_ALIGN (type) == align)
6676 return type;
6677
6678 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6679 if (check_aligned_type (t, type, align))
6680 return t;
6681
6682 t = build_variant_type_copy (type);
6683 SET_TYPE_ALIGN (t, align);
6684
6685 return t;
6686 }
6687
6688 /* Create a new distinct copy of TYPE. The new type is made its own
6689 MAIN_VARIANT. If TYPE requires structural equality checks, the
6690 resulting type requires structural equality checks; otherwise, its
6691 TYPE_CANONICAL points to itself. */
6692
6693 tree
6694 build_distinct_type_copy (tree type)
6695 {
6696 tree t = copy_node (type);
6697
6698 TYPE_POINTER_TO (t) = 0;
6699 TYPE_REFERENCE_TO (t) = 0;
6700
6701 /* Set the canonical type either to a new equivalence class, or
6702 propagate the need for structural equality checks. */
6703 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6704 SET_TYPE_STRUCTURAL_EQUALITY (t);
6705 else
6706 TYPE_CANONICAL (t) = t;
6707
6708 /* Make it its own variant. */
6709 TYPE_MAIN_VARIANT (t) = t;
6710 TYPE_NEXT_VARIANT (t) = 0;
6711
6712 /* We do not record methods in type copies nor variants,
6713 so we do not need to keep them up to date when a new method
6714 is inserted. */
6715 if (RECORD_OR_UNION_TYPE_P (t))
6716 TYPE_METHODS (t) = NULL_TREE;
6717
6718 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6719 whose TREE_TYPE is not t. This can also happen in the Ada
6720 frontend when using subtypes. */
6721
6722 return t;
6723 }
6724
6725 /* Create a new variant of TYPE, equivalent but distinct. This is so
6726 the caller can modify it. TYPE_CANONICAL for the return type will
6727 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6728 are considered equal by the language itself (or that both types
6729 require structural equality checks). */
6730
6731 tree
6732 build_variant_type_copy (tree type)
6733 {
6734 tree t, m = TYPE_MAIN_VARIANT (type);
6735
6736 t = build_distinct_type_copy (type);
6737
6738 /* Since we're building a variant, assume that it is a non-semantic
6739 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6740 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6741 /* Type variants have no alias set defined. */
6742 TYPE_ALIAS_SET (t) = -1;
6743
6744 /* Add the new type to the chain of variants of TYPE. */
6745 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6746 TYPE_NEXT_VARIANT (m) = t;
6747 TYPE_MAIN_VARIANT (t) = m;
6748
6749 return t;
6750 }
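
/* Contrast with build_distinct_type_copy above (illustrative): a variant
   copy shares TYPE_MAIN_VARIANT and TYPE_CANONICAL with the original, e.g.

     tree v = build_variant_type_copy (type);
     gcc_checking_assert (TYPE_MAIN_VARIANT (v) == TYPE_MAIN_VARIANT (type));

   whereas a distinct copy becomes its own main variant and, unless
   structural equality is required, its own canonical type.  */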
6751 \f
6752 /* Return true if the from trees in both tree maps are equal. */
6753
6754 int
6755 tree_map_base_eq (const void *va, const void *vb)
6756 {
6757 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6758 *const b = (const struct tree_map_base *) vb;
6759 return (a->from == b->from);
6760 }
6761
6762 /* Hash a from tree in a tree_map_base. */
6763
6764 unsigned int
6765 tree_map_base_hash (const void *item)
6766 {
6767 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6768 }
6769
6770 /* Return true if this tree map structure is marked for garbage collection
6771 purposes. We simply return true if the from tree is marked, so that this
6772 structure goes away when the from tree goes away. */
6773
6774 int
6775 tree_map_base_marked_p (const void *p)
6776 {
6777 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6778 }
6779
6780 /* Hash a from tree in a tree_map. */
6781
6782 unsigned int
6783 tree_map_hash (const void *item)
6784 {
6785 return (((const struct tree_map *) item)->hash);
6786 }
6787
6788 /* Hash a from tree in a tree_decl_map. */
6789
6790 unsigned int
6791 tree_decl_map_hash (const void *item)
6792 {
6793 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6794 }
6795
6796 /* Return the initialization priority for DECL. */
6797
6798 priority_type
6799 decl_init_priority_lookup (tree decl)
6800 {
6801 symtab_node *snode = symtab_node::get (decl);
6802
6803 if (!snode)
6804 return DEFAULT_INIT_PRIORITY;
6805 return
6806 snode->get_init_priority ();
6807 }
6808
6809 /* Return the finalization priority for DECL. */
6810
6811 priority_type
6812 decl_fini_priority_lookup (tree decl)
6813 {
6814 cgraph_node *node = cgraph_node::get (decl);
6815
6816 if (!node)
6817 return DEFAULT_INIT_PRIORITY;
6818 return
6819 node->get_fini_priority ();
6820 }
6821
6822 /* Set the initialization priority for DECL to PRIORITY. */
6823
6824 void
6825 decl_init_priority_insert (tree decl, priority_type priority)
6826 {
6827 struct symtab_node *snode;
6828
6829 if (priority == DEFAULT_INIT_PRIORITY)
6830 {
6831 snode = symtab_node::get (decl);
6832 if (!snode)
6833 return;
6834 }
6835 else if (TREE_CODE (decl) == VAR_DECL)
6836 snode = varpool_node::get_create (decl);
6837 else
6838 snode = cgraph_node::get_create (decl);
6839 snode->set_init_priority (priority);
6840 }
6841
6842 /* Set the finalization priority for DECL to PRIORITY. */
6843
6844 void
6845 decl_fini_priority_insert (tree decl, priority_type priority)
6846 {
6847 struct cgraph_node *node;
6848
6849 if (priority == DEFAULT_INIT_PRIORITY)
6850 {
6851 node = cgraph_node::get (decl);
6852 if (!node)
6853 return;
6854 }
6855 else
6856 node = cgraph_node::get_create (decl);
6857 node->set_fini_priority (priority);
6858 }
6859
6860 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6861
6862 static void
6863 print_debug_expr_statistics (void)
6864 {
6865 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6866 (long) debug_expr_for_decl->size (),
6867 (long) debug_expr_for_decl->elements (),
6868 debug_expr_for_decl->collisions ());
6869 }
6870
6871 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6872
6873 static void
6874 print_value_expr_statistics (void)
6875 {
6876 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6877 (long) value_expr_for_decl->size (),
6878 (long) value_expr_for_decl->elements (),
6879 value_expr_for_decl->collisions ());
6880 }
6881
6882 /* Lookup a debug expression for FROM, and return it if we find one. */
6883
6884 tree
6885 decl_debug_expr_lookup (tree from)
6886 {
6887 struct tree_decl_map *h, in;
6888 in.base.from = from;
6889
6890 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6891 if (h)
6892 return h->to;
6893 return NULL_TREE;
6894 }
6895
6896 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6897
6898 void
6899 decl_debug_expr_insert (tree from, tree to)
6900 {
6901 struct tree_decl_map *h;
6902
6903 h = ggc_alloc<tree_decl_map> ();
6904 h->base.from = from;
6905 h->to = to;
6906 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6907 }
6908
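/* Sketch of the intended pairing (illustrative only; VAR and EXPR are
   placeholders):

     decl_debug_expr_insert (var, expr);
     gcc_checking_assert (decl_debug_expr_lookup (var) == expr);

   decl_value_expr_insert and decl_value_expr_lookup below behave the
   same way for the DECL_VALUE_EXPR table.  */
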
6909 /* Lookup a value expression for FROM, and return it if we find one. */
6910
6911 tree
6912 decl_value_expr_lookup (tree from)
6913 {
6914 struct tree_decl_map *h, in;
6915 in.base.from = from;
6916
6917 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6918 if (h)
6919 return h->to;
6920 return NULL_TREE;
6921 }
6922
6923 /* Insert a mapping FROM->TO in the value expression hashtable. */
6924
6925 void
6926 decl_value_expr_insert (tree from, tree to)
6927 {
6928 struct tree_decl_map *h;
6929
6930 h = ggc_alloc<tree_decl_map> ();
6931 h->base.from = from;
6932 h->to = to;
6933 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6934 }
6935
6936 /* Lookup a vector of debug arguments for FROM, and return it if we
6937 find one. */
6938
6939 vec<tree, va_gc> **
6940 decl_debug_args_lookup (tree from)
6941 {
6942 struct tree_vec_map *h, in;
6943
6944 if (!DECL_HAS_DEBUG_ARGS_P (from))
6945 return NULL;
6946 gcc_checking_assert (debug_args_for_decl != NULL);
6947 in.base.from = from;
6948 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6949 if (h)
6950 return &h->to;
6951 return NULL;
6952 }
6953
6954 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6955 arguments hashtable. */
6956
6957 vec<tree, va_gc> **
6958 decl_debug_args_insert (tree from)
6959 {
6960 struct tree_vec_map *h;
6961 tree_vec_map **loc;
6962
6963 if (DECL_HAS_DEBUG_ARGS_P (from))
6964 return decl_debug_args_lookup (from);
6965 if (debug_args_for_decl == NULL)
6966 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6967 h = ggc_alloc<tree_vec_map> ();
6968 h->base.from = from;
6969 h->to = NULL;
6970 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6971 *loc = h;
6972 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6973 return &h->to;
6974 }
6975
6976 /* Hashing of types so that we don't make duplicates.
6977 The entry point is `type_hash_canon'. */
6978
6979 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6980 with types in the TREE_VALUE slots), by adding the hash codes
6981 of the individual types. */
6982
6983 static void
6984 type_hash_list (const_tree list, inchash::hash &hstate)
6985 {
6986 const_tree tail;
6987
6988 for (tail = list; tail; tail = TREE_CHAIN (tail))
6989 if (TREE_VALUE (tail) != error_mark_node)
6990 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6991 }
6992
6993 /* These are the Hashtable callback functions. */
6994
6995 /* Returns true iff the types are equivalent. */
6996
6997 bool
6998 type_cache_hasher::equal (type_hash *a, type_hash *b)
6999 {
7000 /* First test the things that are the same for all types. */
7001 if (a->hash != b->hash
7002 || TREE_CODE (a->type) != TREE_CODE (b->type)
7003 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7004 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7005 TYPE_ATTRIBUTES (b->type))
7006 || (TREE_CODE (a->type) != COMPLEX_TYPE
7007 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7008 return 0;
7009
7010 /* Be careful about comparing arrays before and after the element type
7011 has been completed; don't compare TYPE_ALIGN unless both types are
7012 complete. */
7013 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7014 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7015 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7016 return 0;
7017
7018 switch (TREE_CODE (a->type))
7019 {
7020 case VOID_TYPE:
7021 case COMPLEX_TYPE:
7022 case POINTER_TYPE:
7023 case REFERENCE_TYPE:
7024 case NULLPTR_TYPE:
7025 return 1;
7026
7027 case VECTOR_TYPE:
7028 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
7029
7030 case ENUMERAL_TYPE:
7031 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7032 && !(TYPE_VALUES (a->type)
7033 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7034 && TYPE_VALUES (b->type)
7035 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7036 && type_list_equal (TYPE_VALUES (a->type),
7037 TYPE_VALUES (b->type))))
7038 return 0;
7039
7040 /* ... fall through ... */
7041
7042 case INTEGER_TYPE:
7043 case REAL_TYPE:
7044 case BOOLEAN_TYPE:
7045 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7046 return false;
7047 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7048 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7049 TYPE_MAX_VALUE (b->type)))
7050 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7051 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7052 TYPE_MIN_VALUE (b->type))));
7053
7054 case FIXED_POINT_TYPE:
7055 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7056
7057 case OFFSET_TYPE:
7058 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7059
7060 case METHOD_TYPE:
7061 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7062 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7063 || (TYPE_ARG_TYPES (a->type)
7064 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7065 && TYPE_ARG_TYPES (b->type)
7066 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7067 && type_list_equal (TYPE_ARG_TYPES (a->type),
7068 TYPE_ARG_TYPES (b->type)))))
7069 break;
7070 return 0;
7071 case ARRAY_TYPE:
7072 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7073
7074 case RECORD_TYPE:
7075 case UNION_TYPE:
7076 case QUAL_UNION_TYPE:
7077 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7078 || (TYPE_FIELDS (a->type)
7079 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7080 && TYPE_FIELDS (b->type)
7081 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7082 && type_list_equal (TYPE_FIELDS (a->type),
7083 TYPE_FIELDS (b->type))));
7084
7085 case FUNCTION_TYPE:
7086 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7087 || (TYPE_ARG_TYPES (a->type)
7088 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7089 && TYPE_ARG_TYPES (b->type)
7090 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7091 && type_list_equal (TYPE_ARG_TYPES (a->type),
7092 TYPE_ARG_TYPES (b->type))))
7093 break;
7094 return 0;
7095
7096 default:
7097 return 0;
7098 }
7099
7100 if (lang_hooks.types.type_hash_eq != NULL)
7101 return lang_hooks.types.type_hash_eq (a->type, b->type);
7102
7103 return 1;
7104 }
7105
7106 /* Given TYPE, and HASHCODE its hash code, return the canonical
7107 object for an identical type if one already exists.
7108 Otherwise, return TYPE, and record it as the canonical object.
7109
7110 To use this function, first create a type of the sort you want.
7111 Then compute its hash code from the fields of the type that
7112 make it different from other similar types.
7113 Then call this function and use the value. */
7114
7115 tree
7116 type_hash_canon (unsigned int hashcode, tree type)
7117 {
7118 type_hash in;
7119 type_hash **loc;
7120
7121 /* The hash table only contains main variants, so ensure that's what we're
7122 being passed. */
7123 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7124
7125 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7126 must call that routine before comparing TYPE_ALIGNs. */
7127 layout_type (type);
7128
7129 in.hash = hashcode;
7130 in.type = type;
7131
7132 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7133 if (*loc)
7134 {
7135 tree t1 = ((type_hash *) *loc)->type;
7136 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7137 free_node (type);
7138 return t1;
7139 }
7140 else
7141 {
7142 struct type_hash *h;
7143
7144 h = ggc_alloc<type_hash> ();
7145 h->hash = hashcode;
7146 h->type = type;
7147 *loc = h;
7148
7149 return type;
7150 }
7151 }
7152
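/* Illustrative sketch of the recipe described above; the fields hashed
   here are examples only, real callers hash whatever distinguishes the
   freshly built main-variant type T:

     inchash::hash hstate;
     hstate.add_int (TREE_CODE (t));
     hstate.add_int (TYPE_PRECISION (t));
     t = type_hash_canon (hstate.end (), t);  */
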
7153 static void
7154 print_type_hash_statistics (void)
7155 {
7156 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7157 (long) type_hash_table->size (),
7158 (long) type_hash_table->elements (),
7159 type_hash_table->collisions ());
7160 }
7161
7162 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7163 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7164 by adding the hash codes of the individual attributes. */
7165
7166 static void
7167 attribute_hash_list (const_tree list, inchash::hash &hstate)
7168 {
7169 const_tree tail;
7170
7171 for (tail = list; tail; tail = TREE_CHAIN (tail))
7172 /* ??? Do we want to add in TREE_VALUE too? */
7173 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7174 }
7175
7176 /* Given two lists of attributes, return true if list L2 is
7177 equivalent to L1. */
7178
7179 int
7180 attribute_list_equal (const_tree l1, const_tree l2)
7181 {
7182 if (l1 == l2)
7183 return 1;
7184
7185 return attribute_list_contained (l1, l2)
7186 && attribute_list_contained (l2, l1);
7187 }
7188
7189 /* Given two lists of attributes, return true if list L2 is
7190 completely contained within L1. */
7191 /* ??? This would be faster if attribute names were stored in a canonicalized
7192 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7193 must be used to show these elements are equivalent (which they are). */
7194 /* ??? It's not clear that attributes with arguments will always be handled
7195 correctly. */
7196
7197 int
7198 attribute_list_contained (const_tree l1, const_tree l2)
7199 {
7200 const_tree t1, t2;
7201
7202 /* First check the obvious, maybe the lists are identical. */
7203 if (l1 == l2)
7204 return 1;
7205
7206 /* Maybe the lists are similar. */
7207 for (t1 = l1, t2 = l2;
7208 t1 != 0 && t2 != 0
7209 && get_attribute_name (t1) == get_attribute_name (t2)
7210 && TREE_VALUE (t1) == TREE_VALUE (t2);
7211 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7212 ;
7213
7214 /* Maybe the lists are equal. */
7215 if (t1 == 0 && t2 == 0)
7216 return 1;
7217
7218 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7219 {
7220 const_tree attr;
7221 /* This CONST_CAST is okay because lookup_attribute does not
7222 modify its argument and the return value is assigned to a
7223 const_tree. */
7224 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7225 CONST_CAST_TREE (l1));
7226 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7227 attr = lookup_ident_attribute (get_attribute_name (t2),
7228 TREE_CHAIN (attr)))
7229 ;
7230
7231 if (attr == NULL_TREE)
7232 return 0;
7233 }
7234
7235 return 1;
7236 }
7237
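/* Example of the containment relation checked above (illustrative): the
   list ((used, aligned (8))) contains ((aligned (8))), but it does not
   contain ((aligned (4))), because attribute values must match as well
   as names.  */
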
7238 /* Given two lists of types
7239 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7240 return 1 if the lists contain the same types in the same order.
7241 Also, the TREE_PURPOSEs must match. */
7242
7243 int
7244 type_list_equal (const_tree l1, const_tree l2)
7245 {
7246 const_tree t1, t2;
7247
7248 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7249 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7250 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7251 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7252 && (TREE_TYPE (TREE_PURPOSE (t1))
7253 == TREE_TYPE (TREE_PURPOSE (t2))))))
7254 return 0;
7255
7256 return t1 == t2;
7257 }
7258
7259 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7260 given by TYPE. If the argument list accepts variable arguments,
7261 then this function counts only the ordinary arguments. */
7262
7263 int
7264 type_num_arguments (const_tree type)
7265 {
7266 int i = 0;
7267 tree t;
7268
7269 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7270 /* If the function does not take a variable number of arguments,
7271 the last element in the list will have type `void'. */
7272 if (VOID_TYPE_P (TREE_VALUE (t)))
7273 break;
7274 else
7275 ++i;
7276
7277 return i;
7278 }
7279
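/* For example (illustrative): for a FUNCTION_TYPE describing
   int f (int, char, ...) this returns 2; the trailing ellipsis means the
   argument list is not terminated by a void node, and only the two named
   arguments are counted.  */
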
7280 /* Nonzero if integer constants T1 and T2
7281 represent the same constant value. */
7282
7283 int
7284 tree_int_cst_equal (const_tree t1, const_tree t2)
7285 {
7286 if (t1 == t2)
7287 return 1;
7288
7289 if (t1 == 0 || t2 == 0)
7290 return 0;
7291
7292 if (TREE_CODE (t1) == INTEGER_CST
7293 && TREE_CODE (t2) == INTEGER_CST
7294 && wi::to_widest (t1) == wi::to_widest (t2))
7295 return 1;
7296
7297 return 0;
7298 }
7299
7300 /* Return true if T is an INTEGER_CST whose numerical value (extended
7301 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7302
7303 bool
7304 tree_fits_shwi_p (const_tree t)
7305 {
7306 return (t != NULL_TREE
7307 && TREE_CODE (t) == INTEGER_CST
7308 && wi::fits_shwi_p (wi::to_widest (t)));
7309 }
7310
7311 /* Return true if T is an INTEGER_CST whose numerical value (extended
7312 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7313
7314 bool
7315 tree_fits_uhwi_p (const_tree t)
7316 {
7317 return (t != NULL_TREE
7318 && TREE_CODE (t) == INTEGER_CST
7319 && wi::fits_uhwi_p (wi::to_widest (t)));
7320 }
7321
7322 /* T is an INTEGER_CST whose numerical value (extended according to
7323 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7324 HOST_WIDE_INT. */
7325
7326 HOST_WIDE_INT
7327 tree_to_shwi (const_tree t)
7328 {
7329 gcc_assert (tree_fits_shwi_p (t));
7330 return TREE_INT_CST_LOW (t);
7331 }
7332
7333 /* T is an INTEGER_CST whose numerical value (extended according to
7334 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7335 HOST_WIDE_INT. */
7336
7337 unsigned HOST_WIDE_INT
7338 tree_to_uhwi (const_tree t)
7339 {
7340 gcc_assert (tree_fits_uhwi_p (t));
7341 return TREE_INT_CST_LOW (t);
7342 }
7343
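/* Typical guarded use of the accessors above (illustrative): test the
   predicate first, since the accessors assert that the value fits:

     if (tree_fits_uhwi_p (TYPE_SIZE (type)))
       {
         unsigned HOST_WIDE_INT bits = tree_to_uhwi (TYPE_SIZE (type));
         ...
       }  */
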
7344 /* Return the most significant (sign) bit of T. */
7345
7346 int
7347 tree_int_cst_sign_bit (const_tree t)
7348 {
7349 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7350
7351 return wi::extract_uhwi (t, bitno, 1);
7352 }
7353
7354 /* Return an indication of the sign of the integer constant T.
7355 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7356 Note that -1 will never be returned if T's type is unsigned. */
7357
7358 int
7359 tree_int_cst_sgn (const_tree t)
7360 {
7361 if (wi::eq_p (t, 0))
7362 return 0;
7363 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7364 return 1;
7365 else if (wi::neg_p (t))
7366 return -1;
7367 else
7368 return 1;
7369 }
7370
7371 /* Return the minimum number of bits needed to represent VALUE in a
7372 signed or unsigned type; SGN says which. */
7373
7374 unsigned int
7375 tree_int_cst_min_precision (tree value, signop sgn)
7376 {
7377 /* If the value is negative, compute its negative minus 1. The latter
7378 adjustment is because the absolute value of the largest negative value
7379 is one larger than the largest positive value. This is equivalent to
7380 a bit-wise negation, so use that operation instead. */
7381
7382 if (tree_int_cst_sgn (value) < 0)
7383 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7384
7385 /* Return the number of bits needed, taking into account the fact
7386 that we need one more bit for a signed than unsigned type.
7387 If VALUE is 0 or -1, the minimum precision is 1 no matter
7388 whether SGN is SIGNED or UNSIGNED. */
7389
7390 if (integer_zerop (value))
7391 return 1;
7392 else
7393 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7394 }
7395
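/* Worked example (illustrative): for VALUE == 5 this returns 3 when SGN
   is UNSIGNED (binary 101) and 4 when SGN is SIGNED (binary 0101); for
   VALUE == -3 and SGN == SIGNED it returns 3, since ~(-3) == 2 needs two
   bits plus one sign bit.  */
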
7396 /* Return truthvalue of whether T1 is the same tree structure as T2.
7397 Return 1 if they are the same.
7398 Return 0 if they are understandably different.
7399 Return -1 if either contains tree structure not understood by
7400 this function. */
7401
7402 int
7403 simple_cst_equal (const_tree t1, const_tree t2)
7404 {
7405 enum tree_code code1, code2;
7406 int cmp;
7407 int i;
7408
7409 if (t1 == t2)
7410 return 1;
7411 if (t1 == 0 || t2 == 0)
7412 return 0;
7413
7414 code1 = TREE_CODE (t1);
7415 code2 = TREE_CODE (t2);
7416
7417 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7418 {
7419 if (CONVERT_EXPR_CODE_P (code2)
7420 || code2 == NON_LVALUE_EXPR)
7421 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7422 else
7423 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7424 }
7425
7426 else if (CONVERT_EXPR_CODE_P (code2)
7427 || code2 == NON_LVALUE_EXPR)
7428 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7429
7430 if (code1 != code2)
7431 return 0;
7432
7433 switch (code1)
7434 {
7435 case INTEGER_CST:
7436 return wi::to_widest (t1) == wi::to_widest (t2);
7437
7438 case REAL_CST:
7439 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7440
7441 case FIXED_CST:
7442 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7443
7444 case STRING_CST:
7445 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7446 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7447 TREE_STRING_LENGTH (t1)));
7448
7449 case CONSTRUCTOR:
7450 {
7451 unsigned HOST_WIDE_INT idx;
7452 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7453 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7454
7455 if (vec_safe_length (v1) != vec_safe_length (v2))
7456 return false;
7457
7458 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7459 /* ??? Should we also handle fields here? */
7460 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7461 return false;
7462 return true;
7463 }
7464
7465 case SAVE_EXPR:
7466 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7467
7468 case CALL_EXPR:
7469 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7470 if (cmp <= 0)
7471 return cmp;
7472 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7473 return 0;
7474 {
7475 const_tree arg1, arg2;
7476 const_call_expr_arg_iterator iter1, iter2;
7477 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7478 arg2 = first_const_call_expr_arg (t2, &iter2);
7479 arg1 && arg2;
7480 arg1 = next_const_call_expr_arg (&iter1),
7481 arg2 = next_const_call_expr_arg (&iter2))
7482 {
7483 cmp = simple_cst_equal (arg1, arg2);
7484 if (cmp <= 0)
7485 return cmp;
7486 }
7487 return arg1 == arg2;
7488 }
7489
7490 case TARGET_EXPR:
7491 /* Special case: if either target is an unallocated VAR_DECL,
7492 it means that it's going to be unified with whatever the
7493 TARGET_EXPR is really supposed to initialize, so treat it
7494 as being equivalent to anything. */
7495 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7496 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7497 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7498 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7499 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7500 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7501 cmp = 1;
7502 else
7503 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7504
7505 if (cmp <= 0)
7506 return cmp;
7507
7508 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7509
7510 case WITH_CLEANUP_EXPR:
7511 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7512 if (cmp <= 0)
7513 return cmp;
7514
7515 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7516
7517 case COMPONENT_REF:
7518 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7519 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7520
7521 return 0;
7522
7523 case VAR_DECL:
7524 case PARM_DECL:
7525 case CONST_DECL:
7526 case FUNCTION_DECL:
7527 return 0;
7528
7529 default:
7530 break;
7531 }
7532
7533 /* This general rule works for most tree codes. All exceptions should be
7534 handled above. If this is a language-specific tree code, we can't
7535 trust what might be in the operand, so say we don't know
7536 the situation. */
7537 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7538 return -1;
7539
7540 switch (TREE_CODE_CLASS (code1))
7541 {
7542 case tcc_unary:
7543 case tcc_binary:
7544 case tcc_comparison:
7545 case tcc_expression:
7546 case tcc_reference:
7547 case tcc_statement:
7548 cmp = 1;
7549 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7550 {
7551 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7552 if (cmp <= 0)
7553 return cmp;
7554 }
7555
7556 return cmp;
7557
7558 default:
7559 return -1;
7560 }
7561 }
7562
7563 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7564 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7565 than U, respectively. */
7566
7567 int
7568 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7569 {
7570 if (tree_int_cst_sgn (t) < 0)
7571 return -1;
7572 else if (!tree_fits_uhwi_p (t))
7573 return 1;
7574 else if (TREE_INT_CST_LOW (t) == u)
7575 return 0;
7576 else if (TREE_INT_CST_LOW (t) < u)
7577 return -1;
7578 else
7579 return 1;
7580 }
7581
7582 /* Return true if SIZE represents a constant size that is in bounds of
7583 what the middle-end and the backend accepts (covering not more than
7584 half of the address-space). */
7585
7586 bool
7587 valid_constant_size_p (const_tree size)
7588 {
7589 if (! tree_fits_uhwi_p (size)
7590 || TREE_OVERFLOW (size)
7591 || tree_int_cst_sign_bit (size) != 0)
7592 return false;
7593 return true;
7594 }
7595
7596 /* Return the precision of the type, or for a complex or vector type the
7597 precision of the type of its elements. */
7598
7599 unsigned int
7600 element_precision (const_tree type)
7601 {
7602 if (!TYPE_P (type))
7603 type = TREE_TYPE (type);
7604 enum tree_code code = TREE_CODE (type);
7605 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7606 type = TREE_TYPE (type);
7607
7608 return TYPE_PRECISION (type);
7609 }
7610
7611 /* Return true if CODE represents an associative tree code. Otherwise
7612 return false. */
7613 bool
7614 associative_tree_code (enum tree_code code)
7615 {
7616 switch (code)
7617 {
7618 case BIT_IOR_EXPR:
7619 case BIT_AND_EXPR:
7620 case BIT_XOR_EXPR:
7621 case PLUS_EXPR:
7622 case MULT_EXPR:
7623 case MIN_EXPR:
7624 case MAX_EXPR:
7625 return true;
7626
7627 default:
7628 break;
7629 }
7630 return false;
7631 }
7632
7633 /* Return true if CODE represents a commutative tree code. Otherwise
7634 return false. */
7635 bool
7636 commutative_tree_code (enum tree_code code)
7637 {
7638 switch (code)
7639 {
7640 case PLUS_EXPR:
7641 case MULT_EXPR:
7642 case MULT_HIGHPART_EXPR:
7643 case MIN_EXPR:
7644 case MAX_EXPR:
7645 case BIT_IOR_EXPR:
7646 case BIT_XOR_EXPR:
7647 case BIT_AND_EXPR:
7648 case NE_EXPR:
7649 case EQ_EXPR:
7650 case UNORDERED_EXPR:
7651 case ORDERED_EXPR:
7652 case UNEQ_EXPR:
7653 case LTGT_EXPR:
7654 case TRUTH_AND_EXPR:
7655 case TRUTH_XOR_EXPR:
7656 case TRUTH_OR_EXPR:
7657 case WIDEN_MULT_EXPR:
7658 case VEC_WIDEN_MULT_HI_EXPR:
7659 case VEC_WIDEN_MULT_LO_EXPR:
7660 case VEC_WIDEN_MULT_EVEN_EXPR:
7661 case VEC_WIDEN_MULT_ODD_EXPR:
7662 return true;
7663
7664 default:
7665 break;
7666 }
7667 return false;
7668 }
7669
7670 /* Return true if CODE represents a ternary tree code for which the
7671 first two operands are commutative. Otherwise return false. */
7672 bool
7673 commutative_ternary_tree_code (enum tree_code code)
7674 {
7675 switch (code)
7676 {
7677 case WIDEN_MULT_PLUS_EXPR:
7678 case WIDEN_MULT_MINUS_EXPR:
7679 case DOT_PROD_EXPR:
7680 case FMA_EXPR:
7681 return true;
7682
7683 default:
7684 break;
7685 }
7686 return false;
7687 }
7688
7689 /* Returns true if CODE can overflow. */
7690
7691 bool
7692 operation_can_overflow (enum tree_code code)
7693 {
7694 switch (code)
7695 {
7696 case PLUS_EXPR:
7697 case MINUS_EXPR:
7698 case MULT_EXPR:
7699 case LSHIFT_EXPR:
7700 /* Can overflow in various ways. */
7701 return true;
7702 case TRUNC_DIV_EXPR:
7703 case EXACT_DIV_EXPR:
7704 case FLOOR_DIV_EXPR:
7705 case CEIL_DIV_EXPR:
7706 /* For INT_MIN / -1. */
7707 return true;
7708 case NEGATE_EXPR:
7709 case ABS_EXPR:
7710 /* For -INT_MIN. */
7711 return true;
7712 default:
7713 /* These operators cannot overflow. */
7714 return false;
7715 }
7716 }
7717
7718 /* Return true if CODE operating on operands of type TYPE cannot overflow,
7719 or if -ftrapv does not generate trapping insns for CODE. */
7720
7721 bool
7722 operation_no_trapping_overflow (tree type, enum tree_code code)
7723 {
7724 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7725
7726 /* We don't generate instructions that trap on overflow for complex or vector
7727 types. */
7728 if (!INTEGRAL_TYPE_P (type))
7729 return true;
7730
7731 if (!TYPE_OVERFLOW_TRAPS (type))
7732 return true;
7733
7734 switch (code)
7735 {
7736 case PLUS_EXPR:
7737 case MINUS_EXPR:
7738 case MULT_EXPR:
7739 case NEGATE_EXPR:
7740 case ABS_EXPR:
7741 /* These operators can overflow, and -ftrapv generates trapping code for
7742 these. */
7743 return false;
7744 case TRUNC_DIV_EXPR:
7745 case EXACT_DIV_EXPR:
7746 case FLOOR_DIV_EXPR:
7747 case CEIL_DIV_EXPR:
7748 case LSHIFT_EXPR:
7749 /* These operators can overflow, but -ftrapv does not generate trapping
7750 code for these. */
7751 return true;
7752 default:
7753 /* These operators cannot overflow. */
7754 return true;
7755 }
7756 }
7757
7758 namespace inchash
7759 {
7760
7761 /* Generate a hash value for an expression. This can be used iteratively
7762 by passing a previous result as the HSTATE argument.
7763
7764 This function is intended to produce the same hash for expressions which
7765 would compare equal using operand_equal_p. */
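
/* For instance (illustrative sketch, not GCC code): two INTEGER_CST
   operands with the same type and value hash identically, mirroring the
   fact that operand_equal_p considers them equal:

     inchash::hash h1, h2;
     inchash::add_expr (build_int_cst (integer_type_node, 42), h1, 0);
     inchash::add_expr (build_int_cst (integer_type_node, 42), h2, 0);
     gcc_checking_assert (h1.end () == h2.end ());  */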
7766 void
7767 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7768 {
7769 int i;
7770 enum tree_code code;
7771 enum tree_code_class tclass;
7772
7773 if (t == NULL_TREE)
7774 {
7775 hstate.merge_hash (0);
7776 return;
7777 }
7778
7779 if (!(flags & OEP_ADDRESS_OF))
7780 STRIP_NOPS (t);
7781
7782 code = TREE_CODE (t);
7783
7784 switch (code)
7785 {
7786 /* Alas, constants aren't shared, so we can't rely on pointer
7787 identity. */
7788 case VOID_CST:
7789 hstate.merge_hash (0);
7790 return;
7791 case INTEGER_CST:
7792 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7793 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7794 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7795 return;
7796 case REAL_CST:
7797 {
7798 unsigned int val2;
7799 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7800 val2 = rvc_zero;
7801 else
7802 val2 = real_hash (TREE_REAL_CST_PTR (t));
7803 hstate.merge_hash (val2);
7804 return;
7805 }
7806 case FIXED_CST:
7807 {
7808 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7809 hstate.merge_hash (val2);
7810 return;
7811 }
7812 case STRING_CST:
7813 hstate.add ((const void *) TREE_STRING_POINTER (t),
7814 TREE_STRING_LENGTH (t));
7815 return;
7816 case COMPLEX_CST:
7817 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7818 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7819 return;
7820 case VECTOR_CST:
7821 {
7822 unsigned i;
7823 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7824 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags);
7825 return;
7826 }
7827 case SSA_NAME:
7828 /* We can just compare by pointer. */
7829 hstate.add_wide_int (SSA_NAME_VERSION (t));
7830 return;
7831 case PLACEHOLDER_EXPR:
7832 /* The node itself doesn't matter. */
7833 return;
7834 case BLOCK:
7835 case OMP_CLAUSE:
7836 /* Ignore. */
7837 return;
7838 case TREE_LIST:
7839 /* A list of expressions, for a CALL_EXPR or as the elements of a
7840 VECTOR_CST. */
7841 for (; t; t = TREE_CHAIN (t))
7842 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7843 return;
7844 case CONSTRUCTOR:
7845 {
7846 unsigned HOST_WIDE_INT idx;
7847 tree field, value;
7848 flags &= ~OEP_ADDRESS_OF;
7849 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7850 {
7851 inchash::add_expr (field, hstate, flags);
7852 inchash::add_expr (value, hstate, flags);
7853 }
7854 return;
7855 }
7856 case STATEMENT_LIST:
7857 {
7858 tree_stmt_iterator i;
7859 for (i = tsi_start (CONST_CAST_TREE (t));
7860 !tsi_end_p (i); tsi_next (&i))
7861 inchash::add_expr (tsi_stmt (i), hstate, flags);
7862 return;
7863 }
7864 case FUNCTION_DECL:
7865 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7866 Otherwise nodes that compare equal according to operand_equal_p might
7867 get different hash codes. However, don't do this for machine specific
7868 or front end builtins, since the function code is overloaded in those
7869 cases. */
7870 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7871 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7872 {
7873 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7874 code = TREE_CODE (t);
7875 }
7876 /* FALL THROUGH */
7877 default:
7878 tclass = TREE_CODE_CLASS (code);
7879
7880 if (tclass == tcc_declaration)
7881 {
7882 /* DECLs have a unique ID. */
7883 hstate.add_wide_int (DECL_UID (t));
7884 }
7885 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7886 {
7887 /* For comparisons that can be swapped, use the lower
7888 tree code. */
7889 enum tree_code ccode = swap_tree_comparison (code);
7890 if (code < ccode)
7891 ccode = code;
7892 hstate.add_object (ccode);
7893 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7894 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7895 }
7896 else if (CONVERT_EXPR_CODE_P (code))
7897 {
7898 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7899 operand_equal_p. */
7900 enum tree_code ccode = NOP_EXPR;
7901 hstate.add_object (ccode);
7902
7903 /* Don't hash the type, that can lead to having nodes which
7904 compare equal according to operand_equal_p, but which
7905 have different hash codes. Make sure to include signedness
7906 in the hash computation. */
7907 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7908 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7909 }
7910 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7911 else if (code == MEM_REF
7912 && (flags & OEP_ADDRESS_OF) != 0
7913 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7914 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7915 && integer_zerop (TREE_OPERAND (t, 1)))
7916 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7917 hstate, flags);
7918 /* Don't ICE on FE specific trees, or their arguments etc.
7919 during operand_equal_p hash verification. */
7920 else if (!IS_EXPR_CODE_CLASS (tclass))
7921 gcc_assert (flags & OEP_HASH_CHECK);
7922 else
7923 {
7924 unsigned int sflags = flags;
7925
7926 hstate.add_object (code);
7927
7928 switch (code)
7929 {
7930 case ADDR_EXPR:
7931 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7932 flags |= OEP_ADDRESS_OF;
7933 sflags = flags;
7934 break;
7935
7936 case INDIRECT_REF:
7937 case MEM_REF:
7938 case TARGET_MEM_REF:
7939 flags &= ~OEP_ADDRESS_OF;
7940 sflags = flags;
7941 break;
7942
7943 case ARRAY_REF:
7944 case ARRAY_RANGE_REF:
7945 case COMPONENT_REF:
7946 case BIT_FIELD_REF:
7947 sflags &= ~OEP_ADDRESS_OF;
7948 break;
7949
7950 case COND_EXPR:
7951 flags &= ~OEP_ADDRESS_OF;
7952 break;
7953
7954 case FMA_EXPR:
7955 case WIDEN_MULT_PLUS_EXPR:
7956 case WIDEN_MULT_MINUS_EXPR:
7957 {
7958 /* The multiplication operands are commutative. */
7959 inchash::hash one, two;
7960 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7961 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7962 hstate.add_commutative (one, two);
7963 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7964 return;
7965 }
7966
7967 case CALL_EXPR:
7968 if (CALL_EXPR_FN (t) == NULL_TREE)
7969 hstate.add_int (CALL_EXPR_IFN (t));
7970 break;
7971
7972 case TARGET_EXPR:
7973 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7974 Usually different TARGET_EXPRs should just use
7975 different temporaries in their slots. */
7976 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7977 return;
7978
7979 default:
7980 break;
7981 }
7982
7983 /* Don't hash the type, that can lead to having nodes which
7984 compare equal according to operand_equal_p, but which
7985 have different hash codes. */
7986 if (code == NON_LVALUE_EXPR)
7987 {
7988 /* Make sure to include signedness in the hash computation. */
7989 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7990 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7991 }
7992
7993 else if (commutative_tree_code (code))
7994 {
7995 /* It's a commutative expression. We want to hash it the same
7996 however it appears. We do this by first hashing both operands
7997 and then rehashing based on the order of their independent
7998 hashes. */
7999 inchash::hash one, two;
8000 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
8001 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
8002 hstate.add_commutative (one, two);
8003 }
8004 else
8005 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
8006 inchash::add_expr (TREE_OPERAND (t, i), hstate,
8007 i == 0 ? flags : sflags);
8008 }
8009 return;
8010 }
8011 }
8012
8013 }
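
/* Editorial sketch, not part of the original source: a hypothetical
   helper showing the intended use of inchash::add_expr above.  Two
   expressions that operand_equal_p would treat as equal should hash to
   the same value this way.  */

static hashval_t
example_hash_expr (const_tree e)
{
  inchash::hash hstate;
  /* Flags of 0 request the default behaviour (no OEP_* modifiers).  */
  inchash::add_expr (e, hstate, 0);
  return hstate.end ();
}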
8014
8015 /* Constructors for pointer, array and function types.
8016 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8017 constructed by language-dependent code, not here.) */
8018
8019 /* Construct, lay out and return the type of pointers to TO_TYPE with
8020 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
8021 reference all of memory. If such a type has already been
8022 constructed, reuse it. */
8023
8024 tree
8025 build_pointer_type_for_mode (tree to_type, machine_mode mode,
8026 bool can_alias_all)
8027 {
8028 tree t;
8029 bool could_alias = can_alias_all;
8030
8031 if (to_type == error_mark_node)
8032 return error_mark_node;
8033
8034 /* If the pointed-to type has the may_alias attribute set, force
8035 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8036 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8037 can_alias_all = true;
8038
8039 /* In some cases, languages will have things that aren't a POINTER_TYPE
8040 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
8041 In that case, return that type without regard to the rest of our
8042 operands.
8043
8044 ??? This is a kludge, but consistent with the way this function has
8045 always operated and there doesn't seem to be a good way to avoid this
8046 at the moment. */
8047 if (TYPE_POINTER_TO (to_type) != 0
8048 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
8049 return TYPE_POINTER_TO (to_type);
8050
8051 /* First, if we already have a type for pointers to TO_TYPE and it's
8052 the proper mode, use it. */
8053 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
8054 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8055 return t;
8056
8057 t = make_node (POINTER_TYPE);
8058
8059 TREE_TYPE (t) = to_type;
8060 SET_TYPE_MODE (t, mode);
8061 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8062 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
8063 TYPE_POINTER_TO (to_type) = t;
8064
8065 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8066 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8067 SET_TYPE_STRUCTURAL_EQUALITY (t);
8068 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8069 TYPE_CANONICAL (t)
8070 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
8071 mode, false);
8072
8073 /* Lay out the type. This function has many callers that are concerned
8074 with expression-construction, and this simplifies them all. */
8075 layout_type (t);
8076
8077 return t;
8078 }
8079
8080 /* By default build pointers in ptr_mode. */
8081
8082 tree
8083 build_pointer_type (tree to_type)
8084 {
8085 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8086 : TYPE_ADDR_SPACE (to_type);
8087 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8088 return build_pointer_type_for_mode (to_type, pointer_mode, false);
8089 }
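
/* Editorial sketch, not part of the original source: because pointer
   types are chained on TYPE_POINTER_TO and reused, building the same
   pointer type twice yields the identical node.  The helper name is
   purely illustrative.  */

static void
example_pointer_type_sharing (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);
  /* The second call finds the first node on the TYPE_POINTER_TO chain.  */
  gcc_assert (p1 == p2);
}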
8090
8091 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
8092
8093 tree
8094 build_reference_type_for_mode (tree to_type, machine_mode mode,
8095 bool can_alias_all)
8096 {
8097 tree t;
8098 bool could_alias = can_alias_all;
8099
8100 if (to_type == error_mark_node)
8101 return error_mark_node;
8102
8103 /* If the pointed-to type has the may_alias attribute set, force
8104 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8105 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8106 can_alias_all = true;
8107
8108 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8109 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8110 In that case, return that type without regard to the rest of our
8111 operands.
8112
8113 ??? This is a kludge, but consistent with the way this function has
8114 always operated and there doesn't seem to be a good way to avoid this
8115 at the moment. */
8116 if (TYPE_REFERENCE_TO (to_type) != 0
8117 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8118 return TYPE_REFERENCE_TO (to_type);
8119
8120 /* First, if we already have a type for references to TO_TYPE and it's
8121 the proper mode, use it. */
8122 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8123 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8124 return t;
8125
8126 t = make_node (REFERENCE_TYPE);
8127
8128 TREE_TYPE (t) = to_type;
8129 SET_TYPE_MODE (t, mode);
8130 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8131 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8132 TYPE_REFERENCE_TO (to_type) = t;
8133
8134 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8135 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8136 SET_TYPE_STRUCTURAL_EQUALITY (t);
8137 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8138 TYPE_CANONICAL (t)
8139 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8140 mode, false);
8141
8142 layout_type (t);
8143
8144 return t;
8145 }
8146
8147
8148 /* Build the node for the type of references-to-TO_TYPE by default
8149 in ptr_mode. */
8150
8151 tree
8152 build_reference_type (tree to_type)
8153 {
8154 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8155 : TYPE_ADDR_SPACE (to_type);
8156 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8157 return build_reference_type_for_mode (to_type, pointer_mode, false);
8158 }
8159
8160 #define MAX_INT_CACHED_PREC \
8161 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8162 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8163
8164 /* Builds a signed or unsigned integer type of precision PRECISION.
8165 Used for C bitfields whose precision does not match that of
8166 built-in target types. */
8167 tree
8168 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8169 int unsignedp)
8170 {
8171 tree itype, ret;
8172
8173 if (unsignedp)
8174 unsignedp = MAX_INT_CACHED_PREC + 1;
8175
8176 if (precision <= MAX_INT_CACHED_PREC)
8177 {
8178 itype = nonstandard_integer_type_cache[precision + unsignedp];
8179 if (itype)
8180 return itype;
8181 }
8182
8183 itype = make_node (INTEGER_TYPE);
8184 TYPE_PRECISION (itype) = precision;
8185
8186 if (unsignedp)
8187 fixup_unsigned_type (itype);
8188 else
8189 fixup_signed_type (itype);
8190
8191 ret = itype;
8192 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8193 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8194 if (precision <= MAX_INT_CACHED_PREC)
8195 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8196
8197 return ret;
8198 }
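
/* Editorial sketch, not part of the original source: a 24-bit unsigned
   integer type, as a C front end might need for a bit-field of width 24
   that matches no built-in target type.  */

static tree
example_build_uint24_type (void)
{
  /* PRECISION is 24, UNSIGNEDP is nonzero.  */
  return build_nonstandard_integer_type (24, 1);
}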
8199
8200 #define MAX_BOOL_CACHED_PREC \
8201 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8202 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8203
8204 /* Builds a boolean type of precision PRECISION.
8205 Used for boolean vectors to choose proper vector element size. */
8206 tree
8207 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8208 {
8209 tree type;
8210
8211 if (precision <= MAX_BOOL_CACHED_PREC)
8212 {
8213 type = nonstandard_boolean_type_cache[precision];
8214 if (type)
8215 return type;
8216 }
8217
8218 type = make_node (BOOLEAN_TYPE);
8219 TYPE_PRECISION (type) = precision;
8220 fixup_signed_type (type);
8221
8222 if (precision <= MAX_BOOL_CACHED_PREC)
8223 nonstandard_boolean_type_cache[precision] = type;
8224
8225 return type;
8226 }
8227
8228 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8229 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8230 is true, reuse such a type that has already been constructed. */
8231
8232 static tree
8233 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8234 {
8235 tree itype = make_node (INTEGER_TYPE);
8236 inchash::hash hstate;
8237
8238 TREE_TYPE (itype) = type;
8239
8240 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8241 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8242
8243 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8244 SET_TYPE_MODE (itype, TYPE_MODE (type));
8245 TYPE_SIZE (itype) = TYPE_SIZE (type);
8246 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8247 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8248 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8249
8250 if (!shared)
8251 return itype;
8252
8253 if ((TYPE_MIN_VALUE (itype)
8254 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8255 || (TYPE_MAX_VALUE (itype)
8256 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8257 {
8258 /* Since we cannot reliably merge this type, we need to compare it using
8259 structural equality checks. */
8260 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8261 return itype;
8262 }
8263
8264 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8265 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8266 hstate.merge_hash (TYPE_HASH (type));
8267 itype = type_hash_canon (hstate.end (), itype);
8268
8269 return itype;
8270 }
8271
8272 /* Wrapper around build_range_type_1 with SHARED set to true. */
8273
8274 tree
8275 build_range_type (tree type, tree lowval, tree highval)
8276 {
8277 return build_range_type_1 (type, lowval, highval, true);
8278 }
8279
8280 /* Wrapper around build_range_type_1 with SHARED set to false. */
8281
8282 tree
8283 build_nonshared_range_type (tree type, tree lowval, tree highval)
8284 {
8285 return build_range_type_1 (type, lowval, highval, false);
8286 }
8287
8288 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8289 MAXVAL should be the maximum value in the domain
8290 (one less than the length of the array).
8291
8292 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8293 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8294 The limit exists because the result is a signed type and we don't handle
8295 sizes that use more than one HOST_WIDE_INT. */
8296
8297 tree
8298 build_index_type (tree maxval)
8299 {
8300 return build_range_type (sizetype, size_zero_node, maxval);
8301 }
8302
8303 /* Return true if the debug information for TYPE, a subtype, should be emitted
8304 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8305 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8306 debug info and doesn't reflect the source code. */
8307
8308 bool
8309 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8310 {
8311 tree base_type = TREE_TYPE (type), low, high;
8312
8313 /* Subrange types have a base type which is an integral type. */
8314 if (!INTEGRAL_TYPE_P (base_type))
8315 return false;
8316
8317 /* Get the real bounds of the subtype. */
8318 if (lang_hooks.types.get_subrange_bounds)
8319 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8320 else
8321 {
8322 low = TYPE_MIN_VALUE (type);
8323 high = TYPE_MAX_VALUE (type);
8324 }
8325
8326 /* If the type and its base type have the same representation and the same
8327 name, then the type is not a subrange but a copy of the base type. */
8328 if ((TREE_CODE (base_type) == INTEGER_TYPE
8329 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8330 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8331 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8332 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8333 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8334 return false;
8335
8336 if (lowval)
8337 *lowval = low;
8338 if (highval)
8339 *highval = high;
8340 return true;
8341 }
8342
8343 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8344 and number of elements specified by the range of values of INDEX_TYPE.
8345 If SHARED is true, reuse such a type that has already been constructed. */
8346
8347 static tree
8348 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8349 {
8350 tree t;
8351
8352 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8353 {
8354 error ("arrays of functions are not meaningful");
8355 elt_type = integer_type_node;
8356 }
8357
8358 t = make_node (ARRAY_TYPE);
8359 TREE_TYPE (t) = elt_type;
8360 TYPE_DOMAIN (t) = index_type;
8361 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8362 layout_type (t);
8363
8364 /* If the element type is incomplete at this point we get marked for
8365 structural equality. Do not record these types in the canonical
8366 type hashtable. */
8367 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8368 return t;
8369
8370 if (shared)
8371 {
8372 inchash::hash hstate;
8373 hstate.add_object (TYPE_HASH (elt_type));
8374 if (index_type)
8375 hstate.add_object (TYPE_HASH (index_type));
8376 t = type_hash_canon (hstate.end (), t);
8377 }
8378
8379 if (TYPE_CANONICAL (t) == t)
8380 {
8381 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8382 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8383 || in_lto_p)
8384 SET_TYPE_STRUCTURAL_EQUALITY (t);
8385 else if (TYPE_CANONICAL (elt_type) != elt_type
8386 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8387 TYPE_CANONICAL (t)
8388 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8389 index_type
8390 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8391 shared);
8392 }
8393
8394 return t;
8395 }
8396
8397 /* Wrapper around build_array_type_1 with SHARED set to true. */
8398
8399 tree
8400 build_array_type (tree elt_type, tree index_type)
8401 {
8402 return build_array_type_1 (elt_type, index_type, true);
8403 }
8404
8405 /* Wrapper around build_array_type_1 with SHARED set to false. */
8406
8407 tree
8408 build_nonshared_array_type (tree elt_type, tree index_type)
8409 {
8410 return build_array_type_1 (elt_type, index_type, false);
8411 }
8412
8413 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8414 sizetype. */
8415
8416 tree
8417 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8418 {
8419 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8420 }
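
/* Editorial sketch, not part of the original source: two ways of
   building the type "int[10]".  build_array_type_nelts above is just a
   shorthand for the explicit index-type form; both results should be
   pointer-identical because shared array types go through
   type_hash_canon.  */

static void
example_build_int_array_10 (void)
{
  tree domain = build_index_type (size_int (9));	/* indices 0 .. 9 */
  tree a1 = build_array_type (integer_type_node, domain);
  tree a2 = build_array_type_nelts (integer_type_node, 10);
  gcc_assert (a1 == a2);
}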
8421
8422 /* Recursively examines the array elements of TYPE, until a non-array
8423 element type is found. */
8424
8425 tree
8426 strip_array_types (tree type)
8427 {
8428 while (TREE_CODE (type) == ARRAY_TYPE)
8429 type = TREE_TYPE (type);
8430
8431 return type;
8432 }
8433
8434 /* Computes the canonical argument types from the argument type list
8435 ARGTYPES.
8436
8437 Upon return, *ANY_STRUCTURAL_P will be true iff it was true on
8438 entry to this function, or any of the ARGTYPES are
8439 structural.
8440
8441 Upon return, *ANY_NONCANONICAL_P will be true iff it was true
8442 on entry to this function, or any of the ARGTYPES are
8443 non-canonical.
8444
8445 Returns a canonical argument list, which may be ARGTYPES when the
8446 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8447 true) or would not differ from ARGTYPES. */
8448
8449 static tree
8450 maybe_canonicalize_argtypes (tree argtypes,
8451 bool *any_structural_p,
8452 bool *any_noncanonical_p)
8453 {
8454 tree arg;
8455 bool any_noncanonical_argtypes_p = false;
8456
8457 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8458 {
8459 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8460 /* Fail gracefully by stating that the type is structural. */
8461 *any_structural_p = true;
8462 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8463 *any_structural_p = true;
8464 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8465 || TREE_PURPOSE (arg))
8466 /* If the argument has a default argument, we consider it
8467 non-canonical even though the type itself is canonical.
8468 That way, different variants of function and method types
8469 with default arguments will all point to the variant with
8470 no defaults as their canonical type. */
8471 any_noncanonical_argtypes_p = true;
8472 }
8473
8474 if (*any_structural_p)
8475 return argtypes;
8476
8477 if (any_noncanonical_argtypes_p)
8478 {
8479 /* Build the canonical list of argument types. */
8480 tree canon_argtypes = NULL_TREE;
8481 bool is_void = false;
8482
8483 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8484 {
8485 if (arg == void_list_node)
8486 is_void = true;
8487 else
8488 canon_argtypes = tree_cons (NULL_TREE,
8489 TYPE_CANONICAL (TREE_VALUE (arg)),
8490 canon_argtypes);
8491 }
8492
8493 canon_argtypes = nreverse (canon_argtypes);
8494 if (is_void)
8495 canon_argtypes = chainon (canon_argtypes, void_list_node);
8496
8497 /* There is a non-canonical type. */
8498 *any_noncanonical_p = true;
8499 return canon_argtypes;
8500 }
8501
8502 /* The canonical argument types are the same as ARGTYPES. */
8503 return argtypes;
8504 }
8505
8506 /* Construct, lay out and return
8507 the type of functions returning type VALUE_TYPE
8508 given arguments of types ARG_TYPES.
8509 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8510 are data type nodes for the arguments of the function.
8511 If such a type has already been constructed, reuse it. */
8512
8513 tree
8514 build_function_type (tree value_type, tree arg_types)
8515 {
8516 tree t;
8517 inchash::hash hstate;
8518 bool any_structural_p, any_noncanonical_p;
8519 tree canon_argtypes;
8520
8521 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8522 {
8523 error ("function return type cannot be function");
8524 value_type = integer_type_node;
8525 }
8526
8527 /* Make a node of the sort we want. */
8528 t = make_node (FUNCTION_TYPE);
8529 TREE_TYPE (t) = value_type;
8530 TYPE_ARG_TYPES (t) = arg_types;
8531
8532 /* If we already have such a type, use the old one. */
8533 hstate.add_object (TYPE_HASH (value_type));
8534 type_hash_list (arg_types, hstate);
8535 t = type_hash_canon (hstate.end (), t);
8536
8537 /* Set up the canonical type. */
8538 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8539 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8540 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8541 &any_structural_p,
8542 &any_noncanonical_p);
8543 if (any_structural_p)
8544 SET_TYPE_STRUCTURAL_EQUALITY (t);
8545 else if (any_noncanonical_p)
8546 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8547 canon_argtypes);
8548
8549 if (!COMPLETE_TYPE_P (t))
8550 layout_type (t);
8551 return t;
8552 }
8553
8554 /* Build a function type. The RETURN_TYPE is the type returned by the
8555 function. If VAARGS is set, no void_type_node is appended to the
8556 list. ARGP must always be terminated by a NULL_TREE. */
8557
8558 static tree
8559 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8560 {
8561 tree t, args, last;
8562
8563 t = va_arg (argp, tree);
8564 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8565 args = tree_cons (NULL_TREE, t, args);
8566
8567 if (vaargs)
8568 {
8569 last = args;
8570 if (args != NULL_TREE)
8571 args = nreverse (args);
8572 gcc_assert (last != void_list_node);
8573 }
8574 else if (args == NULL_TREE)
8575 args = void_list_node;
8576 else
8577 {
8578 last = args;
8579 args = nreverse (args);
8580 TREE_CHAIN (last) = void_list_node;
8581 }
8582 args = build_function_type (return_type, args);
8583
8584 return args;
8585 }
8586
8587 /* Build a function type. The RETURN_TYPE is the type returned by the
8588 function. If additional arguments are provided, they are
8589 additional argument types. The list of argument types must always
8590 be terminated by NULL_TREE. */
8591
8592 tree
8593 build_function_type_list (tree return_type, ...)
8594 {
8595 tree args;
8596 va_list p;
8597
8598 va_start (p, return_type);
8599 args = build_function_type_list_1 (false, return_type, p);
8600 va_end (p);
8601 return args;
8602 }
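
/* Editorial sketch, not part of the original source: the type of a
   function "int f (int, double)" and a pointer to it, built with the
   varargs-style constructor above.  Note the NULL_TREE terminator.  */

static tree
example_build_fn_ptr_type (void)
{
  tree fn_type = build_function_type_list (integer_type_node,
					   integer_type_node,
					   double_type_node,
					   NULL_TREE);
  return build_pointer_type (fn_type);
}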
8603
8604 /* Build a variable argument function type. The RETURN_TYPE is the
8605 type returned by the function. If additional arguments are provided,
8606 they are additional argument types. The list of argument types must
8607 always be terminated by NULL_TREE. */
8608
8609 tree
8610 build_varargs_function_type_list (tree return_type, ...)
8611 {
8612 tree args;
8613 va_list p;
8614
8615 va_start (p, return_type);
8616 args = build_function_type_list_1 (true, return_type, p);
8617 va_end (p);
8618
8619 return args;
8620 }
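
/* Editorial sketch, not part of the original source: a printf-like type
   "int (const char *, ...)".  Unlike build_function_type_list, the
   argument chain is not terminated with void_list_node, which is what
   marks the type as variadic.  */

static tree
example_build_printf_like_type (void)
{
  tree const_char = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
  return build_varargs_function_type_list (integer_type_node,
					   build_pointer_type (const_char),
					   NULL_TREE);
}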
8621
8622 /* Build a function type. RETURN_TYPE is the type returned by the
8623 function; VAARGS indicates whether the function takes varargs. The
8624 function takes N named arguments, the types of which are provided in
8625 ARG_TYPES. */
8626
8627 static tree
8628 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8629 tree *arg_types)
8630 {
8631 int i;
8632 tree t = vaargs ? NULL_TREE : void_list_node;
8633
8634 for (i = n - 1; i >= 0; i--)
8635 t = tree_cons (NULL_TREE, arg_types[i], t);
8636
8637 return build_function_type (return_type, t);
8638 }
8639
8640 /* Build a function type. RETURN_TYPE is the type returned by the
8641 function. The function takes N named arguments, the types of which
8642 are provided in ARG_TYPES. */
8643
8644 tree
8645 build_function_type_array (tree return_type, int n, tree *arg_types)
8646 {
8647 return build_function_type_array_1 (false, return_type, n, arg_types);
8648 }
8649
8650 /* Build a variable argument function type. RETURN_TYPE is the type
8651 returned by the function. The function takes N named arguments, the
8652 types of which are provided in ARG_TYPES. */
8653
8654 tree
8655 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8656 {
8657 return build_function_type_array_1 (true, return_type, n, arg_types);
8658 }
8659
8660 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8661 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8662 for the method. An implicit additional parameter (of type
8663 pointer-to-BASETYPE) is added to the ARGTYPES. */
8664
8665 tree
8666 build_method_type_directly (tree basetype,
8667 tree rettype,
8668 tree argtypes)
8669 {
8670 tree t;
8671 tree ptype;
8672 inchash::hash hstate;
8673 bool any_structural_p, any_noncanonical_p;
8674 tree canon_argtypes;
8675
8676 /* Make a node of the sort we want. */
8677 t = make_node (METHOD_TYPE);
8678
8679 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8680 TREE_TYPE (t) = rettype;
8681 ptype = build_pointer_type (basetype);
8682
8683 /* The actual arglist for this function includes a "hidden" argument
8684 which is "this". Put it into the list of argument types. */
8685 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8686 TYPE_ARG_TYPES (t) = argtypes;
8687
8688 /* If we already have such a type, use the old one. */
8689 hstate.add_object (TYPE_HASH (basetype));
8690 hstate.add_object (TYPE_HASH (rettype));
8691 type_hash_list (argtypes, hstate);
8692 t = type_hash_canon (hstate.end (), t);
8693
8694 /* Set up the canonical type. */
8695 any_structural_p
8696 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8697 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8698 any_noncanonical_p
8699 = (TYPE_CANONICAL (basetype) != basetype
8700 || TYPE_CANONICAL (rettype) != rettype);
8701 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8702 &any_structural_p,
8703 &any_noncanonical_p);
8704 if (any_structural_p)
8705 SET_TYPE_STRUCTURAL_EQUALITY (t);
8706 else if (any_noncanonical_p)
8707 TYPE_CANONICAL (t)
8708 = build_method_type_directly (TYPE_CANONICAL (basetype),
8709 TYPE_CANONICAL (rettype),
8710 canon_argtypes);
8711 if (!COMPLETE_TYPE_P (t))
8712 layout_type (t);
8713
8714 return t;
8715 }
8716
8717 /* Construct, lay out and return the type of methods belonging to class
8718 BASETYPE and whose arguments and values are described by TYPE.
8719 If that type exists already, reuse it.
8720 TYPE must be a FUNCTION_TYPE node. */
8721
8722 tree
8723 build_method_type (tree basetype, tree type)
8724 {
8725 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8726
8727 return build_method_type_directly (basetype,
8728 TREE_TYPE (type),
8729 TYPE_ARG_TYPES (type));
8730 }
8731
8732 /* Construct, lay out and return the type of offsets to a value
8733 of type TYPE, within an object of type BASETYPE.
8734 If a suitable offset type exists already, reuse it. */
8735
8736 tree
8737 build_offset_type (tree basetype, tree type)
8738 {
8739 tree t;
8740 inchash::hash hstate;
8741
8742 /* Make a node of the sort we want. */
8743 t = make_node (OFFSET_TYPE);
8744
8745 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8746 TREE_TYPE (t) = type;
8747
8748 /* If we already have such a type, use the old one. */
8749 hstate.add_object (TYPE_HASH (basetype));
8750 hstate.add_object (TYPE_HASH (type));
8751 t = type_hash_canon (hstate.end (), t);
8752
8753 if (!COMPLETE_TYPE_P (t))
8754 layout_type (t);
8755
8756 if (TYPE_CANONICAL (t) == t)
8757 {
8758 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8759 || TYPE_STRUCTURAL_EQUALITY_P (type))
8760 SET_TYPE_STRUCTURAL_EQUALITY (t);
8761 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8762 || TYPE_CANONICAL (type) != type)
8763 TYPE_CANONICAL (t)
8764 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8765 TYPE_CANONICAL (type));
8766 }
8767
8768 return t;
8769 }
8770
8771 /* Create a complex type whose components are COMPONENT_TYPE. */
8772
8773 tree
8774 build_complex_type (tree component_type)
8775 {
8776 tree t;
8777 inchash::hash hstate;
8778
8779 gcc_assert (INTEGRAL_TYPE_P (component_type)
8780 || SCALAR_FLOAT_TYPE_P (component_type)
8781 || FIXED_POINT_TYPE_P (component_type));
8782
8783 /* Make a node of the sort we want. */
8784 t = make_node (COMPLEX_TYPE);
8785
8786 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8787 SET_TYPE_MODE (t, GET_MODE_COMPLEX_MODE (TYPE_MODE (component_type)));
8788
8789 /* If we already have such a type, use the old one. */
8790 hstate.add_object (TYPE_HASH (component_type));
8791 t = type_hash_canon (hstate.end (), t);
8792
8793 if (!COMPLETE_TYPE_P (t))
8794 layout_type (t);
8795
8796 if (TYPE_CANONICAL (t) == t)
8797 {
8798 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8799 SET_TYPE_STRUCTURAL_EQUALITY (t);
8800 else if (TYPE_CANONICAL (component_type) != component_type)
8801 TYPE_CANONICAL (t)
8802 = build_complex_type (TYPE_CANONICAL (component_type));
8803 }
8804
8805 /* We need to create a name, since complex is a fundamental type. */
8806 if (! TYPE_NAME (t))
8807 {
8808 const char *name;
8809 if (component_type == char_type_node)
8810 name = "complex char";
8811 else if (component_type == signed_char_type_node)
8812 name = "complex signed char";
8813 else if (component_type == unsigned_char_type_node)
8814 name = "complex unsigned char";
8815 else if (component_type == short_integer_type_node)
8816 name = "complex short int";
8817 else if (component_type == short_unsigned_type_node)
8818 name = "complex short unsigned int";
8819 else if (component_type == integer_type_node)
8820 name = "complex int";
8821 else if (component_type == unsigned_type_node)
8822 name = "complex unsigned int";
8823 else if (component_type == long_integer_type_node)
8824 name = "complex long int";
8825 else if (component_type == long_unsigned_type_node)
8826 name = "complex long unsigned int";
8827 else if (component_type == long_long_integer_type_node)
8828 name = "complex long long int";
8829 else if (component_type == long_long_unsigned_type_node)
8830 name = "complex long long unsigned int";
8831 else
8832 name = 0;
8833
8834 if (name != 0)
8835 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8836 get_identifier (name), t);
8837 }
8838
8839 return build_qualified_type (t, TYPE_QUALS (component_type));
8840 }
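
/* Editorial sketch, not part of the original source: the C type
   "_Complex double".  The qualifiers of the component type are carried
   over by the build_qualified_type call at the end of
   build_complex_type.  */

static tree
example_build_complex_double (void)
{
  return build_complex_type (double_type_node);
}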
8841
8842 /* If TYPE is a real or complex floating-point type and the target
8843 does not directly support arithmetic on TYPE then return the wider
8844 type to be used for arithmetic on TYPE. Otherwise, return
8845 NULL_TREE. */
8846
8847 tree
8848 excess_precision_type (tree type)
8849 {
8850 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8851 {
8852 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8853 switch (TREE_CODE (type))
8854 {
8855 case REAL_TYPE:
8856 switch (flt_eval_method)
8857 {
8858 case 1:
8859 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8860 return double_type_node;
8861 break;
8862 case 2:
8863 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8864 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8865 return long_double_type_node;
8866 break;
8867 default:
8868 gcc_unreachable ();
8869 }
8870 break;
8871 case COMPLEX_TYPE:
8872 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8873 return NULL_TREE;
8874 switch (flt_eval_method)
8875 {
8876 case 1:
8877 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8878 return complex_double_type_node;
8879 break;
8880 case 2:
8881 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8882 || (TYPE_MODE (TREE_TYPE (type))
8883 == TYPE_MODE (double_type_node)))
8884 return complex_long_double_type_node;
8885 break;
8886 default:
8887 gcc_unreachable ();
8888 }
8889 break;
8890 default:
8891 break;
8892 }
8893 }
8894 return NULL_TREE;
8895 }
8896 \f
8897 /* Return OP, stripped of any conversions to wider types as much as is safe.
8898 Converting the value back to OP's type makes a value equivalent to OP.
8899
8900 If FOR_TYPE is nonzero, we return a value which, if converted to
8901 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8902
8903 OP must have integer, real or enumeral type. Pointers are not allowed!
8904
8905 There are some cases where the obvious value we could return
8906 would regenerate to OP if converted to OP's type,
8907 but would not extend like OP to wider types.
8908 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8909 For example, if OP is (unsigned short)(signed char)-1,
8910 we avoid returning (signed char)-1 if FOR_TYPE is int,
8911 even though extending that to an unsigned short would regenerate OP,
8912 since the result of extending (signed char)-1 to (int)
8913 is different from (int) OP. */
8914
8915 tree
8916 get_unwidened (tree op, tree for_type)
8917 {
8918 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8919 tree type = TREE_TYPE (op);
8920 unsigned final_prec
8921 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8922 int uns
8923 = (for_type != 0 && for_type != type
8924 && final_prec > TYPE_PRECISION (type)
8925 && TYPE_UNSIGNED (type));
8926 tree win = op;
8927
8928 while (CONVERT_EXPR_P (op))
8929 {
8930 int bitschange;
8931
8932 /* TYPE_PRECISION on vector types has different meaning
8933 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8934 so avoid them here. */
8935 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8936 break;
8937
8938 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8939 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8940
8941 /* Truncations are many-one so cannot be removed.
8942 Unless we are later going to truncate down even farther. */
8943 if (bitschange < 0
8944 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8945 break;
8946
8947 /* See what's inside this conversion. If we decide to strip it,
8948 we will set WIN. */
8949 op = TREE_OPERAND (op, 0);
8950
8951 /* If we have not stripped any zero-extensions (uns is 0),
8952 we can strip any kind of extension.
8953 If we have previously stripped a zero-extension,
8954 only zero-extensions can safely be stripped.
8955 Any extension can be stripped if the bits it would produce
8956 are all going to be discarded later by truncating to FOR_TYPE. */
8957
8958 if (bitschange > 0)
8959 {
8960 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8961 win = op;
8962 /* TYPE_UNSIGNED says whether this is a zero-extension.
8963 Let's avoid computing it if it does not affect WIN
8964 and if UNS will not be needed again. */
8965 if ((uns
8966 || CONVERT_EXPR_P (op))
8967 && TYPE_UNSIGNED (TREE_TYPE (op)))
8968 {
8969 uns = 1;
8970 win = op;
8971 }
8972 }
8973 }
8974
8975 /* If we finally reach a constant see if it fits in for_type and
8976 in that case convert it. */
8977 if (for_type
8978 && TREE_CODE (win) == INTEGER_CST
8979 && TREE_TYPE (win) != for_type
8980 && int_fits_type_p (win, for_type))
8981 win = fold_convert (for_type, win);
8982
8983 return win;
8984 }
8985 \f
8986 /* Return OP or a simpler expression for a narrower value
8987 which can be sign-extended or zero-extended to give back OP.
8988 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8989 or 0 if the value should be sign-extended. */
8990
8991 tree
8992 get_narrower (tree op, int *unsignedp_ptr)
8993 {
8994 int uns = 0;
8995 int first = 1;
8996 tree win = op;
8997 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8998
8999 while (TREE_CODE (op) == NOP_EXPR)
9000 {
9001 int bitschange
9002 = (TYPE_PRECISION (TREE_TYPE (op))
9003 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
9004
9005 /* Truncations are many-one so cannot be removed. */
9006 if (bitschange < 0)
9007 break;
9008
9009 /* See what's inside this conversion. If we decide to strip it,
9010 we will set WIN. */
9011
9012 if (bitschange > 0)
9013 {
9014 op = TREE_OPERAND (op, 0);
9015 /* An extension: the outermost one can be stripped,
9016 but remember whether it is zero or sign extension. */
9017 if (first)
9018 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9019 /* Otherwise, if a sign extension has been stripped,
9020 only sign extensions can now be stripped;
9021 if a zero extension has been stripped, only zero-extensions. */
9022 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9023 break;
9024 first = 0;
9025 }
9026 else /* bitschange == 0 */
9027 {
9028 /* A change in nominal type can always be stripped, but we must
9029 preserve the unsignedness. */
9030 if (first)
9031 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9032 first = 0;
9033 op = TREE_OPERAND (op, 0);
9034 /* Keep trying to narrow, but don't assign op to win if it
9035 would turn an integral type into something else. */
9036 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
9037 continue;
9038 }
9039
9040 win = op;
9041 }
9042
9043 if (TREE_CODE (op) == COMPONENT_REF
9044 /* Since type_for_size always gives an integer type. */
9045 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9046 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9047 /* Ensure field is laid out already. */
9048 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9049 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9050 {
9051 unsigned HOST_WIDE_INT innerprec
9052 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9053 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9054 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9055 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9056
9057 /* We can get this structure field in a narrower type that fits it,
9058 but the resulting extension to its nominal type (a fullword type)
9059 must satisfy the same conditions as for other extensions.
9060
9061 Do this only for fields that are aligned (not bit-fields),
9062 because when bit-field insns will be used there is no
9063 advantage in doing this. */
9064
9065 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9066 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9067 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9068 && type != 0)
9069 {
9070 if (first)
9071 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9072 win = fold_convert (type, op);
9073 }
9074 }
9075
9076 *unsignedp_ptr = uns;
9077 return win;
9078 }
9079 \f
9080 /* Returns true if integer constant C has a value that is permissible
9081 for type TYPE (an INTEGER_TYPE). */
9082
9083 bool
9084 int_fits_type_p (const_tree c, const_tree type)
9085 {
9086 tree type_low_bound, type_high_bound;
9087 bool ok_for_low_bound, ok_for_high_bound;
9088 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9089
9090 retry:
9091 type_low_bound = TYPE_MIN_VALUE (type);
9092 type_high_bound = TYPE_MAX_VALUE (type);
9093
9094 /* If at least one bound of the type is a constant integer, we can check
9095 ourselves and maybe make a decision. If no such decision is possible, but
9096 this type is a subtype, try checking against that. Otherwise, use
9097 fits_to_tree_p, which checks against the precision.
9098
9099 Compute the status for each possibly constant bound, and return if we see
9100 one does not match. Use ok_for_xxx_bound for this purpose: false means
9101 it is unknown whether the constant fits that bound, and true means it is
9102 known to fit; a constant known *not* to fit causes an immediate return. */
9103
9104 /* Check if c >= type_low_bound. */
9105 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9106 {
9107 if (tree_int_cst_lt (c, type_low_bound))
9108 return false;
9109 ok_for_low_bound = true;
9110 }
9111 else
9112 ok_for_low_bound = false;
9113
9114 /* Check if c <= type_high_bound. */
9115 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9116 {
9117 if (tree_int_cst_lt (type_high_bound, c))
9118 return false;
9119 ok_for_high_bound = true;
9120 }
9121 else
9122 ok_for_high_bound = false;
9123
9124 /* If the constant fits both bounds, the result is known. */
9125 if (ok_for_low_bound && ok_for_high_bound)
9126 return true;
9127
9128 /* Perform some generic filtering which may allow making a decision
9129 even if the bounds are not constant. First, negative integers
9130 never fit in unsigned types. */
9131 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
9132 return false;
9133
9134 /* Second, narrower types always fit in wider ones. */
9135 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9136 return true;
9137
9138 /* Third, unsigned integers with top bit set never fit signed types. */
9139 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9140 {
9141 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
9142 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9143 {
9144 /* When a tree_cst is converted to a wide-int, the precision
9145 is taken from the type. However, if the precision of the
9146 mode underneath the type is smaller than that, it is
9147 possible that the value will not fit. The test below
9148 fails if any bit is set between the sign bit of the
9149 underlying mode and the top bit of the type. */
9150 if (wi::ne_p (wi::zext (c, prec - 1), c))
9151 return false;
9152 }
9153 else if (wi::neg_p (c))
9154 return false;
9155 }
9156
9157 /* If we haven't been able to decide at this point, there is nothing more we
9158 can check ourselves here. Look at the base type if we have one and it
9159 has the same precision. */
9160 if (TREE_CODE (type) == INTEGER_TYPE
9161 && TREE_TYPE (type) != 0
9162 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9163 {
9164 type = TREE_TYPE (type);
9165 goto retry;
9166 }
9167
9168 /* Or to fits_to_tree_p, if nothing else. */
9169 return wi::fits_to_tree_p (c, type);
9170 }
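
/* Editorial sketch, not part of the original source: 300 fits in
   "short int" but, on the usual 8-bit-char targets, not in
   "signed char".  */

static void
example_int_fits_type_p (void)
{
  tree c = build_int_cst (integer_type_node, 300);
  gcc_assert (int_fits_type_p (c, short_integer_type_node));
  gcc_assert (!int_fits_type_p (c, signed_char_type_node));
}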
9171
9172 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9173 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9174 represented (assuming two's-complement arithmetic) within the bit
9175 precision of the type are returned instead. */
9176
9177 void
9178 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9179 {
9180 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9181 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9182 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9183 else
9184 {
9185 if (TYPE_UNSIGNED (type))
9186 mpz_set_ui (min, 0);
9187 else
9188 {
9189 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9190 wi::to_mpz (mn, min, SIGNED);
9191 }
9192 }
9193
9194 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9195 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9196 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9197 else
9198 {
9199 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9200 wi::to_mpz (mn, max, TYPE_SIGN (type));
9201 }
9202 }
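
/* Editorial sketch, not part of the original source: querying the
   static bounds of "int" into GMP integers.  The caller owns the mpz_t
   initialization and cleanup.  */

static void
example_get_int_bounds (void)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (integer_type_node, lo, hi);
  /* ... use LO and HI here ...  */
  mpz_clear (lo);
  mpz_clear (hi);
}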
9203
9204 /* Return true if VAR is an automatic variable defined in function FN. */
9205
9206 bool
9207 auto_var_in_fn_p (const_tree var, const_tree fn)
9208 {
9209 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9210 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9211 || TREE_CODE (var) == PARM_DECL)
9212 && ! TREE_STATIC (var))
9213 || TREE_CODE (var) == LABEL_DECL
9214 || TREE_CODE (var) == RESULT_DECL));
9215 }
9216
9217 /* Subprogram of following function. Called by walk_tree.
9218
9219 Return *TP if it is an automatic variable or parameter of the
9220 function passed in as DATA. */
9221
9222 static tree
9223 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9224 {
9225 tree fn = (tree) data;
9226
9227 if (TYPE_P (*tp))
9228 *walk_subtrees = 0;
9229
9230 else if (DECL_P (*tp)
9231 && auto_var_in_fn_p (*tp, fn))
9232 return *tp;
9233
9234 return NULL_TREE;
9235 }
9236
9237 /* Returns true if T is, contains, or refers to a type with variable
9238 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9239 arguments, but not the return type. If FN is nonzero, only return
9240 true if a modifier of the type or position of FN is a variable or
9241 parameter inside FN.
9242
9243 This concept is more general than that of C99 'variably modified types':
9244 in C99, a struct type is never variably modified because a VLA may not
9245 appear as a structure member. However, in GNU C, code like:
9246
9247 struct S { int i[f()]; };
9248
9249 is valid, and other languages may define similar constructs. */
9250
9251 bool
9252 variably_modified_type_p (tree type, tree fn)
9253 {
9254 tree t;
9255
9256 /* Test if T is either variable (if FN is zero) or an expression containing
9257 a variable in FN. If TYPE isn't gimplified, return true also if
9258 gimplify_one_sizepos would gimplify the expression into a local
9259 variable. */
9260 #define RETURN_TRUE_IF_VAR(T) \
9261 do { tree _t = (T); \
9262 if (_t != NULL_TREE \
9263 && _t != error_mark_node \
9264 && TREE_CODE (_t) != INTEGER_CST \
9265 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9266 && (!fn \
9267 || (!TYPE_SIZES_GIMPLIFIED (type) \
9268 && !is_gimple_sizepos (_t)) \
9269 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9270 return true; } while (0)
9271
9272 if (type == error_mark_node)
9273 return false;
9274
9275 /* If TYPE itself has variable size, it is variably modified. */
9276 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9277 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9278
9279 switch (TREE_CODE (type))
9280 {
9281 case POINTER_TYPE:
9282 case REFERENCE_TYPE:
9283 case VECTOR_TYPE:
9284 if (variably_modified_type_p (TREE_TYPE (type), fn))
9285 return true;
9286 break;
9287
9288 case FUNCTION_TYPE:
9289 case METHOD_TYPE:
9290 /* If TYPE is a function type, it is variably modified if the
9291 return type is variably modified. */
9292 if (variably_modified_type_p (TREE_TYPE (type), fn))
9293 return true;
9294 break;
9295
9296 case INTEGER_TYPE:
9297 case REAL_TYPE:
9298 case FIXED_POINT_TYPE:
9299 case ENUMERAL_TYPE:
9300 case BOOLEAN_TYPE:
9301 /* Scalar types are variably modified if their end points
9302 aren't constant. */
9303 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9304 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9305 break;
9306
9307 case RECORD_TYPE:
9308 case UNION_TYPE:
9309 case QUAL_UNION_TYPE:
9310 /* We can't see if any of the fields are variably-modified by the
9311 definition we normally use, since that would produce infinite
9312 recursion via pointers. */
9313 /* This is variably modified if some field's type is. */
9314 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9315 if (TREE_CODE (t) == FIELD_DECL)
9316 {
9317 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9318 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9319 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9320
9321 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9322 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9323 }
9324 break;
9325
9326 case ARRAY_TYPE:
9327 /* Do not call ourselves to avoid infinite recursion. This is
9328 variably modified if the element type is. */
9329 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9330 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9331 break;
9332
9333 default:
9334 break;
9335 }
9336
9337 /* The current language may have other cases to check, but in general,
9338 all other types are not variably modified. */
9339 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9340
9341 #undef RETURN_TRUE_IF_VAR
9342 }
9343
9344 /* Given a DECL or TYPE, return the scope in which it was declared, or
9345 NULL_TREE if there is no containing scope. */
9346
9347 tree
9348 get_containing_scope (const_tree t)
9349 {
9350 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9351 }
9352
9353 /* Return the innermost context enclosing DECL that is
9354 a FUNCTION_DECL, or zero if none. */
9355
9356 tree
9357 decl_function_context (const_tree decl)
9358 {
9359 tree context;
9360
9361 if (TREE_CODE (decl) == ERROR_MARK)
9362 return 0;
9363
9364 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9365 where we look up the function at runtime. Such functions always take
9366 a first argument of type 'pointer to real context'.
9367
9368 C++ should really be fixed to use DECL_CONTEXT for the real context,
9369 and use something else for the "virtual context". */
9370 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9371 context
9372 = TYPE_MAIN_VARIANT
9373 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9374 else
9375 context = DECL_CONTEXT (decl);
9376
9377 while (context && TREE_CODE (context) != FUNCTION_DECL)
9378 {
9379 if (TREE_CODE (context) == BLOCK)
9380 context = BLOCK_SUPERCONTEXT (context);
9381 else
9382 context = get_containing_scope (context);
9383 }
9384
9385 return context;
9386 }
9387
9388 /* Return the innermost context enclosing DECL that is
9389 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9390 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9391
9392 tree
9393 decl_type_context (const_tree decl)
9394 {
9395 tree context = DECL_CONTEXT (decl);
9396
9397 while (context)
9398 switch (TREE_CODE (context))
9399 {
9400 case NAMESPACE_DECL:
9401 case TRANSLATION_UNIT_DECL:
9402 return NULL_TREE;
9403
9404 case RECORD_TYPE:
9405 case UNION_TYPE:
9406 case QUAL_UNION_TYPE:
9407 return context;
9408
9409 case TYPE_DECL:
9410 case FUNCTION_DECL:
9411 context = DECL_CONTEXT (context);
9412 break;
9413
9414 case BLOCK:
9415 context = BLOCK_SUPERCONTEXT (context);
9416 break;
9417
9418 default:
9419 gcc_unreachable ();
9420 }
9421
9422 return NULL_TREE;
9423 }
9424
9425 /* CALL is a CALL_EXPR. Return the declaration for the function
9426 called, or NULL_TREE if the called function cannot be
9427 determined. */
9428
9429 tree
9430 get_callee_fndecl (const_tree call)
9431 {
9432 tree addr;
9433
9434 if (call == error_mark_node)
9435 return error_mark_node;
9436
9437 /* It's invalid to call this function with anything but a
9438 CALL_EXPR. */
9439 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9440
9441 /* The first operand to the CALL is the address of the function
9442 called. */
9443 addr = CALL_EXPR_FN (call);
9444
9445 /* If there is no function, return early. */
9446 if (addr == NULL_TREE)
9447 return NULL_TREE;
9448
9449 STRIP_NOPS (addr);
9450
9451 /* If this is a readonly function pointer, extract its initial value. */
9452 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9453 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9454 && DECL_INITIAL (addr))
9455 addr = DECL_INITIAL (addr);
9456
9457 /* If the address is just `&f' for some function `f', then we know
9458 that `f' is being called. */
9459 if (TREE_CODE (addr) == ADDR_EXPR
9460 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9461 return TREE_OPERAND (addr, 0);
9462
9463 /* We couldn't figure out what was being called. */
9464 return NULL_TREE;
9465 }
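
/* Editorial sketch, not part of the original source: a direct call
   built with build_call_expr wraps the FUNCTION_DECL in an ADDR_EXPR,
   which get_callee_fndecl looks through.  The declaration created here
   is purely illustrative.  */

static void
example_get_callee_fndecl (void)
{
  tree fn_type = build_function_type_list (void_type_node, NULL_TREE);
  tree fndecl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
			    get_identifier ("example_fn"), fn_type);
  tree call = build_call_expr (fndecl, 0);
  gcc_assert (get_callee_fndecl (call) == fndecl);
}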
9466
9467 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9468 return the associated function code, otherwise return CFN_LAST. */
9469
9470 combined_fn
9471 get_call_combined_fn (const_tree call)
9472 {
9473 /* It's invalid to call this function with anything but a CALL_EXPR. */
9474 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9475
9476 if (!CALL_EXPR_FN (call))
9477 return as_combined_fn (CALL_EXPR_IFN (call));
9478
9479 tree fndecl = get_callee_fndecl (call);
9480 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9481 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9482
9483 return CFN_LAST;
9484 }
9485
9486 #define TREE_MEM_USAGE_SPACES 40
9487
9488 /* Print debugging information about tree nodes generated during the compile,
9489 and any language-specific information. */
9490
9491 void
9492 dump_tree_statistics (void)
9493 {
9494 if (GATHER_STATISTICS)
9495 {
9496 int i;
9497 int total_nodes, total_bytes;
9498 fprintf (stderr, "\nKind Nodes Bytes\n");
9499 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9500 total_nodes = total_bytes = 0;
9501 for (i = 0; i < (int) all_kinds; i++)
9502 {
9503 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9504 tree_node_counts[i], tree_node_sizes[i]);
9505 total_nodes += tree_node_counts[i];
9506 total_bytes += tree_node_sizes[i];
9507 }
9508 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9509 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9510 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9511 fprintf (stderr, "Code Nodes\n");
9512 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9513 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9514 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9515 tree_code_counts[i]);
9516 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9517 fprintf (stderr, "\n");
9518 ssanames_print_statistics ();
9519 fprintf (stderr, "\n");
9520 phinodes_print_statistics ();
9521 fprintf (stderr, "\n");
9522 }
9523 else
9524 fprintf (stderr, "(No per-node statistics)\n");
9525
9526 print_type_hash_statistics ();
9527 print_debug_expr_statistics ();
9528 print_value_expr_statistics ();
9529 lang_hooks.print_statistics ();
9530 }
9531 \f
9532 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9533
9534 /* Generate a crc32, folding in the BITS most significant bits of VALUE. */
9535
9536 static unsigned
9537 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9538 {
9539 unsigned ix;
9540
9541 for (ix = bits; ix--; value <<= 1)
9542 {
9543 unsigned feedback;
9544
9545 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9546 chksum <<= 1;
9547 chksum ^= feedback;
9548 }
9549 return chksum;
9550 }
9551
9552 /* Generate a crc32 of a 32-bit unsigned. */
9553
9554 unsigned
9555 crc32_unsigned (unsigned chksum, unsigned value)
9556 {
9557 return crc32_unsigned_bits (chksum, value, 32);
9558 }
9559
9560 /* Generate a crc32 of a byte. */
9561
9562 unsigned
9563 crc32_byte (unsigned chksum, char byte)
9564 {
9565 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9566 }
9567
9568 /* Generate a crc32 of a string. */
9569
9570 unsigned
9571 crc32_string (unsigned chksum, const char *string)
9572 {
9573 do
9574 {
9575 chksum = crc32_byte (chksum, *string);
9576 }
9577 while (*string++);
9578 return chksum;
9579 }
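
/* For example (illustrative only), a checksum can be chained over several
   strings:

     unsigned chksum = crc32_string (0, "foo");
     chksum = crc32_string (chksum, "bar");

   Each string's terminating NUL is folded into the checksum, so chaining is
   not the same computation as checksumming the concatenated string.  */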
9580
9581 /* P is a string that will be used in a symbol. Mask out any characters
9582 that are not valid in that context. */
9583
9584 void
9585 clean_symbol_name (char *p)
9586 {
9587 for (; *p; p++)
9588 if (! (ISALNUM (*p)
9589 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9590 || *p == '$'
9591 #endif
9592 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9593 || *p == '.'
9594 #endif
9595 ))
9596 *p = '_';
9597 }
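
/* A minimal sketch of the effect (assuming a target that defines
   NO_DOT_IN_LABEL, so '.' is not a valid symbol character):

     char buf[] = "foo.c";
     clean_symbol_name (buf);   buf is now "foo_c"  */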
9598
9599 /* For anonymous aggregate types, we need some sort of name to
9600 hold on to. In practice, this should not appear, but it should
9601 not be harmful if it does. */
9602 bool
9603 anon_aggrname_p (const_tree id_node)
9604 {
9605 #ifndef NO_DOT_IN_LABEL
9606 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9607 && IDENTIFIER_POINTER (id_node)[1] == '_');
9608 #else /* NO_DOT_IN_LABEL */
9609 #ifndef NO_DOLLAR_IN_LABEL
9610 return (IDENTIFIER_POINTER (id_node)[0] == '$'
9611 && IDENTIFIER_POINTER (id_node)[1] == '_');
9612 #else /* NO_DOLLAR_IN_LABEL */
9613 #define ANON_AGGRNAME_PREFIX "__anon_"
9614 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9615 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9616 #endif /* NO_DOLLAR_IN_LABEL */
9617 #endif /* NO_DOT_IN_LABEL */
9618 }
9619
9620 /* Return a format for an anonymous aggregate name. */
9621 const char *
9622 anon_aggrname_format ()
9623 {
9624 #ifndef NO_DOT_IN_LABEL
9625 return "._%d";
9626 #else /* NO_DOT_IN_LABEL */
9627 #ifndef NO_DOLLAR_IN_LABEL
9628 return "$_%d";
9629 #else /* NO_DOLLAR_IN_LABEL */
9630 return "__anon_%d";
9631 #endif /* NO_DOLLAR_IN_LABEL */
9632 #endif /* NO_DOT_IN_LABEL */
9633 }
9634
9635 /* Generate a name for a special-purpose function.
9636 The generated name may need to be unique across the whole link.
9637 Changes to this function may also require corresponding changes to
9638 xstrdup_mask_random.
9639 TYPE is some string to identify the purpose of this function to the
9640 linker or collect2; it must start with an uppercase letter,
9641 one of:
9642 I - for constructors
9643 D - for destructors
9644 N - for C++ anonymous namespaces
9645 F - for DWARF unwind frame information. */
9646
9647 tree
9648 get_file_function_name (const char *type)
9649 {
9650 char *buf;
9651 const char *p;
9652 char *q;
9653
9654 /* If we already have a name we know to be unique, just use that. */
9655 if (first_global_object_name)
9656 p = q = ASTRDUP (first_global_object_name);
9657 /* If the target is handling the constructors/destructors, they
9658 will be local to this file and the name is only necessary for
9659 debugging purposes.
9660 We also assign sub_I and sub_D suffixes to constructors called from
9661 the global static constructors. These are always local. */
9662 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9663 || (strncmp (type, "sub_", 4) == 0
9664 && (type[4] == 'I' || type[4] == 'D')))
9665 {
9666 const char *file = main_input_filename;
9667 if (! file)
9668 file = LOCATION_FILE (input_location);
9669 /* Just use the file's basename, because the full pathname
9670 might be quite long. */
9671 p = q = ASTRDUP (lbasename (file));
9672 }
9673 else
9674 {
9675 /* Otherwise, the name must be unique across the entire link.
9676 We don't have anything that we know to be unique to this translation
9677 unit, so use what we do have and throw in some randomness. */
9678 unsigned len;
9679 const char *name = weak_global_object_name;
9680 const char *file = main_input_filename;
9681
9682 if (! name)
9683 name = "";
9684 if (! file)
9685 file = LOCATION_FILE (input_location);
9686
9687 len = strlen (file);
9688 q = (char *) alloca (9 + 17 + len + 1);
9689 memcpy (q, file, len + 1);
9690
9691 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9692 crc32_string (0, name), get_random_seed (false));
9693
9694 p = q;
9695 }
9696
9697 clean_symbol_name (q);
9698 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9699 + strlen (type));
9700
9701 /* Set up the name of the file-level functions we may need.
9702 Use a global object (which is already required to be unique over
9703 the program) rather than the file name (which imposes extra
9704 constraints). */
9705 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9706
9707 return get_identifier (buf);
9708 }
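
/* For instance (illustrative), a static-constructor walker might request

     tree id = get_file_function_name ("I");

   and receive an IDENTIFIER_NODE spelled something like "_GLOBAL__I_foo_c";
   the exact spelling depends on FILE_FUNCTION_FORMAT and on which branch
   above produced the base name.  */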
9709 \f
9710 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9711
9712 /* Complain that the tree code of NODE does not match the expected 0
9713 terminated list of trailing codes. The trailing code list can be
9714 empty, for a more vague error message. FILE, LINE, and FUNCTION
9715 are of the caller. */
9716
9717 void
9718 tree_check_failed (const_tree node, const char *file,
9719 int line, const char *function, ...)
9720 {
9721 va_list args;
9722 const char *buffer;
9723 unsigned length = 0;
9724 enum tree_code code;
9725
9726 va_start (args, function);
9727 while ((code = (enum tree_code) va_arg (args, int)))
9728 length += 4 + strlen (get_tree_code_name (code));
9729 va_end (args);
9730 if (length)
9731 {
9732 char *tmp;
9733 va_start (args, function);
9734 length += strlen ("expected ");
9735 buffer = tmp = (char *) alloca (length);
9736 length = 0;
9737 while ((code = (enum tree_code) va_arg (args, int)))
9738 {
9739 const char *prefix = length ? " or " : "expected ";
9740
9741 strcpy (tmp + length, prefix);
9742 length += strlen (prefix);
9743 strcpy (tmp + length, get_tree_code_name (code));
9744 length += strlen (get_tree_code_name (code));
9745 }
9746 va_end (args);
9747 }
9748 else
9749 buffer = "unexpected node";
9750
9751 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9752 buffer, get_tree_code_name (TREE_CODE (node)),
9753 function, trim_filename (file), line);
9754 }
9755
9756 /* Complain that the tree code of NODE matches a code in the expected 0
9757 terminated list of trailing codes when it should not. FILE, LINE, and
9758 FUNCTION are of the caller. */
9759
9760 void
9761 tree_not_check_failed (const_tree node, const char *file,
9762 int line, const char *function, ...)
9763 {
9764 va_list args;
9765 char *buffer;
9766 unsigned length = 0;
9767 enum tree_code code;
9768
9769 va_start (args, function);
9770 while ((code = (enum tree_code) va_arg (args, int)))
9771 length += 4 + strlen (get_tree_code_name (code));
9772 va_end (args);
9773 va_start (args, function);
9774 buffer = (char *) alloca (length);
9775 length = 0;
9776 while ((code = (enum tree_code) va_arg (args, int)))
9777 {
9778 if (length)
9779 {
9780 strcpy (buffer + length, " or ");
9781 length += 4;
9782 }
9783 strcpy (buffer + length, get_tree_code_name (code));
9784 length += strlen (get_tree_code_name (code));
9785 }
9786 va_end (args);
9787
9788 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9789 buffer, get_tree_code_name (TREE_CODE (node)),
9790 function, trim_filename (file), line);
9791 }
9792
9793 /* Similar to tree_check_failed, except that we check for a class of tree
9794 code, given in CL. */
9795
9796 void
9797 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9798 const char *file, int line, const char *function)
9799 {
9800 internal_error
9801 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9802 TREE_CODE_CLASS_STRING (cl),
9803 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9804 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9805 }
9806
9807 /* Similar to tree_check_failed, except that instead of specifying a
9808 dozen codes, use the knowledge that they're all sequential. */
9809
9810 void
9811 tree_range_check_failed (const_tree node, const char *file, int line,
9812 const char *function, enum tree_code c1,
9813 enum tree_code c2)
9814 {
9815 char *buffer;
9816 unsigned length = 0;
9817 unsigned int c;
9818
9819 for (c = c1; c <= c2; ++c)
9820 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9821
9822 length += strlen ("expected ");
9823 buffer = (char *) alloca (length);
9824 length = 0;
9825
9826 for (c = c1; c <= c2; ++c)
9827 {
9828 const char *prefix = length ? " or " : "expected ";
9829
9830 strcpy (buffer + length, prefix);
9831 length += strlen (prefix);
9832 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9833 length += strlen (get_tree_code_name ((enum tree_code) c));
9834 }
9835
9836 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9837 buffer, get_tree_code_name (TREE_CODE (node)),
9838 function, trim_filename (file), line);
9839 }
9840
9841
9842 /* Similar to tree_check_failed, except that we check that a tree does
9843 not belong to the tree code class given in CL. */
9844
9845 void
9846 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9847 const char *file, int line, const char *function)
9848 {
9849 internal_error
9850 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9851 TREE_CODE_CLASS_STRING (cl),
9852 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9853 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9854 }
9855
9856
9857 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9858
9859 void
9860 omp_clause_check_failed (const_tree node, const char *file, int line,
9861 const char *function, enum omp_clause_code code)
9862 {
9863 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9864 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9865 function, trim_filename (file), line);
9866 }
9867
9868
9869 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9870
9871 void
9872 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9873 const char *function, enum omp_clause_code c1,
9874 enum omp_clause_code c2)
9875 {
9876 char *buffer;
9877 unsigned length = 0;
9878 unsigned int c;
9879
9880 for (c = c1; c <= c2; ++c)
9881 length += 4 + strlen (omp_clause_code_name[c]);
9882
9883 length += strlen ("expected ");
9884 buffer = (char *) alloca (length);
9885 length = 0;
9886
9887 for (c = c1; c <= c2; ++c)
9888 {
9889 const char *prefix = length ? " or " : "expected ";
9890
9891 strcpy (buffer + length, prefix);
9892 length += strlen (prefix);
9893 strcpy (buffer + length, omp_clause_code_name[c]);
9894 length += strlen (omp_clause_code_name[c]);
9895 }
9896
9897 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9898 buffer, omp_clause_code_name[TREE_CODE (node)],
9899 function, trim_filename (file), line);
9900 }
9901
9902
9903 #undef DEFTREESTRUCT
9904 #define DEFTREESTRUCT(VAL, NAME) NAME,
9905
9906 static const char *ts_enum_names[] = {
9907 #include "treestruct.def"
9908 };
9909 #undef DEFTREESTRUCT
9910
9911 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9912
9913 /* Similar to tree_class_check_failed, except that we check
9914 whether the code of NODE contains the tree structure identified by EN. */
9915
9916 void
9917 tree_contains_struct_check_failed (const_tree node,
9918 const enum tree_node_structure_enum en,
9919 const char *file, int line,
9920 const char *function)
9921 {
9922 internal_error
9923 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9924 TS_ENUM_NAME (en),
9925 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9926 }
9927
9928
9929 /* Similar to above, except that the check is for the bounds of a TREE_INT_CST's
9930 (dynamically sized) vector of elements. */
9931
9932 void
9933 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9934 const char *function)
9935 {
9936 internal_error
9937 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9938 idx + 1, len, function, trim_filename (file), line);
9939 }
9940
9941 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9942 (dynamically sized) vector. */
9943
9944 void
9945 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9946 const char *function)
9947 {
9948 internal_error
9949 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9950 idx + 1, len, function, trim_filename (file), line);
9951 }
9952
9953 /* Similar to above, except that the check is for the bounds of the operand
9954 vector of an expression node EXP. */
9955
9956 void
9957 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9958 int line, const char *function)
9959 {
9960 enum tree_code code = TREE_CODE (exp);
9961 internal_error
9962 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9963 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9964 function, trim_filename (file), line);
9965 }
9966
9967 /* Similar to above, except that the check is for the number of
9968 operands of an OMP_CLAUSE node. */
9969
9970 void
9971 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9972 int line, const char *function)
9973 {
9974 internal_error
9975 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9976 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9977 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9978 trim_filename (file), line);
9979 }
9980 #endif /* ENABLE_TREE_CHECKING */
9981 \f
9982 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9983 and mapped to the machine mode MODE. Initialize its fields and build
9984 the information necessary for debugging output. */
9985
9986 static tree
9987 make_vector_type (tree innertype, int nunits, machine_mode mode)
9988 {
9989 tree t;
9990 inchash::hash hstate;
9991 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9992
9993 t = make_node (VECTOR_TYPE);
9994 TREE_TYPE (t) = mv_innertype;
9995 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9996 SET_TYPE_MODE (t, mode);
9997
9998 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9999 SET_TYPE_STRUCTURAL_EQUALITY (t);
10000 else if ((TYPE_CANONICAL (mv_innertype) != innertype
10001 || mode != VOIDmode)
10002 && !VECTOR_BOOLEAN_TYPE_P (t))
10003 TYPE_CANONICAL (t)
10004 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
10005
10006 layout_type (t);
10007
10008 hstate.add_wide_int (VECTOR_TYPE);
10009 hstate.add_wide_int (nunits);
10010 hstate.add_wide_int (mode);
10011 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
10012 t = type_hash_canon (hstate.end (), t);
10013
10014 /* We have built a main variant, based on the main variant of the
10015 inner type. Use it to build the variant we return. */
10016 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10017 && TREE_TYPE (t) != innertype)
10018 return build_type_attribute_qual_variant (t,
10019 TYPE_ATTRIBUTES (innertype),
10020 TYPE_QUALS (innertype));
10021
10022 return t;
10023 }
10024
10025 static tree
10026 make_or_reuse_type (unsigned size, int unsignedp)
10027 {
10028 int i;
10029
10030 if (size == INT_TYPE_SIZE)
10031 return unsignedp ? unsigned_type_node : integer_type_node;
10032 if (size == CHAR_TYPE_SIZE)
10033 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10034 if (size == SHORT_TYPE_SIZE)
10035 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10036 if (size == LONG_TYPE_SIZE)
10037 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10038 if (size == LONG_LONG_TYPE_SIZE)
10039 return (unsignedp ? long_long_unsigned_type_node
10040 : long_long_integer_type_node);
10041
10042 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10043 if (size == int_n_data[i].bitsize
10044 && int_n_enabled_p[i])
10045 return (unsignedp ? int_n_trees[i].unsigned_type
10046 : int_n_trees[i].signed_type);
10047
10048 if (unsignedp)
10049 return make_unsigned_type (size);
10050 else
10051 return make_signed_type (size);
10052 }
10053
10054 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10055
10056 static tree
10057 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10058 {
10059 if (satp)
10060 {
10061 if (size == SHORT_FRACT_TYPE_SIZE)
10062 return unsignedp ? sat_unsigned_short_fract_type_node
10063 : sat_short_fract_type_node;
10064 if (size == FRACT_TYPE_SIZE)
10065 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10066 if (size == LONG_FRACT_TYPE_SIZE)
10067 return unsignedp ? sat_unsigned_long_fract_type_node
10068 : sat_long_fract_type_node;
10069 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10070 return unsignedp ? sat_unsigned_long_long_fract_type_node
10071 : sat_long_long_fract_type_node;
10072 }
10073 else
10074 {
10075 if (size == SHORT_FRACT_TYPE_SIZE)
10076 return unsignedp ? unsigned_short_fract_type_node
10077 : short_fract_type_node;
10078 if (size == FRACT_TYPE_SIZE)
10079 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10080 if (size == LONG_FRACT_TYPE_SIZE)
10081 return unsignedp ? unsigned_long_fract_type_node
10082 : long_fract_type_node;
10083 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10084 return unsignedp ? unsigned_long_long_fract_type_node
10085 : long_long_fract_type_node;
10086 }
10087
10088 return make_fract_type (size, unsignedp, satp);
10089 }
10090
10091 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10092
10093 static tree
10094 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10095 {
10096 if (satp)
10097 {
10098 if (size == SHORT_ACCUM_TYPE_SIZE)
10099 return unsignedp ? sat_unsigned_short_accum_type_node
10100 : sat_short_accum_type_node;
10101 if (size == ACCUM_TYPE_SIZE)
10102 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10103 if (size == LONG_ACCUM_TYPE_SIZE)
10104 return unsignedp ? sat_unsigned_long_accum_type_node
10105 : sat_long_accum_type_node;
10106 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10107 return unsignedp ? sat_unsigned_long_long_accum_type_node
10108 : sat_long_long_accum_type_node;
10109 }
10110 else
10111 {
10112 if (size == SHORT_ACCUM_TYPE_SIZE)
10113 return unsignedp ? unsigned_short_accum_type_node
10114 : short_accum_type_node;
10115 if (size == ACCUM_TYPE_SIZE)
10116 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10117 if (size == LONG_ACCUM_TYPE_SIZE)
10118 return unsignedp ? unsigned_long_accum_type_node
10119 : long_accum_type_node;
10120 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10121 return unsignedp ? unsigned_long_long_accum_type_node
10122 : long_long_accum_type_node;
10123 }
10124
10125 return make_accum_type (size, unsignedp, satp);
10126 }
10127
10128
10129 /* Create an atomic variant node for TYPE. This routine is called
10130 during initialization of data types to create the 5 basic atomic
10131 types. The generic build_variant_type function requires these to
10132 already be set up in order to function properly, so cannot be
10133 called from there. If ALIGN is non-zero, then ensure alignment is
10134 overridden to this value. */
10135
10136 static tree
10137 build_atomic_base (tree type, unsigned int align)
10138 {
10139 tree t;
10140
10141 /* Make sure it's not already registered. */
10142 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10143 return t;
10144
10145 t = build_variant_type_copy (type);
10146 set_type_quals (t, TYPE_QUAL_ATOMIC);
10147
10148 if (align)
10149 SET_TYPE_ALIGN (t, align);
10150
10151 return t;
10152 }
10153
10154 /* Create nodes for all integer types (and error_mark_node) using the sizes
10155 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10156
10157 void
10158 build_common_tree_nodes (bool signed_char)
10159 {
10160 int i;
10161
10162 error_mark_node = make_node (ERROR_MARK);
10163 TREE_TYPE (error_mark_node) = error_mark_node;
10164
10165 initialize_sizetypes ();
10166
10167 /* Define both `signed char' and `unsigned char'. */
10168 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10169 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10170 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10171 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10172
10173 /* Define `char', which is like either `signed char' or `unsigned char'
10174 but not the same as either. */
10175 char_type_node
10176 = (signed_char
10177 ? make_signed_type (CHAR_TYPE_SIZE)
10178 : make_unsigned_type (CHAR_TYPE_SIZE));
10179 TYPE_STRING_FLAG (char_type_node) = 1;
10180
10181 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10182 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10183 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10184 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10185 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10186 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10187 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10188 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10189
10190 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10191 {
10192 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10193 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10194 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10195 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10196
10197 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10198 && int_n_enabled_p[i])
10199 {
10200 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10201 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10202 }
10203 }
10204
10205 /* Define a boolean type. This type only represents boolean values but
10206 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10207 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10208 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10209 TYPE_PRECISION (boolean_type_node) = 1;
10210 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10211
10212 /* Define what type to use for size_t. */
10213 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10214 size_type_node = unsigned_type_node;
10215 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10216 size_type_node = long_unsigned_type_node;
10217 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10218 size_type_node = long_long_unsigned_type_node;
10219 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10220 size_type_node = short_unsigned_type_node;
10221 else
10222 {
10223 int i;
10224
10225 size_type_node = NULL_TREE;
10226 for (i = 0; i < NUM_INT_N_ENTS; i++)
10227 if (int_n_enabled_p[i])
10228 {
10229 char name[50];
10230 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10231
10232 if (strcmp (name, SIZE_TYPE) == 0)
10233 {
10234 size_type_node = int_n_trees[i].unsigned_type;
10235 }
10236 }
10237 if (size_type_node == NULL_TREE)
10238 gcc_unreachable ();
10239 }
10240
10241 /* Fill in the rest of the sized types. Reuse existing type nodes
10242 when possible. */
10243 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10244 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10245 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10246 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10247 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10248
10249 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10250 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10251 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10252 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10253 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10254
10255 /* Don't call build_qualified_type for atomics. That routine does
10256 special processing for atomics, and until they are initialized
10257 it's better not to make that call.
10258
10259 Check to see if there is a target override for atomic types. */
10260
10261 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10262 targetm.atomic_align_for_mode (QImode));
10263 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10264 targetm.atomic_align_for_mode (HImode));
10265 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10266 targetm.atomic_align_for_mode (SImode));
10267 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10268 targetm.atomic_align_for_mode (DImode));
10269 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10270 targetm.atomic_align_for_mode (TImode));
10271
10272 access_public_node = get_identifier ("public");
10273 access_protected_node = get_identifier ("protected");
10274 access_private_node = get_identifier ("private");
10275
10276 /* Define these next since types below may use them. */
10277 integer_zero_node = build_int_cst (integer_type_node, 0);
10278 integer_one_node = build_int_cst (integer_type_node, 1);
10279 integer_three_node = build_int_cst (integer_type_node, 3);
10280 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10281
10282 size_zero_node = size_int (0);
10283 size_one_node = size_int (1);
10284 bitsize_zero_node = bitsize_int (0);
10285 bitsize_one_node = bitsize_int (1);
10286 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10287
10288 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10289 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10290
10291 void_type_node = make_node (VOID_TYPE);
10292 layout_type (void_type_node);
10293
10294 pointer_bounds_type_node = targetm.chkp_bound_type ();
10295
10296 /* We are not going to have real types in C with less than byte alignment,
10297 so we might as well not have any types that claim to have it. */
10298 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10299 TYPE_USER_ALIGN (void_type_node) = 0;
10300
10301 void_node = make_node (VOID_CST);
10302 TREE_TYPE (void_node) = void_type_node;
10303
10304 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10305 layout_type (TREE_TYPE (null_pointer_node));
10306
10307 ptr_type_node = build_pointer_type (void_type_node);
10308 const_ptr_type_node
10309 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10310 fileptr_type_node = ptr_type_node;
10311
10312 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10313
10314 float_type_node = make_node (REAL_TYPE);
10315 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10316 layout_type (float_type_node);
10317
10318 double_type_node = make_node (REAL_TYPE);
10319 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10320 layout_type (double_type_node);
10321
10322 long_double_type_node = make_node (REAL_TYPE);
10323 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10324 layout_type (long_double_type_node);
10325
10326 float_ptr_type_node = build_pointer_type (float_type_node);
10327 double_ptr_type_node = build_pointer_type (double_type_node);
10328 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10329 integer_ptr_type_node = build_pointer_type (integer_type_node);
10330
10331 /* Fixed size integer types. */
10332 uint16_type_node = make_or_reuse_type (16, 1);
10333 uint32_type_node = make_or_reuse_type (32, 1);
10334 uint64_type_node = make_or_reuse_type (64, 1);
10335
10336 /* Decimal float types. */
10337 dfloat32_type_node = make_node (REAL_TYPE);
10338 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10339 layout_type (dfloat32_type_node);
10340 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10341 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10342
10343 dfloat64_type_node = make_node (REAL_TYPE);
10344 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10345 layout_type (dfloat64_type_node);
10346 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10347 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10348
10349 dfloat128_type_node = make_node (REAL_TYPE);
10350 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10351 layout_type (dfloat128_type_node);
10352 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10353 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10354
10355 complex_integer_type_node = build_complex_type (integer_type_node);
10356 complex_float_type_node = build_complex_type (float_type_node);
10357 complex_double_type_node = build_complex_type (double_type_node);
10358 complex_long_double_type_node = build_complex_type (long_double_type_node);
10359
10360 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10361 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10362 sat_ ## KIND ## _type_node = \
10363 make_sat_signed_ ## KIND ## _type (SIZE); \
10364 sat_unsigned_ ## KIND ## _type_node = \
10365 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10366 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10367 unsigned_ ## KIND ## _type_node = \
10368 make_unsigned_ ## KIND ## _type (SIZE);
10369
10370 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10371 sat_ ## WIDTH ## KIND ## _type_node = \
10372 make_sat_signed_ ## KIND ## _type (SIZE); \
10373 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10374 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10375 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10376 unsigned_ ## WIDTH ## KIND ## _type_node = \
10377 make_unsigned_ ## KIND ## _type (SIZE);
10378
10379 /* Make fixed-point type nodes based on four different widths. */
10380 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10381 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10382 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10383 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10384 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10385
10386 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10387 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10388 NAME ## _type_node = \
10389 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10390 u ## NAME ## _type_node = \
10391 make_or_reuse_unsigned_ ## KIND ## _type \
10392 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10393 sat_ ## NAME ## _type_node = \
10394 make_or_reuse_sat_signed_ ## KIND ## _type \
10395 (GET_MODE_BITSIZE (MODE ## mode)); \
10396 sat_u ## NAME ## _type_node = \
10397 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10398 (GET_MODE_BITSIZE (U ## MODE ## mode));
10399
10400 /* Fixed-point type and mode nodes. */
10401 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10402 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10403 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10404 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10405 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10406 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10407 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10408 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10409 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10410 MAKE_FIXED_MODE_NODE (accum, da, DA)
10411 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10412
10413 {
10414 tree t = targetm.build_builtin_va_list ();
10415
10416 /* Many back-ends define record types without setting TYPE_NAME.
10417 If we copied the record type here, we'd keep the original
10418 record type without a name. This breaks name mangling. So,
10419 don't copy record types and let c_common_nodes_and_builtins()
10420 declare the type to be __builtin_va_list. */
10421 if (TREE_CODE (t) != RECORD_TYPE)
10422 t = build_variant_type_copy (t);
10423
10424 va_list_type_node = t;
10425 }
10426 }
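
/* A front end typically calls this once during initialization, e.g.
   (sketch; flag_signed_char is the command-line -fsigned-char setting):

     build_common_tree_nodes (flag_signed_char);  */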
10427
10428 /* Modify DECL for given flags.
10429 TM_PURE attribute is set only on types, so the function will modify
10430 DECL's type when ECF_TM_PURE is used. */
10431
10432 void
10433 set_call_expr_flags (tree decl, int flags)
10434 {
10435 if (flags & ECF_NOTHROW)
10436 TREE_NOTHROW (decl) = 1;
10437 if (flags & ECF_CONST)
10438 TREE_READONLY (decl) = 1;
10439 if (flags & ECF_PURE)
10440 DECL_PURE_P (decl) = 1;
10441 if (flags & ECF_LOOPING_CONST_OR_PURE)
10442 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10443 if (flags & ECF_NOVOPS)
10444 DECL_IS_NOVOPS (decl) = 1;
10445 if (flags & ECF_NORETURN)
10446 TREE_THIS_VOLATILE (decl) = 1;
10447 if (flags & ECF_MALLOC)
10448 DECL_IS_MALLOC (decl) = 1;
10449 if (flags & ECF_RETURNS_TWICE)
10450 DECL_IS_RETURNS_TWICE (decl) = 1;
10451 if (flags & ECF_LEAF)
10452 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10453 NULL, DECL_ATTRIBUTES (decl));
10454 if (flags & ECF_RET1)
10455 DECL_ATTRIBUTES (decl)
10456 = tree_cons (get_identifier ("fn spec"),
10457 build_tree_list (NULL_TREE, build_string (1, "1")),
10458 DECL_ATTRIBUTES (decl));
10459 if ((flags & ECF_TM_PURE) && flag_tm)
10460 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10461 /* Looping const or pure is implied by noreturn.
10462 There is currently no way to declare looping const or looping pure alone. */
10463 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10464 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10465 }
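
/* Illustrative use (not from the original sources): after building a
   declaration DECL for a pure, nothrow runtime helper, a caller might write

     set_call_expr_flags (decl, ECF_PURE | ECF_NOTHROW | ECF_LEAF);  */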
10466
10467
10468 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10469
10470 static void
10471 local_define_builtin (const char *name, tree type, enum built_in_function code,
10472 const char *library_name, int ecf_flags)
10473 {
10474 tree decl;
10475
10476 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10477 library_name, NULL_TREE);
10478 set_call_expr_flags (decl, ecf_flags);
10479
10480 set_builtin_decl (code, decl, true);
10481 }
10482
10483 /* Call this function after instantiating all builtins that the language
10484 front end cares about. This will build the rest of the builtins
10485 and internal functions that are relied upon by the tree optimizers and
10486 the middle-end. */
10487
10488 void
10489 build_common_builtin_nodes (void)
10490 {
10491 tree tmp, ftype;
10492 int ecf_flags;
10493
10494 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10495 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10496 {
10497 ftype = build_function_type (void_type_node, void_list_node);
10498 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10499 local_define_builtin ("__builtin_unreachable", ftype,
10500 BUILT_IN_UNREACHABLE,
10501 "__builtin_unreachable",
10502 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10503 | ECF_CONST);
10504 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10505 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10506 "abort",
10507 ECF_LEAF | ECF_NORETURN | ECF_CONST);
10508 }
10509
10510 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10511 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10512 {
10513 ftype = build_function_type_list (ptr_type_node,
10514 ptr_type_node, const_ptr_type_node,
10515 size_type_node, NULL_TREE);
10516
10517 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10518 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10519 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10520 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10521 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10522 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10523 }
10524
10525 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10526 {
10527 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10528 const_ptr_type_node, size_type_node,
10529 NULL_TREE);
10530 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10531 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10532 }
10533
10534 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10535 {
10536 ftype = build_function_type_list (ptr_type_node,
10537 ptr_type_node, integer_type_node,
10538 size_type_node, NULL_TREE);
10539 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10540 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10541 }
10542
10543 /* If we're checking the stack, `alloca' can throw. */
10544 const int alloca_flags
10545 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10546
10547 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10548 {
10549 ftype = build_function_type_list (ptr_type_node,
10550 size_type_node, NULL_TREE);
10551 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10552 "alloca", alloca_flags);
10553 }
10554
10555 ftype = build_function_type_list (ptr_type_node, size_type_node,
10556 size_type_node, NULL_TREE);
10557 local_define_builtin ("__builtin_alloca_with_align", ftype,
10558 BUILT_IN_ALLOCA_WITH_ALIGN,
10559 "__builtin_alloca_with_align",
10560 alloca_flags);
10561
10562 ftype = build_function_type_list (void_type_node,
10563 ptr_type_node, ptr_type_node,
10564 ptr_type_node, NULL_TREE);
10565 local_define_builtin ("__builtin_init_trampoline", ftype,
10566 BUILT_IN_INIT_TRAMPOLINE,
10567 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10568 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10569 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10570 "__builtin_init_heap_trampoline",
10571 ECF_NOTHROW | ECF_LEAF);
10572
10573 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10574 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10575 BUILT_IN_ADJUST_TRAMPOLINE,
10576 "__builtin_adjust_trampoline",
10577 ECF_CONST | ECF_NOTHROW);
10578
10579 ftype = build_function_type_list (void_type_node,
10580 ptr_type_node, ptr_type_node, NULL_TREE);
10581 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10582 BUILT_IN_NONLOCAL_GOTO,
10583 "__builtin_nonlocal_goto",
10584 ECF_NORETURN | ECF_NOTHROW);
10585
10586 ftype = build_function_type_list (void_type_node,
10587 ptr_type_node, ptr_type_node, NULL_TREE);
10588 local_define_builtin ("__builtin_setjmp_setup", ftype,
10589 BUILT_IN_SETJMP_SETUP,
10590 "__builtin_setjmp_setup", ECF_NOTHROW);
10591
10592 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10593 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10594 BUILT_IN_SETJMP_RECEIVER,
10595 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10596
10597 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10598 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10599 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10600
10601 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10602 local_define_builtin ("__builtin_stack_restore", ftype,
10603 BUILT_IN_STACK_RESTORE,
10604 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10605
10606 /* If there's a possibility that we might use the ARM EABI, build the
10607 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10608 if (targetm.arm_eabi_unwinder)
10609 {
10610 ftype = build_function_type_list (void_type_node, NULL_TREE);
10611 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10612 BUILT_IN_CXA_END_CLEANUP,
10613 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10614 }
10615
10616 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10617 local_define_builtin ("__builtin_unwind_resume", ftype,
10618 BUILT_IN_UNWIND_RESUME,
10619 ((targetm_common.except_unwind_info (&global_options)
10620 == UI_SJLJ)
10621 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10622 ECF_NORETURN);
10623
10624 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10625 {
10626 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10627 NULL_TREE);
10628 local_define_builtin ("__builtin_return_address", ftype,
10629 BUILT_IN_RETURN_ADDRESS,
10630 "__builtin_return_address",
10631 ECF_NOTHROW);
10632 }
10633
10634 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10635 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10636 {
10637 ftype = build_function_type_list (void_type_node, ptr_type_node,
10638 ptr_type_node, NULL_TREE);
10639 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10640 local_define_builtin ("__cyg_profile_func_enter", ftype,
10641 BUILT_IN_PROFILE_FUNC_ENTER,
10642 "__cyg_profile_func_enter", 0);
10643 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10644 local_define_builtin ("__cyg_profile_func_exit", ftype,
10645 BUILT_IN_PROFILE_FUNC_EXIT,
10646 "__cyg_profile_func_exit", 0);
10647 }
10648
10649 /* The exception object and filter values from the runtime. The argument
10650 must be zero before exception lowering, i.e. from the front end. After
10651 exception lowering, it will be the region number for the exception
10652 landing pad. These functions are PURE instead of CONST to prevent
10653 them from being hoisted past the exception edge that will initialize
10654 its value in the landing pad. */
10655 ftype = build_function_type_list (ptr_type_node,
10656 integer_type_node, NULL_TREE);
10657 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10658 /* Only use TM_PURE if we have TM language support. */
10659 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10660 ecf_flags |= ECF_TM_PURE;
10661 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10662 "__builtin_eh_pointer", ecf_flags);
10663
10664 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10665 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10666 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10667 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10668
10669 ftype = build_function_type_list (void_type_node,
10670 integer_type_node, integer_type_node,
10671 NULL_TREE);
10672 local_define_builtin ("__builtin_eh_copy_values", ftype,
10673 BUILT_IN_EH_COPY_VALUES,
10674 "__builtin_eh_copy_values", ECF_NOTHROW);
10675
10676 /* Complex multiplication and division. These are handled as builtins
10677 rather than optabs because emit_library_call_value doesn't support
10678 complex. Further, we can do slightly better with folding these
10679 beasties if the real and imaginary parts of the arguments are separate.
10680 {
10681 int mode;
10682
10683 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10684 {
10685 char mode_name_buf[4], *q;
10686 const char *p;
10687 enum built_in_function mcode, dcode;
10688 tree type, inner_type;
10689 const char *prefix = "__";
10690
10691 if (targetm.libfunc_gnu_prefix)
10692 prefix = "__gnu_";
10693
10694 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10695 if (type == NULL)
10696 continue;
10697 inner_type = TREE_TYPE (type);
10698
10699 ftype = build_function_type_list (type, inner_type, inner_type,
10700 inner_type, inner_type, NULL_TREE);
10701
10702 mcode = ((enum built_in_function)
10703 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10704 dcode = ((enum built_in_function)
10705 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10706
10707 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10708 *q = TOLOWER (*p);
10709 *q = '\0';
10710
10711 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10712 NULL);
10713 local_define_builtin (built_in_names[mcode], ftype, mcode,
10714 built_in_names[mcode],
10715 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10716
10717 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10718 NULL);
10719 local_define_builtin (built_in_names[dcode], ftype, dcode,
10720 built_in_names[dcode],
10721 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10722 }
10723 }
10724
10725 init_internal_fns ();
10726 }
10727
10728 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10729 better way.
10730
10731 If we requested a pointer to a vector, build up the pointers that
10732 we stripped off while looking for the inner type. Similarly for
10733 return values from functions.
10734
10735 The argument TYPE is the top of the chain, and BOTTOM is the
10736 new type which we will point to. */
10737
10738 tree
10739 reconstruct_complex_type (tree type, tree bottom)
10740 {
10741 tree inner, outer;
10742
10743 if (TREE_CODE (type) == POINTER_TYPE)
10744 {
10745 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10746 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10747 TYPE_REF_CAN_ALIAS_ALL (type));
10748 }
10749 else if (TREE_CODE (type) == REFERENCE_TYPE)
10750 {
10751 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10752 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10753 TYPE_REF_CAN_ALIAS_ALL (type));
10754 }
10755 else if (TREE_CODE (type) == ARRAY_TYPE)
10756 {
10757 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10758 outer = build_array_type (inner, TYPE_DOMAIN (type));
10759 }
10760 else if (TREE_CODE (type) == FUNCTION_TYPE)
10761 {
10762 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10763 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10764 }
10765 else if (TREE_CODE (type) == METHOD_TYPE)
10766 {
10767 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10768 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10769 so we must compensate by getting rid of it. */
10770 outer
10771 = build_method_type_directly
10772 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10773 inner,
10774 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10775 }
10776 else if (TREE_CODE (type) == OFFSET_TYPE)
10777 {
10778 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10779 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10780 }
10781 else
10782 return bottom;
10783
10784 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10785 TYPE_QUALS (type));
10786 }
10787
10788 /* Returns a vector tree node given a mode (integer or vector) and
10789 the inner type. */
10790 tree
10791 build_vector_type_for_mode (tree innertype, machine_mode mode)
10792 {
10793 int nunits;
10794
10795 switch (GET_MODE_CLASS (mode))
10796 {
10797 case MODE_VECTOR_INT:
10798 case MODE_VECTOR_FLOAT:
10799 case MODE_VECTOR_FRACT:
10800 case MODE_VECTOR_UFRACT:
10801 case MODE_VECTOR_ACCUM:
10802 case MODE_VECTOR_UACCUM:
10803 nunits = GET_MODE_NUNITS (mode);
10804 break;
10805
10806 case MODE_INT:
10807 /* Check that there are no leftover bits. */
10808 gcc_assert (GET_MODE_BITSIZE (mode)
10809 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10810
10811 nunits = GET_MODE_BITSIZE (mode)
10812 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10813 break;
10814
10815 default:
10816 gcc_unreachable ();
10817 }
10818
10819 return make_vector_type (innertype, nunits, mode);
10820 }
10821
10822 /* Similarly, but takes the inner type and number of units, which must be
10823 a power of two. */
10824
10825 tree
10826 build_vector_type (tree innertype, int nunits)
10827 {
10828 return make_vector_type (innertype, nunits, VOIDmode);
10829 }
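
/* For example (sketch), a V4SI-style vector of four 32-bit integers can be
   requested with

     tree v4si_type = build_vector_type (intSI_type_node, 4);

   leaving it to make_vector_type and layout_type to pick the machine mode.  */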
10830
10831 /* Build a truth (boolean) vector type with NUNITS elements for a vector of VECTOR_SIZE bytes. */
10832
10833 tree
10834 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10835 {
10836 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10837 vector_size);
10838
10839 gcc_assert (mask_mode != VOIDmode);
10840
10841 unsigned HOST_WIDE_INT vsize;
10842 if (mask_mode == BLKmode)
10843 vsize = vector_size * BITS_PER_UNIT;
10844 else
10845 vsize = GET_MODE_BITSIZE (mask_mode);
10846
10847 unsigned HOST_WIDE_INT esize = vsize / nunits;
10848 gcc_assert (esize * nunits == vsize);
10849
10850 tree bool_type = build_nonstandard_boolean_type (esize);
10851
10852 return make_vector_type (bool_type, nunits, mask_mode);
10853 }
10854
10855 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10856
10857 tree
10858 build_same_sized_truth_vector_type (tree vectype)
10859 {
10860 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10861 return vectype;
10862
10863 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10864
10865 if (!size)
10866 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10867
10868 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10869 }
10870
10871 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10872
10873 tree
10874 build_opaque_vector_type (tree innertype, int nunits)
10875 {
10876 tree t = make_vector_type (innertype, nunits, VOIDmode);
10877 tree cand;
10878 /* We always build the non-opaque variant before the opaque one,
10879 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10880 cand = TYPE_NEXT_VARIANT (t);
10881 if (cand
10882 && TYPE_VECTOR_OPAQUE (cand)
10883 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10884 return cand;
10885 /* Otherwise build a variant type and make sure to queue it after
10886 the non-opaque type. */
10887 cand = build_distinct_type_copy (t);
10888 TYPE_VECTOR_OPAQUE (cand) = true;
10889 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10890 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10891 TYPE_NEXT_VARIANT (t) = cand;
10892 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10893 return cand;
10894 }
10895
10896
10897 /* Given an initializer INIT, return TRUE if INIT is zero or some
10898 aggregate of zeros. Otherwise return FALSE. */
10899 bool
10900 initializer_zerop (const_tree init)
10901 {
10902 tree elt;
10903
10904 STRIP_NOPS (init);
10905
10906 switch (TREE_CODE (init))
10907 {
10908 case INTEGER_CST:
10909 return integer_zerop (init);
10910
10911 case REAL_CST:
10912 /* ??? Note that this is not correct for C4X float formats. There,
10913 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10914 negative exponent. */
10915 return real_zerop (init)
10916 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10917
10918 case FIXED_CST:
10919 return fixed_zerop (init);
10920
10921 case COMPLEX_CST:
10922 return integer_zerop (init)
10923 || (real_zerop (init)
10924 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10925 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10926
10927 case VECTOR_CST:
10928 {
10929 unsigned i;
10930 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10931 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10932 return false;
10933 return true;
10934 }
10935
10936 case CONSTRUCTOR:
10937 {
10938 unsigned HOST_WIDE_INT idx;
10939
10940 if (TREE_CLOBBER_P (init))
10941 return false;
10942 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10943 if (!initializer_zerop (elt))
10944 return false;
10945 return true;
10946 }
10947
10948 case STRING_CST:
10949 {
10950 int i;
10951
10952 /* We need to loop through all elements to handle cases like
10953 "\0" and "\0foobar". */
10954 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10955 if (TREE_STRING_POINTER (init)[i] != '\0')
10956 return false;
10957
10958 return true;
10959 }
10960
10961 default:
10962 return false;
10963 }
10964 }
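
/* Illustrative sketch: a caller deciding whether a static variable DECL can
   live in .bss might check

     if (DECL_INITIAL (decl) == NULL_TREE
         || initializer_zerop (DECL_INITIAL (decl)))
       place_in_bss (decl);

   where place_in_bss stands for a hypothetical caller-side action; the real
   section-placement logic checks more than this.  */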
10965
10966 /* Check whether vector VEC consists entirely of equal elements and
10967 that the number of elements corresponds to the type of VEC.
10968 Return the first element of the vector,
10969 or NULL_TREE if the vector is not uniform. */
10970 tree
10971 uniform_vector_p (const_tree vec)
10972 {
10973 tree first, t;
10974 unsigned i;
10975
10976 if (vec == NULL_TREE)
10977 return NULL_TREE;
10978
10979 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10980
10981 if (TREE_CODE (vec) == VECTOR_CST)
10982 {
10983 first = VECTOR_CST_ELT (vec, 0);
10984 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10985 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10986 return NULL_TREE;
10987
10988 return first;
10989 }
10990
10991 else if (TREE_CODE (vec) == CONSTRUCTOR)
10992 {
10993 first = error_mark_node;
10994
10995 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10996 {
10997 if (i == 0)
10998 {
10999 first = t;
11000 continue;
11001 }
11002 if (!operand_equal_p (first, t, 0))
11003 return NULL_TREE;
11004 }
11005 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
11006 return NULL_TREE;
11007
11008 return first;
11009 }
11010
11011 return NULL_TREE;
11012 }
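
/* Illustrative sketch (not from the original sources): vectorizer-style code
   can use this to detect a "splat":

     tree elt = uniform_vector_p (op);
     if (elt != NULL_TREE)
       handle_splat (op, elt);

   where OP is a caller-provided vector value and handle_splat is a
   hypothetical caller-side routine.  */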
11013
11014 /* Build an empty statement at location LOC. */
11015
11016 tree
11017 build_empty_stmt (location_t loc)
11018 {
11019 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11020 SET_EXPR_LOCATION (t, loc);
11021 return t;
11022 }
11023
11024
11025 /* Build an OpenMP clause with code CODE. LOC is the location of the
11026 clause. */
11027
11028 tree
11029 build_omp_clause (location_t loc, enum omp_clause_code code)
11030 {
11031 tree t;
11032 int size, length;
11033
11034 length = omp_clause_num_ops[code];
11035 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11036
11037 record_node_allocation_statistics (OMP_CLAUSE, size);
11038
11039 t = (tree) ggc_internal_alloc (size);
11040 memset (t, 0, size);
11041 TREE_SET_CODE (t, OMP_CLAUSE);
11042 OMP_CLAUSE_SET_CODE (t, code);
11043 OMP_CLAUSE_LOCATION (t) = loc;
11044
11045 return t;
11046 }
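
/* A minimal sketch of building a clause chain (illustrative; VAR and LIST
   are assumed to be supplied by the caller):

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = var;
     OMP_CLAUSE_CHAIN (c) = list;
     list = c;  */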
11047
11048 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11049 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11050 Except for the CODE and operand count field, other storage for the
11051 object is initialized to zeros. */
11052
11053 tree
11054 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
11055 {
11056 tree t;
11057 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11058
11059 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11060 gcc_assert (len >= 1);
11061
11062 record_node_allocation_statistics (code, length);
11063
11064 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11065
11066 TREE_SET_CODE (t, code);
11067
11068 /* Can't use TREE_OPERAND to store the length because if checking is
11069 enabled, it will try to check the length before we store it. :-P */
11070 t->exp.operands[0] = build_int_cst (sizetype, len);
11071
11072 return t;
11073 }
11074
11075 /* Helper function for build_call_* functions; build a CALL_EXPR with
11076 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11077 the argument slots. */
11078
11079 static tree
11080 build_call_1 (tree return_type, tree fn, int nargs)
11081 {
11082 tree t;
11083
11084 t = build_vl_exp (CALL_EXPR, nargs + 3);
11085 TREE_TYPE (t) = return_type;
11086 CALL_EXPR_FN (t) = fn;
11087 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11088
11089 return t;
11090 }
11091
11092 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11093 FN and a null static chain slot. NARGS is the number of call arguments
11094 which are specified as "..." arguments. */
11095
11096 tree
11097 build_call_nary (tree return_type, tree fn, int nargs, ...)
11098 {
11099 tree ret;
11100 va_list args;
11101 va_start (args, nargs);
11102 ret = build_call_valist (return_type, fn, nargs, args);
11103 va_end (args);
11104 return ret;
11105 }
11106
11107 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11108 FN and a null static chain slot. NARGS is the number of call arguments
11109 which are specified as a va_list ARGS. */
11110
11111 tree
11112 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11113 {
11114 tree t;
11115 int i;
11116
11117 t = build_call_1 (return_type, fn, nargs);
11118 for (i = 0; i < nargs; i++)
11119 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11120 process_call_operands (t);
11121 return t;
11122 }
11123
11124 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11125 FN and a null static chain slot. NARGS is the number of call arguments
11126 which are specified as a tree array ARGS. */
11127
11128 tree
11129 build_call_array_loc (location_t loc, tree return_type, tree fn,
11130 int nargs, const tree *args)
11131 {
11132 tree t;
11133 int i;
11134
11135 t = build_call_1 (return_type, fn, nargs);
11136 for (i = 0; i < nargs; i++)
11137 CALL_EXPR_ARG (t, i) = args[i];
11138 process_call_operands (t);
11139 SET_EXPR_LOCATION (t, loc);
11140 return t;
11141 }
11142
11143 /* Like build_call_array, but takes a vec. */
11144
11145 tree
11146 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11147 {
11148 tree ret, t;
11149 unsigned int ix;
11150
11151 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11152 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11153 CALL_EXPR_ARG (ret, ix) = t;
11154 process_call_operands (ret);
11155 return ret;
11156 }
11157
11158 /* Conveniently construct a function call expression. FNDECL names the
11159 function to be called and N arguments are passed in the array
11160 ARGARRAY. */
11161
11162 tree
11163 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11164 {
11165 tree fntype = TREE_TYPE (fndecl);
11166 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11167
11168 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11169 }
11170
11171 /* Conveniently construct a function call expression. FNDECL names the
11172 function to be called and the arguments are passed in the vector
11173 VEC. */
11174
11175 tree
11176 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11177 {
11178 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11179 vec_safe_address (vec));
11180 }
11181
11182
11183 /* Conveniently construct a function call expression. FNDECL names the
11184 function to be called, N is the number of arguments, and the "..."
11185 parameters are the argument expressions. */
11186
11187 tree
11188 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11189 {
11190 va_list ap;
11191 tree *argarray = XALLOCAVEC (tree, n);
11192 int i;
11193
11194 va_start (ap, n);
11195 for (i = 0; i < n; i++)
11196 argarray[i] = va_arg (ap, tree);
11197 va_end (ap);
11198 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11199 }
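
/* Illustrative sketch (not from the original sources): given a FUNCTION_DECL
   "fndecl" for a function taking two arguments, and trees "arg0" and "arg1"
   of the corresponding parameter types, a caller might build the call as

     tree call = build_call_expr_loc (loc, fndecl, 2, arg0, arg1);

   "fndecl", "arg0", "arg1" and "loc" are assumed names; the varargs are
   collected into an array and handed to build_call_expr_loc_array above.  */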
11200
11201 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11202 varargs macros aren't supported by all bootstrap compilers. */
11203
11204 tree
11205 build_call_expr (tree fndecl, int n, ...)
11206 {
11207 va_list ap;
11208 tree *argarray = XALLOCAVEC (tree, n);
11209 int i;
11210
11211 va_start (ap, n);
11212 for (i = 0; i < n; i++)
11213 argarray[i] = va_arg (ap, tree);
11214 va_end (ap);
11215 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11216 }
11217
11218 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11219 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11220 It will get gimplified later into an ordinary internal function. */
11221
11222 tree
11223 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11224 tree type, int n, const tree *args)
11225 {
11226 tree t = build_call_1 (type, NULL_TREE, n);
11227 for (int i = 0; i < n; ++i)
11228 CALL_EXPR_ARG (t, i) = args[i];
11229 SET_EXPR_LOCATION (t, loc);
11230 CALL_EXPR_IFN (t) = ifn;
11231 return t;
11232 }
11233
11234 /* Build an internal call expression.  This is just like CALL_EXPR, except
11235    its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
11236    internal function.  */
11237
11238 tree
11239 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11240 tree type, int n, ...)
11241 {
11242 va_list ap;
11243 tree *argarray = XALLOCAVEC (tree, n);
11244 int i;
11245
11246 va_start (ap, n);
11247 for (i = 0; i < n; i++)
11248 argarray[i] = va_arg (ap, tree);
11249 va_end (ap);
11250 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11251 }
11252
11253 /* Return a function call to FN, if the target is guaranteed to support it,
11254 or null otherwise.
11255
11256 N is the number of arguments, passed in the "...", and TYPE is the
11257 type of the return value. */
11258
11259 tree
11260 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11261 int n, ...)
11262 {
11263 va_list ap;
11264 tree *argarray = XALLOCAVEC (tree, n);
11265 int i;
11266
11267 va_start (ap, n);
11268 for (i = 0; i < n; i++)
11269 argarray[i] = va_arg (ap, tree);
11270 va_end (ap);
11271 if (internal_fn_p (fn))
11272 {
11273 internal_fn ifn = as_internal_fn (fn);
11274 if (direct_internal_fn_p (ifn))
11275 {
11276 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11277 if (!direct_internal_fn_supported_p (ifn, types,
11278 OPTIMIZE_FOR_BOTH))
11279 return NULL_TREE;
11280 }
11281 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11282 }
11283 else
11284 {
11285 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11286 if (!fndecl)
11287 return NULL_TREE;
11288 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11289 }
11290 }
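
/* Illustrative sketch (not from the original sources): a caller asking for a
   square-root call that may be expanded either as an internal function or as
   a builtin could write

     tree call = maybe_build_call_expr_loc (loc, as_combined_fn (BUILT_IN_SQRT),
					     double_type_node, 1, arg);
     if (!call)
       ... fall back to some other expansion ...

   "loc" and "arg" are assumed names, and as_combined_fn is assumed to be the
   conversion helper declared alongside combined_fn; a NULL_TREE result means
   the target has no supported expansion for the requested function.  */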
11291
11292 /* Create a new constant string literal and return a char* pointer to it.
11293 The STRING_CST value is the LEN characters at STR. */
11294 tree
11295 build_string_literal (int len, const char *str)
11296 {
11297 tree t, elem, index, type;
11298
11299 t = build_string (len, str);
11300 elem = build_type_variant (char_type_node, 1, 0);
11301 index = build_index_type (size_int (len - 1));
11302 type = build_array_type (elem, index);
11303 TREE_TYPE (t) = type;
11304 TREE_CONSTANT (t) = 1;
11305 TREE_READONLY (t) = 1;
11306 TREE_STATIC (t) = 1;
11307
11308 type = build_pointer_type (elem);
11309 t = build1 (ADDR_EXPR, type,
11310 build4 (ARRAY_REF, elem,
11311 t, integer_zero_node, NULL_TREE, NULL_TREE));
11312 return t;
11313 }
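
/* Illustrative sketch (not from the original sources): the expander for a
   printf-style builtin might materialize its format string as

     tree fmt = build_string_literal (strlen ("%s\n") + 1, "%s\n");

   which yields an ADDR_EXPR of pointer-to-const-char type pointing at
   element 0 of the new STRING_CST; note that LEN counts the terminating
   NUL.  */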
11314
11315
11316
11317 /* Return true if T (assumed to be a DECL) must be assigned a memory
11318 location. */
11319
11320 bool
11321 needs_to_live_in_memory (const_tree t)
11322 {
11323 return (TREE_ADDRESSABLE (t)
11324 || is_global_var (t)
11325 || (TREE_CODE (t) == RESULT_DECL
11326 && !DECL_BY_REFERENCE (t)
11327 && aggregate_value_p (t, current_function_decl)));
11328 }
11329
11330 /* Return the value of the integer constant X, sign-extending it.  */
11331
11332 HOST_WIDE_INT
11333 int_cst_value (const_tree x)
11334 {
11335 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11336 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11337
11338 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11339 gcc_assert (cst_and_fits_in_hwi (x));
11340
11341 if (bits < HOST_BITS_PER_WIDE_INT)
11342 {
11343 bool negative = ((val >> (bits - 1)) & 1) != 0;
11344 if (negative)
11345 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11346 else
11347 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11348 }
11349
11350 return val;
11351 }
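
/* Worked example for the sign extension above (illustrative only): for a
   constant of an 8-bit type whose low HOST_WIDE_INT word is 0xfe, BITS is 8
   and bit 7 is set, so the upper bits are filled with ones and the function
   returns -2; for 0x7f the upper bits are cleared and it returns 127.  */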
11352
11353 /* If TYPE is an integral or pointer type, return an integer type with
11354 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11355 if TYPE is already an integer type of signedness UNSIGNEDP. */
11356
11357 tree
11358 signed_or_unsigned_type_for (int unsignedp, tree type)
11359 {
11360 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11361 return type;
11362
11363 if (TREE_CODE (type) == VECTOR_TYPE)
11364 {
11365 tree inner = TREE_TYPE (type);
11366 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11367 if (!inner2)
11368 return NULL_TREE;
11369 if (inner == inner2)
11370 return type;
11371 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11372 }
11373
11374 if (!INTEGRAL_TYPE_P (type)
11375 && !POINTER_TYPE_P (type)
11376 && TREE_CODE (type) != OFFSET_TYPE)
11377 return NULL_TREE;
11378
11379 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11380 }
11381
11382 /* If TYPE is an integral or pointer type, return an integer type with
11383 the same precision which is unsigned, or itself if TYPE is already an
11384 unsigned integer type. */
11385
11386 tree
11387 unsigned_type_for (tree type)
11388 {
11389 return signed_or_unsigned_type_for (1, type);
11390 }
11391
11392 /* If TYPE is an integral or pointer type, return an integer type with
11393 the same precision which is signed, or itself if TYPE is already a
11394 signed integer type. */
11395
11396 tree
11397 signed_type_for (tree type)
11398 {
11399 return signed_or_unsigned_type_for (0, type);
11400 }
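
/* Illustrative examples for the three helpers above (not from the original
   sources): unsigned_type_for (integer_type_node) normally yields
   unsigned_type_node; signed_type_for on a 32-bit pointer type yields a
   signed 32-bit integer type; and both return NULL_TREE for types that are
   neither integral, pointer, offset, nor vectors of those.  */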
11401
11402 /* If TYPE is a vector type, return a signed integer vector type with the
11403 same width and number of subparts. Otherwise return boolean_type_node. */
11404
11405 tree
11406 truth_type_for (tree type)
11407 {
11408 if (TREE_CODE (type) == VECTOR_TYPE)
11409 {
11410 if (VECTOR_BOOLEAN_TYPE_P (type))
11411 return type;
11412 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11413 GET_MODE_SIZE (TYPE_MODE (type)));
11414 }
11415 else
11416 return boolean_type_node;
11417 }
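
/* Illustrative examples (not from the original sources): truth_type_for on a
   scalar type such as int simply returns boolean_type_node, while on a
   4-element vector type it returns a 4-element boolean vector type whose
   total size matches the mode of the input vector.  */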
11418
11419 /* Returns the largest value obtainable by casting something in INNER type to
11420 OUTER type. */
11421
11422 tree
11423 upper_bound_in_type (tree outer, tree inner)
11424 {
11425 unsigned int det = 0;
11426 unsigned oprec = TYPE_PRECISION (outer);
11427 unsigned iprec = TYPE_PRECISION (inner);
11428 unsigned prec;
11429
11430 /* Compute a unique number for every combination. */
11431 det |= (oprec > iprec) ? 4 : 0;
11432 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11433 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11434
11435 /* Determine the exponent to use. */
11436 switch (det)
11437 {
11438 case 0:
11439 case 1:
11440 /* oprec <= iprec, outer: signed, inner: don't care. */
11441 prec = oprec - 1;
11442 break;
11443 case 2:
11444 case 3:
11445 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11446 prec = oprec;
11447 break;
11448 case 4:
11449 /* oprec > iprec, outer: signed, inner: signed. */
11450 prec = iprec - 1;
11451 break;
11452 case 5:
11453 /* oprec > iprec, outer: signed, inner: unsigned. */
11454 prec = iprec;
11455 break;
11456 case 6:
11457 /* oprec > iprec, outer: unsigned, inner: signed. */
11458 prec = oprec;
11459 break;
11460 case 7:
11461 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11462 prec = iprec;
11463 break;
11464 default:
11465 gcc_unreachable ();
11466 }
11467
11468 return wide_int_to_tree (outer,
11469 wi::mask (prec, false, TYPE_PRECISION (outer)));
11470 }
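
/* Worked example for the case analysis above (illustrative only): casting
   from a 32-bit signed INNER to an 8-bit unsigned OUTER gives oprec = 8,
   iprec = 32, det = 2, hence prec = oprec = 8 and an upper bound of 255;
   casting from 8-bit signed to 32-bit signed gives det = 4, prec = 7 and an
   upper bound of 127.  */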
11471
11472 /* Returns the smallest value obtainable by casting something in INNER type to
11473 OUTER type. */
11474
11475 tree
11476 lower_bound_in_type (tree outer, tree inner)
11477 {
11478 unsigned oprec = TYPE_PRECISION (outer);
11479 unsigned iprec = TYPE_PRECISION (inner);
11480
11481 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11482 and obtain 0. */
11483 if (TYPE_UNSIGNED (outer)
11484 /* If we are widening something of an unsigned type, OUTER type
11485 contains all values of INNER type. In particular, both INNER
11486 and OUTER types have zero in common. */
11487 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11488 return build_int_cst (outer, 0);
11489 else
11490 {
11491 /* If we are widening a signed type to another signed type, we
11492 	 want to obtain -2^(iprec-1).  If we are keeping the
11493 precision or narrowing to a signed type, we want to obtain
11494 -2^(oprec-1). */
11495 unsigned prec = oprec > iprec ? iprec : oprec;
11496 return wide_int_to_tree (outer,
11497 wi::mask (prec - 1, true,
11498 TYPE_PRECISION (outer)));
11499 }
11500 }
11501
11502 /* Return nonzero if two operands that are suitable for PHI nodes are
11503 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11504 SSA_NAME or invariant. Note that this is strictly an optimization.
11505 That is, callers of this function can directly call operand_equal_p
11506 and get the same result, only slower. */
11507
11508 int
11509 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11510 {
11511 if (arg0 == arg1)
11512 return 1;
11513 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11514 return 0;
11515 return operand_equal_p (arg0, arg1, 0);
11516 }
11517
11518 /* Returns the number of zeros at the end of the binary representation of X.  */
11519
11520 tree
11521 num_ending_zeros (const_tree x)
11522 {
11523 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11524 }
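
/* Worked example (illustrative only): for X equal to 40, whose binary
   representation is 101000, wi::ctz counts three trailing zero bits, so the
   result is build_int_cst (TREE_TYPE (x), 3).  */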
11525
11526
11527 #define WALK_SUBTREE(NODE) \
11528 do \
11529 { \
11530 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11531 if (result) \
11532 return result; \
11533 } \
11534 while (0)
11535
11536 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11537    to be walked whenever a type is seen in the tree.  The rest of the operands
11538    and the return value are as for walk_tree.  */
11539
11540 static tree
11541 walk_type_fields (tree type, walk_tree_fn func, void *data,
11542 hash_set<tree> *pset, walk_tree_lh lh)
11543 {
11544 tree result = NULL_TREE;
11545
11546 switch (TREE_CODE (type))
11547 {
11548 case POINTER_TYPE:
11549 case REFERENCE_TYPE:
11550 case VECTOR_TYPE:
11551 /* We have to worry about mutually recursive pointers. These can't
11552 be written in C. They can in Ada. It's pathological, but
11553 there's an ACATS test (c38102a) that checks it. Deal with this
11554 by checking if we're pointing to another pointer, that one
11555 points to another pointer, that one does too, and we have no htab.
11556 If so, get a hash table. We check three levels deep to avoid
11557 the cost of the hash table if we don't need one. */
11558 if (POINTER_TYPE_P (TREE_TYPE (type))
11559 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11560 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11561 && !pset)
11562 {
11563 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11564 func, data);
11565 if (result)
11566 return result;
11567
11568 break;
11569 }
11570
11571 /* ... fall through ... */
11572
11573 case COMPLEX_TYPE:
11574 WALK_SUBTREE (TREE_TYPE (type));
11575 break;
11576
11577 case METHOD_TYPE:
11578 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11579
11580 /* Fall through. */
11581
11582 case FUNCTION_TYPE:
11583 WALK_SUBTREE (TREE_TYPE (type));
11584 {
11585 tree arg;
11586
11587 /* We never want to walk into default arguments. */
11588 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11589 WALK_SUBTREE (TREE_VALUE (arg));
11590 }
11591 break;
11592
11593 case ARRAY_TYPE:
11594      /* Don't follow this node's type if it is a pointer, for fear that
11595         we'll have infinite recursion.  If we have a PSET, then we
11596         need not fear.  */
11597 if (pset
11598 || (!POINTER_TYPE_P (TREE_TYPE (type))
11599 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11600 WALK_SUBTREE (TREE_TYPE (type));
11601 WALK_SUBTREE (TYPE_DOMAIN (type));
11602 break;
11603
11604 case OFFSET_TYPE:
11605 WALK_SUBTREE (TREE_TYPE (type));
11606 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11607 break;
11608
11609 default:
11610 break;
11611 }
11612
11613 return NULL_TREE;
11614 }
11615
11616 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11617 called with the DATA and the address of each sub-tree. If FUNC returns a
11618 non-NULL value, the traversal is stopped, and the value returned by FUNC
11619 is returned. If PSET is non-NULL it is used to record the nodes visited,
11620 and to avoid visiting a node more than once. */
11621
11622 tree
11623 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11624 hash_set<tree> *pset, walk_tree_lh lh)
11625 {
11626 enum tree_code code;
11627 int walk_subtrees;
11628 tree result;
11629
11630 #define WALK_SUBTREE_TAIL(NODE) \
11631 do \
11632 { \
11633 tp = & (NODE); \
11634 goto tail_recurse; \
11635 } \
11636 while (0)
11637
11638 tail_recurse:
11639 /* Skip empty subtrees. */
11640 if (!*tp)
11641 return NULL_TREE;
11642
11643 /* Don't walk the same tree twice, if the user has requested
11644 that we avoid doing so. */
11645 if (pset && pset->add (*tp))
11646 return NULL_TREE;
11647
11648 /* Call the function. */
11649 walk_subtrees = 1;
11650 result = (*func) (tp, &walk_subtrees, data);
11651
11652 /* If we found something, return it. */
11653 if (result)
11654 return result;
11655
11656 code = TREE_CODE (*tp);
11657
11658 /* Even if we didn't, FUNC may have decided that there was nothing
11659 interesting below this point in the tree. */
11660 if (!walk_subtrees)
11661 {
11662 /* But we still need to check our siblings. */
11663 if (code == TREE_LIST)
11664 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11665 else if (code == OMP_CLAUSE)
11666 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11667 else
11668 return NULL_TREE;
11669 }
11670
11671 if (lh)
11672 {
11673 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11674 if (result || !walk_subtrees)
11675 return result;
11676 }
11677
11678 switch (code)
11679 {
11680 case ERROR_MARK:
11681 case IDENTIFIER_NODE:
11682 case INTEGER_CST:
11683 case REAL_CST:
11684 case FIXED_CST:
11685 case VECTOR_CST:
11686 case STRING_CST:
11687 case BLOCK:
11688 case PLACEHOLDER_EXPR:
11689 case SSA_NAME:
11690 case FIELD_DECL:
11691 case RESULT_DECL:
11692 /* None of these have subtrees other than those already walked
11693 above. */
11694 break;
11695
11696 case TREE_LIST:
11697 WALK_SUBTREE (TREE_VALUE (*tp));
11698 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11699 break;
11700
11701 case TREE_VEC:
11702 {
11703 int len = TREE_VEC_LENGTH (*tp);
11704
11705 if (len == 0)
11706 break;
11707
11708 /* Walk all elements but the first. */
11709 while (--len)
11710 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11711
11712 /* Now walk the first one as a tail call. */
11713 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11714 }
11715
11716 case COMPLEX_CST:
11717 WALK_SUBTREE (TREE_REALPART (*tp));
11718 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11719
11720 case CONSTRUCTOR:
11721 {
11722 unsigned HOST_WIDE_INT idx;
11723 constructor_elt *ce;
11724
11725 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11726 idx++)
11727 WALK_SUBTREE (ce->value);
11728 }
11729 break;
11730
11731 case SAVE_EXPR:
11732 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11733
11734 case BIND_EXPR:
11735 {
11736 tree decl;
11737 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11738 {
11739 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11740 into declarations that are just mentioned, rather than
11741 declared; they don't really belong to this part of the tree.
11742 And, we can see cycles: the initializer for a declaration
11743 can refer to the declaration itself. */
11744 WALK_SUBTREE (DECL_INITIAL (decl));
11745 WALK_SUBTREE (DECL_SIZE (decl));
11746 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11747 }
11748 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11749 }
11750
11751 case STATEMENT_LIST:
11752 {
11753 tree_stmt_iterator i;
11754 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11755 WALK_SUBTREE (*tsi_stmt_ptr (i));
11756 }
11757 break;
11758
11759 case OMP_CLAUSE:
11760 switch (OMP_CLAUSE_CODE (*tp))
11761 {
11762 case OMP_CLAUSE_GANG:
11763 case OMP_CLAUSE__GRIDDIM_:
11764 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11765 /* FALLTHRU */
11766
11767 case OMP_CLAUSE_DEVICE_RESIDENT:
11768 case OMP_CLAUSE_ASYNC:
11769 case OMP_CLAUSE_WAIT:
11770 case OMP_CLAUSE_WORKER:
11771 case OMP_CLAUSE_VECTOR:
11772 case OMP_CLAUSE_NUM_GANGS:
11773 case OMP_CLAUSE_NUM_WORKERS:
11774 case OMP_CLAUSE_VECTOR_LENGTH:
11775 case OMP_CLAUSE_PRIVATE:
11776 case OMP_CLAUSE_SHARED:
11777 case OMP_CLAUSE_FIRSTPRIVATE:
11778 case OMP_CLAUSE_COPYIN:
11779 case OMP_CLAUSE_COPYPRIVATE:
11780 case OMP_CLAUSE_FINAL:
11781 case OMP_CLAUSE_IF:
11782 case OMP_CLAUSE_NUM_THREADS:
11783 case OMP_CLAUSE_SCHEDULE:
11784 case OMP_CLAUSE_UNIFORM:
11785 case OMP_CLAUSE_DEPEND:
11786 case OMP_CLAUSE_NUM_TEAMS:
11787 case OMP_CLAUSE_THREAD_LIMIT:
11788 case OMP_CLAUSE_DEVICE:
11789 case OMP_CLAUSE_DIST_SCHEDULE:
11790 case OMP_CLAUSE_SAFELEN:
11791 case OMP_CLAUSE_SIMDLEN:
11792 case OMP_CLAUSE_ORDERED:
11793 case OMP_CLAUSE_PRIORITY:
11794 case OMP_CLAUSE_GRAINSIZE:
11795 case OMP_CLAUSE_NUM_TASKS:
11796 case OMP_CLAUSE_HINT:
11797 case OMP_CLAUSE_TO_DECLARE:
11798 case OMP_CLAUSE_LINK:
11799 case OMP_CLAUSE_USE_DEVICE_PTR:
11800 case OMP_CLAUSE_IS_DEVICE_PTR:
11801 case OMP_CLAUSE__LOOPTEMP_:
11802 case OMP_CLAUSE__SIMDUID_:
11803 case OMP_CLAUSE__CILK_FOR_COUNT_:
11804 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11805 /* FALLTHRU */
11806
11807 case OMP_CLAUSE_INDEPENDENT:
11808 case OMP_CLAUSE_NOWAIT:
11809 case OMP_CLAUSE_DEFAULT:
11810 case OMP_CLAUSE_UNTIED:
11811 case OMP_CLAUSE_MERGEABLE:
11812 case OMP_CLAUSE_PROC_BIND:
11813 case OMP_CLAUSE_INBRANCH:
11814 case OMP_CLAUSE_NOTINBRANCH:
11815 case OMP_CLAUSE_FOR:
11816 case OMP_CLAUSE_PARALLEL:
11817 case OMP_CLAUSE_SECTIONS:
11818 case OMP_CLAUSE_TASKGROUP:
11819 case OMP_CLAUSE_NOGROUP:
11820 case OMP_CLAUSE_THREADS:
11821 case OMP_CLAUSE_SIMD:
11822 case OMP_CLAUSE_DEFAULTMAP:
11823 case OMP_CLAUSE_AUTO:
11824 case OMP_CLAUSE_SEQ:
11825 case OMP_CLAUSE_TILE:
11826 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11827
11828 case OMP_CLAUSE_LASTPRIVATE:
11829 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11830 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11831 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11832
11833 case OMP_CLAUSE_COLLAPSE:
11834 {
11835 int i;
11836 for (i = 0; i < 3; i++)
11837 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11838 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11839 }
11840
11841 case OMP_CLAUSE_LINEAR:
11842 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11843 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11844 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11845 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11846
11847 case OMP_CLAUSE_ALIGNED:
11848 case OMP_CLAUSE_FROM:
11849 case OMP_CLAUSE_TO:
11850 case OMP_CLAUSE_MAP:
11851 case OMP_CLAUSE__CACHE_:
11852 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11853 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11854 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11855
11856 case OMP_CLAUSE_REDUCTION:
11857 {
11858 int i;
11859 for (i = 0; i < 5; i++)
11860 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11861 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11862 }
11863
11864 default:
11865 gcc_unreachable ();
11866 }
11867 break;
11868
11869 case TARGET_EXPR:
11870 {
11871 int i, len;
11872
11873 	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11874 	   But we only want to walk them once.  */
11875 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11876 for (i = 0; i < len; ++i)
11877 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11878 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11879 }
11880
11881 case DECL_EXPR:
11882 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11883 defining. We only want to walk into these fields of a type in this
11884 case and not in the general case of a mere reference to the type.
11885
11886 The criterion is as follows: if the field can be an expression, it
11887 must be walked only here. This should be in keeping with the fields
11888 that are directly gimplified in gimplify_type_sizes in order for the
11889 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11890 variable-sized types.
11891
11892 Note that DECLs get walked as part of processing the BIND_EXPR. */
11893 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11894 {
11895 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11896 if (TREE_CODE (*type_p) == ERROR_MARK)
11897 return NULL_TREE;
11898
11899 /* Call the function for the type. See if it returns anything or
11900 doesn't want us to continue. If we are to continue, walk both
11901 the normal fields and those for the declaration case. */
11902 result = (*func) (type_p, &walk_subtrees, data);
11903 if (result || !walk_subtrees)
11904 return result;
11905
11906 /* But do not walk a pointed-to type since it may itself need to
11907 be walked in the declaration case if it isn't anonymous. */
11908 if (!POINTER_TYPE_P (*type_p))
11909 {
11910 result = walk_type_fields (*type_p, func, data, pset, lh);
11911 if (result)
11912 return result;
11913 }
11914
11915 /* If this is a record type, also walk the fields. */
11916 if (RECORD_OR_UNION_TYPE_P (*type_p))
11917 {
11918 tree field;
11919
11920 for (field = TYPE_FIELDS (*type_p); field;
11921 field = DECL_CHAIN (field))
11922 {
11923 /* We'd like to look at the type of the field, but we can
11924 easily get infinite recursion. So assume it's pointed
11925 to elsewhere in the tree. Also, ignore things that
11926 aren't fields. */
11927 if (TREE_CODE (field) != FIELD_DECL)
11928 continue;
11929
11930 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11931 WALK_SUBTREE (DECL_SIZE (field));
11932 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11933 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11934 WALK_SUBTREE (DECL_QUALIFIER (field));
11935 }
11936 }
11937
11938 /* Same for scalar types. */
11939 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11940 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11941 || TREE_CODE (*type_p) == INTEGER_TYPE
11942 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11943 || TREE_CODE (*type_p) == REAL_TYPE)
11944 {
11945 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11946 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11947 }
11948
11949 WALK_SUBTREE (TYPE_SIZE (*type_p));
11950 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11951 }
11952 /* FALLTHRU */
11953
11954 default:
11955 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11956 {
11957 int i, len;
11958
11959 /* Walk over all the sub-trees of this operand. */
11960 len = TREE_OPERAND_LENGTH (*tp);
11961
11962 /* Go through the subtrees. We need to do this in forward order so
11963 that the scope of a FOR_EXPR is handled properly. */
11964 if (len)
11965 {
11966 for (i = 0; i < len - 1; ++i)
11967 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11968 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11969 }
11970 }
11971 /* If this is a type, walk the needed fields in the type. */
11972 else if (TYPE_P (*tp))
11973 return walk_type_fields (*tp, func, data, pset, lh);
11974 break;
11975 }
11976
11977 /* We didn't find what we were looking for. */
11978 return NULL_TREE;
11979
11980 #undef WALK_SUBTREE_TAIL
11981 }
11982 #undef WALK_SUBTREE
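
/* Illustrative sketch of the callback protocol above (not from the original
   sources): a walker that counts the DECLs reachable from an expression
   might look like

     static tree
     count_decls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (DECL_P (*tp))
	 ++*(unsigned *) data;
       return NULL_TREE;
     }

     unsigned n = 0;
     walk_tree (&expr, count_decls_r, &n, NULL);

   "count_decls_r", "expr" and "n" are assumed names, and walk_tree is the
   wrapper that passes a NULL language hook to walk_tree_1.  Returning a
   non-NULL tree from the callback terminates the walk and becomes the return
   value; clearing *WALK_SUBTREES skips the operands of the current node but
   still visits its chained siblings, as handled above.  */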
11983
11984 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11985
11986 tree
11987 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11988 walk_tree_lh lh)
11989 {
11990 tree result;
11991
11992 hash_set<tree> pset;
11993 result = walk_tree_1 (tp, func, data, &pset, lh);
11994 return result;
11995 }
11996
11997
11998 tree
11999 tree_block (tree t)
12000 {
12001 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12002
12003 if (IS_EXPR_CODE_CLASS (c))
12004 return LOCATION_BLOCK (t->exp.locus);
12005 gcc_unreachable ();
12006 return NULL;
12007 }
12008
12009 void
12010 tree_set_block (tree t, tree b)
12011 {
12012 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12013
12014 if (IS_EXPR_CODE_CLASS (c))
12015 {
12016 t->exp.locus = set_block (t->exp.locus, b);
12017 }
12018 else
12019 gcc_unreachable ();
12020 }
12021
12022 /* Create a nameless artificial label and put it in the current
12023 function context. The label has a location of LOC. Returns the
12024 newly created label. */
12025
12026 tree
12027 create_artificial_label (location_t loc)
12028 {
12029 tree lab = build_decl (loc,
12030 LABEL_DECL, NULL_TREE, void_type_node);
12031
12032 DECL_ARTIFICIAL (lab) = 1;
12033 DECL_IGNORED_P (lab) = 1;
12034 DECL_CONTEXT (lab) = current_function_decl;
12035 return lab;
12036 }
12037
12038 /* Given a tree, try to return a useful variable name that we can use
12039 to prefix a temporary that is being assigned the value of the tree.
12040 I.E. given <temp> = &A, return A. */
12041
12042 const char *
12043 get_name (tree t)
12044 {
12045 tree stripped_decl;
12046
12047 stripped_decl = t;
12048 STRIP_NOPS (stripped_decl);
12049 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12050 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12051 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12052 {
12053 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12054 if (!name)
12055 return NULL;
12056 return IDENTIFIER_POINTER (name);
12057 }
12058 else
12059 {
12060 switch (TREE_CODE (stripped_decl))
12061 {
12062 case ADDR_EXPR:
12063 return get_name (TREE_OPERAND (stripped_decl, 0));
12064 default:
12065 return NULL;
12066 }
12067 }
12068 }
12069
12070 /* Return true if FNTYPE has a variable argument list.  */
12071
12072 bool
12073 stdarg_p (const_tree fntype)
12074 {
12075 function_args_iterator args_iter;
12076 tree n = NULL_TREE, t;
12077
12078 if (!fntype)
12079 return false;
12080
12081 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12082 {
12083 n = t;
12084 }
12085
12086 return n != NULL_TREE && n != void_type_node;
12087 }
12088
12089 /* Return true if FNTYPE has a prototype.  */
12090
12091 bool
12092 prototype_p (const_tree fntype)
12093 {
12094 tree t;
12095
12096 gcc_assert (fntype != NULL_TREE);
12097
12098 t = TYPE_ARG_TYPES (fntype);
12099 return (t != NULL_TREE);
12100 }
12101
12102 /* If BLOCK is inlined from an __attribute__((__artificial__))
12103    routine, return a pointer to the location from which it has been
12104    called.  */
12105 location_t *
12106 block_nonartificial_location (tree block)
12107 {
12108 location_t *ret = NULL;
12109
12110 while (block && TREE_CODE (block) == BLOCK
12111 && BLOCK_ABSTRACT_ORIGIN (block))
12112 {
12113 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12114
12115 while (TREE_CODE (ao) == BLOCK
12116 && BLOCK_ABSTRACT_ORIGIN (ao)
12117 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
12118 ao = BLOCK_ABSTRACT_ORIGIN (ao);
12119
12120 if (TREE_CODE (ao) == FUNCTION_DECL)
12121 {
12122 /* If AO is an artificial inline, point RET to the
12123 call site locus at which it has been inlined and continue
12124 the loop, in case AO's caller is also an artificial
12125 inline. */
12126 if (DECL_DECLARED_INLINE_P (ao)
12127 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12128 ret = &BLOCK_SOURCE_LOCATION (block);
12129 else
12130 break;
12131 }
12132 else if (TREE_CODE (ao) != BLOCK)
12133 break;
12134
12135 block = BLOCK_SUPERCONTEXT (block);
12136 }
12137 return ret;
12138 }
12139
12140
12141 /* If EXP is inlined from an __attribute__((__artificial__))
12142 function, return the location of the original call expression. */
12143
12144 location_t
12145 tree_nonartificial_location (tree exp)
12146 {
12147 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12148
12149 if (loc)
12150 return *loc;
12151 else
12152 return EXPR_LOCATION (exp);
12153 }
12154
12155
12156 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12157    nodes.  */
12158
12159 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
12160
12161 hashval_t
12162 cl_option_hasher::hash (tree x)
12163 {
12164 const_tree const t = x;
12165 const char *p;
12166 size_t i;
12167 size_t len = 0;
12168 hashval_t hash = 0;
12169
12170 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12171 {
12172 p = (const char *)TREE_OPTIMIZATION (t);
12173 len = sizeof (struct cl_optimization);
12174 }
12175
12176 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12177 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12178
12179 else
12180 gcc_unreachable ();
12181
12182   /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
12183      something else.  */
12184 for (i = 0; i < len; i++)
12185 if (p[i])
12186 hash = (hash << 4) ^ ((i << 2) | p[i]);
12187
12188 return hash;
12189 }
12190
12191 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12192    TARGET_OPTION tree node) is the same as that given by *Y, which is of
12193    the same kind of node.  */
12194
12195 bool
12196 cl_option_hasher::equal (tree x, tree y)
12197 {
12198 const_tree const xt = x;
12199 const_tree const yt = y;
12200 const char *xp;
12201 const char *yp;
12202 size_t len;
12203
12204 if (TREE_CODE (xt) != TREE_CODE (yt))
12205 return 0;
12206
12207 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12208 {
12209 xp = (const char *)TREE_OPTIMIZATION (xt);
12210 yp = (const char *)TREE_OPTIMIZATION (yt);
12211 len = sizeof (struct cl_optimization);
12212 }
12213
12214 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12215 {
12216 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12217 TREE_TARGET_OPTION (yt));
12218 }
12219
12220 else
12221 gcc_unreachable ();
12222
12223 return (memcmp (xp, yp, len) == 0);
12224 }
12225
12226 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12227
12228 tree
12229 build_optimization_node (struct gcc_options *opts)
12230 {
12231 tree t;
12232
12233 /* Use the cache of optimization nodes. */
12234
12235 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12236 opts);
12237
12238 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12239 t = *slot;
12240 if (!t)
12241 {
12242 /* Insert this one into the hash table. */
12243 t = cl_optimization_node;
12244 *slot = t;
12245
12246 /* Make a new node for next time round. */
12247 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12248 }
12249
12250 return t;
12251 }
12252
12253 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12254
12255 tree
12256 build_target_option_node (struct gcc_options *opts)
12257 {
12258 tree t;
12259
12260 /* Use the cache of optimization nodes. */
12261
12262 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12263 opts);
12264
12265 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12266 t = *slot;
12267 if (!t)
12268 {
12269 /* Insert this one into the hash table. */
12270 t = cl_target_option_node;
12271 *slot = t;
12272
12273 /* Make a new node for next time round. */
12274 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12275 }
12276
12277 return t;
12278 }
12279
12280 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12281 so that they aren't saved during PCH writing. */
12282
12283 void
12284 prepare_target_option_nodes_for_pch (void)
12285 {
12286 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12287 for (; iter != cl_option_hash_table->end (); ++iter)
12288 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12289 TREE_TARGET_GLOBALS (*iter) = NULL;
12290 }
12291
12292 /* Determine the "ultimate origin" of a block. The block may be an inlined
12293 instance of an inlined instance of a block which is local to an inline
12294 function, so we have to trace all of the way back through the origin chain
12295 to find out what sort of node actually served as the original seed for the
12296 given block. */
12297
12298 tree
12299 block_ultimate_origin (const_tree block)
12300 {
12301 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12302
12303 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12304 we're trying to output the abstract instance of this function. */
12305 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12306 return NULL_TREE;
12307
12308 if (immediate_origin == NULL_TREE)
12309 return NULL_TREE;
12310 else
12311 {
12312 tree ret_val;
12313 tree lookahead = immediate_origin;
12314
12315 do
12316 {
12317 ret_val = lookahead;
12318 lookahead = (TREE_CODE (ret_val) == BLOCK
12319 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12320 }
12321 while (lookahead != NULL && lookahead != ret_val);
12322
12323 /* The block's abstract origin chain may not be the *ultimate* origin of
12324 the block. It could lead to a DECL that has an abstract origin set.
12325 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12326 will give us if it has one). Note that DECL's abstract origins are
12327 supposed to be the most distant ancestor (or so decl_ultimate_origin
12328 claims), so we don't need to loop following the DECL origins. */
12329 if (DECL_P (ret_val))
12330 return DECL_ORIGIN (ret_val);
12331
12332 return ret_val;
12333 }
12334 }
12335
12336 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12337 no instruction. */
12338
12339 bool
12340 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12341 {
12342 /* Do not strip casts into or out of differing address spaces. */
12343 if (POINTER_TYPE_P (outer_type)
12344 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12345 {
12346 if (!POINTER_TYPE_P (inner_type)
12347 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12348 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12349 return false;
12350 }
12351 else if (POINTER_TYPE_P (inner_type)
12352 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12353 {
12354 /* We already know that outer_type is not a pointer with
12355 a non-generic address space. */
12356 return false;
12357 }
12358
12359   /* Use precision rather than machine mode when we can, which gives
12360      the correct answer even for submode (bit-field) types.  */
12361 if ((INTEGRAL_TYPE_P (outer_type)
12362 || POINTER_TYPE_P (outer_type)
12363 || TREE_CODE (outer_type) == OFFSET_TYPE)
12364 && (INTEGRAL_TYPE_P (inner_type)
12365 || POINTER_TYPE_P (inner_type)
12366 || TREE_CODE (inner_type) == OFFSET_TYPE))
12367 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12368
12369 /* Otherwise fall back on comparing machine modes (e.g. for
12370 aggregate types, floats). */
12371 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12372 }
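
/* Illustrative examples (not from the original sources): a conversion
   between int and unsigned int is a nop because the precisions match, a
   conversion from int to short is not, and a conversion between pointers
   into different named address spaces is never a nop.  */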
12373
12374 /* Return true iff conversion in EXP generates no instruction. Mark
12375 it inline so that we fully inline into the stripping functions even
12376 though we have two uses of this function. */
12377
12378 static inline bool
12379 tree_nop_conversion (const_tree exp)
12380 {
12381 tree outer_type, inner_type;
12382
12383 if (!CONVERT_EXPR_P (exp)
12384 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12385 return false;
12386 if (TREE_OPERAND (exp, 0) == error_mark_node)
12387 return false;
12388
12389 outer_type = TREE_TYPE (exp);
12390 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12391
12392 if (!inner_type)
12393 return false;
12394
12395 return tree_nop_conversion_p (outer_type, inner_type);
12396 }
12397
12398 /* Return true iff conversion in EXP generates no instruction. Don't
12399 consider conversions changing the signedness. */
12400
12401 static bool
12402 tree_sign_nop_conversion (const_tree exp)
12403 {
12404 tree outer_type, inner_type;
12405
12406 if (!tree_nop_conversion (exp))
12407 return false;
12408
12409 outer_type = TREE_TYPE (exp);
12410 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12411
12412 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12413 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12414 }
12415
12416 /* Strip conversions from EXP according to tree_nop_conversion and
12417 return the resulting expression. */
12418
12419 tree
12420 tree_strip_nop_conversions (tree exp)
12421 {
12422 while (tree_nop_conversion (exp))
12423 exp = TREE_OPERAND (exp, 0);
12424 return exp;
12425 }
12426
12427 /* Strip conversions from EXP according to tree_sign_nop_conversion
12428 and return the resulting expression. */
12429
12430 tree
12431 tree_strip_sign_nop_conversions (tree exp)
12432 {
12433 while (tree_sign_nop_conversion (exp))
12434 exp = TREE_OPERAND (exp, 0);
12435 return exp;
12436 }
12437
12438 /* Avoid any floating point extensions from EXP. */
12439 tree
12440 strip_float_extensions (tree exp)
12441 {
12442 tree sub, expt, subt;
12443
12444   /* For a floating point constant, look up the narrowest type that can hold
12445      it properly and handle it like (type)(narrowest_type)constant.
12446      This way we can optimize for instance a=a*2.0 where "a" is float
12447      but 2.0 is a double constant.  */
12448 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12449 {
12450 REAL_VALUE_TYPE orig;
12451 tree type = NULL;
12452
12453 orig = TREE_REAL_CST (exp);
12454 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12455 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12456 type = float_type_node;
12457 else if (TYPE_PRECISION (TREE_TYPE (exp))
12458 > TYPE_PRECISION (double_type_node)
12459 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12460 type = double_type_node;
12461 if (type)
12462 return build_real_truncate (type, orig);
12463 }
12464
12465 if (!CONVERT_EXPR_P (exp))
12466 return exp;
12467
12468 sub = TREE_OPERAND (exp, 0);
12469 subt = TREE_TYPE (sub);
12470 expt = TREE_TYPE (exp);
12471
12472 if (!FLOAT_TYPE_P (subt))
12473 return exp;
12474
12475 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12476 return exp;
12477
12478 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12479 return exp;
12480
12481 return strip_float_extensions (sub);
12482 }
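
/* Illustrative examples (not from the original sources): for (double) f with
   f of type float, strip_float_extensions returns f itself; for the double
   constant 2.0 it returns the equivalent float constant, since the value
   truncates to float exactly; a genuinely double-only constant such as 1e300
   is returned unchanged.  */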
12483
12484 /* Strip out all handled components that produce invariant
12485 offsets. */
12486
12487 const_tree
12488 strip_invariant_refs (const_tree op)
12489 {
12490 while (handled_component_p (op))
12491 {
12492 switch (TREE_CODE (op))
12493 {
12494 case ARRAY_REF:
12495 case ARRAY_RANGE_REF:
12496 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12497 || TREE_OPERAND (op, 2) != NULL_TREE
12498 || TREE_OPERAND (op, 3) != NULL_TREE)
12499 return NULL;
12500 break;
12501
12502 case COMPONENT_REF:
12503 if (TREE_OPERAND (op, 2) != NULL_TREE)
12504 return NULL;
12505 break;
12506
12507 default:;
12508 }
12509 op = TREE_OPERAND (op, 0);
12510 }
12511
12512 return op;
12513 }
12514
12515 static GTY(()) tree gcc_eh_personality_decl;
12516
12517 /* Return the GCC personality function decl. */
12518
12519 tree
12520 lhd_gcc_personality (void)
12521 {
12522 if (!gcc_eh_personality_decl)
12523 gcc_eh_personality_decl = build_personality_function ("gcc");
12524 return gcc_eh_personality_decl;
12525 }
12526
12527 /* TARGET is the call target of a GIMPLE call statement
12528    (obtained by gimple_call_fn).  Return true if it is an
12529    OBJ_TYPE_REF representing a virtual call of a C++ method.
12530    (As opposed to an OBJ_TYPE_REF representing Objective-C calls
12531    through a cast, where the middle-end devirtualization machinery
12532    can't apply.)  */
12533
12534 bool
12535 virtual_method_call_p (const_tree target)
12536 {
12537 if (TREE_CODE (target) != OBJ_TYPE_REF)
12538 return false;
12539 tree t = TREE_TYPE (target);
12540 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12541 t = TREE_TYPE (t);
12542 if (TREE_CODE (t) == FUNCTION_TYPE)
12543 return false;
12544 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12545   /* If we do not have a BINFO associated, it means that the type was built
12546      without devirtualization enabled.  Do not consider this a virtual
12547      call.  */
12548 if (!TYPE_BINFO (obj_type_ref_class (target)))
12549 return false;
12550 return true;
12551 }
12552
12553 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12554
12555 tree
12556 obj_type_ref_class (const_tree ref)
12557 {
12558 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12559 ref = TREE_TYPE (ref);
12560 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12561 ref = TREE_TYPE (ref);
12562   /* We look for the type THIS points to.  ObjC also builds
12563      OBJ_TYPE_REF with non-method calls; their first parameter
12564      ID, however, also corresponds to the class type.  */
12565 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12566 || TREE_CODE (ref) == FUNCTION_TYPE);
12567 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12568 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12569 return TREE_TYPE (ref);
12570 }
12571
12572 /* Look up the sub-BINFO of BINFO that is of type TYPE and located at
      offset POS.  */
12573
12574 static tree
12575 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12576 {
12577 unsigned int i;
12578 tree base_binfo, b;
12579
12580 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12581 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12582 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12583 return base_binfo;
12584 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12585 return b;
12586 return NULL;
12587 }
12588
12589 /* Try to find a base info of BINFO that would have its field decl at offset
12590    OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
12591    found, return it; otherwise return NULL_TREE.  */
12592
12593 tree
12594 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12595 {
12596 tree type = BINFO_TYPE (binfo);
12597
12598 while (true)
12599 {
12600 HOST_WIDE_INT pos, size;
12601 tree fld;
12602 int i;
12603
12604 if (types_same_for_odr (type, expected_type))
12605 return binfo;
12606 if (offset < 0)
12607 return NULL_TREE;
12608
12609 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12610 {
12611 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12612 continue;
12613
12614 pos = int_bit_position (fld);
12615 size = tree_to_uhwi (DECL_SIZE (fld));
12616 if (pos <= offset && (pos + size) > offset)
12617 break;
12618 }
12619 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12620 return NULL_TREE;
12621
12622 /* Offset 0 indicates the primary base, whose vtable contents are
12623 represented in the binfo for the derived class. */
12624 else if (offset != 0)
12625 {
12626 tree found_binfo = NULL, base_binfo;
12627 /* Offsets in BINFO are in bytes relative to the whole structure
12628 while POS is in bits relative to the containing field. */
12629 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12630 / BITS_PER_UNIT);
12631
12632 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12633 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12634 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12635 {
12636 found_binfo = base_binfo;
12637 break;
12638 }
12639 if (found_binfo)
12640 binfo = found_binfo;
12641 else
12642 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12643 binfo_offset);
12644 }
12645
12646 type = TREE_TYPE (fld);
12647 offset -= pos;
12648 }
12649 }
12650
12651 /* Returns true if X is a typedef decl. */
12652
12653 bool
12654 is_typedef_decl (const_tree x)
12655 {
12656 return (x && TREE_CODE (x) == TYPE_DECL
12657 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12658 }
12659
12660 /* Returns true iff TYPE is a type variant created for a typedef. */
12661
12662 bool
12663 typedef_variant_p (const_tree type)
12664 {
12665 return is_typedef_decl (TYPE_NAME (type));
12666 }
12667
12668 /* Warn about a use of an identifier which was marked deprecated. */
12669 void
12670 warn_deprecated_use (tree node, tree attr)
12671 {
12672 const char *msg;
12673
12674 if (node == 0 || !warn_deprecated_decl)
12675 return;
12676
12677 if (!attr)
12678 {
12679 if (DECL_P (node))
12680 attr = DECL_ATTRIBUTES (node);
12681 else if (TYPE_P (node))
12682 {
12683 tree decl = TYPE_STUB_DECL (node);
12684 if (decl)
12685 attr = lookup_attribute ("deprecated",
12686 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12687 }
12688 }
12689
12690 if (attr)
12691 attr = lookup_attribute ("deprecated", attr);
12692
12693 if (attr)
12694 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12695 else
12696 msg = NULL;
12697
12698 bool w;
12699 if (DECL_P (node))
12700 {
12701 if (msg)
12702 w = warning (OPT_Wdeprecated_declarations,
12703 "%qD is deprecated: %s", node, msg);
12704 else
12705 w = warning (OPT_Wdeprecated_declarations,
12706 "%qD is deprecated", node);
12707 if (w)
12708 inform (DECL_SOURCE_LOCATION (node), "declared here");
12709 }
12710 else if (TYPE_P (node))
12711 {
12712 tree what = NULL_TREE;
12713 tree decl = TYPE_STUB_DECL (node);
12714
12715 if (TYPE_NAME (node))
12716 {
12717 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12718 what = TYPE_NAME (node);
12719 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12720 && DECL_NAME (TYPE_NAME (node)))
12721 what = DECL_NAME (TYPE_NAME (node));
12722 }
12723
12724 if (decl)
12725 {
12726 if (what)
12727 {
12728 if (msg)
12729 w = warning (OPT_Wdeprecated_declarations,
12730 "%qE is deprecated: %s", what, msg);
12731 else
12732 w = warning (OPT_Wdeprecated_declarations,
12733 "%qE is deprecated", what);
12734 }
12735 else
12736 {
12737 if (msg)
12738 w = warning (OPT_Wdeprecated_declarations,
12739 "type is deprecated: %s", msg);
12740 else
12741 w = warning (OPT_Wdeprecated_declarations,
12742 "type is deprecated");
12743 }
12744 if (w)
12745 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12746 }
12747 else
12748 {
12749 if (what)
12750 {
12751 if (msg)
12752 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12753 what, msg);
12754 else
12755 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12756 }
12757 else
12758 {
12759 if (msg)
12760 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12761 msg);
12762 else
12763 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12764 }
12765 }
12766 }
12767 }
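
/* Illustrative example (not from the original sources): for a declaration

     int old_fn (void) __attribute__ ((deprecated ("use new_fn instead")));

   a use of old_fn reaches this function with the attribute's message string
   attached, producing "'old_fn' is deprecated: use new_fn instead" followed
   by a "declared here" note at the declaration.  */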
12768
12769 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12770 somewhere in it. */
12771
12772 bool
12773 contains_bitfld_component_ref_p (const_tree ref)
12774 {
12775 while (handled_component_p (ref))
12776 {
12777 if (TREE_CODE (ref) == COMPONENT_REF
12778 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12779 return true;
12780 ref = TREE_OPERAND (ref, 0);
12781 }
12782
12783 return false;
12784 }
12785
12786 /* Try to determine whether a TRY_CATCH expression can fall through.
12787 This is a subroutine of block_may_fallthru. */
12788
12789 static bool
12790 try_catch_may_fallthru (const_tree stmt)
12791 {
12792 tree_stmt_iterator i;
12793
12794 /* If the TRY block can fall through, the whole TRY_CATCH can
12795 fall through. */
12796 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12797 return true;
12798
12799 i = tsi_start (TREE_OPERAND (stmt, 1));
12800 switch (TREE_CODE (tsi_stmt (i)))
12801 {
12802 case CATCH_EXPR:
12803 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12804 catch expression and a body. The whole TRY_CATCH may fall
12805 through iff any of the catch bodies falls through. */
12806 for (; !tsi_end_p (i); tsi_next (&i))
12807 {
12808 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12809 return true;
12810 }
12811 return false;
12812
12813 case EH_FILTER_EXPR:
12814 /* The exception filter expression only matters if there is an
12815 exception. If the exception does not match EH_FILTER_TYPES,
12816 we will execute EH_FILTER_FAILURE, and we will fall through
12817 if that falls through. If the exception does match
12818 EH_FILTER_TYPES, the stack unwinder will continue up the
12819 stack, so we will not fall through. We don't know whether we
12820 will throw an exception which matches EH_FILTER_TYPES or not,
12821 so we just ignore EH_FILTER_TYPES and assume that we might
12822 throw an exception which doesn't match. */
12823 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12824
12825 default:
12826 /* This case represents statements to be executed when an
12827 exception occurs. Those statements are implicitly followed
12828 by a RESX statement to resume execution after the exception.
12829 So in this case the TRY_CATCH never falls through. */
12830 return false;
12831 }
12832 }
12833
12834 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12835 need not be 100% accurate; simply be conservative and return true if we
12836 don't know. This is used only to avoid stupidly generating extra code.
12837 If we're wrong, we'll just delete the extra code later. */
12838
12839 bool
12840 block_may_fallthru (const_tree block)
12841 {
12842 /* This CONST_CAST is okay because expr_last returns its argument
12843 unmodified and we assign it to a const_tree. */
12844 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12845
12846 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12847 {
12848 case GOTO_EXPR:
12849 case RETURN_EXPR:
12850 /* Easy cases. If the last statement of the block implies
12851 control transfer, then we can't fall through. */
12852 return false;
12853
12854 case SWITCH_EXPR:
12855 /* If SWITCH_LABELS is set, this is lowered, and represents a
12856 branch to a selected label and hence can not fall through.
12857 Otherwise SWITCH_BODY is set, and the switch can fall
12858 through. */
12859 return SWITCH_LABELS (stmt) == NULL_TREE;
12860
12861 case COND_EXPR:
12862 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12863 return true;
12864 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12865
12866 case BIND_EXPR:
12867 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12868
12869 case TRY_CATCH_EXPR:
12870 return try_catch_may_fallthru (stmt);
12871
12872 case TRY_FINALLY_EXPR:
12873 /* The finally clause is always executed after the try clause,
12874 so if it does not fall through, then the try-finally will not
12875 fall through. Otherwise, if the try clause does not fall
12876 through, then when the finally clause falls through it will
12877 resume execution wherever the try clause was going. So the
12878 whole try-finally will only fall through if both the try
12879 clause and the finally clause fall through. */
12880 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12881 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12882
12883 case MODIFY_EXPR:
12884 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12885 stmt = TREE_OPERAND (stmt, 1);
12886 else
12887 return true;
12888 /* FALLTHRU */
12889
12890 case CALL_EXPR:
12891 /* Functions that do not return do not fall through. */
12892 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12893
12894 case CLEANUP_POINT_EXPR:
12895 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12896
12897 case TARGET_EXPR:
12898 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12899
12900 case ERROR_MARK:
12901 return true;
12902
12903 default:
12904 return lang_hooks.block_may_fallthru (stmt);
12905 }
12906 }
12907
12908 /* True if we are using EH to handle cleanups. */
12909 static bool using_eh_for_cleanups_flag = false;
12910
12911 /* This routine is called from front ends to indicate that EH should be used
12912    for cleanups.  */
12913 void
12914 using_eh_for_cleanups (void)
12915 {
12916 using_eh_for_cleanups_flag = true;
12917 }
12918
12919 /* Query whether EH is used for cleanups. */
12920 bool
12921 using_eh_for_cleanups_p (void)
12922 {
12923 return using_eh_for_cleanups_flag;
12924 }
12925
12926 /* Wrapper for tree_code_name to ensure that the tree code is valid.  */
12927 const char *
12928 get_tree_code_name (enum tree_code code)
12929 {
12930 const char *invalid = "<invalid tree code>";
12931
12932 if (code >= MAX_TREE_CODES)
12933 return invalid;
12934
12935 return tree_code_name[code];
12936 }
12937
12938 /* Drops the TREE_OVERFLOW flag from T. */
12939
12940 tree
12941 drop_tree_overflow (tree t)
12942 {
12943 gcc_checking_assert (TREE_OVERFLOW (t));
12944
12945 /* For tree codes with a sharing machinery re-build the result. */
12946 if (TREE_CODE (t) == INTEGER_CST)
12947 return wide_int_to_tree (TREE_TYPE (t), t);
12948
12949 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12950 and drop the flag. */
12951 t = copy_node (t);
12952 TREE_OVERFLOW (t) = 0;
12953 return t;
12954 }
12955
12956 /* Given a memory reference expression T, return its base address.
12957 The base address of a memory reference expression is the main
12958 object being referenced. For instance, the base address for
12959 'array[i].fld[j]' is 'array'. You can think of this as stripping
12960 away the offset part from a memory address.
12961
12962 This function calls handled_component_p to strip away all the inner
12963 parts of the memory reference until it reaches the base object. */
12964
12965 tree
12966 get_base_address (tree t)
12967 {
12968 while (handled_component_p (t))
12969 t = TREE_OPERAND (t, 0);
12970
12971 if ((TREE_CODE (t) == MEM_REF
12972 || TREE_CODE (t) == TARGET_MEM_REF)
12973 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12974 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12975
12976 /* ??? Either the alias oracle or all callers need to properly deal
12977 with WITH_SIZE_EXPRs before we can look through those. */
12978 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12979 return NULL_TREE;
12980
12981 return t;
12982 }
12983
12984 /* Return a tree of sizetype representing the size, in bytes, of the element
12985 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12986
12987 tree
12988 array_ref_element_size (tree exp)
12989 {
12990 tree aligned_size = TREE_OPERAND (exp, 3);
12991 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12992 location_t loc = EXPR_LOCATION (exp);
12993
12994 /* If a size was specified in the ARRAY_REF, it's the size measured
12995 in alignment units of the element type. So multiply by that value. */
12996 if (aligned_size)
12997 {
12998 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12999 sizetype from another type of the same width and signedness. */
13000 if (TREE_TYPE (aligned_size) != sizetype)
13001 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13002 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13003 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13004 }
13005
13006 /* Otherwise, take the size from that of the element type. Substitute
13007 any PLACEHOLDER_EXPR that we have. */
13008 else
13009 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13010 }
13011
13012 /* Return a tree representing the lower bound of the array mentioned in
13013 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13014
13015 tree
13016 array_ref_low_bound (tree exp)
13017 {
13018 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13019
13020 /* If a lower bound is specified in EXP, use it. */
13021 if (TREE_OPERAND (exp, 2))
13022 return TREE_OPERAND (exp, 2);
13023
13024 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13025 substituting for a PLACEHOLDER_EXPR as needed. */
13026 if (domain_type && TYPE_MIN_VALUE (domain_type))
13027 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13028
13029 /* Otherwise, return a zero of the appropriate type. */
13030 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13031 }
13032
13033 /* Return a tree representing the upper bound of the array mentioned in
13034 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13035
13036 tree
13037 array_ref_up_bound (tree exp)
13038 {
13039 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13040
13041 /* If there is a domain type and it has an upper bound, use it, substituting
13042 for a PLACEHOLDER_EXPR as needed. */
13043 if (domain_type && TYPE_MAX_VALUE (domain_type))
13044 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13045
13046 /* Otherwise fail. */
13047 return NULL_TREE;
13048 }
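
/* Illustrative sketch, not part of the original source: for an ARRAY_REF or
   ARRAY_RANGE_REF EXP whose domain has a known upper bound, the accessors
   above can be combined to compute the element count as
   upper bound - lower bound + 1.  The helper name is made up for this
   example.  */
#if 0
static tree
example_array_ref_element_count (tree exp)
{
  tree low = array_ref_low_bound (exp);
  tree up = array_ref_up_bound (exp);
  if (up == NULL_TREE)
    return NULL_TREE;	/* No upper bound, e.g. a flexible array member.  */
  return size_binop (PLUS_EXPR,
		     size_binop (MINUS_EXPR,
				 fold_convert (sizetype, up),
				 fold_convert (sizetype, low)),
		     size_one_node);
}
#endif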
13049
13050 /* Returns true if REF is an array reference to an array at the end of
13051 a structure. If this is the case, the array may be allocated larger
13052 than its upper bound implies. */
13053
13054 bool
13055 array_at_struct_end_p (tree ref)
13056 {
13057 if (TREE_CODE (ref) != ARRAY_REF
13058 && TREE_CODE (ref) != ARRAY_RANGE_REF)
13059 return false;
13060
13061 while (handled_component_p (ref))
13062 {
13063 /* If the reference chain contains a component reference to a
13064 non-union type and another field follows, the reference
13065 is not at the end of a structure. */
13066 if (TREE_CODE (ref) == COMPONENT_REF
13067 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13068 {
13069 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13070 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13071 nextf = DECL_CHAIN (nextf);
13072 if (nextf)
13073 return false;
13074 }
13075
13076 ref = TREE_OPERAND (ref, 0);
13077 }
13078
13079 /* If the reference is based on a declared entity, the size of the array
13080 is constrained by its given domain.  (Do not trust commons; see PR 69368.)
13081 if (DECL_P (ref)
13082 && !(flag_unconstrained_commons
13083 && TREE_CODE (ref) == VAR_DECL && DECL_COMMON (ref)))
13084 return false;
13085
13086 return true;
13087 }
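
/* Illustrative example, not part of the original source: given

     struct s { int n; int data[1]; };

   a reference 'p->data[i]' through a pointer P ends the walk above at a
   MEM_REF, so the function returns true and the array may be allocated
   larger than its declared bound.  For a declared object 'struct s v;' the
   reference 'v.data[i]' ends at the VAR_DECL 'v' and the function returns
   false (unless the decl is an unconstrained common).  */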
13088
13089 /* Return a tree representing the offset, in bytes, of the field referenced
13090 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13091
13092 tree
13093 component_ref_field_offset (tree exp)
13094 {
13095 tree aligned_offset = TREE_OPERAND (exp, 2);
13096 tree field = TREE_OPERAND (exp, 1);
13097 location_t loc = EXPR_LOCATION (exp);
13098
13099 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13100 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13101 value. */
13102 if (aligned_offset)
13103 {
13104 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13105 sizetype from another type of the same width and signedness. */
13106 if (TREE_TYPE (aligned_offset) != sizetype)
13107 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13108 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13109 size_int (DECL_OFFSET_ALIGN (field)
13110 / BITS_PER_UNIT));
13111 }
13112
13113 /* Otherwise, take the offset from that of the field. Substitute
13114 any PLACEHOLDER_EXPR that we have. */
13115 else
13116 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13117 }
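
/* Illustrative sketch, not part of the original source: since the byte
   offset returned above excludes DECL_FIELD_BIT_OFFSET, a caller wanting the
   complete offset of the field in bits could combine the two as below.  The
   helper name is made up for this example.  */
#if 0
static tree
example_component_ref_bit_offset (tree exp)
{
  tree field = TREE_OPERAND (exp, 1);
  tree byte_off = component_ref_field_offset (exp);
  return size_binop (PLUS_EXPR,
		     size_binop (MULT_EXPR,
				 fold_convert (bitsizetype, byte_off),
				 bitsize_unit_node),
		     DECL_FIELD_BIT_OFFSET (field));
}
#endif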
13118
13119 /* Return the machine mode of T. For vectors, returns the mode of the
13120 inner type. The main use case is to feed the result to HONOR_NANS,
13121 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13122
13123 machine_mode
13124 element_mode (const_tree t)
13125 {
13126 if (!TYPE_P (t))
13127 t = TREE_TYPE (t);
13128 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13129 t = TREE_TYPE (t);
13130 return TYPE_MODE (t);
13131 }
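
/* Illustrative example, not part of the original source: the typical use is
   to query NaN semantics of the scalar element regardless of whether TYPE is
   a scalar, complex or vector type, e.g.

     if (HONOR_NANS (element_mode (type)))
       ...do not fold x != x to false...

   whereas a direct TYPE_MODE (type) might return BLKmode and defeat the
   check.  */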
13132
13133
13134 /* Verify that basic properties of T match TV and thus T can be a variant of
13135 TV. TV should be the more specified variant (i.e. the main variant). */
13136
13137 static bool
13138 verify_type_variant (const_tree t, tree tv)
13139 {
13140 /* Type variant can differ by:
13141
13142 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13143 ENCODE_QUAL_ADDR_SPACE.
13144 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
13145 in this case some values may not be set in the variant types
13146 (see TYPE_COMPLETE_P checks).
13147 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13148 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13149 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13150 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13151 - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
13152 this is necessary to make it possible to merge types from different TUs
13153 - arrays, pointers and references may have TREE_TYPE that is a variant
13154 of the TREE_TYPE of their main variants.
13155 - aggregates may have a new TYPE_FIELDS list that lists variants of
13156 the main variant's TYPE_FIELDS.
13157 - vector types may differ by TYPE_VECTOR_OPAQUE
13158 - TYPE_METHODS is always NULL for variant types and maintained for
13159 the main variant only.
13160 */
13161
13162 /* Convenience macro for matching individual fields. */
13163 #define verify_variant_match(flag) \
13164 do { \
13165 if (flag (tv) != flag (t)) \
13166 { \
13167 error ("type variant differs by " #flag "."); \
13168 debug_tree (tv); \
13169 return false; \
13170 } \
13171 } while (false)
13172
13173 /* tree_base checks. */
13174
13175 verify_variant_match (TREE_CODE);
13176 /* FIXME: Ada builds non-artificial variants of artificial types. */
13177 if (TYPE_ARTIFICIAL (tv) && 0)
13178 verify_variant_match (TYPE_ARTIFICIAL);
13179 if (POINTER_TYPE_P (tv))
13180 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13181 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13182 verify_variant_match (TYPE_UNSIGNED);
13183 verify_variant_match (TYPE_ALIGN_OK);
13184 verify_variant_match (TYPE_PACKED);
13185 if (TREE_CODE (t) == REFERENCE_TYPE)
13186 verify_variant_match (TYPE_REF_IS_RVALUE);
13187 if (AGGREGATE_TYPE_P (t))
13188 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13189 else
13190 verify_variant_match (TYPE_SATURATING);
13191 /* FIXME: This check triggers during the libstdc++ build. */
13192 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13193 verify_variant_match (TYPE_FINAL_P);
13194
13195 /* tree_type_common checks. */
13196
13197 if (COMPLETE_TYPE_P (t))
13198 {
13199 verify_variant_match (TYPE_SIZE);
13200 verify_variant_match (TYPE_MODE);
13201 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
13202 /* FIXME: ideally we should compare pointer equality, but the Java FE
13203 produces variants where the size is an INTEGER_CST of a different type
13204 (int wrt size_type) during the libjava build. */
13205 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
13206 {
13207 error ("type variant has different TYPE_SIZE_UNIT");
13208 debug_tree (tv);
13209 error ("type variant's TYPE_SIZE_UNIT");
13210 debug_tree (TYPE_SIZE_UNIT (tv));
13211 error ("type's TYPE_SIZE_UNIT");
13212 debug_tree (TYPE_SIZE_UNIT (t));
13213 return false;
13214 }
13215 }
13216 verify_variant_match (TYPE_PRECISION);
13217 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13218 if (RECORD_OR_UNION_TYPE_P (t))
13219 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13220 else if (TREE_CODE (t) == ARRAY_TYPE)
13221 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13222 /* During LTO we merge variant lists from different translation units
13223 that may differ by TYPE_CONTEXT, which in turn may point
13224 to TRANSLATION_UNIT_DECL.
13225 Ada also builds variants of types with different TYPE_CONTEXT. */
13226 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13227 verify_variant_match (TYPE_CONTEXT);
13228 verify_variant_match (TYPE_STRING_FLAG);
13229 if (TYPE_ALIAS_SET_KNOWN_P (t))
13230 {
13231 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13232 debug_tree (tv);
13233 return false;
13234 }
13235
13236 /* tree_type_non_common checks. */
13237
13238 /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13239 and dangles the pointer from time to time. */
13240 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13241 && (in_lto_p || !TYPE_VFIELD (tv)
13242 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13243 {
13244 error ("type variant has different TYPE_VFIELD");
13245 debug_tree (tv);
13246 return false;
13247 }
13248 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13249 || TREE_CODE (t) == INTEGER_TYPE
13250 || TREE_CODE (t) == BOOLEAN_TYPE
13251 || TREE_CODE (t) == REAL_TYPE
13252 || TREE_CODE (t) == FIXED_POINT_TYPE)
13253 {
13254 verify_variant_match (TYPE_MAX_VALUE);
13255 verify_variant_match (TYPE_MIN_VALUE);
13256 }
13257 if (TREE_CODE (t) == METHOD_TYPE)
13258 verify_variant_match (TYPE_METHOD_BASETYPE);
13259 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13260 {
13261 error ("type variant has TYPE_METHODS");
13262 debug_tree (tv);
13263 return false;
13264 }
13265 if (TREE_CODE (t) == OFFSET_TYPE)
13266 verify_variant_match (TYPE_OFFSET_BASETYPE);
13267 if (TREE_CODE (t) == ARRAY_TYPE)
13268 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13269 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13270 or even in the type's main variant. This is needed to make bootstrap pass
13271 and the bug seems new in GCC 5.
13272 The C++ FE should be updated to make this consistent and we should check
13273 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise that there
13274 is a match with the main variant.
13275
13276 Also disable the check for Java for now because of a parser hack that first
13277 builds a dummy BINFO and then sometimes replaces it with the real BINFO in
13278 some of the copies. */
13279 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13280 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13281 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13282 Since there is no cheap way to tell C++ and Java types apart without LTO,
13283 do the checking at LTO time only. */
13284 && (in_lto_p && odr_type_p (t)))
13285 {
13286 error ("type variant has different TYPE_BINFO");
13287 debug_tree (tv);
13288 error ("type variant's TYPE_BINFO");
13289 debug_tree (TYPE_BINFO (tv));
13290 error ("type's TYPE_BINFO");
13291 debug_tree (TYPE_BINFO (t));
13292 return false;
13293 }
13294
13295 /* Check various uses of TYPE_VALUES_RAW. */
13296 if (TREE_CODE (t) == ENUMERAL_TYPE)
13297 verify_variant_match (TYPE_VALUES);
13298 else if (TREE_CODE (t) == ARRAY_TYPE)
13299 verify_variant_match (TYPE_DOMAIN);
13300 /* Permit incomplete variants of complete type. While FEs may complete
13301 all variants, this does not happen for C++ templates in all cases. */
13302 else if (RECORD_OR_UNION_TYPE_P (t)
13303 && COMPLETE_TYPE_P (t)
13304 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13305 {
13306 tree f1, f2;
13307
13308 /* Fortran builds qualified variants as new records with items of
13309 qualified type. Verify that they look the same. */
13310 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13311 f1 && f2;
13312 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13313 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13314 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13315 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13316 /* FIXME: gfc_nonrestricted_type builds all types as variants
13317 with the exception of pointer types. It deeply copies the type,
13318 which means that we may end up with a variant type
13319 referring to a non-variant pointer. We may change it to
13320 produce types as variants, too, like
13321 objc_get_protocol_qualified_type does. */
13322 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13323 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13324 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13325 break;
13326 if (f1 || f2)
13327 {
13328 error ("type variant has different TYPE_FIELDS");
13329 debug_tree (tv);
13330 error ("first mismatch is field");
13331 debug_tree (f1);
13332 error ("and field");
13333 debug_tree (f2);
13334 return false;
13335 }
13336 }
13337 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13338 verify_variant_match (TYPE_ARG_TYPES);
13339 /* For C++ the qualified variant of an array type is really an array type
13340 of the qualified TREE_TYPE.
13341 ObjC builds variants of pointer types where the pointed-to type is a
13342 variant, too, in objc_get_protocol_qualified_type. */
13343 if (TREE_TYPE (t) != TREE_TYPE (tv)
13344 && ((TREE_CODE (t) != ARRAY_TYPE
13345 && !POINTER_TYPE_P (t))
13346 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13347 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13348 {
13349 error ("type variant has different TREE_TYPE");
13350 debug_tree (tv);
13351 error ("type variant's TREE_TYPE");
13352 debug_tree (TREE_TYPE (tv));
13353 error ("type's TREE_TYPE");
13354 debug_tree (TREE_TYPE (t));
13355 return false;
13356 }
13357 if (type_with_alias_set_p (t)
13358 && !gimple_canonical_types_compatible_p (t, tv, false))
13359 {
13360 error ("type is not compatible with its vairant");
13361 debug_tree (tv);
13362 error ("type variant's TREE_TYPE");
13363 debug_tree (TREE_TYPE (tv));
13364 error ("type's TREE_TYPE");
13365 debug_tree (TREE_TYPE (t));
13366 return false;
13367 }
13368 return true;
13369 #undef verify_variant_match
13370 }
13371
13372
13373 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13374 the middle-end types_compatible_p function. It needs to avoid
13375 claiming types are different for types that should be treated
13376 the same with respect to TBAA. Canonical types are also used
13377 for IL consistency checks via the useless_type_conversion_p
13378 predicate which does not handle all type kinds itself but falls
13379 back to pointer-comparison of TYPE_CANONICAL for aggregates
13380 for example. */
13381
13382 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13383 type calculation because we need to allow inter-operability between signed
13384 and unsigned variants. */
13385
13386 bool
13387 type_with_interoperable_signedness (const_tree type)
13388 {
13389 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13390 signed char and unsigned char. Similarly, the Fortran FE builds
13391 C_SIZE_T as a signed type, while C defines it as unsigned. */
13392
13393 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13394 == INTEGER_TYPE
13395 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13396 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13397 }
13398
13399 /* Return true iff T1 and T2 are structurally identical as far as
13400 TBAA is concerned.
13401 This function is used both by lto.c canonical type merging and by the
13402 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
13403 types that have TYPE_CANONICAL defined and assume them equivalent. This is
13404 useful only for LTO because only in that case does TYPE_CANONICAL equivalence
13405 correspond to the one defined by gimple_canonical_types_compatible_p. */
13406
13407 bool
13408 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13409 bool trust_type_canonical)
13410 {
13411 /* Type variants should be the same as the main variant. When not doing
13412 sanity checking to verify this fact, go to the main variants and save work. */
13413 if (trust_type_canonical)
13414 {
13415 t1 = TYPE_MAIN_VARIANT (t1);
13416 t2 = TYPE_MAIN_VARIANT (t2);
13417 }
13418
13419 /* Check first for the obvious case of pointer identity. */
13420 if (t1 == t2)
13421 return true;
13422
13423 /* Check that we have two types to compare. */
13424 if (t1 == NULL_TREE || t2 == NULL_TREE)
13425 return false;
13426
13427 /* We consider complete types always compatible with incomplete types.
13428 This does not make sense for canonical type calculation and thus we
13429 need to ensure that we are never called in that case.
13430
13431 FIXME: For more correctness the function probably should have three modes
13432 1) mode assuming that types are complete and matching their structure
13433 2) mode allowing incomplete types but producing equivalence classes
13434 and thus ignoring all info from complete types
13435 3) mode allowing incomplete types to match complete ones but checking
13436 compatibility between complete types.
13437
13438 1 and 2 can be used for canonical type calculation. 3 is the real
13439 definition of type compatibility that can be used e.g. for warnings during
13440 declaration merging. */
13441
13442 gcc_assert (!trust_type_canonical
13443 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13444 /* If the types have been previously registered and found equal
13445 they still are. */
13446
13447 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13448 && trust_type_canonical)
13449 {
13450 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13451 it is always NULL, but it is set to non-NULL for types
13452 constructed by build_pointer_type and variants. In this case
13453 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13454 all pointers are considered equal). Be sure not to return false
13455 negatives. */
13456 gcc_checking_assert (canonical_type_used_p (t1)
13457 && canonical_type_used_p (t2));
13458 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13459 }
13460
13461 /* Can't be the same type if the types don't have the same code. */
13462 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13463 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13464 return false;
13465
13466 /* Qualifiers do not matter for canonical type comparison purposes. */
13467
13468 /* Void types and nullptr types are always the same. */
13469 if (TREE_CODE (t1) == VOID_TYPE
13470 || TREE_CODE (t1) == NULLPTR_TYPE)
13471 return true;
13472
13473 /* Can't be the same type if they have different mode. */
13474 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13475 return false;
13476
13477 /* Non-aggregate types can be handled cheaply. */
13478 if (INTEGRAL_TYPE_P (t1)
13479 || SCALAR_FLOAT_TYPE_P (t1)
13480 || FIXED_POINT_TYPE_P (t1)
13481 || TREE_CODE (t1) == VECTOR_TYPE
13482 || TREE_CODE (t1) == COMPLEX_TYPE
13483 || TREE_CODE (t1) == OFFSET_TYPE
13484 || POINTER_TYPE_P (t1))
13485 {
13486 /* Can't be the same type if they have different precision. */
13487 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13488 return false;
13489
13490 /* In some cases the signed and unsigned types are required to be
13491 inter-operable. */
13492 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13493 && !type_with_interoperable_signedness (t1))
13494 return false;
13495
13496 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13497 interoperable with "signed char". Unless all frontends are revisited
13498 to agree on these types, we must ignore the flag completely. */
13499
13500 /* The Fortran standard defines the C_PTR type, which is compatible with
13501 every C pointer. For this reason we need to glob all pointers into one.
13502 Still, pointers in different address spaces are not compatible. */
13503 if (POINTER_TYPE_P (t1))
13504 {
13505 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13506 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13507 return false;
13508 }
13509
13510 /* Tail-recurse to components. */
13511 if (TREE_CODE (t1) == VECTOR_TYPE
13512 || TREE_CODE (t1) == COMPLEX_TYPE)
13513 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13514 TREE_TYPE (t2),
13515 trust_type_canonical);
13516
13517 return true;
13518 }
13519
13520 /* Do type-specific comparisons. */
13521 switch (TREE_CODE (t1))
13522 {
13523 case ARRAY_TYPE:
13524 /* Array types are the same if the element types are the same and
13525 the number of elements is the same. */
13526 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13527 trust_type_canonical)
13528 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13529 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13530 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13531 return false;
13532 else
13533 {
13534 tree i1 = TYPE_DOMAIN (t1);
13535 tree i2 = TYPE_DOMAIN (t2);
13536
13537 /* For an incomplete external array, the type domain can be
13538 NULL_TREE. Check this condition also. */
13539 if (i1 == NULL_TREE && i2 == NULL_TREE)
13540 return true;
13541 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13542 return false;
13543 else
13544 {
13545 tree min1 = TYPE_MIN_VALUE (i1);
13546 tree min2 = TYPE_MIN_VALUE (i2);
13547 tree max1 = TYPE_MAX_VALUE (i1);
13548 tree max2 = TYPE_MAX_VALUE (i2);
13549
13550 /* The minimum/maximum values have to be the same. */
13551 if ((min1 == min2
13552 || (min1 && min2
13553 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13554 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13555 || operand_equal_p (min1, min2, 0))))
13556 && (max1 == max2
13557 || (max1 && max2
13558 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13559 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13560 || operand_equal_p (max1, max2, 0)))))
13561 return true;
13562 else
13563 return false;
13564 }
13565 }
13566
13567 case METHOD_TYPE:
13568 case FUNCTION_TYPE:
13569 /* Function types are the same if the return type and arguments types
13570 are the same. */
13571 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13572 trust_type_canonical))
13573 return false;
13574
13575 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13576 return true;
13577 else
13578 {
13579 tree parms1, parms2;
13580
13581 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13582 parms1 && parms2;
13583 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13584 {
13585 if (!gimple_canonical_types_compatible_p
13586 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13587 trust_type_canonical))
13588 return false;
13589 }
13590
13591 if (parms1 || parms2)
13592 return false;
13593
13594 return true;
13595 }
13596
13597 case RECORD_TYPE:
13598 case UNION_TYPE:
13599 case QUAL_UNION_TYPE:
13600 {
13601 tree f1, f2;
13602
13603 /* Don't try to compare variants of an incomplete type, before
13604 TYPE_FIELDS has been copied around. */
13605 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13606 return true;
13607
13608
13609 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13610 return false;
13611
13612 /* For aggregate types, all the fields must be the same. */
13613 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13614 f1 || f2;
13615 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13616 {
13617 /* Skip non-fields. */
13618 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13619 f1 = TREE_CHAIN (f1);
13620 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13621 f2 = TREE_CHAIN (f2);
13622 if (!f1 || !f2)
13623 break;
13624 /* The fields must have the same name, offset and type. */
13625 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13626 || !gimple_compare_field_offset (f1, f2)
13627 || !gimple_canonical_types_compatible_p
13628 (TREE_TYPE (f1), TREE_TYPE (f2),
13629 trust_type_canonical))
13630 return false;
13631 }
13632
13633 /* If one aggregate has more fields than the other, they
13634 are not the same. */
13635 if (f1 || f2)
13636 return false;
13637
13638 return true;
13639 }
13640
13641 default:
13642 /* Consider all types with language specific trees in them mutually
13643 compatible. This is executed only from verify_type and false
13644 positives can be tolerated. */
13645 gcc_assert (!in_lto_p);
13646 return true;
13647 }
13648 }
13649
13650 /* Verify type T. */
13651
13652 void
13653 verify_type (const_tree t)
13654 {
13655 bool error_found = false;
13656 tree mv = TYPE_MAIN_VARIANT (t);
13657 if (!mv)
13658 {
13659 error ("Main variant is not defined");
13660 error_found = true;
13661 }
13662 else if (mv != TYPE_MAIN_VARIANT (mv))
13663 {
13664 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13665 debug_tree (mv);
13666 error_found = true;
13667 }
13668 else if (t != mv && !verify_type_variant (t, mv))
13669 error_found = true;
13670
13671 tree ct = TYPE_CANONICAL (t);
13672 if (!ct)
13673 ;
13674 else if (TYPE_CANONICAL (t) != ct)
13675 {
13676 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13677 debug_tree (ct);
13678 error_found = true;
13679 }
13680 /* Method and function types cannot be used to address memory and thus
13681 TYPE_CANONICAL really matters only for determining useless conversions.
13682
13683 FIXME: the C++ FE produces declarations of builtin functions that are not
13684 compatible with main variants. */
13685 else if (TREE_CODE (t) == FUNCTION_TYPE)
13686 ;
13687 else if (t != ct
13688 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13689 with variably sized arrays because their sizes are possibly
13690 gimplified to different variables. */
13691 && !variably_modified_type_p (ct, NULL)
13692 && !gimple_canonical_types_compatible_p (t, ct, false))
13693 {
13694 error ("TYPE_CANONICAL is not compatible");
13695 debug_tree (ct);
13696 error_found = true;
13697 }
13698
13699 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13700 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13701 {
13702 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13703 debug_tree (ct);
13704 error_found = true;
13705 }
13706 /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
13707 FUNCTION_*_QUALIFIED flags are set. */
13708 if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13709 {
13710 error ("TYPE_CANONICAL of main variant is not main variant");
13711 debug_tree (ct);
13712 debug_tree (TYPE_MAIN_VARIANT (ct));
13713 error_found = true;
13714 }
13715
13716
13717 /* Check various uses of TYPE_MINVAL. */
13718 if (RECORD_OR_UNION_TYPE_P (t))
13719 {
13720 /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13721 and dangles the pointer from time to time. */
13722 if (TYPE_VFIELD (t)
13723 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13724 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13725 {
13726 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13727 debug_tree (TYPE_VFIELD (t));
13728 error_found = true;
13729 }
13730 }
13731 else if (TREE_CODE (t) == POINTER_TYPE)
13732 {
13733 if (TYPE_NEXT_PTR_TO (t)
13734 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13735 {
13736 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13737 debug_tree (TYPE_NEXT_PTR_TO (t));
13738 error_found = true;
13739 }
13740 }
13741 else if (TREE_CODE (t) == REFERENCE_TYPE)
13742 {
13743 if (TYPE_NEXT_REF_TO (t)
13744 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13745 {
13746 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13747 debug_tree (TYPE_NEXT_REF_TO (t));
13748 error_found = true;
13749 }
13750 }
13751 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13752 || TREE_CODE (t) == FIXED_POINT_TYPE)
13753 {
13754 /* FIXME: The following check should pass:
13755 useless_type_conversion_p (const_cast <tree> (t),
13756 TREE_TYPE (TYPE_MIN_VALUE (t))
13757 but does not for C sizetypes in LTO. */
13758 }
13759 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13760 else if (TYPE_MINVAL (t)
13761 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13762 || in_lto_p))
13763 {
13764 error ("TYPE_MINVAL non-NULL");
13765 debug_tree (TYPE_MINVAL (t));
13766 error_found = true;
13767 }
13768
13769 /* Check various uses of TYPE_MAXVAL. */
13770 if (RECORD_OR_UNION_TYPE_P (t))
13771 {
13772 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13773 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13774 && TYPE_METHODS (t) != error_mark_node)
13775 {
13776 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13777 debug_tree (TYPE_METHODS (t));
13778 error_found = true;
13779 }
13780 }
13781 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13782 {
13783 if (TYPE_METHOD_BASETYPE (t)
13784 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13785 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13786 {
13787 error ("TYPE_METHOD_BASETYPE is not record nor union");
13788 debug_tree (TYPE_METHOD_BASETYPE (t));
13789 error_found = true;
13790 }
13791 }
13792 else if (TREE_CODE (t) == OFFSET_TYPE)
13793 {
13794 if (TYPE_OFFSET_BASETYPE (t)
13795 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13796 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13797 {
13798 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13799 debug_tree (TYPE_OFFSET_BASETYPE (t));
13800 error_found = true;
13801 }
13802 }
13803 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13804 || TREE_CODE (t) == FIXED_POINT_TYPE)
13805 {
13806 /* FIXME: The following check should pass:
13807 useless_type_conversion_p (const_cast <tree> (t),
13808 TREE_TYPE (TYPE_MAX_VALUE (t))
13809 but does not for C sizetypes in LTO. */
13810 }
13811 else if (TREE_CODE (t) == ARRAY_TYPE)
13812 {
13813 if (TYPE_ARRAY_MAX_SIZE (t)
13814 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13815 {
13816 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13817 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13818 error_found = true;
13819 }
13820 }
13821 else if (TYPE_MAXVAL (t))
13822 {
13823 error ("TYPE_MAXVAL non-NULL");
13824 debug_tree (TYPE_MAXVAL (t));
13825 error_found = true;
13826 }
13827
13828 /* Check various uses of TYPE_BINFO. */
13829 if (RECORD_OR_UNION_TYPE_P (t))
13830 {
13831 if (!TYPE_BINFO (t))
13832 ;
13833 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13834 {
13835 error ("TYPE_BINFO is not TREE_BINFO");
13836 debug_tree (TYPE_BINFO (t));
13837 error_found = true;
13838 }
13839 /* FIXME: Java builds invalid empty binfos that do not have
13840 TREE_TYPE set. */
13841 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13842 {
13843 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13844 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13845 error_found = true;
13846 }
13847 }
13848 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13849 {
13850 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13851 debug_tree (TYPE_LANG_SLOT_1 (t));
13852 error_found = true;
13853 }
13854
13855 /* Check various uses of TYPE_VALUES_RAW. */
13856 if (TREE_CODE (t) == ENUMERAL_TYPE)
13857 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13858 {
13859 tree value = TREE_VALUE (l);
13860 tree name = TREE_PURPOSE (l);
13861
13862 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13863 a CONST_DECL of ENUMERAL_TYPE. */
13864 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13865 {
13866 error ("Enum value is not CONST_DECL or INTEGER_CST");
13867 debug_tree (value);
13868 debug_tree (name);
13869 error_found = true;
13870 }
13871 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13872 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13873 {
13874 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13875 debug_tree (value);
13876 debug_tree (name);
13877 error_found = true;
13878 }
13879 if (TREE_CODE (name) != IDENTIFIER_NODE)
13880 {
13881 error ("Enum value name is not IDENTIFIER_NODE");
13882 debug_tree (value);
13883 debug_tree (name);
13884 error_found = true;
13885 }
13886 }
13887 else if (TREE_CODE (t) == ARRAY_TYPE)
13888 {
13889 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13890 {
13891 error ("Array TYPE_DOMAIN is not integer type");
13892 debug_tree (TYPE_DOMAIN (t));
13893 error_found = true;
13894 }
13895 }
13896 else if (RECORD_OR_UNION_TYPE_P (t))
13897 {
13898 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13899 {
13900 error ("TYPE_FIELDS defined in incomplete type");
13901 error_found = true;
13902 }
13903 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13904 {
13905 /* TODO: verify properties of decls. */
13906 if (TREE_CODE (fld) == FIELD_DECL)
13907 ;
13908 else if (TREE_CODE (fld) == TYPE_DECL)
13909 ;
13910 else if (TREE_CODE (fld) == CONST_DECL)
13911 ;
13912 else if (TREE_CODE (fld) == VAR_DECL)
13913 ;
13914 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13915 ;
13916 else if (TREE_CODE (fld) == USING_DECL)
13917 ;
13918 else
13919 {
13920 error ("Wrong tree in TYPE_FIELDS list");
13921 debug_tree (fld);
13922 error_found = true;
13923 }
13924 }
13925 }
13926 else if (TREE_CODE (t) == INTEGER_TYPE
13927 || TREE_CODE (t) == BOOLEAN_TYPE
13928 || TREE_CODE (t) == OFFSET_TYPE
13929 || TREE_CODE (t) == REFERENCE_TYPE
13930 || TREE_CODE (t) == NULLPTR_TYPE
13931 || TREE_CODE (t) == POINTER_TYPE)
13932 {
13933 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13934 {
13935 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13936 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13937 error_found = true;
13938 }
13939 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13940 {
13941 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13942 debug_tree (TYPE_CACHED_VALUES (t));
13943 error_found = true;
13944 }
13945 /* Verify just enough of the cache to ensure that no one copied it to a new
13946 type. All copying should go through copy_node, which should clear it. */
13947 else if (TYPE_CACHED_VALUES_P (t))
13948 {
13949 int i;
13950 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13951 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13952 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13953 {
13954 error ("wrong TYPE_CACHED_VALUES entry");
13955 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13956 error_found = true;
13957 break;
13958 }
13959 }
13960 }
13961 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13962 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13963 {
13964 /* C++ FE uses TREE_PURPOSE to store initial values. */
13965 if (TREE_PURPOSE (l) && in_lto_p)
13966 {
13967 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13968 debug_tree (l);
13969 error_found = true;
13970 }
13971 if (!TYPE_P (TREE_VALUE (l)))
13972 {
13973 error ("Wrong entry in TYPE_ARG_TYPES list");
13974 debug_tree (l);
13975 error_found = true;
13976 }
13977 }
13978 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13979 {
13980 error ("TYPE_VALUES_RAW field is non-NULL");
13981 debug_tree (TYPE_VALUES_RAW (t));
13982 error_found = true;
13983 }
13984 if (TREE_CODE (t) != INTEGER_TYPE
13985 && TREE_CODE (t) != BOOLEAN_TYPE
13986 && TREE_CODE (t) != OFFSET_TYPE
13987 && TREE_CODE (t) != REFERENCE_TYPE
13988 && TREE_CODE (t) != NULLPTR_TYPE
13989 && TREE_CODE (t) != POINTER_TYPE
13990 && TYPE_CACHED_VALUES_P (t))
13991 {
13992 error ("TYPE_CACHED_VALUES_P is set while it should not");
13993 error_found = true;
13994 }
13995 if (TYPE_STRING_FLAG (t)
13996 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13997 {
13998 error ("TYPE_STRING_FLAG is set on wrong type code");
13999 error_found = true;
14000 }
14001 else if (TYPE_STRING_FLAG (t))
14002 {
14003 const_tree b = t;
14004 if (TREE_CODE (b) == ARRAY_TYPE)
14005 b = TREE_TYPE (t);
14006 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
14007 which is 32 bits. */
14008 if (TREE_CODE (b) != INTEGER_TYPE)
14009 {
14010 error ("TYPE_STRING_FLAG is set on type that does not look like "
14011 "char nor array of chars");
14012 error_found = true;
14013 }
14014 }
14015
14016 /* ipa-devirt makes the assumption that TYPE_METHOD_BASETYPE is always
14017 the TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14018 of a type. */
14019 if (TREE_CODE (t) == METHOD_TYPE
14020 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14021 {
14022 error ("TYPE_METHOD_BASETYPE is not main variant");
14023 error_found = true;
14024 }
14025
14026 if (error_found)
14027 {
14028 debug_tree (const_cast <tree> (t));
14029 internal_error ("verify_type failed");
14030 }
14031 }
14032
14033
14034 /* Return true if ARG is marked with the nonnull attribute in the
14035 current function signature. */
14036
14037 bool
14038 nonnull_arg_p (const_tree arg)
14039 {
14040 tree t, attrs, fntype;
14041 unsigned HOST_WIDE_INT arg_num;
14042
14043 gcc_assert (TREE_CODE (arg) == PARM_DECL
14044 && (POINTER_TYPE_P (TREE_TYPE (arg))
14045 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14046
14047 /* The static chain decl is always non null. */
14048 if (arg == cfun->static_chain_decl)
14049 return true;
14050
14051 /* THIS argument of method is always non-NULL. */
14052 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14053 && arg == DECL_ARGUMENTS (cfun->decl)
14054 && flag_delete_null_pointer_checks)
14055 return true;
14056
14057 /* Values passed by reference are always non-NULL. */
14058 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14059 && flag_delete_null_pointer_checks)
14060 return true;
14061
14062 fntype = TREE_TYPE (cfun->decl);
14063 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14064 {
14065 attrs = lookup_attribute ("nonnull", attrs);
14066
14067 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14068 if (attrs == NULL_TREE)
14069 return false;
14070
14071 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14072 if (TREE_VALUE (attrs) == NULL_TREE)
14073 return true;
14074
14075 /* Get the position number for ARG in the function signature. */
14076 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14077 t;
14078 t = DECL_CHAIN (t), arg_num++)
14079 {
14080 if (t == arg)
14081 break;
14082 }
14083
14084 gcc_assert (t == arg);
14085
14086 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14087 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14088 {
14089 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14090 return true;
14091 }
14092 }
14093
14094 return false;
14095 }
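
/* Illustrative example, not part of the original source: for a function
   compiled as cfun and declared as

     void f (char *a, char *b) __attribute__ ((nonnull (2)));

   nonnull_arg_p returns false for the PARM_DECL of 'a' and true for the
   PARM_DECL of 'b', since only argument 2 is listed in the attribute.  */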
14096
14097 /* Given location LOC, strip away any packed range information
14098 or ad-hoc information. */
14099
14100 location_t
14101 get_pure_location (location_t loc)
14102 {
14103 if (IS_ADHOC_LOC (loc))
14104 loc
14105 = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;
14106
14107 if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
14108 return loc;
14109
14110 if (loc < RESERVED_LOCATION_COUNT)
14111 return loc;
14112
14113 const line_map *map = linemap_lookup (line_table, loc);
14114 const line_map_ordinary *ordmap = linemap_check_ordinary (map);
14115
14116 return loc & ~((1 << ordmap->m_range_bits) - 1);
14117 }
14118
14119 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14120 information. */
14121
14122 location_t
14123 set_block (location_t loc, tree block)
14124 {
14125 location_t pure_loc = get_pure_location (loc);
14126 source_range src_range = get_range_from_loc (line_table, loc);
14127 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14128 }
14129
14130 location_t
14131 set_source_range (tree expr, location_t start, location_t finish)
14132 {
14133 source_range src_range;
14134 src_range.m_start = start;
14135 src_range.m_finish = finish;
14136 return set_source_range (expr, src_range);
14137 }
14138
14139 location_t
14140 set_source_range (tree expr, source_range src_range)
14141 {
14142 if (!EXPR_P (expr))
14143 return UNKNOWN_LOCATION;
14144
14145 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14146 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14147 pure_loc,
14148 src_range,
14149 NULL);
14150 SET_EXPR_LOCATION (expr, adhoc);
14151 return adhoc;
14152 }
14153
14154 location_t
14155 make_location (location_t caret, location_t start, location_t finish)
14156 {
14157 location_t pure_loc = get_pure_location (caret);
14158 source_range src_range;
14159 src_range.m_start = start;
14160 src_range.m_finish = finish;
14161 location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
14162 pure_loc,
14163 src_range,
14164 NULL);
14165 return combined_loc;
14166 }
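
/* Illustrative sketch, not part of the original source: a front end that has
   the location of an operator token and the start/finish locations of its
   operands can build a single location carrying both a caret and a range and
   attach it to the expression roughly as below.  The helper name and
   parameters are made up for this example.  */
#if 0
static void
example_set_binary_expr_location (tree expr, location_t op_loc,
				  location_t lhs_start, location_t rhs_finish)
{
  location_t combined = make_location (op_loc, lhs_start, rhs_finish);
  SET_EXPR_LOCATION (expr, combined);
}
#endif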
14167
14168 /* Return the name of combined function FN, for debugging purposes. */
14169
14170 const char *
14171 combined_fn_name (combined_fn fn)
14172 {
14173 if (builtin_fn_p (fn))
14174 {
14175 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14176 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14177 }
14178 else
14179 return internal_fn_name (as_internal_fn (fn));
14180 }
14181
14182 #include "gt-tree.h"