1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "rtl.h"
36 #include "tree.h"
37 #include "gimple.h"
38 #include "tree-pass.h"
39 #include "tm_p.h"
40 #include "ssa.h"
41 #include "expmed.h"
42 #include "insn-config.h"
43 #include "emit-rtl.h"
44 #include "cgraph.h"
45 #include "diagnostic.h"
46 #include "flags.h"
47 #include "alias.h"
48 #include "fold-const.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "attribs.h"
52 #include "varasm.h"
53 #include "toplev.h" /* get_random_seed */
54 #include "output.h"
55 #include "common/common-target.h"
56 #include "langhooks.h"
57 #include "tree-inline.h"
58 #include "tree-iterator.h"
59 #include "internal-fn.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "dojump.h"
63 #include "explow.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "tree-dfa.h"
67 #include "params.h"
68 #include "langhooks-def.h"
69 #include "tree-diagnostic.h"
70 #include "tree-pretty-print.h"
71 #include "except.h"
72 #include "debug.h"
73 #include "intl.h"
74 #include "builtins.h"
75 #include "print-tree.h"
76 #include "ipa-utils.h"
77
78 /* Tree code classes. */
79
80 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
81 #define END_OF_BASE_TREE_CODES tcc_exceptional,
82
83 const enum tree_code_class tree_code_type[] = {
84 #include "all-tree.def"
85 };
86
87 #undef DEFTREECODE
88 #undef END_OF_BASE_TREE_CODES
89
90 /* Table indexed by tree code giving number of expression
91 operands beyond the fixed part of the node structure.
92 Not used for types or decls. */
93
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
95 #define END_OF_BASE_TREE_CODES 0,
96
97 const unsigned char tree_code_length[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Names of tree components.
105 Used for printing out the tree and error messages. */
106 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
107 #define END_OF_BASE_TREE_CODES "@dummy",
108
109 static const char *const tree_code_name[] = {
110 #include "all-tree.def"
111 };
112
113 #undef DEFTREECODE
114 #undef END_OF_BASE_TREE_CODES
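/* For illustration: each DEFTREECODE entry in tree.def expands once per
   table above.  The entry for PLUS_EXPR, for instance,

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes tcc_binary to tree_code_type[], 2 to tree_code_length[]
   and "plus_expr" to tree_code_name[], all at index PLUS_EXPR.  */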
115
116 /* Each tree code class has an associated string representation.
117 These must correspond to the tree_code_class entries. */
118
119 const char *const tree_code_class_strings[] =
120 {
121 "exceptional",
122 "constant",
123 "type",
124 "declaration",
125 "reference",
126 "comparison",
127 "unary",
128 "binary",
129 "statement",
130 "vl_exp",
131 "expression"
132 };
133
134 /* obstack.[ch] explicitly declined to prototype this. */
135 extern int _obstack_allocated_p (struct obstack *h, void *obj);
136
137 /* Statistics-gathering stuff. */
138
139 static int tree_code_counts[MAX_TREE_CODES];
140 int tree_node_counts[(int) all_kinds];
141 int tree_node_sizes[(int) all_kinds];
142
143 /* Keep in sync with tree.h:enum tree_node_kind. */
144 static const char * const tree_node_kind_names[] = {
145 "decls",
146 "types",
147 "blocks",
148 "stmts",
149 "refs",
150 "exprs",
151 "constants",
152 "identifiers",
153 "vecs",
154 "binfos",
155 "ssa names",
156 "constructors",
157 "random kinds",
158 "lang_decl kinds",
159 "lang_type kinds",
160 "omp clauses",
161 };
162
163 /* Unique id for next decl created. */
164 static GTY(()) int next_decl_uid;
165 /* Unique id for next type created. */
166 static GTY(()) int next_type_uid = 1;
167 /* Unique id for next debug decl created. Use negative numbers,
168 to catch erroneous uses. */
169 static GTY(()) int next_debug_decl_uid;
170
171 /* Since we cannot rehash a type after it is in the table, we have to
172 keep the hash code. */
173
174 struct GTY((for_user)) type_hash {
175 unsigned long hash;
176 tree type;
177 };
178
179 /* Initial size of the hash table (rounded to next prime). */
180 #define TYPE_HASH_INITIAL_SIZE 1000
181
182 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
183 {
184 static hashval_t hash (type_hash *t) { return t->hash; }
185 static bool equal (type_hash *a, type_hash *b);
186
187 static int
188 keep_cache_entry (type_hash *&t)
189 {
190 return ggc_marked_p (t->type);
191 }
192 };
193
194 /* Now here is the hash table. When recording a type, it is added to
195 the slot whose index is the hash code. Note that the hash table is
196 used for several kinds of types (function types, array types and
197 array index range types, for now). While all these live in the
198 same table, they are completely independent, and the hash code is
199 computed differently for each of these. */
200
201 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
202
203 /* Hash table and temporary node for larger integer const values. */
204 static GTY (()) tree int_cst_node;
205
206 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
207 {
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
210 };
211
212 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
213
214 /* Hash table for optimization flags and target option flags. Use the same
215 hash table for both sets of options. Nodes for building the current
216 optimization and target option nodes. The assumption is most of the time
217 the options created will already be in the hash table, so we avoid
218 allocating and freeing up a node repeatedly. */
219 static GTY (()) tree cl_optimization_node;
220 static GTY (()) tree cl_target_option_node;
221
222 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
223 {
224 static hashval_t hash (tree t);
225 static bool equal (tree x, tree y);
226 };
227
228 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
229
230 /* General tree->tree mapping structure for use in hash tables. */
231
232
233 static GTY ((cache))
234 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
235
236 static GTY ((cache))
237 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
238
239 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
240 {
241 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
242
243 static bool
244 equal (tree_vec_map *a, tree_vec_map *b)
245 {
246 return a->base.from == b->base.from;
247 }
248
249 static int
250 keep_cache_entry (tree_vec_map *&m)
251 {
252 return ggc_marked_p (m->base.from);
253 }
254 };
255
256 static GTY ((cache))
257 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
258
259 static void set_type_quals (tree, int);
260 static void print_type_hash_statistics (void);
261 static void print_debug_expr_statistics (void);
262 static void print_value_expr_statistics (void);
263 static void type_hash_list (const_tree, inchash::hash &);
264 static void attribute_hash_list (const_tree, inchash::hash &);
265
266 tree global_trees[TI_MAX];
267 tree integer_types[itk_none];
268
269 bool int_n_enabled_p[NUM_INT_N_ENTS];
270 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
271
272 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
273
274 /* Number of operands for each OpenMP clause. */
275 unsigned const char omp_clause_num_ops[] =
276 {
277 0, /* OMP_CLAUSE_ERROR */
278 1, /* OMP_CLAUSE_PRIVATE */
279 1, /* OMP_CLAUSE_SHARED */
280 1, /* OMP_CLAUSE_FIRSTPRIVATE */
281 2, /* OMP_CLAUSE_LASTPRIVATE */
282 5, /* OMP_CLAUSE_REDUCTION */
283 1, /* OMP_CLAUSE_COPYIN */
284 1, /* OMP_CLAUSE_COPYPRIVATE */
285 3, /* OMP_CLAUSE_LINEAR */
286 2, /* OMP_CLAUSE_ALIGNED */
287 1, /* OMP_CLAUSE_DEPEND */
288 1, /* OMP_CLAUSE_UNIFORM */
289 1, /* OMP_CLAUSE_TO_DECLARE */
290 1, /* OMP_CLAUSE_LINK */
291 2, /* OMP_CLAUSE_FROM */
292 2, /* OMP_CLAUSE_TO */
293 2, /* OMP_CLAUSE_MAP */
294 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
295 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
296 2, /* OMP_CLAUSE__CACHE_ */
297 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
298 1, /* OMP_CLAUSE_USE_DEVICE */
299 2, /* OMP_CLAUSE_GANG */
300 1, /* OMP_CLAUSE_ASYNC */
301 1, /* OMP_CLAUSE_WAIT */
302 0, /* OMP_CLAUSE_AUTO */
303 0, /* OMP_CLAUSE_SEQ */
304 1, /* OMP_CLAUSE__LOOPTEMP_ */
305 1, /* OMP_CLAUSE_IF */
306 1, /* OMP_CLAUSE_NUM_THREADS */
307 1, /* OMP_CLAUSE_SCHEDULE */
308 0, /* OMP_CLAUSE_NOWAIT */
309 1, /* OMP_CLAUSE_ORDERED */
310 0, /* OMP_CLAUSE_DEFAULT */
311 3, /* OMP_CLAUSE_COLLAPSE */
312 0, /* OMP_CLAUSE_UNTIED */
313 1, /* OMP_CLAUSE_FINAL */
314 0, /* OMP_CLAUSE_MERGEABLE */
315 1, /* OMP_CLAUSE_DEVICE */
316 1, /* OMP_CLAUSE_DIST_SCHEDULE */
317 0, /* OMP_CLAUSE_INBRANCH */
318 0, /* OMP_CLAUSE_NOTINBRANCH */
319 1, /* OMP_CLAUSE_NUM_TEAMS */
320 1, /* OMP_CLAUSE_THREAD_LIMIT */
321 0, /* OMP_CLAUSE_PROC_BIND */
322 1, /* OMP_CLAUSE_SAFELEN */
323 1, /* OMP_CLAUSE_SIMDLEN */
324 0, /* OMP_CLAUSE_FOR */
325 0, /* OMP_CLAUSE_PARALLEL */
326 0, /* OMP_CLAUSE_SECTIONS */
327 0, /* OMP_CLAUSE_TASKGROUP */
328 1, /* OMP_CLAUSE_PRIORITY */
329 1, /* OMP_CLAUSE_GRAINSIZE */
330 1, /* OMP_CLAUSE_NUM_TASKS */
331 0, /* OMP_CLAUSE_NOGROUP */
332 0, /* OMP_CLAUSE_THREADS */
333 0, /* OMP_CLAUSE_SIMD */
334 1, /* OMP_CLAUSE_HINT */
335 0, /* OMP_CLAUSE_DEFAULTMAP */
336 1, /* OMP_CLAUSE__SIMDUID_ */
337 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
338 0, /* OMP_CLAUSE_INDEPENDENT */
339 1, /* OMP_CLAUSE_WORKER */
340 1, /* OMP_CLAUSE_VECTOR */
341 1, /* OMP_CLAUSE_NUM_GANGS */
342 1, /* OMP_CLAUSE_NUM_WORKERS */
343 1, /* OMP_CLAUSE_VECTOR_LENGTH */
344 };
345
346 const char * const omp_clause_code_name[] =
347 {
348 "error_clause",
349 "private",
350 "shared",
351 "firstprivate",
352 "lastprivate",
353 "reduction",
354 "copyin",
355 "copyprivate",
356 "linear",
357 "aligned",
358 "depend",
359 "uniform",
360 "to",
361 "link",
362 "from",
363 "to",
364 "map",
365 "use_device_ptr",
366 "is_device_ptr",
367 "_cache_",
368 "device_resident",
369 "use_device",
370 "gang",
371 "async",
372 "wait",
373 "auto",
374 "seq",
375 "_looptemp_",
376 "if",
377 "num_threads",
378 "schedule",
379 "nowait",
380 "ordered",
381 "default",
382 "collapse",
383 "untied",
384 "final",
385 "mergeable",
386 "device",
387 "dist_schedule",
388 "inbranch",
389 "notinbranch",
390 "num_teams",
391 "thread_limit",
392 "proc_bind",
393 "safelen",
394 "simdlen",
395 "for",
396 "parallel",
397 "sections",
398 "taskgroup",
399 "priority",
400 "grainsize",
401 "num_tasks",
402 "nogroup",
403 "threads",
404 "simd",
405 "hint",
406 "defaultmap",
407 "_simduid_",
408 "_Cilk_for_count_",
409 "independent",
410 "worker",
411 "vector",
412 "num_gangs",
413 "num_workers",
414 "vector_length"
415 };
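/* For illustration: the two tables above must stay in sync with the
   OMP_CLAUSE_* codes in tree.h.  tree_size () below sizes a clause as

     sizeof (struct tree_omp_clause)
       + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1) * sizeof (tree)

   so, for instance, an OMP_CLAUSE_REDUCTION node carries five operand
   slots while an OMP_CLAUSE_NOWAIT node carries none.  */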
416
417
418 /* Return the tree node structure used by tree code CODE. */
419
420 static inline enum tree_node_structure_enum
421 tree_node_structure_for_code (enum tree_code code)
422 {
423 switch (TREE_CODE_CLASS (code))
424 {
425 case tcc_declaration:
426 {
427 switch (code)
428 {
429 case FIELD_DECL:
430 return TS_FIELD_DECL;
431 case PARM_DECL:
432 return TS_PARM_DECL;
433 case VAR_DECL:
434 return TS_VAR_DECL;
435 case LABEL_DECL:
436 return TS_LABEL_DECL;
437 case RESULT_DECL:
438 return TS_RESULT_DECL;
439 case DEBUG_EXPR_DECL:
440 return TS_DECL_WRTL;
441 case CONST_DECL:
442 return TS_CONST_DECL;
443 case TYPE_DECL:
444 return TS_TYPE_DECL;
445 case FUNCTION_DECL:
446 return TS_FUNCTION_DECL;
447 case TRANSLATION_UNIT_DECL:
448 return TS_TRANSLATION_UNIT_DECL;
449 default:
450 return TS_DECL_NON_COMMON;
451 }
452 }
453 case tcc_type:
454 return TS_TYPE_NON_COMMON;
455 case tcc_reference:
456 case tcc_comparison:
457 case tcc_unary:
458 case tcc_binary:
459 case tcc_expression:
460 case tcc_statement:
461 case tcc_vl_exp:
462 return TS_EXP;
463 default: /* tcc_constant and tcc_exceptional */
464 break;
465 }
466 switch (code)
467 {
468 /* tcc_constant cases. */
469 case VOID_CST: return TS_TYPED;
470 case INTEGER_CST: return TS_INT_CST;
471 case REAL_CST: return TS_REAL_CST;
472 case FIXED_CST: return TS_FIXED_CST;
473 case COMPLEX_CST: return TS_COMPLEX;
474 case VECTOR_CST: return TS_VECTOR;
475 case STRING_CST: return TS_STRING;
476 /* tcc_exceptional cases. */
477 case ERROR_MARK: return TS_COMMON;
478 case IDENTIFIER_NODE: return TS_IDENTIFIER;
479 case TREE_LIST: return TS_LIST;
480 case TREE_VEC: return TS_VEC;
481 case SSA_NAME: return TS_SSA_NAME;
482 case PLACEHOLDER_EXPR: return TS_COMMON;
483 case STATEMENT_LIST: return TS_STATEMENT_LIST;
484 case BLOCK: return TS_BLOCK;
485 case CONSTRUCTOR: return TS_CONSTRUCTOR;
486 case TREE_BINFO: return TS_BINFO;
487 case OMP_CLAUSE: return TS_OMP_CLAUSE;
488 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
489 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
490
491 default:
492 gcc_unreachable ();
493 }
494 }
495
496
497 /* Initialize tree_contains_struct to describe the hierarchy of tree
498 nodes. */
499
500 static void
501 initialize_tree_contains_struct (void)
502 {
503 unsigned i;
504
505 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
506 {
507 enum tree_code code;
508 enum tree_node_structure_enum ts_code;
509
510 code = (enum tree_code) i;
511 ts_code = tree_node_structure_for_code (code);
512
513 /* Mark the TS structure itself. */
514 tree_contains_struct[code][ts_code] = 1;
515
516 /* Mark all the structures that TS is derived from. */
517 switch (ts_code)
518 {
519 case TS_TYPED:
520 case TS_BLOCK:
521 MARK_TS_BASE (code);
522 break;
523
524 case TS_COMMON:
525 case TS_INT_CST:
526 case TS_REAL_CST:
527 case TS_FIXED_CST:
528 case TS_VECTOR:
529 case TS_STRING:
530 case TS_COMPLEX:
531 case TS_SSA_NAME:
532 case TS_CONSTRUCTOR:
533 case TS_EXP:
534 case TS_STATEMENT_LIST:
535 MARK_TS_TYPED (code);
536 break;
537
538 case TS_IDENTIFIER:
539 case TS_DECL_MINIMAL:
540 case TS_TYPE_COMMON:
541 case TS_LIST:
542 case TS_VEC:
543 case TS_BINFO:
544 case TS_OMP_CLAUSE:
545 case TS_OPTIMIZATION:
546 case TS_TARGET_OPTION:
547 MARK_TS_COMMON (code);
548 break;
549
550 case TS_TYPE_WITH_LANG_SPECIFIC:
551 MARK_TS_TYPE_COMMON (code);
552 break;
553
554 case TS_TYPE_NON_COMMON:
555 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
556 break;
557
558 case TS_DECL_COMMON:
559 MARK_TS_DECL_MINIMAL (code);
560 break;
561
562 case TS_DECL_WRTL:
563 case TS_CONST_DECL:
564 MARK_TS_DECL_COMMON (code);
565 break;
566
567 case TS_DECL_NON_COMMON:
568 MARK_TS_DECL_WITH_VIS (code);
569 break;
570
571 case TS_DECL_WITH_VIS:
572 case TS_PARM_DECL:
573 case TS_LABEL_DECL:
574 case TS_RESULT_DECL:
575 MARK_TS_DECL_WRTL (code);
576 break;
577
578 case TS_FIELD_DECL:
579 MARK_TS_DECL_COMMON (code);
580 break;
581
582 case TS_VAR_DECL:
583 MARK_TS_DECL_WITH_VIS (code);
584 break;
585
586 case TS_TYPE_DECL:
587 case TS_FUNCTION_DECL:
588 MARK_TS_DECL_NON_COMMON (code);
589 break;
590
591 case TS_TRANSLATION_UNIT_DECL:
592 MARK_TS_DECL_COMMON (code);
593 break;
594
595 default:
596 gcc_unreachable ();
597 }
598 }
599
600 /* Basic consistency checks for attributes used in fold. */
601 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
602 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
603 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
604 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
605 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
606 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
607 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
608 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
609 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
610 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
611 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
612 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
613 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
614 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
615 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
616 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
617 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
618 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
619 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
620 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
621 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
622 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
623 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
624 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
625 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
627 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
628 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
629 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
630 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
631 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
632 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
633 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
634 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
635 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
636 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
637 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
641 }
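/* Illustrative use of the table filled in above: accessors test it via
   CODE_CONTAINS_STRUCT, e.g.

     if (CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_WITH_VIS))
       name = DECL_ASSEMBLER_NAME (decl);

   where DECL and NAME stand for trees the caller already has in hand.  */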
642
643
644 /* Init tree.c. */
645
646 void
647 init_ttree (void)
648 {
649 /* Initialize the hash table of types. */
650 type_hash_table
651 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
652
653 debug_expr_for_decl
654 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
655
656 value_expr_for_decl
657 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
658
659 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
660
661 int_cst_node = make_int_cst (1, 1);
662
663 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
664
665 cl_optimization_node = make_node (OPTIMIZATION_NODE);
666 cl_target_option_node = make_node (TARGET_OPTION_NODE);
667
668 /* Initialize the tree_contains_struct array. */
669 initialize_tree_contains_struct ();
670 lang_hooks.init_ts ();
671 }
672
673 \f
674 /* The name of the object as the assembler will see it (but before any
675 translations made by ASM_OUTPUT_LABELREF). Often this is the same
676 as DECL_NAME. It is an IDENTIFIER_NODE. */
677 tree
678 decl_assembler_name (tree decl)
679 {
680 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
681 lang_hooks.set_decl_assembler_name (decl);
682 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
683 }
684
685 /* When the target supports COMDAT groups, this indicates which group the
686 DECL is associated with. This can be either an IDENTIFIER_NODE or a
687 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
688 tree
689 decl_comdat_group (const_tree node)
690 {
691 struct symtab_node *snode = symtab_node::get (node);
692 if (!snode)
693 return NULL;
694 return snode->get_comdat_group ();
695 }
696
697 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
698 tree
699 decl_comdat_group_id (const_tree node)
700 {
701 struct symtab_node *snode = symtab_node::get (node);
702 if (!snode)
703 return NULL;
704 return snode->get_comdat_group_id ();
705 }
706
707 /* When the target supports named sections, return the section name of NODE
708 as a string, or NULL if it is in no section. */
709 const char *
710 decl_section_name (const_tree node)
711 {
712 struct symtab_node *snode = symtab_node::get (node);
713 if (!snode)
714 return NULL;
715 return snode->get_section ();
716 }
717
718 /* Set the section name of NODE to the string VALUE, or clear it when
719 VALUE is NULL. */
720 void
721 set_decl_section_name (tree node, const char *value)
722 {
723 struct symtab_node *snode;
724
725 if (value == NULL)
726 {
727 snode = symtab_node::get (node);
728 if (!snode)
729 return;
730 }
731 else if (TREE_CODE (node) == VAR_DECL)
732 snode = varpool_node::get_create (node);
733 else
734 snode = cgraph_node::get_create (node);
735 snode->set_section (value);
736 }
737
738 /* Return TLS model of a variable NODE. */
739 enum tls_model
740 decl_tls_model (const_tree node)
741 {
742 struct varpool_node *snode = varpool_node::get (node);
743 if (!snode)
744 return TLS_MODEL_NONE;
745 return snode->tls_model;
746 }
747
748 /* Set TLS model of variable NODE to MODEL. */
749 void
750 set_decl_tls_model (tree node, enum tls_model model)
751 {
752 struct varpool_node *vnode;
753
754 if (model == TLS_MODEL_NONE)
755 {
756 vnode = varpool_node::get (node);
757 if (!vnode)
758 return;
759 }
760 else
761 vnode = varpool_node::get_create (node);
762 vnode->tls_model = model;
763 }
764
765 /* Compute the number of bytes occupied by a tree with code CODE.
766 This function cannot be used for nodes that have variable sizes,
767 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
768 size_t
769 tree_code_size (enum tree_code code)
770 {
771 switch (TREE_CODE_CLASS (code))
772 {
773 case tcc_declaration: /* A decl node */
774 {
775 switch (code)
776 {
777 case FIELD_DECL:
778 return sizeof (struct tree_field_decl);
779 case PARM_DECL:
780 return sizeof (struct tree_parm_decl);
781 case VAR_DECL:
782 return sizeof (struct tree_var_decl);
783 case LABEL_DECL:
784 return sizeof (struct tree_label_decl);
785 case RESULT_DECL:
786 return sizeof (struct tree_result_decl);
787 case CONST_DECL:
788 return sizeof (struct tree_const_decl);
789 case TYPE_DECL:
790 return sizeof (struct tree_type_decl);
791 case FUNCTION_DECL:
792 return sizeof (struct tree_function_decl);
793 case DEBUG_EXPR_DECL:
794 return sizeof (struct tree_decl_with_rtl);
795 case TRANSLATION_UNIT_DECL:
796 return sizeof (struct tree_translation_unit_decl);
797 case NAMESPACE_DECL:
798 case IMPORTED_DECL:
799 case NAMELIST_DECL:
800 return sizeof (struct tree_decl_non_common);
801 default:
802 return lang_hooks.tree_size (code);
803 }
804 }
805
806 case tcc_type: /* a type node */
807 return sizeof (struct tree_type_non_common);
808
809 case tcc_reference: /* a reference */
810 case tcc_expression: /* an expression */
811 case tcc_statement: /* an expression with side effects */
812 case tcc_comparison: /* a comparison expression */
813 case tcc_unary: /* a unary arithmetic expression */
814 case tcc_binary: /* a binary arithmetic expression */
815 return (sizeof (struct tree_exp)
816 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
817
818 case tcc_constant: /* a constant */
819 switch (code)
820 {
821 case VOID_CST: return sizeof (struct tree_typed);
822 case INTEGER_CST: gcc_unreachable ();
823 case REAL_CST: return sizeof (struct tree_real_cst);
824 case FIXED_CST: return sizeof (struct tree_fixed_cst);
825 case COMPLEX_CST: return sizeof (struct tree_complex);
826 case VECTOR_CST: return sizeof (struct tree_vector);
827 case STRING_CST: gcc_unreachable ();
828 default:
829 return lang_hooks.tree_size (code);
830 }
831
832 case tcc_exceptional: /* something random, like an identifier. */
833 switch (code)
834 {
835 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
836 case TREE_LIST: return sizeof (struct tree_list);
837
838 case ERROR_MARK:
839 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
840
841 case TREE_VEC:
842 case OMP_CLAUSE: gcc_unreachable ();
843
844 case SSA_NAME: return sizeof (struct tree_ssa_name);
845
846 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
847 case BLOCK: return sizeof (struct tree_block);
848 case CONSTRUCTOR: return sizeof (struct tree_constructor);
849 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
850 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
851
852 default:
853 return lang_hooks.tree_size (code);
854 }
855
856 default:
857 gcc_unreachable ();
858 }
859 }
860
861 /* Compute the number of bytes occupied by NODE. This routine only
862 looks at TREE_CODE, except for those nodes that have variable sizes. */
863 size_t
864 tree_size (const_tree node)
865 {
866 const enum tree_code code = TREE_CODE (node);
867 switch (code)
868 {
869 case INTEGER_CST:
870 return (sizeof (struct tree_int_cst)
871 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
872
873 case TREE_BINFO:
874 return (offsetof (struct tree_binfo, base_binfos)
875 + vec<tree, va_gc>
876 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
877
878 case TREE_VEC:
879 return (sizeof (struct tree_vec)
880 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
881
882 case VECTOR_CST:
883 return (sizeof (struct tree_vector)
884 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
885
886 case STRING_CST:
887 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
888
889 case OMP_CLAUSE:
890 return (sizeof (struct tree_omp_clause)
891 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
892 * sizeof (tree));
893
894 default:
895 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
896 return (sizeof (struct tree_exp)
897 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
898 else
899 return tree_code_size (code);
900 }
901 }
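/* Worked example (illustrative): a TREE_VEC of four elements occupies

     sizeof (struct tree_vec) + (4 - 1) * sizeof (tree)

   bytes, i.e. the fixed header plus three extra embedded pointers,
   because one tree slot is already part of struct tree_vec.  */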
902
903 /* Record interesting allocation statistics for a tree node with CODE
904 and LENGTH. */
905
906 static void
907 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
908 size_t length ATTRIBUTE_UNUSED)
909 {
910 enum tree_code_class type = TREE_CODE_CLASS (code);
911 tree_node_kind kind;
912
913 if (!GATHER_STATISTICS)
914 return;
915
916 switch (type)
917 {
918 case tcc_declaration: /* A decl node */
919 kind = d_kind;
920 break;
921
922 case tcc_type: /* a type node */
923 kind = t_kind;
924 break;
925
926 case tcc_statement: /* an expression with side effects */
927 kind = s_kind;
928 break;
929
930 case tcc_reference: /* a reference */
931 kind = r_kind;
932 break;
933
934 case tcc_expression: /* an expression */
935 case tcc_comparison: /* a comparison expression */
936 case tcc_unary: /* a unary arithmetic expression */
937 case tcc_binary: /* a binary arithmetic expression */
938 kind = e_kind;
939 break;
940
941 case tcc_constant: /* a constant */
942 kind = c_kind;
943 break;
944
945 case tcc_exceptional: /* something random, like an identifier. */
946 switch (code)
947 {
948 case IDENTIFIER_NODE:
949 kind = id_kind;
950 break;
951
952 case TREE_VEC:
953 kind = vec_kind;
954 break;
955
956 case TREE_BINFO:
957 kind = binfo_kind;
958 break;
959
960 case SSA_NAME:
961 kind = ssa_name_kind;
962 break;
963
964 case BLOCK:
965 kind = b_kind;
966 break;
967
968 case CONSTRUCTOR:
969 kind = constr_kind;
970 break;
971
972 case OMP_CLAUSE:
973 kind = omp_clause_kind;
974 break;
975
976 default:
977 kind = x_kind;
978 break;
979 }
980 break;
981
982 case tcc_vl_exp:
983 kind = e_kind;
984 break;
985
986 default:
987 gcc_unreachable ();
988 }
989
990 tree_code_counts[(int) code]++;
991 tree_node_counts[(int) kind]++;
992 tree_node_sizes[(int) kind] += length;
993 }
994
995 /* Allocate and return a new UID from the DECL_UID namespace. */
996
997 int
998 allocate_decl_uid (void)
999 {
1000 return next_decl_uid++;
1001 }
1002
1003 /* Return a newly allocated node of code CODE. For decl and type
1004 nodes, some other fields are initialized. The rest of the node is
1005 initialized to zero. This function cannot be used for TREE_VEC,
1006 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1007 tree_code_size.
1008
1009 Achoo! I got a code in the node. */
1010
1011 tree
1012 make_node_stat (enum tree_code code MEM_STAT_DECL)
1013 {
1014 tree t;
1015 enum tree_code_class type = TREE_CODE_CLASS (code);
1016 size_t length = tree_code_size (code);
1017
1018 record_node_allocation_statistics (code, length);
1019
1020 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1021 TREE_SET_CODE (t, code);
1022
1023 switch (type)
1024 {
1025 case tcc_statement:
1026 TREE_SIDE_EFFECTS (t) = 1;
1027 break;
1028
1029 case tcc_declaration:
1030 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1031 {
1032 if (code == FUNCTION_DECL)
1033 {
1034 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1035 DECL_MODE (t) = FUNCTION_MODE;
1036 }
1037 else
1038 DECL_ALIGN (t) = 1;
1039 }
1040 DECL_SOURCE_LOCATION (t) = input_location;
1041 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1042 DECL_UID (t) = --next_debug_decl_uid;
1043 else
1044 {
1045 DECL_UID (t) = allocate_decl_uid ();
1046 SET_DECL_PT_UID (t, -1);
1047 }
1048 if (TREE_CODE (t) == LABEL_DECL)
1049 LABEL_DECL_UID (t) = -1;
1050
1051 break;
1052
1053 case tcc_type:
1054 TYPE_UID (t) = next_type_uid++;
1055 TYPE_ALIGN (t) = BITS_PER_UNIT;
1056 TYPE_USER_ALIGN (t) = 0;
1057 TYPE_MAIN_VARIANT (t) = t;
1058 TYPE_CANONICAL (t) = t;
1059
1060 /* Default to no attributes for type, but let target change that. */
1061 TYPE_ATTRIBUTES (t) = NULL_TREE;
1062 targetm.set_default_type_attributes (t);
1063
1064 /* We have not yet computed the alias set for this type. */
1065 TYPE_ALIAS_SET (t) = -1;
1066 break;
1067
1068 case tcc_constant:
1069 TREE_CONSTANT (t) = 1;
1070 break;
1071
1072 case tcc_expression:
1073 switch (code)
1074 {
1075 case INIT_EXPR:
1076 case MODIFY_EXPR:
1077 case VA_ARG_EXPR:
1078 case PREDECREMENT_EXPR:
1079 case PREINCREMENT_EXPR:
1080 case POSTDECREMENT_EXPR:
1081 case POSTINCREMENT_EXPR:
1082 /* All of these have side-effects, no matter what their
1083 operands are. */
1084 TREE_SIDE_EFFECTS (t) = 1;
1085 break;
1086
1087 default:
1088 break;
1089 }
1090 break;
1091
1092 case tcc_exceptional:
1093 switch (code)
1094 {
1095 case TARGET_OPTION_NODE:
1096 TREE_TARGET_OPTION(t)
1097 = ggc_cleared_alloc<struct cl_target_option> ();
1098 break;
1099
1100 case OPTIMIZATION_NODE:
1101 TREE_OPTIMIZATION (t)
1102 = ggc_cleared_alloc<struct cl_optimization> ();
1103 break;
1104
1105 default:
1106 break;
1107 }
1108 break;
1109
1110 default:
1111 /* Other classes need no special treatment. */
1112 break;
1113 }
1114
1115 return t;
1116 }
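/* Illustrative sketch of typical make_node () use for a fixed-size code
   (assumes nothing beyond the usual tree accessors):

     tree node = make_node (TREE_LIST);
     TREE_PURPOSE (node) = NULL_TREE;
     TREE_VALUE (node) = NULL_TREE;

   Variable-sized codes such as TREE_VEC, INTEGER_CST and OMP_CLAUSE must
   instead go through their dedicated constructors, e.g. make_tree_vec.  */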
1117 \f
1118 /* Return a new node with the same contents as NODE except that its
1119 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1120
1121 tree
1122 copy_node_stat (tree node MEM_STAT_DECL)
1123 {
1124 tree t;
1125 enum tree_code code = TREE_CODE (node);
1126 size_t length;
1127
1128 gcc_assert (code != STATEMENT_LIST);
1129
1130 length = tree_size (node);
1131 record_node_allocation_statistics (code, length);
1132 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1133 memcpy (t, node, length);
1134
1135 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1136 TREE_CHAIN (t) = 0;
1137 TREE_ASM_WRITTEN (t) = 0;
1138 TREE_VISITED (t) = 0;
1139
1140 if (TREE_CODE_CLASS (code) == tcc_declaration)
1141 {
1142 if (code == DEBUG_EXPR_DECL)
1143 DECL_UID (t) = --next_debug_decl_uid;
1144 else
1145 {
1146 DECL_UID (t) = allocate_decl_uid ();
1147 if (DECL_PT_UID_SET_P (node))
1148 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1149 }
1150 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1151 && DECL_HAS_VALUE_EXPR_P (node))
1152 {
1153 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1154 DECL_HAS_VALUE_EXPR_P (t) = 1;
1155 }
1156 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1157 if (TREE_CODE (node) == VAR_DECL)
1158 {
1159 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1160 t->decl_with_vis.symtab_node = NULL;
1161 }
1162 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1163 {
1164 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1165 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1166 }
1167 if (TREE_CODE (node) == FUNCTION_DECL)
1168 {
1169 DECL_STRUCT_FUNCTION (t) = NULL;
1170 t->decl_with_vis.symtab_node = NULL;
1171 }
1172 }
1173 else if (TREE_CODE_CLASS (code) == tcc_type)
1174 {
1175 TYPE_UID (t) = next_type_uid++;
1176 /* The following is so that the debug code for
1177 the copy is different from the original type.
1178 The two statements usually duplicate each other
1179 (because they clear fields of the same union),
1180 but the optimizer should catch that. */
1181 TYPE_SYMTAB_POINTER (t) = 0;
1182 TYPE_SYMTAB_ADDRESS (t) = 0;
1183
1184 /* Do not copy the values cache. */
1185 if (TYPE_CACHED_VALUES_P (t))
1186 {
1187 TYPE_CACHED_VALUES_P (t) = 0;
1188 TYPE_CACHED_VALUES (t) = NULL_TREE;
1189 }
1190 }
1191 else if (code == TARGET_OPTION_NODE)
1192 {
1193 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1194 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1195 sizeof (struct cl_target_option));
1196 }
1197 else if (code == OPTIMIZATION_NODE)
1198 {
1199 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1200 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1201 sizeof (struct cl_optimization));
1202 }
1203
1204 return t;
1205 }
1206
1207 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1208 For example, this can copy a list made of TREE_LIST nodes. */
1209
1210 tree
1211 copy_list (tree list)
1212 {
1213 tree head;
1214 tree prev, next;
1215
1216 if (list == 0)
1217 return 0;
1218
1219 head = prev = copy_node (list);
1220 next = TREE_CHAIN (list);
1221 while (next)
1222 {
1223 TREE_CHAIN (prev) = copy_node (next);
1224 prev = TREE_CHAIN (prev);
1225 next = TREE_CHAIN (next);
1226 }
1227 return head;
1228 }
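/* Illustrative sketch: copy_node () duplicates a single node (giving
   decls and types a fresh UID), while copy_list () duplicates a whole
   TREE_CHAIN'd list, e.g.

     tree attrs_copy = copy_list (TYPE_ATTRIBUTES (some_type));

   leaves the original attribute list of SOME_TYPE untouched; SOME_TYPE
   stands for whatever type tree the caller already has.  */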
1229
1230 \f
1231 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1232 INTEGER_CST with value CST and type TYPE. */
1233
1234 static unsigned int
1235 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1236 {
1237 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1238 /* We need an extra zero HWI if CST is an unsigned integer with its
1239 upper bit set, and if CST occupies a whole number of HWIs. */
1240 if (TYPE_UNSIGNED (type)
1241 && wi::neg_p (cst)
1242 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1243 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1244 return cst.get_len ();
1245 }
1246
1247 /* Return a new INTEGER_CST with value CST and type TYPE. */
1248
1249 static tree
1250 build_new_int_cst (tree type, const wide_int &cst)
1251 {
1252 unsigned int len = cst.get_len ();
1253 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1254 tree nt = make_int_cst (len, ext_len);
1255
1256 if (len < ext_len)
1257 {
1258 --ext_len;
1259 TREE_INT_CST_ELT (nt, ext_len) = 0;
1260 for (unsigned int i = len; i < ext_len; ++i)
1261 TREE_INT_CST_ELT (nt, i) = -1;
1262 }
1263 else if (TYPE_UNSIGNED (type)
1264 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1265 {
1266 len--;
1267 TREE_INT_CST_ELT (nt, len)
1268 = zext_hwi (cst.elt (len),
1269 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1270 }
1271
1272 for (unsigned int i = 0; i < len; i++)
1273 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1274 TREE_TYPE (nt) = type;
1275 return nt;
1276 }
1277
1278 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1279
1280 tree
1281 build_int_cst (tree type, HOST_WIDE_INT low)
1282 {
1283 /* Support legacy code. */
1284 if (!type)
1285 type = integer_type_node;
1286
1287 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1288 }
1289
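/* Create an INT_CST node with the unsigned value CST zero extended to TYPE. */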
1290 tree
1291 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1292 {
1293 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1294 }
1295
1296 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1297
1298 tree
1299 build_int_cst_type (tree type, HOST_WIDE_INT low)
1300 {
1301 gcc_assert (type);
1302 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1303 }
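/* Illustrative sketch of the three entry points above (assumes the usual
   global type nodes are initialized):

     tree a = build_int_cst (integer_type_node, -1);        (sign extended)
     tree b = build_int_cstu (size_type_node, 7);           (zero extended)
     tree c = build_int_cst_type (long_integer_type_node, 42);  (TYPE must be non-NULL)

   All of them funnel into wide_int_to_tree () and therefore return
   shared INTEGER_CST nodes whenever the value is cached.  */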
1304
1305 /* Construct a tree of type TYPE with the value given by CST. The signedness
1306 of CST is assumed to be the same as the signedness of TYPE. */
1307
1308 tree
1309 double_int_to_tree (tree type, double_int cst)
1310 {
1311 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1312 }
1313
1314 /* We force the wide_int CST to the range of the type TYPE by sign or
1315 zero extending it. OVERFLOWABLE indicates whether we are interested in
1316 overflow of the value: when >0 we are only interested in signed
1317 overflow, and when <0 we are interested in any overflow. OVERFLOWED
1318 indicates whether overflow has already occurred. We force the
1319 result's value to be within the range of TYPE (by setting to 0 or 1 all
1320 the bits outside the type's range). We set TREE_OVERFLOW on the
1321 result if
1322 OVERFLOWED is nonzero,
1323 or OVERFLOWABLE is >0 and signed overflow occurs,
1324 or OVERFLOWABLE is <0 and any overflow occurs.
1325 We return a new tree node for the extended wide_int. The node
1326 is shared if no overflow flags are set. */
1327
1328
1329 tree
1330 force_fit_type (tree type, const wide_int_ref &cst,
1331 int overflowable, bool overflowed)
1332 {
1333 signop sign = TYPE_SIGN (type);
1334
1335 /* If we need to set overflow flags, return a new unshared node. */
1336 if (overflowed || !wi::fits_to_tree_p (cst, type))
1337 {
1338 if (overflowed
1339 || overflowable < 0
1340 || (overflowable > 0 && sign == SIGNED))
1341 {
1342 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1343 tree t = build_new_int_cst (type, tmp);
1344 TREE_OVERFLOW (t) = 1;
1345 return t;
1346 }
1347 }
1348
1349 /* Else build a shared node. */
1350 return wide_int_to_tree (type, cst);
1351 }
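/* Worked example (illustrative): the value 300 does not fit a signed
   8-bit type, so with OVERFLOWABLE > 0 the call

     tree t = force_fit_type (signed_char_type_node,
                              wi::shwi (300, 16), 1, false);

   wraps the value to 44 and returns an unshared INTEGER_CST with
   TREE_OVERFLOW set; a value that fits simply falls through to
   wide_int_to_tree () and may be shared.  */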
1352
1353 /* These are the hash table functions for the hash table of INTEGER_CST
1354 nodes of a sizetype. */
1355
1356 /* Return the hash code for X, an INTEGER_CST. */
1357
1358 hashval_t
1359 int_cst_hasher::hash (tree x)
1360 {
1361 const_tree const t = x;
1362 hashval_t code = TYPE_UID (TREE_TYPE (t));
1363 int i;
1364
1365 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1366 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1367
1368 return code;
1369 }
1370
1371 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1372 is the same as that given by Y, also an INTEGER_CST tree node. */
1373
1374 bool
1375 int_cst_hasher::equal (tree x, tree y)
1376 {
1377 const_tree const xt = x;
1378 const_tree const yt = y;
1379
1380 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1381 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1382 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1383 return false;
1384
1385 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1386 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1387 return false;
1388
1389 return true;
1390 }
1391
1392 /* Create an INT_CST node of TYPE and value CST.
1393 The returned node is always shared. For small integers we use a
1394 per-type vector cache, for larger ones we use a single hash table.
1395 The value is extended from its precision according to the sign of
1396 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1397 the upper bits and ensures that hashing and value equality based
1398 upon the underlying HOST_WIDE_INTs works without masking. */
1399
1400 tree
1401 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1402 {
1403 tree t;
1404 int ix = -1;
1405 int limit = 0;
1406
1407 gcc_assert (type);
1408 unsigned int prec = TYPE_PRECISION (type);
1409 signop sgn = TYPE_SIGN (type);
1410
1411 /* Verify that everything is canonical. */
1412 int l = pcst.get_len ();
1413 if (l > 1)
1414 {
1415 if (pcst.elt (l - 1) == 0)
1416 gcc_checking_assert (pcst.elt (l - 2) < 0);
1417 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1418 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1419 }
1420
1421 wide_int cst = wide_int::from (pcst, prec, sgn);
1422 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1423
1424 if (ext_len == 1)
1425 {
1426 /* We just need to store a single HOST_WIDE_INT. */
1427 HOST_WIDE_INT hwi;
1428 if (TYPE_UNSIGNED (type))
1429 hwi = cst.to_uhwi ();
1430 else
1431 hwi = cst.to_shwi ();
1432
1433 switch (TREE_CODE (type))
1434 {
1435 case NULLPTR_TYPE:
1436 gcc_assert (hwi == 0);
1437 /* Fallthru. */
1438
1439 case POINTER_TYPE:
1440 case REFERENCE_TYPE:
1441 case POINTER_BOUNDS_TYPE:
1442 /* Cache NULL pointer and zero bounds. */
1443 if (hwi == 0)
1444 {
1445 limit = 1;
1446 ix = 0;
1447 }
1448 break;
1449
1450 case BOOLEAN_TYPE:
1451 /* Cache false or true. */
1452 limit = 2;
1453 if (hwi < 2)
1454 ix = hwi;
1455 break;
1456
1457 case INTEGER_TYPE:
1458 case OFFSET_TYPE:
1459 if (TYPE_SIGN (type) == UNSIGNED)
1460 {
1461 /* Cache [0, N). */
1462 limit = INTEGER_SHARE_LIMIT;
1463 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1464 ix = hwi;
1465 }
1466 else
1467 {
1468 /* Cache [-1, N). */
1469 limit = INTEGER_SHARE_LIMIT + 1;
1470 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1471 ix = hwi + 1;
1472 }
1473 break;
1474
1475 case ENUMERAL_TYPE:
1476 break;
1477
1478 default:
1479 gcc_unreachable ();
1480 }
1481
1482 if (ix >= 0)
1483 {
1484 /* Look for it in the type's vector of small shared ints. */
1485 if (!TYPE_CACHED_VALUES_P (type))
1486 {
1487 TYPE_CACHED_VALUES_P (type) = 1;
1488 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1489 }
1490
1491 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1492 if (t)
1493 /* Make sure no one is clobbering the shared constant. */
1494 gcc_checking_assert (TREE_TYPE (t) == type
1495 && TREE_INT_CST_NUNITS (t) == 1
1496 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1497 && TREE_INT_CST_EXT_NUNITS (t) == 1
1498 && TREE_INT_CST_ELT (t, 0) == hwi);
1499 else
1500 {
1501 /* Create a new shared int. */
1502 t = build_new_int_cst (type, cst);
1503 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1504 }
1505 }
1506 else
1507 {
1508 /* Use the cache of larger shared ints, using int_cst_node as
1509 a temporary. */
1510
1511 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1512 TREE_TYPE (int_cst_node) = type;
1513
1514 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1515 t = *slot;
1516 if (!t)
1517 {
1518 /* Insert this one into the hash table. */
1519 t = int_cst_node;
1520 *slot = t;
1521 /* Make a new node for next time round. */
1522 int_cst_node = make_int_cst (1, 1);
1523 }
1524 }
1525 }
1526 else
1527 {
1528 /* The value either hashes properly or we drop it on the floor
1529 for the gc to take care of. There will not be enough of them
1530 to worry about. */
1531
1532 tree nt = build_new_int_cst (type, cst);
1533 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1534 t = *slot;
1535 if (!t)
1536 {
1537 /* Insert this one into the hash table. */
1538 t = nt;
1539 *slot = t;
1540 }
1541 }
1542
1543 return t;
1544 }
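/* Note (illustrative): thanks to the caching above, small constants are
   physically shared; for instance

     build_int_cst (integer_type_node, 0) == integer_zero_node

   holds as a pointer comparison once the global trees are set up, and
   larger values are looked up in int_cst_hash_table so repeated requests
   still yield a single node.  */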
1545
1546 void
1547 cache_integer_cst (tree t)
1548 {
1549 tree type = TREE_TYPE (t);
1550 int ix = -1;
1551 int limit = 0;
1552 int prec = TYPE_PRECISION (type);
1553
1554 gcc_assert (!TREE_OVERFLOW (t));
1555
1556 switch (TREE_CODE (type))
1557 {
1558 case NULLPTR_TYPE:
1559 gcc_assert (integer_zerop (t));
1560 /* Fallthru. */
1561
1562 case POINTER_TYPE:
1563 case REFERENCE_TYPE:
1564 /* Cache NULL pointer. */
1565 if (integer_zerop (t))
1566 {
1567 limit = 1;
1568 ix = 0;
1569 }
1570 break;
1571
1572 case BOOLEAN_TYPE:
1573 /* Cache false or true. */
1574 limit = 2;
1575 if (wi::ltu_p (t, 2))
1576 ix = TREE_INT_CST_ELT (t, 0);
1577 break;
1578
1579 case INTEGER_TYPE:
1580 case OFFSET_TYPE:
1581 if (TYPE_UNSIGNED (type))
1582 {
1583 /* Cache 0..N */
1584 limit = INTEGER_SHARE_LIMIT;
1585
1586 /* This is a little hokey, but if the prec is smaller than
1587 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1588 obvious test will not get the correct answer. */
1589 if (prec < HOST_BITS_PER_WIDE_INT)
1590 {
1591 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1592 ix = tree_to_uhwi (t);
1593 }
1594 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1595 ix = tree_to_uhwi (t);
1596 }
1597 else
1598 {
1599 /* Cache -1..N */
1600 limit = INTEGER_SHARE_LIMIT + 1;
1601
1602 if (integer_minus_onep (t))
1603 ix = 0;
1604 else if (!wi::neg_p (t))
1605 {
1606 if (prec < HOST_BITS_PER_WIDE_INT)
1607 {
1608 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1609 ix = tree_to_shwi (t) + 1;
1610 }
1611 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1612 ix = tree_to_shwi (t) + 1;
1613 }
1614 }
1615 break;
1616
1617 case ENUMERAL_TYPE:
1618 break;
1619
1620 default:
1621 gcc_unreachable ();
1622 }
1623
1624 if (ix >= 0)
1625 {
1626 /* Look for it in the type's vector of small shared ints. */
1627 if (!TYPE_CACHED_VALUES_P (type))
1628 {
1629 TYPE_CACHED_VALUES_P (type) = 1;
1630 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1631 }
1632
1633 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1634 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1635 }
1636 else
1637 {
1638 /* Use the cache of larger shared ints. */
1639 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1640 /* If there is already an entry for the number verify it's the
1641 same. */
1642 if (*slot)
1643 gcc_assert (wi::eq_p (tree (*slot), t));
1644 else
1645 /* Otherwise insert this one into the hash table. */
1646 *slot = t;
1647 }
1648 }
1649
1650
1651 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1652 and the rest are zeros. */
1653
1654 tree
1655 build_low_bits_mask (tree type, unsigned bits)
1656 {
1657 gcc_assert (bits <= TYPE_PRECISION (type));
1658
1659 return wide_int_to_tree (type, wi::mask (bits, false,
1660 TYPE_PRECISION (type)));
1661 }
1662
1663 /* Checks that X is an integer constant that can be expressed in (unsigned)
1664 HOST_WIDE_INT without loss of precision. */
1665
1666 bool
1667 cst_and_fits_in_hwi (const_tree x)
1668 {
1669 if (TREE_CODE (x) != INTEGER_CST)
1670 return false;
1671
1672 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1673 return false;
1674
1675 return TREE_INT_CST_NUNITS (x) == 1;
1676 }
1677
1678 /* Build a newly constructed VECTOR_CST node of length LEN. */
1679
1680 tree
1681 make_vector_stat (unsigned len MEM_STAT_DECL)
1682 {
1683 tree t;
1684 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1685
1686 record_node_allocation_statistics (VECTOR_CST, length);
1687
1688 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1689
1690 TREE_SET_CODE (t, VECTOR_CST);
1691 TREE_CONSTANT (t) = 1;
1692
1693 return t;
1694 }
1695
1696 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1697 are in a list pointed to by VALS. */
1698
1699 tree
1700 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1701 {
1702 int over = 0;
1703 unsigned cnt = 0;
1704 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1705 TREE_TYPE (v) = type;
1706
1707 /* Iterate through elements and check for overflow. */
1708 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1709 {
1710 tree value = vals[cnt];
1711
1712 VECTOR_CST_ELT (v, cnt) = value;
1713
1714 /* Don't crash if we get an address constant. */
1715 if (!CONSTANT_CLASS_P (value))
1716 continue;
1717
1718 over |= TREE_OVERFLOW (value);
1719 }
1720
1721 TREE_OVERFLOW (v) = over;
1722 return v;
1723 }
1724
1725 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1726 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1727
1728 tree
1729 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1730 {
1731 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1732 unsigned HOST_WIDE_INT idx;
1733 tree value;
1734
1735 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1736 vec[idx] = value;
1737 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1738 vec[idx] = build_zero_cst (TREE_TYPE (type));
1739
1740 return build_vector (type, vec);
1741 }
1742
1743 /* Build a vector of type VECTYPE where every element is set to SC. */
1744 tree
1745 build_vector_from_val (tree vectype, tree sc)
1746 {
1747 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1748
1749 if (sc == error_mark_node)
1750 return sc;
1751
1752 /* Verify that the vector type is suitable for SC. Note that there
1753 is some inconsistency in the type-system with respect to restrict
1754 qualifications of pointers. Vector types always have a main-variant
1755 element type and the qualification is applied to the vector-type.
1756 So TREE_TYPE (vector-type) does not return a properly qualified
1757 vector element-type. */
1758 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1759 TREE_TYPE (vectype)));
1760
1761 if (CONSTANT_CLASS_P (sc))
1762 {
1763 tree *v = XALLOCAVEC (tree, nunits);
1764 for (i = 0; i < nunits; ++i)
1765 v[i] = sc;
1766 return build_vector (vectype, v);
1767 }
1768 else
1769 {
1770 vec<constructor_elt, va_gc> *v;
1771 vec_alloc (v, nunits);
1772 for (i = 0; i < nunits; ++i)
1773 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1774 return build_constructor (vectype, v);
1775 }
1776 }
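/* Illustrative sketch: building a uniform vector from a scalar, e.g.

     tree elt = build_int_cst (TREE_TYPE (vectype), 1);
     tree ones = build_vector_from_val (vectype, elt);

   yields a VECTOR_CST when the scalar is a constant and a CONSTRUCTOR
   otherwise, matching the two branches above; VECTYPE stands for any
   vector type the caller already has.  */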
1777
1778 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1779 are in the vec pointed to by VALS. */
1780 tree
1781 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1782 {
1783 tree c = make_node (CONSTRUCTOR);
1784 unsigned int i;
1785 constructor_elt *elt;
1786 bool constant_p = true;
1787 bool side_effects_p = false;
1788
1789 TREE_TYPE (c) = type;
1790 CONSTRUCTOR_ELTS (c) = vals;
1791
1792 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1793 {
1794 /* Mostly ctors will have elts that don't have side-effects, so
1795 the usual case is to scan all the elements. Hence a single
1796 loop for both const and side effects, rather than one loop
1797 each (with early outs). */
1798 if (!TREE_CONSTANT (elt->value))
1799 constant_p = false;
1800 if (TREE_SIDE_EFFECTS (elt->value))
1801 side_effects_p = true;
1802 }
1803
1804 TREE_SIDE_EFFECTS (c) = side_effects_p;
1805 TREE_CONSTANT (c) = constant_p;
1806
1807 return c;
1808 }
1809
1810 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1811 INDEX and VALUE. */
1812 tree
1813 build_constructor_single (tree type, tree index, tree value)
1814 {
1815 vec<constructor_elt, va_gc> *v;
1816 constructor_elt elt = {index, value};
1817
1818 vec_alloc (v, 1);
1819 v->quick_push (elt);
1820
1821 return build_constructor (type, v);
1822 }
1823
1824
1825 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1826 are in a list pointed to by VALS. */
1827 tree
1828 build_constructor_from_list (tree type, tree vals)
1829 {
1830 tree t;
1831 vec<constructor_elt, va_gc> *v = NULL;
1832
1833 if (vals)
1834 {
1835 vec_alloc (v, list_length (vals));
1836 for (t = vals; t; t = TREE_CHAIN (t))
1837 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1838 }
1839
1840 return build_constructor (type, v);
1841 }
1842
1843 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1844 of elements, provided as index/value pairs. */
1845
1846 tree
1847 build_constructor_va (tree type, int nelts, ...)
1848 {
1849 vec<constructor_elt, va_gc> *v = NULL;
1850 va_list p;
1851
1852 va_start (p, nelts);
1853 vec_alloc (v, nelts);
1854 while (nelts--)
1855 {
1856 tree index = va_arg (p, tree);
1857 tree value = va_arg (p, tree);
1858 CONSTRUCTOR_APPEND_ELT (v, index, value);
1859 }
1860 va_end (p);
1861 return build_constructor (type, v);
1862 }
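/* Illustrative sketch: the varargs interface above alternates index and
   value arguments, so a two-element initializer can be built as

     tree ctor = build_constructor_va (rec_type, 2,
                                       field1, val1,
                                       field2, val2);

   where REC_TYPE, FIELD1/FIELD2 and VAL1/VAL2 are trees the caller
   already has (e.g. FIELD_DECLs and their initial values).  */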
1863
1864 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1865
1866 tree
1867 build_fixed (tree type, FIXED_VALUE_TYPE f)
1868 {
1869 tree v;
1870 FIXED_VALUE_TYPE *fp;
1871
1872 v = make_node (FIXED_CST);
1873 fp = ggc_alloc<fixed_value> ();
1874 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1875
1876 TREE_TYPE (v) = type;
1877 TREE_FIXED_CST_PTR (v) = fp;
1878 return v;
1879 }
1880
1881 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1882
1883 tree
1884 build_real (tree type, REAL_VALUE_TYPE d)
1885 {
1886 tree v;
1887 REAL_VALUE_TYPE *dp;
1888 int overflow = 0;
1889
1890 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1891 Consider doing it via real_convert now. */
1892
1893 v = make_node (REAL_CST);
1894 dp = ggc_alloc<real_value> ();
1895 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1896
1897 TREE_TYPE (v) = type;
1898 TREE_REAL_CST_PTR (v) = dp;
1899 TREE_OVERFLOW (v) = overflow;
1900 return v;
1901 }
1902
1903 /* Like build_real, but first truncate D to the type. */
1904
1905 tree
1906 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1907 {
1908 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1909 }
1910
1911 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
1912 converted to the floating-point format of type TYPE. */
1913
1914 REAL_VALUE_TYPE
1915 real_value_from_int_cst (const_tree type, const_tree i)
1916 {
1917 REAL_VALUE_TYPE d;
1918
1919 /* Clear all bits of the real value type so that we can later do
1920 bitwise comparisons to see if two values are the same. */
1921 memset (&d, 0, sizeof d);
1922
1923 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1924 TYPE_SIGN (TREE_TYPE (i)));
1925 return d;
1926 }
1927
1928 /* Given a tree representing an integer constant I, return a tree
1929 representing the same value as a floating-point constant of type TYPE. */
1930
1931 tree
1932 build_real_from_int_cst (tree type, const_tree i)
1933 {
1934 tree v;
1935 int overflow = TREE_OVERFLOW (i);
1936
1937 v = build_real (type, real_value_from_int_cst (type, i));
1938
1939 TREE_OVERFLOW (v) |= overflow;
1940 return v;
1941 }
1942
1943 /* Return a newly constructed STRING_CST node whose value is
1944 the LEN characters at STR.
1945 Note that for a C string literal, LEN should include the trailing NUL.
1946 The TREE_TYPE is not initialized. */
1947
1948 tree
1949 build_string (int len, const char *str)
1950 {
1951 tree s;
1952 size_t length;
1953
1954 /* Do not waste bytes provided by padding of struct tree_string. */
1955 length = len + offsetof (struct tree_string, str) + 1;
1956
1957 record_node_allocation_statistics (STRING_CST, length);
1958
1959 s = (tree) ggc_internal_alloc (length);
1960
1961 memset (s, 0, sizeof (struct tree_typed));
1962 TREE_SET_CODE (s, STRING_CST);
1963 TREE_CONSTANT (s) = 1;
1964 TREE_STRING_LENGTH (s) = len;
1965 memcpy (s->string.str, str, len);
1966 s->string.str[len] = '\0';
1967
1968 return s;
1969 }
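/* Note (illustrative): for a C string literal LEN must count the
   trailing NUL, e.g.

     tree s = build_string (6, "hello");

   copies six bytes (including the literal's NUL) and adds one more
   terminating NUL above; the caller still sets TREE_TYPE (s) itself,
   typically via the front end's string-type machinery.  */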
1970
1971 /* Return a newly constructed COMPLEX_CST node whose value is
1972 specified by the real and imaginary parts REAL and IMAG.
1973 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1974 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1975
1976 tree
1977 build_complex (tree type, tree real, tree imag)
1978 {
1979 tree t = make_node (COMPLEX_CST);
1980
1981 TREE_REALPART (t) = real;
1982 TREE_IMAGPART (t) = imag;
1983 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1984 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1985 return t;
1986 }
1987
1988 /* Build a complex (inf +- 0i), such as for the result of cproj.
1989 TYPE is the complex tree type of the result. If NEG is true, the
1990 imaginary zero is negative. */
1991
1992 tree
1993 build_complex_inf (tree type, bool neg)
1994 {
1995 REAL_VALUE_TYPE rinf, rzero = dconst0;
1996
1997 real_inf (&rinf);
1998 rzero.sign = neg;
1999 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2000 build_real (TREE_TYPE (type), rzero));
2001 }
2002
2003 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2004 element is set to 1. In particular, this is 1 + i for complex types. */
2005
2006 tree
2007 build_each_one_cst (tree type)
2008 {
2009 if (TREE_CODE (type) == COMPLEX_TYPE)
2010 {
2011 tree scalar = build_one_cst (TREE_TYPE (type));
2012 return build_complex (type, scalar, scalar);
2013 }
2014 else
2015 return build_one_cst (type);
2016 }
2017
2018 /* Return a constant of arithmetic type TYPE which is the
2019 multiplicative identity of the set TYPE. */
2020
2021 tree
2022 build_one_cst (tree type)
2023 {
2024 switch (TREE_CODE (type))
2025 {
2026 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2027 case POINTER_TYPE: case REFERENCE_TYPE:
2028 case OFFSET_TYPE:
2029 return build_int_cst (type, 1);
2030
2031 case REAL_TYPE:
2032 return build_real (type, dconst1);
2033
2034 case FIXED_POINT_TYPE:
2035 /* We can only generate 1 for accum types. */
2036 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2037 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2038
2039 case VECTOR_TYPE:
2040 {
2041 tree scalar = build_one_cst (TREE_TYPE (type));
2042
2043 return build_vector_from_val (type, scalar);
2044 }
2045
2046 case COMPLEX_TYPE:
2047 return build_complex (type,
2048 build_one_cst (TREE_TYPE (type)),
2049 build_zero_cst (TREE_TYPE (type)));
2050
2051 default:
2052 gcc_unreachable ();
2053 }
2054 }
2055
2056 /* Return an integer of type TYPE containing all 1's in as much precision as
2057 it contains, or a complex or vector whose subparts are such integers. */
2058
2059 tree
2060 build_all_ones_cst (tree type)
2061 {
2062 if (TREE_CODE (type) == COMPLEX_TYPE)
2063 {
2064 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2065 return build_complex (type, scalar, scalar);
2066 }
2067 else
2068 return build_minus_one_cst (type);
2069 }
2070
2071 /* Return a constant of arithmetic type TYPE which is the
2072 opposite of the multiplicative identity of the set TYPE. */
2073
2074 tree
2075 build_minus_one_cst (tree type)
2076 {
2077 switch (TREE_CODE (type))
2078 {
2079 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2080 case POINTER_TYPE: case REFERENCE_TYPE:
2081 case OFFSET_TYPE:
2082 return build_int_cst (type, -1);
2083
2084 case REAL_TYPE:
2085 return build_real (type, dconstm1);
2086
2087 case FIXED_POINT_TYPE:
2088 /* We can only generate -1 for accum types. */
2089 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2090 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2091 TYPE_MODE (type)));
2092
2093 case VECTOR_TYPE:
2094 {
2095 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2096
2097 return build_vector_from_val (type, scalar);
2098 }
2099
2100 case COMPLEX_TYPE:
2101 return build_complex (type,
2102 build_minus_one_cst (TREE_TYPE (type)),
2103 build_zero_cst (TREE_TYPE (type)));
2104
2105 default:
2106 gcc_unreachable ();
2107 }
2108 }
2109
2110 /* Build 0 constant of type TYPE. This is used by constructor folding
2111 and thus the constant should be represented in memory by
2112 zero(es). */
2113
2114 tree
2115 build_zero_cst (tree type)
2116 {
2117 switch (TREE_CODE (type))
2118 {
2119 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2120 case POINTER_TYPE: case REFERENCE_TYPE:
2121 case OFFSET_TYPE: case NULLPTR_TYPE:
2122 return build_int_cst (type, 0);
2123
2124 case REAL_TYPE:
2125 return build_real (type, dconst0);
2126
2127 case FIXED_POINT_TYPE:
2128 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2129
2130 case VECTOR_TYPE:
2131 {
2132 tree scalar = build_zero_cst (TREE_TYPE (type));
2133
2134 return build_vector_from_val (type, scalar);
2135 }
2136
2137 case COMPLEX_TYPE:
2138 {
2139 tree zero = build_zero_cst (TREE_TYPE (type));
2140
2141 return build_complex (type, zero, zero);
2142 }
2143
2144 default:
2145 if (!AGGREGATE_TYPE_P (type))
2146 return fold_convert (type, integer_zero_node);
2147 return build_constructor (type, NULL);
2148 }
2149 }
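
/* Illustrative sketches, not part of the original source, of the
   identity-constant helpers above:

     build_one_cst (integer_type_node)        -- INTEGER_CST 1
     build_minus_one_cst (integer_type_node)  -- INTEGER_CST -1
     build_zero_cst (double_type_node)        -- REAL_CST 0.0
     build_zero_cst (some_record_type)        -- empty CONSTRUCTOR

   where some_record_type stands for any aggregate type.  */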
2150
2151
2152 /* Build a BINFO with LEN language slots. */
2153
2154 tree
2155 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2156 {
2157 tree t;
2158 size_t length = (offsetof (struct tree_binfo, base_binfos)
2159 + vec<tree, va_gc>::embedded_size (base_binfos));
2160
2161 record_node_allocation_statistics (TREE_BINFO, length);
2162
2163 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2164
2165 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2166
2167 TREE_SET_CODE (t, TREE_BINFO);
2168
2169 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2170
2171 return t;
2172 }
2173
2174 /* Create a CASE_LABEL_EXPR tree node and return it. */
2175
2176 tree
2177 build_case_label (tree low_value, tree high_value, tree label_decl)
2178 {
2179 tree t = make_node (CASE_LABEL_EXPR);
2180
2181 TREE_TYPE (t) = void_type_node;
2182 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2183
2184 CASE_LOW (t) = low_value;
2185 CASE_HIGH (t) = high_value;
2186 CASE_LABEL (t) = label_decl;
2187 CASE_CHAIN (t) = NULL_TREE;
2188
2189 return t;
2190 }
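
/* An illustrative sketch, not part of the original source: the node for
   "case 3:" branching to LAB, a LABEL_DECL built elsewhere:

     tree t = build_case_label (build_int_cst (integer_type_node, 3),
                                NULL_TREE, lab);

   A NULL_TREE CASE_HIGH means a single value rather than a range, and a
   NULL_TREE CASE_LOW marks the default label.  */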
2191
2192 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2193 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2194 The latter determines the length of the HOST_WIDE_INT vector. */
2195
2196 tree
2197 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2198 {
2199 tree t;
2200 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2201 + sizeof (struct tree_int_cst));
2202
2203 gcc_assert (len);
2204 record_node_allocation_statistics (INTEGER_CST, length);
2205
2206 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2207
2208 TREE_SET_CODE (t, INTEGER_CST);
2209 TREE_INT_CST_NUNITS (t) = len;
2210 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2211 /* to_offset can only be applied to trees that are offset_int-sized
2212 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2213 must be exactly the precision of offset_int and so LEN is correct. */
2214 if (ext_len <= OFFSET_INT_ELTS)
2215 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2216 else
2217 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2218
2219 TREE_CONSTANT (t) = 1;
2220
2221 return t;
2222 }
2223
2224 /* Build a newly constructed TREE_VEC node of length LEN. */
2225
2226 tree
2227 make_tree_vec_stat (int len MEM_STAT_DECL)
2228 {
2229 tree t;
2230 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2231
2232 record_node_allocation_statistics (TREE_VEC, length);
2233
2234 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2235
2236 TREE_SET_CODE (t, TREE_VEC);
2237 TREE_VEC_LENGTH (t) = len;
2238
2239 return t;
2240 }
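
/* An illustrative sketch, not part of the original source: a
   three-element TREE_VEC filled via TREE_VEC_ELT:

     tree v = make_tree_vec (3);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     TREE_VEC_ELT (v, 1) = integer_one_node;
     TREE_VEC_ELT (v, 2) = integer_minus_one_node;

   grow_tree_vec below extends such a vector to a larger length.  */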
2241
2242 /* Grow a TREE_VEC node to new length LEN. */
2243
2244 tree
2245 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2246 {
2247 gcc_assert (TREE_CODE (v) == TREE_VEC);
2248
2249 int oldlen = TREE_VEC_LENGTH (v);
2250 gcc_assert (len > oldlen);
2251
2252 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2253 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2254
2255 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2256
2257 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2258
2259 TREE_VEC_LENGTH (v) = len;
2260
2261 return v;
2262 }
2263 \f
2264 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2265 fixed, and scalar, complex or vector. */
2266
2267 int
2268 zerop (const_tree expr)
2269 {
2270 return (integer_zerop (expr)
2271 || real_zerop (expr)
2272 || fixed_zerop (expr));
2273 }
2274
2275 /* Return 1 if EXPR is the integer constant zero or a complex constant
2276 of zero. */
2277
2278 int
2279 integer_zerop (const_tree expr)
2280 {
2281 STRIP_NOPS (expr);
2282
2283 switch (TREE_CODE (expr))
2284 {
2285 case INTEGER_CST:
2286 return wi::eq_p (expr, 0);
2287 case COMPLEX_CST:
2288 return (integer_zerop (TREE_REALPART (expr))
2289 && integer_zerop (TREE_IMAGPART (expr)));
2290 case VECTOR_CST:
2291 {
2292 unsigned i;
2293 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2294 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2295 return false;
2296 return true;
2297 }
2298 default:
2299 return false;
2300 }
2301 }
2302
2303 /* Return 1 if EXPR is the integer constant one or the corresponding
2304 complex constant. */
2305
2306 int
2307 integer_onep (const_tree expr)
2308 {
2309 STRIP_NOPS (expr);
2310
2311 switch (TREE_CODE (expr))
2312 {
2313 case INTEGER_CST:
2314 return wi::eq_p (wi::to_widest (expr), 1);
2315 case COMPLEX_CST:
2316 return (integer_onep (TREE_REALPART (expr))
2317 && integer_zerop (TREE_IMAGPART (expr)));
2318 case VECTOR_CST:
2319 {
2320 unsigned i;
2321 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2322 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2323 return false;
2324 return true;
2325 }
2326 default:
2327 return false;
2328 }
2329 }
2330
2331 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2332 return 1 if every piece is the integer constant one. */
2333
2334 int
2335 integer_each_onep (const_tree expr)
2336 {
2337 STRIP_NOPS (expr);
2338
2339 if (TREE_CODE (expr) == COMPLEX_CST)
2340 return (integer_onep (TREE_REALPART (expr))
2341 && integer_onep (TREE_IMAGPART (expr)));
2342 else
2343 return integer_onep (expr);
2344 }
2345
2346 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2347 it contains, or a complex or vector whose subparts are such integers. */
2348
2349 int
2350 integer_all_onesp (const_tree expr)
2351 {
2352 STRIP_NOPS (expr);
2353
2354 if (TREE_CODE (expr) == COMPLEX_CST
2355 && integer_all_onesp (TREE_REALPART (expr))
2356 && integer_all_onesp (TREE_IMAGPART (expr)))
2357 return 1;
2358
2359 else if (TREE_CODE (expr) == VECTOR_CST)
2360 {
2361 unsigned i;
2362 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2363 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2364 return 0;
2365 return 1;
2366 }
2367
2368 else if (TREE_CODE (expr) != INTEGER_CST)
2369 return 0;
2370
2371 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2372 }
2373
2374 /* Return 1 if EXPR is the integer constant minus one. */
2375
2376 int
2377 integer_minus_onep (const_tree expr)
2378 {
2379 STRIP_NOPS (expr);
2380
2381 if (TREE_CODE (expr) == COMPLEX_CST)
2382 return (integer_all_onesp (TREE_REALPART (expr))
2383 && integer_zerop (TREE_IMAGPART (expr)));
2384 else
2385 return integer_all_onesp (expr);
2386 }
2387
2388 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2389 one bit on). */
2390
2391 int
2392 integer_pow2p (const_tree expr)
2393 {
2394 STRIP_NOPS (expr);
2395
2396 if (TREE_CODE (expr) == COMPLEX_CST
2397 && integer_pow2p (TREE_REALPART (expr))
2398 && integer_zerop (TREE_IMAGPART (expr)))
2399 return 1;
2400
2401 if (TREE_CODE (expr) != INTEGER_CST)
2402 return 0;
2403
2404 return wi::popcount (expr) == 1;
2405 }
2406
2407 /* Return 1 if EXPR is an integer constant other than zero or a
2408 complex constant other than zero. */
2409
2410 int
2411 integer_nonzerop (const_tree expr)
2412 {
2413 STRIP_NOPS (expr);
2414
2415 return ((TREE_CODE (expr) == INTEGER_CST
2416 && !wi::eq_p (expr, 0))
2417 || (TREE_CODE (expr) == COMPLEX_CST
2418 && (integer_nonzerop (TREE_REALPART (expr))
2419 || integer_nonzerop (TREE_IMAGPART (expr)))));
2420 }
2421
2422 /* Return 1 if EXPR is the integer constant one. For a vector,
2423 return 1 if every element is the integer constant minus one
2424 (representing the value TRUE). */
2425
2426 int
2427 integer_truep (const_tree expr)
2428 {
2429 STRIP_NOPS (expr);
2430
2431 if (TREE_CODE (expr) == VECTOR_CST)
2432 return integer_all_onesp (expr);
2433 return integer_onep (expr);
2434 }
2435
2436 /* Return 1 if EXPR is the fixed-point constant zero. */
2437
2438 int
2439 fixed_zerop (const_tree expr)
2440 {
2441 return (TREE_CODE (expr) == FIXED_CST
2442 && TREE_FIXED_CST (expr).data.is_zero ());
2443 }
2444
2445 /* Return the base-2 logarithm of the value of a tree node known
2446 to be a power of two. */
2447
2448 int
2449 tree_log2 (const_tree expr)
2450 {
2451 STRIP_NOPS (expr);
2452
2453 if (TREE_CODE (expr) == COMPLEX_CST)
2454 return tree_log2 (TREE_REALPART (expr));
2455
2456 return wi::exact_log2 (expr);
2457 }
2458
2459 /* Similar, but return the largest integer Y such that 2 ** Y is less
2460 than or equal to EXPR. */
2461
2462 int
2463 tree_floor_log2 (const_tree expr)
2464 {
2465 STRIP_NOPS (expr);
2466
2467 if (TREE_CODE (expr) == COMPLEX_CST)
2468 return tree_log2 (TREE_REALPART (expr));
2469
2470 return wi::floor_log2 (expr);
2471 }
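
/* Illustrative values, not part of the original source, for the
   power-of-two helpers above on INTEGER_CSTs:

     integer_pow2p (build_int_cst (integer_type_node, 8))    -- 1
     tree_log2 (build_int_cst (integer_type_node, 8))        -- 3
     tree_floor_log2 (build_int_cst (integer_type_node, 10)) -- 3

   tree_log2 relies on wi::exact_log2 and is only meaningful when the
   operand really is a power of two.  */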
2472
2473 /* Return number of known trailing zero bits in EXPR, or, if the value of
2474 EXPR is known to be zero, the precision of its type. */
2475
2476 unsigned int
2477 tree_ctz (const_tree expr)
2478 {
2479 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2480 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2481 return 0;
2482
2483 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2484 switch (TREE_CODE (expr))
2485 {
2486 case INTEGER_CST:
2487 ret1 = wi::ctz (expr);
2488 return MIN (ret1, prec);
2489 case SSA_NAME:
2490 ret1 = wi::ctz (get_nonzero_bits (expr));
2491 return MIN (ret1, prec);
2492 case PLUS_EXPR:
2493 case MINUS_EXPR:
2494 case BIT_IOR_EXPR:
2495 case BIT_XOR_EXPR:
2496 case MIN_EXPR:
2497 case MAX_EXPR:
2498 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2499 if (ret1 == 0)
2500 return ret1;
2501 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2502 return MIN (ret1, ret2);
2503 case POINTER_PLUS_EXPR:
2504 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2505 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2506 /* The second operand is sizetype, which could in theory be
2507 wider than the pointer's precision. Make sure we never
2508 return more than prec. */
2509 ret2 = MIN (ret2, prec);
2510 return MIN (ret1, ret2);
2511 case BIT_AND_EXPR:
2512 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2513 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2514 return MAX (ret1, ret2);
2515 case MULT_EXPR:
2516 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2517 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2518 return MIN (ret1 + ret2, prec);
2519 case LSHIFT_EXPR:
2520 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2521 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2522 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2523 {
2524 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2525 return MIN (ret1 + ret2, prec);
2526 }
2527 return ret1;
2528 case RSHIFT_EXPR:
2529 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2530 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2531 {
2532 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2533 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2534 if (ret1 > ret2)
2535 return ret1 - ret2;
2536 }
2537 return 0;
2538 case TRUNC_DIV_EXPR:
2539 case CEIL_DIV_EXPR:
2540 case FLOOR_DIV_EXPR:
2541 case ROUND_DIV_EXPR:
2542 case EXACT_DIV_EXPR:
2543 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2544 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2545 {
2546 int l = tree_log2 (TREE_OPERAND (expr, 1));
2547 if (l >= 0)
2548 {
2549 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2550 ret2 = l;
2551 if (ret1 > ret2)
2552 return ret1 - ret2;
2553 }
2554 }
2555 return 0;
2556 CASE_CONVERT:
2557 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2558 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2559 ret1 = prec;
2560 return MIN (ret1, prec);
2561 case SAVE_EXPR:
2562 return tree_ctz (TREE_OPERAND (expr, 0));
2563 case COND_EXPR:
2564 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2565 if (ret1 == 0)
2566 return 0;
2567 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2568 return MIN (ret1, ret2);
2569 case COMPOUND_EXPR:
2570 return tree_ctz (TREE_OPERAND (expr, 1));
2571 case ADDR_EXPR:
2572 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2573 if (ret1 > BITS_PER_UNIT)
2574 {
2575 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2576 return MIN (ret1, prec);
2577 }
2578 return 0;
2579 default:
2580 return 0;
2581 }
2582 }
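
/* An illustrative sketch, not part of the original source, of how the
   recursion above combines: assuming X is an SSA_NAME with no recorded
   nonzero-bits information,

     tree_ctz (X * 8)        -- 3, from the MULT_EXPR case (0 + 3)
     tree_ctz (X + 1)        -- 0, from the PLUS_EXPR case
     tree_ctz ((X * 8) & 16) -- 4, since BIT_AND_EXPR takes the MAX.  */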
2583
2584 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2585 decimal float constants, so don't return 1 for them. */
2586
2587 int
2588 real_zerop (const_tree expr)
2589 {
2590 STRIP_NOPS (expr);
2591
2592 switch (TREE_CODE (expr))
2593 {
2594 case REAL_CST:
2595 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2596 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2597 case COMPLEX_CST:
2598 return real_zerop (TREE_REALPART (expr))
2599 && real_zerop (TREE_IMAGPART (expr));
2600 case VECTOR_CST:
2601 {
2602 unsigned i;
2603 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2604 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2605 return false;
2606 return true;
2607 }
2608 default:
2609 return false;
2610 }
2611 }
2612
2613 /* Return 1 if EXPR is the real constant one in real or complex form.
2614 Trailing zeroes matter for decimal float constants, so don't return
2615 1 for them. */
2616
2617 int
2618 real_onep (const_tree expr)
2619 {
2620 STRIP_NOPS (expr);
2621
2622 switch (TREE_CODE (expr))
2623 {
2624 case REAL_CST:
2625 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2626 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2627 case COMPLEX_CST:
2628 return real_onep (TREE_REALPART (expr))
2629 && real_zerop (TREE_IMAGPART (expr));
2630 case VECTOR_CST:
2631 {
2632 unsigned i;
2633 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2634 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2635 return false;
2636 return true;
2637 }
2638 default:
2639 return false;
2640 }
2641 }
2642
2643 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2644 matter for decimal float constants, so don't return 1 for them. */
2645
2646 int
2647 real_minus_onep (const_tree expr)
2648 {
2649 STRIP_NOPS (expr);
2650
2651 switch (TREE_CODE (expr))
2652 {
2653 case REAL_CST:
2654 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2655 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2656 case COMPLEX_CST:
2657 return real_minus_onep (TREE_REALPART (expr))
2658 && real_zerop (TREE_IMAGPART (expr));
2659 case VECTOR_CST:
2660 {
2661 unsigned i;
2662 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2663 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2664 return false;
2665 return true;
2666 }
2667 default:
2668 return false;
2669 }
2670 }
2671
2672 /* Nonzero if EXP is a constant or a cast of a constant. */
2673
2674 int
2675 really_constant_p (const_tree exp)
2676 {
2677 /* This is not quite the same as STRIP_NOPS. It does more. */
2678 while (CONVERT_EXPR_P (exp)
2679 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2680 exp = TREE_OPERAND (exp, 0);
2681 return TREE_CONSTANT (exp);
2682 }
2683 \f
2684 /* Return first list element whose TREE_VALUE is ELEM.
2685 Return 0 if ELEM is not in LIST. */
2686
2687 tree
2688 value_member (tree elem, tree list)
2689 {
2690 while (list)
2691 {
2692 if (elem == TREE_VALUE (list))
2693 return list;
2694 list = TREE_CHAIN (list);
2695 }
2696 return NULL_TREE;
2697 }
2698
2699 /* Return first list element whose TREE_PURPOSE is ELEM.
2700 Return 0 if ELEM is not in LIST. */
2701
2702 tree
2703 purpose_member (const_tree elem, tree list)
2704 {
2705 while (list)
2706 {
2707 if (elem == TREE_PURPOSE (list))
2708 return list;
2709 list = TREE_CHAIN (list);
2710 }
2711 return NULL_TREE;
2712 }
2713
2714 /* Return true if ELEM is in V. */
2715
2716 bool
2717 vec_member (const_tree elem, vec<tree, va_gc> *v)
2718 {
2719 unsigned ix;
2720 tree t;
2721 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2722 if (elem == t)
2723 return true;
2724 return false;
2725 }
2726
2727 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2728 NULL_TREE. */
2729
2730 tree
2731 chain_index (int idx, tree chain)
2732 {
2733 for (; chain && idx > 0; --idx)
2734 chain = TREE_CHAIN (chain);
2735 return chain;
2736 }
2737
2738 /* Return nonzero if ELEM is part of the chain CHAIN. */
2739
2740 int
2741 chain_member (const_tree elem, const_tree chain)
2742 {
2743 while (chain)
2744 {
2745 if (elem == chain)
2746 return 1;
2747 chain = DECL_CHAIN (chain);
2748 }
2749
2750 return 0;
2751 }
2752
2753 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2754 We expect a null pointer to mark the end of the chain.
2755 This is the Lisp primitive `length'. */
2756
2757 int
2758 list_length (const_tree t)
2759 {
2760 const_tree p = t;
2761 #ifdef ENABLE_TREE_CHECKING
2762 const_tree q = t;
2763 #endif
2764 int len = 0;
2765
2766 while (p)
2767 {
2768 p = TREE_CHAIN (p);
2769 #ifdef ENABLE_TREE_CHECKING
2770 if (len % 2)
2771 q = TREE_CHAIN (q);
2772 gcc_assert (p != q);
2773 #endif
2774 len++;
2775 }
2776
2777 return len;
2778 }
2779
2780 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2781 UNION_TYPE TYPE, or NULL_TREE if none. */
2782
2783 tree
2784 first_field (const_tree type)
2785 {
2786 tree t = TYPE_FIELDS (type);
2787 while (t && TREE_CODE (t) != FIELD_DECL)
2788 t = TREE_CHAIN (t);
2789 return t;
2790 }
2791
2792 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2793 by modifying the last node in chain 1 to point to chain 2.
2794 This is the Lisp primitive `nconc'. */
2795
2796 tree
2797 chainon (tree op1, tree op2)
2798 {
2799 tree t1;
2800
2801 if (!op1)
2802 return op2;
2803 if (!op2)
2804 return op1;
2805
2806 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2807 continue;
2808 TREE_CHAIN (t1) = op2;
2809
2810 #ifdef ENABLE_TREE_CHECKING
2811 {
2812 tree t2;
2813 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2814 gcc_assert (t2 != t1);
2815 }
2816 #endif
2817
2818 return op1;
2819 }
2820
2821 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2822
2823 tree
2824 tree_last (tree chain)
2825 {
2826 tree next;
2827 if (chain)
2828 while ((next = TREE_CHAIN (chain)))
2829 chain = next;
2830 return chain;
2831 }
2832
2833 /* Reverse the order of elements in the chain T,
2834 and return the new head of the chain (old last element). */
2835
2836 tree
2837 nreverse (tree t)
2838 {
2839 tree prev = 0, decl, next;
2840 for (decl = t; decl; decl = next)
2841 {
2842 /* We shouldn't be using this function to reverse BLOCK chains; we
2843 have blocks_nreverse for that. */
2844 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2845 next = TREE_CHAIN (decl);
2846 TREE_CHAIN (decl) = prev;
2847 prev = decl;
2848 }
2849 return prev;
2850 }
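
/* An illustrative sketch, not part of the original source, combining the
   chain primitives above with tree_cons and build_tree_list defined
   below:

     tree l = tree_cons (NULL_TREE, integer_zero_node,
                         tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
     l = chainon (l, build_tree_list (NULL_TREE, integer_minus_one_node));
     l = nreverse (l);

   After the chainon the values are 0, 1, -1 in that order; nreverse
   leaves -1 first, and list_length (l) is 3 throughout.  */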
2851 \f
2852 /* Return a newly created TREE_LIST node whose
2853 purpose and value fields are PARM and VALUE. */
2854
2855 tree
2856 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2857 {
2858 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2859 TREE_PURPOSE (t) = parm;
2860 TREE_VALUE (t) = value;
2861 return t;
2862 }
2863
2864 /* Build a chain of TREE_LIST nodes from a vector. */
2865
2866 tree
2867 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2868 {
2869 tree ret = NULL_TREE;
2870 tree *pp = &ret;
2871 unsigned int i;
2872 tree t;
2873 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2874 {
2875 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2876 pp = &TREE_CHAIN (*pp);
2877 }
2878 return ret;
2879 }
2880
2881 /* Return a newly created TREE_LIST node whose
2882 purpose and value fields are PURPOSE and VALUE
2883 and whose TREE_CHAIN is CHAIN. */
2884
2885 tree
2886 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2887 {
2888 tree node;
2889
2890 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2891 memset (node, 0, sizeof (struct tree_common));
2892
2893 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2894
2895 TREE_SET_CODE (node, TREE_LIST);
2896 TREE_CHAIN (node) = chain;
2897 TREE_PURPOSE (node) = purpose;
2898 TREE_VALUE (node) = value;
2899 return node;
2900 }
2901
2902 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2903 trees. */
2904
2905 vec<tree, va_gc> *
2906 ctor_to_vec (tree ctor)
2907 {
2908 vec<tree, va_gc> *vec;
2909 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2910 unsigned int ix;
2911 tree val;
2912
2913 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2914 vec->quick_push (val);
2915
2916 return vec;
2917 }
2918 \f
2919 /* Return the size nominally occupied by an object of type TYPE
2920 when it resides in memory. The value is measured in units of bytes,
2921 and its data type is that normally used for type sizes
2922 (which is the first type created by make_signed_type or
2923 make_unsigned_type). */
2924
2925 tree
2926 size_in_bytes (const_tree type)
2927 {
2928 tree t;
2929
2930 if (type == error_mark_node)
2931 return integer_zero_node;
2932
2933 type = TYPE_MAIN_VARIANT (type);
2934 t = TYPE_SIZE_UNIT (type);
2935
2936 if (t == 0)
2937 {
2938 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2939 return size_zero_node;
2940 }
2941
2942 return t;
2943 }
2944
2945 /* Return the size of TYPE (in bytes) as a wide integer
2946 or return -1 if the size can vary or is larger than an integer. */
2947
2948 HOST_WIDE_INT
2949 int_size_in_bytes (const_tree type)
2950 {
2951 tree t;
2952
2953 if (type == error_mark_node)
2954 return 0;
2955
2956 type = TYPE_MAIN_VARIANT (type);
2957 t = TYPE_SIZE_UNIT (type);
2958
2959 if (t && tree_fits_uhwi_p (t))
2960 return TREE_INT_CST_LOW (t);
2961 else
2962 return -1;
2963 }
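
/* Illustrative values, not part of the original source, assuming a
   target where int is 32 bits wide:

     size_in_bytes (integer_type_node)      -- the sizetype constant 4
     int_size_in_bytes (integer_type_node)  -- 4

   For an incomplete or variable-sized type, int_size_in_bytes returns
   -1 instead.  */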
2964
2965 /* Return the maximum size of TYPE (in bytes) as a wide integer
2966 or return -1 if the size can vary or is larger than an integer. */
2967
2968 HOST_WIDE_INT
2969 max_int_size_in_bytes (const_tree type)
2970 {
2971 HOST_WIDE_INT size = -1;
2972 tree size_tree;
2973
2974 /* If this is an array type, check for a possible MAX_SIZE attached. */
2975
2976 if (TREE_CODE (type) == ARRAY_TYPE)
2977 {
2978 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2979
2980 if (size_tree && tree_fits_uhwi_p (size_tree))
2981 size = tree_to_uhwi (size_tree);
2982 }
2983
2984 /* If we still haven't been able to get a size, see if the language
2985 can compute a maximum size. */
2986
2987 if (size == -1)
2988 {
2989 size_tree = lang_hooks.types.max_size (type);
2990
2991 if (size_tree && tree_fits_uhwi_p (size_tree))
2992 size = tree_to_uhwi (size_tree);
2993 }
2994
2995 return size;
2996 }
2997 \f
2998 /* Return the bit position of FIELD, in bits from the start of the record.
2999 This is a tree of type bitsizetype. */
3000
3001 tree
3002 bit_position (const_tree field)
3003 {
3004 return bit_from_pos (DECL_FIELD_OFFSET (field),
3005 DECL_FIELD_BIT_OFFSET (field));
3006 }
3007 \f
3008 /* Return the byte position of FIELD, in bytes from the start of the record.
3009 This is a tree of type sizetype. */
3010
3011 tree
3012 byte_position (const_tree field)
3013 {
3014 return byte_from_pos (DECL_FIELD_OFFSET (field),
3015 DECL_FIELD_BIT_OFFSET (field));
3016 }
3017
3018 /* Likewise, but return as an integer. It must be representable in
3019 that way (since it could be a signed value, we don't have the
3020 option of returning -1 like int_size_in_bytes can). */
3021
3022 HOST_WIDE_INT
3023 int_byte_position (const_tree field)
3024 {
3025 return tree_to_shwi (byte_position (field));
3026 }
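
/* An illustrative sketch, not part of the original source: for the
   second field of  struct { char c; int i; }  on a target that aligns
   int to 4 bytes,

     byte_position (field)     -- the sizetype constant 4
     bit_position (field)      -- the bitsizetype constant 32
     int_byte_position (field) -- 4.  */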
3027 \f
3028 /* Return the strictest alignment, in bits, that T is known to have. */
3029
3030 unsigned int
3031 expr_align (const_tree t)
3032 {
3033 unsigned int align0, align1;
3034
3035 switch (TREE_CODE (t))
3036 {
3037 CASE_CONVERT: case NON_LVALUE_EXPR:
3038 /* If we have conversions, we know that the alignment of the
3039 object must meet each of the alignments of the types. */
3040 align0 = expr_align (TREE_OPERAND (t, 0));
3041 align1 = TYPE_ALIGN (TREE_TYPE (t));
3042 return MAX (align0, align1);
3043
3044 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3045 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3046 case CLEANUP_POINT_EXPR:
3047 /* These don't change the alignment of an object. */
3048 return expr_align (TREE_OPERAND (t, 0));
3049
3050 case COND_EXPR:
3051 /* The best we can do is say that the alignment is the least aligned
3052 of the two arms. */
3053 align0 = expr_align (TREE_OPERAND (t, 1));
3054 align1 = expr_align (TREE_OPERAND (t, 2));
3055 return MIN (align0, align1);
3056
3057 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3058 meaningfully; it's always 1. */
3059 case LABEL_DECL: case CONST_DECL:
3060 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3061 case FUNCTION_DECL:
3062 gcc_assert (DECL_ALIGN (t) != 0);
3063 return DECL_ALIGN (t);
3064
3065 default:
3066 break;
3067 }
3068
3069 /* Otherwise take the alignment from that of the type. */
3070 return TYPE_ALIGN (TREE_TYPE (t));
3071 }
3072 \f
3073 /* Return, as a tree node, the number of elements for TYPE (which is an
3074 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3075
3076 tree
3077 array_type_nelts (const_tree type)
3078 {
3079 tree index_type, min, max;
3080
3081 /* If they did it with unspecified bounds, then we should have already
3082 given an error about it before we got here. */
3083 if (! TYPE_DOMAIN (type))
3084 return error_mark_node;
3085
3086 index_type = TYPE_DOMAIN (type);
3087 min = TYPE_MIN_VALUE (index_type);
3088 max = TYPE_MAX_VALUE (index_type);
3089
3090 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3091 if (!max)
3092 return error_mark_node;
3093
3094 return (integer_zerop (min)
3095 ? max
3096 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3097 }
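
/* An illustrative sketch, not part of the original source: for the type
   corresponding to int[10],

     tree domain = build_index_type (size_int (9));
     tree atype  = build_array_type (integer_type_node, domain);
     tree nelts  = array_type_nelts (atype);

   NELTS is the INTEGER_CST 9, i.e. the element count minus one.  */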
3098 \f
3099 /* If arg is static -- a reference to an object in static storage -- then
3100 return the object. This is not the same as the C meaning of `static'.
3101 If arg isn't static, return NULL. */
3102
3103 tree
3104 staticp (tree arg)
3105 {
3106 switch (TREE_CODE (arg))
3107 {
3108 case FUNCTION_DECL:
3109 /* Nested functions are static, even though taking their address will
3110 involve a trampoline as we unnest the nested function and create
3111 the trampoline on the tree level. */
3112 return arg;
3113
3114 case VAR_DECL:
3115 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3116 && ! DECL_THREAD_LOCAL_P (arg)
3117 && ! DECL_DLLIMPORT_P (arg)
3118 ? arg : NULL);
3119
3120 case CONST_DECL:
3121 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3122 ? arg : NULL);
3123
3124 case CONSTRUCTOR:
3125 return TREE_STATIC (arg) ? arg : NULL;
3126
3127 case LABEL_DECL:
3128 case STRING_CST:
3129 return arg;
3130
3131 case COMPONENT_REF:
3132 /* If the thing being referenced is not a field, then it is
3133 something language specific. */
3134 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3135
3136 /* If we are referencing a bitfield, we can't evaluate an
3137 ADDR_EXPR at compile time and so it isn't a constant. */
3138 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3139 return NULL;
3140
3141 return staticp (TREE_OPERAND (arg, 0));
3142
3143 case BIT_FIELD_REF:
3144 return NULL;
3145
3146 case INDIRECT_REF:
3147 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3148
3149 case ARRAY_REF:
3150 case ARRAY_RANGE_REF:
3151 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3152 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3153 return staticp (TREE_OPERAND (arg, 0));
3154 else
3155 return NULL;
3156
3157 case COMPOUND_LITERAL_EXPR:
3158 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3159
3160 default:
3161 return NULL;
3162 }
3163 }
3164
3165 \f
3166
3167
3168 /* Return whether OP is a DECL whose address is function-invariant. */
3169
3170 bool
3171 decl_address_invariant_p (const_tree op)
3172 {
3173 /* The conditions below are slightly less strict than the one in
3174 staticp. */
3175
3176 switch (TREE_CODE (op))
3177 {
3178 case PARM_DECL:
3179 case RESULT_DECL:
3180 case LABEL_DECL:
3181 case FUNCTION_DECL:
3182 return true;
3183
3184 case VAR_DECL:
3185 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3186 || DECL_THREAD_LOCAL_P (op)
3187 || DECL_CONTEXT (op) == current_function_decl
3188 || decl_function_context (op) == current_function_decl)
3189 return true;
3190 break;
3191
3192 case CONST_DECL:
3193 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3194 || decl_function_context (op) == current_function_decl)
3195 return true;
3196 break;
3197
3198 default:
3199 break;
3200 }
3201
3202 return false;
3203 }
3204
3205 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3206
3207 bool
3208 decl_address_ip_invariant_p (const_tree op)
3209 {
3210 /* The conditions below are slightly less strict than the one in
3211 staticp. */
3212
3213 switch (TREE_CODE (op))
3214 {
3215 case LABEL_DECL:
3216 case FUNCTION_DECL:
3217 case STRING_CST:
3218 return true;
3219
3220 case VAR_DECL:
3221 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3222 && !DECL_DLLIMPORT_P (op))
3223 || DECL_THREAD_LOCAL_P (op))
3224 return true;
3225 break;
3226
3227 case CONST_DECL:
3228 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3229 return true;
3230 break;
3231
3232 default:
3233 break;
3234 }
3235
3236 return false;
3237 }
3238
3239
3240 /* Return true if T is function-invariant (internal function, does
3241 not handle arithmetic; that's handled in skip_simple_arithmetic and
3242 tree_invariant_p). */
3243
3244 static bool tree_invariant_p (tree t);
3245
3246 static bool
3247 tree_invariant_p_1 (tree t)
3248 {
3249 tree op;
3250
3251 if (TREE_CONSTANT (t)
3252 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3253 return true;
3254
3255 switch (TREE_CODE (t))
3256 {
3257 case SAVE_EXPR:
3258 return true;
3259
3260 case ADDR_EXPR:
3261 op = TREE_OPERAND (t, 0);
3262 while (handled_component_p (op))
3263 {
3264 switch (TREE_CODE (op))
3265 {
3266 case ARRAY_REF:
3267 case ARRAY_RANGE_REF:
3268 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3269 || TREE_OPERAND (op, 2) != NULL_TREE
3270 || TREE_OPERAND (op, 3) != NULL_TREE)
3271 return false;
3272 break;
3273
3274 case COMPONENT_REF:
3275 if (TREE_OPERAND (op, 2) != NULL_TREE)
3276 return false;
3277 break;
3278
3279 default:;
3280 }
3281 op = TREE_OPERAND (op, 0);
3282 }
3283
3284 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3285
3286 default:
3287 break;
3288 }
3289
3290 return false;
3291 }
3292
3293 /* Return true if T is function-invariant. */
3294
3295 static bool
3296 tree_invariant_p (tree t)
3297 {
3298 tree inner = skip_simple_arithmetic (t);
3299 return tree_invariant_p_1 (inner);
3300 }
3301
3302 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3303 Do this to any expression which may be used in more than one place,
3304 but must be evaluated only once.
3305
3306 Normally, expand_expr would reevaluate the expression each time.
3307 Calling save_expr produces something that is evaluated and recorded
3308 the first time expand_expr is called on it. Subsequent calls to
3309 expand_expr just reuse the recorded value.
3310
3311 The call to expand_expr that generates code that actually computes
3312 the value is the first call *at compile time*. Subsequent calls
3313 *at compile time* generate code to use the saved value.
3314 This produces a correct result provided that *at run time* control
3315 always flows through the insns made by the first expand_expr
3316 before reaching the other places where the save_expr was evaluated.
3317 You, the caller of save_expr, must make sure this is so.
3318
3319 Constants, and certain read-only nodes, are returned with no
3320 SAVE_EXPR because that is safe. Expressions containing placeholders
3321 are not touched; see tree.def for an explanation of what these
3322 are used for. */
3323
3324 tree
3325 save_expr (tree expr)
3326 {
3327 tree t = fold (expr);
3328 tree inner;
3329
3330 /* If the tree evaluates to a constant, then we don't want to hide that
3331 fact (i.e. this allows further folding, and direct checks for constants).
3332 However, a read-only object that has side effects cannot be bypassed.
3333 Since it is no problem to reevaluate literals, we just return the
3334 literal node. */
3335 inner = skip_simple_arithmetic (t);
3336 if (TREE_CODE (inner) == ERROR_MARK)
3337 return inner;
3338
3339 if (tree_invariant_p_1 (inner))
3340 return t;
3341
3342 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3343 it means that the size or offset of some field of an object depends on
3344 the value within another field.
3345
3346 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3347 and some variable since it would then need to be both evaluated once and
3348 evaluated more than once. Front-ends must assure this case cannot
3349 happen by surrounding any such subexpressions in their own SAVE_EXPR
3350 and forcing evaluation at the proper time. */
3351 if (contains_placeholder_p (inner))
3352 return t;
3353
3354 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3355 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3356
3357 /* This expression might be placed ahead of a jump to ensure that the
3358 value was computed on both sides of the jump. So make sure it isn't
3359 eliminated as dead. */
3360 TREE_SIDE_EFFECTS (t) = 1;
3361 return t;
3362 }
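
/* An illustrative sketch, not part of the original source: a front end
   that needs to use EXP twice without evaluating its side effects twice
   would write something like

     exp = save_expr (exp);
     tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (exp), exp, exp);

   Constants and other invariants come back from save_expr unwrapped, as
   described above.  */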
3363
3364 /* Look inside EXPR into any simple arithmetic operations. Return the
3365 outermost non-arithmetic or non-invariant node. */
3366
3367 tree
3368 skip_simple_arithmetic (tree expr)
3369 {
3370 /* We don't care about whether this can be used as an lvalue in this
3371 context. */
3372 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3373 expr = TREE_OPERAND (expr, 0);
3374
3375 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3376 a constant, it will be more efficient to not make another SAVE_EXPR since
3377 it will allow better simplification and GCSE will be able to merge the
3378 computations if they actually occur. */
3379 while (true)
3380 {
3381 if (UNARY_CLASS_P (expr))
3382 expr = TREE_OPERAND (expr, 0);
3383 else if (BINARY_CLASS_P (expr))
3384 {
3385 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3386 expr = TREE_OPERAND (expr, 0);
3387 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3388 expr = TREE_OPERAND (expr, 1);
3389 else
3390 break;
3391 }
3392 else
3393 break;
3394 }
3395
3396 return expr;
3397 }
3398
3399 /* Look inside EXPR into simple arithmetic operations involving constants.
3400 Return the outermost non-arithmetic or non-constant node. */
3401
3402 tree
3403 skip_simple_constant_arithmetic (tree expr)
3404 {
3405 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3406 expr = TREE_OPERAND (expr, 0);
3407
3408 while (true)
3409 {
3410 if (UNARY_CLASS_P (expr))
3411 expr = TREE_OPERAND (expr, 0);
3412 else if (BINARY_CLASS_P (expr))
3413 {
3414 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3415 expr = TREE_OPERAND (expr, 0);
3416 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3417 expr = TREE_OPERAND (expr, 1);
3418 else
3419 break;
3420 }
3421 else
3422 break;
3423 }
3424
3425 return expr;
3426 }
3427
3428 /* Return which tree structure is used by T. */
3429
3430 enum tree_node_structure_enum
3431 tree_node_structure (const_tree t)
3432 {
3433 const enum tree_code code = TREE_CODE (t);
3434 return tree_node_structure_for_code (code);
3435 }
3436
3437 /* Set various status flags when building a CALL_EXPR object T. */
3438
3439 static void
3440 process_call_operands (tree t)
3441 {
3442 bool side_effects = TREE_SIDE_EFFECTS (t);
3443 bool read_only = false;
3444 int i = call_expr_flags (t);
3445
3446 /* Calls have side-effects, except those to const or pure functions. */
3447 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3448 side_effects = true;
3449 /* Propagate TREE_READONLY of arguments for const functions. */
3450 if (i & ECF_CONST)
3451 read_only = true;
3452
3453 if (!side_effects || read_only)
3454 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3455 {
3456 tree op = TREE_OPERAND (t, i);
3457 if (op && TREE_SIDE_EFFECTS (op))
3458 side_effects = true;
3459 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3460 read_only = false;
3461 }
3462
3463 TREE_SIDE_EFFECTS (t) = side_effects;
3464 TREE_READONLY (t) = read_only;
3465 }
3466 \f
3467 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3468 size or offset that depends on a field within a record. */
3469
3470 bool
3471 contains_placeholder_p (const_tree exp)
3472 {
3473 enum tree_code code;
3474
3475 if (!exp)
3476 return 0;
3477
3478 code = TREE_CODE (exp);
3479 if (code == PLACEHOLDER_EXPR)
3480 return 1;
3481
3482 switch (TREE_CODE_CLASS (code))
3483 {
3484 case tcc_reference:
3485 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3486 position computations since they will be converted into a
3487 WITH_RECORD_EXPR involving the reference, which we assume
3488 here will be valid. */
3489 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3490
3491 case tcc_exceptional:
3492 if (code == TREE_LIST)
3493 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3494 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3495 break;
3496
3497 case tcc_unary:
3498 case tcc_binary:
3499 case tcc_comparison:
3500 case tcc_expression:
3501 switch (code)
3502 {
3503 case COMPOUND_EXPR:
3504 /* Ignoring the first operand isn't quite right, but works best. */
3505 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3506
3507 case COND_EXPR:
3508 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3509 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3510 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3511
3512 case SAVE_EXPR:
3513 /* The save_expr function never wraps anything containing
3514 a PLACEHOLDER_EXPR. */
3515 return 0;
3516
3517 default:
3518 break;
3519 }
3520
3521 switch (TREE_CODE_LENGTH (code))
3522 {
3523 case 1:
3524 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3525 case 2:
3526 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3527 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3528 default:
3529 return 0;
3530 }
3531
3532 case tcc_vl_exp:
3533 switch (code)
3534 {
3535 case CALL_EXPR:
3536 {
3537 const_tree arg;
3538 const_call_expr_arg_iterator iter;
3539 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3540 if (CONTAINS_PLACEHOLDER_P (arg))
3541 return 1;
3542 return 0;
3543 }
3544 default:
3545 return 0;
3546 }
3547
3548 default:
3549 return 0;
3550 }
3551 return 0;
3552 }
3553
3554 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3555 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3556 field positions. */
3557
3558 static bool
3559 type_contains_placeholder_1 (const_tree type)
3560 {
3561 /* If the size contains a placeholder or the parent type (the component
3562 type in the case of arrays) involves a placeholder, this type does. */
3563 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3564 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3565 || (!POINTER_TYPE_P (type)
3566 && TREE_TYPE (type)
3567 && type_contains_placeholder_p (TREE_TYPE (type))))
3568 return true;
3569
3570 /* Now do type-specific checks. Note that the last part of the check above
3571 greatly limits what we have to do below. */
3572 switch (TREE_CODE (type))
3573 {
3574 case VOID_TYPE:
3575 case POINTER_BOUNDS_TYPE:
3576 case COMPLEX_TYPE:
3577 case ENUMERAL_TYPE:
3578 case BOOLEAN_TYPE:
3579 case POINTER_TYPE:
3580 case OFFSET_TYPE:
3581 case REFERENCE_TYPE:
3582 case METHOD_TYPE:
3583 case FUNCTION_TYPE:
3584 case VECTOR_TYPE:
3585 case NULLPTR_TYPE:
3586 return false;
3587
3588 case INTEGER_TYPE:
3589 case REAL_TYPE:
3590 case FIXED_POINT_TYPE:
3591 /* Here we just check the bounds. */
3592 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3593 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3594
3595 case ARRAY_TYPE:
3596 /* We have already checked the component type above, so just check the
3597 domain type. */
3598 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3599
3600 case RECORD_TYPE:
3601 case UNION_TYPE:
3602 case QUAL_UNION_TYPE:
3603 {
3604 tree field;
3605
3606 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3607 if (TREE_CODE (field) == FIELD_DECL
3608 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3609 || (TREE_CODE (type) == QUAL_UNION_TYPE
3610 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3611 || type_contains_placeholder_p (TREE_TYPE (field))))
3612 return true;
3613
3614 return false;
3615 }
3616
3617 default:
3618 gcc_unreachable ();
3619 }
3620 }
3621
3622 /* Wrapper around above function used to cache its result. */
3623
3624 bool
3625 type_contains_placeholder_p (tree type)
3626 {
3627 bool result;
3628
3629 /* If the contains_placeholder_bits field has been initialized,
3630 then we know the answer. */
3631 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3632 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3633
3634 /* Indicate that we've seen this type node, and the answer is false.
3635 This is what we want to return if we run into recursion via fields. */
3636 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3637
3638 /* Compute the real value. */
3639 result = type_contains_placeholder_1 (type);
3640
3641 /* Store the real value. */
3642 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3643
3644 return result;
3645 }
3646 \f
3647 /* Push tree EXP onto vector QUEUE if it is not already present. */
3648
3649 static void
3650 push_without_duplicates (tree exp, vec<tree> *queue)
3651 {
3652 unsigned int i;
3653 tree iter;
3654
3655 FOR_EACH_VEC_ELT (*queue, i, iter)
3656 if (simple_cst_equal (iter, exp) == 1)
3657 break;
3658
3659 if (!iter)
3660 queue->safe_push (exp);
3661 }
3662
3663 /* Given a tree EXP, find all occurrences of references to fields
3664 in a PLACEHOLDER_EXPR and place them in vector REFS without
3665 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3666 we assume here that EXP contains only arithmetic expressions
3667 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3668 argument list. */
3669
3670 void
3671 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3672 {
3673 enum tree_code code = TREE_CODE (exp);
3674 tree inner;
3675 int i;
3676
3677 /* We handle TREE_LIST and COMPONENT_REF separately. */
3678 if (code == TREE_LIST)
3679 {
3680 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3681 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3682 }
3683 else if (code == COMPONENT_REF)
3684 {
3685 for (inner = TREE_OPERAND (exp, 0);
3686 REFERENCE_CLASS_P (inner);
3687 inner = TREE_OPERAND (inner, 0))
3688 ;
3689
3690 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3691 push_without_duplicates (exp, refs);
3692 else
3693 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3694 }
3695 else
3696 switch (TREE_CODE_CLASS (code))
3697 {
3698 case tcc_constant:
3699 break;
3700
3701 case tcc_declaration:
3702 /* Variables allocated to static storage can stay. */
3703 if (!TREE_STATIC (exp))
3704 push_without_duplicates (exp, refs);
3705 break;
3706
3707 case tcc_expression:
3708 /* This is the pattern built in ada/make_aligning_type. */
3709 if (code == ADDR_EXPR
3710 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3711 {
3712 push_without_duplicates (exp, refs);
3713 break;
3714 }
3715
3716 /* Fall through... */
3717
3718 case tcc_exceptional:
3719 case tcc_unary:
3720 case tcc_binary:
3721 case tcc_comparison:
3722 case tcc_reference:
3723 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3724 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3725 break;
3726
3727 case tcc_vl_exp:
3728 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3729 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3730 break;
3731
3732 default:
3733 gcc_unreachable ();
3734 }
3735 }
3736
3737 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3738 return a tree with all occurrences of references to F in a
3739 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3740 CONST_DECLs. Note that we assume here that EXP contains only
3741 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3742 occurring only in their argument list. */
3743
3744 tree
3745 substitute_in_expr (tree exp, tree f, tree r)
3746 {
3747 enum tree_code code = TREE_CODE (exp);
3748 tree op0, op1, op2, op3;
3749 tree new_tree;
3750
3751 /* We handle TREE_LIST and COMPONENT_REF separately. */
3752 if (code == TREE_LIST)
3753 {
3754 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3755 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3756 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3757 return exp;
3758
3759 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3760 }
3761 else if (code == COMPONENT_REF)
3762 {
3763 tree inner;
3764
3765 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3766 and it is the right field, replace it with R. */
3767 for (inner = TREE_OPERAND (exp, 0);
3768 REFERENCE_CLASS_P (inner);
3769 inner = TREE_OPERAND (inner, 0))
3770 ;
3771
3772 /* The field. */
3773 op1 = TREE_OPERAND (exp, 1);
3774
3775 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3776 return r;
3777
3778 /* If this expression hasn't been completed yet, leave it alone. */
3779 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3780 return exp;
3781
3782 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3783 if (op0 == TREE_OPERAND (exp, 0))
3784 return exp;
3785
3786 new_tree
3787 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3788 }
3789 else
3790 switch (TREE_CODE_CLASS (code))
3791 {
3792 case tcc_constant:
3793 return exp;
3794
3795 case tcc_declaration:
3796 if (exp == f)
3797 return r;
3798 else
3799 return exp;
3800
3801 case tcc_expression:
3802 if (exp == f)
3803 return r;
3804
3805 /* Fall through... */
3806
3807 case tcc_exceptional:
3808 case tcc_unary:
3809 case tcc_binary:
3810 case tcc_comparison:
3811 case tcc_reference:
3812 switch (TREE_CODE_LENGTH (code))
3813 {
3814 case 0:
3815 return exp;
3816
3817 case 1:
3818 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3819 if (op0 == TREE_OPERAND (exp, 0))
3820 return exp;
3821
3822 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3823 break;
3824
3825 case 2:
3826 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3827 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3828
3829 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3830 return exp;
3831
3832 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3833 break;
3834
3835 case 3:
3836 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3837 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3838 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3839
3840 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3841 && op2 == TREE_OPERAND (exp, 2))
3842 return exp;
3843
3844 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3845 break;
3846
3847 case 4:
3848 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3849 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3850 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3851 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3852
3853 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3854 && op2 == TREE_OPERAND (exp, 2)
3855 && op3 == TREE_OPERAND (exp, 3))
3856 return exp;
3857
3858 new_tree
3859 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3860 break;
3861
3862 default:
3863 gcc_unreachable ();
3864 }
3865 break;
3866
3867 case tcc_vl_exp:
3868 {
3869 int i;
3870
3871 new_tree = NULL_TREE;
3872
3873 /* If we are trying to replace F with a constant, inline back
3874 functions which do nothing other than compute a value from
3875 the arguments they are passed. This makes it possible to
3876 fold partially or entirely the replacement expression. */
3877 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3878 {
3879 tree t = maybe_inline_call_in_expr (exp);
3880 if (t)
3881 return SUBSTITUTE_IN_EXPR (t, f, r);
3882 }
3883
3884 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3885 {
3886 tree op = TREE_OPERAND (exp, i);
3887 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3888 if (new_op != op)
3889 {
3890 if (!new_tree)
3891 new_tree = copy_node (exp);
3892 TREE_OPERAND (new_tree, i) = new_op;
3893 }
3894 }
3895
3896 if (new_tree)
3897 {
3898 new_tree = fold (new_tree);
3899 if (TREE_CODE (new_tree) == CALL_EXPR)
3900 process_call_operands (new_tree);
3901 }
3902 else
3903 return exp;
3904 }
3905 break;
3906
3907 default:
3908 gcc_unreachable ();
3909 }
3910
3911 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3912
3913 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3914 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3915
3916 return new_tree;
3917 }
3918
3919 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3920 for it within OBJ, a tree that is an object or a chain of references. */
3921
3922 tree
3923 substitute_placeholder_in_expr (tree exp, tree obj)
3924 {
3925 enum tree_code code = TREE_CODE (exp);
3926 tree op0, op1, op2, op3;
3927 tree new_tree;
3928
3929 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3930 in the chain of OBJ. */
3931 if (code == PLACEHOLDER_EXPR)
3932 {
3933 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3934 tree elt;
3935
3936 for (elt = obj; elt != 0;
3937 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3938 || TREE_CODE (elt) == COND_EXPR)
3939 ? TREE_OPERAND (elt, 1)
3940 : (REFERENCE_CLASS_P (elt)
3941 || UNARY_CLASS_P (elt)
3942 || BINARY_CLASS_P (elt)
3943 || VL_EXP_CLASS_P (elt)
3944 || EXPRESSION_CLASS_P (elt))
3945 ? TREE_OPERAND (elt, 0) : 0))
3946 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3947 return elt;
3948
3949 for (elt = obj; elt != 0;
3950 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3951 || TREE_CODE (elt) == COND_EXPR)
3952 ? TREE_OPERAND (elt, 1)
3953 : (REFERENCE_CLASS_P (elt)
3954 || UNARY_CLASS_P (elt)
3955 || BINARY_CLASS_P (elt)
3956 || VL_EXP_CLASS_P (elt)
3957 || EXPRESSION_CLASS_P (elt))
3958 ? TREE_OPERAND (elt, 0) : 0))
3959 if (POINTER_TYPE_P (TREE_TYPE (elt))
3960 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3961 == need_type))
3962 return fold_build1 (INDIRECT_REF, need_type, elt);
3963
3964 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3965 survives until RTL generation, there will be an error. */
3966 return exp;
3967 }
3968
3969 /* TREE_LIST is special because we need to look at TREE_VALUE
3970 and TREE_CHAIN, not TREE_OPERANDS. */
3971 else if (code == TREE_LIST)
3972 {
3973 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3974 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3975 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3976 return exp;
3977
3978 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3979 }
3980 else
3981 switch (TREE_CODE_CLASS (code))
3982 {
3983 case tcc_constant:
3984 case tcc_declaration:
3985 return exp;
3986
3987 case tcc_exceptional:
3988 case tcc_unary:
3989 case tcc_binary:
3990 case tcc_comparison:
3991 case tcc_expression:
3992 case tcc_reference:
3993 case tcc_statement:
3994 switch (TREE_CODE_LENGTH (code))
3995 {
3996 case 0:
3997 return exp;
3998
3999 case 1:
4000 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4001 if (op0 == TREE_OPERAND (exp, 0))
4002 return exp;
4003
4004 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4005 break;
4006
4007 case 2:
4008 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4009 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4010
4011 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4012 return exp;
4013
4014 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4015 break;
4016
4017 case 3:
4018 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4019 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4020 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4021
4022 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4023 && op2 == TREE_OPERAND (exp, 2))
4024 return exp;
4025
4026 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4027 break;
4028
4029 case 4:
4030 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4031 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4032 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4033 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4034
4035 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4036 && op2 == TREE_OPERAND (exp, 2)
4037 && op3 == TREE_OPERAND (exp, 3))
4038 return exp;
4039
4040 new_tree
4041 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4042 break;
4043
4044 default:
4045 gcc_unreachable ();
4046 }
4047 break;
4048
4049 case tcc_vl_exp:
4050 {
4051 int i;
4052
4053 new_tree = NULL_TREE;
4054
4055 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4056 {
4057 tree op = TREE_OPERAND (exp, i);
4058 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4059 if (new_op != op)
4060 {
4061 if (!new_tree)
4062 new_tree = copy_node (exp);
4063 TREE_OPERAND (new_tree, i) = new_op;
4064 }
4065 }
4066
4067 if (new_tree)
4068 {
4069 new_tree = fold (new_tree);
4070 if (TREE_CODE (new_tree) == CALL_EXPR)
4071 process_call_operands (new_tree);
4072 }
4073 else
4074 return exp;
4075 }
4076 break;
4077
4078 default:
4079 gcc_unreachable ();
4080 }
4081
4082 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4083
4084 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4085 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4086
4087 return new_tree;
4088 }
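
/* For illustration (a hypothetical use, with TYPE having a self-referential
   size and OBJ an object of that type):

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);

   Each PLACEHOLDER_EXPR inside TYPE_SIZE whose type matches OBJ (possibly
   through a COMPONENT_REF or dereference of OBJ, as located above) is
   replaced by OBJ, giving a size expression that can actually be
   evaluated.  */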
4089 \f
4090
4091 /* Subroutine of stabilize_reference; this is called for subtrees of
4092 references. Any expression with side-effects must be put in a SAVE_EXPR
4093 to ensure that it is only evaluated once.
4094
4095 We don't put SAVE_EXPR nodes around everything, because assigning very
4096 simple expressions to temporaries causes us to miss good opportunities
4097 for optimizations. Among other things, the opportunity to fold in the
4098 addition of a constant into an addressing mode often gets lost, e.g.
4099 "y[i+1] += x;". In general, we take the approach that we should not make
4100 an assignment unless we are forced into it - i.e., that any non-side effect
4101 operator should be allowed, and that cse should take care of coalescing
4102 multiple utterances of the same expression should that prove fruitful. */
4103
4104 static tree
4105 stabilize_reference_1 (tree e)
4106 {
4107 tree result;
4108 enum tree_code code = TREE_CODE (e);
4109
4110   /* We cannot ignore const expressions because the expression might be a
4111      reference to a const array whose index contains side-effects.  But we
4112      can ignore things that are actual constants or that have already been
4113      handled by this function.  */
4114
4115 if (tree_invariant_p (e))
4116 return e;
4117
4118 switch (TREE_CODE_CLASS (code))
4119 {
4120 case tcc_exceptional:
4121 case tcc_type:
4122 case tcc_declaration:
4123 case tcc_comparison:
4124 case tcc_statement:
4125 case tcc_expression:
4126 case tcc_reference:
4127 case tcc_vl_exp:
4128 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4129 so that it will only be evaluated once. */
4130 /* The reference (r) and comparison (<) classes could be handled as
4131 below, but it is generally faster to only evaluate them once. */
4132 if (TREE_SIDE_EFFECTS (e))
4133 return save_expr (e);
4134 return e;
4135
4136 case tcc_constant:
4137 /* Constants need no processing. In fact, we should never reach
4138 here. */
4139 return e;
4140
4141 case tcc_binary:
4142 /* Division is slow and tends to be compiled with jumps,
4143 especially the division by powers of 2 that is often
4144 found inside of an array reference. So do it just once. */
4145 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4146 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4147 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4148 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4149 return save_expr (e);
4150 /* Recursively stabilize each operand. */
4151 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4152 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4153 break;
4154
4155 case tcc_unary:
4156 /* Recursively stabilize each operand. */
4157 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4158 break;
4159
4160 default:
4161 gcc_unreachable ();
4162 }
4163
4164 TREE_TYPE (result) = TREE_TYPE (e);
4165 TREE_READONLY (result) = TREE_READONLY (e);
4166 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4167 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4168
4169 return result;
4170 }
4171
4172 /* Stabilize a reference so that we can use it any number of times
4173 without causing its operands to be evaluated more than once.
4174 Returns the stabilized reference. This works by means of save_expr,
4175 so see the caveats in the comments about save_expr.
4176
4177 Also allows conversion expressions whose operands are references.
4178 Any other kind of expression is returned unchanged. */
4179
4180 tree
4181 stabilize_reference (tree ref)
4182 {
4183 tree result;
4184 enum tree_code code = TREE_CODE (ref);
4185
4186 switch (code)
4187 {
4188 case VAR_DECL:
4189 case PARM_DECL:
4190 case RESULT_DECL:
4191 /* No action is needed in this case. */
4192 return ref;
4193
4194 CASE_CONVERT:
4195 case FLOAT_EXPR:
4196 case FIX_TRUNC_EXPR:
4197 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4198 break;
4199
4200 case INDIRECT_REF:
4201 result = build_nt (INDIRECT_REF,
4202 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4203 break;
4204
4205 case COMPONENT_REF:
4206 result = build_nt (COMPONENT_REF,
4207 stabilize_reference (TREE_OPERAND (ref, 0)),
4208 TREE_OPERAND (ref, 1), NULL_TREE);
4209 break;
4210
4211 case BIT_FIELD_REF:
4212 result = build_nt (BIT_FIELD_REF,
4213 stabilize_reference (TREE_OPERAND (ref, 0)),
4214 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4215 break;
4216
4217 case ARRAY_REF:
4218 result = build_nt (ARRAY_REF,
4219 stabilize_reference (TREE_OPERAND (ref, 0)),
4220 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4221 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4222 break;
4223
4224 case ARRAY_RANGE_REF:
4225 result = build_nt (ARRAY_RANGE_REF,
4226 stabilize_reference (TREE_OPERAND (ref, 0)),
4227 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4228 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4229 break;
4230
4231 case COMPOUND_EXPR:
4232 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4233 it wouldn't be ignored. This matters when dealing with
4234 volatiles. */
4235 return stabilize_reference_1 (ref);
4236
4237 /* If arg isn't a kind of lvalue we recognize, make no change.
4238 Caller should recognize the error for an invalid lvalue. */
4239 default:
4240 return ref;
4241
4242 case ERROR_MARK:
4243 return error_mark_node;
4244 }
4245
4246 TREE_TYPE (result) = TREE_TYPE (ref);
4247 TREE_READONLY (result) = TREE_READONLY (ref);
4248 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4249 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4250
4251 return result;
4252 }
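
/* For illustration (hypothetical front-end usage): when expanding a
   compound assignment such as "a[f ()] += 1", a front end can call

     tree ref = stabilize_reference (lhs);

   on the left-hand side.  The index f () has side effects, so
   stabilize_reference_1 wraps it in a SAVE_EXPR; REF can then be used
   both to read and to store while f is evaluated only once.  */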
4253 \f
4254 /* Low-level constructors for expressions. */
4255
4256 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4257 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4258
4259 void
4260 recompute_tree_invariant_for_addr_expr (tree t)
4261 {
4262 tree node;
4263 bool tc = true, se = false;
4264
4265 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4266
4267   /* We start out assuming this address is both invariant and constant and
4268      that it has no side effects.  Now go down any handled components and see if
4269 any of them involve offsets that are either non-constant or non-invariant.
4270 Also check for side-effects.
4271
4272 ??? Note that this code makes no attempt to deal with the case where
4273 taking the address of something causes a copy due to misalignment. */
4274
4275 #define UPDATE_FLAGS(NODE) \
4276 do { tree _node = (NODE); \
4277 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4278 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4279
4280 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4281 node = TREE_OPERAND (node, 0))
4282 {
4283 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4284 array reference (probably made temporarily by the G++ front end),
4285 so ignore all the operands. */
4286 if ((TREE_CODE (node) == ARRAY_REF
4287 || TREE_CODE (node) == ARRAY_RANGE_REF)
4288 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4289 {
4290 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4291 if (TREE_OPERAND (node, 2))
4292 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4293 if (TREE_OPERAND (node, 3))
4294 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4295 }
4296 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4297 FIELD_DECL, apparently. The G++ front end can put something else
4298 there, at least temporarily. */
4299 else if (TREE_CODE (node) == COMPONENT_REF
4300 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4301 {
4302 if (TREE_OPERAND (node, 2))
4303 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4304 }
4305 }
4306
4307 node = lang_hooks.expr_to_decl (node, &tc, &se);
4308
4309 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4310 the address, since &(*a)->b is a form of addition. If it's a constant, the
4311 address is constant too. If it's a decl, its address is constant if the
4312 decl is static. Everything else is not constant and, furthermore,
4313 taking the address of a volatile variable is not volatile. */
4314 if (TREE_CODE (node) == INDIRECT_REF
4315 || TREE_CODE (node) == MEM_REF)
4316 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4317 else if (CONSTANT_CLASS_P (node))
4318 ;
4319 else if (DECL_P (node))
4320 tc &= (staticp (node) != NULL_TREE);
4321 else
4322 {
4323 tc = false;
4324 se |= TREE_SIDE_EFFECTS (node);
4325 }
4326
4327
4328 TREE_CONSTANT (t) = tc;
4329 TREE_SIDE_EFFECTS (t) = se;
4330 #undef UPDATE_FLAGS
4331 }
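
/* Example of the resulting flags (illustrative): for an ADDR_EXPR such as
   &arr[i] where ARR has static storage, TREE_CONSTANT ends up set only if
   the index I is itself constant (a variable index clears TC via
   UPDATE_FLAGS); taking the address of an automatic variable is never
   constant, because staticp returns NULL_TREE for it.  */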
4332
4333 /* Build an expression of code CODE, data type TYPE, and operands as
4334 specified. Expressions and reference nodes can be created this way.
4335 Constants, decls, types and misc nodes cannot be.
4336
4337 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4338 enough for all extant tree codes. */
4339
4340 tree
4341 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4342 {
4343 tree t;
4344
4345 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4346
4347 t = make_node_stat (code PASS_MEM_STAT);
4348 TREE_TYPE (t) = tt;
4349
4350 return t;
4351 }
4352
4353 tree
4354 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4355 {
4356 int length = sizeof (struct tree_exp);
4357 tree t;
4358
4359 record_node_allocation_statistics (code, length);
4360
4361 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4362
4363 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4364
4365 memset (t, 0, sizeof (struct tree_common));
4366
4367 TREE_SET_CODE (t, code);
4368
4369 TREE_TYPE (t) = type;
4370 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4371 TREE_OPERAND (t, 0) = node;
4372 if (node && !TYPE_P (node))
4373 {
4374 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4375 TREE_READONLY (t) = TREE_READONLY (node);
4376 }
4377
4378 if (TREE_CODE_CLASS (code) == tcc_statement)
4379 TREE_SIDE_EFFECTS (t) = 1;
4380 else switch (code)
4381 {
4382 case VA_ARG_EXPR:
4383 /* All of these have side-effects, no matter what their
4384 operands are. */
4385 TREE_SIDE_EFFECTS (t) = 1;
4386 TREE_READONLY (t) = 0;
4387 break;
4388
4389 case INDIRECT_REF:
4390 /* Whether a dereference is readonly has nothing to do with whether
4391 its operand is readonly. */
4392 TREE_READONLY (t) = 0;
4393 break;
4394
4395 case ADDR_EXPR:
4396 if (node)
4397 recompute_tree_invariant_for_addr_expr (t);
4398 break;
4399
4400 default:
4401 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4402 && node && !TYPE_P (node)
4403 && TREE_CONSTANT (node))
4404 TREE_CONSTANT (t) = 1;
4405 if (TREE_CODE_CLASS (code) == tcc_reference
4406 && node && TREE_THIS_VOLATILE (node))
4407 TREE_THIS_VOLATILE (t) = 1;
4408 break;
4409 }
4410
4411 return t;
4412 }
4413
4414 #define PROCESS_ARG(N) \
4415 do { \
4416 TREE_OPERAND (t, N) = arg##N; \
4417     if (arg##N && !TYPE_P (arg##N))			\
4418 { \
4419 if (TREE_SIDE_EFFECTS (arg##N)) \
4420 side_effects = 1; \
4421 if (!TREE_READONLY (arg##N) \
4422 && !CONSTANT_CLASS_P (arg##N)) \
4423 (void) (read_only = 0); \
4424 if (!TREE_CONSTANT (arg##N)) \
4425 (void) (constant = 0); \
4426 } \
4427 } while (0)
4428
4429 tree
4430 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4431 {
4432 bool constant, read_only, side_effects;
4433 tree t;
4434
4435 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4436
4437 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4438 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4439 /* When sizetype precision doesn't match that of pointers
4440 we need to be able to build explicit extensions or truncations
4441 of the offset argument. */
4442 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4443 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4444 && TREE_CODE (arg1) == INTEGER_CST);
4445
4446 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4447 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4448 && ptrofftype_p (TREE_TYPE (arg1)));
4449
4450 t = make_node_stat (code PASS_MEM_STAT);
4451 TREE_TYPE (t) = tt;
4452
4453 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4454 result based on those same flags for the arguments. But if the
4455 arguments aren't really even `tree' expressions, we shouldn't be trying
4456 to do this. */
4457
4458 /* Expressions without side effects may be constant if their
4459 arguments are as well. */
4460 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4461 || TREE_CODE_CLASS (code) == tcc_binary);
4462 read_only = 1;
4463 side_effects = TREE_SIDE_EFFECTS (t);
4464
4465 PROCESS_ARG (0);
4466 PROCESS_ARG (1);
4467
4468 TREE_SIDE_EFFECTS (t) = side_effects;
4469 if (code == MEM_REF)
4470 {
4471 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4472 {
4473 tree o = TREE_OPERAND (arg0, 0);
4474 TREE_READONLY (t) = TREE_READONLY (o);
4475 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4476 }
4477 }
4478 else
4479 {
4480 TREE_READONLY (t) = read_only;
4481 TREE_CONSTANT (t) = constant;
4482 TREE_THIS_VOLATILE (t)
4483 = (TREE_CODE_CLASS (code) == tcc_reference
4484 && arg0 && TREE_THIS_VOLATILE (arg0));
4485 }
4486
4487 return t;
4488 }
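
/* A minimal illustration of the flag propagation above (hypothetical
   operands):

     tree sum = build2 (PLUS_EXPR, integer_type_node,
			integer_one_node, integer_zero_node);

   yields a node with TREE_CONSTANT set (PLUS_EXPR is tcc_binary and both
   operands are constants) and TREE_SIDE_EFFECTS clear.  For
   POINTER_PLUS_EXPR, note the assertion above: the second operand must
   have a ptrofftype_p type such as sizetype, not a pointer type.  */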
4489
4490
4491 tree
4492 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4493 tree arg2 MEM_STAT_DECL)
4494 {
4495 bool constant, read_only, side_effects;
4496 tree t;
4497
4498 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4499 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4500
4501 t = make_node_stat (code PASS_MEM_STAT);
4502 TREE_TYPE (t) = tt;
4503
4504 read_only = 1;
4505
4506 /* As a special exception, if COND_EXPR has NULL branches, we
4507 assume that it is a gimple statement and always consider
4508 it to have side effects. */
4509 if (code == COND_EXPR
4510 && tt == void_type_node
4511 && arg1 == NULL_TREE
4512 && arg2 == NULL_TREE)
4513 side_effects = true;
4514 else
4515 side_effects = TREE_SIDE_EFFECTS (t);
4516
4517 PROCESS_ARG (0);
4518 PROCESS_ARG (1);
4519 PROCESS_ARG (2);
4520
4521 if (code == COND_EXPR)
4522 TREE_READONLY (t) = read_only;
4523
4524 TREE_SIDE_EFFECTS (t) = side_effects;
4525 TREE_THIS_VOLATILE (t)
4526 = (TREE_CODE_CLASS (code) == tcc_reference
4527 && arg0 && TREE_THIS_VOLATILE (arg0));
4528
4529 return t;
4530 }
4531
4532 tree
4533 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4534 tree arg2, tree arg3 MEM_STAT_DECL)
4535 {
4536 bool constant, read_only, side_effects;
4537 tree t;
4538
4539 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4540
4541 t = make_node_stat (code PASS_MEM_STAT);
4542 TREE_TYPE (t) = tt;
4543
4544 side_effects = TREE_SIDE_EFFECTS (t);
4545
4546 PROCESS_ARG (0);
4547 PROCESS_ARG (1);
4548 PROCESS_ARG (2);
4549 PROCESS_ARG (3);
4550
4551 TREE_SIDE_EFFECTS (t) = side_effects;
4552 TREE_THIS_VOLATILE (t)
4553 = (TREE_CODE_CLASS (code) == tcc_reference
4554 && arg0 && TREE_THIS_VOLATILE (arg0));
4555
4556 return t;
4557 }
4558
4559 tree
4560 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4561 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4562 {
4563 bool constant, read_only, side_effects;
4564 tree t;
4565
4566 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4567
4568 t = make_node_stat (code PASS_MEM_STAT);
4569 TREE_TYPE (t) = tt;
4570
4571 side_effects = TREE_SIDE_EFFECTS (t);
4572
4573 PROCESS_ARG (0);
4574 PROCESS_ARG (1);
4575 PROCESS_ARG (2);
4576 PROCESS_ARG (3);
4577 PROCESS_ARG (4);
4578
4579 TREE_SIDE_EFFECTS (t) = side_effects;
4580 if (code == TARGET_MEM_REF)
4581 {
4582 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4583 {
4584 tree o = TREE_OPERAND (arg0, 0);
4585 TREE_READONLY (t) = TREE_READONLY (o);
4586 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4587 }
4588 }
4589 else
4590 TREE_THIS_VOLATILE (t)
4591 = (TREE_CODE_CLASS (code) == tcc_reference
4592 && arg0 && TREE_THIS_VOLATILE (arg0));
4593
4594 return t;
4595 }
4596
4597 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4598 on the pointer PTR. */
4599
4600 tree
4601 build_simple_mem_ref_loc (location_t loc, tree ptr)
4602 {
4603 HOST_WIDE_INT offset = 0;
4604 tree ptype = TREE_TYPE (ptr);
4605 tree tem;
4606 /* For convenience allow addresses that collapse to a simple base
4607 and offset. */
4608 if (TREE_CODE (ptr) == ADDR_EXPR
4609 && (handled_component_p (TREE_OPERAND (ptr, 0))
4610 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4611 {
4612 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4613 gcc_assert (ptr);
4614 ptr = build_fold_addr_expr (ptr);
4615 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4616 }
4617 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4618 ptr, build_int_cst (ptype, offset));
4619 SET_EXPR_LOCATION (tem, loc);
4620 return tem;
4621 }
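
/* For instance (hypothetical caller): given PTR of type "int *",

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   builds a MEM_REF of type "int" whose second operand is a zero offset
   of PTR's pointer type, i.e. the equivalent of a plain *PTR.  */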
4622
4623 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4624
4625 offset_int
4626 mem_ref_offset (const_tree t)
4627 {
4628 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4629 }
4630
4631 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4632 offsetted by OFFSET units. */
4633
4634 tree
4635 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4636 {
4637 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4638 build_fold_addr_expr (base),
4639 build_int_cst (ptr_type_node, offset));
4640 tree addr = build1 (ADDR_EXPR, type, ref);
4641 recompute_tree_invariant_for_addr_expr (addr);
4642 return addr;
4643 }
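
/* E.g. (illustrative): given a VAR_DECL VAR and the pointer type PTRTYPE
   of the desired result,

     tree addr = build_invariant_address (ptrtype, var, 4);

   returns an ADDR_EXPR of type PTRTYPE whose operand is a MEM_REF four
   bytes past &VAR; recompute_tree_invariant_for_addr_expr then marks the
   result TREE_CONSTANT if VAR has static storage.  */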
4644
4645 /* Similar except don't specify the TREE_TYPE
4646 and leave the TREE_SIDE_EFFECTS as 0.
4647 It is permissible for arguments to be null,
4648 or even garbage if their values do not matter. */
4649
4650 tree
4651 build_nt (enum tree_code code, ...)
4652 {
4653 tree t;
4654 int length;
4655 int i;
4656 va_list p;
4657
4658 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4659
4660 va_start (p, code);
4661
4662 t = make_node (code);
4663 length = TREE_CODE_LENGTH (code);
4664
4665 for (i = 0; i < length; i++)
4666 TREE_OPERAND (t, i) = va_arg (p, tree);
4667
4668 va_end (p);
4669 return t;
4670 }
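
/* Illustrative use (ARRAY and INDEX stand for previously built operand
   trees), mirroring stabilize_reference above:

     tree t = build_nt (ARRAY_REF, array, index, NULL_TREE, NULL_TREE);

   builds a bare four-operand node; the caller is then responsible for
   filling in TREE_TYPE and the relevant flag bits.  */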
4671
4672 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4673 tree vec. */
4674
4675 tree
4676 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4677 {
4678 tree ret, t;
4679 unsigned int ix;
4680
4681 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4682 CALL_EXPR_FN (ret) = fn;
4683 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4684 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4685 CALL_EXPR_ARG (ret, ix) = t;
4686 return ret;
4687 }
4688 \f
4689 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4690 We do NOT enter this node in any sort of symbol table.
4691
4692 LOC is the location of the decl.
4693
4694 layout_decl is used to set up the decl's storage layout.
4695 Other slots are initialized to 0 or null pointers. */
4696
4697 tree
4698 build_decl_stat (location_t loc, enum tree_code code, tree name,
4699 tree type MEM_STAT_DECL)
4700 {
4701 tree t;
4702
4703 t = make_node_stat (code PASS_MEM_STAT);
4704 DECL_SOURCE_LOCATION (t) = loc;
4705
4706 /* if (type == error_mark_node)
4707 type = integer_type_node; */
4708 /* That is not done, deliberately, so that having error_mark_node
4709 as the type can suppress useless errors in the use of this variable. */
4710
4711 DECL_NAME (t) = name;
4712 TREE_TYPE (t) = type;
4713
4714 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4715 layout_decl (t, 0);
4716
4717 return t;
4718 }
4719
4720 /* Builds and returns function declaration with NAME and TYPE. */
4721
4722 tree
4723 build_fn_decl (const char *name, tree type)
4724 {
4725 tree id = get_identifier (name);
4726 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4727
4728 DECL_EXTERNAL (decl) = 1;
4729 TREE_PUBLIC (decl) = 1;
4730 DECL_ARTIFICIAL (decl) = 1;
4731 TREE_NOTHROW (decl) = 1;
4732
4733 return decl;
4734 }
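
/* For example (a hypothetical helper, not one this file creates):

     tree fntype = build_function_type_list (void_type_node,
					      integer_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__example_runtime_helper", fntype);

   yields an external, public, artificial, nothrow FUNCTION_DECL of the
   kind used for implicit runtime helper calls.  */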
4735
4736 vec<tree, va_gc> *all_translation_units;
4737
4738 /* Builds a new translation-unit decl with name NAME, queues it in the
4739 global list of translation-unit decls and returns it. */
4740
4741 tree
4742 build_translation_unit_decl (tree name)
4743 {
4744 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4745 name, NULL_TREE);
4746 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4747 vec_safe_push (all_translation_units, tu);
4748 return tu;
4749 }
4750
4751 \f
4752 /* BLOCK nodes are used to represent the structure of binding contours
4753 and declarations, once those contours have been exited and their contents
4754 compiled. This information is used for outputting debugging info. */
4755
4756 tree
4757 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4758 {
4759 tree block = make_node (BLOCK);
4760
4761 BLOCK_VARS (block) = vars;
4762 BLOCK_SUBBLOCKS (block) = subblocks;
4763 BLOCK_SUPERCONTEXT (block) = supercontext;
4764 BLOCK_CHAIN (block) = chain;
4765 return block;
4766 }
4767
4768 \f
4769 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4770
4771 LOC is the location to use in tree T. */
4772
4773 void
4774 protected_set_expr_location (tree t, location_t loc)
4775 {
4776 if (CAN_HAVE_LOCATION_P (t))
4777 SET_EXPR_LOCATION (t, loc);
4778 }
4779 \f
4780 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4781 is ATTRIBUTE. */
4782
4783 tree
4784 build_decl_attribute_variant (tree ddecl, tree attribute)
4785 {
4786 DECL_ATTRIBUTES (ddecl) = attribute;
4787 return ddecl;
4788 }
4789
4790 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4791 is ATTRIBUTE and its qualifiers are QUALS.
4792
4793 Record such modified types already made so we don't make duplicates. */
4794
4795 tree
4796 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4797 {
4798 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4799 {
4800 inchash::hash hstate;
4801 tree ntype;
4802 int i;
4803 tree t;
4804 enum tree_code code = TREE_CODE (ttype);
4805
4806 /* Building a distinct copy of a tagged type is inappropriate; it
4807 causes breakage in code that expects there to be a one-to-one
4808 relationship between a struct and its fields.
4809 build_duplicate_type is another solution (as used in
4810 handle_transparent_union_attribute), but that doesn't play well
4811 with the stronger C++ type identity model. */
4812 if (TREE_CODE (ttype) == RECORD_TYPE
4813 || TREE_CODE (ttype) == UNION_TYPE
4814 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4815 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4816 {
4817 warning (OPT_Wattributes,
4818 "ignoring attributes applied to %qT after definition",
4819 TYPE_MAIN_VARIANT (ttype));
4820 return build_qualified_type (ttype, quals);
4821 }
4822
4823 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4824 ntype = build_distinct_type_copy (ttype);
4825
4826 TYPE_ATTRIBUTES (ntype) = attribute;
4827
4828 hstate.add_int (code);
4829 if (TREE_TYPE (ntype))
4830 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4831 attribute_hash_list (attribute, hstate);
4832
4833 switch (TREE_CODE (ntype))
4834 {
4835 case FUNCTION_TYPE:
4836 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4837 break;
4838 case ARRAY_TYPE:
4839 if (TYPE_DOMAIN (ntype))
4840 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4841 break;
4842 case INTEGER_TYPE:
4843 t = TYPE_MAX_VALUE (ntype);
4844 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4845 hstate.add_object (TREE_INT_CST_ELT (t, i));
4846 break;
4847 case REAL_TYPE:
4848 case FIXED_POINT_TYPE:
4849 {
4850 unsigned int precision = TYPE_PRECISION (ntype);
4851 hstate.add_object (precision);
4852 }
4853 break;
4854 default:
4855 break;
4856 }
4857
4858 ntype = type_hash_canon (hstate.end(), ntype);
4859
4860 /* If the target-dependent attributes make NTYPE different from
4861 its canonical type, we will need to use structural equality
4862 checks for this type. */
4863 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4864 || !comp_type_attributes (ntype, ttype))
4865 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4866 else if (TYPE_CANONICAL (ntype) == ntype)
4867 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4868
4869 ttype = build_qualified_type (ntype, quals);
4870 }
4871 else if (TYPE_QUALS (ttype) != quals)
4872 ttype = build_qualified_type (ttype, quals);
4873
4874 return ttype;
4875 }
4876
4877 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4878 the same. */
4879
4880 static bool
4881 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4882 {
4883 tree cl1, cl2;
4884 for (cl1 = clauses1, cl2 = clauses2;
4885 cl1 && cl2;
4886 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4887 {
4888 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4889 return false;
4890 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4891 {
4892 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4893 OMP_CLAUSE_DECL (cl2)) != 1)
4894 return false;
4895 }
4896 switch (OMP_CLAUSE_CODE (cl1))
4897 {
4898 case OMP_CLAUSE_ALIGNED:
4899 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4900 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4901 return false;
4902 break;
4903 case OMP_CLAUSE_LINEAR:
4904 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4905 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4906 return false;
4907 break;
4908 case OMP_CLAUSE_SIMDLEN:
4909 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4910 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4911 return false;
4912 default:
4913 break;
4914 }
4915 }
4916 return true;
4917 }
4918
4919 /* Compare two constructor-element-type constants.  Return true if the lists
4920    are known to be equal; otherwise return false.  */
4921
4922 static bool
4923 simple_cst_list_equal (const_tree l1, const_tree l2)
4924 {
4925 while (l1 != NULL_TREE && l2 != NULL_TREE)
4926 {
4927 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4928 return false;
4929
4930 l1 = TREE_CHAIN (l1);
4931 l2 = TREE_CHAIN (l2);
4932 }
4933
4934 return l1 == l2;
4935 }
4936
4937 /* Compare two identifier nodes representing attributes. Either one may
4938 be in wrapped __ATTR__ form. Return true if they are the same, false
4939 otherwise. */
4940
4941 static bool
4942 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4943 {
4944 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4945 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4946 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4947
4948 /* Identifiers can be compared directly for equality. */
4949 if (attr1 == attr2)
4950 return true;
4951
4952   /* If they are not equal, one may still be in the form
4953      'text' while the other is in the form '__text__'.  TODO:
4954 If we were storing attributes in normalized 'text' form, then
4955 this could all go away and we could take full advantage of
4956 the fact that we're comparing identifiers. :-) */
4957 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4958 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4959
4960 if (attr2_len == attr1_len + 4)
4961 {
4962 const char *p = IDENTIFIER_POINTER (attr2);
4963 const char *q = IDENTIFIER_POINTER (attr1);
4964 if (p[0] == '_' && p[1] == '_'
4965 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4966 && strncmp (q, p + 2, attr1_len) == 0)
4967 	return true;
4968 }
4969 else if (attr2_len + 4 == attr1_len)
4970 {
4971 const char *p = IDENTIFIER_POINTER (attr2);
4972 const char *q = IDENTIFIER_POINTER (attr1);
4973 if (q[0] == '_' && q[1] == '_'
4974 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4975 && strncmp (q + 2, p, attr2_len) == 0)
4976 return true;
4977 }
4978
4979 return false;
4980 }
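
/* For example, get_identifier ("format") and get_identifier ("__format__")
   compare equal here, because the second is the first wrapped in the
   "__...__" form handled above.  */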
4981
4982 /* Compare two attributes for their value identity. Return true if the
4983 attribute values are known to be equal; otherwise return false. */
4984
4985 bool
4986 attribute_value_equal (const_tree attr1, const_tree attr2)
4987 {
4988 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4989 return true;
4990
4991 if (TREE_VALUE (attr1) != NULL_TREE
4992 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4993 && TREE_VALUE (attr2) != NULL_TREE
4994 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4995 {
4996 /* Handle attribute format. */
4997 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4998 {
4999 attr1 = TREE_VALUE (attr1);
5000 attr2 = TREE_VALUE (attr2);
5001 /* Compare the archetypes (printf/scanf/strftime/...). */
5002 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5003 TREE_VALUE (attr2)))
5004 return false;
5005 /* Archetypes are the same. Compare the rest. */
5006 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5007 TREE_CHAIN (attr2)) == 1);
5008 }
5009 return (simple_cst_list_equal (TREE_VALUE (attr1),
5010 TREE_VALUE (attr2)) == 1);
5011 }
5012
5013 if ((flag_openmp || flag_openmp_simd)
5014 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5015 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5016 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5017 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5018 TREE_VALUE (attr2));
5019
5020 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5021 }
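
/* E.g. (illustrative): the attribute arguments of format (printf, 1, 2)
   and format (__printf__, 1, 2) compare equal here: the archetypes match
   via cmp_attrib_identifiers and the remaining INTEGER_CST arguments
   match via simple_cst_list_equal.  */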
5022
5023 /* Return 0 if the attributes for two types are incompatible, 1 if they
5024 are compatible, and 2 if they are nearly compatible (which causes a
5025 warning to be generated). */
5026 int
5027 comp_type_attributes (const_tree type1, const_tree type2)
5028 {
5029 const_tree a1 = TYPE_ATTRIBUTES (type1);
5030 const_tree a2 = TYPE_ATTRIBUTES (type2);
5031 const_tree a;
5032
5033 if (a1 == a2)
5034 return 1;
5035 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5036 {
5037 const struct attribute_spec *as;
5038 const_tree attr;
5039
5040 as = lookup_attribute_spec (get_attribute_name (a));
5041 if (!as || as->affects_type_identity == false)
5042 continue;
5043
5044 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5045 if (!attr || !attribute_value_equal (a, attr))
5046 break;
5047 }
5048 if (!a)
5049 {
5050 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5051 {
5052 const struct attribute_spec *as;
5053
5054 as = lookup_attribute_spec (get_attribute_name (a));
5055 if (!as || as->affects_type_identity == false)
5056 continue;
5057
5058 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5059 break;
5060 	  /* We don't need to compare trees again, as we already did this
5061 	     in the first loop.  */
5062 }
5063       /* All identity-affecting attributes are equal, so there is
5064 	 no need to call the target hook for comparison.  */
5065 if (!a)
5066 return 1;
5067 }
5068 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5069 return 0;
5070 /* As some type combinations - like default calling-convention - might
5071 be compatible, we have to call the target hook to get the final result. */
5072 return targetm.comp_type_attributes (type1, type2);
5073 }
5074
5075 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5076 is ATTRIBUTE.
5077
5078 Record such modified types already made so we don't make duplicates. */
5079
5080 tree
5081 build_type_attribute_variant (tree ttype, tree attribute)
5082 {
5083 return build_type_attribute_qual_variant (ttype, attribute,
5084 TYPE_QUALS (ttype));
5085 }
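
/* A minimal illustration (hypothetical attribute list; whether the
   attribute is valid for TYPE is the front end's business):

     tree attrs = tree_cons (get_identifier ("may_alias"), NULL_TREE,
			     NULL_TREE);
     tree variant = build_type_attribute_variant (type, attrs);

   goes through build_type_attribute_qual_variant above, which hashes the
   new type and reuses an existing variant with equal attributes.  */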
5086
5087
5088 /* Reset the expression *EXPR_P, a size or position.
5089
5090 ??? We could reset all non-constant sizes or positions. But it's cheap
5091 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5092
5093 We need to reset self-referential sizes or positions because they cannot
5094 be gimplified and thus can contain a CALL_EXPR after the gimplification
5095 is finished, which will run afoul of LTO streaming. And they need to be
5096 reset to something essentially dummy but not constant, so as to preserve
5097 the properties of the object they are attached to. */
5098
5099 static inline void
5100 free_lang_data_in_one_sizepos (tree *expr_p)
5101 {
5102 tree expr = *expr_p;
5103 if (CONTAINS_PLACEHOLDER_P (expr))
5104 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5105 }
5106
5107
5108 /* Reset all the fields in a binfo node BINFO. We only keep
5109 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5110
5111 static void
5112 free_lang_data_in_binfo (tree binfo)
5113 {
5114 unsigned i;
5115 tree t;
5116
5117 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5118
5119 BINFO_VIRTUALS (binfo) = NULL_TREE;
5120 BINFO_BASE_ACCESSES (binfo) = NULL;
5121 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5122 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5123
5124 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5125 free_lang_data_in_binfo (t);
5126 }
5127
5128
5129 /* Reset all language specific information still present in TYPE. */
5130
5131 static void
5132 free_lang_data_in_type (tree type)
5133 {
5134 gcc_assert (TYPE_P (type));
5135
5136 /* Give the FE a chance to remove its own data first. */
5137 lang_hooks.free_lang_data (type);
5138
5139 TREE_LANG_FLAG_0 (type) = 0;
5140 TREE_LANG_FLAG_1 (type) = 0;
5141 TREE_LANG_FLAG_2 (type) = 0;
5142 TREE_LANG_FLAG_3 (type) = 0;
5143 TREE_LANG_FLAG_4 (type) = 0;
5144 TREE_LANG_FLAG_5 (type) = 0;
5145 TREE_LANG_FLAG_6 (type) = 0;
5146
5147 if (TREE_CODE (type) == FUNCTION_TYPE)
5148 {
5149 /* Remove the const and volatile qualifiers from arguments. The
5150 C++ front end removes them, but the C front end does not,
5151 leading to false ODR violation errors when merging two
5152 instances of the same function signature compiled by
5153 different front ends. */
5154 tree p;
5155
5156 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5157 {
5158 tree arg_type = TREE_VALUE (p);
5159
5160 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5161 {
5162 int quals = TYPE_QUALS (arg_type)
5163 & ~TYPE_QUAL_CONST
5164 & ~TYPE_QUAL_VOLATILE;
5165 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5166 free_lang_data_in_type (TREE_VALUE (p));
5167 }
5168 /* C++ FE uses TREE_PURPOSE to store initial values. */
5169 TREE_PURPOSE (p) = NULL;
5170 }
5171 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5172 TYPE_MINVAL (type) = NULL;
5173 }
5174 if (TREE_CODE (type) == METHOD_TYPE)
5175 {
5176 tree p;
5177
5178 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5179 {
5180 /* C++ FE uses TREE_PURPOSE to store initial values. */
5181 TREE_PURPOSE (p) = NULL;
5182 }
5183 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5184 TYPE_MINVAL (type) = NULL;
5185 }
5186
5187 /* Remove members that are not actually FIELD_DECLs from the field
5188 list of an aggregate. These occur in C++. */
5189 if (RECORD_OR_UNION_TYPE_P (type))
5190 {
5191 tree prev, member;
5192
5193 /* Note that TYPE_FIELDS can be shared across distinct
5194 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5195 to be removed, we cannot set its TREE_CHAIN to NULL.
5196 Otherwise, we would not be able to find all the other fields
5197 in the other instances of this TREE_TYPE.
5198
5199 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5200 prev = NULL_TREE;
5201 member = TYPE_FIELDS (type);
5202 while (member)
5203 {
5204 if (TREE_CODE (member) == FIELD_DECL
5205 || TREE_CODE (member) == TYPE_DECL)
5206 {
5207 if (prev)
5208 TREE_CHAIN (prev) = member;
5209 else
5210 TYPE_FIELDS (type) = member;
5211 prev = member;
5212 }
5213
5214 member = TREE_CHAIN (member);
5215 }
5216
5217 if (prev)
5218 TREE_CHAIN (prev) = NULL_TREE;
5219 else
5220 TYPE_FIELDS (type) = NULL_TREE;
5221
5222       /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5223 	 and dangles the pointer from time to time.  */
5224 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5225 TYPE_VFIELD (type) = NULL_TREE;
5226
5227       /* Remove the TYPE_METHODS list.  While it would be nice to keep it
5228 	 to enable ODR warnings about differing method lists, doing so
5229 	 seems to increase the size of the streamed LTO data impractically.
5230 	 Keep the information about whether TYPE_METHODS was non-NULL; this
5231 	 is used by function.c and the pretty printers.  */
5232 if (TYPE_METHODS (type))
5233 TYPE_METHODS (type) = error_mark_node;
5234 if (TYPE_BINFO (type))
5235 {
5236 free_lang_data_in_binfo (TYPE_BINFO (type));
5237 	  /* We need to preserve the link to the bases and the virtual table
5238 	     for all polymorphic types to keep the devirtualization machinery
5239 	     working.  Debug output cares only about the bases, but we also
5240 	     output the virtual table pointers so that merging -fdevirtualize
5241 	     and -fno-devirtualize units is easier.  */
5242 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5243 || !flag_devirtualize)
5244 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5245 && !BINFO_VTABLE (TYPE_BINFO (type)))
5246 || debug_info_level != DINFO_LEVEL_NONE))
5247 TYPE_BINFO (type) = NULL;
5248 }
5249 }
5250 else
5251 {
5252 /* For non-aggregate types, clear out the language slot (which
5253 overloads TYPE_BINFO). */
5254 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5255
5256 if (INTEGRAL_TYPE_P (type)
5257 || SCALAR_FLOAT_TYPE_P (type)
5258 || FIXED_POINT_TYPE_P (type))
5259 {
5260 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5261 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5262 }
5263 }
5264
5265 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5266 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5267
5268 if (TYPE_CONTEXT (type)
5269 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5270 {
5271 tree ctx = TYPE_CONTEXT (type);
5272 do
5273 {
5274 ctx = BLOCK_SUPERCONTEXT (ctx);
5275 }
5276 while (ctx && TREE_CODE (ctx) == BLOCK);
5277 TYPE_CONTEXT (type) = ctx;
5278 }
5279 }
5280
5281
5282 /* Return true if DECL may need an assembler name to be set. */
5283
5284 static inline bool
5285 need_assembler_name_p (tree decl)
5286 {
5287 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5288      Rule merging.  This makes type_odr_p return true on those types during
5289      LTO, and by comparing the mangled names we can tell which types are
5290      intended to be equivalent across compilation units.
5291
5292 We do not store names of type_in_anonymous_namespace_p.
5293
5294      Record, union and enumeration types have linkage that allows us
5295      to check type_in_anonymous_namespace_p.  We do not mangle compound types
5296      that can always be compared structurally.
5297
5298 Similarly for builtin types, we compare properties of their main variant.
5299      A special case are integer types, where mangling does distinguish
5300      between char/signed char/unsigned char etc.  Storing names for these lets
5301      e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5302 See cp/mangle.c:write_builtin_type for details. */
5303
5304 if (flag_lto_odr_type_mering
5305 && TREE_CODE (decl) == TYPE_DECL
5306 && DECL_NAME (decl)
5307 && decl == TYPE_NAME (TREE_TYPE (decl))
5308 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5309 && (type_with_linkage_p (TREE_TYPE (decl))
5310 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5311 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5312 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5313 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5314 if (TREE_CODE (decl) != FUNCTION_DECL
5315 && TREE_CODE (decl) != VAR_DECL)
5316 return false;
5317
5318 /* If DECL already has its assembler name set, it does not need a
5319 new one. */
5320 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5321 || DECL_ASSEMBLER_NAME_SET_P (decl))
5322 return false;
5323
5324 /* Abstract decls do not need an assembler name. */
5325 if (DECL_ABSTRACT_P (decl))
5326 return false;
5327
5328 /* For VAR_DECLs, only static, public and external symbols need an
5329 assembler name. */
5330 if (TREE_CODE (decl) == VAR_DECL
5331 && !TREE_STATIC (decl)
5332 && !TREE_PUBLIC (decl)
5333 && !DECL_EXTERNAL (decl))
5334 return false;
5335
5336 if (TREE_CODE (decl) == FUNCTION_DECL)
5337 {
5338 /* Do not set assembler name on builtins. Allow RTL expansion to
5339 decide whether to expand inline or via a regular call. */
5340 if (DECL_BUILT_IN (decl)
5341 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5342 return false;
5343
5344 /* Functions represented in the callgraph need an assembler name. */
5345 if (cgraph_node::get (decl) != NULL)
5346 return true;
5347
5348 /* Unused and not public functions don't need an assembler name. */
5349 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5350 return false;
5351 }
5352
5353 return true;
5354 }
5355
5356
5357 /* Reset all language specific information still present in symbol
5358 DECL. */
5359
5360 static void
5361 free_lang_data_in_decl (tree decl)
5362 {
5363 gcc_assert (DECL_P (decl));
5364
5365 /* Give the FE a chance to remove its own data first. */
5366 lang_hooks.free_lang_data (decl);
5367
5368 TREE_LANG_FLAG_0 (decl) = 0;
5369 TREE_LANG_FLAG_1 (decl) = 0;
5370 TREE_LANG_FLAG_2 (decl) = 0;
5371 TREE_LANG_FLAG_3 (decl) = 0;
5372 TREE_LANG_FLAG_4 (decl) = 0;
5373 TREE_LANG_FLAG_5 (decl) = 0;
5374 TREE_LANG_FLAG_6 (decl) = 0;
5375
5376 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5377 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5378 if (TREE_CODE (decl) == FIELD_DECL)
5379 {
5380 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5381 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5382 DECL_QUALIFIER (decl) = NULL_TREE;
5383 }
5384
5385 if (TREE_CODE (decl) == FUNCTION_DECL)
5386 {
5387 struct cgraph_node *node;
5388 if (!(node = cgraph_node::get (decl))
5389 || (!node->definition && !node->clones))
5390 {
5391 if (node)
5392 node->release_body ();
5393 else
5394 {
5395 release_function_body (decl);
5396 DECL_ARGUMENTS (decl) = NULL;
5397 DECL_RESULT (decl) = NULL;
5398 DECL_INITIAL (decl) = error_mark_node;
5399 }
5400 }
5401 if (gimple_has_body_p (decl))
5402 {
5403 tree t;
5404
5405 /* If DECL has a gimple body, then the context for its
5406 arguments must be DECL. Otherwise, it doesn't really
5407 matter, as we will not be emitting any code for DECL. In
5408 general, there may be other instances of DECL created by
5409 the front end and since PARM_DECLs are generally shared,
5410 their DECL_CONTEXT changes as the replicas of DECL are
5411 created. The only time where DECL_CONTEXT is important
5412 is for the FUNCTION_DECLs that have a gimple body (since
5413 the PARM_DECL will be used in the function's body). */
5414 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5415 DECL_CONTEXT (t) = decl;
5416 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5417 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5418 = target_option_default_node;
5419 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5420 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5421 = optimization_default_node;
5422 }
5423
5424 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5425 At this point, it is not needed anymore. */
5426 DECL_SAVED_TREE (decl) = NULL_TREE;
5427
5428 /* Clear the abstract origin if it refers to a method. Otherwise
5429 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5430 origin will not be output correctly. */
5431 if (DECL_ABSTRACT_ORIGIN (decl)
5432 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5433 && RECORD_OR_UNION_TYPE_P
5434 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5435 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5436
5437 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5438 DECL_VINDEX referring to itself into a vtable slot number as it
5439 should. Happens with functions that are copied and then forgotten
5440 about. Just clear it, it won't matter anymore. */
5441 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5442 DECL_VINDEX (decl) = NULL_TREE;
5443 }
5444 else if (TREE_CODE (decl) == VAR_DECL)
5445 {
5446 if ((DECL_EXTERNAL (decl)
5447 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5448 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5449 DECL_INITIAL (decl) = NULL_TREE;
5450 }
5451 else if (TREE_CODE (decl) == TYPE_DECL
5452 || TREE_CODE (decl) == FIELD_DECL)
5453 DECL_INITIAL (decl) = NULL_TREE;
5454 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5455 && DECL_INITIAL (decl)
5456 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5457 {
5458 /* Strip builtins from the translation-unit BLOCK. We still have targets
5459 without builtin_decl_explicit support and also builtins are shared
5460 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5461 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5462 while (*nextp)
5463 {
5464 tree var = *nextp;
5465 if (TREE_CODE (var) == FUNCTION_DECL
5466 && DECL_BUILT_IN (var))
5467 *nextp = TREE_CHAIN (var);
5468 else
5469 nextp = &TREE_CHAIN (var);
5470 }
5471 }
5472 }
5473
5474
5475 /* Data used when collecting DECLs and TYPEs for language data removal. */
5476
5477 struct free_lang_data_d
5478 {
5479 /* Worklist to avoid excessive recursion. */
5480 vec<tree> worklist;
5481
5482 /* Set of traversed objects. Used to avoid duplicate visits. */
5483 hash_set<tree> *pset;
5484
5485 /* Array of symbols to process with free_lang_data_in_decl. */
5486 vec<tree> decls;
5487
5488 /* Array of types to process with free_lang_data_in_type. */
5489 vec<tree> types;
5490 };
5491
5492
5493 /* Save all language fields needed to generate proper debug information
5494 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5495
5496 static void
5497 save_debug_info_for_decl (tree t)
5498 {
5499 /*struct saved_debug_info_d *sdi;*/
5500
5501 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5502
5503 /* FIXME. Partial implementation for saving debug info removed. */
5504 }
5505
5506
5507 /* Save all language fields needed to generate proper debug information
5508 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5509
5510 static void
5511 save_debug_info_for_type (tree t)
5512 {
5513 /*struct saved_debug_info_d *sdi;*/
5514
5515 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5516
5517 /* FIXME. Partial implementation for saving debug info removed. */
5518 }
5519
5520
5521 /* Add type or decl T to one of the list of tree nodes that need their
5522 language data removed. The lists are held inside FLD. */
5523
5524 static void
5525 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5526 {
5527 if (DECL_P (t))
5528 {
5529 fld->decls.safe_push (t);
5530 if (debug_info_level > DINFO_LEVEL_TERSE)
5531 save_debug_info_for_decl (t);
5532 }
5533 else if (TYPE_P (t))
5534 {
5535 fld->types.safe_push (t);
5536 if (debug_info_level > DINFO_LEVEL_TERSE)
5537 save_debug_info_for_type (t);
5538 }
5539 else
5540 gcc_unreachable ();
5541 }
5542
5543 /* Push tree node T into FLD->WORKLIST. */
5544
5545 static inline void
5546 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5547 {
5548 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5549 fld->worklist.safe_push ((t));
5550 }
5551
5552
5553 /* Operand callback helper for free_lang_data_in_node. *TP is the
5554 subtree operand being considered. */
5555
5556 static tree
5557 find_decls_types_r (tree *tp, int *ws, void *data)
5558 {
5559 tree t = *tp;
5560 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5561
5562 if (TREE_CODE (t) == TREE_LIST)
5563 return NULL_TREE;
5564
5565 /* Language specific nodes will be removed, so there is no need
5566 to gather anything under them. */
5567 if (is_lang_specific (t))
5568 {
5569 *ws = 0;
5570 return NULL_TREE;
5571 }
5572
5573 if (DECL_P (t))
5574 {
5575 /* Note that walk_tree does not traverse every possible field in
5576 decls, so we have to do our own traversals here. */
5577 add_tree_to_fld_list (t, fld);
5578
5579 fld_worklist_push (DECL_NAME (t), fld);
5580 fld_worklist_push (DECL_CONTEXT (t), fld);
5581 fld_worklist_push (DECL_SIZE (t), fld);
5582 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5583
5584 /* We are going to remove everything under DECL_INITIAL for
5585 TYPE_DECLs. No point walking them. */
5586 if (TREE_CODE (t) != TYPE_DECL)
5587 fld_worklist_push (DECL_INITIAL (t), fld);
5588
5589 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5590 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5591
5592 if (TREE_CODE (t) == FUNCTION_DECL)
5593 {
5594 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5595 fld_worklist_push (DECL_RESULT (t), fld);
5596 }
5597 else if (TREE_CODE (t) == TYPE_DECL)
5598 {
5599 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5600 }
5601 else if (TREE_CODE (t) == FIELD_DECL)
5602 {
5603 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5604 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5605 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5606 fld_worklist_push (DECL_FCONTEXT (t), fld);
5607 }
5608
5609 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5610 && DECL_HAS_VALUE_EXPR_P (t))
5611 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5612
5613 if (TREE_CODE (t) != FIELD_DECL
5614 && TREE_CODE (t) != TYPE_DECL)
5615 fld_worklist_push (TREE_CHAIN (t), fld);
5616 *ws = 0;
5617 }
5618 else if (TYPE_P (t))
5619 {
5620 /* Note that walk_tree does not traverse every possible field in
5621 types, so we have to do our own traversals here. */
5622 add_tree_to_fld_list (t, fld);
5623
5624 if (!RECORD_OR_UNION_TYPE_P (t))
5625 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5626 fld_worklist_push (TYPE_SIZE (t), fld);
5627 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5628 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5629 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5630 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5631 fld_worklist_push (TYPE_NAME (t), fld);
5632       /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
5633 	 them and thus do not want to reach unused pointer types
5634 	 this way.  */
5635 if (!POINTER_TYPE_P (t))
5636 fld_worklist_push (TYPE_MINVAL (t), fld);
5637 if (!RECORD_OR_UNION_TYPE_P (t))
5638 fld_worklist_push (TYPE_MAXVAL (t), fld);
5639 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5640       /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
5641 	 do not want to reach unused variants this way.  */
5642 if (TYPE_CONTEXT (t))
5643 {
5644 tree ctx = TYPE_CONTEXT (t);
5645 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5646 So push that instead. */
5647 while (ctx && TREE_CODE (ctx) == BLOCK)
5648 ctx = BLOCK_SUPERCONTEXT (ctx);
5649 fld_worklist_push (ctx, fld);
5650 }
5651       /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do not
5652 	 want to reach unused types this way.  */
5653
5654 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5655 {
5656 unsigned i;
5657 tree tem;
5658 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5659 fld_worklist_push (TREE_TYPE (tem), fld);
5660 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5661 if (tem
5662 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5663 && TREE_CODE (tem) == TREE_LIST)
5664 do
5665 {
5666 fld_worklist_push (TREE_VALUE (tem), fld);
5667 tem = TREE_CHAIN (tem);
5668 }
5669 while (tem);
5670 }
5671 if (RECORD_OR_UNION_TYPE_P (t))
5672 {
5673 tree tem;
5674 	  /* Push all TYPE_FIELDS - interesting and non-interesting
5675 	     entries can be interleaved.  */
5676 tem = TYPE_FIELDS (t);
5677 while (tem)
5678 {
5679 if (TREE_CODE (tem) == FIELD_DECL
5680 || TREE_CODE (tem) == TYPE_DECL)
5681 fld_worklist_push (tem, fld);
5682 tem = TREE_CHAIN (tem);
5683 }
5684 }
5685
5686 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5687 *ws = 0;
5688 }
5689 else if (TREE_CODE (t) == BLOCK)
5690 {
5691 tree tem;
5692 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5693 fld_worklist_push (tem, fld);
5694 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5695 fld_worklist_push (tem, fld);
5696 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5697 }
5698
5699 if (TREE_CODE (t) != IDENTIFIER_NODE
5700 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5701 fld_worklist_push (TREE_TYPE (t), fld);
5702
5703 return NULL_TREE;
5704 }
5705
5706
5707 /* Find decls and types in T. */
5708
5709 static void
5710 find_decls_types (tree t, struct free_lang_data_d *fld)
5711 {
5712 while (1)
5713 {
5714 if (!fld->pset->contains (t))
5715 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5716 if (fld->worklist.is_empty ())
5717 break;
5718 t = fld->worklist.pop ();
5719 }
5720 }
5721
5722 /* Translate all the types in LIST with the corresponding runtime
5723 types. */
5724
5725 static tree
5726 get_eh_types_for_runtime (tree list)
5727 {
5728 tree head, prev;
5729
5730 if (list == NULL_TREE)
5731 return NULL_TREE;
5732
5733 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5734 prev = head;
5735 list = TREE_CHAIN (list);
5736 while (list)
5737 {
5738 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5739 TREE_CHAIN (prev) = n;
5740 prev = TREE_CHAIN (prev);
5741 list = TREE_CHAIN (list);
5742 }
5743
5744 return head;
5745 }
5746
5747
5748 /* Find decls and types referenced in EH region R and store them in
5749 FLD->DECLS and FLD->TYPES. */
5750
5751 static void
5752 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5753 {
5754 switch (r->type)
5755 {
5756 case ERT_CLEANUP:
5757 break;
5758
5759 case ERT_TRY:
5760 {
5761 eh_catch c;
5762
5763 /* The types referenced in each catch must first be changed to the
5764 EH types used at runtime. This removes references to FE types
5765 in the region. */
5766 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5767 {
5768 c->type_list = get_eh_types_for_runtime (c->type_list);
5769 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5770 }
5771 }
5772 break;
5773
5774 case ERT_ALLOWED_EXCEPTIONS:
5775 r->u.allowed.type_list
5776 = get_eh_types_for_runtime (r->u.allowed.type_list);
5777 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5778 break;
5779
5780 case ERT_MUST_NOT_THROW:
5781 walk_tree (&r->u.must_not_throw.failure_decl,
5782 find_decls_types_r, fld, fld->pset);
5783 break;
5784 }
5785 }
5786
5787
5788 /* Find decls and types referenced in cgraph node N and store them in
5789 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5790 look for *every* kind of DECL and TYPE node reachable from N,
5791    including those embedded inside types and decls (i.e., TYPE_DECLs,
5792 NAMESPACE_DECLs, etc). */
5793
5794 static void
5795 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5796 {
5797 basic_block bb;
5798 struct function *fn;
5799 unsigned ix;
5800 tree t;
5801
5802 find_decls_types (n->decl, fld);
5803
5804 if (!gimple_has_body_p (n->decl))
5805 return;
5806
5807 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5808
5809 fn = DECL_STRUCT_FUNCTION (n->decl);
5810
5811 /* Traverse locals. */
5812 FOR_EACH_LOCAL_DECL (fn, ix, t)
5813 find_decls_types (t, fld);
5814
5815 /* Traverse EH regions in FN. */
5816 {
5817 eh_region r;
5818 FOR_ALL_EH_REGION_FN (r, fn)
5819 find_decls_types_in_eh_region (r, fld);
5820 }
5821
5822 /* Traverse every statement in FN. */
5823 FOR_EACH_BB_FN (bb, fn)
5824 {
5825 gphi_iterator psi;
5826 gimple_stmt_iterator si;
5827 unsigned i;
5828
5829 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5830 {
5831 gphi *phi = psi.phi ();
5832
5833 for (i = 0; i < gimple_phi_num_args (phi); i++)
5834 {
5835 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5836 find_decls_types (*arg_p, fld);
5837 }
5838 }
5839
5840 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5841 {
5842 gimple *stmt = gsi_stmt (si);
5843
5844 if (is_gimple_call (stmt))
5845 find_decls_types (gimple_call_fntype (stmt), fld);
5846
5847 for (i = 0; i < gimple_num_ops (stmt); i++)
5848 {
5849 tree arg = gimple_op (stmt, i);
5850 find_decls_types (arg, fld);
5851 }
5852 }
5853 }
5854 }
5855
5856
5857 /* Find decls and types referenced in varpool node N and store them in
5858 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5859 look for *every* kind of DECL and TYPE node reachable from N,
5860    including those embedded inside types and decls (i.e., TYPE_DECLs,
5861 NAMESPACE_DECLs, etc). */
5862
5863 static void
5864 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5865 {
5866 find_decls_types (v->decl, fld);
5867 }
5868
5869 /* If T needs an assembler name, have one created for it. */
5870
5871 void
5872 assign_assembler_name_if_neeeded (tree t)
5873 {
5874 if (need_assembler_name_p (t))
5875 {
5876 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5877 diagnostics that use input_location to show locus
5878 information. The problem here is that, at this point,
5879 input_location is generally anchored to the end of the file
5880 (since the parser is long gone), so we don't have a good
5881 position to pin it to.
5882
5883 To alleviate this problem, this uses the location of T's
5884 declaration. Examples of this are
5885 testsuite/g++.dg/template/cond2.C and
5886 testsuite/g++.dg/template/pr35240.C. */
5887 location_t saved_location = input_location;
5888 input_location = DECL_SOURCE_LOCATION (t);
5889
5890 decl_assembler_name (t);
5891
5892 input_location = saved_location;
5893 }
5894 }
5895
5896
5897 /* Free language specific information for every operand and expression
5898 in every node of the call graph. This process operates in three stages:
5899
5900 1- Every callgraph node and varpool node is traversed looking for
5901 decls and types embedded in them. This is a more exhaustive
5902 search than that done by find_referenced_vars, because it will
5903 also collect individual fields, decls embedded in types, etc.
5904
5905 2- All the decls found are sent to free_lang_data_in_decl.
5906
5907 3- All the types found are sent to free_lang_data_in_type.
5908
5909 The ordering between decls and types is important because
5910 free_lang_data_in_decl sets assembler names, which includes
5911 mangling. So types cannot be freed up until assembler names have
5912 been set up. */
5913
5914 static void
5915 free_lang_data_in_cgraph (void)
5916 {
5917 struct cgraph_node *n;
5918 varpool_node *v;
5919 struct free_lang_data_d fld;
5920 tree t;
5921 unsigned i;
5922 alias_pair *p;
5923
5924 /* Initialize sets and arrays to store referenced decls and types. */
5925 fld.pset = new hash_set<tree>;
5926 fld.worklist.create (0);
5927 fld.decls.create (100);
5928 fld.types.create (100);
5929
5930 /* Find decls and types in the body of every function in the callgraph. */
5931 FOR_EACH_FUNCTION (n)
5932 find_decls_types_in_node (n, &fld);
5933
5934 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5935 find_decls_types (p->decl, &fld);
5936
5937 /* Find decls and types in every varpool symbol. */
5938 FOR_EACH_VARIABLE (v)
5939 find_decls_types_in_var (v, &fld);
5940
5941 /* Set the assembler name on every decl found. We need to do this
5942 now because free_lang_data_in_decl will invalidate data needed
5943 for mangling. This breaks mangling on interdependent decls. */
5944 FOR_EACH_VEC_ELT (fld.decls, i, t)
5945 assign_assembler_name_if_neeeded (t);
5946
5947 /* Traverse every decl found freeing its language data. */
5948 FOR_EACH_VEC_ELT (fld.decls, i, t)
5949 free_lang_data_in_decl (t);
5950
5951 /* Traverse every type found freeing its language data. */
5952 FOR_EACH_VEC_ELT (fld.types, i, t)
5953 free_lang_data_in_type (t);
5954 if (flag_checking)
5955 {
5956 FOR_EACH_VEC_ELT (fld.types, i, t)
5957 verify_type (t);
5958 }
5959
5960 delete fld.pset;
5961 fld.worklist.release ();
5962 fld.decls.release ();
5963 fld.types.release ();
5964 }
5965
5966
5967 /* Free resources that are used by the FE but are not needed once it is done. */
5968
5969 static unsigned
5970 free_lang_data (void)
5971 {
5972 unsigned i;
5973
5974 /* If we are the LTO frontend, we have freed lang-specific data already. */
5975 if (in_lto_p
5976 || (!flag_generate_lto && !flag_generate_offload))
5977 return 0;
5978
5979 /* Allocate and assign alias sets to the standard integer types
5980 while the slots are still laid out the way the frontends generated them. */
5981 for (i = 0; i < itk_none; ++i)
5982 if (integer_types[i])
5983 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5984
5985 /* Traverse the IL resetting language specific information for
5986 operands, expressions, etc. */
5987 free_lang_data_in_cgraph ();
5988
5989 /* Create gimple variants for common types. */
5990 ptrdiff_type_node = integer_type_node;
5991 fileptr_type_node = ptr_type_node;
5992
5993 /* Reset some langhooks. Do not reset types_compatible_p, it may
5994 still be used indirectly via the get_alias_set langhook. */
5995 lang_hooks.dwarf_name = lhd_dwarf_name;
5996 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5997 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5998
5999 /* We do not want the default decl_assembler_name implementation.
6000 Rather, once everything has been fixed up, we want a wrapper around
6001 it asserting that all non-local symbols already got their assembler
6002 name and only producing assembler names for local symbols. Or
6003 better, make sure we never call decl_assembler_name on local symbols
6004 and devise a separate, middle-end private scheme for it. */
6005
6006 /* Reset diagnostic machinery. */
6007 tree_diagnostics_defaults (global_dc);
6008
6009 return 0;
6010 }
6011
6012
6013 namespace {
6014
6015 const pass_data pass_data_ipa_free_lang_data =
6016 {
6017 SIMPLE_IPA_PASS, /* type */
6018 "*free_lang_data", /* name */
6019 OPTGROUP_NONE, /* optinfo_flags */
6020 TV_IPA_FREE_LANG_DATA, /* tv_id */
6021 0, /* properties_required */
6022 0, /* properties_provided */
6023 0, /* properties_destroyed */
6024 0, /* todo_flags_start */
6025 0, /* todo_flags_finish */
6026 };
6027
6028 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6029 {
6030 public:
6031 pass_ipa_free_lang_data (gcc::context *ctxt)
6032 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6033 {}
6034
6035 /* opt_pass methods: */
6036 virtual unsigned int execute (function *) { return free_lang_data (); }
6037
6038 }; // class pass_ipa_free_lang_data
6039
6040 } // anon namespace
6041
6042 simple_ipa_opt_pass *
6043 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6044 {
6045 return new pass_ipa_free_lang_data (ctxt);
6046 }
6047
6048 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6049 ATTR_NAME. Also used internally by remove_attribute(). */
6050 bool
6051 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6052 {
6053 size_t ident_len = IDENTIFIER_LENGTH (ident);
6054
6055 if (ident_len == attr_len)
6056 {
6057 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6058 return true;
6059 }
6060 else if (ident_len == attr_len + 4)
6061 {
6062 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
6063 '__text__'. */
6064 const char *p = IDENTIFIER_POINTER (ident);
6065 if (p[0] == '_' && p[1] == '_'
6066 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6067 && strncmp (attr_name, p + 2, attr_len) == 0)
6068 return true;
6069 }
6070
6071 return false;
6072 }
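
/* Illustrative example: with ATTR_NAME "packed" and ATTR_LEN
   strlen ("packed"), both the identifier "packed" and the identifier
   "__packed__" match, e.g.

     private_is_attribute_p ("packed", strlen ("packed"),
			     get_identifier ("__packed__"))

   returns true, while any other spelling (say "packed2") fails the
   length checks above and returns false.  */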
6073
6074 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6075 of ATTR_NAME, and LIST is not NULL_TREE. */
6076 tree
6077 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6078 {
6079 while (list)
6080 {
6081 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6082
6083 if (ident_len == attr_len)
6084 {
6085 if (!strcmp (attr_name,
6086 IDENTIFIER_POINTER (get_attribute_name (list))))
6087 break;
6088 }
6089 /* TODO: If we made sure that attributes were stored in the
6090 canonical form without '__...__' (i.e., as in 'text' as opposed
6091 to '__text__') then we could avoid the following case. */
6092 else if (ident_len == attr_len + 4)
6093 {
6094 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6095 if (p[0] == '_' && p[1] == '_'
6096 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6097 && strncmp (attr_name, p + 2, attr_len) == 0)
6098 break;
6099 }
6100 list = TREE_CHAIN (list);
6101 }
6102
6103 return list;
6104 }
6105
6106 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6107 return a pointer to the first list element whose attribute name
6108 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6109 '__text__'). */
6110
6111 tree
6112 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6113 tree list)
6114 {
6115 while (list)
6116 {
6117 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6118
6119 if (attr_len > ident_len)
6120 {
6121 list = TREE_CHAIN (list);
6122 continue;
6123 }
6124
6125 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6126
6127 if (strncmp (attr_name, p, attr_len) == 0)
6128 break;
6129
6130 /* TODO: If we made sure that attributes were stored in the
6131 canonical form without '__...__' (i.e., as in 'text' as opposed
6132 to '__text__') then we could avoid the following case. */
6133 if (p[0] == '_' && p[1] == '_'
6134     && strncmp (attr_name, p + 2, attr_len) == 0)
6135 break;
6136
6137 list = TREE_CHAIN (list);
6138 }
6139
6140 return list;
6141 }
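
/* Illustrative example (the attribute names are arbitrary): with
   ATTR_NAME "no" and a LIST carrying "noreturn" followed by
   "__noinline__", the first element whose name starts with that prefix
   is returned, here the "noreturn" one; a leading and trailing "__"
   wrapper is skipped just as in private_lookup_attribute.  */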
6142
6143
6144 /* A variant of lookup_attribute() that can be used with an identifier
6145 as the first argument, and where the identifier can be either
6146 'text' or '__text__'.
6147
6148 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6149 return a pointer to the attribute's list element if the attribute
6150 is part of the list, or NULL_TREE if not found. If the attribute
6151 appears more than once, this only returns the first occurrence; the
6152 TREE_CHAIN of the return value should be passed back in if further
6153 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6154 can be in the form 'text' or '__text__'. */
6155 static tree
6156 lookup_ident_attribute (tree attr_identifier, tree list)
6157 {
6158 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6159
6160 while (list)
6161 {
6162 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6163 == IDENTIFIER_NODE);
6164
6165 if (cmp_attrib_identifiers (attr_identifier,
6166 get_attribute_name (list)))
6167 /* Found it. */
6168 break;
6169 list = TREE_CHAIN (list);
6170 }
6171
6172 return list;
6173 }
6174
6175 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6176 modified list. */
6177
6178 tree
6179 remove_attribute (const char *attr_name, tree list)
6180 {
6181 tree *p;
6182 size_t attr_len = strlen (attr_name);
6183
6184 gcc_checking_assert (attr_name[0] != '_');
6185
6186 for (p = &list; *p; )
6187 {
6188 tree l = *p;
6189 /* TODO: If we were storing attributes in normalized form, here
6190 we could use a simple strcmp(). */
6191 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6192 *p = TREE_CHAIN (l);
6193 else
6194 p = &TREE_CHAIN (l);
6195 }
6196
6197 return list;
6198 }
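
/* A minimal usage sketch (DECL here is a placeholder): to strip any
   "dllimport" attribute from a declaration, as
   merge_dllimport_decl_attributes below does on its merged list, one
   would write

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("dllimport", DECL_ATTRIBUTES (decl));

   Both the "dllimport" and "__dllimport__" spellings are recognized,
   and every occurrence in the list is dropped, not just the first.  */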
6199
6200 /* Return an attribute list that is the union of A1 and A2. */
6201
6202 tree
6203 merge_attributes (tree a1, tree a2)
6204 {
6205 tree attributes;
6206
6207 /* Either one unset? Take the set one. */
6208
6209 if ((attributes = a1) == 0)
6210 attributes = a2;
6211
6212 /* One that completely contains the other? Take it. */
6213
6214 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6215 {
6216 if (attribute_list_contained (a2, a1))
6217 attributes = a2;
6218 else
6219 {
6220 /* Pick the longest list, and hang the other list on it. */
6221
6222 if (list_length (a1) < list_length (a2))
6223 attributes = a2, a2 = a1;
6224
6225 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6226 {
6227 tree a;
6228 for (a = lookup_ident_attribute (get_attribute_name (a2),
6229 attributes);
6230 a != NULL_TREE && !attribute_value_equal (a, a2);
6231 a = lookup_ident_attribute (get_attribute_name (a2),
6232 TREE_CHAIN (a)))
6233 ;
6234 if (a == NULL_TREE)
6235 {
6236 a1 = copy_node (a2);
6237 TREE_CHAIN (a1) = attributes;
6238 attributes = a1;
6239 }
6240 }
6241 }
6242 }
6243 return attributes;
6244 }
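
/* For example, merging {noreturn, aligned (8)} with {noreturn, malloc}
   yields a list containing noreturn, aligned (8) and malloc exactly
   once each: attributes already present with an equal value are not
   duplicated, and only the genuinely new ones are copied onto the
   front of the result.  */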
6245
6246 /* Given types T1 and T2, merge their attributes and return
6247 the result. */
6248
6249 tree
6250 merge_type_attributes (tree t1, tree t2)
6251 {
6252 return merge_attributes (TYPE_ATTRIBUTES (t1),
6253 TYPE_ATTRIBUTES (t2));
6254 }
6255
6256 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6257 the result. */
6258
6259 tree
6260 merge_decl_attributes (tree olddecl, tree newdecl)
6261 {
6262 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6263 DECL_ATTRIBUTES (newdecl));
6264 }
6265
6266 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6267
6268 /* Specialization of merge_decl_attributes for various Windows targets.
6269
6270 This handles the following situation:
6271
6272 __declspec (dllimport) int foo;
6273 int foo;
6274
6275 The second instance of `foo' nullifies the dllimport. */
6276
6277 tree
6278 merge_dllimport_decl_attributes (tree old, tree new_tree)
6279 {
6280 tree a;
6281 int delete_dllimport_p = 1;
6282
6283 /* What we need to do here is remove from `old' dllimport if it doesn't
6284 appear in `new'. dllimport behaves like extern: if a declaration is
6285 marked dllimport and a definition appears later, then the object
6286 is not dllimport'd. We also remove a `new' dllimport if the old list
6287 contains dllexport: dllexport always overrides dllimport, regardless
6288 of the order of declaration. */
6289 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6290 delete_dllimport_p = 0;
6291 else if (DECL_DLLIMPORT_P (new_tree)
6292 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6293 {
6294 DECL_DLLIMPORT_P (new_tree) = 0;
6295 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6296 "dllimport ignored", new_tree);
6297 }
6298 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6299 {
6300 /* Warn about overriding a symbol that has already been used, e.g.:
6301 extern int __attribute__ ((dllimport)) foo;
6302 int* bar () {return &foo;}
6303 int foo;
6304 */
6305 if (TREE_USED (old))
6306 {
6307 warning (0, "%q+D redeclared without dllimport attribute "
6308 "after being referenced with dll linkage", new_tree);
6309 /* If we have used a variable's address with dllimport linkage,
6310 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6311 decl may already have had TREE_CONSTANT computed.
6312 We still remove the attribute so that assembler code refers
6313 to `&foo' rather than `_imp__foo'. */
6314 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6315 DECL_DLLIMPORT_P (new_tree) = 1;
6316 }
6317
6318 /* Let an inline definition silently override the external reference,
6319 but otherwise warn about attribute inconsistency. */
6320 else if (TREE_CODE (new_tree) == VAR_DECL
6321 || !DECL_DECLARED_INLINE_P (new_tree))
6322 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6323 "previous dllimport ignored", new_tree);
6324 }
6325 else
6326 delete_dllimport_p = 0;
6327
6328 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6329
6330 if (delete_dllimport_p)
6331 a = remove_attribute ("dllimport", a);
6332
6333 return a;
6334 }
6335
6336 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6337 struct attribute_spec.handler. */
6338
6339 tree
6340 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6341 bool *no_add_attrs)
6342 {
6343 tree node = *pnode;
6344 bool is_dllimport;
6345
6346 /* These attributes may apply to structure and union types being created,
6347 but otherwise should pass to the declaration involved. */
6348 if (!DECL_P (node))
6349 {
6350 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6351 | (int) ATTR_FLAG_ARRAY_NEXT))
6352 {
6353 *no_add_attrs = true;
6354 return tree_cons (name, args, NULL_TREE);
6355 }
6356 if (TREE_CODE (node) == RECORD_TYPE
6357 || TREE_CODE (node) == UNION_TYPE)
6358 {
6359 node = TYPE_NAME (node);
6360 if (!node)
6361 return NULL_TREE;
6362 }
6363 else
6364 {
6365 warning (OPT_Wattributes, "%qE attribute ignored",
6366 name);
6367 *no_add_attrs = true;
6368 return NULL_TREE;
6369 }
6370 }
6371
6372 if (TREE_CODE (node) != FUNCTION_DECL
6373 && TREE_CODE (node) != VAR_DECL
6374 && TREE_CODE (node) != TYPE_DECL)
6375 {
6376 *no_add_attrs = true;
6377 warning (OPT_Wattributes, "%qE attribute ignored",
6378 name);
6379 return NULL_TREE;
6380 }
6381
6382 if (TREE_CODE (node) == TYPE_DECL
6383 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6384 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6385 {
6386 *no_add_attrs = true;
6387 warning (OPT_Wattributes, "%qE attribute ignored",
6388 name);
6389 return NULL_TREE;
6390 }
6391
6392 is_dllimport = is_attribute_p ("dllimport", name);
6393
6394 /* Report error on dllimport ambiguities seen now before they cause
6395 any damage. */
6396 if (is_dllimport)
6397 {
6398 /* Honor any target-specific overrides. */
6399 if (!targetm.valid_dllimport_attribute_p (node))
6400 *no_add_attrs = true;
6401
6402 else if (TREE_CODE (node) == FUNCTION_DECL
6403 && DECL_DECLARED_INLINE_P (node))
6404 {
6405 warning (OPT_Wattributes, "inline function %q+D declared as "
6406 "dllimport: attribute ignored", node);
6407 *no_add_attrs = true;
6408 }
6409 /* Like MS, treat definition of dllimported variables and
6410 non-inlined functions on declaration as syntax errors. */
6411 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6412 {
6413 error ("function %q+D definition is marked dllimport", node);
6414 *no_add_attrs = true;
6415 }
6416
6417 else if (TREE_CODE (node) == VAR_DECL)
6418 {
6419 if (DECL_INITIAL (node))
6420 {
6421 error ("variable %q+D definition is marked dllimport",
6422 node);
6423 *no_add_attrs = true;
6424 }
6425
6426 /* `extern' needn't be specified with dllimport.
6427 Specify `extern' now and hope for the best. Sigh. */
6428 DECL_EXTERNAL (node) = 1;
6429 /* Also, implicitly give global scope to dllimport'd variables
6430 declared within a function, unless they are declared static. */
6431 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6432 TREE_PUBLIC (node) = 1;
6433 }
6434
6435 if (*no_add_attrs == false)
6436 DECL_DLLIMPORT_P (node) = 1;
6437 }
6438 else if (TREE_CODE (node) == FUNCTION_DECL
6439 && DECL_DECLARED_INLINE_P (node)
6440 && flag_keep_inline_dllexport)
6441 /* An exported function, even if inline, must be emitted. */
6442 DECL_EXTERNAL (node) = 0;
6443
6444 /* Report error if symbol is not accessible at global scope. */
6445 if (!TREE_PUBLIC (node)
6446 && (TREE_CODE (node) == VAR_DECL
6447 || TREE_CODE (node) == FUNCTION_DECL))
6448 {
6449 error ("external linkage required for symbol %q+D because of "
6450 "%qE attribute", node, name);
6451 *no_add_attrs = true;
6452 }
6453
6454 /* A dllexport'd entity must have default visibility so that other
6455 program units (shared libraries or the main executable) can see
6456 it. A dllimport'd entity must have default visibility so that
6457 the linker knows that undefined references within this program
6458 unit can be resolved by the dynamic linker. */
6459 if (!*no_add_attrs)
6460 {
6461 if (DECL_VISIBILITY_SPECIFIED (node)
6462 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6463 error ("%qE implies default visibility, but %qD has already "
6464 "been declared with a different visibility",
6465 name, node);
6466 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6467 DECL_VISIBILITY_SPECIFIED (node) = 1;
6468 }
6469
6470 return NULL_TREE;
6471 }
6472
6473 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6474 \f
6475 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6476 of the various TYPE_QUAL values. */
6477
6478 static void
6479 set_type_quals (tree type, int type_quals)
6480 {
6481 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6482 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6483 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6484 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6485 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6486 }
6487
6488 /* Returns true iff unqualified CAND and BASE are equivalent. */
6489
6490 bool
6491 check_base_type (const_tree cand, const_tree base)
6492 {
6493 return (TYPE_NAME (cand) == TYPE_NAME (base)
6494 /* Apparently this is needed for Objective-C. */
6495 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6496 /* Check alignment. */
6497 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6498 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6499 TYPE_ATTRIBUTES (base)));
6500 }
6501
6502 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6503
6504 bool
6505 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6506 {
6507 return (TYPE_QUALS (cand) == type_quals
6508 && check_base_type (cand, base));
6509 }
6510
6511 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6512
6513 static bool
6514 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6515 {
6516 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6517 && TYPE_NAME (cand) == TYPE_NAME (base)
6518 /* Apparently this is needed for Objective-C. */
6519 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6520 /* Check alignment. */
6521 && TYPE_ALIGN (cand) == align
6522 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6523 TYPE_ATTRIBUTES (base)));
6524 }
6525
6526 /* This function checks to see if TYPE matches the size of one of the
6527 built-in atomic types, and returns that core atomic type. */
6528
6529 static tree
6530 find_atomic_core_type (tree type)
6531 {
6532 tree base_atomic_type;
6533
6534 /* Only handle complete types. */
6535 if (TYPE_SIZE (type) == NULL_TREE)
6536 return NULL_TREE;
6537
6538 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6539 switch (type_size)
6540 {
6541 case 8:
6542 base_atomic_type = atomicQI_type_node;
6543 break;
6544
6545 case 16:
6546 base_atomic_type = atomicHI_type_node;
6547 break;
6548
6549 case 32:
6550 base_atomic_type = atomicSI_type_node;
6551 break;
6552
6553 case 64:
6554 base_atomic_type = atomicDI_type_node;
6555 break;
6556
6557 case 128:
6558 base_atomic_type = atomicTI_type_node;
6559 break;
6560
6561 default:
6562 base_atomic_type = NULL_TREE;
6563 }
6564
6565 return base_atomic_type;
6566 }
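
/* For instance, on a target where int is 32 bits wide, an _Atomic int
   maps to atomicSI_type_node, so the qualified variant built below can
   inherit that node's (possibly stricter) alignment.  Incomplete types
   and sizes other than 8, 16, 32, 64 or 128 bits yield NULL_TREE.  */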
6567
6568 /* Return a version of the TYPE, qualified as indicated by the
6569 TYPE_QUALS, if one exists. If no qualified version exists yet,
6570 return NULL_TREE. */
6571
6572 tree
6573 get_qualified_type (tree type, int type_quals)
6574 {
6575 tree t;
6576
6577 if (TYPE_QUALS (type) == type_quals)
6578 return type;
6579
6580 /* Search the chain of variants to see if there is already one there just
6581 like the one we need to have. If so, use that existing one. We must
6582 preserve the TYPE_NAME, since there is code that depends on this. */
6583 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6584 if (check_qualified_type (t, type, type_quals))
6585 return t;
6586
6587 return NULL_TREE;
6588 }
6589
6590 /* Like get_qualified_type, but creates the type if it does not
6591 exist. This function never returns NULL_TREE. */
6592
6593 tree
6594 build_qualified_type (tree type, int type_quals)
6595 {
6596 tree t;
6597
6598 /* See if we already have the appropriate qualified variant. */
6599 t = get_qualified_type (type, type_quals);
6600
6601 /* If not, build it. */
6602 if (!t)
6603 {
6604 t = build_variant_type_copy (type);
6605 set_type_quals (t, type_quals);
6606
6607 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6608 {
6609 /* See if this object can map to a basic atomic type. */
6610 tree atomic_type = find_atomic_core_type (type);
6611 if (atomic_type)
6612 {
6613 /* Ensure the alignment of this type is compatible with
6614 the required alignment of the atomic type. */
6615 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6616 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6617 }
6618 }
6619
6620 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6621 /* Propagate structural equality. */
6622 SET_TYPE_STRUCTURAL_EQUALITY (t);
6623 else if (TYPE_CANONICAL (type) != type)
6624 /* Build the underlying canonical type, since it is different
6625 from TYPE. */
6626 {
6627 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6628 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6629 }
6630 else
6631 /* T is its own canonical type. */
6632 TYPE_CANONICAL (t) = t;
6633
6634 }
6635
6636 return t;
6637 }
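
/* A typical use, sketched (SOME_TYPE stands for any complete type):

     tree cv = build_qualified_type (some_type,
				     TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   returns an already-recorded "const volatile" variant when one exists
   on the TYPE_MAIN_VARIANT chain, and otherwise creates a new variant
   with those qualifiers, keeping TYPE_CANONICAL consistent as above.  */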
6638
6639 /* Create a variant of TYPE with alignment ALIGN. */
6640
6641 tree
6642 build_aligned_type (tree type, unsigned int align)
6643 {
6644 tree t;
6645
6646 if (TYPE_PACKED (type)
6647 || TYPE_ALIGN (type) == align)
6648 return type;
6649
6650 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6651 if (check_aligned_type (t, type, align))
6652 return t;
6653
6654 t = build_variant_type_copy (type);
6655 TYPE_ALIGN (t) = align;
6656
6657 return t;
6658 }
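
/* Note that ALIGN is expressed in bits, matching TYPE_ALIGN; for
   example, build_aligned_type (type, 128) produces (or reuses) a
   variant of TYPE aligned to 16 bytes, unless TYPE is packed or
   already has that alignment, in which case TYPE itself is returned.  */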
6659
6660 /* Create a new distinct copy of TYPE. The new type is made its own
6661 MAIN_VARIANT. If TYPE requires structural equality checks, the
6662 resulting type requires structural equality checks; otherwise, its
6663 TYPE_CANONICAL points to itself. */
6664
6665 tree
6666 build_distinct_type_copy (tree type)
6667 {
6668 tree t = copy_node (type);
6669
6670 TYPE_POINTER_TO (t) = 0;
6671 TYPE_REFERENCE_TO (t) = 0;
6672
6673 /* Set the canonical type either to a new equivalence class, or
6674 propagate the need for structural equality checks. */
6675 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6676 SET_TYPE_STRUCTURAL_EQUALITY (t);
6677 else
6678 TYPE_CANONICAL (t) = t;
6679
6680 /* Make it its own variant. */
6681 TYPE_MAIN_VARIANT (t) = t;
6682 TYPE_NEXT_VARIANT (t) = 0;
6683
6684 /* We do not record methods in type copies nor variants,
6685 so we do not need to keep them up to date when a new
6686 method is inserted. */
6687 if (RECORD_OR_UNION_TYPE_P (t))
6688 TYPE_METHODS (t) = NULL_TREE;
6689
6690 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6691 whose TREE_TYPE is not t. This can also happen in the Ada
6692 frontend when using subtypes. */
6693
6694 return t;
6695 }
6696
6697 /* Create a new variant of TYPE, equivalent but distinct. This is so
6698 the caller can modify it. TYPE_CANONICAL for the return type will
6699 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6700 are considered equal by the language itself (or that both types
6701 require structural equality checks). */
6702
6703 tree
6704 build_variant_type_copy (tree type)
6705 {
6706 tree t, m = TYPE_MAIN_VARIANT (type);
6707
6708 t = build_distinct_type_copy (type);
6709
6710 /* Since we're building a variant, assume that it is a non-semantic
6711 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6712 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6713
6714 /* Add the new type to the chain of variants of TYPE. */
6715 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6716 TYPE_NEXT_VARIANT (m) = t;
6717 TYPE_MAIN_VARIANT (t) = m;
6718
6719 return t;
6720 }
6721 \f
6722 /* Return true if the from trees in both tree maps are equal. */
6723
6724 int
6725 tree_map_base_eq (const void *va, const void *vb)
6726 {
6727 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6728 *const b = (const struct tree_map_base *) vb;
6729 return (a->from == b->from);
6730 }
6731
6732 /* Hash a from tree in a tree_map_base. */
6733
6734 unsigned int
6735 tree_map_base_hash (const void *item)
6736 {
6737 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6738 }
6739
6740 /* Return true if this tree map structure is marked for garbage collection
6741 purposes. We simply return true if the from tree is marked, so that this
6742 structure goes away when the from tree goes away. */
6743
6744 int
6745 tree_map_base_marked_p (const void *p)
6746 {
6747 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6748 }
6749
6750 /* Hash a from tree in a tree_map. */
6751
6752 unsigned int
6753 tree_map_hash (const void *item)
6754 {
6755 return (((const struct tree_map *) item)->hash);
6756 }
6757
6758 /* Hash a from tree in a tree_decl_map. */
6759
6760 unsigned int
6761 tree_decl_map_hash (const void *item)
6762 {
6763 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6764 }
6765
6766 /* Return the initialization priority for DECL. */
6767
6768 priority_type
6769 decl_init_priority_lookup (tree decl)
6770 {
6771 symtab_node *snode = symtab_node::get (decl);
6772
6773 if (!snode)
6774 return DEFAULT_INIT_PRIORITY;
6775 return
6776 snode->get_init_priority ();
6777 }
6778
6779 /* Return the finalization priority for DECL. */
6780
6781 priority_type
6782 decl_fini_priority_lookup (tree decl)
6783 {
6784 cgraph_node *node = cgraph_node::get (decl);
6785
6786 if (!node)
6787 return DEFAULT_INIT_PRIORITY;
6788 return
6789 node->get_fini_priority ();
6790 }
6791
6792 /* Set the initialization priority for DECL to PRIORITY. */
6793
6794 void
6795 decl_init_priority_insert (tree decl, priority_type priority)
6796 {
6797 struct symtab_node *snode;
6798
6799 if (priority == DEFAULT_INIT_PRIORITY)
6800 {
6801 snode = symtab_node::get (decl);
6802 if (!snode)
6803 return;
6804 }
6805 else if (TREE_CODE (decl) == VAR_DECL)
6806 snode = varpool_node::get_create (decl);
6807 else
6808 snode = cgraph_node::get_create (decl);
6809 snode->set_init_priority (priority);
6810 }
6811
6812 /* Set the finalization priority for DECL to PRIORITY. */
6813
6814 void
6815 decl_fini_priority_insert (tree decl, priority_type priority)
6816 {
6817 struct cgraph_node *node;
6818
6819 if (priority == DEFAULT_INIT_PRIORITY)
6820 {
6821 node = cgraph_node::get (decl);
6822 if (!node)
6823 return;
6824 }
6825 else
6826 node = cgraph_node::get_create (decl);
6827 node->set_fini_priority (priority);
6828 }
6829
6830 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6831
6832 static void
6833 print_debug_expr_statistics (void)
6834 {
6835 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6836 (long) debug_expr_for_decl->size (),
6837 (long) debug_expr_for_decl->elements (),
6838 debug_expr_for_decl->collisions ());
6839 }
6840
6841 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6842
6843 static void
6844 print_value_expr_statistics (void)
6845 {
6846 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6847 (long) value_expr_for_decl->size (),
6848 (long) value_expr_for_decl->elements (),
6849 value_expr_for_decl->collisions ());
6850 }
6851
6852 /* Lookup a debug expression for FROM, and return it if we find one. */
6853
6854 tree
6855 decl_debug_expr_lookup (tree from)
6856 {
6857 struct tree_decl_map *h, in;
6858 in.base.from = from;
6859
6860 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6861 if (h)
6862 return h->to;
6863 return NULL_TREE;
6864 }
6865
6866 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6867
6868 void
6869 decl_debug_expr_insert (tree from, tree to)
6870 {
6871 struct tree_decl_map *h;
6872
6873 h = ggc_alloc<tree_decl_map> ();
6874 h->base.from = from;
6875 h->to = to;
6876 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6877 }
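
/* A usage sketch (VAR and DECL are placeholders): code that introduces
   a replacement variable VAR standing for DECL records the association
   once with

     decl_debug_expr_insert (var, decl);

   and the debug machinery later retrieves it with
   decl_debug_expr_lookup (var), which returns DECL, or NULL_TREE when
   no mapping was recorded.  */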
6878
6879 /* Lookup a value expression for FROM, and return it if we find one. */
6880
6881 tree
6882 decl_value_expr_lookup (tree from)
6883 {
6884 struct tree_decl_map *h, in;
6885 in.base.from = from;
6886
6887 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6888 if (h)
6889 return h->to;
6890 return NULL_TREE;
6891 }
6892
6893 /* Insert a mapping FROM->TO in the value expression hashtable. */
6894
6895 void
6896 decl_value_expr_insert (tree from, tree to)
6897 {
6898 struct tree_decl_map *h;
6899
6900 h = ggc_alloc<tree_decl_map> ();
6901 h->base.from = from;
6902 h->to = to;
6903 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6904 }
6905
6906 /* Lookup a vector of debug arguments for FROM, and return it if we
6907 find one. */
6908
6909 vec<tree, va_gc> **
6910 decl_debug_args_lookup (tree from)
6911 {
6912 struct tree_vec_map *h, in;
6913
6914 if (!DECL_HAS_DEBUG_ARGS_P (from))
6915 return NULL;
6916 gcc_checking_assert (debug_args_for_decl != NULL);
6917 in.base.from = from;
6918 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6919 if (h)
6920 return &h->to;
6921 return NULL;
6922 }
6923
6924 /* Insert a mapping FROM -> empty vector of debug arguments in the
6925 debug arguments hashtable. */
6926
6927 vec<tree, va_gc> **
6928 decl_debug_args_insert (tree from)
6929 {
6930 struct tree_vec_map *h;
6931 tree_vec_map **loc;
6932
6933 if (DECL_HAS_DEBUG_ARGS_P (from))
6934 return decl_debug_args_lookup (from);
6935 if (debug_args_for_decl == NULL)
6936 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6937 h = ggc_alloc<tree_vec_map> ();
6938 h->base.from = from;
6939 h->to = NULL;
6940 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6941 *loc = h;
6942 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6943 return &h->to;
6944 }
6945
6946 /* Hashing of types so that we don't make duplicates.
6947 The entry point is `type_hash_canon'. */
6948
6949 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6950 with types in the TREE_VALUE slots), by adding the hash codes
6951 of the individual types. */
6952
6953 static void
6954 type_hash_list (const_tree list, inchash::hash &hstate)
6955 {
6956 const_tree tail;
6957
6958 for (tail = list; tail; tail = TREE_CHAIN (tail))
6959 if (TREE_VALUE (tail) != error_mark_node)
6960 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6961 }
6962
6963 /* These are the Hashtable callback functions. */
6964
6965 /* Returns true iff the types are equivalent. */
6966
6967 bool
6968 type_cache_hasher::equal (type_hash *a, type_hash *b)
6969 {
6970 /* First test the things that are the same for all types. */
6971 if (a->hash != b->hash
6972 || TREE_CODE (a->type) != TREE_CODE (b->type)
6973 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6974 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6975 TYPE_ATTRIBUTES (b->type))
6976 || (TREE_CODE (a->type) != COMPLEX_TYPE
6977 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6978 return 0;
6979
6980 /* Be careful about comparing arrays before and after the element type
6981 has been completed; don't compare TYPE_ALIGN unless both types are
6982 complete. */
6983 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6984 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6985 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6986 return 0;
6987
6988 switch (TREE_CODE (a->type))
6989 {
6990 case VOID_TYPE:
6991 case COMPLEX_TYPE:
6992 case POINTER_TYPE:
6993 case REFERENCE_TYPE:
6994 case NULLPTR_TYPE:
6995 return 1;
6996
6997 case VECTOR_TYPE:
6998 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6999
7000 case ENUMERAL_TYPE:
7001 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7002 && !(TYPE_VALUES (a->type)
7003 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7004 && TYPE_VALUES (b->type)
7005 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7006 && type_list_equal (TYPE_VALUES (a->type),
7007 TYPE_VALUES (b->type))))
7008 return 0;
7009
7010 /* ... fall through ... */
7011
7012 case INTEGER_TYPE:
7013 case REAL_TYPE:
7014 case BOOLEAN_TYPE:
7015 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7016 return false;
7017 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7018 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7019 TYPE_MAX_VALUE (b->type)))
7020 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7021 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7022 TYPE_MIN_VALUE (b->type))));
7023
7024 case FIXED_POINT_TYPE:
7025 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7026
7027 case OFFSET_TYPE:
7028 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7029
7030 case METHOD_TYPE:
7031 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7032 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7033 || (TYPE_ARG_TYPES (a->type)
7034 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7035 && TYPE_ARG_TYPES (b->type)
7036 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7037 && type_list_equal (TYPE_ARG_TYPES (a->type),
7038 TYPE_ARG_TYPES (b->type)))))
7039 break;
7040 return 0;
7041 case ARRAY_TYPE:
7042 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7043
7044 case RECORD_TYPE:
7045 case UNION_TYPE:
7046 case QUAL_UNION_TYPE:
7047 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7048 || (TYPE_FIELDS (a->type)
7049 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7050 && TYPE_FIELDS (b->type)
7051 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7052 && type_list_equal (TYPE_FIELDS (a->type),
7053 TYPE_FIELDS (b->type))));
7054
7055 case FUNCTION_TYPE:
7056 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7057 || (TYPE_ARG_TYPES (a->type)
7058 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7059 && TYPE_ARG_TYPES (b->type)
7060 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7061 && type_list_equal (TYPE_ARG_TYPES (a->type),
7062 TYPE_ARG_TYPES (b->type))))
7063 break;
7064 return 0;
7065
7066 default:
7067 return 0;
7068 }
7069
7070 if (lang_hooks.types.type_hash_eq != NULL)
7071 return lang_hooks.types.type_hash_eq (a->type, b->type);
7072
7073 return 1;
7074 }
7075
7076 /* Given TYPE, and HASHCODE its hash code, return the canonical
7077 object for an identical type if one already exists.
7078 Otherwise, return TYPE, and record it as the canonical object.
7079
7080 To use this function, first create a type of the sort you want.
7081 Then compute its hash code from the fields of the type that
7082 make it different from other similar types.
7083 Then call this function and use the value. */
7084
7085 tree
7086 type_hash_canon (unsigned int hashcode, tree type)
7087 {
7088 type_hash in;
7089 type_hash **loc;
7090
7091 /* The hash table only contains main variants, so ensure that's what we're
7092 being passed. */
7093 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7094
7095 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7096 must call that routine before comparing TYPE_ALIGNs. */
7097 layout_type (type);
7098
7099 in.hash = hashcode;
7100 in.type = type;
7101
7102 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7103 if (*loc)
7104 {
7105 tree t1 = ((type_hash *) *loc)->type;
7106 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7107 if (GATHER_STATISTICS)
7108 {
7109 tree_code_counts[(int) TREE_CODE (type)]--;
7110 tree_node_counts[(int) t_kind]--;
7111 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7112 }
7113 return t1;
7114 }
7115 else
7116 {
7117 struct type_hash *h;
7118
7119 h = ggc_alloc<type_hash> ();
7120 h->hash = hashcode;
7121 h->type = type;
7122 *loc = h;
7123
7124 return type;
7125 }
7126 }
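
/* Following the recipe above, a caller typically feeds the
   distinguishing fields of the new node into an inchash::hash object
   and then canonicalizes; roughly (details elided, only a sketch):

     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
     t = type_hash_canon (hstate.end (), t);

   after which T is either the node just built or an identical,
   previously recorded type.  */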
7127
7128 static void
7129 print_type_hash_statistics (void)
7130 {
7131 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7132 (long) type_hash_table->size (),
7133 (long) type_hash_table->elements (),
7134 type_hash_table->collisions ());
7135 }
7136
7137 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7138 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7139 by adding the hash codes of the individual attributes. */
7140
7141 static void
7142 attribute_hash_list (const_tree list, inchash::hash &hstate)
7143 {
7144 const_tree tail;
7145
7146 for (tail = list; tail; tail = TREE_CHAIN (tail))
7147 /* ??? Do we want to add in TREE_VALUE too? */
7148 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7149 }
7150
7151 /* Given two lists of attributes, return true if list L2 is
7152 equivalent to L1. */
7153
7154 int
7155 attribute_list_equal (const_tree l1, const_tree l2)
7156 {
7157 if (l1 == l2)
7158 return 1;
7159
7160 return attribute_list_contained (l1, l2)
7161 && attribute_list_contained (l2, l1);
7162 }
7163
7164 /* Given two lists of attributes, return true if list L2 is
7165 completely contained within L1. */
7166 /* ??? This would be faster if attribute names were stored in a canonicalized
7167 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7168 must be used to show these elements are equivalent (which they are). */
7169 /* ??? It's not clear that attributes with arguments will always be handled
7170 correctly. */
7171
7172 int
7173 attribute_list_contained (const_tree l1, const_tree l2)
7174 {
7175 const_tree t1, t2;
7176
7177 /* First check the obvious, maybe the lists are identical. */
7178 if (l1 == l2)
7179 return 1;
7180
7181 /* Maybe the lists are similar. */
7182 for (t1 = l1, t2 = l2;
7183 t1 != 0 && t2 != 0
7184 && get_attribute_name (t1) == get_attribute_name (t2)
7185 && TREE_VALUE (t1) == TREE_VALUE (t2);
7186 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7187 ;
7188
7189 /* Maybe the lists are equal. */
7190 if (t1 == 0 && t2 == 0)
7191 return 1;
7192
7193 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7194 {
7195 const_tree attr;
7196 /* This CONST_CAST is okay because lookup_attribute does not
7197 modify its argument and the return value is assigned to a
7198 const_tree. */
7199 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7200 CONST_CAST_TREE (l1));
7201 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7202 attr = lookup_ident_attribute (get_attribute_name (t2),
7203 TREE_CHAIN (attr)))
7204 ;
7205
7206 if (attr == NULL_TREE)
7207 return 0;
7208 }
7209
7210 return 1;
7211 }
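
/* For example, the list {format (printf, 1, 2)} is contained in
   {noreturn, format (printf, 1, 2)}: every attribute of L2 occurs in
   L1 with an equal value.  The reverse containment does not hold, so
   attribute_list_equal returns 0 for this pair.  */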
7212
7213 /* Given two lists of types
7214 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7215 return 1 if the lists contain the same types in the same order.
7216 Also, the TREE_PURPOSEs must match. */
7217
7218 int
7219 type_list_equal (const_tree l1, const_tree l2)
7220 {
7221 const_tree t1, t2;
7222
7223 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7224 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7225 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7226 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7227 && (TREE_TYPE (TREE_PURPOSE (t1))
7228 == TREE_TYPE (TREE_PURPOSE (t2))))))
7229 return 0;
7230
7231 return t1 == t2;
7232 }
7233
7234 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7235 given by TYPE. If the argument list accepts variable arguments,
7236 then this function counts only the ordinary arguments. */
7237
7238 int
7239 type_num_arguments (const_tree type)
7240 {
7241 int i = 0;
7242 tree t;
7243
7244 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7245 /* If the function does not take a variable number of arguments,
7246 the last element in the list will have type `void'. */
7247 if (VOID_TYPE_P (TREE_VALUE (t)))
7248 break;
7249 else
7250 ++i;
7251
7252 return i;
7253 }
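
/* For example, for the FUNCTION_TYPE of "int f (int, char *)" the
   argument list is int, char *, void and the result is 2; for the
   variadic "int g (int, ...)" the list has no terminating void and the
   single ordinary argument gives 1.  */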
7254
7255 /* Nonzero if integer constants T1 and T2
7256 represent the same constant value. */
7257
7258 int
7259 tree_int_cst_equal (const_tree t1, const_tree t2)
7260 {
7261 if (t1 == t2)
7262 return 1;
7263
7264 if (t1 == 0 || t2 == 0)
7265 return 0;
7266
7267 if (TREE_CODE (t1) == INTEGER_CST
7268 && TREE_CODE (t2) == INTEGER_CST
7269 && wi::to_widest (t1) == wi::to_widest (t2))
7270 return 1;
7271
7272 return 0;
7273 }
7274
7275 /* Return true if T is an INTEGER_CST whose numerical value (extended
7276 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7277
7278 bool
7279 tree_fits_shwi_p (const_tree t)
7280 {
7281 return (t != NULL_TREE
7282 && TREE_CODE (t) == INTEGER_CST
7283 && wi::fits_shwi_p (wi::to_widest (t)));
7284 }
7285
7286 /* Return true if T is an INTEGER_CST whose numerical value (extended
7287 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7288
7289 bool
7290 tree_fits_uhwi_p (const_tree t)
7291 {
7292 return (t != NULL_TREE
7293 && TREE_CODE (t) == INTEGER_CST
7294 && wi::fits_uhwi_p (wi::to_widest (t)));
7295 }
7296
7297 /* T is an INTEGER_CST whose numerical value (extended according to
7298 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7299 HOST_WIDE_INT. */
7300
7301 HOST_WIDE_INT
7302 tree_to_shwi (const_tree t)
7303 {
7304 gcc_assert (tree_fits_shwi_p (t));
7305 return TREE_INT_CST_LOW (t);
7306 }
7307
7308 /* T is an INTEGER_CST whose numerical value (extended according to
7309 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7310 HOST_WIDE_INT. */
7311
7312 unsigned HOST_WIDE_INT
7313 tree_to_uhwi (const_tree t)
7314 {
7315 gcc_assert (tree_fits_uhwi_p (t));
7316 return TREE_INT_CST_LOW (t);
7317 }
7318
7319 /* Return the most significant (sign) bit of T. */
7320
7321 int
7322 tree_int_cst_sign_bit (const_tree t)
7323 {
7324 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7325
7326 return wi::extract_uhwi (t, bitno, 1);
7327 }
7328
7329 /* Return an indication of the sign of the integer constant T.
7330 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7331 Note that -1 will never be returned if T's type is unsigned. */
7332
7333 int
7334 tree_int_cst_sgn (const_tree t)
7335 {
7336 if (wi::eq_p (t, 0))
7337 return 0;
7338 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7339 return 1;
7340 else if (wi::neg_p (t))
7341 return -1;
7342 else
7343 return 1;
7344 }
7345
7346 /* Return the minimum number of bits needed to represent VALUE in a
7347 signed or unsigned type; SGN says which. */
7348
7349 unsigned int
7350 tree_int_cst_min_precision (tree value, signop sgn)
7351 {
7352 /* If the value is negative, compute its negative minus 1. The latter
7353 adjustment is because the absolute value of the largest negative value
7354 is one larger than the largest positive value. This is equivalent to
7355 a bit-wise negation, so use that operation instead. */
7356
7357 if (tree_int_cst_sgn (value) < 0)
7358 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7359
7360 /* Return the number of bits needed, taking into account the fact
7361 that we need one more bit for a signed than an unsigned type.
7362 If VALUE is 0 or -1, the minimum precision is 1 no matter
7363 whether SGN is SIGNED or UNSIGNED. */
7364
7365 if (integer_zerop (value))
7366 return 1;
7367 else
7368 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7369 }
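
/* Worked examples: VALUE 5 needs 3 bits as unsigned (101) and 4 bits
   as signed; VALUE -3 is first rewritten as ~(-3) == 2, giving
   floor_log2 (2) + 1 == 2 plus one sign bit, i.e. 3 bits (signed range
   -4 .. 3); VALUE 0 and VALUE -1 both come out as precision 1.  */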
7370
7371 /* Return truthvalue of whether T1 is the same tree structure as T2.
7372 Return 1 if they are the same.
7373 Return 0 if they are understandably different.
7374 Return -1 if either contains tree structure not understood by
7375 this function. */
7376
7377 int
7378 simple_cst_equal (const_tree t1, const_tree t2)
7379 {
7380 enum tree_code code1, code2;
7381 int cmp;
7382 int i;
7383
7384 if (t1 == t2)
7385 return 1;
7386 if (t1 == 0 || t2 == 0)
7387 return 0;
7388
7389 code1 = TREE_CODE (t1);
7390 code2 = TREE_CODE (t2);
7391
7392 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7393 {
7394 if (CONVERT_EXPR_CODE_P (code2)
7395 || code2 == NON_LVALUE_EXPR)
7396 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7397 else
7398 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7399 }
7400
7401 else if (CONVERT_EXPR_CODE_P (code2)
7402 || code2 == NON_LVALUE_EXPR)
7403 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7404
7405 if (code1 != code2)
7406 return 0;
7407
7408 switch (code1)
7409 {
7410 case INTEGER_CST:
7411 return wi::to_widest (t1) == wi::to_widest (t2);
7412
7413 case REAL_CST:
7414 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7415
7416 case FIXED_CST:
7417 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7418
7419 case STRING_CST:
7420 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7421 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7422 TREE_STRING_LENGTH (t1)));
7423
7424 case CONSTRUCTOR:
7425 {
7426 unsigned HOST_WIDE_INT idx;
7427 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7428 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7429
7430 if (vec_safe_length (v1) != vec_safe_length (v2))
7431 return false;
7432
7433 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7434 /* ??? Should we also handle fields here? */
7435 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7436 return false;
7437 return true;
7438 }
7439
7440 case SAVE_EXPR:
7441 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7442
7443 case CALL_EXPR:
7444 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7445 if (cmp <= 0)
7446 return cmp;
7447 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7448 return 0;
7449 {
7450 const_tree arg1, arg2;
7451 const_call_expr_arg_iterator iter1, iter2;
7452 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7453 arg2 = first_const_call_expr_arg (t2, &iter2);
7454 arg1 && arg2;
7455 arg1 = next_const_call_expr_arg (&iter1),
7456 arg2 = next_const_call_expr_arg (&iter2))
7457 {
7458 cmp = simple_cst_equal (arg1, arg2);
7459 if (cmp <= 0)
7460 return cmp;
7461 }
7462 return arg1 == arg2;
7463 }
7464
7465 case TARGET_EXPR:
7466 /* Special case: if either target is an unallocated VAR_DECL,
7467 it means that it's going to be unified with whatever the
7468 TARGET_EXPR is really supposed to initialize, so treat it
7469 as being equivalent to anything. */
7470 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7471 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7472 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7473 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7474 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7475 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7476 cmp = 1;
7477 else
7478 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7479
7480 if (cmp <= 0)
7481 return cmp;
7482
7483 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7484
7485 case WITH_CLEANUP_EXPR:
7486 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7487 if (cmp <= 0)
7488 return cmp;
7489
7490 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7491
7492 case COMPONENT_REF:
7493 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7494 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7495
7496 return 0;
7497
7498 case VAR_DECL:
7499 case PARM_DECL:
7500 case CONST_DECL:
7501 case FUNCTION_DECL:
7502 return 0;
7503
7504 default:
7505 break;
7506 }
7507
7508 /* This general rule works for most tree codes. All exceptions should be
7509 handled above. If this is a language-specific tree code, we can't
7510 trust what might be in the operand, so say we don't know
7511 the situation. */
7512 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7513 return -1;
7514
7515 switch (TREE_CODE_CLASS (code1))
7516 {
7517 case tcc_unary:
7518 case tcc_binary:
7519 case tcc_comparison:
7520 case tcc_expression:
7521 case tcc_reference:
7522 case tcc_statement:
7523 cmp = 1;
7524 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7525 {
7526 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7527 if (cmp <= 0)
7528 return cmp;
7529 }
7530
7531 return cmp;
7532
7533 default:
7534 return -1;
7535 }
7536 }
7537
7538 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7539 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7540 than U, respectively. */
7541
7542 int
7543 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7544 {
7545 if (tree_int_cst_sgn (t) < 0)
7546 return -1;
7547 else if (!tree_fits_uhwi_p (t))
7548 return 1;
7549 else if (TREE_INT_CST_LOW (t) == u)
7550 return 0;
7551 else if (TREE_INT_CST_LOW (t) < u)
7552 return -1;
7553 else
7554 return 1;
7555 }
7556
7557 /* Return true if SIZE represents a constant size that is in bounds of
7558 what the middle-end and the backend accepts (covering not more than
7559 half of the address-space). */
7560
7561 bool
7562 valid_constant_size_p (const_tree size)
7563 {
7564 if (! tree_fits_uhwi_p (size)
7565 || TREE_OVERFLOW (size)
7566 || tree_int_cst_sign_bit (size) != 0)
7567 return false;
7568 return true;
7569 }
7570
7571 /* Return the precision of the type, or for a complex or vector type the
7572 precision of the type of its elements. */
7573
7574 unsigned int
7575 element_precision (const_tree type)
7576 {
7577 if (!TYPE_P (type))
7578 type = TREE_TYPE (type);
7579 enum tree_code code = TREE_CODE (type);
7580 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7581 type = TREE_TYPE (type);
7582
7583 return TYPE_PRECISION (type);
7584 }
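
/* For example, a vector type with four 32-bit integer elements reports
   a precision of 32, the precision of its element type rather than of
   the whole vector; a COMPLEX_TYPE likewise reports the precision of
   its component type.  */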
7585
7586 /* Return true if CODE represents an associative tree code. Otherwise
7587 return false. */
7588 bool
7589 associative_tree_code (enum tree_code code)
7590 {
7591 switch (code)
7592 {
7593 case BIT_IOR_EXPR:
7594 case BIT_AND_EXPR:
7595 case BIT_XOR_EXPR:
7596 case PLUS_EXPR:
7597 case MULT_EXPR:
7598 case MIN_EXPR:
7599 case MAX_EXPR:
7600 return true;
7601
7602 default:
7603 break;
7604 }
7605 return false;
7606 }
7607
7608 /* Return true if CODE represents a commutative tree code. Otherwise
7609 return false. */
7610 bool
7611 commutative_tree_code (enum tree_code code)
7612 {
7613 switch (code)
7614 {
7615 case PLUS_EXPR:
7616 case MULT_EXPR:
7617 case MULT_HIGHPART_EXPR:
7618 case MIN_EXPR:
7619 case MAX_EXPR:
7620 case BIT_IOR_EXPR:
7621 case BIT_XOR_EXPR:
7622 case BIT_AND_EXPR:
7623 case NE_EXPR:
7624 case EQ_EXPR:
7625 case UNORDERED_EXPR:
7626 case ORDERED_EXPR:
7627 case UNEQ_EXPR:
7628 case LTGT_EXPR:
7629 case TRUTH_AND_EXPR:
7630 case TRUTH_XOR_EXPR:
7631 case TRUTH_OR_EXPR:
7632 case WIDEN_MULT_EXPR:
7633 case VEC_WIDEN_MULT_HI_EXPR:
7634 case VEC_WIDEN_MULT_LO_EXPR:
7635 case VEC_WIDEN_MULT_EVEN_EXPR:
7636 case VEC_WIDEN_MULT_ODD_EXPR:
7637 return true;
7638
7639 default:
7640 break;
7641 }
7642 return false;
7643 }
7644
7645 /* Return true if CODE represents a ternary tree code for which the
7646 first two operands are commutative. Otherwise return false. */
7647 bool
7648 commutative_ternary_tree_code (enum tree_code code)
7649 {
7650 switch (code)
7651 {
7652 case WIDEN_MULT_PLUS_EXPR:
7653 case WIDEN_MULT_MINUS_EXPR:
7654 case DOT_PROD_EXPR:
7655 case FMA_EXPR:
7656 return true;
7657
7658 default:
7659 break;
7660 }
7661 return false;
7662 }
7663
7664 /* Returns true if CODE can overflow. */
7665
7666 bool
7667 operation_can_overflow (enum tree_code code)
7668 {
7669 switch (code)
7670 {
7671 case PLUS_EXPR:
7672 case MINUS_EXPR:
7673 case MULT_EXPR:
7674 case LSHIFT_EXPR:
7675 /* Can overflow in various ways. */
7676 return true;
7677 case TRUNC_DIV_EXPR:
7678 case EXACT_DIV_EXPR:
7679 case FLOOR_DIV_EXPR:
7680 case CEIL_DIV_EXPR:
7681 /* For INT_MIN / -1. */
7682 return true;
7683 case NEGATE_EXPR:
7684 case ABS_EXPR:
7685 /* For -INT_MIN. */
7686 return true;
7687 default:
7688 /* These operators cannot overflow. */
7689 return false;
7690 }
7691 }
7692
7693 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7694 ftrapv doesn't generate trapping insns for CODE. */
7695
7696 bool
7697 operation_no_trapping_overflow (tree type, enum tree_code code)
7698 {
7699 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7700
7701 /* We don't generate instructions that trap on overflow for complex or vector
7702 types. */
7703 if (!INTEGRAL_TYPE_P (type))
7704 return true;
7705
7706 if (!TYPE_OVERFLOW_TRAPS (type))
7707 return true;
7708
7709 switch (code)
7710 {
7711 case PLUS_EXPR:
7712 case MINUS_EXPR:
7713 case MULT_EXPR:
7714 case NEGATE_EXPR:
7715 case ABS_EXPR:
7716 /* These operators can overflow, and -ftrapv generates trapping code for
7717 these. */
7718 return false;
7719 case TRUNC_DIV_EXPR:
7720 case EXACT_DIV_EXPR:
7721 case FLOOR_DIV_EXPR:
7722 case CEIL_DIV_EXPR:
7723 case LSHIFT_EXPR:
7724 /* These operators can overflow, but -ftrapv does not generate trapping
7725 code for these. */
7726 return true;
7727 default:
7728 /* These operators cannot overflow. */
7729 return true;
7730 }
7731 }
7732
7733 namespace inchash
7734 {
7735
7736 /* Generate a hash value for an expression. This can be used iteratively
7737 by passing a previous result as the HSTATE argument.
7738
7739 This function is intended to produce the same hash for expressions which
7740 would compare equal using operand_equal_p. */
7741 void
7742 add_expr (const_tree t, inchash::hash &hstate)
7743 {
7744 int i;
7745 enum tree_code code;
7746 enum tree_code_class tclass;
7747
7748 if (t == NULL_TREE)
7749 {
7750 hstate.merge_hash (0);
7751 return;
7752 }
7753
7754 code = TREE_CODE (t);
7755
7756 switch (code)
7757 {
7758 /* Alas, constants aren't shared, so we can't rely on pointer
7759 identity. */
7760 case VOID_CST:
7761 hstate.merge_hash (0);
7762 return;
7763 case INTEGER_CST:
7764 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7765 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7766 return;
7767 case REAL_CST:
7768 {
7769 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7770 hstate.merge_hash (val2);
7771 return;
7772 }
7773 case FIXED_CST:
7774 {
7775 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7776 hstate.merge_hash (val2);
7777 return;
7778 }
7779 case STRING_CST:
7780 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7781 return;
7782 case COMPLEX_CST:
7783 inchash::add_expr (TREE_REALPART (t), hstate);
7784 inchash::add_expr (TREE_IMAGPART (t), hstate);
7785 return;
7786 case VECTOR_CST:
7787 {
7788 unsigned i;
7789 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7790 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7791 return;
7792 }
7793 case SSA_NAME:
7794 /* We can just compare by pointer. */
7795 hstate.add_wide_int (SSA_NAME_VERSION (t));
7796 return;
7797 case PLACEHOLDER_EXPR:
7798 /* The node itself doesn't matter. */
7799 return;
7800 case TREE_LIST:
7801 /* A list of expressions, for a CALL_EXPR or as the elements of a
7802 VECTOR_CST. */
7803 for (; t; t = TREE_CHAIN (t))
7804 inchash::add_expr (TREE_VALUE (t), hstate);
7805 return;
7806 case CONSTRUCTOR:
7807 {
7808 unsigned HOST_WIDE_INT idx;
7809 tree field, value;
7810 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7811 {
7812 inchash::add_expr (field, hstate);
7813 inchash::add_expr (value, hstate);
7814 }
7815 return;
7816 }
7817 case FUNCTION_DECL:
7818 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7819 Otherwise nodes that compare equal according to operand_equal_p might
7820 get different hash codes. However, don't do this for machine specific
7821 or front end builtins, since the function code is overloaded in those
7822 cases. */
7823 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7824 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7825 {
7826 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7827 code = TREE_CODE (t);
7828 }
7829 /* FALL THROUGH */
7830 default:
7831 tclass = TREE_CODE_CLASS (code);
7832
7833 if (tclass == tcc_declaration)
7834 {
7835 	  /* DECLs have a unique ID. */
7836 hstate.add_wide_int (DECL_UID (t));
7837 }
7838 else
7839 {
7840 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7841
7842 hstate.add_object (code);
7843
7844 /* Don't hash the type, that can lead to having nodes which
7845 compare equal according to operand_equal_p, but which
7846 have different hash codes. */
7847 if (CONVERT_EXPR_CODE_P (code)
7848 || code == NON_LVALUE_EXPR)
7849 {
7850 	      /* Make sure to include signedness in the hash computation. */
7851 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7852 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7853 }
7854
7855 else if (commutative_tree_code (code))
7856 {
7857 /* It's a commutative expression. We want to hash it the same
7858 however it appears. We do this by first hashing both operands
7859 and then rehashing based on the order of their independent
7860 hashes. */
7861 inchash::hash one, two;
7862 inchash::add_expr (TREE_OPERAND (t, 0), one);
7863 inchash::add_expr (TREE_OPERAND (t, 1), two);
7864 hstate.add_commutative (one, two);
7865 }
7866 else
7867 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7868 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7869 }
7870 return;
7871 }
7872 }
7873
7874 }
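
/* Usage sketch (illustrative): two trees that operand_equal_p considers
   equal are intended to hash identically, e.g.

     inchash::hash h1, h2;
     inchash::add_expr (expr_a, h1);
     inchash::add_expr (expr_b, h2);
     gcc_checking_assert (!operand_equal_p (expr_a, expr_b, 0)
                          || h1.end () == h2.end ());

   where EXPR_A and EXPR_B are hypothetical trees. */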
7875
7876 /* Constructors for pointer, array and function types.
7877 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7878 constructed by language-dependent code, not here.) */
7879
7880 /* Construct, lay out and return the type of pointers to TO_TYPE with
7881 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7882 reference all of memory. If such a type has already been
7883 constructed, reuse it. */
7884
7885 tree
7886 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7887 bool can_alias_all)
7888 {
7889 tree t;
7890 bool could_alias = can_alias_all;
7891
7892 if (to_type == error_mark_node)
7893 return error_mark_node;
7894
7895 /* If the pointed-to type has the may_alias attribute set, force
7896 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7897 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7898 can_alias_all = true;
7899
7900 /* In some cases, languages will have things that aren't a POINTER_TYPE
7901 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7902 In that case, return that type without regard to the rest of our
7903 operands.
7904
7905 ??? This is a kludge, but consistent with the way this function has
7906 always operated and there doesn't seem to be a good way to avoid this
7907 at the moment. */
7908 if (TYPE_POINTER_TO (to_type) != 0
7909 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7910 return TYPE_POINTER_TO (to_type);
7911
7912 /* First, if we already have a type for pointers to TO_TYPE and it's
7913 the proper mode, use it. */
7914 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7915 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7916 return t;
7917
7918 t = make_node (POINTER_TYPE);
7919
7920 TREE_TYPE (t) = to_type;
7921 SET_TYPE_MODE (t, mode);
7922 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7923 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7924 TYPE_POINTER_TO (to_type) = t;
7925
7926 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7927 SET_TYPE_STRUCTURAL_EQUALITY (t);
7928 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7929 TYPE_CANONICAL (t)
7930 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7931 mode, false);
7932
7933 /* Lay out the type. This function has many callers that are concerned
7934 with expression-construction, and this simplifies them all. */
7935 layout_type (t);
7936
7937 return t;
7938 }
7939
7940 /* By default build pointers in ptr_mode. */
7941
7942 tree
7943 build_pointer_type (tree to_type)
7944 {
7945   addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7946 : TYPE_ADDR_SPACE (to_type);
7947 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7948 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7949 }
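
/* Usage sketch (illustrative): a front end building the C type "int *"
   would typically call

     tree int_ptr = build_pointer_type (integer_type_node);

   which returns the POINTER_TYPE node in ptr_mode, reusing an existing
   node when one has already been constructed. */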
7950
7951 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7952
7953 tree
7954 build_reference_type_for_mode (tree to_type, machine_mode mode,
7955 bool can_alias_all)
7956 {
7957 tree t;
7958 bool could_alias = can_alias_all;
7959
7960 if (to_type == error_mark_node)
7961 return error_mark_node;
7962
7963 /* If the pointed-to type has the may_alias attribute set, force
7964      a TYPE_REF_CAN_ALIAS_ALL reference to be generated. */
7965 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7966 can_alias_all = true;
7967
7968 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7969 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7970 In that case, return that type without regard to the rest of our
7971 operands.
7972
7973 ??? This is a kludge, but consistent with the way this function has
7974 always operated and there doesn't seem to be a good way to avoid this
7975 at the moment. */
7976 if (TYPE_REFERENCE_TO (to_type) != 0
7977 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7978 return TYPE_REFERENCE_TO (to_type);
7979
7980   /* First, if we already have a type for references to TO_TYPE and it's
7981 the proper mode, use it. */
7982 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7983 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7984 return t;
7985
7986 t = make_node (REFERENCE_TYPE);
7987
7988 TREE_TYPE (t) = to_type;
7989 SET_TYPE_MODE (t, mode);
7990 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7991 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7992 TYPE_REFERENCE_TO (to_type) = t;
7993
7994 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7995 SET_TYPE_STRUCTURAL_EQUALITY (t);
7996 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7997 TYPE_CANONICAL (t)
7998 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7999 mode, false);
8000
8001 layout_type (t);
8002
8003 return t;
8004 }
8005
8006
8007 /* Build the node for the type of references-to-TO_TYPE by default
8008 in ptr_mode. */
8009
8010 tree
8011 build_reference_type (tree to_type)
8012 {
8013   addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8014 : TYPE_ADDR_SPACE (to_type);
8015 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8016 return build_reference_type_for_mode (to_type, pointer_mode, false);
8017 }
8018
8019 #define MAX_INT_CACHED_PREC \
8020 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8021 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8022
8023 /* Builds a signed or unsigned integer type of precision PRECISION.
8024 Used for C bitfields whose precision does not match that of
8025 built-in target types. */
8026 tree
8027 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8028 int unsignedp)
8029 {
8030 tree itype, ret;
8031
8032 if (unsignedp)
8033 unsignedp = MAX_INT_CACHED_PREC + 1;
8034
8035 if (precision <= MAX_INT_CACHED_PREC)
8036 {
8037 itype = nonstandard_integer_type_cache[precision + unsignedp];
8038 if (itype)
8039 return itype;
8040 }
8041
8042 itype = make_node (INTEGER_TYPE);
8043 TYPE_PRECISION (itype) = precision;
8044
8045 if (unsignedp)
8046 fixup_unsigned_type (itype);
8047 else
8048 fixup_signed_type (itype);
8049
8050 ret = itype;
8051 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8052 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8053 if (precision <= MAX_INT_CACHED_PREC)
8054 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8055
8056 return ret;
8057 }
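
/* Usage sketch (illustrative): the type of an unsigned 24-bit bit-field
   can be obtained with

     tree uint24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);

   repeated calls with the same arguments return the cached node. */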
8058
8059 #define MAX_BOOL_CACHED_PREC \
8060 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8061 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8062
8063 /* Builds a boolean type of precision PRECISION.
8064    Used for boolean vectors to choose the proper vector element size. */
8065 tree
8066 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8067 {
8068 tree type;
8069
8070 if (precision <= MAX_BOOL_CACHED_PREC)
8071 {
8072 type = nonstandard_boolean_type_cache[precision];
8073 if (type)
8074 return type;
8075 }
8076
8077 type = make_node (BOOLEAN_TYPE);
8078 TYPE_PRECISION (type) = precision;
8079 fixup_unsigned_type (type);
8080
8081   if (precision <= MAX_BOOL_CACHED_PREC)
8082 nonstandard_boolean_type_cache[precision] = type;
8083
8084 return type;
8085 }
8086
8087 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8088 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8089 is true, reuse such a type that has already been constructed. */
8090
8091 static tree
8092 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8093 {
8094 tree itype = make_node (INTEGER_TYPE);
8095 inchash::hash hstate;
8096
8097 TREE_TYPE (itype) = type;
8098
8099 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8100 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8101
8102 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8103 SET_TYPE_MODE (itype, TYPE_MODE (type));
8104 TYPE_SIZE (itype) = TYPE_SIZE (type);
8105 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8106 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
8107 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8108
8109 if (!shared)
8110 return itype;
8111
8112 if ((TYPE_MIN_VALUE (itype)
8113 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8114 || (TYPE_MAX_VALUE (itype)
8115 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8116 {
8117 /* Since we cannot reliably merge this type, we need to compare it using
8118 structural equality checks. */
8119 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8120 return itype;
8121 }
8122
8123 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8124 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8125 hstate.merge_hash (TYPE_HASH (type));
8126 itype = type_hash_canon (hstate.end (), itype);
8127
8128 return itype;
8129 }
8130
8131 /* Wrapper around build_range_type_1 with SHARED set to true. */
8132
8133 tree
8134 build_range_type (tree type, tree lowval, tree highval)
8135 {
8136 return build_range_type_1 (type, lowval, highval, true);
8137 }
8138
8139 /* Wrapper around build_range_type_1 with SHARED set to false. */
8140
8141 tree
8142 build_nonshared_range_type (tree type, tree lowval, tree highval)
8143 {
8144 return build_range_type_1 (type, lowval, highval, false);
8145 }
8146
8147 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8148 MAXVAL should be the maximum value in the domain
8149 (one less than the length of the array).
8150
8151 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8152    We don't enforce this limit; that is up to the caller (e.g. the language front end).
8153 The limit exists because the result is a signed type and we don't handle
8154 sizes that use more than one HOST_WIDE_INT. */
8155
8156 tree
8157 build_index_type (tree maxval)
8158 {
8159 return build_range_type (sizetype, size_zero_node, maxval);
8160 }
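
/* Usage sketch (illustrative): the domain of a ten-element array is
   commonly built as

     tree domain = build_index_type (size_int (9));

   i.e. the range [0, 9] in sizetype. */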
8161
8162 /* Return true if the debug information for TYPE, a subtype, should be emitted
8163 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8164 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8165 debug info and doesn't reflect the source code. */
8166
8167 bool
8168 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8169 {
8170 tree base_type = TREE_TYPE (type), low, high;
8171
8172 /* Subrange types have a base type which is an integral type. */
8173 if (!INTEGRAL_TYPE_P (base_type))
8174 return false;
8175
8176 /* Get the real bounds of the subtype. */
8177 if (lang_hooks.types.get_subrange_bounds)
8178 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8179 else
8180 {
8181 low = TYPE_MIN_VALUE (type);
8182 high = TYPE_MAX_VALUE (type);
8183 }
8184
8185 /* If the type and its base type have the same representation and the same
8186 name, then the type is not a subrange but a copy of the base type. */
8187 if ((TREE_CODE (base_type) == INTEGER_TYPE
8188 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8189 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8190 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8191 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8192 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8193 return false;
8194
8195 if (lowval)
8196 *lowval = low;
8197 if (highval)
8198 *highval = high;
8199 return true;
8200 }
8201
8202 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8203 and number of elements specified by the range of values of INDEX_TYPE.
8204 If SHARED is true, reuse such a type that has already been constructed. */
8205
8206 static tree
8207 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8208 {
8209 tree t;
8210
8211 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8212 {
8213 error ("arrays of functions are not meaningful");
8214 elt_type = integer_type_node;
8215 }
8216
8217 t = make_node (ARRAY_TYPE);
8218 TREE_TYPE (t) = elt_type;
8219 TYPE_DOMAIN (t) = index_type;
8220 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8221 layout_type (t);
8222
8223 /* If the element type is incomplete at this point we get marked for
8224 structural equality. Do not record these types in the canonical
8225 type hashtable. */
8226 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8227 return t;
8228
8229 if (shared)
8230 {
8231 inchash::hash hstate;
8232 hstate.add_object (TYPE_HASH (elt_type));
8233 if (index_type)
8234 hstate.add_object (TYPE_HASH (index_type));
8235 t = type_hash_canon (hstate.end (), t);
8236 }
8237
8238 if (TYPE_CANONICAL (t) == t)
8239 {
8240 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8241 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8242 SET_TYPE_STRUCTURAL_EQUALITY (t);
8243 else if (TYPE_CANONICAL (elt_type) != elt_type
8244 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8245 TYPE_CANONICAL (t)
8246 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8247 index_type
8248 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8249 shared);
8250 }
8251
8252 return t;
8253 }
8254
8255 /* Wrapper around build_array_type_1 with SHARED set to true. */
8256
8257 tree
8258 build_array_type (tree elt_type, tree index_type)
8259 {
8260 return build_array_type_1 (elt_type, index_type, true);
8261 }
8262
8263 /* Wrapper around build_array_type_1 with SHARED set to false. */
8264
8265 tree
8266 build_nonshared_array_type (tree elt_type, tree index_type)
8267 {
8268 return build_array_type_1 (elt_type, index_type, false);
8269 }
8270
8271 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8272 sizetype. */
8273
8274 tree
8275 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8276 {
8277 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8278 }
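
/* Usage sketch (illustrative): the C type "int[10]" can be built either
   explicitly as

     build_array_type (integer_type_node, build_index_type (size_int (9)));

   or with the convenience wrapper above,

     build_array_type_nelts (integer_type_node, 10);  */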
8279
8280 /* Strips ARRAY_TYPE wrappers from TYPE until a non-array element type is
8281    found, and returns that element type. */
8282
8283 tree
8284 strip_array_types (tree type)
8285 {
8286 while (TREE_CODE (type) == ARRAY_TYPE)
8287 type = TREE_TYPE (type);
8288
8289 return type;
8290 }
8291
8292 /* Computes the canonical argument types from the argument type list
8293 ARGTYPES.
8294
8295 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8296 on entry to this function, or if any of the ARGTYPES are
8297 structural.
8298
8299 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8300 true on entry to this function, or if any of the ARGTYPES are
8301 non-canonical.
8302
8303 Returns a canonical argument list, which may be ARGTYPES when the
8304 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8305 true) or would not differ from ARGTYPES. */
8306
8307 static tree
8308 maybe_canonicalize_argtypes (tree argtypes,
8309 bool *any_structural_p,
8310 bool *any_noncanonical_p)
8311 {
8312 tree arg;
8313 bool any_noncanonical_argtypes_p = false;
8314
8315 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8316 {
8317 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8318 /* Fail gracefully by stating that the type is structural. */
8319 *any_structural_p = true;
8320 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8321 *any_structural_p = true;
8322 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8323 || TREE_PURPOSE (arg))
8324 /* If the argument has a default argument, we consider it
8325 non-canonical even though the type itself is canonical.
8326 That way, different variants of function and method types
8327 with default arguments will all point to the variant with
8328 no defaults as their canonical type. */
8329 any_noncanonical_argtypes_p = true;
8330 }
8331
8332 if (*any_structural_p)
8333 return argtypes;
8334
8335 if (any_noncanonical_argtypes_p)
8336 {
8337 /* Build the canonical list of argument types. */
8338 tree canon_argtypes = NULL_TREE;
8339 bool is_void = false;
8340
8341 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8342 {
8343 if (arg == void_list_node)
8344 is_void = true;
8345 else
8346 canon_argtypes = tree_cons (NULL_TREE,
8347 TYPE_CANONICAL (TREE_VALUE (arg)),
8348 canon_argtypes);
8349 }
8350
8351 canon_argtypes = nreverse (canon_argtypes);
8352 if (is_void)
8353 canon_argtypes = chainon (canon_argtypes, void_list_node);
8354
8355 /* There is a non-canonical type. */
8356 *any_noncanonical_p = true;
8357 return canon_argtypes;
8358 }
8359
8360 /* The canonical argument types are the same as ARGTYPES. */
8361 return argtypes;
8362 }
8363
8364 /* Construct, lay out and return
8365 the type of functions returning type VALUE_TYPE
8366 given arguments of types ARG_TYPES.
8367 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8368 are data type nodes for the arguments of the function.
8369 If such a type has already been constructed, reuse it. */
8370
8371 tree
8372 build_function_type (tree value_type, tree arg_types)
8373 {
8374 tree t;
8375 inchash::hash hstate;
8376 bool any_structural_p, any_noncanonical_p;
8377 tree canon_argtypes;
8378
8379 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8380 {
8381 error ("function return type cannot be function");
8382 value_type = integer_type_node;
8383 }
8384
8385 /* Make a node of the sort we want. */
8386 t = make_node (FUNCTION_TYPE);
8387 TREE_TYPE (t) = value_type;
8388 TYPE_ARG_TYPES (t) = arg_types;
8389
8390 /* If we already have such a type, use the old one. */
8391 hstate.add_object (TYPE_HASH (value_type));
8392 type_hash_list (arg_types, hstate);
8393 t = type_hash_canon (hstate.end (), t);
8394
8395 /* Set up the canonical type. */
8396 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8397 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8398 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8399 &any_structural_p,
8400 &any_noncanonical_p);
8401 if (any_structural_p)
8402 SET_TYPE_STRUCTURAL_EQUALITY (t);
8403 else if (any_noncanonical_p)
8404 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8405 canon_argtypes);
8406
8407 if (!COMPLETE_TYPE_P (t))
8408 layout_type (t);
8409 return t;
8410 }
8411
8412 /* Build a function type. The RETURN_TYPE is the type returned by the
8413    function. If VAARGS is set, no void_type_node is appended to the
8414    list. ARGP must always be terminated by a NULL_TREE. */
8415
8416 static tree
8417 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8418 {
8419 tree t, args, last;
8420
8421 t = va_arg (argp, tree);
8422 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8423 args = tree_cons (NULL_TREE, t, args);
8424
8425 if (vaargs)
8426 {
8427 last = args;
8428 if (args != NULL_TREE)
8429 args = nreverse (args);
8430 gcc_assert (last != void_list_node);
8431 }
8432 else if (args == NULL_TREE)
8433 args = void_list_node;
8434 else
8435 {
8436 last = args;
8437 args = nreverse (args);
8438 TREE_CHAIN (last) = void_list_node;
8439 }
8440 args = build_function_type (return_type, args);
8441
8442 return args;
8443 }
8444
8445 /* Build a function type. The RETURN_TYPE is the type returned by the
8446 function. If additional arguments are provided, they are
8447 additional argument types. The list of argument types must always
8448 be terminated by NULL_TREE. */
8449
8450 tree
8451 build_function_type_list (tree return_type, ...)
8452 {
8453 tree args;
8454 va_list p;
8455
8456 va_start (p, return_type);
8457 args = build_function_type_list_1 (false, return_type, p);
8458 va_end (p);
8459 return args;
8460 }
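
/* Usage sketch (illustrative): the type of "int f (double, char *)" can
   be built as

     tree fntype
       = build_function_type_list (integer_type_node, double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   the trailing NULL_TREE terminator is mandatory. */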
8461
8462 /* Build a variable argument function type. The RETURN_TYPE is the
8463 type returned by the function. If additional arguments are provided,
8464 they are additional argument types. The list of argument types must
8465 always be terminated by NULL_TREE. */
8466
8467 tree
8468 build_varargs_function_type_list (tree return_type, ...)
8469 {
8470 tree args;
8471 va_list p;
8472
8473 va_start (p, return_type);
8474 args = build_function_type_list_1 (true, return_type, p);
8475 va_end (p);
8476
8477 return args;
8478 }
8479
8480 /* Build a function type. RETURN_TYPE is the type returned by the
8481 function; VAARGS indicates whether the function takes varargs. The
8482 function takes N named arguments, the types of which are provided in
8483 ARG_TYPES. */
8484
8485 static tree
8486 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8487 tree *arg_types)
8488 {
8489 int i;
8490 tree t = vaargs ? NULL_TREE : void_list_node;
8491
8492 for (i = n - 1; i >= 0; i--)
8493 t = tree_cons (NULL_TREE, arg_types[i], t);
8494
8495 return build_function_type (return_type, t);
8496 }
8497
8498 /* Build a function type. RETURN_TYPE is the type returned by the
8499 function. The function takes N named arguments, the types of which
8500 are provided in ARG_TYPES. */
8501
8502 tree
8503 build_function_type_array (tree return_type, int n, tree *arg_types)
8504 {
8505 return build_function_type_array_1 (false, return_type, n, arg_types);
8506 }
8507
8508 /* Build a variable argument function type. RETURN_TYPE is the type
8509 returned by the function. The function takes N named arguments, the
8510 types of which are provided in ARG_TYPES. */
8511
8512 tree
8513 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8514 {
8515 return build_function_type_array_1 (true, return_type, n, arg_types);
8516 }
8517
8518 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8519 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8520 for the method. An implicit additional parameter (of type
8521 pointer-to-BASETYPE) is added to the ARGTYPES. */
8522
8523 tree
8524 build_method_type_directly (tree basetype,
8525 tree rettype,
8526 tree argtypes)
8527 {
8528 tree t;
8529 tree ptype;
8530 inchash::hash hstate;
8531 bool any_structural_p, any_noncanonical_p;
8532 tree canon_argtypes;
8533
8534 /* Make a node of the sort we want. */
8535 t = make_node (METHOD_TYPE);
8536
8537 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8538 TREE_TYPE (t) = rettype;
8539 ptype = build_pointer_type (basetype);
8540
8541 /* The actual arglist for this function includes a "hidden" argument
8542 which is "this". Put it into the list of argument types. */
8543 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8544 TYPE_ARG_TYPES (t) = argtypes;
8545
8546 /* If we already have such a type, use the old one. */
8547 hstate.add_object (TYPE_HASH (basetype));
8548 hstate.add_object (TYPE_HASH (rettype));
8549 type_hash_list (argtypes, hstate);
8550 t = type_hash_canon (hstate.end (), t);
8551
8552 /* Set up the canonical type. */
8553 any_structural_p
8554 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8555 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8556 any_noncanonical_p
8557 = (TYPE_CANONICAL (basetype) != basetype
8558 || TYPE_CANONICAL (rettype) != rettype);
8559 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8560 &any_structural_p,
8561 &any_noncanonical_p);
8562 if (any_structural_p)
8563 SET_TYPE_STRUCTURAL_EQUALITY (t);
8564 else if (any_noncanonical_p)
8565 TYPE_CANONICAL (t)
8566 = build_method_type_directly (TYPE_CANONICAL (basetype),
8567 TYPE_CANONICAL (rettype),
8568 canon_argtypes);
8569 if (!COMPLETE_TYPE_P (t))
8570 layout_type (t);
8571
8572 return t;
8573 }
8574
8575 /* Construct, lay out and return the type of methods belonging to class
8576 BASETYPE and whose arguments and values are described by TYPE.
8577 If that type exists already, reuse it.
8578 TYPE must be a FUNCTION_TYPE node. */
8579
8580 tree
8581 build_method_type (tree basetype, tree type)
8582 {
8583 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8584
8585 return build_method_type_directly (basetype,
8586 TREE_TYPE (type),
8587 TYPE_ARG_TYPES (type));
8588 }
8589
8590 /* Construct, lay out and return the type of offsets to a value
8591 of type TYPE, within an object of type BASETYPE.
8592 If a suitable offset type exists already, reuse it. */
8593
8594 tree
8595 build_offset_type (tree basetype, tree type)
8596 {
8597 tree t;
8598 inchash::hash hstate;
8599
8600 /* Make a node of the sort we want. */
8601 t = make_node (OFFSET_TYPE);
8602
8603 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8604 TREE_TYPE (t) = type;
8605
8606 /* If we already have such a type, use the old one. */
8607 hstate.add_object (TYPE_HASH (basetype));
8608 hstate.add_object (TYPE_HASH (type));
8609 t = type_hash_canon (hstate.end (), t);
8610
8611 if (!COMPLETE_TYPE_P (t))
8612 layout_type (t);
8613
8614 if (TYPE_CANONICAL (t) == t)
8615 {
8616 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8617 || TYPE_STRUCTURAL_EQUALITY_P (type))
8618 SET_TYPE_STRUCTURAL_EQUALITY (t);
8619 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8620 || TYPE_CANONICAL (type) != type)
8621 TYPE_CANONICAL (t)
8622 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8623 TYPE_CANONICAL (type));
8624 }
8625
8626 return t;
8627 }
8628
8629 /* Create a complex type whose components are COMPONENT_TYPE. */
8630
8631 tree
8632 build_complex_type (tree component_type)
8633 {
8634 tree t;
8635 inchash::hash hstate;
8636
8637 gcc_assert (INTEGRAL_TYPE_P (component_type)
8638 || SCALAR_FLOAT_TYPE_P (component_type)
8639 || FIXED_POINT_TYPE_P (component_type));
8640
8641 /* Make a node of the sort we want. */
8642 t = make_node (COMPLEX_TYPE);
8643
8644 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8645
8646 /* If we already have such a type, use the old one. */
8647 hstate.add_object (TYPE_HASH (component_type));
8648 t = type_hash_canon (hstate.end (), t);
8649
8650 if (!COMPLETE_TYPE_P (t))
8651 layout_type (t);
8652
8653 if (TYPE_CANONICAL (t) == t)
8654 {
8655 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8656 SET_TYPE_STRUCTURAL_EQUALITY (t);
8657 else if (TYPE_CANONICAL (component_type) != component_type)
8658 TYPE_CANONICAL (t)
8659 = build_complex_type (TYPE_CANONICAL (component_type));
8660 }
8661
8662 /* We need to create a name, since complex is a fundamental type. */
8663 if (! TYPE_NAME (t))
8664 {
8665 const char *name;
8666 if (component_type == char_type_node)
8667 name = "complex char";
8668 else if (component_type == signed_char_type_node)
8669 name = "complex signed char";
8670 else if (component_type == unsigned_char_type_node)
8671 name = "complex unsigned char";
8672 else if (component_type == short_integer_type_node)
8673 name = "complex short int";
8674 else if (component_type == short_unsigned_type_node)
8675 name = "complex short unsigned int";
8676 else if (component_type == integer_type_node)
8677 name = "complex int";
8678 else if (component_type == unsigned_type_node)
8679 name = "complex unsigned int";
8680 else if (component_type == long_integer_type_node)
8681 name = "complex long int";
8682 else if (component_type == long_unsigned_type_node)
8683 name = "complex long unsigned int";
8684 else if (component_type == long_long_integer_type_node)
8685 name = "complex long long int";
8686 else if (component_type == long_long_unsigned_type_node)
8687 name = "complex long long unsigned int";
8688 else
8689 name = 0;
8690
8691 if (name != 0)
8692 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8693 get_identifier (name), t);
8694 }
8695
8696 return build_qualified_type (t, TYPE_QUALS (component_type));
8697 }
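
/* Usage sketch (illustrative): the C type "_Complex double" corresponds
   to

     build_complex_type (double_type_node);

   which is also available as the global complex_double_type_node. */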
8698
8699 /* If TYPE is a real or complex floating-point type and the target
8700 does not directly support arithmetic on TYPE then return the wider
8701 type to be used for arithmetic on TYPE. Otherwise, return
8702 NULL_TREE. */
8703
8704 tree
8705 excess_precision_type (tree type)
8706 {
8707 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8708 {
8709 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8710 switch (TREE_CODE (type))
8711 {
8712 case REAL_TYPE:
8713 switch (flt_eval_method)
8714 {
8715 case 1:
8716 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8717 return double_type_node;
8718 break;
8719 case 2:
8720 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8721 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8722 return long_double_type_node;
8723 break;
8724 default:
8725 gcc_unreachable ();
8726 }
8727 break;
8728 case COMPLEX_TYPE:
8729 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8730 return NULL_TREE;
8731 switch (flt_eval_method)
8732 {
8733 case 1:
8734 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8735 return complex_double_type_node;
8736 break;
8737 case 2:
8738 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8739 || (TYPE_MODE (TREE_TYPE (type))
8740 == TYPE_MODE (double_type_node)))
8741 return complex_long_double_type_node;
8742 break;
8743 default:
8744 gcc_unreachable ();
8745 }
8746 break;
8747 default:
8748 break;
8749 }
8750 }
8751 return NULL_TREE;
8752 }
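
/* Example (illustrative): on a target whose FLT_EVAL_METHOD is 2 (x87
   floating point, say) and with -fexcess-precision=standard in effect,

     excess_precision_type (float_type_node)

   returns long_double_type_node, so arithmetic on float operands is
   carried out in long double. */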
8753 \f
8754 /* Return OP, stripped of any conversions to wider types as much as is safe.
8755 Converting the value back to OP's type makes a value equivalent to OP.
8756
8757 If FOR_TYPE is nonzero, we return a value which, if converted to
8758 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8759
8760 OP must have integer, real or enumeral type. Pointers are not allowed!
8761
8762 There are some cases where the obvious value we could return
8763 would regenerate to OP if converted to OP's type,
8764 but would not extend like OP to wider types.
8765 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8766 For example, if OP is (unsigned short)(signed char)-1,
8767 we avoid returning (signed char)-1 if FOR_TYPE is int,
8768 even though extending that to an unsigned short would regenerate OP,
8769 since the result of extending (signed char)-1 to (int)
8770 is different from (int) OP. */
8771
8772 tree
8773 get_unwidened (tree op, tree for_type)
8774 {
8775 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8776 tree type = TREE_TYPE (op);
8777 unsigned final_prec
8778 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8779 int uns
8780 = (for_type != 0 && for_type != type
8781 && final_prec > TYPE_PRECISION (type)
8782 && TYPE_UNSIGNED (type));
8783 tree win = op;
8784
8785 while (CONVERT_EXPR_P (op))
8786 {
8787 int bitschange;
8788
8789 /* TYPE_PRECISION on vector types has different meaning
8790 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8791 so avoid them here. */
8792 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8793 break;
8794
8795 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8796 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8797
8798       /* Truncations are many-one so cannot be removed,
8799 	 unless we are later going to truncate down even further. */
8800 if (bitschange < 0
8801 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8802 break;
8803
8804 /* See what's inside this conversion. If we decide to strip it,
8805 we will set WIN. */
8806 op = TREE_OPERAND (op, 0);
8807
8808 /* If we have not stripped any zero-extensions (uns is 0),
8809 we can strip any kind of extension.
8810 If we have previously stripped a zero-extension,
8811 only zero-extensions can safely be stripped.
8812 Any extension can be stripped if the bits it would produce
8813 are all going to be discarded later by truncating to FOR_TYPE. */
8814
8815 if (bitschange > 0)
8816 {
8817 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8818 win = op;
8819 /* TYPE_UNSIGNED says whether this is a zero-extension.
8820 Let's avoid computing it if it does not affect WIN
8821 and if UNS will not be needed again. */
8822 if ((uns
8823 || CONVERT_EXPR_P (op))
8824 && TYPE_UNSIGNED (TREE_TYPE (op)))
8825 {
8826 uns = 1;
8827 win = op;
8828 }
8829 }
8830 }
8831
8832 /* If we finally reach a constant see if it fits in for_type and
8833 in that case convert it. */
8834 if (for_type
8835 && TREE_CODE (win) == INTEGER_CST
8836 && TREE_TYPE (win) != for_type
8837 && int_fits_type_p (win, for_type))
8838 win = fold_convert (for_type, win);
8839
8840 return win;
8841 }
8842 \f
8843 /* Return OP or a simpler expression for a narrower value
8844 which can be sign-extended or zero-extended to give back OP.
8845 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8846 or 0 if the value should be sign-extended. */
8847
8848 tree
8849 get_narrower (tree op, int *unsignedp_ptr)
8850 {
8851 int uns = 0;
8852 int first = 1;
8853 tree win = op;
8854 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8855
8856 while (TREE_CODE (op) == NOP_EXPR)
8857 {
8858 int bitschange
8859 = (TYPE_PRECISION (TREE_TYPE (op))
8860 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8861
8862 /* Truncations are many-one so cannot be removed. */
8863 if (bitschange < 0)
8864 break;
8865
8866 /* See what's inside this conversion. If we decide to strip it,
8867 we will set WIN. */
8868
8869 if (bitschange > 0)
8870 {
8871 op = TREE_OPERAND (op, 0);
8872 /* An extension: the outermost one can be stripped,
8873 but remember whether it is zero or sign extension. */
8874 if (first)
8875 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8876 /* Otherwise, if a sign extension has been stripped,
8877 only sign extensions can now be stripped;
8878 if a zero extension has been stripped, only zero-extensions. */
8879 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8880 break;
8881 first = 0;
8882 }
8883 else /* bitschange == 0 */
8884 {
8885 /* A change in nominal type can always be stripped, but we must
8886 preserve the unsignedness. */
8887 if (first)
8888 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8889 first = 0;
8890 op = TREE_OPERAND (op, 0);
8891 /* Keep trying to narrow, but don't assign op to win if it
8892 would turn an integral type into something else. */
8893 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8894 continue;
8895 }
8896
8897 win = op;
8898 }
8899
8900 if (TREE_CODE (op) == COMPONENT_REF
8901 /* Since type_for_size always gives an integer type. */
8902 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8903 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8904 /* Ensure field is laid out already. */
8905 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8906 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8907 {
8908 unsigned HOST_WIDE_INT innerprec
8909 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8910 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8911 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8912 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8913
8914 /* We can get this structure field in a narrower type that fits it,
8915 but the resulting extension to its nominal type (a fullword type)
8916 must satisfy the same conditions as for other extensions.
8917
8918 Do this only for fields that are aligned (not bit-fields),
8919 because when bit-field insns will be used there is no
8920 advantage in doing this. */
8921
8922 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8923 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8924 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8925 && type != 0)
8926 {
8927 if (first)
8928 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8929 win = fold_convert (type, op);
8930 }
8931 }
8932
8933 *unsignedp_ptr = uns;
8934 return win;
8935 }
8936 \f
8937 /* Returns true if integer constant C has a value that is permissible
8938 for type TYPE (an INTEGER_TYPE). */
8939
8940 bool
8941 int_fits_type_p (const_tree c, const_tree type)
8942 {
8943 tree type_low_bound, type_high_bound;
8944 bool ok_for_low_bound, ok_for_high_bound;
8945 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8946
8947 retry:
8948 type_low_bound = TYPE_MIN_VALUE (type);
8949 type_high_bound = TYPE_MAX_VALUE (type);
8950
8951 /* If at least one bound of the type is a constant integer, we can check
8952 ourselves and maybe make a decision. If no such decision is possible, but
8953 this type is a subtype, try checking against that. Otherwise, use
8954 fits_to_tree_p, which checks against the precision.
8955
8956 Compute the status for each possibly constant bound, and return if we see
8957      one does not match. Use ok_for_xxx_bound to record whether C is known to
8958      satisfy the corresponding constant bound; if C is known to violate such a
8959      bound, return false immediately. */
8960
8961 /* Check if c >= type_low_bound. */
8962 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8963 {
8964 if (tree_int_cst_lt (c, type_low_bound))
8965 return false;
8966 ok_for_low_bound = true;
8967 }
8968 else
8969 ok_for_low_bound = false;
8970
8971 /* Check if c <= type_high_bound. */
8972 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8973 {
8974 if (tree_int_cst_lt (type_high_bound, c))
8975 return false;
8976 ok_for_high_bound = true;
8977 }
8978 else
8979 ok_for_high_bound = false;
8980
8981 /* If the constant fits both bounds, the result is known. */
8982 if (ok_for_low_bound && ok_for_high_bound)
8983 return true;
8984
8985 /* Perform some generic filtering which may allow making a decision
8986 even if the bounds are not constant. First, negative integers
8987      never fit in unsigned types. */
8988 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8989 return false;
8990
8991 /* Second, narrower types always fit in wider ones. */
8992 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8993 return true;
8994
8995 /* Third, unsigned integers with top bit set never fit signed types. */
8996 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8997 {
8998 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8999 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9000 {
9001 /* When a tree_cst is converted to a wide-int, the precision
9002 is taken from the type. However, if the precision of the
9003 mode underneath the type is smaller than that, it is
9004 possible that the value will not fit. The test below
9005 fails if any bit is set between the sign bit of the
9006 underlying mode and the top bit of the type. */
9007 if (wi::ne_p (wi::zext (c, prec - 1), c))
9008 return false;
9009 }
9010 else if (wi::neg_p (c))
9011 return false;
9012 }
9013
9014   /* If we haven't been able to decide at this point, there is nothing more we
9015 can check ourselves here. Look at the base type if we have one and it
9016 has the same precision. */
9017 if (TREE_CODE (type) == INTEGER_TYPE
9018 && TREE_TYPE (type) != 0
9019 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9020 {
9021 type = TREE_TYPE (type);
9022 goto retry;
9023 }
9024
9025 /* Or to fits_to_tree_p, if nothing else. */
9026 return wi::fits_to_tree_p (c, type);
9027 }
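
/* Usage sketch (illustrative):

     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      unsigned_char_type_node)

   returns false, because 300 exceeds the maximum value of unsigned char,
   whereas the same call with 200 returns true. */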
9028
9029 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9030 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9031 represented (assuming two's-complement arithmetic) within the bit
9032 precision of the type are returned instead. */
9033
9034 void
9035 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9036 {
9037 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9038 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9039 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9040 else
9041 {
9042 if (TYPE_UNSIGNED (type))
9043 mpz_set_ui (min, 0);
9044 else
9045 {
9046 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9047 wi::to_mpz (mn, min, SIGNED);
9048 }
9049 }
9050
9051 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9052 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9053 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9054 else
9055 {
9056 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9057 wi::to_mpz (mn, max, TYPE_SIGN (type));
9058 }
9059 }
9060
9061 /* Return true if VAR is an automatic variable defined in function FN. */
9062
9063 bool
9064 auto_var_in_fn_p (const_tree var, const_tree fn)
9065 {
9066 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9067 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9068 || TREE_CODE (var) == PARM_DECL)
9069 && ! TREE_STATIC (var))
9070 || TREE_CODE (var) == LABEL_DECL
9071 || TREE_CODE (var) == RESULT_DECL));
9072 }
9073
9074 /* Subprogram of following function. Called by walk_tree.
9075
9076 Return *TP if it is an automatic variable or parameter of the
9077 function passed in as DATA. */
9078
9079 static tree
9080 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9081 {
9082 tree fn = (tree) data;
9083
9084 if (TYPE_P (*tp))
9085 *walk_subtrees = 0;
9086
9087 else if (DECL_P (*tp)
9088 && auto_var_in_fn_p (*tp, fn))
9089 return *tp;
9090
9091 return NULL_TREE;
9092 }
9093
9094 /* Returns true if T is, contains, or refers to a type with variable
9095 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9096 arguments, but not the return type. If FN is nonzero, only return
9097 true if a modifier of the type or position of FN is a variable or
9098 parameter inside FN.
9099
9100 This concept is more general than that of C99 'variably modified types':
9101 in C99, a struct type is never variably modified because a VLA may not
9102    appear as a structure member. However, in GNU C, code like:
9103
9104 struct S { int i[f()]; };
9105
9106 is valid, and other languages may define similar constructs. */
9107
9108 bool
9109 variably_modified_type_p (tree type, tree fn)
9110 {
9111 tree t;
9112
9113 /* Test if T is either variable (if FN is zero) or an expression containing
9114 a variable in FN. If TYPE isn't gimplified, return true also if
9115 gimplify_one_sizepos would gimplify the expression into a local
9116 variable. */
9117 #define RETURN_TRUE_IF_VAR(T) \
9118 do { tree _t = (T); \
9119 if (_t != NULL_TREE \
9120 && _t != error_mark_node \
9121 && TREE_CODE (_t) != INTEGER_CST \
9122 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9123 && (!fn \
9124 || (!TYPE_SIZES_GIMPLIFIED (type) \
9125 && !is_gimple_sizepos (_t)) \
9126 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9127 return true; } while (0)
9128
9129 if (type == error_mark_node)
9130 return false;
9131
9132 /* If TYPE itself has variable size, it is variably modified. */
9133 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9134 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9135
9136 switch (TREE_CODE (type))
9137 {
9138 case POINTER_TYPE:
9139 case REFERENCE_TYPE:
9140 case VECTOR_TYPE:
9141 if (variably_modified_type_p (TREE_TYPE (type), fn))
9142 return true;
9143 break;
9144
9145 case FUNCTION_TYPE:
9146 case METHOD_TYPE:
9147 /* If TYPE is a function type, it is variably modified if the
9148 return type is variably modified. */
9149 if (variably_modified_type_p (TREE_TYPE (type), fn))
9150 return true;
9151 break;
9152
9153 case INTEGER_TYPE:
9154 case REAL_TYPE:
9155 case FIXED_POINT_TYPE:
9156 case ENUMERAL_TYPE:
9157 case BOOLEAN_TYPE:
9158 /* Scalar types are variably modified if their end points
9159 aren't constant. */
9160 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9161 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9162 break;
9163
9164 case RECORD_TYPE:
9165 case UNION_TYPE:
9166 case QUAL_UNION_TYPE:
9167 /* We can't see if any of the fields are variably-modified by the
9168 definition we normally use, since that would produce infinite
9169 recursion via pointers. */
9170 /* This is variably modified if some field's type is. */
9171 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9172 if (TREE_CODE (t) == FIELD_DECL)
9173 {
9174 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9175 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9176 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9177
9178 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9179 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9180 }
9181 break;
9182
9183 case ARRAY_TYPE:
9184 /* Do not call ourselves to avoid infinite recursion. This is
9185 variably modified if the element type is. */
9186 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9187 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9188 break;
9189
9190 default:
9191 break;
9192 }
9193
9194 /* The current language may have other cases to check, but in general,
9195 all other types are not variably modified. */
9196 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9197
9198 #undef RETURN_TRUE_IF_VAR
9199 }
9200
9201 /* Given a DECL or TYPE, return the scope in which it was declared, or
9202 NULL_TREE if there is no containing scope. */
9203
9204 tree
9205 get_containing_scope (const_tree t)
9206 {
9207 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9208 }
9209
9210 /* Return the innermost context enclosing DECL that is
9211 a FUNCTION_DECL, or zero if none. */
9212
9213 tree
9214 decl_function_context (const_tree decl)
9215 {
9216 tree context;
9217
9218 if (TREE_CODE (decl) == ERROR_MARK)
9219 return 0;
9220
9221 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9222 where we look up the function at runtime. Such functions always take
9223 a first argument of type 'pointer to real context'.
9224
9225 C++ should really be fixed to use DECL_CONTEXT for the real context,
9226 and use something else for the "virtual context". */
9227 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9228 context
9229 = TYPE_MAIN_VARIANT
9230 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9231 else
9232 context = DECL_CONTEXT (decl);
9233
9234 while (context && TREE_CODE (context) != FUNCTION_DECL)
9235 {
9236 if (TREE_CODE (context) == BLOCK)
9237 context = BLOCK_SUPERCONTEXT (context);
9238 else
9239 context = get_containing_scope (context);
9240 }
9241
9242 return context;
9243 }
9244
9245 /* Return the innermost context enclosing DECL that is
9246 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9247 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9248
9249 tree
9250 decl_type_context (const_tree decl)
9251 {
9252 tree context = DECL_CONTEXT (decl);
9253
9254 while (context)
9255 switch (TREE_CODE (context))
9256 {
9257 case NAMESPACE_DECL:
9258 case TRANSLATION_UNIT_DECL:
9259 return NULL_TREE;
9260
9261 case RECORD_TYPE:
9262 case UNION_TYPE:
9263 case QUAL_UNION_TYPE:
9264 return context;
9265
9266 case TYPE_DECL:
9267 case FUNCTION_DECL:
9268 context = DECL_CONTEXT (context);
9269 break;
9270
9271 case BLOCK:
9272 context = BLOCK_SUPERCONTEXT (context);
9273 break;
9274
9275 default:
9276 gcc_unreachable ();
9277 }
9278
9279 return NULL_TREE;
9280 }
9281
9282 /* CALL is a CALL_EXPR. Return the declaration for the function
9283 called, or NULL_TREE if the called function cannot be
9284 determined. */
9285
9286 tree
9287 get_callee_fndecl (const_tree call)
9288 {
9289 tree addr;
9290
9291 if (call == error_mark_node)
9292 return error_mark_node;
9293
9294 /* It's invalid to call this function with anything but a
9295 CALL_EXPR. */
9296 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9297
9298 /* The first operand to the CALL is the address of the function
9299 called. */
9300 addr = CALL_EXPR_FN (call);
9301
9302 /* If there is no function, return early. */
9303 if (addr == NULL_TREE)
9304 return NULL_TREE;
9305
9306 STRIP_NOPS (addr);
9307
9308 /* If this is a readonly function pointer, extract its initial value. */
9309 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9310 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9311 && DECL_INITIAL (addr))
9312 addr = DECL_INITIAL (addr);
9313
9314 /* If the address is just `&f' for some function `f', then we know
9315 that `f' is being called. */
9316 if (TREE_CODE (addr) == ADDR_EXPR
9317 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9318 return TREE_OPERAND (addr, 0);
9319
9320 /* We couldn't figure out what was being called. */
9321 return NULL_TREE;
9322 }
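
/* Usage sketch (illustrative): for a direct call written as "foo (x)"
   the CALL_EXPR_FN operand is &foo, so

     tree fndecl = get_callee_fndecl (call);

   yields the FUNCTION_DECL of foo, while a call through an arbitrary
   function pointer yields NULL_TREE. CALL is a hypothetical CALL_EXPR. */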
9323
9324 #define TREE_MEM_USAGE_SPACES 40
9325
9326 /* Print debugging information about tree nodes generated during the compile,
9327 and any language-specific information. */
9328
9329 void
9330 dump_tree_statistics (void)
9331 {
9332 if (GATHER_STATISTICS)
9333 {
9334 int i;
9335 int total_nodes, total_bytes;
9336 fprintf (stderr, "\nKind Nodes Bytes\n");
9337 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9338 total_nodes = total_bytes = 0;
9339 for (i = 0; i < (int) all_kinds; i++)
9340 {
9341 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9342 tree_node_counts[i], tree_node_sizes[i]);
9343 total_nodes += tree_node_counts[i];
9344 total_bytes += tree_node_sizes[i];
9345 }
9346 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9347 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9348 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9349 fprintf (stderr, "Code Nodes\n");
9350 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9351 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9352 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9353 tree_code_counts[i]);
9354 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9355 fprintf (stderr, "\n");
9356 ssanames_print_statistics ();
9357 fprintf (stderr, "\n");
9358 phinodes_print_statistics ();
9359 fprintf (stderr, "\n");
9360 }
9361 else
9362 fprintf (stderr, "(No per-node statistics)\n");
9363
9364 print_type_hash_statistics ();
9365 print_debug_expr_statistics ();
9366 print_value_expr_statistics ();
9367 lang_hooks.print_statistics ();
9368 }
9369 \f
9370 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9371
9372 /* Fold the BITS most significant bits of VALUE into the crc32 checksum CHKSUM. */
9373
9374 static unsigned
9375 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9376 {
9377 unsigned ix;
9378
9379 for (ix = bits; ix--; value <<= 1)
9380 {
9381 unsigned feedback;
9382
9383 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9384 chksum <<= 1;
9385 chksum ^= feedback;
9386 }
9387 return chksum;
9388 }
9389
9390 /* Generate a crc32 of a 32-bit unsigned. */
9391
9392 unsigned
9393 crc32_unsigned (unsigned chksum, unsigned value)
9394 {
9395 return crc32_unsigned_bits (chksum, value, 32);
9396 }
9397
9398 /* Generate a crc32 of a byte. */
9399
9400 unsigned
9401 crc32_byte (unsigned chksum, char byte)
9402 {
9403 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9404 }
9405
9406 /* Generate a crc32 of a string. */
9407
9408 unsigned
9409 crc32_string (unsigned chksum, const char *string)
9410 {
9411 do
9412 {
9413 chksum = crc32_byte (chksum, *string);
9414 }
9415 while (*string++);
9416 return chksum;
9417 }
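
/* Usage sketch (illustrative): checksums are seeded with zero and
   chained, as in get_file_function_name below:

     unsigned chksum = crc32_string (0, name);

   and further data can be folded in with crc32_unsigned or crc32_byte. */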
9418
9419 /* P is a string that will be used in a symbol. Mask out any characters
9420 that are not valid in that context. */
9421
9422 void
9423 clean_symbol_name (char *p)
9424 {
9425 for (; *p; p++)
9426 if (! (ISALNUM (*p)
9427 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9428 || *p == '$'
9429 #endif
9430 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9431 || *p == '.'
9432 #endif
9433 ))
9434 *p = '_';
9435 }
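
/* Usage sketch (illustrative): clean_symbol_name modifies its argument
   in place, so callers hand it a writable copy, e.g.

     char *q = ASTRDUP (lbasename (file));
     clean_symbol_name (q);

   after which every character not valid in a symbol has been replaced
   by '_'. FILE is a hypothetical file name. */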
9436
9437 /* For anonymous aggregate types, we need some sort of name to
9438 hold on to. In practice, this should not appear, but it should
9439 not be harmful if it does. */
9440 bool
9441 anon_aggrname_p(const_tree id_node)
9442 {
9443 #ifndef NO_DOT_IN_LABEL
9444 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9445 && IDENTIFIER_POINTER (id_node)[1] == '_');
9446 #else /* NO_DOT_IN_LABEL */
9447 #ifndef NO_DOLLAR_IN_LABEL
9448 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9449 && IDENTIFIER_POINTER (id_node)[1] == '_');
9450 #else /* NO_DOLLAR_IN_LABEL */
9451 #define ANON_AGGRNAME_PREFIX "__anon_"
9452 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9453 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9454 #endif /* NO_DOLLAR_IN_LABEL */
9455 #endif /* NO_DOT_IN_LABEL */
9456 }
9457
9458 /* Return a format for an anonymous aggregate name. */
9459 const char *
9460 anon_aggrname_format()
9461 {
9462 #ifndef NO_DOT_IN_LABEL
9463 return "._%d";
9464 #else /* NO_DOT_IN_LABEL */
9465 #ifndef NO_DOLLAR_IN_LABEL
9466 return "$_%d";
9467 #else /* NO_DOLLAR_IN_LABEL */
9468 return "__anon_%d";
9469 #endif /* NO_DOLLAR_IN_LABEL */
9470 #endif /* NO_DOT_IN_LABEL */
9471 }
9472
9473 /* Generate a name for a special-purpose function.
9474 The generated name may need to be unique across the whole link.
9475 Changes to this function may also require corresponding changes to
9476 xstrdup_mask_random.
9477 TYPE is some string to identify the purpose of this function to the
9478 linker or collect2; it must start with an uppercase letter,
9479 one of:
9480 I - for constructors
9481 D - for destructors
9482 N - for C++ anonymous namespaces
9483 F - for DWARF unwind frame information. */
9484
9485 tree
9486 get_file_function_name (const char *type)
9487 {
9488 char *buf;
9489 const char *p;
9490 char *q;
9491
9492 /* If we already have a name we know to be unique, just use that. */
9493 if (first_global_object_name)
9494 p = q = ASTRDUP (first_global_object_name);
9495 /* If the target is handling the constructors/destructors, they
9496 will be local to this file and the name is only necessary for
9497 debugging purposes.
9498      We also assign sub_I and sub_D suffixes to constructors called from
9499 the global static constructors. These are always local. */
9500 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9501 || (strncmp (type, "sub_", 4) == 0
9502 && (type[4] == 'I' || type[4] == 'D')))
9503 {
9504 const char *file = main_input_filename;
9505 if (! file)
9506 file = LOCATION_FILE (input_location);
9507 /* Just use the file's basename, because the full pathname
9508 might be quite long. */
9509 p = q = ASTRDUP (lbasename (file));
9510 }
9511 else
9512 {
9513 /* Otherwise, the name must be unique across the entire link.
9514 We don't have anything that we know to be unique to this translation
9515 unit, so use what we do have and throw in some randomness. */
9516 unsigned len;
9517 const char *name = weak_global_object_name;
9518 const char *file = main_input_filename;
9519
9520 if (! name)
9521 name = "";
9522 if (! file)
9523 file = LOCATION_FILE (input_location);
9524
9525 len = strlen (file);
9526 q = (char *) alloca (9 + 17 + len + 1);
9527 memcpy (q, file, len + 1);
9528
9529 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9530 crc32_string (0, name), get_random_seed (false));
9531
9532 p = q;
9533 }
9534
9535 clean_symbol_name (q);
9536 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9537 + strlen (type));
9538
9539 /* Set up the name of the file-level functions we may need.
9540 Use a global object (which is already required to be unique over
9541 the program) rather than the file name (which imposes extra
9542 constraints). */
9543 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9544
9545 return get_identifier (buf);
9546 }
9547 \f
9548 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9549
9550 /* Complain that the tree code of NODE does not match the expected 0
9551 terminated list of trailing codes. The trailing code list can be
9552 empty, for a more vague error message. FILE, LINE, and FUNCTION
9553 are of the caller. */
9554
9555 void
9556 tree_check_failed (const_tree node, const char *file,
9557 int line, const char *function, ...)
9558 {
9559 va_list args;
9560 const char *buffer;
9561 unsigned length = 0;
9562 enum tree_code code;
9563
9564 va_start (args, function);
9565 while ((code = (enum tree_code) va_arg (args, int)))
9566 length += 4 + strlen (get_tree_code_name (code));
9567 va_end (args);
9568 if (length)
9569 {
9570 char *tmp;
9571 va_start (args, function);
9572 length += strlen ("expected ");
9573 buffer = tmp = (char *) alloca (length);
9574 length = 0;
9575 while ((code = (enum tree_code) va_arg (args, int)))
9576 {
9577 const char *prefix = length ? " or " : "expected ";
9578
9579 strcpy (tmp + length, prefix);
9580 length += strlen (prefix);
9581 strcpy (tmp + length, get_tree_code_name (code));
9582 length += strlen (get_tree_code_name (code));
9583 }
9584 va_end (args);
9585 }
9586 else
9587 buffer = "unexpected node";
9588
9589 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9590 buffer, get_tree_code_name (TREE_CODE (node)),
9591 function, trim_filename (file), line);
9592 }
9593
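/* Illustrative sketch (assumption): roughly how the checking accessors in
   tree.h funnel a code mismatch into tree_check_failed.  The real macros
   pass __FILE__, __LINE__ and __FUNCTION__; the trailing code list is
   zero-terminated.  */
#if 0
static inline tree
tree_check_sketch (tree t, const char *file, int line,
                   const char *function, enum tree_code code)
{
  if (TREE_CODE (t) != code)
    tree_check_failed (t, file, line, function, code, 0);
  return t;
}
#endif
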
9594 /* Complain that the tree code of NODE matches one of the codes in the
9595 0-terminated list of trailing codes, none of which it should be.
9596 FILE, LINE, and FUNCTION are of the caller. */
9597
9598 void
9599 tree_not_check_failed (const_tree node, const char *file,
9600 int line, const char *function, ...)
9601 {
9602 va_list args;
9603 char *buffer;
9604 unsigned length = 0;
9605 enum tree_code code;
9606
9607 va_start (args, function);
9608 while ((code = (enum tree_code) va_arg (args, int)))
9609 length += 4 + strlen (get_tree_code_name (code));
9610 va_end (args);
9611 va_start (args, function);
9612 buffer = (char *) alloca (length);
9613 length = 0;
9614 while ((code = (enum tree_code) va_arg (args, int)))
9615 {
9616 if (length)
9617 {
9618 strcpy (buffer + length, " or ");
9619 length += 4;
9620 }
9621 strcpy (buffer + length, get_tree_code_name (code));
9622 length += strlen (get_tree_code_name (code));
9623 }
9624 va_end (args);
9625
9626 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9627 buffer, get_tree_code_name (TREE_CODE (node)),
9628 function, trim_filename (file), line);
9629 }
9630
9631 /* Similar to tree_check_failed, except that we check for a class of tree
9632 code, given in CL. */
9633
9634 void
9635 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9636 const char *file, int line, const char *function)
9637 {
9638 internal_error
9639 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9640 TREE_CODE_CLASS_STRING (cl),
9641 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9642 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9643 }
9644
9645 /* Similar to tree_check_failed, except that instead of specifying a
9646 dozen codes, use the knowledge that they're all sequential. */
9647
9648 void
9649 tree_range_check_failed (const_tree node, const char *file, int line,
9650 const char *function, enum tree_code c1,
9651 enum tree_code c2)
9652 {
9653 char *buffer;
9654 unsigned length = 0;
9655 unsigned int c;
9656
9657 for (c = c1; c <= c2; ++c)
9658 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9659
9660 length += strlen ("expected ");
9661 buffer = (char *) alloca (length);
9662 length = 0;
9663
9664 for (c = c1; c <= c2; ++c)
9665 {
9666 const char *prefix = length ? " or " : "expected ";
9667
9668 strcpy (buffer + length, prefix);
9669 length += strlen (prefix);
9670 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9671 length += strlen (get_tree_code_name ((enum tree_code) c));
9672 }
9673
9674 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9675 buffer, get_tree_code_name (TREE_CODE (node)),
9676 function, trim_filename (file), line);
9677 }
9678
9679
9680 /* Similar to tree_check_failed, except that we check that a tree does
9681 not belong to the specified class of tree codes, given in CL. */
9682
9683 void
9684 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9685 const char *file, int line, const char *function)
9686 {
9687 internal_error
9688 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9689 TREE_CODE_CLASS_STRING (cl),
9690 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9691 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9692 }
9693
9694
9695 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9696
9697 void
9698 omp_clause_check_failed (const_tree node, const char *file, int line,
9699 const char *function, enum omp_clause_code code)
9700 {
9701 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9702 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9703 function, trim_filename (file), line);
9704 }
9705
9706
9707 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9708
9709 void
9710 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9711 const char *function, enum omp_clause_code c1,
9712 enum omp_clause_code c2)
9713 {
9714 char *buffer;
9715 unsigned length = 0;
9716 unsigned int c;
9717
9718 for (c = c1; c <= c2; ++c)
9719 length += 4 + strlen (omp_clause_code_name[c]);
9720
9721 length += strlen ("expected ");
9722 buffer = (char *) alloca (length);
9723 length = 0;
9724
9725 for (c = c1; c <= c2; ++c)
9726 {
9727 const char *prefix = length ? " or " : "expected ";
9728
9729 strcpy (buffer + length, prefix);
9730 length += strlen (prefix);
9731 strcpy (buffer + length, omp_clause_code_name[c]);
9732 length += strlen (omp_clause_code_name[c]);
9733 }
9734
9735 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9736 buffer, omp_clause_code_name[TREE_CODE (node)],
9737 function, trim_filename (file), line);
9738 }
9739
9740
9741 #undef DEFTREESTRUCT
9742 #define DEFTREESTRUCT(VAL, NAME) NAME,
9743
9744 static const char *ts_enum_names[] = {
9745 #include "treestruct.def"
9746 };
9747 #undef DEFTREESTRUCT
9748
9749 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9750
9751 /* Similar to tree_class_check_failed, except that we check for
9752 whether CODE contains the tree structure identified by EN. */
9753
9754 void
9755 tree_contains_struct_check_failed (const_tree node,
9756 const enum tree_node_structure_enum en,
9757 const char *file, int line,
9758 const char *function)
9759 {
9760 internal_error
9761 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9762 TS_ENUM_NAME (en),
9763 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9764 }
9765
9766
9767 /* Similar to above, except that the check is for the bounds of an
9768 INTEGER_CST's (dynamically sized) value vector. */
9769
9770 void
9771 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9772 const char *function)
9773 {
9774 internal_error
9775 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9776 idx + 1, len, function, trim_filename (file), line);
9777 }
9778
9779 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9780 (dynamically sized) vector. */
9781
9782 void
9783 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9784 const char *function)
9785 {
9786 internal_error
9787 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9788 idx + 1, len, function, trim_filename (file), line);
9789 }
9790
9791 /* Similar to above, except that the check is for the bounds of the operand
9792 vector of an expression node EXP. */
9793
9794 void
9795 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9796 int line, const char *function)
9797 {
9798 enum tree_code code = TREE_CODE (exp);
9799 internal_error
9800 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9801 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9802 function, trim_filename (file), line);
9803 }
9804
9805 /* Similar to above, except that the check is for the number of
9806 operands of an OMP_CLAUSE node. */
9807
9808 void
9809 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9810 int line, const char *function)
9811 {
9812 internal_error
9813 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9814 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9815 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9816 trim_filename (file), line);
9817 }
9818 #endif /* ENABLE_TREE_CHECKING */
9819 \f
9820 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9821 and mapped to the machine mode MODE. Initialize its fields and build
9822 the information necessary for debugging output. */
9823
9824 static tree
9825 make_vector_type (tree innertype, int nunits, machine_mode mode)
9826 {
9827 tree t;
9828 inchash::hash hstate;
9829
9830 t = make_node (VECTOR_TYPE);
9831 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9832 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9833 SET_TYPE_MODE (t, mode);
9834
9835 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9836 SET_TYPE_STRUCTURAL_EQUALITY (t);
9837 else if ((TYPE_CANONICAL (innertype) != innertype
9838 || mode != VOIDmode)
9839 && !VECTOR_BOOLEAN_TYPE_P (t))
9840 TYPE_CANONICAL (t)
9841 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9842
9843 layout_type (t);
9844
9845 hstate.add_wide_int (VECTOR_TYPE);
9846 hstate.add_wide_int (nunits);
9847 hstate.add_wide_int (mode);
9848 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9849 t = type_hash_canon (hstate.end (), t);
9850
9851 /* We have built a main variant, based on the main variant of the
9852 inner type. Use it to build the variant we return. */
9853 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9854 && TREE_TYPE (t) != innertype)
9855 return build_type_attribute_qual_variant (t,
9856 TYPE_ATTRIBUTES (innertype),
9857 TYPE_QUALS (innertype));
9858
9859 return t;
9860 }
9861
9862 static tree
9863 make_or_reuse_type (unsigned size, int unsignedp)
9864 {
9865 int i;
9866
9867 if (size == INT_TYPE_SIZE)
9868 return unsignedp ? unsigned_type_node : integer_type_node;
9869 if (size == CHAR_TYPE_SIZE)
9870 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9871 if (size == SHORT_TYPE_SIZE)
9872 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9873 if (size == LONG_TYPE_SIZE)
9874 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9875 if (size == LONG_LONG_TYPE_SIZE)
9876 return (unsignedp ? long_long_unsigned_type_node
9877 : long_long_integer_type_node);
9878
9879 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9880 if (size == int_n_data[i].bitsize
9881 && int_n_enabled_p[i])
9882 return (unsignedp ? int_n_trees[i].unsigned_type
9883 : int_n_trees[i].signed_type);
9884
9885 if (unsignedp)
9886 return make_unsigned_type (size);
9887 else
9888 return make_signed_type (size);
9889 }
9890
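/* Illustrative sketch (assumption): on a target where int is 32 bits wide,
   both calls below hand back the shared common nodes instead of
   allocating new types.  */
#if 0
static void
make_or_reuse_type_sketch (void)
{
  tree u32 = make_or_reuse_type (32, 1);  /* unsigned_type_node on such targets */
  tree s32 = make_or_reuse_type (32, 0);  /* integer_type_node on such targets  */
  (void) u32;
  (void) s32;
}
#endif
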
9891 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9892
9893 static tree
9894 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9895 {
9896 if (satp)
9897 {
9898 if (size == SHORT_FRACT_TYPE_SIZE)
9899 return unsignedp ? sat_unsigned_short_fract_type_node
9900 : sat_short_fract_type_node;
9901 if (size == FRACT_TYPE_SIZE)
9902 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9903 if (size == LONG_FRACT_TYPE_SIZE)
9904 return unsignedp ? sat_unsigned_long_fract_type_node
9905 : sat_long_fract_type_node;
9906 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9907 return unsignedp ? sat_unsigned_long_long_fract_type_node
9908 : sat_long_long_fract_type_node;
9909 }
9910 else
9911 {
9912 if (size == SHORT_FRACT_TYPE_SIZE)
9913 return unsignedp ? unsigned_short_fract_type_node
9914 : short_fract_type_node;
9915 if (size == FRACT_TYPE_SIZE)
9916 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9917 if (size == LONG_FRACT_TYPE_SIZE)
9918 return unsignedp ? unsigned_long_fract_type_node
9919 : long_fract_type_node;
9920 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9921 return unsignedp ? unsigned_long_long_fract_type_node
9922 : long_long_fract_type_node;
9923 }
9924
9925 return make_fract_type (size, unsignedp, satp);
9926 }
9927
9928 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9929
9930 static tree
9931 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9932 {
9933 if (satp)
9934 {
9935 if (size == SHORT_ACCUM_TYPE_SIZE)
9936 return unsignedp ? sat_unsigned_short_accum_type_node
9937 : sat_short_accum_type_node;
9938 if (size == ACCUM_TYPE_SIZE)
9939 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9940 if (size == LONG_ACCUM_TYPE_SIZE)
9941 return unsignedp ? sat_unsigned_long_accum_type_node
9942 : sat_long_accum_type_node;
9943 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9944 return unsignedp ? sat_unsigned_long_long_accum_type_node
9945 : sat_long_long_accum_type_node;
9946 }
9947 else
9948 {
9949 if (size == SHORT_ACCUM_TYPE_SIZE)
9950 return unsignedp ? unsigned_short_accum_type_node
9951 : short_accum_type_node;
9952 if (size == ACCUM_TYPE_SIZE)
9953 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9954 if (size == LONG_ACCUM_TYPE_SIZE)
9955 return unsignedp ? unsigned_long_accum_type_node
9956 : long_accum_type_node;
9957 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9958 return unsignedp ? unsigned_long_long_accum_type_node
9959 : long_long_accum_type_node;
9960 }
9961
9962 return make_accum_type (size, unsignedp, satp);
9963 }
9964
9965
9966 /* Create an atomic variant node for TYPE. This routine is called
9967 during initialization of data types to create the 5 basic atomic
9968 types. The generic build_variant_type function requires these to
9969 already be set up in order to function properly, so cannot be
9970 called from there. If ALIGN is non-zero, then ensure alignment is
9971 overridden to this value. */
9972
9973 static tree
9974 build_atomic_base (tree type, unsigned int align)
9975 {
9976 tree t;
9977
9978 /* Make sure it's not already registered. */
9979 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9980 return t;
9981
9982 t = build_variant_type_copy (type);
9983 set_type_quals (t, TYPE_QUAL_ATOMIC);
9984
9985 if (align)
9986 TYPE_ALIGN (t) = align;
9987
9988 return t;
9989 }
9990
9991 /* Create nodes for all integer types (and error_mark_node) using the sizes
9992 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9993 SHORT_DOUBLE specifies whether double should be of the same precision
9994 as float. */
9995
9996 void
9997 build_common_tree_nodes (bool signed_char, bool short_double)
9998 {
9999 int i;
10000
10001 error_mark_node = make_node (ERROR_MARK);
10002 TREE_TYPE (error_mark_node) = error_mark_node;
10003
10004 initialize_sizetypes ();
10005
10006 /* Define both `signed char' and `unsigned char'. */
10007 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10008 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10009 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10010 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10011
10012 /* Define `char', which is like either `signed char' or `unsigned char'
10013 but not the same as either. */
10014 char_type_node
10015 = (signed_char
10016 ? make_signed_type (CHAR_TYPE_SIZE)
10017 : make_unsigned_type (CHAR_TYPE_SIZE));
10018 TYPE_STRING_FLAG (char_type_node) = 1;
10019
10020 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10021 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10022 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10023 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10024 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10025 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10026 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10027 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10028
10029 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10030 {
10031 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10032 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10033 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10034 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10035
10036 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10037 && int_n_enabled_p[i])
10038 {
10039 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10040 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10041 }
10042 }
10043
10044 /* Define a boolean type. This type only represents boolean values but
10045 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10046 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10047 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10048 TYPE_PRECISION (boolean_type_node) = 1;
10049 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10050
10051 /* Define what type to use for size_t. */
10052 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10053 size_type_node = unsigned_type_node;
10054 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10055 size_type_node = long_unsigned_type_node;
10056 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10057 size_type_node = long_long_unsigned_type_node;
10058 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10059 size_type_node = short_unsigned_type_node;
10060 else
10061 {
10062 int i;
10063
10064 size_type_node = NULL_TREE;
10065 for (i = 0; i < NUM_INT_N_ENTS; i++)
10066 if (int_n_enabled_p[i])
10067 {
10068 char name[50];
10069 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10070
10071 if (strcmp (name, SIZE_TYPE) == 0)
10072 {
10073 size_type_node = int_n_trees[i].unsigned_type;
10074 }
10075 }
10076 if (size_type_node == NULL_TREE)
10077 gcc_unreachable ();
10078 }
10079
10080 /* Fill in the rest of the sized types. Reuse existing type nodes
10081 when possible. */
10082 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10083 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10084 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10085 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10086 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10087
10088 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10089 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10090 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10091 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10092 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10093
10094 /* Don't call build_qualified_type for atomics. That routine does
10095 special processing for atomics, and until they are initialized
10096 it's better not to make that call.
10097
10098 Check to see if there is a target override for atomic types. */
10099
10100 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10101 targetm.atomic_align_for_mode (QImode));
10102 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10103 targetm.atomic_align_for_mode (HImode));
10104 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10105 targetm.atomic_align_for_mode (SImode));
10106 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10107 targetm.atomic_align_for_mode (DImode));
10108 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10109 targetm.atomic_align_for_mode (TImode));
10110
10111 access_public_node = get_identifier ("public");
10112 access_protected_node = get_identifier ("protected");
10113 access_private_node = get_identifier ("private");
10114
10115 /* Define these next since types below may use them. */
10116 integer_zero_node = build_int_cst (integer_type_node, 0);
10117 integer_one_node = build_int_cst (integer_type_node, 1);
10118 integer_three_node = build_int_cst (integer_type_node, 3);
10119 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10120
10121 size_zero_node = size_int (0);
10122 size_one_node = size_int (1);
10123 bitsize_zero_node = bitsize_int (0);
10124 bitsize_one_node = bitsize_int (1);
10125 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10126
10127 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10128 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10129
10130 void_type_node = make_node (VOID_TYPE);
10131 layout_type (void_type_node);
10132
10133 pointer_bounds_type_node = targetm.chkp_bound_type ();
10134
10135 /* We are not going to have real types in C with less than byte alignment,
10136 so we might as well not have any types that claim to have it. */
10137 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10138 TYPE_USER_ALIGN (void_type_node) = 0;
10139
10140 void_node = make_node (VOID_CST);
10141 TREE_TYPE (void_node) = void_type_node;
10142
10143 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10144 layout_type (TREE_TYPE (null_pointer_node));
10145
10146 ptr_type_node = build_pointer_type (void_type_node);
10147 const_ptr_type_node
10148 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10149 fileptr_type_node = ptr_type_node;
10150
10151 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10152
10153 float_type_node = make_node (REAL_TYPE);
10154 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10155 layout_type (float_type_node);
10156
10157 double_type_node = make_node (REAL_TYPE);
10158 if (short_double)
10159 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
10160 else
10161 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10162 layout_type (double_type_node);
10163
10164 long_double_type_node = make_node (REAL_TYPE);
10165 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10166 layout_type (long_double_type_node);
10167
10168 float_ptr_type_node = build_pointer_type (float_type_node);
10169 double_ptr_type_node = build_pointer_type (double_type_node);
10170 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10171 integer_ptr_type_node = build_pointer_type (integer_type_node);
10172
10173 /* Fixed size integer types. */
10174 uint16_type_node = make_or_reuse_type (16, 1);
10175 uint32_type_node = make_or_reuse_type (32, 1);
10176 uint64_type_node = make_or_reuse_type (64, 1);
10177
10178 /* Decimal float types. */
10179 dfloat32_type_node = make_node (REAL_TYPE);
10180 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10181 layout_type (dfloat32_type_node);
10182 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10183 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10184
10185 dfloat64_type_node = make_node (REAL_TYPE);
10186 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10187 layout_type (dfloat64_type_node);
10188 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10189 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10190
10191 dfloat128_type_node = make_node (REAL_TYPE);
10192 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10193 layout_type (dfloat128_type_node);
10194 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10195 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10196
10197 complex_integer_type_node = build_complex_type (integer_type_node);
10198 complex_float_type_node = build_complex_type (float_type_node);
10199 complex_double_type_node = build_complex_type (double_type_node);
10200 complex_long_double_type_node = build_complex_type (long_double_type_node);
10201
10202 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10203 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10204 sat_ ## KIND ## _type_node = \
10205 make_sat_signed_ ## KIND ## _type (SIZE); \
10206 sat_unsigned_ ## KIND ## _type_node = \
10207 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10208 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10209 unsigned_ ## KIND ## _type_node = \
10210 make_unsigned_ ## KIND ## _type (SIZE);
10211
10212 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10213 sat_ ## WIDTH ## KIND ## _type_node = \
10214 make_sat_signed_ ## KIND ## _type (SIZE); \
10215 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10216 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10217 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10218 unsigned_ ## WIDTH ## KIND ## _type_node = \
10219 make_unsigned_ ## KIND ## _type (SIZE);
10220
10221 /* Make fixed-point type nodes based on four different widths. */
10222 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10223 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10224 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10225 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10226 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10227
10228 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10229 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10230 NAME ## _type_node = \
10231 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10232 u ## NAME ## _type_node = \
10233 make_or_reuse_unsigned_ ## KIND ## _type \
10234 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10235 sat_ ## NAME ## _type_node = \
10236 make_or_reuse_sat_signed_ ## KIND ## _type \
10237 (GET_MODE_BITSIZE (MODE ## mode)); \
10238 sat_u ## NAME ## _type_node = \
10239 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10240 (GET_MODE_BITSIZE (U ## MODE ## mode));
10241
10242 /* Fixed-point type and mode nodes. */
10243 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10244 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10245 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10246 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10247 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10248 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10249 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10250 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10251 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10252 MAKE_FIXED_MODE_NODE (accum, da, DA)
10253 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10254
10255 {
10256 tree t = targetm.build_builtin_va_list ();
10257
10258 /* Many back-ends define record types without setting TYPE_NAME.
10259 If we copied the record type here, we'd keep the original
10260 record type without a name. This breaks name mangling. So,
10261 don't copy record types and let c_common_nodes_and_builtins()
10262 declare the type to be __builtin_va_list. */
10263 if (TREE_CODE (t) != RECORD_TYPE)
10264 t = build_variant_type_copy (t);
10265
10266 va_list_type_node = t;
10267 }
10268 }
10269
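/* Illustrative sketch (assumption, not from the original source): a front
   end calls this once, very early, before creating any language-specific
   nodes; flag_signed_char is the usual source of the first argument, and
   the second argument is normally false.  */
#if 0
  build_common_tree_nodes (flag_signed_char, false);
  /* ... language-specific type nodes are built afterwards ...  */
#endif
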
10270 /* Modify DECL for given flags.
10271 TM_PURE attribute is set only on types, so the function will modify
10272 DECL's type when ECF_TM_PURE is used. */
10273
10274 void
10275 set_call_expr_flags (tree decl, int flags)
10276 {
10277 if (flags & ECF_NOTHROW)
10278 TREE_NOTHROW (decl) = 1;
10279 if (flags & ECF_CONST)
10280 TREE_READONLY (decl) = 1;
10281 if (flags & ECF_PURE)
10282 DECL_PURE_P (decl) = 1;
10283 if (flags & ECF_LOOPING_CONST_OR_PURE)
10284 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10285 if (flags & ECF_NOVOPS)
10286 DECL_IS_NOVOPS (decl) = 1;
10287 if (flags & ECF_NORETURN)
10288 TREE_THIS_VOLATILE (decl) = 1;
10289 if (flags & ECF_MALLOC)
10290 DECL_IS_MALLOC (decl) = 1;
10291 if (flags & ECF_RETURNS_TWICE)
10292 DECL_IS_RETURNS_TWICE (decl) = 1;
10293 if (flags & ECF_LEAF)
10294 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10295 NULL, DECL_ATTRIBUTES (decl));
10296 if ((flags & ECF_TM_PURE) && flag_tm)
10297 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10298 /* Looping const or pure is implied by noreturn.
10299 There is currently no way to declare looping const or looping pure alone. */
10300 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10301 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10302 }
10303
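/* Illustrative sketch (assumption): marking a hypothetical function DECL
   with the same const/nothrow/leaf combination several of the builtins
   defined below use.  */
#if 0
static void
mark_const_leaf_sketch (tree fndecl)
{
  set_call_expr_flags (fndecl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);
}
#endif
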
10304
10305 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10306
10307 static void
10308 local_define_builtin (const char *name, tree type, enum built_in_function code,
10309 const char *library_name, int ecf_flags)
10310 {
10311 tree decl;
10312
10313 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10314 library_name, NULL_TREE);
10315 set_call_expr_flags (decl, ecf_flags);
10316
10317 set_builtin_decl (code, decl, true);
10318 }
10319
10320 /* Call this function after instantiating all builtins that the language
10321 front end cares about. This will build the rest of the builtins
10322 and internal functions that are relied upon by the tree optimizers and
10323 the middle-end. */
10324
10325 void
10326 build_common_builtin_nodes (void)
10327 {
10328 tree tmp, ftype;
10329 int ecf_flags;
10330
10331 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10332 {
10333 ftype = build_function_type (void_type_node, void_list_node);
10334 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10335 "__builtin_unreachable",
10336 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10337 | ECF_CONST);
10338 }
10339
10340 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10341 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10342 {
10343 ftype = build_function_type_list (ptr_type_node,
10344 ptr_type_node, const_ptr_type_node,
10345 size_type_node, NULL_TREE);
10346
10347 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10348 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10349 "memcpy", ECF_NOTHROW | ECF_LEAF);
10350 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10351 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10352 "memmove", ECF_NOTHROW | ECF_LEAF);
10353 }
10354
10355 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10356 {
10357 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10358 const_ptr_type_node, size_type_node,
10359 NULL_TREE);
10360 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10361 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10362 }
10363
10364 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10365 {
10366 ftype = build_function_type_list (ptr_type_node,
10367 ptr_type_node, integer_type_node,
10368 size_type_node, NULL_TREE);
10369 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10370 "memset", ECF_NOTHROW | ECF_LEAF);
10371 }
10372
10373 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10374 {
10375 ftype = build_function_type_list (ptr_type_node,
10376 size_type_node, NULL_TREE);
10377 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10378 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10379 }
10380
10381 ftype = build_function_type_list (ptr_type_node, size_type_node,
10382 size_type_node, NULL_TREE);
10383 local_define_builtin ("__builtin_alloca_with_align", ftype,
10384 BUILT_IN_ALLOCA_WITH_ALIGN,
10385 "__builtin_alloca_with_align",
10386 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10387
10388 /* If we're checking the stack, `alloca' can throw. */
10389 if (flag_stack_check)
10390 {
10391 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10392 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10393 }
10394
10395 ftype = build_function_type_list (void_type_node,
10396 ptr_type_node, ptr_type_node,
10397 ptr_type_node, NULL_TREE);
10398 local_define_builtin ("__builtin_init_trampoline", ftype,
10399 BUILT_IN_INIT_TRAMPOLINE,
10400 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10401 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10402 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10403 "__builtin_init_heap_trampoline",
10404 ECF_NOTHROW | ECF_LEAF);
10405
10406 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10407 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10408 BUILT_IN_ADJUST_TRAMPOLINE,
10409 "__builtin_adjust_trampoline",
10410 ECF_CONST | ECF_NOTHROW);
10411
10412 ftype = build_function_type_list (void_type_node,
10413 ptr_type_node, ptr_type_node, NULL_TREE);
10414 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10415 BUILT_IN_NONLOCAL_GOTO,
10416 "__builtin_nonlocal_goto",
10417 ECF_NORETURN | ECF_NOTHROW);
10418
10419 ftype = build_function_type_list (void_type_node,
10420 ptr_type_node, ptr_type_node, NULL_TREE);
10421 local_define_builtin ("__builtin_setjmp_setup", ftype,
10422 BUILT_IN_SETJMP_SETUP,
10423 "__builtin_setjmp_setup", ECF_NOTHROW);
10424
10425 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10426 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10427 BUILT_IN_SETJMP_RECEIVER,
10428 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10429
10430 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10431 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10432 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10433
10434 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10435 local_define_builtin ("__builtin_stack_restore", ftype,
10436 BUILT_IN_STACK_RESTORE,
10437 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10438
10439 /* If there's a possibility that we might use the ARM EABI, build the
10440 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10441 if (targetm.arm_eabi_unwinder)
10442 {
10443 ftype = build_function_type_list (void_type_node, NULL_TREE);
10444 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10445 BUILT_IN_CXA_END_CLEANUP,
10446 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10447 }
10448
10449 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10450 local_define_builtin ("__builtin_unwind_resume", ftype,
10451 BUILT_IN_UNWIND_RESUME,
10452 ((targetm_common.except_unwind_info (&global_options)
10453 == UI_SJLJ)
10454 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10455 ECF_NORETURN);
10456
10457 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10458 {
10459 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10460 NULL_TREE);
10461 local_define_builtin ("__builtin_return_address", ftype,
10462 BUILT_IN_RETURN_ADDRESS,
10463 "__builtin_return_address",
10464 ECF_NOTHROW);
10465 }
10466
10467 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10468 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10469 {
10470 ftype = build_function_type_list (void_type_node, ptr_type_node,
10471 ptr_type_node, NULL_TREE);
10472 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10473 local_define_builtin ("__cyg_profile_func_enter", ftype,
10474 BUILT_IN_PROFILE_FUNC_ENTER,
10475 "__cyg_profile_func_enter", 0);
10476 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10477 local_define_builtin ("__cyg_profile_func_exit", ftype,
10478 BUILT_IN_PROFILE_FUNC_EXIT,
10479 "__cyg_profile_func_exit", 0);
10480 }
10481
10482 /* The exception object and filter values from the runtime. The argument
10483 must be zero before exception lowering, i.e. from the front end. After
10484 exception lowering, it will be the region number for the exception
10485 landing pad. These functions are PURE instead of CONST to prevent
10486 them from being hoisted past the exception edge that will initialize
10487 its value in the landing pad. */
10488 ftype = build_function_type_list (ptr_type_node,
10489 integer_type_node, NULL_TREE);
10490 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10491 /* Only use TM_PURE if we have TM language support. */
10492 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10493 ecf_flags |= ECF_TM_PURE;
10494 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10495 "__builtin_eh_pointer", ecf_flags);
10496
10497 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10498 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10499 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10500 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10501
10502 ftype = build_function_type_list (void_type_node,
10503 integer_type_node, integer_type_node,
10504 NULL_TREE);
10505 local_define_builtin ("__builtin_eh_copy_values", ftype,
10506 BUILT_IN_EH_COPY_VALUES,
10507 "__builtin_eh_copy_values", ECF_NOTHROW);
10508
10509 /* Complex multiplication and division. These are handled as builtins
10510 rather than optabs because emit_library_call_value doesn't support
10511 complex. Further, we can do slightly better with folding these
10512 beasties if the real and complex parts of the arguments are separate. */
10513 {
10514 int mode;
10515
10516 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10517 {
10518 char mode_name_buf[4], *q;
10519 const char *p;
10520 enum built_in_function mcode, dcode;
10521 tree type, inner_type;
10522 const char *prefix = "__";
10523
10524 if (targetm.libfunc_gnu_prefix)
10525 prefix = "__gnu_";
10526
10527 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10528 if (type == NULL)
10529 continue;
10530 inner_type = TREE_TYPE (type);
10531
10532 ftype = build_function_type_list (type, inner_type, inner_type,
10533 inner_type, inner_type, NULL_TREE);
10534
10535 mcode = ((enum built_in_function)
10536 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10537 dcode = ((enum built_in_function)
10538 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10539
10540 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10541 *q = TOLOWER (*p);
10542 *q = '\0';
10543
10544 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10545 NULL);
10546 local_define_builtin (built_in_names[mcode], ftype, mcode,
10547 built_in_names[mcode],
10548 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10549
10550 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10551 NULL);
10552 local_define_builtin (built_in_names[dcode], ftype, dcode,
10553 built_in_names[dcode],
10554 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10555 }
10556 }
10557
10558 init_internal_fns ();
10559 }
10560
10561 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10562 better way.
10563
10564 If we requested a pointer to a vector, build up the pointers that
10565 we stripped off while looking for the inner type. Similarly for
10566 return values from functions.
10567
10568 The argument TYPE is the top of the chain, and BOTTOM is the
10569 new type which we will point to. */
10570
10571 tree
10572 reconstruct_complex_type (tree type, tree bottom)
10573 {
10574 tree inner, outer;
10575
10576 if (TREE_CODE (type) == POINTER_TYPE)
10577 {
10578 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10579 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10580 TYPE_REF_CAN_ALIAS_ALL (type));
10581 }
10582 else if (TREE_CODE (type) == REFERENCE_TYPE)
10583 {
10584 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10585 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10586 TYPE_REF_CAN_ALIAS_ALL (type));
10587 }
10588 else if (TREE_CODE (type) == ARRAY_TYPE)
10589 {
10590 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10591 outer = build_array_type (inner, TYPE_DOMAIN (type));
10592 }
10593 else if (TREE_CODE (type) == FUNCTION_TYPE)
10594 {
10595 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10596 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10597 }
10598 else if (TREE_CODE (type) == METHOD_TYPE)
10599 {
10600 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10601 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10602 so we must compensate by getting rid of it. */
10603 outer
10604 = build_method_type_directly
10605 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10606 inner,
10607 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10608 }
10609 else if (TREE_CODE (type) == OFFSET_TYPE)
10610 {
10611 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10612 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10613 }
10614 else
10615 return bottom;
10616
10617 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10618 TYPE_QUALS (type));
10619 }
10620
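/* Illustrative sketch (assumption): rebuilding a pointer chain around a
   new innermost type, e.g. turning "float *" into a pointer to a
   4 x float vector.  */
#if 0
static tree
repoint_to_vector_sketch (void)
{
  tree v4sf = build_vector_type (float_type_node, 4);
  tree pfloat = build_pointer_type (float_type_node);
  return reconstruct_complex_type (pfloat, v4sf);  /* pointer to V4SF */
}
#endif
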
10621 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10622 the inner type. */
10623 tree
10624 build_vector_type_for_mode (tree innertype, machine_mode mode)
10625 {
10626 int nunits;
10627
10628 switch (GET_MODE_CLASS (mode))
10629 {
10630 case MODE_VECTOR_INT:
10631 case MODE_VECTOR_FLOAT:
10632 case MODE_VECTOR_FRACT:
10633 case MODE_VECTOR_UFRACT:
10634 case MODE_VECTOR_ACCUM:
10635 case MODE_VECTOR_UACCUM:
10636 nunits = GET_MODE_NUNITS (mode);
10637 break;
10638
10639 case MODE_INT:
10640 /* Check that there are no leftover bits. */
10641 gcc_assert (GET_MODE_BITSIZE (mode)
10642 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10643
10644 nunits = GET_MODE_BITSIZE (mode)
10645 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10646 break;
10647
10648 default:
10649 gcc_unreachable ();
10650 }
10651
10652 return make_vector_type (innertype, nunits, mode);
10653 }
10654
10655 /* Similarly, but takes the inner type and number of units, which must be
10656 a power of two. */
10657
10658 tree
10659 build_vector_type (tree innertype, int nunits)
10660 {
10661 return make_vector_type (innertype, nunits, VOIDmode);
10662 }
10663
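/* Illustrative sketch (assumption): requesting a 4 x float vector type and
   letting the vector mode be chosen during layout (VOIDmode is passed
   through to make_vector_type).  */
#if 0
static tree
v4sf_type_sketch (void)
{
  return build_vector_type (float_type_node, 4);
}
#endif
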
10664 /* Build a truth vector type with NUNITS units for VECTOR_SIZE-byte vectors. */
10665
10666 tree
10667 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10668 {
10669 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10670 vector_size);
10671
10672 gcc_assert (mask_mode != VOIDmode);
10673
10674 unsigned HOST_WIDE_INT vsize;
10675 if (mask_mode == BLKmode)
10676 vsize = vector_size * BITS_PER_UNIT;
10677 else
10678 vsize = GET_MODE_BITSIZE (mask_mode);
10679
10680 unsigned HOST_WIDE_INT esize = vsize / nunits;
10681 gcc_assert (esize * nunits == vsize);
10682
10683 tree bool_type = build_nonstandard_boolean_type (esize);
10684
10685 return make_vector_type (bool_type, nunits, mask_mode);
10686 }
10687
10688 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10689
10690 tree
10691 build_same_sized_truth_vector_type (tree vectype)
10692 {
10693 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10694 return vectype;
10695
10696 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10697
10698 if (!size)
10699 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10700
10701 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10702 }
10703
10704 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10705
10706 tree
10707 build_opaque_vector_type (tree innertype, int nunits)
10708 {
10709 tree t = make_vector_type (innertype, nunits, VOIDmode);
10710 tree cand;
10711 /* We always build the non-opaque variant before the opaque one,
10712 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10713 cand = TYPE_NEXT_VARIANT (t);
10714 if (cand
10715 && TYPE_VECTOR_OPAQUE (cand)
10716 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10717 return cand;
10718 /* Otherwise build a variant type and make sure to queue it after
10719 the non-opaque type. */
10720 cand = build_distinct_type_copy (t);
10721 TYPE_VECTOR_OPAQUE (cand) = true;
10722 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10723 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10724 TYPE_NEXT_VARIANT (t) = cand;
10725 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10726 return cand;
10727 }
10728
10729
10730 /* Given an initializer INIT, return TRUE if INIT is zero or some
10731 aggregate of zeros. Otherwise return FALSE. */
10732 bool
10733 initializer_zerop (const_tree init)
10734 {
10735 tree elt;
10736
10737 STRIP_NOPS (init);
10738
10739 switch (TREE_CODE (init))
10740 {
10741 case INTEGER_CST:
10742 return integer_zerop (init);
10743
10744 case REAL_CST:
10745 /* ??? Note that this is not correct for C4X float formats. There,
10746 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10747 negative exponent. */
10748 return real_zerop (init)
10749 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10750
10751 case FIXED_CST:
10752 return fixed_zerop (init);
10753
10754 case COMPLEX_CST:
10755 return integer_zerop (init)
10756 || (real_zerop (init)
10757 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10758 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10759
10760 case VECTOR_CST:
10761 {
10762 unsigned i;
10763 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10764 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10765 return false;
10766 return true;
10767 }
10768
10769 case CONSTRUCTOR:
10770 {
10771 unsigned HOST_WIDE_INT idx;
10772
10773 if (TREE_CLOBBER_P (init))
10774 return false;
10775 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10776 if (!initializer_zerop (elt))
10777 return false;
10778 return true;
10779 }
10780
10781 case STRING_CST:
10782 {
10783 int i;
10784
10785 /* We need to loop through all elements to handle cases like
10786 "\0" and "\0foobar". */
10787 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10788 if (TREE_STRING_POINTER (init)[i] != '\0')
10789 return false;
10790
10791 return true;
10792 }
10793
10794 default:
10795 return false;
10796 }
10797 }
10798
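/* Illustrative sketch (assumption): a zero INTEGER_CST is a zero
   initializer; a REAL_CST of -0.0 is deliberately not, as the comment in
   the REAL_CST case above explains.  */
#if 0
static void
initializer_zerop_sketch (void)
{
  tree zero = build_int_cst (integer_type_node, 0);
  gcc_checking_assert (initializer_zerop (zero));
}
#endif
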
10799 /* Check whether vector VEC consists of all equal elements and whether
10800 the number of elements matches the type of VEC.
10801 Return the first element of the vector,
10802 or NULL_TREE if the vector is not uniform. */
10803 tree
10804 uniform_vector_p (const_tree vec)
10805 {
10806 tree first, t;
10807 unsigned i;
10808
10809 if (vec == NULL_TREE)
10810 return NULL_TREE;
10811
10812 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10813
10814 if (TREE_CODE (vec) == VECTOR_CST)
10815 {
10816 first = VECTOR_CST_ELT (vec, 0);
10817 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10818 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10819 return NULL_TREE;
10820
10821 return first;
10822 }
10823
10824 else if (TREE_CODE (vec) == CONSTRUCTOR)
10825 {
10826 first = error_mark_node;
10827
10828 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10829 {
10830 if (i == 0)
10831 {
10832 first = t;
10833 continue;
10834 }
10835 if (!operand_equal_p (first, t, 0))
10836 return NULL_TREE;
10837 }
10838 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10839 return NULL_TREE;
10840
10841 return first;
10842 }
10843
10844 return NULL_TREE;
10845 }
10846
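/* Illustrative sketch (assumption): a caller only interested in whether a
   vector constant is a splat of a single value.  */
#if 0
static bool
is_splat_sketch (const_tree vec_cst)
{
  return uniform_vector_p (vec_cst) != NULL_TREE;
}
#endif
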
10847 /* Build an empty statement at location LOC. */
10848
10849 tree
10850 build_empty_stmt (location_t loc)
10851 {
10852 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10853 SET_EXPR_LOCATION (t, loc);
10854 return t;
10855 }
10856
10857
10858 /* Build an OpenMP clause with code CODE. LOC is the location of the
10859 clause. */
10860
10861 tree
10862 build_omp_clause (location_t loc, enum omp_clause_code code)
10863 {
10864 tree t;
10865 int size, length;
10866
10867 length = omp_clause_num_ops[code];
10868 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10869
10870 record_node_allocation_statistics (OMP_CLAUSE, size);
10871
10872 t = (tree) ggc_internal_alloc (size);
10873 memset (t, 0, size);
10874 TREE_SET_CODE (t, OMP_CLAUSE);
10875 OMP_CLAUSE_SET_CODE (t, code);
10876 OMP_CLAUSE_LOCATION (t) = loc;
10877
10878 return t;
10879 }
10880
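/* Illustrative sketch (assumption): building a private(DECL) clause for a
   hypothetical DECL and chaining it onto an existing clause list.  */
#if 0
static tree
add_private_clause_sketch (location_t loc, tree decl, tree clauses)
{
  tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
  OMP_CLAUSE_DECL (c) = decl;
  OMP_CLAUSE_CHAIN (c) = clauses;
  return c;
}
#endif
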
10881 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10882 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10883 Except for the CODE and operand count field, other storage for the
10884 object is initialized to zeros. */
10885
10886 tree
10887 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10888 {
10889 tree t;
10890 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10891
10892 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10893 gcc_assert (len >= 1);
10894
10895 record_node_allocation_statistics (code, length);
10896
10897 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10898
10899 TREE_SET_CODE (t, code);
10900
10901 /* Can't use TREE_OPERAND to store the length because if checking is
10902 enabled, it will try to check the length before we store it. :-P */
10903 t->exp.operands[0] = build_int_cst (sizetype, len);
10904
10905 return t;
10906 }
10907
10908 /* Helper function for build_call_* functions; build a CALL_EXPR with
10909 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10910 the argument slots. */
10911
10912 static tree
10913 build_call_1 (tree return_type, tree fn, int nargs)
10914 {
10915 tree t;
10916
10917 t = build_vl_exp (CALL_EXPR, nargs + 3);
10918 TREE_TYPE (t) = return_type;
10919 CALL_EXPR_FN (t) = fn;
10920 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10921
10922 return t;
10923 }
10924
10925 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10926 FN and a null static chain slot. NARGS is the number of call arguments
10927 which are specified as "..." arguments. */
10928
10929 tree
10930 build_call_nary (tree return_type, tree fn, int nargs, ...)
10931 {
10932 tree ret;
10933 va_list args;
10934 va_start (args, nargs);
10935 ret = build_call_valist (return_type, fn, nargs, args);
10936 va_end (args);
10937 return ret;
10938 }
10939
10940 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10941 FN and a null static chain slot. NARGS is the number of call arguments
10942 which are specified as a va_list ARGS. */
10943
10944 tree
10945 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10946 {
10947 tree t;
10948 int i;
10949
10950 t = build_call_1 (return_type, fn, nargs);
10951 for (i = 0; i < nargs; i++)
10952 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10953 process_call_operands (t);
10954 return t;
10955 }
10956
10957 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10958 FN and a null static chain slot. NARGS is the number of call arguments
10959 which are specified as a tree array ARGS. */
10960
10961 tree
10962 build_call_array_loc (location_t loc, tree return_type, tree fn,
10963 int nargs, const tree *args)
10964 {
10965 tree t;
10966 int i;
10967
10968 t = build_call_1 (return_type, fn, nargs);
10969 for (i = 0; i < nargs; i++)
10970 CALL_EXPR_ARG (t, i) = args[i];
10971 process_call_operands (t);
10972 SET_EXPR_LOCATION (t, loc);
10973 return t;
10974 }
10975
10976 /* Like build_call_array, but takes a vec. */
10977
10978 tree
10979 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10980 {
10981 tree ret, t;
10982 unsigned int ix;
10983
10984 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10985 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10986 CALL_EXPR_ARG (ret, ix) = t;
10987 process_call_operands (ret);
10988 return ret;
10989 }
10990
10991 /* Conveniently construct a function call expression. FNDECL names the
10992 function to be called and N arguments are passed in the array
10993 ARGARRAY. */
10994
10995 tree
10996 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10997 {
10998 tree fntype = TREE_TYPE (fndecl);
10999 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11000
11001 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11002 }
11003
11004 /* Conveniently construct a function call expression. FNDECL names the
11005 function to be called and the arguments are passed in the vector
11006 VEC. */
11007
11008 tree
11009 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11010 {
11011 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11012 vec_safe_address (vec));
11013 }
11014
11015
11016 /* Conveniently construct a function call expression. FNDECL names the
11017 function to be called, N is the number of arguments, and the "..."
11018 parameters are the argument expressions. */
11019
11020 tree
11021 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11022 {
11023 va_list ap;
11024 tree *argarray = XALLOCAVEC (tree, n);
11025 int i;
11026
11027 va_start (ap, n);
11028 for (i = 0; i < n; i++)
11029 argarray[i] = va_arg (ap, tree);
11030 va_end (ap);
11031 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11032 }
11033
11034 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11035 varargs macros aren't supported by all bootstrap compilers. */
11036
11037 tree
11038 build_call_expr (tree fndecl, int n, ...)
11039 {
11040 va_list ap;
11041 tree *argarray = XALLOCAVEC (tree, n);
11042 int i;
11043
11044 va_start (ap, n);
11045 for (i = 0; i < n; i++)
11046 argarray[i] = va_arg (ap, tree);
11047 va_end (ap);
11048 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11049 }
11050
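/* Illustrative sketch (assumption): building a call to the memcpy builtin
   from three argument trees that are already in hand.  */
#if 0
static tree
memcpy_call_sketch (tree dst, tree src, tree len)
{
  tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
  return build_call_expr (fndecl, 3, dst, src, len);
}
#endif
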
11051 /* Build an internal call expression. This is just like a CALL_EXPR, except
11052 its CALL_EXPR_FN is NULL. It will be gimplified later into an ordinary
11053 internal function call. */
11054
11055 tree
11056 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11057 tree type, int n, ...)
11058 {
11059 va_list ap;
11060 int i;
11061
11062 tree fn = build_call_1 (type, NULL_TREE, n);
11063 va_start (ap, n);
11064 for (i = 0; i < n; i++)
11065 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
11066 va_end (ap);
11067 SET_EXPR_LOCATION (fn, loc);
11068 CALL_EXPR_IFN (fn) = ifn;
11069 return fn;
11070 }
11071
11072 /* Create a new constant string literal and return a char* pointer to it.
11073 The STRING_CST value is the LEN characters at STR. */
11074 tree
11075 build_string_literal (int len, const char *str)
11076 {
11077 tree t, elem, index, type;
11078
11079 t = build_string (len, str);
11080 elem = build_type_variant (char_type_node, 1, 0);
11081 index = build_index_type (size_int (len - 1));
11082 type = build_array_type (elem, index);
11083 TREE_TYPE (t) = type;
11084 TREE_CONSTANT (t) = 1;
11085 TREE_READONLY (t) = 1;
11086 TREE_STATIC (t) = 1;
11087
11088 type = build_pointer_type (elem);
11089 t = build1 (ADDR_EXPR, type,
11090 build4 (ARRAY_REF, elem,
11091 t, integer_zero_node, NULL_TREE, NULL_TREE));
11092 return t;
11093 }
11094
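/* Illustrative sketch (assumption): materializing the string "hello" and
   passing its address to the puts builtin.  */
#if 0
static tree
call_puts_sketch (void)
{
  tree str = build_string_literal (strlen ("hello") + 1, "hello");
  tree fndecl = builtin_decl_explicit (BUILT_IN_PUTS);
  return build_call_expr (fndecl, 1, str);
}
#endif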
11095
11096
11097 /* Return true if T (assumed to be a DECL) must be assigned a memory
11098 location. */
11099
11100 bool
11101 needs_to_live_in_memory (const_tree t)
11102 {
11103 return (TREE_ADDRESSABLE (t)
11104 || is_global_var (t)
11105 || (TREE_CODE (t) == RESULT_DECL
11106 && !DECL_BY_REFERENCE (t)
11107 && aggregate_value_p (t, current_function_decl)));
11108 }
11109
11110 /* Return the value of the integer constant X, sign-extended. */
11111
11112 HOST_WIDE_INT
11113 int_cst_value (const_tree x)
11114 {
11115 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11116 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11117
11118 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11119 gcc_assert (cst_and_fits_in_hwi (x));
11120
11121 if (bits < HOST_BITS_PER_WIDE_INT)
11122 {
11123 bool negative = ((val >> (bits - 1)) & 1) != 0;
11124 if (negative)
11125 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11126 else
11127 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11128 }
11129
11130 return val;
11131 }
11132
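/* Illustrative sketch (assumption): in the 8-bit signed char type the low
   bits of the constant -1 sign-extend back to -1 on the host.  */
#if 0
static void
int_cst_value_sketch (void)
{
  tree t = build_int_cst (signed_char_type_node, -1);
  gcc_checking_assert (int_cst_value (t) == -1);
}
#endif
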
11133 /* If TYPE is an integral or pointer type, return an integer type with
11134 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11135 if TYPE is already an integer type of signedness UNSIGNEDP. */
11136
11137 tree
11138 signed_or_unsigned_type_for (int unsignedp, tree type)
11139 {
11140 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11141 return type;
11142
11143 if (TREE_CODE (type) == VECTOR_TYPE)
11144 {
11145 tree inner = TREE_TYPE (type);
11146 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11147 if (!inner2)
11148 return NULL_TREE;
11149 if (inner == inner2)
11150 return type;
11151 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11152 }
11153
11154 if (!INTEGRAL_TYPE_P (type)
11155 && !POINTER_TYPE_P (type)
11156 && TREE_CODE (type) != OFFSET_TYPE)
11157 return NULL_TREE;
11158
11159 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11160 }
11161
11162 /* If TYPE is an integral or pointer type, return an integer type with
11163 the same precision which is unsigned, or itself if TYPE is already an
11164 unsigned integer type. */
11165
11166 tree
11167 unsigned_type_for (tree type)
11168 {
11169 return signed_or_unsigned_type_for (1, type);
11170 }
11171
11172 /* If TYPE is an integral or pointer type, return an integer type with
11173 the same precision which is signed, or itself if TYPE is already a
11174 signed integer type. */
11175
11176 tree
11177 signed_type_for (tree type)
11178 {
11179 return signed_or_unsigned_type_for (0, type);
11180 }
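
/* An illustrative usage sketch, not part of the original code: these helpers
   are typically used to flip the signedness of an operand's type, e.g.

     tree utype = unsigned_type_for (TREE_TYPE (op));
     if (utype)
       op = fold_convert (utype, op);

   where OP is a hypothetical expression tree.  For pointer types the result
   is an unsigned integer type of the same precision, and NULL_TREE is
   returned for types with no integer analogue, hence the check.  */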
11181
11182 /* If TYPE is a vector type, return a signed integer vector type with the
11183 same width and number of subparts. Otherwise return boolean_type_node. */
11184
11185 tree
11186 truth_type_for (tree type)
11187 {
11188 if (TREE_CODE (type) == VECTOR_TYPE)
11189 {
11190 if (VECTOR_BOOLEAN_TYPE_P (type))
11191 return type;
11192 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11193 GET_MODE_SIZE (TYPE_MODE (type)));
11194 }
11195 else
11196 return boolean_type_node;
11197 }
11198
11199 /* Returns the largest value obtainable by casting something in INNER type to
11200 OUTER type. */
11201
11202 tree
11203 upper_bound_in_type (tree outer, tree inner)
11204 {
11205 unsigned int det = 0;
11206 unsigned oprec = TYPE_PRECISION (outer);
11207 unsigned iprec = TYPE_PRECISION (inner);
11208 unsigned prec;
11209
11210 /* Compute a unique number for every combination. */
11211 det |= (oprec > iprec) ? 4 : 0;
11212 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11213 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11214
11215 /* Determine the exponent to use. */
11216 switch (det)
11217 {
11218 case 0:
11219 case 1:
11220 /* oprec <= iprec, outer: signed, inner: don't care. */
11221 prec = oprec - 1;
11222 break;
11223 case 2:
11224 case 3:
11225 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11226 prec = oprec;
11227 break;
11228 case 4:
11229 /* oprec > iprec, outer: signed, inner: signed. */
11230 prec = iprec - 1;
11231 break;
11232 case 5:
11233 /* oprec > iprec, outer: signed, inner: unsigned. */
11234 prec = iprec;
11235 break;
11236 case 6:
11237 /* oprec > iprec, outer: unsigned, inner: signed. */
11238 prec = oprec;
11239 break;
11240 case 7:
11241 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11242 prec = iprec;
11243 break;
11244 default:
11245 gcc_unreachable ();
11246 }
11247
11248 return wide_int_to_tree (outer,
11249 wi::mask (prec, false, TYPE_PRECISION (outer)));
11250 }
11251
11252 /* Returns the smallest value obtainable by casting something in INNER type to
11253 OUTER type. */
11254
11255 tree
11256 lower_bound_in_type (tree outer, tree inner)
11257 {
11258 unsigned oprec = TYPE_PRECISION (outer);
11259 unsigned iprec = TYPE_PRECISION (inner);
11260
11261 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11262 and obtain 0. */
11263 if (TYPE_UNSIGNED (outer)
11264 /* If we are widening something of an unsigned type, OUTER type
11265 contains all values of INNER type. In particular, both INNER
11266 and OUTER types have zero in common. */
11267 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11268 return build_int_cst (outer, 0);
11269 else
11270 {
11271 /* If we are widening a signed type to another signed type, we
11272 	 want to obtain -2^(iprec-1).  If we are keeping the
11273 precision or narrowing to a signed type, we want to obtain
11274 -2^(oprec-1). */
11275 unsigned prec = oprec > iprec ? iprec : oprec;
11276 return wide_int_to_tree (outer,
11277 wi::mask (prec - 1, true,
11278 TYPE_PRECISION (outer)));
11279 }
11280 }
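
/* Worked example for the two routines above (illustrative, not part of the
   original code): for upper_bound_in_type (unsigned short, signed char) we
   get OPREC == 16, IPREC == 8 and DET == 6, so PREC == 16 and the bound is
   0xffff, since negative chars sign-extend to large unsigned values; for
   lower_bound_in_type with the same arguments the OUTER type is unsigned,
   so the smallest obtainable value is simply 0.  */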
11281
11282 /* Return nonzero if two operands that are suitable for PHI nodes are
11283 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11284 SSA_NAME or invariant. Note that this is strictly an optimization.
11285 That is, callers of this function can directly call operand_equal_p
11286 and get the same result, only slower. */
11287
11288 int
11289 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11290 {
11291 if (arg0 == arg1)
11292 return 1;
11293 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11294 return 0;
11295 return operand_equal_p (arg0, arg1, 0);
11296 }
11297
11298 /* Returns the number of zeros at the end of the binary representation of X.  */
11299
11300 tree
11301 num_ending_zeros (const_tree x)
11302 {
11303 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11304 }
11305
11306
11307 #define WALK_SUBTREE(NODE) \
11308 do \
11309 { \
11310 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11311 if (result) \
11312 return result; \
11313 } \
11314 while (0)
11315
11316 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11317    to be walked whenever a type is seen in the tree.  The rest of the operands
11318    and the return value are as for walk_tree.  */
11319
11320 static tree
11321 walk_type_fields (tree type, walk_tree_fn func, void *data,
11322 hash_set<tree> *pset, walk_tree_lh lh)
11323 {
11324 tree result = NULL_TREE;
11325
11326 switch (TREE_CODE (type))
11327 {
11328 case POINTER_TYPE:
11329 case REFERENCE_TYPE:
11330 case VECTOR_TYPE:
11331 /* We have to worry about mutually recursive pointers. These can't
11332 be written in C. They can in Ada. It's pathological, but
11333 there's an ACATS test (c38102a) that checks it. Deal with this
11334 by checking if we're pointing to another pointer, that one
11335 points to another pointer, that one does too, and we have no htab.
11336 If so, get a hash table. We check three levels deep to avoid
11337 the cost of the hash table if we don't need one. */
11338 if (POINTER_TYPE_P (TREE_TYPE (type))
11339 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11340 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11341 && !pset)
11342 {
11343 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11344 func, data);
11345 if (result)
11346 return result;
11347
11348 break;
11349 }
11350
11351 /* ... fall through ... */
11352
11353 case COMPLEX_TYPE:
11354 WALK_SUBTREE (TREE_TYPE (type));
11355 break;
11356
11357 case METHOD_TYPE:
11358 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11359
11360 /* Fall through. */
11361
11362 case FUNCTION_TYPE:
11363 WALK_SUBTREE (TREE_TYPE (type));
11364 {
11365 tree arg;
11366
11367 /* We never want to walk into default arguments. */
11368 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11369 WALK_SUBTREE (TREE_VALUE (arg));
11370 }
11371 break;
11372
11373 case ARRAY_TYPE:
11374     /* Don't follow this node's type if it is a pointer, for fear that
11375 we'll have infinite recursion. If we have a PSET, then we
11376 need not fear. */
11377 if (pset
11378 || (!POINTER_TYPE_P (TREE_TYPE (type))
11379 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11380 WALK_SUBTREE (TREE_TYPE (type));
11381 WALK_SUBTREE (TYPE_DOMAIN (type));
11382 break;
11383
11384 case OFFSET_TYPE:
11385 WALK_SUBTREE (TREE_TYPE (type));
11386 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11387 break;
11388
11389 default:
11390 break;
11391 }
11392
11393 return NULL_TREE;
11394 }
11395
11396 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11397 called with the DATA and the address of each sub-tree. If FUNC returns a
11398 non-NULL value, the traversal is stopped, and the value returned by FUNC
11399 is returned. If PSET is non-NULL it is used to record the nodes visited,
11400 and to avoid visiting a node more than once. */
11401
11402 tree
11403 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11404 hash_set<tree> *pset, walk_tree_lh lh)
11405 {
11406 enum tree_code code;
11407 int walk_subtrees;
11408 tree result;
11409
11410 #define WALK_SUBTREE_TAIL(NODE) \
11411 do \
11412 { \
11413 tp = & (NODE); \
11414 goto tail_recurse; \
11415 } \
11416 while (0)
11417
11418 tail_recurse:
11419 /* Skip empty subtrees. */
11420 if (!*tp)
11421 return NULL_TREE;
11422
11423 /* Don't walk the same tree twice, if the user has requested
11424 that we avoid doing so. */
11425 if (pset && pset->add (*tp))
11426 return NULL_TREE;
11427
11428 /* Call the function. */
11429 walk_subtrees = 1;
11430 result = (*func) (tp, &walk_subtrees, data);
11431
11432 /* If we found something, return it. */
11433 if (result)
11434 return result;
11435
11436 code = TREE_CODE (*tp);
11437
11438 /* Even if we didn't, FUNC may have decided that there was nothing
11439 interesting below this point in the tree. */
11440 if (!walk_subtrees)
11441 {
11442 /* But we still need to check our siblings. */
11443 if (code == TREE_LIST)
11444 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11445 else if (code == OMP_CLAUSE)
11446 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11447 else
11448 return NULL_TREE;
11449 }
11450
11451 if (lh)
11452 {
11453 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11454 if (result || !walk_subtrees)
11455 return result;
11456 }
11457
11458 switch (code)
11459 {
11460 case ERROR_MARK:
11461 case IDENTIFIER_NODE:
11462 case INTEGER_CST:
11463 case REAL_CST:
11464 case FIXED_CST:
11465 case VECTOR_CST:
11466 case STRING_CST:
11467 case BLOCK:
11468 case PLACEHOLDER_EXPR:
11469 case SSA_NAME:
11470 case FIELD_DECL:
11471 case RESULT_DECL:
11472 /* None of these have subtrees other than those already walked
11473 above. */
11474 break;
11475
11476 case TREE_LIST:
11477 WALK_SUBTREE (TREE_VALUE (*tp));
11478 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11479 break;
11480
11481 case TREE_VEC:
11482 {
11483 int len = TREE_VEC_LENGTH (*tp);
11484
11485 if (len == 0)
11486 break;
11487
11488 /* Walk all elements but the first. */
11489 while (--len)
11490 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11491
11492 /* Now walk the first one as a tail call. */
11493 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11494 }
11495
11496 case COMPLEX_CST:
11497 WALK_SUBTREE (TREE_REALPART (*tp));
11498 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11499
11500 case CONSTRUCTOR:
11501 {
11502 unsigned HOST_WIDE_INT idx;
11503 constructor_elt *ce;
11504
11505 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11506 idx++)
11507 WALK_SUBTREE (ce->value);
11508 }
11509 break;
11510
11511 case SAVE_EXPR:
11512 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11513
11514 case BIND_EXPR:
11515 {
11516 tree decl;
11517 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11518 {
11519 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11520 into declarations that are just mentioned, rather than
11521 declared; they don't really belong to this part of the tree.
11522 And, we can see cycles: the initializer for a declaration
11523 can refer to the declaration itself. */
11524 WALK_SUBTREE (DECL_INITIAL (decl));
11525 WALK_SUBTREE (DECL_SIZE (decl));
11526 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11527 }
11528 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11529 }
11530
11531 case STATEMENT_LIST:
11532 {
11533 tree_stmt_iterator i;
11534 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11535 WALK_SUBTREE (*tsi_stmt_ptr (i));
11536 }
11537 break;
11538
11539 case OMP_CLAUSE:
11540 switch (OMP_CLAUSE_CODE (*tp))
11541 {
11542 case OMP_CLAUSE_GANG:
11543 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11544 /* FALLTHRU */
11545
11546 case OMP_CLAUSE_DEVICE_RESIDENT:
11547 case OMP_CLAUSE_USE_DEVICE:
11548 case OMP_CLAUSE_ASYNC:
11549 case OMP_CLAUSE_WAIT:
11550 case OMP_CLAUSE_WORKER:
11551 case OMP_CLAUSE_VECTOR:
11552 case OMP_CLAUSE_NUM_GANGS:
11553 case OMP_CLAUSE_NUM_WORKERS:
11554 case OMP_CLAUSE_VECTOR_LENGTH:
11555 case OMP_CLAUSE_PRIVATE:
11556 case OMP_CLAUSE_SHARED:
11557 case OMP_CLAUSE_FIRSTPRIVATE:
11558 case OMP_CLAUSE_COPYIN:
11559 case OMP_CLAUSE_COPYPRIVATE:
11560 case OMP_CLAUSE_FINAL:
11561 case OMP_CLAUSE_IF:
11562 case OMP_CLAUSE_NUM_THREADS:
11563 case OMP_CLAUSE_SCHEDULE:
11564 case OMP_CLAUSE_UNIFORM:
11565 case OMP_CLAUSE_DEPEND:
11566 case OMP_CLAUSE_NUM_TEAMS:
11567 case OMP_CLAUSE_THREAD_LIMIT:
11568 case OMP_CLAUSE_DEVICE:
11569 case OMP_CLAUSE_DIST_SCHEDULE:
11570 case OMP_CLAUSE_SAFELEN:
11571 case OMP_CLAUSE_SIMDLEN:
11572 case OMP_CLAUSE_ORDERED:
11573 case OMP_CLAUSE_PRIORITY:
11574 case OMP_CLAUSE_GRAINSIZE:
11575 case OMP_CLAUSE_NUM_TASKS:
11576 case OMP_CLAUSE_HINT:
11577 case OMP_CLAUSE_TO_DECLARE:
11578 case OMP_CLAUSE_LINK:
11579 case OMP_CLAUSE_USE_DEVICE_PTR:
11580 case OMP_CLAUSE_IS_DEVICE_PTR:
11581 case OMP_CLAUSE__LOOPTEMP_:
11582 case OMP_CLAUSE__SIMDUID_:
11583 case OMP_CLAUSE__CILK_FOR_COUNT_:
11584 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11585 /* FALLTHRU */
11586
11587 case OMP_CLAUSE_INDEPENDENT:
11588 case OMP_CLAUSE_NOWAIT:
11589 case OMP_CLAUSE_DEFAULT:
11590 case OMP_CLAUSE_UNTIED:
11591 case OMP_CLAUSE_MERGEABLE:
11592 case OMP_CLAUSE_PROC_BIND:
11593 case OMP_CLAUSE_INBRANCH:
11594 case OMP_CLAUSE_NOTINBRANCH:
11595 case OMP_CLAUSE_FOR:
11596 case OMP_CLAUSE_PARALLEL:
11597 case OMP_CLAUSE_SECTIONS:
11598 case OMP_CLAUSE_TASKGROUP:
11599 case OMP_CLAUSE_NOGROUP:
11600 case OMP_CLAUSE_THREADS:
11601 case OMP_CLAUSE_SIMD:
11602 case OMP_CLAUSE_DEFAULTMAP:
11603 case OMP_CLAUSE_AUTO:
11604 case OMP_CLAUSE_SEQ:
11605 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11606
11607 case OMP_CLAUSE_LASTPRIVATE:
11608 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11609 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11610 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11611
11612 case OMP_CLAUSE_COLLAPSE:
11613 {
11614 int i;
11615 for (i = 0; i < 3; i++)
11616 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11617 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11618 }
11619
11620 case OMP_CLAUSE_LINEAR:
11621 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11622 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11623 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11624 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11625
11626 case OMP_CLAUSE_ALIGNED:
11627 case OMP_CLAUSE_FROM:
11628 case OMP_CLAUSE_TO:
11629 case OMP_CLAUSE_MAP:
11630 case OMP_CLAUSE__CACHE_:
11631 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11632 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11633 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11634
11635 case OMP_CLAUSE_REDUCTION:
11636 {
11637 int i;
11638 for (i = 0; i < 5; i++)
11639 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11640 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11641 }
11642
11643 default:
11644 gcc_unreachable ();
11645 }
11646 break;
11647
11648 case TARGET_EXPR:
11649 {
11650 int i, len;
11651
11652 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11653 But, we only want to walk once. */
11654 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11655 for (i = 0; i < len; ++i)
11656 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11657 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11658 }
11659
11660 case DECL_EXPR:
11661 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11662 defining. We only want to walk into these fields of a type in this
11663 case and not in the general case of a mere reference to the type.
11664
11665 The criterion is as follows: if the field can be an expression, it
11666 must be walked only here. This should be in keeping with the fields
11667 that are directly gimplified in gimplify_type_sizes in order for the
11668 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11669 variable-sized types.
11670
11671 Note that DECLs get walked as part of processing the BIND_EXPR. */
11672 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11673 {
11674 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11675 if (TREE_CODE (*type_p) == ERROR_MARK)
11676 return NULL_TREE;
11677
11678 /* Call the function for the type. See if it returns anything or
11679 doesn't want us to continue. If we are to continue, walk both
11680 the normal fields and those for the declaration case. */
11681 result = (*func) (type_p, &walk_subtrees, data);
11682 if (result || !walk_subtrees)
11683 return result;
11684
11685 /* But do not walk a pointed-to type since it may itself need to
11686 be walked in the declaration case if it isn't anonymous. */
11687 if (!POINTER_TYPE_P (*type_p))
11688 {
11689 result = walk_type_fields (*type_p, func, data, pset, lh);
11690 if (result)
11691 return result;
11692 }
11693
11694 /* If this is a record type, also walk the fields. */
11695 if (RECORD_OR_UNION_TYPE_P (*type_p))
11696 {
11697 tree field;
11698
11699 for (field = TYPE_FIELDS (*type_p); field;
11700 field = DECL_CHAIN (field))
11701 {
11702 /* We'd like to look at the type of the field, but we can
11703 easily get infinite recursion. So assume it's pointed
11704 to elsewhere in the tree. Also, ignore things that
11705 aren't fields. */
11706 if (TREE_CODE (field) != FIELD_DECL)
11707 continue;
11708
11709 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11710 WALK_SUBTREE (DECL_SIZE (field));
11711 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11712 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11713 WALK_SUBTREE (DECL_QUALIFIER (field));
11714 }
11715 }
11716
11717 /* Same for scalar types. */
11718 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11719 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11720 || TREE_CODE (*type_p) == INTEGER_TYPE
11721 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11722 || TREE_CODE (*type_p) == REAL_TYPE)
11723 {
11724 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11725 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11726 }
11727
11728 WALK_SUBTREE (TYPE_SIZE (*type_p));
11729 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11730 }
11731 /* FALLTHRU */
11732
11733 default:
11734 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11735 {
11736 int i, len;
11737
11738 /* Walk over all the sub-trees of this operand. */
11739 len = TREE_OPERAND_LENGTH (*tp);
11740
11741 /* Go through the subtrees. We need to do this in forward order so
11742 that the scope of a FOR_EXPR is handled properly. */
11743 if (len)
11744 {
11745 for (i = 0; i < len - 1; ++i)
11746 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11747 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11748 }
11749 }
11750 /* If this is a type, walk the needed fields in the type. */
11751 else if (TYPE_P (*tp))
11752 return walk_type_fields (*tp, func, data, pset, lh);
11753 break;
11754 }
11755
11756 /* We didn't find what we were looking for. */
11757 return NULL_TREE;
11758
11759 #undef WALK_SUBTREE_TAIL
11760 }
11761 #undef WALK_SUBTREE
11762
11763 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11764
11765 tree
11766 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11767 walk_tree_lh lh)
11768 {
11769 tree result;
11770
11771 hash_set<tree> pset;
11772 result = walk_tree_1 (tp, func, data, &pset, lh);
11773 return result;
11774 }
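
/* An illustrative sketch, not part of the original code: a typical
   walk_tree_fn callback inspects *TP, prunes the walk by clearing
   *WALK_SUBTREES, or stops it by returning a non-NULL tree.  The name
   find_label_r below is made up for this example:

     static tree
     find_label_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == LABEL_DECL)
	 return *tp;
       if (TYPE_P (*tp))
	 *walk_subtrees = 0;
       return NULL_TREE;
     }

   A call such as walk_tree (&body, find_label_r, NULL, NULL) then yields the
   first LABEL_DECL found, or NULL_TREE if there is none.  */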
11775
11776
11777 tree
11778 tree_block (tree t)
11779 {
11780 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11781
11782 if (IS_EXPR_CODE_CLASS (c))
11783 return LOCATION_BLOCK (t->exp.locus);
11784 gcc_unreachable ();
11785 return NULL;
11786 }
11787
11788 void
11789 tree_set_block (tree t, tree b)
11790 {
11791 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11792
11793 if (IS_EXPR_CODE_CLASS (c))
11794 {
11795 if (b)
11796 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11797 else
11798 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11799 }
11800 else
11801 gcc_unreachable ();
11802 }
11803
11804 /* Create a nameless artificial label and put it in the current
11805 function context. The label has a location of LOC. Returns the
11806 newly created label. */
11807
11808 tree
11809 create_artificial_label (location_t loc)
11810 {
11811 tree lab = build_decl (loc,
11812 LABEL_DECL, NULL_TREE, void_type_node);
11813
11814 DECL_ARTIFICIAL (lab) = 1;
11815 DECL_IGNORED_P (lab) = 1;
11816 DECL_CONTEXT (lab) = current_function_decl;
11817 return lab;
11818 }
11819
11820 /* Given a tree, try to return a useful variable name that we can use
11821 to prefix a temporary that is being assigned the value of the tree.
11822 I.E. given <temp> = &A, return A. */
11823
11824 const char *
11825 get_name (tree t)
11826 {
11827 tree stripped_decl;
11828
11829 stripped_decl = t;
11830 STRIP_NOPS (stripped_decl);
11831 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11832 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11833 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11834 {
11835 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11836 if (!name)
11837 return NULL;
11838 return IDENTIFIER_POINTER (name);
11839 }
11840 else
11841 {
11842 switch (TREE_CODE (stripped_decl))
11843 {
11844 case ADDR_EXPR:
11845 return get_name (TREE_OPERAND (stripped_decl, 0));
11846 default:
11847 return NULL;
11848 }
11849 }
11850 }
11851
11852 /* Return true if FNTYPE has a variable argument list.  */
11853
11854 bool
11855 stdarg_p (const_tree fntype)
11856 {
11857 function_args_iterator args_iter;
11858 tree n = NULL_TREE, t;
11859
11860 if (!fntype)
11861 return false;
11862
11863 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11864 {
11865 n = t;
11866 }
11867
11868 return n != NULL_TREE && n != void_type_node;
11869 }
11870
11871 /* Return true if FNTYPE has a prototype.  */
11872
11873 bool
11874 prototype_p (const_tree fntype)
11875 {
11876 tree t;
11877
11878 gcc_assert (fntype != NULL_TREE);
11879
11880 t = TYPE_ARG_TYPES (fntype);
11881 return (t != NULL_TREE);
11882 }
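
/* An illustrative note, not part of the original code: for the C declarations

     int f ();   int g (void);   int h (int, ...);

   prototype_p is false for the type of f (its TYPE_ARG_TYPES is NULL) and
   true for g and h, while stdarg_p is true only for h, whose argument list
   does not end in void_type_node.  */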
11883
11884 /* If BLOCK is inlined from an __attribute__((__artificial__))
11885    routine, return a pointer to the location from where it has been
11886 called. */
11887 location_t *
11888 block_nonartificial_location (tree block)
11889 {
11890 location_t *ret = NULL;
11891
11892 while (block && TREE_CODE (block) == BLOCK
11893 && BLOCK_ABSTRACT_ORIGIN (block))
11894 {
11895 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11896
11897 while (TREE_CODE (ao) == BLOCK
11898 && BLOCK_ABSTRACT_ORIGIN (ao)
11899 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11900 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11901
11902 if (TREE_CODE (ao) == FUNCTION_DECL)
11903 {
11904 /* If AO is an artificial inline, point RET to the
11905 call site locus at which it has been inlined and continue
11906 the loop, in case AO's caller is also an artificial
11907 inline. */
11908 if (DECL_DECLARED_INLINE_P (ao)
11909 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11910 ret = &BLOCK_SOURCE_LOCATION (block);
11911 else
11912 break;
11913 }
11914 else if (TREE_CODE (ao) != BLOCK)
11915 break;
11916
11917 block = BLOCK_SUPERCONTEXT (block);
11918 }
11919 return ret;
11920 }
11921
11922
11923 /* If EXP is inlined from an __attribute__((__artificial__))
11924 function, return the location of the original call expression. */
11925
11926 location_t
11927 tree_nonartificial_location (tree exp)
11928 {
11929 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11930
11931 if (loc)
11932 return *loc;
11933 else
11934 return EXPR_LOCATION (exp);
11935 }
11936
11937
11938 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11939 nodes. */
11940
11941 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
11942
11943 hashval_t
11944 cl_option_hasher::hash (tree x)
11945 {
11946 const_tree const t = x;
11947 const char *p;
11948 size_t i;
11949 size_t len = 0;
11950 hashval_t hash = 0;
11951
11952 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11953 {
11954 p = (const char *)TREE_OPTIMIZATION (t);
11955 len = sizeof (struct cl_optimization);
11956 }
11957
11958 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11959 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11960
11961 else
11962 gcc_unreachable ();
11963
11964   /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11965 something else. */
11966 for (i = 0; i < len; i++)
11967 if (p[i])
11968 hash = (hash << 4) ^ ((i << 2) | p[i]);
11969
11970 return hash;
11971 }
11972
11973 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11974    TARGET_OPTION_NODE tree node) is the same as the value represented by
11975    Y.  */
11976
11977 bool
11978 cl_option_hasher::equal (tree x, tree y)
11979 {
11980 const_tree const xt = x;
11981 const_tree const yt = y;
11982 const char *xp;
11983 const char *yp;
11984 size_t len;
11985
11986 if (TREE_CODE (xt) != TREE_CODE (yt))
11987 return 0;
11988
11989 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11990 {
11991 xp = (const char *)TREE_OPTIMIZATION (xt);
11992 yp = (const char *)TREE_OPTIMIZATION (yt);
11993 len = sizeof (struct cl_optimization);
11994 }
11995
11996 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11997 {
11998 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11999 TREE_TARGET_OPTION (yt));
12000 }
12001
12002 else
12003 gcc_unreachable ();
12004
12005 return (memcmp (xp, yp, len) == 0);
12006 }
12007
12008 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12009
12010 tree
12011 build_optimization_node (struct gcc_options *opts)
12012 {
12013 tree t;
12014
12015 /* Use the cache of optimization nodes. */
12016
12017 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12018 opts);
12019
12020 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12021 t = *slot;
12022 if (!t)
12023 {
12024 /* Insert this one into the hash table. */
12025 t = cl_optimization_node;
12026 *slot = t;
12027
12028 /* Make a new node for next time round. */
12029 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12030 }
12031
12032 return t;
12033 }
12034
12035 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12036
12037 tree
12038 build_target_option_node (struct gcc_options *opts)
12039 {
12040 tree t;
12041
12042   /* Use the cache of option nodes.  */
12043
12044 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12045 opts);
12046
12047 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12048 t = *slot;
12049 if (!t)
12050 {
12051 /* Insert this one into the hash table. */
12052 t = cl_target_option_node;
12053 *slot = t;
12054
12055 /* Make a new node for next time round. */
12056 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12057 }
12058
12059 return t;
12060 }
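
/* An illustrative sketch, not part of the original code: because of the cache
   above, option sets can be compared by pointer identity.  A handler for the
   optimize attribute might do roughly

     tree opt = build_optimization_node (&global_options);
     if (opt != optimization_default_node)
       DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = opt;

   where FNDECL stands for the FUNCTION_DECL being processed; identical option
   sets always map to the same shared node.  */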
12061
12062 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12063 so that they aren't saved during PCH writing. */
12064
12065 void
12066 prepare_target_option_nodes_for_pch (void)
12067 {
12068 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12069 for (; iter != cl_option_hash_table->end (); ++iter)
12070 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12071 TREE_TARGET_GLOBALS (*iter) = NULL;
12072 }
12073
12074 /* Determine the "ultimate origin" of a block. The block may be an inlined
12075 instance of an inlined instance of a block which is local to an inline
12076 function, so we have to trace all of the way back through the origin chain
12077 to find out what sort of node actually served as the original seed for the
12078 given block. */
12079
12080 tree
12081 block_ultimate_origin (const_tree block)
12082 {
12083 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12084
12085 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12086 we're trying to output the abstract instance of this function. */
12087 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12088 return NULL_TREE;
12089
12090 if (immediate_origin == NULL_TREE)
12091 return NULL_TREE;
12092 else
12093 {
12094 tree ret_val;
12095 tree lookahead = immediate_origin;
12096
12097 do
12098 {
12099 ret_val = lookahead;
12100 lookahead = (TREE_CODE (ret_val) == BLOCK
12101 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12102 }
12103 while (lookahead != NULL && lookahead != ret_val);
12104
12105 /* The block's abstract origin chain may not be the *ultimate* origin of
12106 the block. It could lead to a DECL that has an abstract origin set.
12107 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12108 will give us if it has one). Note that DECL's abstract origins are
12109 supposed to be the most distant ancestor (or so decl_ultimate_origin
12110 claims), so we don't need to loop following the DECL origins. */
12111 if (DECL_P (ret_val))
12112 return DECL_ORIGIN (ret_val);
12113
12114 return ret_val;
12115 }
12116 }
12117
12118 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12119 no instruction. */
12120
12121 bool
12122 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12123 {
12124   /* Use precision rather than machine mode when we can, which gives
12125 the correct answer even for submode (bit-field) types. */
12126 if ((INTEGRAL_TYPE_P (outer_type)
12127 || POINTER_TYPE_P (outer_type)
12128 || TREE_CODE (outer_type) == OFFSET_TYPE)
12129 && (INTEGRAL_TYPE_P (inner_type)
12130 || POINTER_TYPE_P (inner_type)
12131 || TREE_CODE (inner_type) == OFFSET_TYPE))
12132 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12133
12134 /* Otherwise fall back on comparing machine modes (e.g. for
12135 aggregate types, floats). */
12136 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12137 }
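
/* An illustrative note, not part of the original code: with the usual ILP32
   or LP64 layouts a cast between int and unsigned int is a nop conversion
   (equal precision), as is int to a 32-bit pointer on ILP32, whereas int to
   long on LP64 is not, since the precisions 32 and 64 differ.  */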
12138
12139 /* Return true iff conversion in EXP generates no instruction. Mark
12140 it inline so that we fully inline into the stripping functions even
12141 though we have two uses of this function. */
12142
12143 static inline bool
12144 tree_nop_conversion (const_tree exp)
12145 {
12146 tree outer_type, inner_type;
12147
12148 if (!CONVERT_EXPR_P (exp)
12149 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12150 return false;
12151 if (TREE_OPERAND (exp, 0) == error_mark_node)
12152 return false;
12153
12154 outer_type = TREE_TYPE (exp);
12155 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12156
12157 if (!inner_type)
12158 return false;
12159
12160 return tree_nop_conversion_p (outer_type, inner_type);
12161 }
12162
12163 /* Return true iff conversion in EXP generates no instruction. Don't
12164 consider conversions changing the signedness. */
12165
12166 static bool
12167 tree_sign_nop_conversion (const_tree exp)
12168 {
12169 tree outer_type, inner_type;
12170
12171 if (!tree_nop_conversion (exp))
12172 return false;
12173
12174 outer_type = TREE_TYPE (exp);
12175 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12176
12177 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12178 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12179 }
12180
12181 /* Strip conversions from EXP according to tree_nop_conversion and
12182 return the resulting expression. */
12183
12184 tree
12185 tree_strip_nop_conversions (tree exp)
12186 {
12187 while (tree_nop_conversion (exp))
12188 exp = TREE_OPERAND (exp, 0);
12189 return exp;
12190 }
12191
12192 /* Strip conversions from EXP according to tree_sign_nop_conversion
12193 and return the resulting expression. */
12194
12195 tree
12196 tree_strip_sign_nop_conversions (tree exp)
12197 {
12198 while (tree_sign_nop_conversion (exp))
12199 exp = TREE_OPERAND (exp, 0);
12200 return exp;
12201 }
12202
12203 /* Avoid any floating point extensions from EXP. */
12204 tree
12205 strip_float_extensions (tree exp)
12206 {
12207 tree sub, expt, subt;
12208
12209   /* For a floating point constant, look up the narrowest type that can hold
12210      it properly and handle it like (type)(narrowest_type)constant.
12211      This way we can optimize, for instance, a=a*2.0 where "a" is float
12212      but 2.0 is a double constant.  */
12213 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12214 {
12215 REAL_VALUE_TYPE orig;
12216 tree type = NULL;
12217
12218 orig = TREE_REAL_CST (exp);
12219 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12220 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12221 type = float_type_node;
12222 else if (TYPE_PRECISION (TREE_TYPE (exp))
12223 > TYPE_PRECISION (double_type_node)
12224 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12225 type = double_type_node;
12226 if (type)
12227 return build_real_truncate (type, orig);
12228 }
12229
12230 if (!CONVERT_EXPR_P (exp))
12231 return exp;
12232
12233 sub = TREE_OPERAND (exp, 0);
12234 subt = TREE_TYPE (sub);
12235 expt = TREE_TYPE (exp);
12236
12237 if (!FLOAT_TYPE_P (subt))
12238 return exp;
12239
12240 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12241 return exp;
12242
12243 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12244 return exp;
12245
12246 return strip_float_extensions (sub);
12247 }
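
/* Worked example (illustrative, not part of the original code): in
   "float a; ... a * 2.0 ..." the multiplication is performed in double, but
   strip_float_extensions applied to (double) a returns A itself, and applied
   to the REAL_CST 2.0 returns an equivalent float constant, allowing folders
   to narrow the whole operation back to float.  */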
12248
12249 /* Strip out all handled components that produce invariant
12250 offsets. */
12251
12252 const_tree
12253 strip_invariant_refs (const_tree op)
12254 {
12255 while (handled_component_p (op))
12256 {
12257 switch (TREE_CODE (op))
12258 {
12259 case ARRAY_REF:
12260 case ARRAY_RANGE_REF:
12261 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12262 || TREE_OPERAND (op, 2) != NULL_TREE
12263 || TREE_OPERAND (op, 3) != NULL_TREE)
12264 return NULL;
12265 break;
12266
12267 case COMPONENT_REF:
12268 if (TREE_OPERAND (op, 2) != NULL_TREE)
12269 return NULL;
12270 break;
12271
12272 default:;
12273 }
12274 op = TREE_OPERAND (op, 0);
12275 }
12276
12277 return op;
12278 }
12279
12280 static GTY(()) tree gcc_eh_personality_decl;
12281
12282 /* Return the GCC personality function decl. */
12283
12284 tree
12285 lhd_gcc_personality (void)
12286 {
12287 if (!gcc_eh_personality_decl)
12288 gcc_eh_personality_decl = build_personality_function ("gcc");
12289 return gcc_eh_personality_decl;
12290 }
12291
12292 /* TARGET is a call target of a GIMPLE call statement
12293    (obtained by gimple_call_fn).  Return true if it is
12294    an OBJ_TYPE_REF representing a virtual call to a C++ method
12295    (as opposed to an OBJ_TYPE_REF representing ObjC calls
12296    through a cast, where the middle-end devirtualization machinery
12297    can't apply).  */
12298
12299 bool
12300 virtual_method_call_p (const_tree target)
12301 {
12302 if (TREE_CODE (target) != OBJ_TYPE_REF)
12303 return false;
12304 tree t = TREE_TYPE (target);
12305 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12306 t = TREE_TYPE (t);
12307 if (TREE_CODE (t) == FUNCTION_TYPE)
12308 return false;
12309 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12310 /* If we do not have BINFO associated, it means that type was built
12311 without devirtualization enabled. Do not consider this a virtual
12312 call. */
12313 if (!TYPE_BINFO (obj_type_ref_class (target)))
12314 return false;
12315 return true;
12316 }
12317
12318 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12319
12320 tree
12321 obj_type_ref_class (const_tree ref)
12322 {
12323 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12324 ref = TREE_TYPE (ref);
12325 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12326 ref = TREE_TYPE (ref);
12327   /* We look for the type THIS points to.  ObjC also builds
12328      OBJ_TYPE_REF with non-method calls; their first parameter
12329      ID, however, also corresponds to the class type.  */
12330 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12331 || TREE_CODE (ref) == FUNCTION_TYPE);
12332 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12333 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12334 return TREE_TYPE (ref);
12335 }
12336
12337 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12338
12339 static tree
12340 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12341 {
12342 unsigned int i;
12343 tree base_binfo, b;
12344
12345 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12346 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12347 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12348 return base_binfo;
12349 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12350 return b;
12351 return NULL;
12352 }
12353
12354 /* Try to find a base info of BINFO that would have its field decl at offset
12355 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12356    found, return it; otherwise return NULL_TREE.  */
12357
12358 tree
12359 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12360 {
12361 tree type = BINFO_TYPE (binfo);
12362
12363 while (true)
12364 {
12365 HOST_WIDE_INT pos, size;
12366 tree fld;
12367 int i;
12368
12369 if (types_same_for_odr (type, expected_type))
12370 return binfo;
12371 if (offset < 0)
12372 return NULL_TREE;
12373
12374 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12375 {
12376 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12377 continue;
12378
12379 pos = int_bit_position (fld);
12380 size = tree_to_uhwi (DECL_SIZE (fld));
12381 if (pos <= offset && (pos + size) > offset)
12382 break;
12383 }
12384 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12385 return NULL_TREE;
12386
12387 /* Offset 0 indicates the primary base, whose vtable contents are
12388 represented in the binfo for the derived class. */
12389 else if (offset != 0)
12390 {
12391 tree found_binfo = NULL, base_binfo;
12392 /* Offsets in BINFO are in bytes relative to the whole structure
12393 while POS is in bits relative to the containing field. */
12394 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12395 / BITS_PER_UNIT);
12396
12397 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12398 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12399 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12400 {
12401 found_binfo = base_binfo;
12402 break;
12403 }
12404 if (found_binfo)
12405 binfo = found_binfo;
12406 else
12407 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12408 binfo_offset);
12409 }
12410
12411 type = TREE_TYPE (fld);
12412 offset -= pos;
12413 }
12414 }
12415
12416 /* Returns true if X is a typedef decl. */
12417
12418 bool
12419 is_typedef_decl (const_tree x)
12420 {
12421 return (x && TREE_CODE (x) == TYPE_DECL
12422 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12423 }
12424
12425 /* Returns true iff TYPE is a type variant created for a typedef. */
12426
12427 bool
12428 typedef_variant_p (const_tree type)
12429 {
12430 return is_typedef_decl (TYPE_NAME (type));
12431 }
12432
12433 /* Warn about a use of an identifier which was marked deprecated. */
12434 void
12435 warn_deprecated_use (tree node, tree attr)
12436 {
12437 const char *msg;
12438
12439 if (node == 0 || !warn_deprecated_decl)
12440 return;
12441
12442 if (!attr)
12443 {
12444 if (DECL_P (node))
12445 attr = DECL_ATTRIBUTES (node);
12446 else if (TYPE_P (node))
12447 {
12448 tree decl = TYPE_STUB_DECL (node);
12449 if (decl)
12450 attr = lookup_attribute ("deprecated",
12451 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12452 }
12453 }
12454
12455 if (attr)
12456 attr = lookup_attribute ("deprecated", attr);
12457
12458 if (attr)
12459 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12460 else
12461 msg = NULL;
12462
12463 bool w;
12464 if (DECL_P (node))
12465 {
12466 if (msg)
12467 w = warning (OPT_Wdeprecated_declarations,
12468 "%qD is deprecated: %s", node, msg);
12469 else
12470 w = warning (OPT_Wdeprecated_declarations,
12471 "%qD is deprecated", node);
12472 if (w)
12473 inform (DECL_SOURCE_LOCATION (node), "declared here");
12474 }
12475 else if (TYPE_P (node))
12476 {
12477 tree what = NULL_TREE;
12478 tree decl = TYPE_STUB_DECL (node);
12479
12480 if (TYPE_NAME (node))
12481 {
12482 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12483 what = TYPE_NAME (node);
12484 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12485 && DECL_NAME (TYPE_NAME (node)))
12486 what = DECL_NAME (TYPE_NAME (node));
12487 }
12488
12489 if (decl)
12490 {
12491 if (what)
12492 {
12493 if (msg)
12494 w = warning (OPT_Wdeprecated_declarations,
12495 "%qE is deprecated: %s", what, msg);
12496 else
12497 w = warning (OPT_Wdeprecated_declarations,
12498 "%qE is deprecated", what);
12499 }
12500 else
12501 {
12502 if (msg)
12503 w = warning (OPT_Wdeprecated_declarations,
12504 "type is deprecated: %s", msg);
12505 else
12506 w = warning (OPT_Wdeprecated_declarations,
12507 "type is deprecated");
12508 }
12509 if (w)
12510 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12511 }
12512 else
12513 {
12514 if (what)
12515 {
12516 if (msg)
12517 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12518 what, msg);
12519 else
12520 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12521 }
12522 else
12523 {
12524 if (msg)
12525 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12526 msg);
12527 else
12528 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12529 }
12530 }
12531 }
12532 }
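
/* An illustrative note, not part of the original code: for a declaration such
   as

     int old_fn (void) __attribute__ ((deprecated ("use new_fn")));

   the lookup above finds the "deprecated" attribute, MSG becomes "use new_fn",
   and a -Wdeprecated-declarations warning quoting it is emitted, followed by a
   "declared here" note at the declaration's location.  */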
12533
12534 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12535 somewhere in it. */
12536
12537 bool
12538 contains_bitfld_component_ref_p (const_tree ref)
12539 {
12540 while (handled_component_p (ref))
12541 {
12542 if (TREE_CODE (ref) == COMPONENT_REF
12543 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12544 return true;
12545 ref = TREE_OPERAND (ref, 0);
12546 }
12547
12548 return false;
12549 }
12550
12551 /* Try to determine whether a TRY_CATCH expression can fall through.
12552 This is a subroutine of block_may_fallthru. */
12553
12554 static bool
12555 try_catch_may_fallthru (const_tree stmt)
12556 {
12557 tree_stmt_iterator i;
12558
12559 /* If the TRY block can fall through, the whole TRY_CATCH can
12560 fall through. */
12561 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12562 return true;
12563
12564 i = tsi_start (TREE_OPERAND (stmt, 1));
12565 switch (TREE_CODE (tsi_stmt (i)))
12566 {
12567 case CATCH_EXPR:
12568 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12569 catch expression and a body. The whole TRY_CATCH may fall
12570 through iff any of the catch bodies falls through. */
12571 for (; !tsi_end_p (i); tsi_next (&i))
12572 {
12573 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12574 return true;
12575 }
12576 return false;
12577
12578 case EH_FILTER_EXPR:
12579 /* The exception filter expression only matters if there is an
12580 exception. If the exception does not match EH_FILTER_TYPES,
12581 we will execute EH_FILTER_FAILURE, and we will fall through
12582 if that falls through. If the exception does match
12583 EH_FILTER_TYPES, the stack unwinder will continue up the
12584 stack, so we will not fall through. We don't know whether we
12585 will throw an exception which matches EH_FILTER_TYPES or not,
12586 so we just ignore EH_FILTER_TYPES and assume that we might
12587 throw an exception which doesn't match. */
12588 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12589
12590 default:
12591 /* This case represents statements to be executed when an
12592 exception occurs. Those statements are implicitly followed
12593 by a RESX statement to resume execution after the exception.
12594 So in this case the TRY_CATCH never falls through. */
12595 return false;
12596 }
12597 }
12598
12599 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12600 need not be 100% accurate; simply be conservative and return true if we
12601 don't know. This is used only to avoid stupidly generating extra code.
12602 If we're wrong, we'll just delete the extra code later. */
12603
12604 bool
12605 block_may_fallthru (const_tree block)
12606 {
12607 /* This CONST_CAST is okay because expr_last returns its argument
12608 unmodified and we assign it to a const_tree. */
12609 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12610
12611 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12612 {
12613 case GOTO_EXPR:
12614 case RETURN_EXPR:
12615 /* Easy cases. If the last statement of the block implies
12616 control transfer, then we can't fall through. */
12617 return false;
12618
12619 case SWITCH_EXPR:
12620 /* If SWITCH_LABELS is set, this is lowered, and represents a
12621 branch to a selected label and hence can not fall through.
12622 Otherwise SWITCH_BODY is set, and the switch can fall
12623 through. */
12624 return SWITCH_LABELS (stmt) == NULL_TREE;
12625
12626 case COND_EXPR:
12627 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12628 return true;
12629 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12630
12631 case BIND_EXPR:
12632 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12633
12634 case TRY_CATCH_EXPR:
12635 return try_catch_may_fallthru (stmt);
12636
12637 case TRY_FINALLY_EXPR:
12638 /* The finally clause is always executed after the try clause,
12639 so if it does not fall through, then the try-finally will not
12640 fall through. Otherwise, if the try clause does not fall
12641 through, then when the finally clause falls through it will
12642 resume execution wherever the try clause was going. So the
12643 whole try-finally will only fall through if both the try
12644 clause and the finally clause fall through. */
12645 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12646 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12647
12648 case MODIFY_EXPR:
12649 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12650 stmt = TREE_OPERAND (stmt, 1);
12651 else
12652 return true;
12653 /* FALLTHRU */
12654
12655 case CALL_EXPR:
12656 /* Functions that do not return do not fall through. */
12657 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12658
12659 case CLEANUP_POINT_EXPR:
12660 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12661
12662 case TARGET_EXPR:
12663 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12664
12665 case ERROR_MARK:
12666 return true;
12667
12668 default:
12669 return lang_hooks.block_may_fallthru (stmt);
12670 }
12671 }
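
/* An illustrative note, not part of the original code: a statement list ending
   in "return x;" cannot fall through (the RETURN_EXPR case), and neither can
   one ending in a call to a noreturn function such as abort (), which the
   CALL_EXPR case catches via ECF_NORETURN; anything the function cannot
   classify is conservatively reported as possibly falling through.  */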
12672
12673 /* True if we are using EH to handle cleanups. */
12674 static bool using_eh_for_cleanups_flag = false;
12675
12676 /* This routine is called from front ends to indicate eh should be used for
12677 cleanups. */
12678 void
12679 using_eh_for_cleanups (void)
12680 {
12681 using_eh_for_cleanups_flag = true;
12682 }
12683
12684 /* Query whether EH is used for cleanups. */
12685 bool
12686 using_eh_for_cleanups_p (void)
12687 {
12688 return using_eh_for_cleanups_flag;
12689 }
12690
12691 /* Wrapper for tree_code_name to ensure that the tree code is valid.  */
12692 const char *
12693 get_tree_code_name (enum tree_code code)
12694 {
12695 const char *invalid = "<invalid tree code>";
12696
12697 if (code >= MAX_TREE_CODES)
12698 return invalid;
12699
12700 return tree_code_name[code];
12701 }
12702
12703 /* Drops the TREE_OVERFLOW flag from T. */
12704
12705 tree
12706 drop_tree_overflow (tree t)
12707 {
12708 gcc_checking_assert (TREE_OVERFLOW (t));
12709
12710 /* For tree codes with a sharing machinery re-build the result. */
12711 if (TREE_CODE (t) == INTEGER_CST)
12712 return wide_int_to_tree (TREE_TYPE (t), t);
12713
12714 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12715 and drop the flag. */
12716 t = copy_node (t);
12717 TREE_OVERFLOW (t) = 0;
12718 return t;
12719 }
12720
12721 /* Given a memory reference expression T, return its base address.
12722 The base address of a memory reference expression is the main
12723 object being referenced. For instance, the base address for
12724 'array[i].fld[j]' is 'array'. You can think of this as stripping
12725 away the offset part from a memory address.
12726
12727 This function calls handled_component_p to strip away all the inner
12728 parts of the memory reference until it reaches the base object. */
12729
12730 tree
12731 get_base_address (tree t)
12732 {
12733 while (handled_component_p (t))
12734 t = TREE_OPERAND (t, 0);
12735
12736 if ((TREE_CODE (t) == MEM_REF
12737 || TREE_CODE (t) == TARGET_MEM_REF)
12738 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12739 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12740
12741 /* ??? Either the alias oracle or all callers need to properly deal
12742 with WITH_SIZE_EXPRs before we can look through those. */
12743 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12744 return NULL_TREE;
12745
12746 return t;
12747 }
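
/* An illustrative note, not part of the original code: for the reference
   a.b[i].c the loop above strips the COMPONENT_REFs and the ARRAY_REF and
   returns the VAR_DECL for A; for (*p).x it returns the MEM_REF unless the
   MEM_REF's address operand is an ADDR_EXPR such as &q, in which case Q
   itself is returned.  */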
12748
12749 /* Return a tree of sizetype representing the size, in bytes, of the element
12750 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12751
12752 tree
12753 array_ref_element_size (tree exp)
12754 {
12755 tree aligned_size = TREE_OPERAND (exp, 3);
12756 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12757 location_t loc = EXPR_LOCATION (exp);
12758
12759 /* If a size was specified in the ARRAY_REF, it's the size measured
12760 in alignment units of the element type. So multiply by that value. */
12761 if (aligned_size)
12762 {
12763 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12764 sizetype from another type of the same width and signedness. */
12765 if (TREE_TYPE (aligned_size) != sizetype)
12766 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12767 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12768 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12769 }
12770
12771 /* Otherwise, take the size from that of the element type. Substitute
12772 any PLACEHOLDER_EXPR that we have. */
12773 else
12774 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12775 }
12776
12777 /* Return a tree representing the lower bound of the array mentioned in
12778 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12779
12780 tree
12781 array_ref_low_bound (tree exp)
12782 {
12783 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12784
12785 /* If a lower bound is specified in EXP, use it. */
12786 if (TREE_OPERAND (exp, 2))
12787 return TREE_OPERAND (exp, 2);
12788
12789 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12790 substituting for a PLACEHOLDER_EXPR as needed. */
12791 if (domain_type && TYPE_MIN_VALUE (domain_type))
12792 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12793
12794 /* Otherwise, return a zero of the appropriate type. */
12795 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12796 }
12797
12798 /* Return a tree representing the upper bound of the array mentioned in
12799 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12800
12801 tree
12802 array_ref_up_bound (tree exp)
12803 {
12804 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12805
12806 /* If there is a domain type and it has an upper bound, use it, substituting
12807 for a PLACEHOLDER_EXPR as needed. */
12808 if (domain_type && TYPE_MAX_VALUE (domain_type))
12809 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12810
12811 /* Otherwise fail. */
12812 return NULL_TREE;
12813 }
12814
12815 /* Returns true if REF is an array reference to an array at the end of
12816 a structure. If this is the case, the array may be allocated larger
12817 than its upper bound implies. */
12818
12819 bool
12820 array_at_struct_end_p (tree ref)
12821 {
12822 if (TREE_CODE (ref) != ARRAY_REF
12823 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12824 return false;
12825
12826 while (handled_component_p (ref))
12827 {
12828 /* If the reference chain contains a component reference to a
12829 	 non-union type and another field follows, the reference
12830 is not at the end of a structure. */
12831 if (TREE_CODE (ref) == COMPONENT_REF
12832 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12833 {
12834 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12835 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12836 nextf = DECL_CHAIN (nextf);
12837 if (nextf)
12838 return false;
12839 }
12840
12841 ref = TREE_OPERAND (ref, 0);
12842 }
12843
12844 /* If the reference is based on a declared entity, the size of the array
12845 is constrained by its given domain. */
12846 if (DECL_P (ref))
12847 return false;
12848
12849 return true;
12850 }
12851
12852 /* Return a tree representing the offset, in bytes, of the field referenced
12853 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12854
12855 tree
12856 component_ref_field_offset (tree exp)
12857 {
12858 tree aligned_offset = TREE_OPERAND (exp, 2);
12859 tree field = TREE_OPERAND (exp, 1);
12860 location_t loc = EXPR_LOCATION (exp);
12861
12862 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12863 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12864 value. */
12865 if (aligned_offset)
12866 {
12867 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12868 sizetype from another type of the same width and signedness. */
12869 if (TREE_TYPE (aligned_offset) != sizetype)
12870 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12871 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12872 size_int (DECL_OFFSET_ALIGN (field)
12873 / BITS_PER_UNIT));
12874 }
12875
12876 /* Otherwise, take the offset from that of the field. Substitute
12877 any PLACEHOLDER_EXPR that we have. */
12878 else
12879 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12880 }
12881
12882 /* Return the machine mode of T. For vectors, returns the mode of the
12883 inner type. The main use case is to feed the result to HONOR_NANS,
12884 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12885
12886 machine_mode
12887 element_mode (const_tree t)
12888 {
12889 if (!TYPE_P (t))
12890 t = TREE_TYPE (t);
12891 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12892 t = TREE_TYPE (t);
12893 return TYPE_MODE (t);
12894 }
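
/* An illustrative note, not part of the original code: for a V4SF vector type
   this returns SFmode rather than the vector mode, and for complex double it
   returns DFmode, so HONOR_NANS and friends always see the scalar mode.  */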
12895
12896
12897 /* Verify that basic properties of T match TV and thus T can be a variant of
12898    TV.  TV should be the more specific variant (i.e. the main variant).  */
12899
12900 static bool
12901 verify_type_variant (const_tree t, tree tv)
12902 {
12903 /* Type variant can differ by:
12904
12905 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12906 ENCODE_QUAL_ADDR_SPACE.
12907    - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
12908      in this case some values may not be set in the variant types
12909      (see TYPE_COMPLETE_P checks).
12910    - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12911    - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12912    - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12913    - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12914    - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
12915      this is necessary to make it possible to merge types from different TUs
12916    - arrays, pointers and references may have TREE_TYPE that is a variant
12917      of the TREE_TYPE of their main variants.
12918    - aggregates may have a new TYPE_FIELDS list that lists variants of
12919      the main variant's TYPE_FIELDS.
12920    - vector types may differ by TYPE_VECTOR_OPAQUE
12921    - TYPE_METHODS is always NULL for variant types and is maintained for
12922      the main variant only.
12923 */
12924
12925 /* Convenience macro for matching individual fields. */
12926 #define verify_variant_match(flag) \
12927 do { \
12928 if (flag (tv) != flag (t)) \
12929 { \
12930 error ("type variant differs by " #flag "."); \
12931 debug_tree (tv); \
12932 return false; \
12933 } \
12934 } while (false)
12935
12936 /* tree_base checks. */
12937
12938 verify_variant_match (TREE_CODE);
12939 /* FIXME: Ada builds non-artificial variants of artificial types. */
12940 if (TYPE_ARTIFICIAL (tv) && 0)
12941 verify_variant_match (TYPE_ARTIFICIAL);
12942 if (POINTER_TYPE_P (tv))
12943 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12944   /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for the Ada build.  */
12945 verify_variant_match (TYPE_UNSIGNED);
12946 verify_variant_match (TYPE_ALIGN_OK);
12947 verify_variant_match (TYPE_PACKED);
12948 if (TREE_CODE (t) == REFERENCE_TYPE)
12949 verify_variant_match (TYPE_REF_IS_RVALUE);
12950 verify_variant_match (TYPE_SATURATING);
12951   /* FIXME: This check triggers during the libstdc++ build.  */
12952 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12953 verify_variant_match (TYPE_FINAL_P);
12954
12955 /* tree_type_common checks. */
12956
12957 if (COMPLETE_TYPE_P (t))
12958 {
12959 verify_variant_match (TYPE_SIZE);
12960 verify_variant_match (TYPE_MODE);
12961 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12962 	  /* FIXME: ideally we should compare pointer equality, but the Java FE
12963 	     produces variants where the size is an INTEGER_CST of a different type
12964 	     (int wrt size_type) during the libjava build.  */
12965 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12966 {
12967 error ("type variant has different TYPE_SIZE_UNIT");
12968 debug_tree (tv);
12969 error ("type variant's TYPE_SIZE_UNIT");
12970 debug_tree (TYPE_SIZE_UNIT (tv));
12971 error ("type's TYPE_SIZE_UNIT");
12972 debug_tree (TYPE_SIZE_UNIT (t));
12973 return false;
12974 }
12975 }
12976 verify_variant_match (TYPE_PRECISION);
12977 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12978 if (RECORD_OR_UNION_TYPE_P (t))
12979 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12980 else if (TREE_CODE (t) == ARRAY_TYPE)
12981 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12982   /* During LTO we merge variant lists from different translation units
12983      that may differ by TYPE_CONTEXT, which in turn may point
12984 to TRANSLATION_UNIT_DECL.
12985 Ada also builds variants of types with different TYPE_CONTEXT. */
12986 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12987 verify_variant_match (TYPE_CONTEXT);
12988 verify_variant_match (TYPE_STRING_FLAG);
12989 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12990 verify_variant_match (TYPE_ALIAS_SET);
12991
12992 /* tree_type_non_common checks. */
12993
12994 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12995      and dangles the pointer from time to time.  */
12996 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12997 && (in_lto_p || !TYPE_VFIELD (tv)
12998 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12999 {
13000 error ("type variant has different TYPE_VFIELD");
13001 debug_tree (tv);
13002 return false;
13003 }
13004 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13005 || TREE_CODE (t) == INTEGER_TYPE
13006 || TREE_CODE (t) == BOOLEAN_TYPE
13007 || TREE_CODE (t) == REAL_TYPE
13008 || TREE_CODE (t) == FIXED_POINT_TYPE)
13009 {
13010 verify_variant_match (TYPE_MAX_VALUE);
13011 verify_variant_match (TYPE_MIN_VALUE);
13012 }
13013 if (TREE_CODE (t) == METHOD_TYPE)
13014 verify_variant_match (TYPE_METHOD_BASETYPE);
13015 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13016 {
13017 error ("type variant has TYPE_METHODS");
13018 debug_tree (tv);
13019 return false;
13020 }
13021 if (TREE_CODE (t) == OFFSET_TYPE)
13022 verify_variant_match (TYPE_OFFSET_BASETYPE);
13023 if (TREE_CODE (t) == ARRAY_TYPE)
13024 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13025 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13026 or even in the type's main variant. This is needed to make the bootstrap
13027 pass, and the bug seems new in GCC 5.
13028 C++ FE should be updated to make this consistent and we should check
13029 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13030 is a match with main variant.
13031
13032 Also disable the check for Java for now because of a parser hack that first
13033 builds a dummy BINFO and then sometimes replaces it by the real BINFO in some
13034 of the copies. */
13035 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13036 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13037 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13038 Since there is no cheap way to tell a C++ type from a Java type w/o LTO,
13039 do the checking at LTO time only. */
13040 && (in_lto_p && odr_type_p (t)))
13041 {
13042 error ("type variant has different TYPE_BINFO");
13043 debug_tree (tv);
13044 error ("type variant's TYPE_BINFO");
13045 debug_tree (TYPE_BINFO (tv));
13046 error ("type's TYPE_BINFO");
13047 debug_tree (TYPE_BINFO (t));
13048 return false;
13049 }
13050
13051 /* Check various uses of TYPE_VALUES_RAW. */
13052 if (TREE_CODE (t) == ENUMERAL_TYPE)
13053 verify_variant_match (TYPE_VALUES);
13054 else if (TREE_CODE (t) == ARRAY_TYPE)
13055 verify_variant_match (TYPE_DOMAIN);
13056 /* Permit incomplete variants of complete type. While FEs may complete
13057 all variants, this does not happen for C++ templates in all cases. */
13058 else if (RECORD_OR_UNION_TYPE_P (t)
13059 && COMPLETE_TYPE_P (t)
13060 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13061 {
13062 tree f1, f2;
13063
13064 /* Fortran builds qualified variants as new records with items of
13065 qualified type. Verify that they look the same. */
13066 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13067 f1 && f2;
13068 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13069 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13070 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13071 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13072 /* FIXME: gfc_nonrestricted_type builds all types as variants
13073 with the exception of pointer types. It deeply copies the type,
13074 which means that we may end up with a variant type
13075 referring to a non-variant pointer. We may change it to
13076 produce types as variants, too, like
13077 objc_get_protocol_qualified_type does. */
13078 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13079 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13080 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13081 break;
13082 if (f1 || f2)
13083 {
13084 error ("type variant has different TYPE_FIELDS");
13085 debug_tree (tv);
13086 error ("first mismatch is field");
13087 debug_tree (f1);
13088 error ("and field");
13089 debug_tree (f2);
13090 return false;
13091 }
13092 }
13093 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13094 verify_variant_match (TYPE_ARG_TYPES);
13095 /* For C++ the qualified variant of an array type is really an array type
13096 of the qualified TREE_TYPE.
13097 ObjC builds variants of pointer types where the pointed-to type is a
13098 variant, too, in objc_get_protocol_qualified_type. */
13099 if (TREE_TYPE (t) != TREE_TYPE (tv)
13100 && ((TREE_CODE (t) != ARRAY_TYPE
13101 && !POINTER_TYPE_P (t))
13102 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13103 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13104 {
13105 error ("type variant has different TREE_TYPE");
13106 debug_tree (tv);
13107 error ("type variant's TREE_TYPE");
13108 debug_tree (TREE_TYPE (tv));
13109 error ("type's TREE_TYPE");
13110 debug_tree (TREE_TYPE (t));
13111 return false;
13112 }
13113 if (type_with_alias_set_p (t)
13114 && !gimple_canonical_types_compatible_p (t, tv, false))
13115 {
13116 error ("type is not compatible with its vairant");
13117 debug_tree (tv);
13118 error ("type variant's TREE_TYPE");
13119 debug_tree (TREE_TYPE (tv));
13120 error ("type's TREE_TYPE");
13121 debug_tree (TREE_TYPE (t));
13122 return false;
13123 }
13124 return true;
13125 #undef verify_variant_match
13126 }
13127
13128
13129 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13130 the middle-end types_compatible_p function. It needs to avoid
13131 claiming types are different for types that should be treated
13132 the same with respect to TBAA. Canonical types are also used
13133 for IL consistency checks via the useless_type_conversion_p
13134 predicate which does not handle all type kinds itself but falls
13135 back to pointer-comparison of TYPE_CANONICAL for aggregates
13136 for example. */
13137
13138 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13139 type calculation because we need to allow inter-operability between signed
13140 and unsigned variants. */
13141
13142 bool
13143 type_with_interoperable_signedness (const_tree type)
13144 {
13145 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13146 signed char and unsigned char. Similarly the Fortran FE builds
13147 C_SIZE_T as a signed type, while C defines it as unsigned. */
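/* For example, on a target where signed_char_type_node is 8 bits wide and
   size_type_node has pointer width, any type whose canonical-merging code is
   INTEGER_TYPE and whose precision is one of those two values satisfies this
   predicate, whatever its signedness.  The precisions are target-dependent;
   this is only an illustration.  */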
13148
13149 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13150 == INTEGER_TYPE
13151 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13152 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13153 }
13154
13155 /* Return true iff T1 and T2 are structurally identical as far as
13156 TBAA is concerned.
13157 This function is used both by lto.c canonical type merging and by the
13158 verifier. If TRUST_TYPE_CANONICAL is set, we do not look into the structure
13159 of types that have TYPE_CANONICAL defined and assume they are equivalent. */
13160
13161 bool
13162 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13163 bool trust_type_canonical)
13164 {
13165 /* Type variants should be the same as the main variant. When not doing sanity
13166 checking to verify this fact, go to the main variants and save some work. */
13167 if (trust_type_canonical)
13168 {
13169 t1 = TYPE_MAIN_VARIANT (t1);
13170 t2 = TYPE_MAIN_VARIANT (t2);
13171 }
13172
13173 /* Check first for the obvious case of pointer identity. */
13174 if (t1 == t2)
13175 return true;
13176
13177 /* Check that we have two types to compare. */
13178 if (t1 == NULL_TREE || t2 == NULL_TREE)
13179 return false;
13180
13181 /* We consider complete types always compatible with an incomplete type.
13182 This does not make sense for canonical type calculation and thus we
13183 need to ensure that we are never called on it.
13184
13185 FIXME: For more correctness the function probably should have three modes
13186 1) mode assuming that types are complete, matching their structure
13187 2) mode allowing incomplete types but producing equivalence classes
13188 and thus ignoring all info from complete types
13189 3) mode allowing incomplete types to match complete but checking
13190 compatibility between complete types.
13191
13192 1 and 2 can be used for canonical type calculation. 3 is the real
13193 definition of type compatibility that can be used e.g. for warnings during
13194 declaration merging. */
13195
13196 gcc_assert (!trust_type_canonical
13197 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13198 /* If the types have been previously registered and found equal
13199 they still are. */
13200 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13201 && trust_type_canonical)
13202 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13203
13204 /* Can't be the same type if the types don't have the same code. */
13205 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13206 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13207 return false;
13208
13209 /* Qualifiers do not matter for canonical type comparison purposes. */
13210
13211 /* Void types and nullptr types are always the same. */
13212 if (TREE_CODE (t1) == VOID_TYPE
13213 || TREE_CODE (t1) == NULLPTR_TYPE)
13214 return true;
13215
13216 /* Can't be the same type if they have different modes. */
13217 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13218 return false;
13219
13220 /* Non-aggregate types can be handled cheaply. */
13221 if (INTEGRAL_TYPE_P (t1)
13222 || SCALAR_FLOAT_TYPE_P (t1)
13223 || FIXED_POINT_TYPE_P (t1)
13224 || TREE_CODE (t1) == VECTOR_TYPE
13225 || TREE_CODE (t1) == COMPLEX_TYPE
13226 || TREE_CODE (t1) == OFFSET_TYPE
13227 || POINTER_TYPE_P (t1))
13228 {
13229 /* Can't be the same type if they have different precision. */
13230 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13231 return false;
13232
13233 /* In some cases the signed and unsigned types are required to be
13234 inter-operable. */
13235 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13236 && !type_with_interoperable_signedness (t1))
13237 return false;
13238
13239 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13240 interoperable with "signed char". Unless all frontends are revisited
13241 to agree on these types, we must ignore the flag completely. */
13242
13243 /* The Fortran standard defines the C_PTR type to be compatible with every
13244 C pointer. For this reason we need to glob all pointers into one.
13245 Still, pointers in different address spaces are not compatible. */
13246 if (POINTER_TYPE_P (t1))
13247 {
13248 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13249 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13250 return false;
13251 }
13252
13253 /* Tail-recurse to components. */
13254 if (TREE_CODE (t1) == VECTOR_TYPE
13255 || TREE_CODE (t1) == COMPLEX_TYPE)
13256 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13257 TREE_TYPE (t2),
13258 trust_type_canonical);
13259
13260 return true;
13261 }
13262
13263 /* Do type-specific comparisons. */
13264 switch (TREE_CODE (t1))
13265 {
13266 case ARRAY_TYPE:
13267 /* Array types are the same if the element types are the same and
13268 the number of elements is the same. */
13269 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13270 trust_type_canonical)
13271 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13272 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13273 return false;
13274 else
13275 {
13276 tree i1 = TYPE_DOMAIN (t1);
13277 tree i2 = TYPE_DOMAIN (t2);
13278
13279 /* For an incomplete external array, the type domain can be
13280 NULL_TREE. Check this condition also. */
13281 if (i1 == NULL_TREE && i2 == NULL_TREE)
13282 return true;
13283 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13284 return false;
13285 else
13286 {
13287 tree min1 = TYPE_MIN_VALUE (i1);
13288 tree min2 = TYPE_MIN_VALUE (i2);
13289 tree max1 = TYPE_MAX_VALUE (i1);
13290 tree max2 = TYPE_MAX_VALUE (i2);
13291
13292 /* The minimum/maximum values have to be the same. */
13293 if ((min1 == min2
13294 || (min1 && min2
13295 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13296 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13297 || operand_equal_p (min1, min2, 0))))
13298 && (max1 == max2
13299 || (max1 && max2
13300 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13301 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13302 || operand_equal_p (max1, max2, 0)))))
13303 return true;
13304 else
13305 return false;
13306 }
13307 }
13308
13309 case METHOD_TYPE:
13310 case FUNCTION_TYPE:
13311 /* Function types are the same if the return type and argument types
13312 are the same. */
13313 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13314 trust_type_canonical))
13315 return false;
13316
13317 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13318 return true;
13319 else
13320 {
13321 tree parms1, parms2;
13322
13323 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13324 parms1 && parms2;
13325 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13326 {
13327 if (!gimple_canonical_types_compatible_p
13328 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13329 trust_type_canonical))
13330 return false;
13331 }
13332
13333 if (parms1 || parms2)
13334 return false;
13335
13336 return true;
13337 }
13338
13339 case RECORD_TYPE:
13340 case UNION_TYPE:
13341 case QUAL_UNION_TYPE:
13342 {
13343 tree f1, f2;
13344
13345 /* For aggregate types, all the fields must be the same. */
13346 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13347 f1 || f2;
13348 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13349 {
13350 /* Skip non-fields. */
13351 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13352 f1 = TREE_CHAIN (f1);
13353 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13354 f2 = TREE_CHAIN (f2);
13355 if (!f1 || !f2)
13356 break;
13357 /* The fields must have the same offset, addressability and compatible type. */
13358 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13359 || !gimple_compare_field_offset (f1, f2)
13360 || !gimple_canonical_types_compatible_p
13361 (TREE_TYPE (f1), TREE_TYPE (f2),
13362 trust_type_canonical))
13363 return false;
13364 }
13365
13366 /* If one aggregate has more fields than the other, they
13367 are not the same. */
13368 if (f1 || f2)
13369 return false;
13370
13371 return true;
13372 }
13373
13374 default:
13375 /* Consider all types with language specific trees in them mutually
13376 compatible. This is executed only from verify_type and false
13377 positives can be tolerated. */
13378 gcc_assert (!in_lto_p);
13379 return true;
13380 }
13381 }
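/* For example, two distinct RECORD_TYPEs whose FIELD_DECLs pairwise agree in
   offset, non-addressability and (recursively) compatible field types are
   treated as compatible by the function above even if their tags differ;
   field and type names are never compared here.  */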
13382
13383 /* Verify type T. */
13384
13385 void
13386 verify_type (const_tree t)
13387 {
13388 bool error_found = false;
13389 tree mv = TYPE_MAIN_VARIANT (t);
13390 if (!mv)
13391 {
13392 error ("Main variant is not defined");
13393 error_found = true;
13394 }
13395 else if (mv != TYPE_MAIN_VARIANT (mv))
13396 {
13397 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13398 debug_tree (mv);
13399 error_found = true;
13400 }
13401 else if (t != mv && !verify_type_variant (t, mv))
13402 error_found = true;
13403
13404 tree ct = TYPE_CANONICAL (t);
13405 if (!ct)
13406 ;
13407 else if (TYPE_CANONICAL (ct) != ct)
13408 {
13409 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13410 debug_tree (ct);
13411 error_found = true;
13412 }
13413 /* Method and function types cannot be used to address memory and thus
13414 TYPE_CANONICAL really matters only for determining useless conversions.
13415
13416 FIXME: C++ FE produces declarations of builtin functions that are not
13417 compatible with main variants. */
13418 else if (TREE_CODE (t) == FUNCTION_TYPE)
13419 ;
13420 else if (t != ct
13421 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13422 with variably sized arrays because their sizes are possibly
13423 gimplified to different variables. */
13424 && !variably_modified_type_p (ct, NULL)
13425 && !gimple_canonical_types_compatible_p (t, ct, false))
13426 {
13427 error ("TYPE_CANONICAL is not compatible");
13428 debug_tree (ct);
13429 error_found = true;
13430 }
13431
13432 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13433 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13434 {
13435 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13436 debug_tree (ct);
13437 error_found = true;
13438 }
13439
13440
13441 /* Check various uses of TYPE_MINVAL. */
13442 if (RECORD_OR_UNION_TYPE_P (t))
13443 {
13444 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13445 and dangles the pointer from time to time. */
13446 if (TYPE_VFIELD (t)
13447 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13448 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13449 {
13450 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13451 debug_tree (TYPE_VFIELD (t));
13452 error_found = true;
13453 }
13454 }
13455 else if (TREE_CODE (t) == POINTER_TYPE)
13456 {
13457 if (TYPE_NEXT_PTR_TO (t)
13458 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13459 {
13460 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13461 debug_tree (TYPE_NEXT_PTR_TO (t));
13462 error_found = true;
13463 }
13464 }
13465 else if (TREE_CODE (t) == REFERENCE_TYPE)
13466 {
13467 if (TYPE_NEXT_REF_TO (t)
13468 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13469 {
13470 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13471 debug_tree (TYPE_NEXT_REF_TO (t));
13472 error_found = true;
13473 }
13474 }
13475 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13476 || TREE_CODE (t) == FIXED_POINT_TYPE)
13477 {
13478 /* FIXME: The following check should pass:
13479 useless_type_conversion_p (const_cast <tree> (t),
13480 TREE_TYPE (TYPE_MIN_VALUE (t)))
13481 but does not for C sizetypes in LTO. */
13482 }
13483 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13484 else if (TYPE_MINVAL (t)
13485 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13486 || in_lto_p))
13487 {
13488 error ("TYPE_MINVAL non-NULL");
13489 debug_tree (TYPE_MINVAL (t));
13490 error_found = true;
13491 }
13492
13493 /* Check various uses of TYPE_MAXVAL. */
13494 if (RECORD_OR_UNION_TYPE_P (t))
13495 {
13496 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13497 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13498 && TYPE_METHODS (t) != error_mark_node)
13499 {
13500 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13501 debug_tree (TYPE_METHODS (t));
13502 error_found = true;
13503 }
13504 }
13505 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13506 {
13507 if (TYPE_METHOD_BASETYPE (t)
13508 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13509 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13510 {
13511 error ("TYPE_METHOD_BASETYPE is not record nor union");
13512 debug_tree (TYPE_METHOD_BASETYPE (t));
13513 error_found = true;
13514 }
13515 }
13516 else if (TREE_CODE (t) == OFFSET_TYPE)
13517 {
13518 if (TYPE_OFFSET_BASETYPE (t)
13519 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13520 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13521 {
13522 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13523 debug_tree (TYPE_OFFSET_BASETYPE (t));
13524 error_found = true;
13525 }
13526 }
13527 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13528 || TREE_CODE (t) == FIXED_POINT_TYPE)
13529 {
13530 /* FIXME: The following check should pass:
13531 useless_type_conversion_p (const_cast <tree> (t),
13532 TREE_TYPE (TYPE_MAX_VALUE (t)))
13533 but does not for C sizetypes in LTO. */
13534 }
13535 else if (TREE_CODE (t) == ARRAY_TYPE)
13536 {
13537 if (TYPE_ARRAY_MAX_SIZE (t)
13538 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13539 {
13540 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13541 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13542 error_found = true;
13543 }
13544 }
13545 else if (TYPE_MAXVAL (t))
13546 {
13547 error ("TYPE_MAXVAL non-NULL");
13548 debug_tree (TYPE_MAXVAL (t));
13549 error_found = true;
13550 }
13551
13552 /* Check various uses of TYPE_BINFO. */
13553 if (RECORD_OR_UNION_TYPE_P (t))
13554 {
13555 if (!TYPE_BINFO (t))
13556 ;
13557 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13558 {
13559 error ("TYPE_BINFO is not TREE_BINFO");
13560 debug_tree (TYPE_BINFO (t));
13561 error_found = true;
13562 }
13563 /* FIXME: Java builds invalid empty binfos that do not have
13564 TREE_TYPE set. */
13565 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13566 {
13567 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13568 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13569 error_found = true;
13570 }
13571 }
13572 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13573 {
13574 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13575 debug_tree (TYPE_LANG_SLOT_1 (t));
13576 error_found = true;
13577 }
13578
13579 /* Check various uses of TYPE_VALUES_RAW. */
13580 if (TREE_CODE (t) == ENUMERAL_TYPE)
13581 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13582 {
13583 tree value = TREE_VALUE (l);
13584 tree name = TREE_PURPOSE (l);
13585
13586 /* C FE produces INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13587 CONST_DECL of ENUMERAL_TYPE. */
13588 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13589 {
13590 error ("Enum value is not CONST_DECL or INTEGER_CST");
13591 debug_tree (value);
13592 debug_tree (name);
13593 error_found = true;
13594 }
13595 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13596 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13597 {
13598 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13599 debug_tree (value);
13600 debug_tree (name);
13601 error_found = true;
13602 }
13603 if (TREE_CODE (name) != IDENTIFIER_NODE)
13604 {
13605 error ("Enum value name is not IDENTIFIER_NODE");
13606 debug_tree (value);
13607 debug_tree (name);
13608 error_found = true;
13609 }
13610 }
13611 else if (TREE_CODE (t) == ARRAY_TYPE)
13612 {
13613 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13614 {
13615 error ("Array TYPE_DOMAIN is not integer type");
13616 debug_tree (TYPE_DOMAIN (t));
13617 error_found = true;
13618 }
13619 }
13620 else if (RECORD_OR_UNION_TYPE_P (t))
13621 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13622 {
13623 /* TODO: verify properties of decls. */
13624 if (TREE_CODE (fld) == FIELD_DECL)
13625 ;
13626 else if (TREE_CODE (fld) == TYPE_DECL)
13627 ;
13628 else if (TREE_CODE (fld) == CONST_DECL)
13629 ;
13630 else if (TREE_CODE (fld) == VAR_DECL)
13631 ;
13632 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13633 ;
13634 else if (TREE_CODE (fld) == USING_DECL)
13635 ;
13636 else
13637 {
13638 error ("Wrong tree in TYPE_FIELDS list");
13639 debug_tree (fld);
13640 error_found = true;
13641 }
13642 }
13643 else if (TREE_CODE (t) == INTEGER_TYPE
13644 || TREE_CODE (t) == BOOLEAN_TYPE
13645 || TREE_CODE (t) == OFFSET_TYPE
13646 || TREE_CODE (t) == REFERENCE_TYPE
13647 || TREE_CODE (t) == NULLPTR_TYPE
13648 || TREE_CODE (t) == POINTER_TYPE)
13649 {
13650 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13651 {
13652 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13653 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13654 error_found = true;
13655 }
13656 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13657 {
13658 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13659 debug_tree (TYPE_CACHED_VALUES (t));
13660 error_found = true;
13661 }
13662 /* Verify just enough of the cache to ensure that no one copied it to a new
13663 type. All copying should go through copy_node, which should clear it. */
13664 else if (TYPE_CACHED_VALUES_P (t))
13665 {
13666 int i;
13667 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13668 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13669 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13670 {
13671 error ("wrong TYPE_CACHED_VALUES entry");
13672 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13673 error_found = true;
13674 break;
13675 }
13676 }
13677 }
13678 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13679 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13680 {
13681 /* C++ FE uses TREE_PURPOSE to store initial values. */
13682 if (TREE_PURPOSE (l) && in_lto_p)
13683 {
13684 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13685 debug_tree (l);
13686 error_found = true;
13687 }
13688 if (!TYPE_P (TREE_VALUE (l)))
13689 {
13690 error ("Wrong entry in TYPE_ARG_TYPES list");
13691 debug_tree (l);
13692 error_found = true;
13693 }
13694 }
13695 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13696 {
13697 error ("TYPE_VALUES_RAW field is non-NULL");
13698 debug_tree (TYPE_VALUES_RAW (t));
13699 error_found = true;
13700 }
13701 if (TREE_CODE (t) != INTEGER_TYPE
13702 && TREE_CODE (t) != BOOLEAN_TYPE
13703 && TREE_CODE (t) != OFFSET_TYPE
13704 && TREE_CODE (t) != REFERENCE_TYPE
13705 && TREE_CODE (t) != NULLPTR_TYPE
13706 && TREE_CODE (t) != POINTER_TYPE
13707 && TYPE_CACHED_VALUES_P (t))
13708 {
13709 error ("TYPE_CACHED_VALUES_P is set while it should not");
13710 error_found = true;
13711 }
13712 if (TYPE_STRING_FLAG (t)
13713 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13714 {
13715 error ("TYPE_STRING_FLAG is set on wrong type code");
13716 error_found = true;
13717 }
13718 else if (TYPE_STRING_FLAG (t))
13719 {
13720 const_tree b = t;
13721 if (TREE_CODE (b) == ARRAY_TYPE)
13722 b = TREE_TYPE (t);
13723 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13724 that is 32 bits. */
13725 if (TREE_CODE (b) != INTEGER_TYPE)
13726 {
13727 error ("TYPE_STRING_FLAG is set on type that does not look like "
13728 "char nor array of chars");
13729 error_found = true;
13730 }
13731 }
13732
13733 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13734 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13735 of a type. */
13736 if (TREE_CODE (t) == METHOD_TYPE
13737 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13738 {
13739 error ("TYPE_METHOD_BASETYPE is not main variant");
13740 error_found = true;
13741 }
13742
13743 if (error_found)
13744 {
13745 debug_tree (const_cast <tree> (t));
13746 internal_error ("verify_type failed");
13747 }
13748 }
13749
13750
13751 /* Return true if ARG is marked with the nonnull attribute in the
13752 current function signature. */
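/* A hypothetical illustration: for a function whose type carries
   __attribute__ ((nonnull (2))), e.g.

     void f (char *a, char *b) __attribute__ ((nonnull (2)));

   this predicate returns true for the PARM_DECL of B (its position, 2, is
   listed in the attribute) and false for A.  The names here are examples
   only.  */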
13753
13754 bool
13755 nonnull_arg_p (const_tree arg)
13756 {
13757 tree t, attrs, fntype;
13758 unsigned HOST_WIDE_INT arg_num;
13759
13760 gcc_assert (TREE_CODE (arg) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (arg)));
13761
13762 /* The static chain decl is always non-NULL. */
13763 if (arg == cfun->static_chain_decl)
13764 return true;
13765
13766 /* The THIS argument of a method is always non-NULL. */
13767 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13768 && arg == DECL_ARGUMENTS (cfun->decl)
13769 && flag_delete_null_pointer_checks)
13770 return true;
13771
13772 /* Values passed by reference are always non-NULL. */
13773 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13774 && flag_delete_null_pointer_checks)
13775 return true;
13776
13777 fntype = TREE_TYPE (cfun->decl);
13778 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13779 {
13780 attrs = lookup_attribute ("nonnull", attrs);
13781
13782 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13783 if (attrs == NULL_TREE)
13784 return false;
13785
13786 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13787 if (TREE_VALUE (attrs) == NULL_TREE)
13788 return true;
13789
13790 /* Get the position number for ARG in the function signature. */
13791 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13792 t;
13793 t = DECL_CHAIN (t), arg_num++)
13794 {
13795 if (t == arg)
13796 break;
13797 }
13798
13799 gcc_assert (t == arg);
13800
13801 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13802 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13803 {
13804 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13805 return true;
13806 }
13807 }
13808
13809 return false;
13810 }
13811
13812
13813 #include "gt-tree.h"