gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "tree.h"
35 #include "gimple.h"
36 #include "rtl.h"
37 #include "ssa.h"
38 #include "flags.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "calls.h"
43 #include "attribs.h"
44 #include "varasm.h"
45 #include "tm_p.h"
46 #include "toplev.h" /* get_random_seed */
47 #include "filenames.h"
48 #include "output.h"
49 #include "target.h"
50 #include "common/common-target.h"
51 #include "langhooks.h"
52 #include "tree-inline.h"
53 #include "tree-iterator.h"
54 #include "internal-fn.h"
55 #include "gimple-iterator.h"
56 #include "gimplify.h"
57 #include "cgraph.h"
58 #include "insn-config.h"
59 #include "expmed.h"
60 #include "dojump.h"
61 #include "explow.h"
62 #include "emit-rtl.h"
63 #include "stmt.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66 #include "params.h"
67 #include "tree-pass.h"
68 #include "langhooks-def.h"
69 #include "diagnostic.h"
70 #include "tree-diagnostic.h"
71 #include "tree-pretty-print.h"
72 #include "except.h"
73 #include "debug.h"
74 #include "intl.h"
75 #include "builtins.h"
76 #include "print-tree.h"
77 #include "ipa-utils.h"
78
79 /* Tree code classes. */
80
81 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
82 #define END_OF_BASE_TREE_CODES tcc_exceptional,
83
84 const enum tree_code_class tree_code_type[] = {
85 #include "all-tree.def"
86 };
87
88 #undef DEFTREECODE
89 #undef END_OF_BASE_TREE_CODES
90
91 /* Table indexed by tree code giving number of expression
92 operands beyond the fixed part of the node structure.
93 Not used for types or decls. */
94
95 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
96 #define END_OF_BASE_TREE_CODES 0,
97
98 const unsigned char tree_code_length[] = {
99 #include "all-tree.def"
100 };
101
102 #undef DEFTREECODE
103 #undef END_OF_BASE_TREE_CODES
104
105 /* Names of tree components.
106 Used for printing out the tree and error messages. */
107 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
108 #define END_OF_BASE_TREE_CODES "@dummy",
109
110 static const char *const tree_code_name[] = {
111 #include "all-tree.def"
112 };
113
114 #undef DEFTREECODE
115 #undef END_OF_BASE_TREE_CODES
116
117 /* Each tree code class has an associated string representation.
118 These must correspond to the tree_code_class entries. */
119
120 const char *const tree_code_class_strings[] =
121 {
122 "exceptional",
123 "constant",
124 "type",
125 "declaration",
126 "reference",
127 "comparison",
128 "unary",
129 "binary",
130 "statement",
131 "vl_exp",
132 "expression"
133 };
134
135 /* obstack.[ch] explicitly declined to prototype this. */
136 extern int _obstack_allocated_p (struct obstack *h, void *obj);
137
138 /* Statistics-gathering stuff. */
139
140 static int tree_code_counts[MAX_TREE_CODES];
141 int tree_node_counts[(int) all_kinds];
142 int tree_node_sizes[(int) all_kinds];
143
144 /* Keep in sync with tree.h:enum tree_node_kind. */
145 static const char * const tree_node_kind_names[] = {
146 "decls",
147 "types",
148 "blocks",
149 "stmts",
150 "refs",
151 "exprs",
152 "constants",
153 "identifiers",
154 "vecs",
155 "binfos",
156 "ssa names",
157 "constructors",
158 "random kinds",
159 "lang_decl kinds",
160 "lang_type kinds",
161 "omp clauses",
162 };
163
164 /* Unique id for next decl created. */
165 static GTY(()) int next_decl_uid;
166 /* Unique id for next type created. */
167 static GTY(()) int next_type_uid = 1;
168 /* Unique id for next debug decl created. Use negative numbers,
169 to catch erroneous uses. */
170 static GTY(()) int next_debug_decl_uid;
171
172 /* Since we cannot rehash a type after it is in the table, we have to
173 keep the hash code. */
174
175 struct GTY((for_user)) type_hash {
176 unsigned long hash;
177 tree type;
178 };
179
180 /* Initial size of the hash table (rounded to next prime). */
181 #define TYPE_HASH_INITIAL_SIZE 1000
182
183 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
184 {
185 static hashval_t hash (type_hash *t) { return t->hash; }
186 static bool equal (type_hash *a, type_hash *b);
187
188 static int
189 keep_cache_entry (type_hash *&t)
190 {
191 return ggc_marked_p (t->type);
192 }
193 };
194
195 /* Now here is the hash table. When recording a type, it is added to
196 the slot whose index is the hash code. Note that the hash table is
197 used for several kinds of types (function types, array types and
198 array index range types, for now). While all these live in the
199 same table, they are completely independent, and the hash code is
200 computed differently for each of these. */
201
202 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
203
204 /* Hash table and temporary node for larger integer const values. */
205 static GTY (()) tree int_cst_node;
206
207 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
208 {
209 static hashval_t hash (tree t);
210 static bool equal (tree x, tree y);
211 };
212
213 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
214
215 /* Hash table for optimization flags and target option flags. Use the same
216 hash table for both sets of options. Nodes for building the current
217 optimization and target option nodes. The assumption is most of the time
218 the options created will already be in the hash table, so we avoid
219 allocating and freeing up a node repeatedly. */
220 static GTY (()) tree cl_optimization_node;
221 static GTY (()) tree cl_target_option_node;
222
223 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
224 {
225 static hashval_t hash (tree t);
226 static bool equal (tree x, tree y);
227 };
228
229 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
230
231 /* General tree->tree mapping structure for use in hash tables. */
232
233
234 static GTY ((cache))
235 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
236
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
239
240 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
241 {
242 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
243
244 static bool
245 equal (tree_vec_map *a, tree_vec_map *b)
246 {
247 return a->base.from == b->base.from;
248 }
249
250 static int
251 keep_cache_entry (tree_vec_map *&m)
252 {
253 return ggc_marked_p (m->base.from);
254 }
255 };
256
257 static GTY ((cache))
258 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
259
260 static void set_type_quals (tree, int);
261 static void print_type_hash_statistics (void);
262 static void print_debug_expr_statistics (void);
263 static void print_value_expr_statistics (void);
264 static void type_hash_list (const_tree, inchash::hash &);
265 static void attribute_hash_list (const_tree, inchash::hash &);
266
267 tree global_trees[TI_MAX];
268 tree integer_types[itk_none];
269
270 bool int_n_enabled_p[NUM_INT_N_ENTS];
271 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
272
273 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
274
275 /* Number of operands for each OpenMP clause. */
276 unsigned const char omp_clause_num_ops[] =
277 {
278 0, /* OMP_CLAUSE_ERROR */
279 1, /* OMP_CLAUSE_PRIVATE */
280 1, /* OMP_CLAUSE_SHARED */
281 1, /* OMP_CLAUSE_FIRSTPRIVATE */
282 2, /* OMP_CLAUSE_LASTPRIVATE */
283 5, /* OMP_CLAUSE_REDUCTION */
284 1, /* OMP_CLAUSE_COPYIN */
285 1, /* OMP_CLAUSE_COPYPRIVATE */
286 3, /* OMP_CLAUSE_LINEAR */
287 2, /* OMP_CLAUSE_ALIGNED */
288 1, /* OMP_CLAUSE_DEPEND */
289 1, /* OMP_CLAUSE_UNIFORM */
290 1, /* OMP_CLAUSE_TO_DECLARE */
291 1, /* OMP_CLAUSE_LINK */
292 2, /* OMP_CLAUSE_FROM */
293 2, /* OMP_CLAUSE_TO */
294 2, /* OMP_CLAUSE_MAP */
295 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
296 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
297 2, /* OMP_CLAUSE__CACHE_ */
298 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
299 1, /* OMP_CLAUSE_USE_DEVICE */
300 2, /* OMP_CLAUSE_GANG */
301 1, /* OMP_CLAUSE_ASYNC */
302 1, /* OMP_CLAUSE_WAIT */
303 0, /* OMP_CLAUSE_AUTO */
304 0, /* OMP_CLAUSE_SEQ */
305 1, /* OMP_CLAUSE__LOOPTEMP_ */
306 1, /* OMP_CLAUSE_IF */
307 1, /* OMP_CLAUSE_NUM_THREADS */
308 1, /* OMP_CLAUSE_SCHEDULE */
309 0, /* OMP_CLAUSE_NOWAIT */
310 1, /* OMP_CLAUSE_ORDERED */
311 0, /* OMP_CLAUSE_DEFAULT */
312 3, /* OMP_CLAUSE_COLLAPSE */
313 0, /* OMP_CLAUSE_UNTIED */
314 1, /* OMP_CLAUSE_FINAL */
315 0, /* OMP_CLAUSE_MERGEABLE */
316 1, /* OMP_CLAUSE_DEVICE */
317 1, /* OMP_CLAUSE_DIST_SCHEDULE */
318 0, /* OMP_CLAUSE_INBRANCH */
319 0, /* OMP_CLAUSE_NOTINBRANCH */
320 1, /* OMP_CLAUSE_NUM_TEAMS */
321 1, /* OMP_CLAUSE_THREAD_LIMIT */
322 0, /* OMP_CLAUSE_PROC_BIND */
323 1, /* OMP_CLAUSE_SAFELEN */
324 1, /* OMP_CLAUSE_SIMDLEN */
325 0, /* OMP_CLAUSE_FOR */
326 0, /* OMP_CLAUSE_PARALLEL */
327 0, /* OMP_CLAUSE_SECTIONS */
328 0, /* OMP_CLAUSE_TASKGROUP */
329 1, /* OMP_CLAUSE_PRIORITY */
330 1, /* OMP_CLAUSE_GRAINSIZE */
331 1, /* OMP_CLAUSE_NUM_TASKS */
332 0, /* OMP_CLAUSE_NOGROUP */
333 0, /* OMP_CLAUSE_THREADS */
334 0, /* OMP_CLAUSE_SIMD */
335 1, /* OMP_CLAUSE_HINT */
336 0, /* OMP_CLAUSE_DEFAULTMAP */
337 1, /* OMP_CLAUSE__SIMDUID_ */
338 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
339 0, /* OMP_CLAUSE_INDEPENDENT */
340 1, /* OMP_CLAUSE_WORKER */
341 1, /* OMP_CLAUSE_VECTOR */
342 1, /* OMP_CLAUSE_NUM_GANGS */
343 1, /* OMP_CLAUSE_NUM_WORKERS */
344 1, /* OMP_CLAUSE_VECTOR_LENGTH */
345 };
346
347 const char * const omp_clause_code_name[] =
348 {
349 "error_clause",
350 "private",
351 "shared",
352 "firstprivate",
353 "lastprivate",
354 "reduction",
355 "copyin",
356 "copyprivate",
357 "linear",
358 "aligned",
359 "depend",
360 "uniform",
361 "to",
362 "link",
363 "from",
364 "to",
365 "map",
366 "use_device_ptr",
367 "is_device_ptr",
368 "_cache_",
369 "device_resident",
370 "use_device",
371 "gang",
372 "async",
373 "wait",
374 "auto",
375 "seq",
376 "_looptemp_",
377 "if",
378 "num_threads",
379 "schedule",
380 "nowait",
381 "ordered",
382 "default",
383 "collapse",
384 "untied",
385 "final",
386 "mergeable",
387 "device",
388 "dist_schedule",
389 "inbranch",
390 "notinbranch",
391 "num_teams",
392 "thread_limit",
393 "proc_bind",
394 "safelen",
395 "simdlen",
396 "for",
397 "parallel",
398 "sections",
399 "taskgroup",
400 "priority",
401 "grainsize",
402 "num_tasks",
403 "nogroup",
404 "threads",
405 "simd",
406 "hint",
407 "defaultmap",
408 "_simduid_",
409 "_Cilk_for_count_",
410 "independent",
411 "worker",
412 "vector",
413 "num_gangs",
414 "num_workers",
415 "vector_length"
416 };
417
418
419 /* Return the tree node structure used by tree code CODE. */
420
421 static inline enum tree_node_structure_enum
422 tree_node_structure_for_code (enum tree_code code)
423 {
424 switch (TREE_CODE_CLASS (code))
425 {
426 case tcc_declaration:
427 {
428 switch (code)
429 {
430 case FIELD_DECL:
431 return TS_FIELD_DECL;
432 case PARM_DECL:
433 return TS_PARM_DECL;
434 case VAR_DECL:
435 return TS_VAR_DECL;
436 case LABEL_DECL:
437 return TS_LABEL_DECL;
438 case RESULT_DECL:
439 return TS_RESULT_DECL;
440 case DEBUG_EXPR_DECL:
441 return TS_DECL_WRTL;
442 case CONST_DECL:
443 return TS_CONST_DECL;
444 case TYPE_DECL:
445 return TS_TYPE_DECL;
446 case FUNCTION_DECL:
447 return TS_FUNCTION_DECL;
448 case TRANSLATION_UNIT_DECL:
449 return TS_TRANSLATION_UNIT_DECL;
450 default:
451 return TS_DECL_NON_COMMON;
452 }
453 }
454 case tcc_type:
455 return TS_TYPE_NON_COMMON;
456 case tcc_reference:
457 case tcc_comparison:
458 case tcc_unary:
459 case tcc_binary:
460 case tcc_expression:
461 case tcc_statement:
462 case tcc_vl_exp:
463 return TS_EXP;
464 default: /* tcc_constant and tcc_exceptional */
465 break;
466 }
467 switch (code)
468 {
469 /* tcc_constant cases. */
470 case VOID_CST: return TS_TYPED;
471 case INTEGER_CST: return TS_INT_CST;
472 case REAL_CST: return TS_REAL_CST;
473 case FIXED_CST: return TS_FIXED_CST;
474 case COMPLEX_CST: return TS_COMPLEX;
475 case VECTOR_CST: return TS_VECTOR;
476 case STRING_CST: return TS_STRING;
477 /* tcc_exceptional cases. */
478 case ERROR_MARK: return TS_COMMON;
479 case IDENTIFIER_NODE: return TS_IDENTIFIER;
480 case TREE_LIST: return TS_LIST;
481 case TREE_VEC: return TS_VEC;
482 case SSA_NAME: return TS_SSA_NAME;
483 case PLACEHOLDER_EXPR: return TS_COMMON;
484 case STATEMENT_LIST: return TS_STATEMENT_LIST;
485 case BLOCK: return TS_BLOCK;
486 case CONSTRUCTOR: return TS_CONSTRUCTOR;
487 case TREE_BINFO: return TS_BINFO;
488 case OMP_CLAUSE: return TS_OMP_CLAUSE;
489 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
490 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
491
492 default:
493 gcc_unreachable ();
494 }
495 }
496
497
498 /* Initialize tree_contains_struct to describe the hierarchy of tree
499 nodes. */
500
501 static void
502 initialize_tree_contains_struct (void)
503 {
504 unsigned i;
505
506 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
507 {
508 enum tree_code code;
509 enum tree_node_structure_enum ts_code;
510
511 code = (enum tree_code) i;
512 ts_code = tree_node_structure_for_code (code);
513
514 /* Mark the TS structure itself. */
515 tree_contains_struct[code][ts_code] = 1;
516
517 /* Mark all the structures that TS is derived from. */
518 switch (ts_code)
519 {
520 case TS_TYPED:
521 case TS_BLOCK:
522 MARK_TS_BASE (code);
523 break;
524
525 case TS_COMMON:
526 case TS_INT_CST:
527 case TS_REAL_CST:
528 case TS_FIXED_CST:
529 case TS_VECTOR:
530 case TS_STRING:
531 case TS_COMPLEX:
532 case TS_SSA_NAME:
533 case TS_CONSTRUCTOR:
534 case TS_EXP:
535 case TS_STATEMENT_LIST:
536 MARK_TS_TYPED (code);
537 break;
538
539 case TS_IDENTIFIER:
540 case TS_DECL_MINIMAL:
541 case TS_TYPE_COMMON:
542 case TS_LIST:
543 case TS_VEC:
544 case TS_BINFO:
545 case TS_OMP_CLAUSE:
546 case TS_OPTIMIZATION:
547 case TS_TARGET_OPTION:
548 MARK_TS_COMMON (code);
549 break;
550
551 case TS_TYPE_WITH_LANG_SPECIFIC:
552 MARK_TS_TYPE_COMMON (code);
553 break;
554
555 case TS_TYPE_NON_COMMON:
556 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
557 break;
558
559 case TS_DECL_COMMON:
560 MARK_TS_DECL_MINIMAL (code);
561 break;
562
563 case TS_DECL_WRTL:
564 case TS_CONST_DECL:
565 MARK_TS_DECL_COMMON (code);
566 break;
567
568 case TS_DECL_NON_COMMON:
569 MARK_TS_DECL_WITH_VIS (code);
570 break;
571
572 case TS_DECL_WITH_VIS:
573 case TS_PARM_DECL:
574 case TS_LABEL_DECL:
575 case TS_RESULT_DECL:
576 MARK_TS_DECL_WRTL (code);
577 break;
578
579 case TS_FIELD_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
582
583 case TS_VAR_DECL:
584 MARK_TS_DECL_WITH_VIS (code);
585 break;
586
587 case TS_TYPE_DECL:
588 case TS_FUNCTION_DECL:
589 MARK_TS_DECL_NON_COMMON (code);
590 break;
591
592 case TS_TRANSLATION_UNIT_DECL:
593 MARK_TS_DECL_COMMON (code);
594 break;
595
596 default:
597 gcc_unreachable ();
598 }
599 }
600
601 /* Basic consistency checks for attributes used in fold. */
602 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
603 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
604 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
605 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
606 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
607 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
608 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
609 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
610 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
611 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
612 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
613 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
614 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
615 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
616 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
617 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
618 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
619 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
620 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
621 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
622 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
623 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
624 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
625 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
627 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
628 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
629 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
630 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
631 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
632 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
633 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
634 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
636 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
637 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
638 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
642 }
643
644
645 /* Init tree.c. */
646
647 void
648 init_ttree (void)
649 {
650 /* Initialize the hash table of types. */
651 type_hash_table
652 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
653
654 debug_expr_for_decl
655 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
656
657 value_expr_for_decl
658 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
659
660 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
661
662 int_cst_node = make_int_cst (1, 1);
663
664 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
665
666 cl_optimization_node = make_node (OPTIMIZATION_NODE);
667 cl_target_option_node = make_node (TARGET_OPTION_NODE);
668
669 /* Initialize the tree_contains_struct array. */
670 initialize_tree_contains_struct ();
671 lang_hooks.init_ts ();
672 }
673
674 \f
675 /* The name of the object as the assembler will see it (but before any
676 translations made by ASM_OUTPUT_LABELREF). Often this is the same
677 as DECL_NAME. It is an IDENTIFIER_NODE. */
678 tree
679 decl_assembler_name (tree decl)
680 {
681 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
682 lang_hooks.set_decl_assembler_name (decl);
683 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
684 }
685
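/* Usage sketch (illustrative only; FNDECL stands for any FUNCTION_DECL
   obtained elsewhere).  The first call may run the language hook to
   compute the mangled name; later calls return the cached node:

     tree asm_name = decl_assembler_name (fndecl);
     const char *str = IDENTIFIER_POINTER (asm_name);  */
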
686 /* When the target supports COMDAT groups, this indicates which group the
687 DECL is associated with. This can be either an IDENTIFIER_NODE or a
688 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
689 tree
690 decl_comdat_group (const_tree node)
691 {
692 struct symtab_node *snode = symtab_node::get (node);
693 if (!snode)
694 return NULL;
695 return snode->get_comdat_group ();
696 }
697
698 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
699 tree
700 decl_comdat_group_id (const_tree node)
701 {
702 struct symtab_node *snode = symtab_node::get (node);
703 if (!snode)
704 return NULL;
705 return snode->get_comdat_group_id ();
706 }
707
708 /* When the target supports named sections, return the section name of
709 NODE as a string, or NULL if it is in no section. */
710 const char *
711 decl_section_name (const_tree node)
712 {
713 struct symtab_node *snode = symtab_node::get (node);
714 if (!snode)
715 return NULL;
716 return snode->get_section ();
717 }
718
719 /* Set the section name of NODE to the string VALUE, or clear it
720 when VALUE is NULL. */
721 void
722 set_decl_section_name (tree node, const char *value)
723 {
724 struct symtab_node *snode;
725
726 if (value == NULL)
727 {
728 snode = symtab_node::get (node);
729 if (!snode)
730 return;
731 }
732 else if (TREE_CODE (node) == VAR_DECL)
733 snode = varpool_node::get_create (node);
734 else
735 snode = cgraph_node::get_create (node);
736 snode->set_section (value);
737 }
738
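/* Usage sketch (illustrative only; VAR stands for a VAR_DECL created
   elsewhere).  A non-NULL string creates the varpool node if needed,
   while NULL merely clears any existing setting:

     set_decl_section_name (var, ".my_section");
     const char *sec = decl_section_name (var);   now ".my_section"
     set_decl_section_name (var, NULL);           clears it again  */
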
739 /* Return TLS model of a variable NODE. */
740 enum tls_model
741 decl_tls_model (const_tree node)
742 {
743 struct varpool_node *snode = varpool_node::get (node);
744 if (!snode)
745 return TLS_MODEL_NONE;
746 return snode->tls_model;
747 }
748
749 /* Set TLS model of variable NODE to MODEL. */
750 void
751 set_decl_tls_model (tree node, enum tls_model model)
752 {
753 struct varpool_node *vnode;
754
755 if (model == TLS_MODEL_NONE)
756 {
757 vnode = varpool_node::get (node);
758 if (!vnode)
759 return;
760 }
761 else
762 vnode = varpool_node::get_create (node);
763 vnode->tls_model = model;
764 }
765
766 /* Compute the number of bytes occupied by a tree with code CODE.
767 This function cannot be used for nodes that have variable sizes,
768 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
769 size_t
770 tree_code_size (enum tree_code code)
771 {
772 switch (TREE_CODE_CLASS (code))
773 {
774 case tcc_declaration: /* A decl node */
775 {
776 switch (code)
777 {
778 case FIELD_DECL:
779 return sizeof (struct tree_field_decl);
780 case PARM_DECL:
781 return sizeof (struct tree_parm_decl);
782 case VAR_DECL:
783 return sizeof (struct tree_var_decl);
784 case LABEL_DECL:
785 return sizeof (struct tree_label_decl);
786 case RESULT_DECL:
787 return sizeof (struct tree_result_decl);
788 case CONST_DECL:
789 return sizeof (struct tree_const_decl);
790 case TYPE_DECL:
791 return sizeof (struct tree_type_decl);
792 case FUNCTION_DECL:
793 return sizeof (struct tree_function_decl);
794 case DEBUG_EXPR_DECL:
795 return sizeof (struct tree_decl_with_rtl);
796 case TRANSLATION_UNIT_DECL:
797 return sizeof (struct tree_translation_unit_decl);
798 case NAMESPACE_DECL:
799 case IMPORTED_DECL:
800 case NAMELIST_DECL:
801 return sizeof (struct tree_decl_non_common);
802 default:
803 return lang_hooks.tree_size (code);
804 }
805 }
806
807 case tcc_type: /* a type node */
808 return sizeof (struct tree_type_non_common);
809
810 case tcc_reference: /* a reference */
811 case tcc_expression: /* an expression */
812 case tcc_statement: /* an expression with side effects */
813 case tcc_comparison: /* a comparison expression */
814 case tcc_unary: /* a unary arithmetic expression */
815 case tcc_binary: /* a binary arithmetic expression */
816 return (sizeof (struct tree_exp)
817 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
818
819 case tcc_constant: /* a constant */
820 switch (code)
821 {
822 case VOID_CST: return sizeof (struct tree_typed);
823 case INTEGER_CST: gcc_unreachable ();
824 case REAL_CST: return sizeof (struct tree_real_cst);
825 case FIXED_CST: return sizeof (struct tree_fixed_cst);
826 case COMPLEX_CST: return sizeof (struct tree_complex);
827 case VECTOR_CST: return sizeof (struct tree_vector);
828 case STRING_CST: gcc_unreachable ();
829 default:
830 return lang_hooks.tree_size (code);
831 }
832
833 case tcc_exceptional: /* something random, like an identifier. */
834 switch (code)
835 {
836 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
837 case TREE_LIST: return sizeof (struct tree_list);
838
839 case ERROR_MARK:
840 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
841
842 case TREE_VEC:
843 case OMP_CLAUSE: gcc_unreachable ();
844
845 case SSA_NAME: return sizeof (struct tree_ssa_name);
846
847 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
848 case BLOCK: return sizeof (struct tree_block);
849 case CONSTRUCTOR: return sizeof (struct tree_constructor);
850 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
851 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
852
853 default:
854 return lang_hooks.tree_size (code);
855 }
856
857 default:
858 gcc_unreachable ();
859 }
860 }
861
862 /* Compute the number of bytes occupied by NODE. This routine only
863 looks at TREE_CODE, except for those nodes that have variable sizes. */
864 size_t
865 tree_size (const_tree node)
866 {
867 const enum tree_code code = TREE_CODE (node);
868 switch (code)
869 {
870 case INTEGER_CST:
871 return (sizeof (struct tree_int_cst)
872 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
873
874 case TREE_BINFO:
875 return (offsetof (struct tree_binfo, base_binfos)
876 + vec<tree, va_gc>
877 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
878
879 case TREE_VEC:
880 return (sizeof (struct tree_vec)
881 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
882
883 case VECTOR_CST:
884 return (sizeof (struct tree_vector)
885 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
886
887 case STRING_CST:
888 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
889
890 case OMP_CLAUSE:
891 return (sizeof (struct tree_omp_clause)
892 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
893 * sizeof (tree));
894
895 default:
896 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
897 return (sizeof (struct tree_exp)
898 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
899 else
900 return tree_code_size (code);
901 }
902 }
903
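/* Usage sketch (illustrative only).  tree_code_size cannot be used for
   variable-sized codes such as INTEGER_CST or TREE_VEC, but tree_size
   works on any allocated node:

     size_t fixed = tree_code_size (PLUS_EXPR);
     size_t var   = tree_size (build_int_cst (integer_type_node, 42));  */
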
904 /* Record interesting allocation statistics for a tree node with CODE
905 and LENGTH. */
906
907 static void
908 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
909 size_t length ATTRIBUTE_UNUSED)
910 {
911 enum tree_code_class type = TREE_CODE_CLASS (code);
912 tree_node_kind kind;
913
914 if (!GATHER_STATISTICS)
915 return;
916
917 switch (type)
918 {
919 case tcc_declaration: /* A decl node */
920 kind = d_kind;
921 break;
922
923 case tcc_type: /* a type node */
924 kind = t_kind;
925 break;
926
927 case tcc_statement: /* an expression with side effects */
928 kind = s_kind;
929 break;
930
931 case tcc_reference: /* a reference */
932 kind = r_kind;
933 break;
934
935 case tcc_expression: /* an expression */
936 case tcc_comparison: /* a comparison expression */
937 case tcc_unary: /* a unary arithmetic expression */
938 case tcc_binary: /* a binary arithmetic expression */
939 kind = e_kind;
940 break;
941
942 case tcc_constant: /* a constant */
943 kind = c_kind;
944 break;
945
946 case tcc_exceptional: /* something random, like an identifier. */
947 switch (code)
948 {
949 case IDENTIFIER_NODE:
950 kind = id_kind;
951 break;
952
953 case TREE_VEC:
954 kind = vec_kind;
955 break;
956
957 case TREE_BINFO:
958 kind = binfo_kind;
959 break;
960
961 case SSA_NAME:
962 kind = ssa_name_kind;
963 break;
964
965 case BLOCK:
966 kind = b_kind;
967 break;
968
969 case CONSTRUCTOR:
970 kind = constr_kind;
971 break;
972
973 case OMP_CLAUSE:
974 kind = omp_clause_kind;
975 break;
976
977 default:
978 kind = x_kind;
979 break;
980 }
981 break;
982
983 case tcc_vl_exp:
984 kind = e_kind;
985 break;
986
987 default:
988 gcc_unreachable ();
989 }
990
991 tree_code_counts[(int) code]++;
992 tree_node_counts[(int) kind]++;
993 tree_node_sizes[(int) kind] += length;
994 }
995
996 /* Allocate and return a new UID from the DECL_UID namespace. */
997
998 int
999 allocate_decl_uid (void)
1000 {
1001 return next_decl_uid++;
1002 }
1003
1004 /* Return a newly allocated node of code CODE. For decl and type
1005 nodes, some other fields are initialized. The rest of the node is
1006 initialized to zero. This function cannot be used for TREE_VEC,
1007 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1008 tree_code_size.
1009
1010 Achoo! I got a code in the node. */
1011
1012 tree
1013 make_node_stat (enum tree_code code MEM_STAT_DECL)
1014 {
1015 tree t;
1016 enum tree_code_class type = TREE_CODE_CLASS (code);
1017 size_t length = tree_code_size (code);
1018
1019 record_node_allocation_statistics (code, length);
1020
1021 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1022 TREE_SET_CODE (t, code);
1023
1024 switch (type)
1025 {
1026 case tcc_statement:
1027 TREE_SIDE_EFFECTS (t) = 1;
1028 break;
1029
1030 case tcc_declaration:
1031 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1032 {
1033 if (code == FUNCTION_DECL)
1034 {
1035 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1036 DECL_MODE (t) = FUNCTION_MODE;
1037 }
1038 else
1039 DECL_ALIGN (t) = 1;
1040 }
1041 DECL_SOURCE_LOCATION (t) = input_location;
1042 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1043 DECL_UID (t) = --next_debug_decl_uid;
1044 else
1045 {
1046 DECL_UID (t) = allocate_decl_uid ();
1047 SET_DECL_PT_UID (t, -1);
1048 }
1049 if (TREE_CODE (t) == LABEL_DECL)
1050 LABEL_DECL_UID (t) = -1;
1051
1052 break;
1053
1054 case tcc_type:
1055 TYPE_UID (t) = next_type_uid++;
1056 TYPE_ALIGN (t) = BITS_PER_UNIT;
1057 TYPE_USER_ALIGN (t) = 0;
1058 TYPE_MAIN_VARIANT (t) = t;
1059 TYPE_CANONICAL (t) = t;
1060
1061 /* Default to no attributes for type, but let target change that. */
1062 TYPE_ATTRIBUTES (t) = NULL_TREE;
1063 targetm.set_default_type_attributes (t);
1064
1065 /* We have not yet computed the alias set for this type. */
1066 TYPE_ALIAS_SET (t) = -1;
1067 break;
1068
1069 case tcc_constant:
1070 TREE_CONSTANT (t) = 1;
1071 break;
1072
1073 case tcc_expression:
1074 switch (code)
1075 {
1076 case INIT_EXPR:
1077 case MODIFY_EXPR:
1078 case VA_ARG_EXPR:
1079 case PREDECREMENT_EXPR:
1080 case PREINCREMENT_EXPR:
1081 case POSTDECREMENT_EXPR:
1082 case POSTINCREMENT_EXPR:
1083 /* All of these have side-effects, no matter what their
1084 operands are. */
1085 TREE_SIDE_EFFECTS (t) = 1;
1086 break;
1087
1088 default:
1089 break;
1090 }
1091 break;
1092
1093 case tcc_exceptional:
1094 switch (code)
1095 {
1096 case TARGET_OPTION_NODE:
1097 TREE_TARGET_OPTION(t)
1098 = ggc_cleared_alloc<struct cl_target_option> ();
1099 break;
1100
1101 case OPTIMIZATION_NODE:
1102 TREE_OPTIMIZATION (t)
1103 = ggc_cleared_alloc<struct cl_optimization> ();
1104 break;
1105
1106 default:
1107 break;
1108 }
1109 break;
1110
1111 default:
1112 /* Other classes need no special treatment. */
1113 break;
1114 }
1115
1116 return t;
1117 }
1118 \f
1119 /* Return a new node with the same contents as NODE except that its
1120 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1121
1122 tree
1123 copy_node_stat (tree node MEM_STAT_DECL)
1124 {
1125 tree t;
1126 enum tree_code code = TREE_CODE (node);
1127 size_t length;
1128
1129 gcc_assert (code != STATEMENT_LIST);
1130
1131 length = tree_size (node);
1132 record_node_allocation_statistics (code, length);
1133 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1134 memcpy (t, node, length);
1135
1136 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1137 TREE_CHAIN (t) = 0;
1138 TREE_ASM_WRITTEN (t) = 0;
1139 TREE_VISITED (t) = 0;
1140
1141 if (TREE_CODE_CLASS (code) == tcc_declaration)
1142 {
1143 if (code == DEBUG_EXPR_DECL)
1144 DECL_UID (t) = --next_debug_decl_uid;
1145 else
1146 {
1147 DECL_UID (t) = allocate_decl_uid ();
1148 if (DECL_PT_UID_SET_P (node))
1149 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1150 }
1151 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1152 && DECL_HAS_VALUE_EXPR_P (node))
1153 {
1154 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1155 DECL_HAS_VALUE_EXPR_P (t) = 1;
1156 }
1157 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1158 if (TREE_CODE (node) == VAR_DECL)
1159 {
1160 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1161 t->decl_with_vis.symtab_node = NULL;
1162 }
1163 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1164 {
1165 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1166 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1167 }
1168 if (TREE_CODE (node) == FUNCTION_DECL)
1169 {
1170 DECL_STRUCT_FUNCTION (t) = NULL;
1171 t->decl_with_vis.symtab_node = NULL;
1172 }
1173 }
1174 else if (TREE_CODE_CLASS (code) == tcc_type)
1175 {
1176 TYPE_UID (t) = next_type_uid++;
1177 /* The following is so that the debug code for
1178 the copy is different from the original type.
1179 The two statements usually duplicate each other
1180 (because they clear fields of the same union),
1181 but the optimizer should catch that. */
1182 TYPE_SYMTAB_POINTER (t) = 0;
1183 TYPE_SYMTAB_ADDRESS (t) = 0;
1184
1185 /* Do not copy the values cache. */
1186 if (TYPE_CACHED_VALUES_P (t))
1187 {
1188 TYPE_CACHED_VALUES_P (t) = 0;
1189 TYPE_CACHED_VALUES (t) = NULL_TREE;
1190 }
1191 }
1192 else if (code == TARGET_OPTION_NODE)
1193 {
1194 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1195 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1196 sizeof (struct cl_target_option));
1197 }
1198 else if (code == OPTIMIZATION_NODE)
1199 {
1200 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1201 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1202 sizeof (struct cl_optimization));
1203 }
1204
1205 return t;
1206 }
1207
1208 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1209 For example, this can copy a list made of TREE_LIST nodes. */
1210
1211 tree
1212 copy_list (tree list)
1213 {
1214 tree head;
1215 tree prev, next;
1216
1217 if (list == 0)
1218 return 0;
1219
1220 head = prev = copy_node (list);
1221 next = TREE_CHAIN (list);
1222 while (next)
1223 {
1224 TREE_CHAIN (prev) = copy_node (next);
1225 prev = TREE_CHAIN (prev);
1226 next = TREE_CHAIN (next);
1227 }
1228 return head;
1229 }
1230
1231 \f
1232 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1233 INTEGER_CST with value CST and type TYPE. */
1234
1235 static unsigned int
1236 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1237 {
1238 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1239 /* We need an extra zero HWI if CST is an unsigned integer with its
1240 upper bit set, and if CST occupies a whole number of HWIs. */
1241 if (TYPE_UNSIGNED (type)
1242 && wi::neg_p (cst)
1243 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1244 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1245 return cst.get_len ();
1246 }
1247
1248 /* Return a new INTEGER_CST with value CST and type TYPE. */
1249
1250 static tree
1251 build_new_int_cst (tree type, const wide_int &cst)
1252 {
1253 unsigned int len = cst.get_len ();
1254 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1255 tree nt = make_int_cst (len, ext_len);
1256
1257 if (len < ext_len)
1258 {
1259 --ext_len;
1260 TREE_INT_CST_ELT (nt, ext_len) = 0;
1261 for (unsigned int i = len; i < ext_len; ++i)
1262 TREE_INT_CST_ELT (nt, i) = -1;
1263 }
1264 else if (TYPE_UNSIGNED (type)
1265 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1266 {
1267 len--;
1268 TREE_INT_CST_ELT (nt, len)
1269 = zext_hwi (cst.elt (len),
1270 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1271 }
1272
1273 for (unsigned int i = 0; i < len; i++)
1274 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1275 TREE_TYPE (nt) = type;
1276 return nt;
1277 }
1278
1279 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1280
1281 tree
1282 build_int_cst (tree type, HOST_WIDE_INT low)
1283 {
1284 /* Support legacy code. */
1285 if (!type)
1286 type = integer_type_node;
1287
1288 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1289 }
1290
1291 tree
1292 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1293 {
1294 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1295 }
1296
1297 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1298
1299 tree
1300 build_int_cst_type (tree type, HOST_WIDE_INT low)
1301 {
1302 gcc_assert (type);
1303 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1304 }
1305
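/* Usage sketch (illustrative only).  All three entry points above
   funnel into wide_int_to_tree, which forces the value to the
   precision and signedness of the type:

     tree a = build_int_cst (integer_type_node, -1);     signed -1
     tree b = build_int_cstu (size_type_node, 16);       unsigned 16
     tree c = build_int_cst_type (char_type_node, 300);  reduced to the
                                                         type's precision  */
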
1306 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1307 of CST is assumed to be the same as the signedness of TYPE. */
1308
1309 tree
1310 double_int_to_tree (tree type, double_int cst)
1311 {
1312 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1313 }
1314
1315 /* We force the wide_int CST to the range of the type TYPE by sign or
1316 zero extending it. OVERFLOWABLE indicates if we are interested in
1317 overflow of the value: when >0 we are only interested in signed
1318 overflow, for <0 we are interested in any overflow. OVERFLOWED
1319 indicates whether overflow has already occurred.
1320 We force the value to be within the range of TYPE (by setting to 0
1321 or 1 all the bits outside the type's range). We set TREE_OVERFLOW
1322 if OVERFLOWED is nonzero,
1323 or OVERFLOWABLE is >0 and signed overflow occurs,
1324 or OVERFLOWABLE is <0 and any overflow occurs.
1325 We return a new tree node for the extended wide_int. The node
1326 is shared if no overflow flags are set. */
1328
1329
1330 tree
1331 force_fit_type (tree type, const wide_int_ref &cst,
1332 int overflowable, bool overflowed)
1333 {
1334 signop sign = TYPE_SIGN (type);
1335
1336 /* If we need to set overflow flags, return a new unshared node. */
1337 if (overflowed || !wi::fits_to_tree_p (cst, type))
1338 {
1339 if (overflowed
1340 || overflowable < 0
1341 || (overflowable > 0 && sign == SIGNED))
1342 {
1343 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1344 tree t = build_new_int_cst (type, tmp);
1345 TREE_OVERFLOW (t) = 1;
1346 return t;
1347 }
1348 }
1349
1350 /* Else build a shared node. */
1351 return wide_int_to_tree (type, cst);
1352 }
1353
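/* Usage sketch (illustrative only).  Forcing a value that does not fit
   TYPE, with OVERFLOWABLE < 0, yields a fresh unshared node with
   TREE_OVERFLOW set (assuming signed char is narrower than int here):

     wide_int w = wi::shwi (200, TYPE_PRECISION (integer_type_node));
     tree t = force_fit_type (signed_char_type_node, w, -1, false);
     TREE_OVERFLOW (t) is now 1 and the value is reduced to the
     precision of signed char.  */
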
1354 /* These are the hash table functions for the hash table of INTEGER_CST
1355 nodes of a sizetype. */
1356
1357 /* Return the hash code for X, an INTEGER_CST. */
1358
1359 hashval_t
1360 int_cst_hasher::hash (tree x)
1361 {
1362 const_tree const t = x;
1363 hashval_t code = TYPE_UID (TREE_TYPE (t));
1364 int i;
1365
1366 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1367 code ^= TREE_INT_CST_ELT (t, i);
1368
1369 return code;
1370 }
1371
1372 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1373 is the same as that given by *Y, also an INTEGER_CST tree node. */
1374
1375 bool
1376 int_cst_hasher::equal (tree x, tree y)
1377 {
1378 const_tree const xt = x;
1379 const_tree const yt = y;
1380
1381 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1382 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1383 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1384 return false;
1385
1386 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1387 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1388 return false;
1389
1390 return true;
1391 }
1392
1393 /* Create an INT_CST node of TYPE and value CST.
1394 The returned node is always shared. For small integers we use a
1395 per-type vector cache, for larger ones we use a single hash table.
1396 The value is extended from its precision according to the sign of
1397 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1398 the upper bits and ensures that hashing and value equality based
1399 upon the underlying HOST_WIDE_INTs works without masking. */
1400
1401 tree
1402 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1403 {
1404 tree t;
1405 int ix = -1;
1406 int limit = 0;
1407
1408 gcc_assert (type);
1409 unsigned int prec = TYPE_PRECISION (type);
1410 signop sgn = TYPE_SIGN (type);
1411
1412 /* Verify that everything is canonical. */
1413 int l = pcst.get_len ();
1414 if (l > 1)
1415 {
1416 if (pcst.elt (l - 1) == 0)
1417 gcc_checking_assert (pcst.elt (l - 2) < 0);
1418 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1419 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1420 }
1421
1422 wide_int cst = wide_int::from (pcst, prec, sgn);
1423 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1424
1425 if (ext_len == 1)
1426 {
1427 /* We just need to store a single HOST_WIDE_INT. */
1428 HOST_WIDE_INT hwi;
1429 if (TYPE_UNSIGNED (type))
1430 hwi = cst.to_uhwi ();
1431 else
1432 hwi = cst.to_shwi ();
1433
1434 switch (TREE_CODE (type))
1435 {
1436 case NULLPTR_TYPE:
1437 gcc_assert (hwi == 0);
1438 /* Fallthru. */
1439
1440 case POINTER_TYPE:
1441 case REFERENCE_TYPE:
1442 case POINTER_BOUNDS_TYPE:
1443 /* Cache NULL pointer and zero bounds. */
1444 if (hwi == 0)
1445 {
1446 limit = 1;
1447 ix = 0;
1448 }
1449 break;
1450
1451 case BOOLEAN_TYPE:
1452 /* Cache false or true. */
1453 limit = 2;
1454 if (hwi < 2)
1455 ix = hwi;
1456 break;
1457
1458 case INTEGER_TYPE:
1459 case OFFSET_TYPE:
1460 if (TYPE_SIGN (type) == UNSIGNED)
1461 {
1462 /* Cache [0, N). */
1463 limit = INTEGER_SHARE_LIMIT;
1464 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1465 ix = hwi;
1466 }
1467 else
1468 {
1469 /* Cache [-1, N). */
1470 limit = INTEGER_SHARE_LIMIT + 1;
1471 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1472 ix = hwi + 1;
1473 }
1474 break;
1475
1476 case ENUMERAL_TYPE:
1477 break;
1478
1479 default:
1480 gcc_unreachable ();
1481 }
1482
1483 if (ix >= 0)
1484 {
1485 /* Look for it in the type's vector of small shared ints. */
1486 if (!TYPE_CACHED_VALUES_P (type))
1487 {
1488 TYPE_CACHED_VALUES_P (type) = 1;
1489 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1490 }
1491
1492 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1493 if (t)
1494 /* Make sure no one is clobbering the shared constant. */
1495 gcc_checking_assert (TREE_TYPE (t) == type
1496 && TREE_INT_CST_NUNITS (t) == 1
1497 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1498 && TREE_INT_CST_EXT_NUNITS (t) == 1
1499 && TREE_INT_CST_ELT (t, 0) == hwi);
1500 else
1501 {
1502 /* Create a new shared int. */
1503 t = build_new_int_cst (type, cst);
1504 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1505 }
1506 }
1507 else
1508 {
1509 /* Use the cache of larger shared ints, using int_cst_node as
1510 a temporary. */
1511
1512 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1513 TREE_TYPE (int_cst_node) = type;
1514
1515 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1516 t = *slot;
1517 if (!t)
1518 {
1519 /* Insert this one into the hash table. */
1520 t = int_cst_node;
1521 *slot = t;
1522 /* Make a new node for next time round. */
1523 int_cst_node = make_int_cst (1, 1);
1524 }
1525 }
1526 }
1527 else
1528 {
1529 /* The value either hashes properly or we drop it on the floor
1530 for the gc to take care of. There will not be enough of them
1531 to worry about. */
1532
1533 tree nt = build_new_int_cst (type, cst);
1534 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1535 t = *slot;
1536 if (!t)
1537 {
1538 /* Insert this one into the hash table. */
1539 t = nt;
1540 *slot = t;
1541 }
1542 }
1543
1544 return t;
1545 }
1546
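/* Usage sketch (illustrative only).  Small values are shared through
   the per-type TYPE_CACHED_VALUES vector, so repeated requests return
   the very same node (3 being well below INTEGER_SHARE_LIMIT):

     tree a = build_int_cst (integer_type_node, 3);
     tree b = build_int_cst (integer_type_node, 3);
     here a == b as pointers.  */
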
1547 void
1548 cache_integer_cst (tree t)
1549 {
1550 tree type = TREE_TYPE (t);
1551 int ix = -1;
1552 int limit = 0;
1553 int prec = TYPE_PRECISION (type);
1554
1555 gcc_assert (!TREE_OVERFLOW (t));
1556
1557 switch (TREE_CODE (type))
1558 {
1559 case NULLPTR_TYPE:
1560 gcc_assert (integer_zerop (t));
1561 /* Fallthru. */
1562
1563 case POINTER_TYPE:
1564 case REFERENCE_TYPE:
1565 /* Cache NULL pointer. */
1566 if (integer_zerop (t))
1567 {
1568 limit = 1;
1569 ix = 0;
1570 }
1571 break;
1572
1573 case BOOLEAN_TYPE:
1574 /* Cache false or true. */
1575 limit = 2;
1576 if (wi::ltu_p (t, 2))
1577 ix = TREE_INT_CST_ELT (t, 0);
1578 break;
1579
1580 case INTEGER_TYPE:
1581 case OFFSET_TYPE:
1582 if (TYPE_UNSIGNED (type))
1583 {
1584 /* Cache 0..N */
1585 limit = INTEGER_SHARE_LIMIT;
1586
1587 /* This is a little hokey, but if the prec is smaller than
1588 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1589 obvious test will not get the correct answer. */
1590 if (prec < HOST_BITS_PER_WIDE_INT)
1591 {
1592 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1593 ix = tree_to_uhwi (t);
1594 }
1595 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1596 ix = tree_to_uhwi (t);
1597 }
1598 else
1599 {
1600 /* Cache -1..N */
1601 limit = INTEGER_SHARE_LIMIT + 1;
1602
1603 if (integer_minus_onep (t))
1604 ix = 0;
1605 else if (!wi::neg_p (t))
1606 {
1607 if (prec < HOST_BITS_PER_WIDE_INT)
1608 {
1609 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1610 ix = tree_to_shwi (t) + 1;
1611 }
1612 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1613 ix = tree_to_shwi (t) + 1;
1614 }
1615 }
1616 break;
1617
1618 case ENUMERAL_TYPE:
1619 break;
1620
1621 default:
1622 gcc_unreachable ();
1623 }
1624
1625 if (ix >= 0)
1626 {
1627 /* Look for it in the type's vector of small shared ints. */
1628 if (!TYPE_CACHED_VALUES_P (type))
1629 {
1630 TYPE_CACHED_VALUES_P (type) = 1;
1631 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1632 }
1633
1634 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1635 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1636 }
1637 else
1638 {
1639 /* Use the cache of larger shared ints. */
1640 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1641 /* If there is already an entry for the number verify it's the
1642 same. */
1643 if (*slot)
1644 gcc_assert (wi::eq_p (tree (*slot), t));
1645 else
1646 /* Otherwise insert this one into the hash table. */
1647 *slot = t;
1648 }
1649 }
1650
1651
1652 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1653 and the rest are zeros. */
1654
1655 tree
1656 build_low_bits_mask (tree type, unsigned bits)
1657 {
1658 gcc_assert (bits <= TYPE_PRECISION (type));
1659
1660 return wide_int_to_tree (type, wi::mask (bits, false,
1661 TYPE_PRECISION (type)));
1662 }
1663
1664 /* Checks that X is an integer constant that can be expressed in (unsigned)
1665 HOST_WIDE_INT without loss of precision. */
1666
1667 bool
1668 cst_and_fits_in_hwi (const_tree x)
1669 {
1670 if (TREE_CODE (x) != INTEGER_CST)
1671 return false;
1672
1673 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1674 return false;
1675
1676 return TREE_INT_CST_NUNITS (x) == 1;
1677 }
1678
1679 /* Build a newly constructed VECTOR_CST node of length LEN. */
1680
1681 tree
1682 make_vector_stat (unsigned len MEM_STAT_DECL)
1683 {
1684 tree t;
1685 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1686
1687 record_node_allocation_statistics (VECTOR_CST, length);
1688
1689 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1690
1691 TREE_SET_CODE (t, VECTOR_CST);
1692 TREE_CONSTANT (t) = 1;
1693
1694 return t;
1695 }
1696
1697 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1698 are in the array pointed to by VALS. */
1699
1700 tree
1701 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1702 {
1703 int over = 0;
1704 unsigned cnt = 0;
1705 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1706 TREE_TYPE (v) = type;
1707
1708 /* Iterate through elements and check for overflow. */
1709 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1710 {
1711 tree value = vals[cnt];
1712
1713 VECTOR_CST_ELT (v, cnt) = value;
1714
1715 /* Don't crash if we get an address constant. */
1716 if (!CONSTANT_CLASS_P (value))
1717 continue;
1718
1719 over |= TREE_OVERFLOW (value);
1720 }
1721
1722 TREE_OVERFLOW (v) = over;
1723 return v;
1724 }
1725
1726 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1727 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1728
1729 tree
1730 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1731 {
1732 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1733 unsigned HOST_WIDE_INT idx;
1734 tree value;
1735
1736 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1737 vec[idx] = value;
1738 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1739 vec[idx] = build_zero_cst (TREE_TYPE (type));
1740
1741 return build_vector (type, vec);
1742 }
1743
1744 /* Build a vector of type VECTYPE where all the elements are SCs. */
1745 tree
1746 build_vector_from_val (tree vectype, tree sc)
1747 {
1748 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1749
1750 if (sc == error_mark_node)
1751 return sc;
1752
1753 /* Verify that the vector type is suitable for SC. Note that there
1754 is some inconsistency in the type-system with respect to restrict
1755 qualifications of pointers. Vector types always have a main-variant
1756 element type and the qualification is applied to the vector-type.
1757 So TREE_TYPE (vector-type) does not return a properly qualified
1758 vector element-type. */
1759 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1760 TREE_TYPE (vectype)));
1761
1762 if (CONSTANT_CLASS_P (sc))
1763 {
1764 tree *v = XALLOCAVEC (tree, nunits);
1765 for (i = 0; i < nunits; ++i)
1766 v[i] = sc;
1767 return build_vector (vectype, v);
1768 }
1769 else
1770 {
1771 vec<constructor_elt, va_gc> *v;
1772 vec_alloc (v, nunits);
1773 for (i = 0; i < nunits; ++i)
1774 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1775 return build_constructor (vectype, v);
1776 }
1777 }
1778
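/* Usage sketch (illustrative only; the vector type is built here just
   for the example).  A constant element gives a VECTOR_CST, while a
   non-constant element falls back to a CONSTRUCTOR:

     tree v4si = build_vector_type (integer_type_node, 4);
     tree splat = build_vector_from_val (v4si,
                                         build_int_cst (integer_type_node, 7));
     TREE_CODE (splat) is VECTOR_CST with four elements equal to 7.  */
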
1779 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1780 are in the vec pointed to by VALS. */
1781 tree
1782 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1783 {
1784 tree c = make_node (CONSTRUCTOR);
1785 unsigned int i;
1786 constructor_elt *elt;
1787 bool constant_p = true;
1788 bool side_effects_p = false;
1789
1790 TREE_TYPE (c) = type;
1791 CONSTRUCTOR_ELTS (c) = vals;
1792
1793 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1794 {
1795 /* Mostly ctors will have elts that don't have side-effects, so
1796 the usual case is to scan all the elements. Hence a single
1797 loop for both const and side effects, rather than one loop
1798 each (with early outs). */
1799 if (!TREE_CONSTANT (elt->value))
1800 constant_p = false;
1801 if (TREE_SIDE_EFFECTS (elt->value))
1802 side_effects_p = true;
1803 }
1804
1805 TREE_SIDE_EFFECTS (c) = side_effects_p;
1806 TREE_CONSTANT (c) = constant_p;
1807
1808 return c;
1809 }
1810
1811 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1812 INDEX and VALUE. */
1813 tree
1814 build_constructor_single (tree type, tree index, tree value)
1815 {
1816 vec<constructor_elt, va_gc> *v;
1817 constructor_elt elt = {index, value};
1818
1819 vec_alloc (v, 1);
1820 v->quick_push (elt);
1821
1822 return build_constructor (type, v);
1823 }
1824
1825
1826 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1827 are in a list pointed to by VALS. */
1828 tree
1829 build_constructor_from_list (tree type, tree vals)
1830 {
1831 tree t;
1832 vec<constructor_elt, va_gc> *v = NULL;
1833
1834 if (vals)
1835 {
1836 vec_alloc (v, list_length (vals));
1837 for (t = vals; t; t = TREE_CHAIN (t))
1838 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1839 }
1840
1841 return build_constructor (type, v);
1842 }
1843
1844 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1845 of elements, provided as index/value pairs. */
1846
1847 tree
1848 build_constructor_va (tree type, int nelts, ...)
1849 {
1850 vec<constructor_elt, va_gc> *v = NULL;
1851 va_list p;
1852
1853 va_start (p, nelts);
1854 vec_alloc (v, nelts);
1855 while (nelts--)
1856 {
1857 tree index = va_arg (p, tree);
1858 tree value = va_arg (p, tree);
1859 CONSTRUCTOR_APPEND_ELT (v, index, value);
1860 }
1861 va_end (p);
1862 return build_constructor (type, v);
1863 }
1864
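/* Usage sketch (illustrative only; SOME_ARRAY_TYPE stands for an array
   type built elsewhere).  Index/value pairs are passed as varargs:

     tree ctor = build_constructor_va (some_array_type, 2,
                                       size_int (0), integer_zero_node,
                                       size_int (1), integer_one_node);  */
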
1865 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1866
1867 tree
1868 build_fixed (tree type, FIXED_VALUE_TYPE f)
1869 {
1870 tree v;
1871 FIXED_VALUE_TYPE *fp;
1872
1873 v = make_node (FIXED_CST);
1874 fp = ggc_alloc<fixed_value> ();
1875 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1876
1877 TREE_TYPE (v) = type;
1878 TREE_FIXED_CST_PTR (v) = fp;
1879 return v;
1880 }
1881
1882 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1883
1884 tree
1885 build_real (tree type, REAL_VALUE_TYPE d)
1886 {
1887 tree v;
1888 REAL_VALUE_TYPE *dp;
1889 int overflow = 0;
1890
1891 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1892 Consider doing it via real_convert now. */
1893
1894 v = make_node (REAL_CST);
1895 dp = ggc_alloc<real_value> ();
1896 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1897
1898 TREE_TYPE (v) = type;
1899 TREE_REAL_CST_PTR (v) = dp;
1900 TREE_OVERFLOW (v) = overflow;
1901 return v;
1902 }
1903
1904 /* Like build_real, but first truncate D to the type. */
1905
1906 tree
1907 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1908 {
1909 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1910 }
1911
1912 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
1913 converted to the mode of TYPE (or to VOIDmode if TYPE is NULL). */
1914
1915 REAL_VALUE_TYPE
1916 real_value_from_int_cst (const_tree type, const_tree i)
1917 {
1918 REAL_VALUE_TYPE d;
1919
1920 /* Clear all bits of the real value type so that we can later do
1921 bitwise comparisons to see if two values are the same. */
1922 memset (&d, 0, sizeof d);
1923
1924 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1925 TYPE_SIGN (TREE_TYPE (i)));
1926 return d;
1927 }
1928
1929 /* Given a tree representing an integer constant I, return a tree
1930 representing the same value as a floating-point constant of type TYPE. */
1931
1932 tree
1933 build_real_from_int_cst (tree type, const_tree i)
1934 {
1935 tree v;
1936 int overflow = TREE_OVERFLOW (i);
1937
1938 v = build_real (type, real_value_from_int_cst (type, i));
1939
1940 TREE_OVERFLOW (v) |= overflow;
1941 return v;
1942 }
1943
1944 /* Return a newly constructed STRING_CST node whose value is
1945 the LEN characters at STR.
1946 Note that for a C string literal, LEN should include the trailing NUL.
1947 The TREE_TYPE is not initialized. */
1948
1949 tree
1950 build_string (int len, const char *str)
1951 {
1952 tree s;
1953 size_t length;
1954
1955 /* Do not waste bytes provided by padding of struct tree_string. */
1956 length = len + offsetof (struct tree_string, str) + 1;
1957
1958 record_node_allocation_statistics (STRING_CST, length);
1959
1960 s = (tree) ggc_internal_alloc (length);
1961
1962 memset (s, 0, sizeof (struct tree_typed));
1963 TREE_SET_CODE (s, STRING_CST);
1964 TREE_CONSTANT (s) = 1;
1965 TREE_STRING_LENGTH (s) = len;
1966 memcpy (s->string.str, str, len);
1967 s->string.str[len] = '\0';
1968
1969 return s;
1970 }
1971
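/* Usage sketch (illustrative only).  LEN counts the trailing NUL of a
   C string literal, and the caller is responsible for TREE_TYPE:

     tree s = build_string (6, "hello");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (5)));  */
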
1972 /* Return a newly constructed COMPLEX_CST node whose value is
1973 specified by the real and imaginary parts REAL and IMAG.
1974 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1975 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1976
1977 tree
1978 build_complex (tree type, tree real, tree imag)
1979 {
1980 tree t = make_node (COMPLEX_CST);
1981
1982 TREE_REALPART (t) = real;
1983 TREE_IMAGPART (t) = imag;
1984 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1985 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1986 return t;
1987 }
1988
1989 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
1990 element is set to 1. In particular, this is 1 + i for complex types. */
1991
1992 tree
1993 build_each_one_cst (tree type)
1994 {
1995 if (TREE_CODE (type) == COMPLEX_TYPE)
1996 {
1997 tree scalar = build_one_cst (TREE_TYPE (type));
1998 return build_complex (type, scalar, scalar);
1999 }
2000 else
2001 return build_one_cst (type);
2002 }
2003
2004 /* Return a constant of arithmetic type TYPE which is the
2005 multiplicative identity of the set TYPE. */
2006
2007 tree
2008 build_one_cst (tree type)
2009 {
2010 switch (TREE_CODE (type))
2011 {
2012 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2013 case POINTER_TYPE: case REFERENCE_TYPE:
2014 case OFFSET_TYPE:
2015 return build_int_cst (type, 1);
2016
2017 case REAL_TYPE:
2018 return build_real (type, dconst1);
2019
2020 case FIXED_POINT_TYPE:
2021 /* We can only generate 1 for accum types. */
2022 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2023 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2024
2025 case VECTOR_TYPE:
2026 {
2027 tree scalar = build_one_cst (TREE_TYPE (type));
2028
2029 return build_vector_from_val (type, scalar);
2030 }
2031
2032 case COMPLEX_TYPE:
2033 return build_complex (type,
2034 build_one_cst (TREE_TYPE (type)),
2035 build_zero_cst (TREE_TYPE (type)));
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040 }
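
/* A few illustrative results (sketch only):

     build_one_cst (integer_type_node)         => INTEGER_CST 1
     build_one_cst (double_type_node)          => REAL_CST 1.0
     build_one_cst (complex_double_type_node)  => COMPLEX_CST 1.0 + 0.0i
     build_one_cst (build_vector_type (integer_type_node, 4))
                                               => VECTOR_CST {1, 1, 1, 1}

   Note the difference from build_each_one_cst above, which returns
   1 + 1i for complex types.  */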
2041
2042 /* Return an integer of type TYPE containing all 1's in as much precision as
2043 it contains, or a complex or vector whose subparts are such integers. */
2044
2045 tree
2046 build_all_ones_cst (tree type)
2047 {
2048 if (TREE_CODE (type) == COMPLEX_TYPE)
2049 {
2050 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2051 return build_complex (type, scalar, scalar);
2052 }
2053 else
2054 return build_minus_one_cst (type);
2055 }
2056
2057 /* Return a constant of arithmetic type TYPE which is the
2058 opposite of the multiplicative identity of the set TYPE. */
2059
2060 tree
2061 build_minus_one_cst (tree type)
2062 {
2063 switch (TREE_CODE (type))
2064 {
2065 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2066 case POINTER_TYPE: case REFERENCE_TYPE:
2067 case OFFSET_TYPE:
2068 return build_int_cst (type, -1);
2069
2070 case REAL_TYPE:
2071 return build_real (type, dconstm1);
2072
2073 case FIXED_POINT_TYPE:
2074 /* We can only generate -1 for accum types. */
2075 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2076 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2077 TYPE_MODE (type)));
2078
2079 case VECTOR_TYPE:
2080 {
2081 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2082
2083 return build_vector_from_val (type, scalar);
2084 }
2085
2086 case COMPLEX_TYPE:
2087 return build_complex (type,
2088 build_minus_one_cst (TREE_TYPE (type)),
2089 build_zero_cst (TREE_TYPE (type)));
2090
2091 default:
2092 gcc_unreachable ();
2093 }
2094 }
2095
2096 /* Build 0 constant of type TYPE. This is used by constructor folding
2097 and thus the constant should be represented in memory by
2098 zero(es). */
2099
2100 tree
2101 build_zero_cst (tree type)
2102 {
2103 switch (TREE_CODE (type))
2104 {
2105 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2106 case POINTER_TYPE: case REFERENCE_TYPE:
2107 case OFFSET_TYPE: case NULLPTR_TYPE:
2108 return build_int_cst (type, 0);
2109
2110 case REAL_TYPE:
2111 return build_real (type, dconst0);
2112
2113 case FIXED_POINT_TYPE:
2114 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2115
2116 case VECTOR_TYPE:
2117 {
2118 tree scalar = build_zero_cst (TREE_TYPE (type));
2119
2120 return build_vector_from_val (type, scalar);
2121 }
2122
2123 case COMPLEX_TYPE:
2124 {
2125 tree zero = build_zero_cst (TREE_TYPE (type));
2126
2127 return build_complex (type, zero, zero);
2128 }
2129
2130 default:
2131 if (!AGGREGATE_TYPE_P (type))
2132 return fold_convert (type, integer_zero_node);
2133 return build_constructor (type, NULL);
2134 }
2135 }
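
/* A few illustrative results (sketch, not compiled):

     build_zero_cst (integer_type_node)  => INTEGER_CST 0
     build_zero_cst (double_type_node)   => REAL_CST 0.0
     build_zero_cst (ptr_type_node)      => null pointer constant
     build_zero_cst (a RECORD_TYPE)      => empty CONSTRUCTOR, i.e. an
                                            object that is all zero bytes
                                            in memory.  */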
2136
2137
2138 /* Build a BINFO with BASE_BINFOS slots for base binfos. */
2139
2140 tree
2141 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2142 {
2143 tree t;
2144 size_t length = (offsetof (struct tree_binfo, base_binfos)
2145 + vec<tree, va_gc>::embedded_size (base_binfos));
2146
2147 record_node_allocation_statistics (TREE_BINFO, length);
2148
2149 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2150
2151 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2152
2153 TREE_SET_CODE (t, TREE_BINFO);
2154
2155 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2156
2157 return t;
2158 }
2159
2160 /* Create a CASE_LABEL_EXPR tree node and return it. */
2161
2162 tree
2163 build_case_label (tree low_value, tree high_value, tree label_decl)
2164 {
2165 tree t = make_node (CASE_LABEL_EXPR);
2166
2167 TREE_TYPE (t) = void_type_node;
2168 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2169
2170 CASE_LOW (t) = low_value;
2171 CASE_HIGH (t) = high_value;
2172 CASE_LABEL (t) = label_decl;
2173 CASE_CHAIN (t) = NULL_TREE;
2174
2175 return t;
2176 }
2177
2178 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2179 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2180 The latter determines the length of the HOST_WIDE_INT vector. */
2181
2182 tree
2183 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2184 {
2185 tree t;
2186 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2187 + sizeof (struct tree_int_cst));
2188
2189 gcc_assert (len);
2190 record_node_allocation_statistics (INTEGER_CST, length);
2191
2192 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2193
2194 TREE_SET_CODE (t, INTEGER_CST);
2195 TREE_INT_CST_NUNITS (t) = len;
2196 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2197 /* to_offset can only be applied to trees that are offset_int-sized
2198 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2199 must be exactly the precision of offset_int and so LEN is correct. */
2200 if (ext_len <= OFFSET_INT_ELTS)
2201 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2202 else
2203 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2204
2205 TREE_CONSTANT (t) = 1;
2206
2207 return t;
2208 }
2209
2210 /* Build a newly constructed TREE_VEC node of length LEN. */
2211
2212 tree
2213 make_tree_vec_stat (int len MEM_STAT_DECL)
2214 {
2215 tree t;
2216 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2217
2218 record_node_allocation_statistics (TREE_VEC, length);
2219
2220 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2221
2222 TREE_SET_CODE (t, TREE_VEC);
2223 TREE_VEC_LENGTH (t) = len;
2224
2225 return t;
2226 }
2227
2228 /* Grow a TREE_VEC node to new length LEN. */
2229
2230 tree
2231 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2232 {
2233 gcc_assert (TREE_CODE (v) == TREE_VEC);
2234
2235 int oldlen = TREE_VEC_LENGTH (v);
2236 gcc_assert (len > oldlen);
2237
2238 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2239 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2240
2241 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2242
2243 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2244
2245 TREE_VEC_LENGTH (v) = len;
2246
2247 return v;
2248 }
2249 \f
2250 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2251 fixed, and scalar, complex or vector. */
2252
2253 int
2254 zerop (const_tree expr)
2255 {
2256 return (integer_zerop (expr)
2257 || real_zerop (expr)
2258 || fixed_zerop (expr));
2259 }
2260
2261 /* Return 1 if EXPR is the integer constant zero, or a complex or vector
2262 constant in which every element is zero. */
2263
2264 int
2265 integer_zerop (const_tree expr)
2266 {
2267 STRIP_NOPS (expr);
2268
2269 switch (TREE_CODE (expr))
2270 {
2271 case INTEGER_CST:
2272 return wi::eq_p (expr, 0);
2273 case COMPLEX_CST:
2274 return (integer_zerop (TREE_REALPART (expr))
2275 && integer_zerop (TREE_IMAGPART (expr)));
2276 case VECTOR_CST:
2277 {
2278 unsigned i;
2279 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2280 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2281 return false;
2282 return true;
2283 }
2284 default:
2285 return false;
2286 }
2287 }
2288
2289 /* Return 1 if EXPR is the integer constant one, the complex constant
2290 1 + 0i, or a vector constant in which every element is one. */
2291
2292 int
2293 integer_onep (const_tree expr)
2294 {
2295 STRIP_NOPS (expr);
2296
2297 switch (TREE_CODE (expr))
2298 {
2299 case INTEGER_CST:
2300 return wi::eq_p (wi::to_widest (expr), 1);
2301 case COMPLEX_CST:
2302 return (integer_onep (TREE_REALPART (expr))
2303 && integer_zerop (TREE_IMAGPART (expr)));
2304 case VECTOR_CST:
2305 {
2306 unsigned i;
2307 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2308 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2309 return false;
2310 return true;
2311 }
2312 default:
2313 return false;
2314 }
2315 }
2316
2317 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2318 return 1 if every piece is the integer constant one. */
2319
2320 int
2321 integer_each_onep (const_tree expr)
2322 {
2323 STRIP_NOPS (expr);
2324
2325 if (TREE_CODE (expr) == COMPLEX_CST)
2326 return (integer_onep (TREE_REALPART (expr))
2327 && integer_onep (TREE_IMAGPART (expr)));
2328 else
2329 return integer_onep (expr);
2330 }
2331
2332 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2333 it contains, or a complex or vector whose subparts are such integers. */
2334
2335 int
2336 integer_all_onesp (const_tree expr)
2337 {
2338 STRIP_NOPS (expr);
2339
2340 if (TREE_CODE (expr) == COMPLEX_CST
2341 && integer_all_onesp (TREE_REALPART (expr))
2342 && integer_all_onesp (TREE_IMAGPART (expr)))
2343 return 1;
2344
2345 else if (TREE_CODE (expr) == VECTOR_CST)
2346 {
2347 unsigned i;
2348 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2349 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2350 return 0;
2351 return 1;
2352 }
2353
2354 else if (TREE_CODE (expr) != INTEGER_CST)
2355 return 0;
2356
2357 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2358 }
2359
2360 /* Return 1 if EXPR is the integer constant minus one. */
2361
2362 int
2363 integer_minus_onep (const_tree expr)
2364 {
2365 STRIP_NOPS (expr);
2366
2367 if (TREE_CODE (expr) == COMPLEX_CST)
2368 return (integer_all_onesp (TREE_REALPART (expr))
2369 && integer_zerop (TREE_IMAGPART (expr)));
2370 else
2371 return integer_all_onesp (expr);
2372 }
2373
2374 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2375 one bit on). */
2376
2377 int
2378 integer_pow2p (const_tree expr)
2379 {
2380 STRIP_NOPS (expr);
2381
2382 if (TREE_CODE (expr) == COMPLEX_CST
2383 && integer_pow2p (TREE_REALPART (expr))
2384 && integer_zerop (TREE_IMAGPART (expr)))
2385 return 1;
2386
2387 if (TREE_CODE (expr) != INTEGER_CST)
2388 return 0;
2389
2390 return wi::popcount (expr) == 1;
2391 }
2392
2393 /* Return 1 if EXPR is an integer constant other than zero or a
2394 complex constant other than zero. */
2395
2396 int
2397 integer_nonzerop (const_tree expr)
2398 {
2399 STRIP_NOPS (expr);
2400
2401 return ((TREE_CODE (expr) == INTEGER_CST
2402 && !wi::eq_p (expr, 0))
2403 || (TREE_CODE (expr) == COMPLEX_CST
2404 && (integer_nonzerop (TREE_REALPART (expr))
2405 || integer_nonzerop (TREE_IMAGPART (expr)))));
2406 }
2407
2408 /* Return 1 if EXPR is the integer constant one. For a vector,
2409 return 1 if every piece is the integer constant minus one
2410 (representing the value TRUE). */
2411
2412 int
2413 integer_truep (const_tree expr)
2414 {
2415 STRIP_NOPS (expr);
2416
2417 if (TREE_CODE (expr) == VECTOR_CST)
2418 return integer_all_onesp (expr);
2419 return integer_onep (expr);
2420 }
2421
2422 /* Return 1 if EXPR is the fixed-point constant zero. */
2423
2424 int
2425 fixed_zerop (const_tree expr)
2426 {
2427 return (TREE_CODE (expr) == FIXED_CST
2428 && TREE_FIXED_CST (expr).data.is_zero ());
2429 }
2430
2431 /* Return the base-2 logarithm of a tree node known to be a
2432 power of two. */
2433
2434 int
2435 tree_log2 (const_tree expr)
2436 {
2437 STRIP_NOPS (expr);
2438
2439 if (TREE_CODE (expr) == COMPLEX_CST)
2440 return tree_log2 (TREE_REALPART (expr));
2441
2442 return wi::exact_log2 (expr);
2443 }
2444
2445 /* Similar, but return the largest integer Y such that 2 ** Y is less
2446 than or equal to EXPR. */
2447
2448 int
2449 tree_floor_log2 (const_tree expr)
2450 {
2451 STRIP_NOPS (expr);
2452
2453 if (TREE_CODE (expr) == COMPLEX_CST)
2454 return tree_log2 (TREE_REALPART (expr));
2455
2456 return wi::floor_log2 (expr);
2457 }
2458
2459 /* Return number of known trailing zero bits in EXPR, or, if the value of
2460 EXPR is known to be zero, the precision of its type. */
2461
2462 unsigned int
2463 tree_ctz (const_tree expr)
2464 {
2465 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2466 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2467 return 0;
2468
2469 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2470 switch (TREE_CODE (expr))
2471 {
2472 case INTEGER_CST:
2473 ret1 = wi::ctz (expr);
2474 return MIN (ret1, prec);
2475 case SSA_NAME:
2476 ret1 = wi::ctz (get_nonzero_bits (expr));
2477 return MIN (ret1, prec);
2478 case PLUS_EXPR:
2479 case MINUS_EXPR:
2480 case BIT_IOR_EXPR:
2481 case BIT_XOR_EXPR:
2482 case MIN_EXPR:
2483 case MAX_EXPR:
2484 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2485 if (ret1 == 0)
2486 return ret1;
2487 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2488 return MIN (ret1, ret2);
2489 case POINTER_PLUS_EXPR:
2490 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2491 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2492 /* Second operand is sizetype, which could be in theory
2493 wider than pointer's precision. Make sure we never
2494 return more than prec. */
2495 ret2 = MIN (ret2, prec);
2496 return MIN (ret1, ret2);
2497 case BIT_AND_EXPR:
2498 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2499 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2500 return MAX (ret1, ret2);
2501 case MULT_EXPR:
2502 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2503 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2504 return MIN (ret1 + ret2, prec);
2505 case LSHIFT_EXPR:
2506 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2507 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2508 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2509 {
2510 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2511 return MIN (ret1 + ret2, prec);
2512 }
2513 return ret1;
2514 case RSHIFT_EXPR:
2515 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2516 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2517 {
2518 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2519 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2520 if (ret1 > ret2)
2521 return ret1 - ret2;
2522 }
2523 return 0;
2524 case TRUNC_DIV_EXPR:
2525 case CEIL_DIV_EXPR:
2526 case FLOOR_DIV_EXPR:
2527 case ROUND_DIV_EXPR:
2528 case EXACT_DIV_EXPR:
2529 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2530 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2531 {
2532 int l = tree_log2 (TREE_OPERAND (expr, 1));
2533 if (l >= 0)
2534 {
2535 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2536 ret2 = l;
2537 if (ret1 > ret2)
2538 return ret1 - ret2;
2539 }
2540 }
2541 return 0;
2542 CASE_CONVERT:
2543 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2544 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2545 ret1 = prec;
2546 return MIN (ret1, prec);
2547 case SAVE_EXPR:
2548 return tree_ctz (TREE_OPERAND (expr, 0));
2549 case COND_EXPR:
2550 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2551 if (ret1 == 0)
2552 return 0;
2553 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2554 return MIN (ret1, ret2);
2555 case COMPOUND_EXPR:
2556 return tree_ctz (TREE_OPERAND (expr, 1));
2557 case ADDR_EXPR:
2558 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2559 if (ret1 > BITS_PER_UNIT)
2560 {
2561 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2562 return MIN (ret1, prec);
2563 }
2564 return 0;
2565 default:
2566 return 0;
2567 }
2568 }
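
/* Worked example (illustrative only): for the expression (x * 8) + 4,
   with x an unsigned int SSA_NAME of unknown value,

     tree_ctz (x)            == 0
     tree_ctz (x * 8)        == MIN (0 + 3, prec) == 3   (MULT_EXPR case)
     tree_ctz ((x * 8) + 4)  == MIN (3, 2)        == 2   (PLUS_EXPR case)

   so the result is known to be a multiple of 4.  */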
2569
2570 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2571 decimal float constants, so don't return 1 for them. */
2572
2573 int
2574 real_zerop (const_tree expr)
2575 {
2576 STRIP_NOPS (expr);
2577
2578 switch (TREE_CODE (expr))
2579 {
2580 case REAL_CST:
2581 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2582 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2583 case COMPLEX_CST:
2584 return real_zerop (TREE_REALPART (expr))
2585 && real_zerop (TREE_IMAGPART (expr));
2586 case VECTOR_CST:
2587 {
2588 unsigned i;
2589 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2590 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2591 return false;
2592 return true;
2593 }
2594 default:
2595 return false;
2596 }
2597 }
2598
2599 /* Return 1 if EXPR is the real constant one in real or complex form.
2600 Trailing zeroes matter for decimal float constants, so don't return
2601 1 for them. */
2602
2603 int
2604 real_onep (const_tree expr)
2605 {
2606 STRIP_NOPS (expr);
2607
2608 switch (TREE_CODE (expr))
2609 {
2610 case REAL_CST:
2611 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2612 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2613 case COMPLEX_CST:
2614 return real_onep (TREE_REALPART (expr))
2615 && real_zerop (TREE_IMAGPART (expr));
2616 case VECTOR_CST:
2617 {
2618 unsigned i;
2619 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2620 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2621 return false;
2622 return true;
2623 }
2624 default:
2625 return false;
2626 }
2627 }
2628
2629 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2630 matter for decimal float constants, so don't return 1 for them. */
2631
2632 int
2633 real_minus_onep (const_tree expr)
2634 {
2635 STRIP_NOPS (expr);
2636
2637 switch (TREE_CODE (expr))
2638 {
2639 case REAL_CST:
2640 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2641 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2642 case COMPLEX_CST:
2643 return real_minus_onep (TREE_REALPART (expr))
2644 && real_zerop (TREE_IMAGPART (expr));
2645 case VECTOR_CST:
2646 {
2647 unsigned i;
2648 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2649 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2650 return false;
2651 return true;
2652 }
2653 default:
2654 return false;
2655 }
2656 }
2657
2658 /* Nonzero if EXP is a constant or a cast of a constant. */
2659
2660 int
2661 really_constant_p (const_tree exp)
2662 {
2663 /* This is not quite the same as STRIP_NOPS. It does more. */
2664 while (CONVERT_EXPR_P (exp)
2665 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2666 exp = TREE_OPERAND (exp, 0);
2667 return TREE_CONSTANT (exp);
2668 }
2669 \f
2670 /* Return first list element whose TREE_VALUE is ELEM.
2671 Return 0 if ELEM is not in LIST. */
2672
2673 tree
2674 value_member (tree elem, tree list)
2675 {
2676 while (list)
2677 {
2678 if (elem == TREE_VALUE (list))
2679 return list;
2680 list = TREE_CHAIN (list);
2681 }
2682 return NULL_TREE;
2683 }
2684
2685 /* Return first list element whose TREE_PURPOSE is ELEM.
2686 Return 0 if ELEM is not in LIST. */
2687
2688 tree
2689 purpose_member (const_tree elem, tree list)
2690 {
2691 while (list)
2692 {
2693 if (elem == TREE_PURPOSE (list))
2694 return list;
2695 list = TREE_CHAIN (list);
2696 }
2697 return NULL_TREE;
2698 }
2699
2700 /* Return true if ELEM is in V. */
2701
2702 bool
2703 vec_member (const_tree elem, vec<tree, va_gc> *v)
2704 {
2705 unsigned ix;
2706 tree t;
2707 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2708 if (elem == t)
2709 return true;
2710 return false;
2711 }
2712
2713 /* Return element number IDX (zero-origin) of chain CHAIN, or
2714 NULL_TREE if the chain has fewer than IDX + 1 elements. */
2715
2716 tree
2717 chain_index (int idx, tree chain)
2718 {
2719 for (; chain && idx > 0; --idx)
2720 chain = TREE_CHAIN (chain);
2721 return chain;
2722 }
2723
2724 /* Return nonzero if ELEM is part of the chain CHAIN. */
2725
2726 int
2727 chain_member (const_tree elem, const_tree chain)
2728 {
2729 while (chain)
2730 {
2731 if (elem == chain)
2732 return 1;
2733 chain = DECL_CHAIN (chain);
2734 }
2735
2736 return 0;
2737 }
2738
2739 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2740 We expect a null pointer to mark the end of the chain.
2741 This is the Lisp primitive `length'. */
2742
2743 int
2744 list_length (const_tree t)
2745 {
2746 const_tree p = t;
2747 #ifdef ENABLE_TREE_CHECKING
2748 const_tree q = t;
2749 #endif
2750 int len = 0;
2751
2752 while (p)
2753 {
2754 p = TREE_CHAIN (p);
2755 #ifdef ENABLE_TREE_CHECKING
2756 if (len % 2)
2757 q = TREE_CHAIN (q);
2758 gcc_assert (p != q);
2759 #endif
2760 len++;
2761 }
2762
2763 return len;
2764 }
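
/* When ENABLE_TREE_CHECKING is defined, the loop above also advances a
   second pointer at half speed (a Floyd-style tortoise-and-hare walk), so
   a circular chain triggers the assertion instead of looping forever.  */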
2765
2766 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2767 UNION_TYPE TYPE, or NULL_TREE if none. */
2768
2769 tree
2770 first_field (const_tree type)
2771 {
2772 tree t = TYPE_FIELDS (type);
2773 while (t && TREE_CODE (t) != FIELD_DECL)
2774 t = TREE_CHAIN (t);
2775 return t;
2776 }
2777
2778 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2779 by modifying the last node in chain 1 to point to chain 2.
2780 This is the Lisp primitive `nconc'. */
2781
2782 tree
2783 chainon (tree op1, tree op2)
2784 {
2785 tree t1;
2786
2787 if (!op1)
2788 return op2;
2789 if (!op2)
2790 return op1;
2791
2792 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2793 continue;
2794 TREE_CHAIN (t1) = op2;
2795
2796 #ifdef ENABLE_TREE_CHECKING
2797 {
2798 tree t2;
2799 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2800 gcc_assert (t2 != t1);
2801 }
2802 #endif
2803
2804 return op1;
2805 }
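
/* Typical (illustrative) use: appending one more node to the end of an
   attribute or parameter chain:

     attrs = chainon (attrs, tree_cons (name, args, NULL_TREE));

   Like the Lisp nconc, this destructively rewrites the last TREE_CHAIN of
   the first chain, so OP1 must not already contain OP2.  */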
2806
2807 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2808
2809 tree
2810 tree_last (tree chain)
2811 {
2812 tree next;
2813 if (chain)
2814 while ((next = TREE_CHAIN (chain)))
2815 chain = next;
2816 return chain;
2817 }
2818
2819 /* Reverse the order of elements in the chain T,
2820 and return the new head of the chain (old last element). */
2821
2822 tree
2823 nreverse (tree t)
2824 {
2825 tree prev = 0, decl, next;
2826 for (decl = t; decl; decl = next)
2827 {
2828 /* We shouldn't be using this function to reverse BLOCK chains; we
2829 have blocks_nreverse for that. */
2830 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2831 next = TREE_CHAIN (decl);
2832 TREE_CHAIN (decl) = prev;
2833 prev = decl;
2834 }
2835 return prev;
2836 }
2837 \f
2838 /* Return a newly created TREE_LIST node whose
2839 purpose and value fields are PARM and VALUE. */
2840
2841 tree
2842 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2843 {
2844 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2845 TREE_PURPOSE (t) = parm;
2846 TREE_VALUE (t) = value;
2847 return t;
2848 }
2849
2850 /* Build a chain of TREE_LIST nodes from a vector. */
2851
2852 tree
2853 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2854 {
2855 tree ret = NULL_TREE;
2856 tree *pp = &ret;
2857 unsigned int i;
2858 tree t;
2859 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2860 {
2861 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2862 pp = &TREE_CHAIN (*pp);
2863 }
2864 return ret;
2865 }
2866
2867 /* Return a newly created TREE_LIST node whose
2868 purpose and value fields are PURPOSE and VALUE
2869 and whose TREE_CHAIN is CHAIN. */
2870
2871 tree
2872 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2873 {
2874 tree node;
2875
2876 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2877 memset (node, 0, sizeof (struct tree_common));
2878
2879 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2880
2881 TREE_SET_CODE (node, TREE_LIST);
2882 TREE_CHAIN (node) = chain;
2883 TREE_PURPOSE (node) = purpose;
2884 TREE_VALUE (node) = value;
2885 return node;
2886 }
2887
2888 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2889 trees. */
2890
2891 vec<tree, va_gc> *
2892 ctor_to_vec (tree ctor)
2893 {
2894 vec<tree, va_gc> *vec;
2895 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2896 unsigned int ix;
2897 tree val;
2898
2899 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2900 vec->quick_push (val);
2901
2902 return vec;
2903 }
2904 \f
2905 /* Return the size nominally occupied by an object of type TYPE
2906 when it resides in memory. The value is measured in units of bytes,
2907 and its data type is that normally used for type sizes
2908 (which is the first type created by make_signed_type or
2909 make_unsigned_type). */
2910
2911 tree
2912 size_in_bytes (const_tree type)
2913 {
2914 tree t;
2915
2916 if (type == error_mark_node)
2917 return integer_zero_node;
2918
2919 type = TYPE_MAIN_VARIANT (type);
2920 t = TYPE_SIZE_UNIT (type);
2921
2922 if (t == 0)
2923 {
2924 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2925 return size_zero_node;
2926 }
2927
2928 return t;
2929 }
2930
2931 /* Return the size of TYPE (in bytes) as a wide integer
2932 or return -1 if the size can vary or is larger than an integer. */
2933
2934 HOST_WIDE_INT
2935 int_size_in_bytes (const_tree type)
2936 {
2937 tree t;
2938
2939 if (type == error_mark_node)
2940 return 0;
2941
2942 type = TYPE_MAIN_VARIANT (type);
2943 t = TYPE_SIZE_UNIT (type);
2944
2945 if (t && tree_fits_uhwi_p (t))
2946 return TREE_INT_CST_LOW (t);
2947 else
2948 return -1;
2949 }
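
/* For example (sketch): int_size_in_bytes (integer_type_node) is typically
   4, while for an incomplete type, a variable-length type, or a type whose
   size does not fit in a HOST_WIDE_INT it returns -1 and callers must fall
   back to the tree returned by size_in_bytes above.  */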
2950
2951 /* Return the maximum size of TYPE (in bytes) as a wide integer
2952 or return -1 if the size can vary or is larger than an integer. */
2953
2954 HOST_WIDE_INT
2955 max_int_size_in_bytes (const_tree type)
2956 {
2957 HOST_WIDE_INT size = -1;
2958 tree size_tree;
2959
2960 /* If this is an array type, check for a possible MAX_SIZE attached. */
2961
2962 if (TREE_CODE (type) == ARRAY_TYPE)
2963 {
2964 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2965
2966 if (size_tree && tree_fits_uhwi_p (size_tree))
2967 size = tree_to_uhwi (size_tree);
2968 }
2969
2970 /* If we still haven't been able to get a size, see if the language
2971 can compute a maximum size. */
2972
2973 if (size == -1)
2974 {
2975 size_tree = lang_hooks.types.max_size (type);
2976
2977 if (size_tree && tree_fits_uhwi_p (size_tree))
2978 size = tree_to_uhwi (size_tree);
2979 }
2980
2981 return size;
2982 }
2983 \f
2984 /* Return the bit position of FIELD, in bits from the start of the record.
2985 This is a tree of type bitsizetype. */
2986
2987 tree
2988 bit_position (const_tree field)
2989 {
2990 return bit_from_pos (DECL_FIELD_OFFSET (field),
2991 DECL_FIELD_BIT_OFFSET (field));
2992 }
2993 \f
2994 /* Return the byte position of FIELD, in bytes from the start of the record.
2995 This is a tree of type sizetype. */
2996
2997 tree
2998 byte_position (const_tree field)
2999 {
3000 return byte_from_pos (DECL_FIELD_OFFSET (field),
3001 DECL_FIELD_BIT_OFFSET (field));
3002 }
3003
3004 /* Likewise, but return as an integer. It must be representable in
3005 that way (since it could be a signed value, we don't have the
3006 option of returning -1 like int_size_in_bytes can). */
3007
3008 HOST_WIDE_INT
3009 int_byte_position (const_tree field)
3010 {
3011 return tree_to_shwi (byte_position (field));
3012 }
3013 \f
3014 /* Return the strictest alignment, in bits, that T is known to have. */
3015
3016 unsigned int
3017 expr_align (const_tree t)
3018 {
3019 unsigned int align0, align1;
3020
3021 switch (TREE_CODE (t))
3022 {
3023 CASE_CONVERT: case NON_LVALUE_EXPR:
3024 /* If we have conversions, we know that the alignment of the
3025 object must meet each of the alignments of the types. */
3026 align0 = expr_align (TREE_OPERAND (t, 0));
3027 align1 = TYPE_ALIGN (TREE_TYPE (t));
3028 return MAX (align0, align1);
3029
3030 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3031 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3032 case CLEANUP_POINT_EXPR:
3033 /* These don't change the alignment of an object. */
3034 return expr_align (TREE_OPERAND (t, 0));
3035
3036 case COND_EXPR:
3037 /* The best we can do is say that the alignment is the least aligned
3038 of the two arms. */
3039 align0 = expr_align (TREE_OPERAND (t, 1));
3040 align1 = expr_align (TREE_OPERAND (t, 2));
3041 return MIN (align0, align1);
3042
3043 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3044 meaningfully; it's always 1. */
3045 case LABEL_DECL: case CONST_DECL:
3046 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3047 case FUNCTION_DECL:
3048 gcc_assert (DECL_ALIGN (t) != 0);
3049 return DECL_ALIGN (t);
3050
3051 default:
3052 break;
3053 }
3054
3055 /* Otherwise take the alignment from that of the type. */
3056 return TYPE_ALIGN (TREE_TYPE (t));
3057 }
3058 \f
3059 /* Return, as a tree node, the number of elements for TYPE (which is an
3060 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3061
3062 tree
3063 array_type_nelts (const_tree type)
3064 {
3065 tree index_type, min, max;
3066
3067 /* If they did it with unspecified bounds, then we should have already
3068 given an error about it before we got here. */
3069 if (! TYPE_DOMAIN (type))
3070 return error_mark_node;
3071
3072 index_type = TYPE_DOMAIN (type);
3073 min = TYPE_MIN_VALUE (index_type);
3074 max = TYPE_MAX_VALUE (index_type);
3075
3076 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3077 if (!max)
3078 return error_mark_node;
3079
3080 return (integer_zerop (min)
3081 ? max
3082 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3083 }
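
/* For example (illustrative): for the C array type int a[10] the domain
   is [0, 9], so array_type_nelts returns the INTEGER_CST 9.  A caller that
   wants the element count itself adds one back, e.g.

     tree m1 = array_type_nelts (type);
     tree n  = fold_build2 (PLUS_EXPR, TREE_TYPE (m1), m1,
                            build_int_cst (TREE_TYPE (m1), 1));
*/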
3084 \f
3085 /* If arg is static -- a reference to an object in static storage -- then
3086 return the object. This is not the same as the C meaning of `static'.
3087 If arg isn't static, return NULL. */
3088
3089 tree
3090 staticp (tree arg)
3091 {
3092 switch (TREE_CODE (arg))
3093 {
3094 case FUNCTION_DECL:
3095 /* Nested functions are static, even though taking their address will
3096 involve a trampoline as we unnest the nested function and create
3097 the trampoline on the tree level. */
3098 return arg;
3099
3100 case VAR_DECL:
3101 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3102 && ! DECL_THREAD_LOCAL_P (arg)
3103 && ! DECL_DLLIMPORT_P (arg)
3104 ? arg : NULL);
3105
3106 case CONST_DECL:
3107 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3108 ? arg : NULL);
3109
3110 case CONSTRUCTOR:
3111 return TREE_STATIC (arg) ? arg : NULL;
3112
3113 case LABEL_DECL:
3114 case STRING_CST:
3115 return arg;
3116
3117 case COMPONENT_REF:
3118 /* If the thing being referenced is not a field, then it is
3119 something language specific. */
3120 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3121
3122 /* If we are referencing a bitfield, we can't evaluate an
3123 ADDR_EXPR at compile time and so it isn't a constant. */
3124 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3125 return NULL;
3126
3127 return staticp (TREE_OPERAND (arg, 0));
3128
3129 case BIT_FIELD_REF:
3130 return NULL;
3131
3132 case INDIRECT_REF:
3133 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3134
3135 case ARRAY_REF:
3136 case ARRAY_RANGE_REF:
3137 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3138 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3139 return staticp (TREE_OPERAND (arg, 0));
3140 else
3141 return NULL;
3142
3143 case COMPOUND_LITERAL_EXPR:
3144 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3145
3146 default:
3147 return NULL;
3148 }
3149 }
3150
3151 \f
3152
3153
3154 /* Return whether OP is a DECL whose address is function-invariant. */
3155
3156 bool
3157 decl_address_invariant_p (const_tree op)
3158 {
3159 /* The conditions below are slightly less strict than the one in
3160 staticp. */
3161
3162 switch (TREE_CODE (op))
3163 {
3164 case PARM_DECL:
3165 case RESULT_DECL:
3166 case LABEL_DECL:
3167 case FUNCTION_DECL:
3168 return true;
3169
3170 case VAR_DECL:
3171 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3172 || DECL_THREAD_LOCAL_P (op)
3173 || DECL_CONTEXT (op) == current_function_decl
3174 || decl_function_context (op) == current_function_decl)
3175 return true;
3176 break;
3177
3178 case CONST_DECL:
3179 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3180 || decl_function_context (op) == current_function_decl)
3181 return true;
3182 break;
3183
3184 default:
3185 break;
3186 }
3187
3188 return false;
3189 }
3190
3191 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3192
3193 bool
3194 decl_address_ip_invariant_p (const_tree op)
3195 {
3196 /* The conditions below are slightly less strict than the one in
3197 staticp. */
3198
3199 switch (TREE_CODE (op))
3200 {
3201 case LABEL_DECL:
3202 case FUNCTION_DECL:
3203 case STRING_CST:
3204 return true;
3205
3206 case VAR_DECL:
3207 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3208 && !DECL_DLLIMPORT_P (op))
3209 || DECL_THREAD_LOCAL_P (op))
3210 return true;
3211 break;
3212
3213 case CONST_DECL:
3214 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3215 return true;
3216 break;
3217
3218 default:
3219 break;
3220 }
3221
3222 return false;
3223 }
3224
3225
3226 /* Return true if T is function-invariant (internal function, does
3227 not handle arithmetic; that's handled in skip_simple_arithmetic and
3228 tree_invariant_p). */
3229
3230 static bool tree_invariant_p (tree t);
3231
3232 static bool
3233 tree_invariant_p_1 (tree t)
3234 {
3235 tree op;
3236
3237 if (TREE_CONSTANT (t)
3238 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3239 return true;
3240
3241 switch (TREE_CODE (t))
3242 {
3243 case SAVE_EXPR:
3244 return true;
3245
3246 case ADDR_EXPR:
3247 op = TREE_OPERAND (t, 0);
3248 while (handled_component_p (op))
3249 {
3250 switch (TREE_CODE (op))
3251 {
3252 case ARRAY_REF:
3253 case ARRAY_RANGE_REF:
3254 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3255 || TREE_OPERAND (op, 2) != NULL_TREE
3256 || TREE_OPERAND (op, 3) != NULL_TREE)
3257 return false;
3258 break;
3259
3260 case COMPONENT_REF:
3261 if (TREE_OPERAND (op, 2) != NULL_TREE)
3262 return false;
3263 break;
3264
3265 default:;
3266 }
3267 op = TREE_OPERAND (op, 0);
3268 }
3269
3270 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3271
3272 default:
3273 break;
3274 }
3275
3276 return false;
3277 }
3278
3279 /* Return true if T is function-invariant. */
3280
3281 static bool
3282 tree_invariant_p (tree t)
3283 {
3284 tree inner = skip_simple_arithmetic (t);
3285 return tree_invariant_p_1 (inner);
3286 }
3287
3288 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3289 Do this to any expression which may be used in more than one place,
3290 but must be evaluated only once.
3291
3292 Normally, expand_expr would reevaluate the expression each time.
3293 Calling save_expr produces something that is evaluated and recorded
3294 the first time expand_expr is called on it. Subsequent calls to
3295 expand_expr just reuse the recorded value.
3296
3297 The call to expand_expr that generates code that actually computes
3298 the value is the first call *at compile time*. Subsequent calls
3299 *at compile time* generate code to use the saved value.
3300 This produces the correct result provided that *at run time* control
3301 always flows through the insns made by the first expand_expr
3302 before reaching the other places where the save_expr was evaluated.
3303 You, the caller of save_expr, must make sure this is so.
3304
3305 Constants, and certain read-only nodes, are returned with no
3306 SAVE_EXPR because that is safe. Expressions containing placeholders
3307 are not touched; see tree.def for an explanation of what these
3308 are used for. */
3309
3310 tree
3311 save_expr (tree expr)
3312 {
3313 tree t = fold (expr);
3314 tree inner;
3315
3316 /* If the tree evaluates to a constant, then we don't want to hide that
3317 fact (i.e. this allows further folding, and direct checks for constants).
3318 However, a read-only object that has side effects cannot be bypassed.
3319 Since it is no problem to reevaluate literals, we just return the
3320 literal node. */
3321 inner = skip_simple_arithmetic (t);
3322 if (TREE_CODE (inner) == ERROR_MARK)
3323 return inner;
3324
3325 if (tree_invariant_p_1 (inner))
3326 return t;
3327
3328 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3329 it means that the size or offset of some field of an object depends on
3330 the value within another field.
3331
3332 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3333 and some variable since it would then need to be both evaluated once and
3334 evaluated more than once. Front-ends must assure this case cannot
3335 happen by surrounding any such subexpressions in their own SAVE_EXPR
3336 and forcing evaluation at the proper time. */
3337 if (contains_placeholder_p (inner))
3338 return t;
3339
3340 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3341 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3342
3343 /* This expression might be placed ahead of a jump to ensure that the
3344 value was computed on both sides of the jump. So make sure it isn't
3345 eliminated as dead. */
3346 TREE_SIDE_EFFECTS (t) = 1;
3347 return t;
3348 }
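
/* Illustrative sketch (not compiled): to build x * x for an operand X
   that may have side effects, wrap it once so the side effects are
   evaluated a single time and the value is then reused:

     tree val = save_expr (x);
     tree sqr = build2 (MULT_EXPR, TREE_TYPE (val), val, val);

   Both operands point at the same SAVE_EXPR node, so expand_expr computes
   X once and reuses the recorded value thereafter.  */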
3349
3350 /* Look inside EXPR into any simple arithmetic operations. Return the
3351 outermost non-arithmetic or non-invariant node. */
3352
3353 tree
3354 skip_simple_arithmetic (tree expr)
3355 {
3356 /* We don't care about whether this can be used as an lvalue in this
3357 context. */
3358 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3359 expr = TREE_OPERAND (expr, 0);
3360
3361 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3362 a constant, it will be more efficient to not make another SAVE_EXPR since
3363 it will allow better simplification and GCSE will be able to merge the
3364 computations if they actually occur. */
3365 while (true)
3366 {
3367 if (UNARY_CLASS_P (expr))
3368 expr = TREE_OPERAND (expr, 0);
3369 else if (BINARY_CLASS_P (expr))
3370 {
3371 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3372 expr = TREE_OPERAND (expr, 0);
3373 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3374 expr = TREE_OPERAND (expr, 1);
3375 else
3376 break;
3377 }
3378 else
3379 break;
3380 }
3381
3382 return expr;
3383 }
3384
3385 /* Look inside EXPR into simple arithmetic operations involving constants.
3386 Return the outermost non-arithmetic or non-constant node. */
3387
3388 tree
3389 skip_simple_constant_arithmetic (tree expr)
3390 {
3391 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3392 expr = TREE_OPERAND (expr, 0);
3393
3394 while (true)
3395 {
3396 if (UNARY_CLASS_P (expr))
3397 expr = TREE_OPERAND (expr, 0);
3398 else if (BINARY_CLASS_P (expr))
3399 {
3400 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3401 expr = TREE_OPERAND (expr, 0);
3402 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3403 expr = TREE_OPERAND (expr, 1);
3404 else
3405 break;
3406 }
3407 else
3408 break;
3409 }
3410
3411 return expr;
3412 }
3413
3414 /* Return which tree structure is used by T. */
3415
3416 enum tree_node_structure_enum
3417 tree_node_structure (const_tree t)
3418 {
3419 const enum tree_code code = TREE_CODE (t);
3420 return tree_node_structure_for_code (code);
3421 }
3422
3423 /* Set various status flags when building a CALL_EXPR object T. */
3424
3425 static void
3426 process_call_operands (tree t)
3427 {
3428 bool side_effects = TREE_SIDE_EFFECTS (t);
3429 bool read_only = false;
3430 int i = call_expr_flags (t);
3431
3432 /* Calls have side-effects, except those to const or pure functions. */
3433 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3434 side_effects = true;
3435 /* Propagate TREE_READONLY of arguments for const functions. */
3436 if (i & ECF_CONST)
3437 read_only = true;
3438
3439 if (!side_effects || read_only)
3440 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3441 {
3442 tree op = TREE_OPERAND (t, i);
3443 if (op && TREE_SIDE_EFFECTS (op))
3444 side_effects = true;
3445 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3446 read_only = false;
3447 }
3448
3449 TREE_SIDE_EFFECTS (t) = side_effects;
3450 TREE_READONLY (t) = read_only;
3451 }
3452 \f
3453 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3454 size or offset that depends on a field within a record. */
3455
3456 bool
3457 contains_placeholder_p (const_tree exp)
3458 {
3459 enum tree_code code;
3460
3461 if (!exp)
3462 return 0;
3463
3464 code = TREE_CODE (exp);
3465 if (code == PLACEHOLDER_EXPR)
3466 return 1;
3467
3468 switch (TREE_CODE_CLASS (code))
3469 {
3470 case tcc_reference:
3471 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3472 position computations since they will be converted into a
3473 WITH_RECORD_EXPR involving the reference, which we assume
3474 here will be valid. */
3475 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3476
3477 case tcc_exceptional:
3478 if (code == TREE_LIST)
3479 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3480 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3481 break;
3482
3483 case tcc_unary:
3484 case tcc_binary:
3485 case tcc_comparison:
3486 case tcc_expression:
3487 switch (code)
3488 {
3489 case COMPOUND_EXPR:
3490 /* Ignoring the first operand isn't quite right, but works best. */
3491 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3492
3493 case COND_EXPR:
3494 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3495 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3496 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3497
3498 case SAVE_EXPR:
3499 /* The save_expr function never wraps anything containing
3500 a PLACEHOLDER_EXPR. */
3501 return 0;
3502
3503 default:
3504 break;
3505 }
3506
3507 switch (TREE_CODE_LENGTH (code))
3508 {
3509 case 1:
3510 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3511 case 2:
3512 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3513 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3514 default:
3515 return 0;
3516 }
3517
3518 case tcc_vl_exp:
3519 switch (code)
3520 {
3521 case CALL_EXPR:
3522 {
3523 const_tree arg;
3524 const_call_expr_arg_iterator iter;
3525 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3526 if (CONTAINS_PLACEHOLDER_P (arg))
3527 return 1;
3528 return 0;
3529 }
3530 default:
3531 return 0;
3532 }
3533
3534 default:
3535 return 0;
3536 }
3537 return 0;
3538 }
3539
3540 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3541 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3542 field positions. */
3543
3544 static bool
3545 type_contains_placeholder_1 (const_tree type)
3546 {
3547 /* If the size contains a placeholder or the parent type (component type in
3548 the case of arrays) type involves a placeholder, this type does. */
3549 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3550 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3551 || (!POINTER_TYPE_P (type)
3552 && TREE_TYPE (type)
3553 && type_contains_placeholder_p (TREE_TYPE (type))))
3554 return true;
3555
3556 /* Now do type-specific checks. Note that the last part of the check above
3557 greatly limits what we have to do below. */
3558 switch (TREE_CODE (type))
3559 {
3560 case VOID_TYPE:
3561 case POINTER_BOUNDS_TYPE:
3562 case COMPLEX_TYPE:
3563 case ENUMERAL_TYPE:
3564 case BOOLEAN_TYPE:
3565 case POINTER_TYPE:
3566 case OFFSET_TYPE:
3567 case REFERENCE_TYPE:
3568 case METHOD_TYPE:
3569 case FUNCTION_TYPE:
3570 case VECTOR_TYPE:
3571 case NULLPTR_TYPE:
3572 return false;
3573
3574 case INTEGER_TYPE:
3575 case REAL_TYPE:
3576 case FIXED_POINT_TYPE:
3577 /* Here we just check the bounds. */
3578 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3579 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3580
3581 case ARRAY_TYPE:
3582 /* We have already checked the component type above, so just check the
3583 domain type. */
3584 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3585
3586 case RECORD_TYPE:
3587 case UNION_TYPE:
3588 case QUAL_UNION_TYPE:
3589 {
3590 tree field;
3591
3592 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3593 if (TREE_CODE (field) == FIELD_DECL
3594 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3595 || (TREE_CODE (type) == QUAL_UNION_TYPE
3596 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3597 || type_contains_placeholder_p (TREE_TYPE (field))))
3598 return true;
3599
3600 return false;
3601 }
3602
3603 default:
3604 gcc_unreachable ();
3605 }
3606 }
3607
3608 /* Wrapper around above function used to cache its result. */
3609
3610 bool
3611 type_contains_placeholder_p (tree type)
3612 {
3613 bool result;
3614
3615 /* If the contains_placeholder_bits field has been initialized,
3616 then we know the answer. */
3617 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3618 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3619
3620 /* Indicate that we've seen this type node, and the answer is false.
3621 This is what we want to return if we run into recursion via fields. */
3622 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3623
3624 /* Compute the real value. */
3625 result = type_contains_placeholder_1 (type);
3626
3627 /* Store the real value. */
3628 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3629
3630 return result;
3631 }
3632 \f
3633 /* Push tree EXP onto vector QUEUE if it is not already present. */
3634
3635 static void
3636 push_without_duplicates (tree exp, vec<tree> *queue)
3637 {
3638 unsigned int i;
3639 tree iter;
3640
3641 FOR_EACH_VEC_ELT (*queue, i, iter)
3642 if (simple_cst_equal (iter, exp) == 1)
3643 break;
3644
3645 if (!iter)
3646 queue->safe_push (exp);
3647 }
3648
3649 /* Given a tree EXP, find all occurrences of references to fields
3650 in a PLACEHOLDER_EXPR and place them in vector REFS without
3651 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3652 we assume here that EXP contains only arithmetic expressions
3653 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3654 argument list. */
3655
3656 void
3657 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3658 {
3659 enum tree_code code = TREE_CODE (exp);
3660 tree inner;
3661 int i;
3662
3663 /* We handle TREE_LIST and COMPONENT_REF separately. */
3664 if (code == TREE_LIST)
3665 {
3666 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3667 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3668 }
3669 else if (code == COMPONENT_REF)
3670 {
3671 for (inner = TREE_OPERAND (exp, 0);
3672 REFERENCE_CLASS_P (inner);
3673 inner = TREE_OPERAND (inner, 0))
3674 ;
3675
3676 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3677 push_without_duplicates (exp, refs);
3678 else
3679 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3680 }
3681 else
3682 switch (TREE_CODE_CLASS (code))
3683 {
3684 case tcc_constant:
3685 break;
3686
3687 case tcc_declaration:
3688 /* Variables allocated to static storage can stay. */
3689 if (!TREE_STATIC (exp))
3690 push_without_duplicates (exp, refs);
3691 break;
3692
3693 case tcc_expression:
3694 /* This is the pattern built in ada/make_aligning_type. */
3695 if (code == ADDR_EXPR
3696 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3697 {
3698 push_without_duplicates (exp, refs);
3699 break;
3700 }
3701
3702 /* Fall through... */
3703
3704 case tcc_exceptional:
3705 case tcc_unary:
3706 case tcc_binary:
3707 case tcc_comparison:
3708 case tcc_reference:
3709 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3710 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3711 break;
3712
3713 case tcc_vl_exp:
3714 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3715 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3716 break;
3717
3718 default:
3719 gcc_unreachable ();
3720 }
3721 }
3722
3723 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3724 return a tree with all occurrences of references to F in a
3725 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3726 CONST_DECLs. Note that we assume here that EXP contains only
3727 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3728 occurring only in their argument list. */
3729
3730 tree
3731 substitute_in_expr (tree exp, tree f, tree r)
3732 {
3733 enum tree_code code = TREE_CODE (exp);
3734 tree op0, op1, op2, op3;
3735 tree new_tree;
3736
3737 /* We handle TREE_LIST and COMPONENT_REF separately. */
3738 if (code == TREE_LIST)
3739 {
3740 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3741 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3742 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3743 return exp;
3744
3745 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3746 }
3747 else if (code == COMPONENT_REF)
3748 {
3749 tree inner;
3750
3751 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3752 and it is the right field, replace it with R. */
3753 for (inner = TREE_OPERAND (exp, 0);
3754 REFERENCE_CLASS_P (inner);
3755 inner = TREE_OPERAND (inner, 0))
3756 ;
3757
3758 /* The field. */
3759 op1 = TREE_OPERAND (exp, 1);
3760
3761 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3762 return r;
3763
3764 /* If this expression hasn't been completed yet, leave it alone. */
3765 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3766 return exp;
3767
3768 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3769 if (op0 == TREE_OPERAND (exp, 0))
3770 return exp;
3771
3772 new_tree
3773 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3774 }
3775 else
3776 switch (TREE_CODE_CLASS (code))
3777 {
3778 case tcc_constant:
3779 return exp;
3780
3781 case tcc_declaration:
3782 if (exp == f)
3783 return r;
3784 else
3785 return exp;
3786
3787 case tcc_expression:
3788 if (exp == f)
3789 return r;
3790
3791 /* Fall through... */
3792
3793 case tcc_exceptional:
3794 case tcc_unary:
3795 case tcc_binary:
3796 case tcc_comparison:
3797 case tcc_reference:
3798 switch (TREE_CODE_LENGTH (code))
3799 {
3800 case 0:
3801 return exp;
3802
3803 case 1:
3804 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3805 if (op0 == TREE_OPERAND (exp, 0))
3806 return exp;
3807
3808 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3809 break;
3810
3811 case 2:
3812 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3813 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3814
3815 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3816 return exp;
3817
3818 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3819 break;
3820
3821 case 3:
3822 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3823 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3824 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3825
3826 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3827 && op2 == TREE_OPERAND (exp, 2))
3828 return exp;
3829
3830 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3831 break;
3832
3833 case 4:
3834 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3835 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3836 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3837 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3838
3839 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3840 && op2 == TREE_OPERAND (exp, 2)
3841 && op3 == TREE_OPERAND (exp, 3))
3842 return exp;
3843
3844 new_tree
3845 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3846 break;
3847
3848 default:
3849 gcc_unreachable ();
3850 }
3851 break;
3852
3853 case tcc_vl_exp:
3854 {
3855 int i;
3856
3857 new_tree = NULL_TREE;
3858
3859 /* If we are trying to replace F with a constant, inline back
3860 functions which do nothing else than computing a value from
3861 the arguments they are passed. This makes it possible to
3862 fold partially or entirely the replacement expression. */
3863 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3864 {
3865 tree t = maybe_inline_call_in_expr (exp);
3866 if (t)
3867 return SUBSTITUTE_IN_EXPR (t, f, r);
3868 }
3869
3870 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3871 {
3872 tree op = TREE_OPERAND (exp, i);
3873 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3874 if (new_op != op)
3875 {
3876 if (!new_tree)
3877 new_tree = copy_node (exp);
3878 TREE_OPERAND (new_tree, i) = new_op;
3879 }
3880 }
3881
3882 if (new_tree)
3883 {
3884 new_tree = fold (new_tree);
3885 if (TREE_CODE (new_tree) == CALL_EXPR)
3886 process_call_operands (new_tree);
3887 }
3888 else
3889 return exp;
3890 }
3891 break;
3892
3893 default:
3894 gcc_unreachable ();
3895 }
3896
3897 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3898
3899 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3900 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3901
3902 return new_tree;
3903 }
3904
3905 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3906 for it within OBJ, a tree that is an object or a chain of references. */
3907
3908 tree
3909 substitute_placeholder_in_expr (tree exp, tree obj)
3910 {
3911 enum tree_code code = TREE_CODE (exp);
3912 tree op0, op1, op2, op3;
3913 tree new_tree;
3914
3915 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3916 in the chain of OBJ. */
3917 if (code == PLACEHOLDER_EXPR)
3918 {
3919 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3920 tree elt;
3921
3922 for (elt = obj; elt != 0;
3923 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3924 || TREE_CODE (elt) == COND_EXPR)
3925 ? TREE_OPERAND (elt, 1)
3926 : (REFERENCE_CLASS_P (elt)
3927 || UNARY_CLASS_P (elt)
3928 || BINARY_CLASS_P (elt)
3929 || VL_EXP_CLASS_P (elt)
3930 || EXPRESSION_CLASS_P (elt))
3931 ? TREE_OPERAND (elt, 0) : 0))
3932 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3933 return elt;
3934
3935 for (elt = obj; elt != 0;
3936 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3937 || TREE_CODE (elt) == COND_EXPR)
3938 ? TREE_OPERAND (elt, 1)
3939 : (REFERENCE_CLASS_P (elt)
3940 || UNARY_CLASS_P (elt)
3941 || BINARY_CLASS_P (elt)
3942 || VL_EXP_CLASS_P (elt)
3943 || EXPRESSION_CLASS_P (elt))
3944 ? TREE_OPERAND (elt, 0) : 0))
3945 if (POINTER_TYPE_P (TREE_TYPE (elt))
3946 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3947 == need_type))
3948 return fold_build1 (INDIRECT_REF, need_type, elt);
3949
3950 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3951 survives until RTL generation, there will be an error. */
3952 return exp;
3953 }
3954
3955 /* TREE_LIST is special because we need to look at TREE_VALUE
3956 and TREE_CHAIN, not TREE_OPERANDS. */
3957 else if (code == TREE_LIST)
3958 {
3959 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3960 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3961 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3962 return exp;
3963
3964 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3965 }
3966 else
3967 switch (TREE_CODE_CLASS (code))
3968 {
3969 case tcc_constant:
3970 case tcc_declaration:
3971 return exp;
3972
3973 case tcc_exceptional:
3974 case tcc_unary:
3975 case tcc_binary:
3976 case tcc_comparison:
3977 case tcc_expression:
3978 case tcc_reference:
3979 case tcc_statement:
3980 switch (TREE_CODE_LENGTH (code))
3981 {
3982 case 0:
3983 return exp;
3984
3985 case 1:
3986 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3987 if (op0 == TREE_OPERAND (exp, 0))
3988 return exp;
3989
3990 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3991 break;
3992
3993 case 2:
3994 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3995 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3996
3997 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3998 return exp;
3999
4000 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4001 break;
4002
4003 case 3:
4004 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4005 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4006 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4007
4008 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4009 && op2 == TREE_OPERAND (exp, 2))
4010 return exp;
4011
4012 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4013 break;
4014
4015 case 4:
4016 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4017 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4018 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4019 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4020
4021 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4022 && op2 == TREE_OPERAND (exp, 2)
4023 && op3 == TREE_OPERAND (exp, 3))
4024 return exp;
4025
4026 new_tree
4027 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4028 break;
4029
4030 default:
4031 gcc_unreachable ();
4032 }
4033 break;
4034
4035 case tcc_vl_exp:
4036 {
4037 int i;
4038
4039 new_tree = NULL_TREE;
4040
4041 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4042 {
4043 tree op = TREE_OPERAND (exp, i);
4044 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4045 if (new_op != op)
4046 {
4047 if (!new_tree)
4048 new_tree = copy_node (exp);
4049 TREE_OPERAND (new_tree, i) = new_op;
4050 }
4051 }
4052
4053 if (new_tree)
4054 {
4055 new_tree = fold (new_tree);
4056 if (TREE_CODE (new_tree) == CALL_EXPR)
4057 process_call_operands (new_tree);
4058 }
4059 else
4060 return exp;
4061 }
4062 break;
4063
4064 default:
4065 gcc_unreachable ();
4066 }
4067
4068 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4069
4070 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4071 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4072
4073 return new_tree;
4074 }
4075 \f
4076
4077 /* Subroutine of stabilize_reference; this is called for subtrees of
4078 references. Any expression with side-effects must be put in a SAVE_EXPR
4079 to ensure that it is only evaluated once.
4080
4081 We don't put SAVE_EXPR nodes around everything, because assigning very
4082 simple expressions to temporaries causes us to miss good opportunities
4083 for optimizations. Among other things, the opportunity to fold in the
4084 addition of a constant into an addressing mode often gets lost, e.g.
4085 "y[i+1] += x;". In general, we take the approach that we should not make
4086 an assignment unless we are forced into it - i.e., that any non-side effect
4087 operator should be allowed, and that cse should take care of coalescing
4088 multiple utterances of the same expression should that prove fruitful. */
4089
4090 static tree
4091 stabilize_reference_1 (tree e)
4092 {
4093 tree result;
4094 enum tree_code code = TREE_CODE (e);
4095
4096 /* We cannot ignore const expressions because the expression might be a
4097 reference to a const array whose index contains side-effects. But we can
4098 ignore things that are actually constant or that have already been
4099 handled by this function. */
4100
4101 if (tree_invariant_p (e))
4102 return e;
4103
4104 switch (TREE_CODE_CLASS (code))
4105 {
4106 case tcc_exceptional:
4107 case tcc_type:
4108 case tcc_declaration:
4109 case tcc_comparison:
4110 case tcc_statement:
4111 case tcc_expression:
4112 case tcc_reference:
4113 case tcc_vl_exp:
4114 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4115 so that it will only be evaluated once. */
4116 /* The reference (r) and comparison (<) classes could be handled as
4117 below, but it is generally faster to only evaluate them once. */
4118 if (TREE_SIDE_EFFECTS (e))
4119 return save_expr (e);
4120 return e;
4121
4122 case tcc_constant:
4123 /* Constants need no processing. In fact, we should never reach
4124 here. */
4125 return e;
4126
4127 case tcc_binary:
4128 /* Division is slow and tends to be compiled with jumps,
4129 especially the division by powers of 2 that is often
4130 found inside of an array reference. So do it just once. */
4131 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4132 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4133 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4134 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4135 return save_expr (e);
4136 /* Recursively stabilize each operand. */
4137 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4138 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4139 break;
4140
4141 case tcc_unary:
4142 /* Recursively stabilize each operand. */
4143 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4144 break;
4145
4146 default:
4147 gcc_unreachable ();
4148 }
4149
4150 TREE_TYPE (result) = TREE_TYPE (e);
4151 TREE_READONLY (result) = TREE_READONLY (e);
4152 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4153 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4154
4155 return result;
4156 }
4157
4158 /* Stabilize a reference so that we can use it any number of times
4159 without causing its operands to be evaluated more than once.
4160 Returns the stabilized reference. This works by means of save_expr,
4161 so see the caveats in the comments about save_expr.
4162
4163 Also allows conversion expressions whose operands are references.
4164 Any other kind of expression is returned unchanged. */
4165
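/* For illustration only (REF and its operands are hypothetical trees, not
   part of the original sources): stabilizing a reference such as a[i++]
   leaves the ARRAY_REF in place but wraps the side-effecting index in a
   SAVE_EXPR, so the result can be reused - e.g. on both sides of a
   read-modify-write - while evaluating i++ only once:

     tree stable = stabilize_reference (ref);
     // First use of STABLE evaluates the SAVE_EXPR; later uses reuse
     // the saved value instead of re-evaluating i++.  */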
4166 tree
4167 stabilize_reference (tree ref)
4168 {
4169 tree result;
4170 enum tree_code code = TREE_CODE (ref);
4171
4172 switch (code)
4173 {
4174 case VAR_DECL:
4175 case PARM_DECL:
4176 case RESULT_DECL:
4177 /* No action is needed in this case. */
4178 return ref;
4179
4180 CASE_CONVERT:
4181 case FLOAT_EXPR:
4182 case FIX_TRUNC_EXPR:
4183 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4184 break;
4185
4186 case INDIRECT_REF:
4187 result = build_nt (INDIRECT_REF,
4188 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4189 break;
4190
4191 case COMPONENT_REF:
4192 result = build_nt (COMPONENT_REF,
4193 stabilize_reference (TREE_OPERAND (ref, 0)),
4194 TREE_OPERAND (ref, 1), NULL_TREE);
4195 break;
4196
4197 case BIT_FIELD_REF:
4198 result = build_nt (BIT_FIELD_REF,
4199 stabilize_reference (TREE_OPERAND (ref, 0)),
4200 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4201 break;
4202
4203 case ARRAY_REF:
4204 result = build_nt (ARRAY_REF,
4205 stabilize_reference (TREE_OPERAND (ref, 0)),
4206 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4207 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4208 break;
4209
4210 case ARRAY_RANGE_REF:
4211 result = build_nt (ARRAY_RANGE_REF,
4212 stabilize_reference (TREE_OPERAND (ref, 0)),
4213 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4214 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4215 break;
4216
4217 case COMPOUND_EXPR:
4218 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4219 it wouldn't be ignored. This matters when dealing with
4220 volatiles. */
4221 return stabilize_reference_1 (ref);
4222
4223 /* If arg isn't a kind of lvalue we recognize, make no change.
4224 Caller should recognize the error for an invalid lvalue. */
4225 default:
4226 return ref;
4227
4228 case ERROR_MARK:
4229 return error_mark_node;
4230 }
4231
4232 TREE_TYPE (result) = TREE_TYPE (ref);
4233 TREE_READONLY (result) = TREE_READONLY (ref);
4234 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4235 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4236
4237 return result;
4238 }
4239 \f
4240 /* Low-level constructors for expressions. */
4241
4242 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4243 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4244
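/* For example, an ADDR_EXPR such as &x for a file-scope VAR_DECL X ends up
   with TREE_CONSTANT set, while &a[i] with a non-constant index I does not
   (and it inherits TREE_SIDE_EFFECTS from I).  */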
4245 void
4246 recompute_tree_invariant_for_addr_expr (tree t)
4247 {
4248 tree node;
4249 bool tc = true, se = false;
4250
4251 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4252
4253 /* We started out assuming this address is both invariant and constant, and
4254 does not have side effects. Now go down any handled components and see if
4255 any of them involve offsets that are either non-constant or non-invariant.
4256 Also check for side-effects.
4257
4258 ??? Note that this code makes no attempt to deal with the case where
4259 taking the address of something causes a copy due to misalignment. */
4260
4261 #define UPDATE_FLAGS(NODE) \
4262 do { tree _node = (NODE); \
4263 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4264 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4265
4266 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4267 node = TREE_OPERAND (node, 0))
4268 {
4269 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4270 array reference (probably made temporarily by the G++ front end),
4271 so ignore all the operands. */
4272 if ((TREE_CODE (node) == ARRAY_REF
4273 || TREE_CODE (node) == ARRAY_RANGE_REF)
4274 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4275 {
4276 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4277 if (TREE_OPERAND (node, 2))
4278 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4279 if (TREE_OPERAND (node, 3))
4280 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4281 }
4282 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4283 FIELD_DECL, apparently. The G++ front end can put something else
4284 there, at least temporarily. */
4285 else if (TREE_CODE (node) == COMPONENT_REF
4286 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4287 {
4288 if (TREE_OPERAND (node, 2))
4289 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4290 }
4291 }
4292
4293 node = lang_hooks.expr_to_decl (node, &tc, &se);
4294
4295 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4296 the address, since &(*a)->b is a form of addition. If it's a constant, the
4297 address is constant too. If it's a decl, its address is constant if the
4298 decl is static. Everything else is not constant and, furthermore,
4299 taking the address of a volatile variable is not volatile. */
4300 if (TREE_CODE (node) == INDIRECT_REF
4301 || TREE_CODE (node) == MEM_REF)
4302 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4303 else if (CONSTANT_CLASS_P (node))
4304 ;
4305 else if (DECL_P (node))
4306 tc &= (staticp (node) != NULL_TREE);
4307 else
4308 {
4309 tc = false;
4310 se |= TREE_SIDE_EFFECTS (node);
4311 }
4312
4313
4314 TREE_CONSTANT (t) = tc;
4315 TREE_SIDE_EFFECTS (t) = se;
4316 #undef UPDATE_FLAGS
4317 }
4318
4319 /* Build an expression of code CODE, data type TYPE, and operands as
4320 specified. Expressions and reference nodes can be created this way.
4321 Constants, decls, types and misc nodes cannot be.
4322
4323 We define non-variadic functions taking from 0 to 5 arguments below.
4324 This is enough for all extant tree codes. */
4325
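/* For illustration only (X is a hypothetical tree of integer type, not part
   of the original sources): a typical caller builds expression nodes with
   the wrappers around these functions roughly as follows:

     tree one = build_int_cst (integer_type_node, 1);
     tree sum = build2 (PLUS_EXPR, integer_type_node, x, one);
     tree neg = build1 (NEGATE_EXPR, integer_type_node, sum);

   build1 and build2 are the tree.h macros that expand to the *_stat
   functions defined below.  */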
4326 tree
4327 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4328 {
4329 tree t;
4330
4331 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4332
4333 t = make_node_stat (code PASS_MEM_STAT);
4334 TREE_TYPE (t) = tt;
4335
4336 return t;
4337 }
4338
4339 tree
4340 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4341 {
4342 int length = sizeof (struct tree_exp);
4343 tree t;
4344
4345 record_node_allocation_statistics (code, length);
4346
4347 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4348
4349 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4350
4351 memset (t, 0, sizeof (struct tree_common));
4352
4353 TREE_SET_CODE (t, code);
4354
4355 TREE_TYPE (t) = type;
4356 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4357 TREE_OPERAND (t, 0) = node;
4358 if (node && !TYPE_P (node))
4359 {
4360 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4361 TREE_READONLY (t) = TREE_READONLY (node);
4362 }
4363
4364 if (TREE_CODE_CLASS (code) == tcc_statement)
4365 TREE_SIDE_EFFECTS (t) = 1;
4366 else switch (code)
4367 {
4368 case VA_ARG_EXPR:
4369 /* All of these have side-effects, no matter what their
4370 operands are. */
4371 TREE_SIDE_EFFECTS (t) = 1;
4372 TREE_READONLY (t) = 0;
4373 break;
4374
4375 case INDIRECT_REF:
4376 /* Whether a dereference is readonly has nothing to do with whether
4377 its operand is readonly. */
4378 TREE_READONLY (t) = 0;
4379 break;
4380
4381 case ADDR_EXPR:
4382 if (node)
4383 recompute_tree_invariant_for_addr_expr (t);
4384 break;
4385
4386 default:
4387 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4388 && node && !TYPE_P (node)
4389 && TREE_CONSTANT (node))
4390 TREE_CONSTANT (t) = 1;
4391 if (TREE_CODE_CLASS (code) == tcc_reference
4392 && node && TREE_THIS_VOLATILE (node))
4393 TREE_THIS_VOLATILE (t) = 1;
4394 break;
4395 }
4396
4397 return t;
4398 }
4399
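/* Helper for the buildN_stat functions below: store operand N of T and fold
   that operand's TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT flags
   into the local side_effects, read_only and constant accumulators.  */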
4400 #define PROCESS_ARG(N) \
4401 do { \
4402 TREE_OPERAND (t, N) = arg##N; \
4403 if (arg##N && !TYPE_P (arg##N)) \
4404 { \
4405 if (TREE_SIDE_EFFECTS (arg##N)) \
4406 side_effects = 1; \
4407 if (!TREE_READONLY (arg##N) \
4408 && !CONSTANT_CLASS_P (arg##N)) \
4409 (void) (read_only = 0); \
4410 if (!TREE_CONSTANT (arg##N)) \
4411 (void) (constant = 0); \
4412 } \
4413 } while (0)
4414
4415 tree
4416 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4417 {
4418 bool constant, read_only, side_effects;
4419 tree t;
4420
4421 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4422
4423 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4424 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4425 /* When sizetype precision doesn't match that of pointers
4426 we need to be able to build explicit extensions or truncations
4427 of the offset argument. */
4428 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4429 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4430 && TREE_CODE (arg1) == INTEGER_CST);
4431
4432 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4433 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4434 && ptrofftype_p (TREE_TYPE (arg1)));
4435
4436 t = make_node_stat (code PASS_MEM_STAT);
4437 TREE_TYPE (t) = tt;
4438
4439 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4440 result based on those same flags for the arguments. But if the
4441 arguments aren't really even `tree' expressions, we shouldn't be trying
4442 to do this. */
4443
4444 /* Expressions without side effects may be constant if their
4445 arguments are as well. */
4446 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4447 || TREE_CODE_CLASS (code) == tcc_binary);
4448 read_only = 1;
4449 side_effects = TREE_SIDE_EFFECTS (t);
4450
4451 PROCESS_ARG (0);
4452 PROCESS_ARG (1);
4453
4454 TREE_SIDE_EFFECTS (t) = side_effects;
4455 if (code == MEM_REF)
4456 {
4457 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4458 {
4459 tree o = TREE_OPERAND (arg0, 0);
4460 TREE_READONLY (t) = TREE_READONLY (o);
4461 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4462 }
4463 }
4464 else
4465 {
4466 TREE_READONLY (t) = read_only;
4467 TREE_CONSTANT (t) = constant;
4468 TREE_THIS_VOLATILE (t)
4469 = (TREE_CODE_CLASS (code) == tcc_reference
4470 && arg0 && TREE_THIS_VOLATILE (arg0));
4471 }
4472
4473 return t;
4474 }
4475
4476
4477 tree
4478 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4479 tree arg2 MEM_STAT_DECL)
4480 {
4481 bool constant, read_only, side_effects;
4482 tree t;
4483
4484 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4485 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4486
4487 t = make_node_stat (code PASS_MEM_STAT);
4488 TREE_TYPE (t) = tt;
4489
4490 read_only = 1;
4491
4492 /* As a special exception, if COND_EXPR has NULL branches, we
4493 assume that it is a gimple statement and always consider
4494 it to have side effects. */
4495 if (code == COND_EXPR
4496 && tt == void_type_node
4497 && arg1 == NULL_TREE
4498 && arg2 == NULL_TREE)
4499 side_effects = true;
4500 else
4501 side_effects = TREE_SIDE_EFFECTS (t);
4502
4503 PROCESS_ARG (0);
4504 PROCESS_ARG (1);
4505 PROCESS_ARG (2);
4506
4507 if (code == COND_EXPR)
4508 TREE_READONLY (t) = read_only;
4509
4510 TREE_SIDE_EFFECTS (t) = side_effects;
4511 TREE_THIS_VOLATILE (t)
4512 = (TREE_CODE_CLASS (code) == tcc_reference
4513 && arg0 && TREE_THIS_VOLATILE (arg0));
4514
4515 return t;
4516 }
4517
4518 tree
4519 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4520 tree arg2, tree arg3 MEM_STAT_DECL)
4521 {
4522 bool constant, read_only, side_effects;
4523 tree t;
4524
4525 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4526
4527 t = make_node_stat (code PASS_MEM_STAT);
4528 TREE_TYPE (t) = tt;
4529
4530 side_effects = TREE_SIDE_EFFECTS (t);
4531
4532 PROCESS_ARG (0);
4533 PROCESS_ARG (1);
4534 PROCESS_ARG (2);
4535 PROCESS_ARG (3);
4536
4537 TREE_SIDE_EFFECTS (t) = side_effects;
4538 TREE_THIS_VOLATILE (t)
4539 = (TREE_CODE_CLASS (code) == tcc_reference
4540 && arg0 && TREE_THIS_VOLATILE (arg0));
4541
4542 return t;
4543 }
4544
4545 tree
4546 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4547 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4548 {
4549 bool constant, read_only, side_effects;
4550 tree t;
4551
4552 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4553
4554 t = make_node_stat (code PASS_MEM_STAT);
4555 TREE_TYPE (t) = tt;
4556
4557 side_effects = TREE_SIDE_EFFECTS (t);
4558
4559 PROCESS_ARG (0);
4560 PROCESS_ARG (1);
4561 PROCESS_ARG (2);
4562 PROCESS_ARG (3);
4563 PROCESS_ARG (4);
4564
4565 TREE_SIDE_EFFECTS (t) = side_effects;
4566 if (code == TARGET_MEM_REF)
4567 {
4568 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4569 {
4570 tree o = TREE_OPERAND (arg0, 0);
4571 TREE_READONLY (t) = TREE_READONLY (o);
4572 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4573 }
4574 }
4575 else
4576 TREE_THIS_VOLATILE (t)
4577 = (TREE_CODE_CLASS (code) == tcc_reference
4578 && arg0 && TREE_THIS_VOLATILE (arg0));
4579
4580 return t;
4581 }
4582
4583 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4584 on the pointer PTR. */
4585
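/* For illustration only: given a pointer tree P of type int *, a call such
   as build_simple_mem_ref_loc (loc, p) produces MEM_REF <p, 0> of type int,
   i.e. the equivalent of *p.  (build_simple_mem_ref in tree.h is the
   location-less wrapper around this function.)  */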
4586 tree
4587 build_simple_mem_ref_loc (location_t loc, tree ptr)
4588 {
4589 HOST_WIDE_INT offset = 0;
4590 tree ptype = TREE_TYPE (ptr);
4591 tree tem;
4592 /* For convenience allow addresses that collapse to a simple base
4593 and offset. */
4594 if (TREE_CODE (ptr) == ADDR_EXPR
4595 && (handled_component_p (TREE_OPERAND (ptr, 0))
4596 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4597 {
4598 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4599 gcc_assert (ptr);
4600 ptr = build_fold_addr_expr (ptr);
4601 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4602 }
4603 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4604 ptr, build_int_cst (ptype, offset));
4605 SET_EXPR_LOCATION (tem, loc);
4606 return tem;
4607 }
4608
4609 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4610
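/* For example, for a MEM_REF built as MEM_REF <p, 16> this returns the
   offset_int 16.  */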
4611 offset_int
4612 mem_ref_offset (const_tree t)
4613 {
4614 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4615 }
4616
4617 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4618 offsetted by OFFSET units. */
4619
4620 tree
4621 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4622 {
4623 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4624 build_fold_addr_expr (base),
4625 build_int_cst (ptr_type_node, offset));
4626 tree addr = build1 (ADDR_EXPR, type, ref);
4627 recompute_tree_invariant_for_addr_expr (addr);
4628 return addr;
4629 }
4630
4631 /* Similar to the build functions above, except that we don't specify
4632 the TREE_TYPE and leave TREE_SIDE_EFFECTS as 0.
4633 It is permissible for arguments to be null,
4634 or even garbage, if their values do not matter. */
4635
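/* For illustration only (BASE and INDEX are hypothetical trees): an untyped
   ARRAY_REF skeleton, as used by stabilize_reference above, can be built as

     tree t = build_nt (ARRAY_REF, base, index, NULL_TREE, NULL_TREE);

   TREE_TYPE of the result stays NULL until the caller fills it in.  */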
4636 tree
4637 build_nt (enum tree_code code, ...)
4638 {
4639 tree t;
4640 int length;
4641 int i;
4642 va_list p;
4643
4644 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4645
4646 va_start (p, code);
4647
4648 t = make_node (code);
4649 length = TREE_CODE_LENGTH (code);
4650
4651 for (i = 0; i < length; i++)
4652 TREE_OPERAND (t, i) = va_arg (p, tree);
4653
4654 va_end (p);
4655 return t;
4656 }
4657
4658 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4659 tree vec. */
4660
4661 tree
4662 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4663 {
4664 tree ret, t;
4665 unsigned int ix;
4666
4667 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4668 CALL_EXPR_FN (ret) = fn;
4669 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4670 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4671 CALL_EXPR_ARG (ret, ix) = t;
4672 return ret;
4673 }
4674 \f
4675 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4676 We do NOT enter this node in any sort of symbol table.
4677
4678 LOC is the location of the decl.
4679
4680 layout_decl is used to set up the decl's storage layout.
4681 Other slots are initialized to 0 or null pointers. */
4682
4683 tree
4684 build_decl_stat (location_t loc, enum tree_code code, tree name,
4685 tree type MEM_STAT_DECL)
4686 {
4687 tree t;
4688
4689 t = make_node_stat (code PASS_MEM_STAT);
4690 DECL_SOURCE_LOCATION (t) = loc;
4691
4692 /* if (type == error_mark_node)
4693 type = integer_type_node; */
4694 /* That is not done, deliberately, so that having error_mark_node
4695 as the type can suppress useless errors in the use of this variable. */
4696
4697 DECL_NAME (t) = name;
4698 TREE_TYPE (t) = type;
4699
4700 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4701 layout_decl (t, 0);
4702
4703 return t;
4704 }
4705
4706 /* Builds and returns function declaration with NAME and TYPE. */
4707
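/* For illustration only (hypothetical name): a middle-end client that needs
   an external helper function could write

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__example_runtime_hook", fntype);

   which yields a public, external, artificial, nothrow FUNCTION_DECL.  */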
4708 tree
4709 build_fn_decl (const char *name, tree type)
4710 {
4711 tree id = get_identifier (name);
4712 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4713
4714 DECL_EXTERNAL (decl) = 1;
4715 TREE_PUBLIC (decl) = 1;
4716 DECL_ARTIFICIAL (decl) = 1;
4717 TREE_NOTHROW (decl) = 1;
4718
4719 return decl;
4720 }
4721
4722 vec<tree, va_gc> *all_translation_units;
4723
4724 /* Builds a new translation-unit decl with name NAME, queues it in the
4725 global list of translation-unit decls and returns it. */
4726
4727 tree
4728 build_translation_unit_decl (tree name)
4729 {
4730 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4731 name, NULL_TREE);
4732 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4733 vec_safe_push (all_translation_units, tu);
4734 return tu;
4735 }
4736
4737 \f
4738 /* BLOCK nodes are used to represent the structure of binding contours
4739 and declarations, once those contours have been exited and their contents
4740 compiled. This information is used for outputting debugging info. */
4741
4742 tree
4743 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4744 {
4745 tree block = make_node (BLOCK);
4746
4747 BLOCK_VARS (block) = vars;
4748 BLOCK_SUBBLOCKS (block) = subblocks;
4749 BLOCK_SUPERCONTEXT (block) = supercontext;
4750 BLOCK_CHAIN (block) = chain;
4751 return block;
4752 }
4753
4754 \f
4755 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4756
4757 LOC is the location to use in tree T. */
4758
4759 void
4760 protected_set_expr_location (tree t, location_t loc)
4761 {
4762 if (CAN_HAVE_LOCATION_P (t))
4763 SET_EXPR_LOCATION (t, loc);
4764 }
4765 \f
4766 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4767 is ATTRIBUTE. */
4768
4769 tree
4770 build_decl_attribute_variant (tree ddecl, tree attribute)
4771 {
4772 DECL_ATTRIBUTES (ddecl) = attribute;
4773 return ddecl;
4774 }
4775
4776 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4777 is ATTRIBUTE and its qualifiers are QUALS.
4778
4779 Record such modified types already made so we don't make duplicates. */
4780
4781 tree
4782 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4783 {
4784 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4785 {
4786 inchash::hash hstate;
4787 tree ntype;
4788 int i;
4789 tree t;
4790 enum tree_code code = TREE_CODE (ttype);
4791
4792 /* Building a distinct copy of a tagged type is inappropriate; it
4793 causes breakage in code that expects there to be a one-to-one
4794 relationship between a struct and its fields.
4795 build_duplicate_type is another solution (as used in
4796 handle_transparent_union_attribute), but that doesn't play well
4797 with the stronger C++ type identity model. */
4798 if (TREE_CODE (ttype) == RECORD_TYPE
4799 || TREE_CODE (ttype) == UNION_TYPE
4800 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4801 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4802 {
4803 warning (OPT_Wattributes,
4804 "ignoring attributes applied to %qT after definition",
4805 TYPE_MAIN_VARIANT (ttype));
4806 return build_qualified_type (ttype, quals);
4807 }
4808
4809 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4810 ntype = build_distinct_type_copy (ttype);
4811
4812 TYPE_ATTRIBUTES (ntype) = attribute;
4813
4814 hstate.add_int (code);
4815 if (TREE_TYPE (ntype))
4816 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4817 attribute_hash_list (attribute, hstate);
4818
4819 switch (TREE_CODE (ntype))
4820 {
4821 case FUNCTION_TYPE:
4822 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4823 break;
4824 case ARRAY_TYPE:
4825 if (TYPE_DOMAIN (ntype))
4826 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4827 break;
4828 case INTEGER_TYPE:
4829 t = TYPE_MAX_VALUE (ntype);
4830 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4831 hstate.add_object (TREE_INT_CST_ELT (t, i));
4832 break;
4833 case REAL_TYPE:
4834 case FIXED_POINT_TYPE:
4835 {
4836 unsigned int precision = TYPE_PRECISION (ntype);
4837 hstate.add_object (precision);
4838 }
4839 break;
4840 default:
4841 break;
4842 }
4843
4844 ntype = type_hash_canon (hstate.end(), ntype);
4845
4846 /* If the target-dependent attributes make NTYPE different from
4847 its canonical type, we will need to use structural equality
4848 checks for this type. */
4849 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4850 || !comp_type_attributes (ntype, ttype))
4851 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4852 else if (TYPE_CANONICAL (ntype) == ntype)
4853 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4854
4855 ttype = build_qualified_type (ntype, quals);
4856 }
4857 else if (TYPE_QUALS (ttype) != quals)
4858 ttype = build_qualified_type (ttype, quals);
4859
4860 return ttype;
4861 }
4862
4863 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4864 the same. */
4865
4866 static bool
4867 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4868 {
4869 tree cl1, cl2;
4870 for (cl1 = clauses1, cl2 = clauses2;
4871 cl1 && cl2;
4872 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4873 {
4874 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4875 return false;
4876 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4877 {
4878 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4879 OMP_CLAUSE_DECL (cl2)) != 1)
4880 return false;
4881 }
4882 switch (OMP_CLAUSE_CODE (cl1))
4883 {
4884 case OMP_CLAUSE_ALIGNED:
4885 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4886 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4887 return false;
4888 break;
4889 case OMP_CLAUSE_LINEAR:
4890 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4891 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4892 return false;
4893 break;
4894 case OMP_CLAUSE_SIMDLEN:
4895 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4896 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4897 return false;
4898 default:
4899 break;
4900 }
4901 }
4902 return true;
4903 }
4904
4905 /* Compare two constructor-element-type constants. Return true if the lists
4906 are known to be equal; otherwise return false. */
4907
4908 static bool
4909 simple_cst_list_equal (const_tree l1, const_tree l2)
4910 {
4911 while (l1 != NULL_TREE && l2 != NULL_TREE)
4912 {
4913 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4914 return false;
4915
4916 l1 = TREE_CHAIN (l1);
4917 l2 = TREE_CHAIN (l2);
4918 }
4919
4920 return l1 == l2;
4921 }
4922
4923 /* Compare two identifier nodes representing attributes. Either one may
4924 be in wrapped __ATTR__ form. Return true if they are the same, false
4925 otherwise. */
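/* For example, the identifiers for "format" and "__format__" compare equal
   under this function.  */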
4926
4927 static bool
4928 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4929 {
4930 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4931 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4932 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4933
4934 /* Identifiers can be compared directly for equality. */
4935 if (attr1 == attr2)
4936 return true;
4937
4938 /* If they are not equal, one may still be in the form
4939 'text' while the other one is in the form '__text__'. TODO:
4940 If we were storing attributes in normalized 'text' form, then
4941 this could all go away and we could take full advantage of
4942 the fact that we're comparing identifiers. :-) */
4943 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4944 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4945
4946 if (attr2_len == attr1_len + 4)
4947 {
4948 const char *p = IDENTIFIER_POINTER (attr2);
4949 const char *q = IDENTIFIER_POINTER (attr1);
4950 if (p[0] == '_' && p[1] == '_'
4951 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4952 && strncmp (q, p + 2, attr1_len) == 0)
4953 return true;
4954 }
4955 else if (attr2_len + 4 == attr1_len)
4956 {
4957 const char *p = IDENTIFIER_POINTER (attr2);
4958 const char *q = IDENTIFIER_POINTER (attr1);
4959 if (q[0] == '_' && q[1] == '_'
4960 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4961 && strncmp (q + 2, p, attr2_len) == 0)
4962 return true;
4963 }
4964
4965 return false;
4966 }
4967
4968 /* Compare two attributes for their value identity. Return true if the
4969 attribute values are known to be equal; otherwise return false. */
4970
4971 bool
4972 attribute_value_equal (const_tree attr1, const_tree attr2)
4973 {
4974 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4975 return true;
4976
4977 if (TREE_VALUE (attr1) != NULL_TREE
4978 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4979 && TREE_VALUE (attr2) != NULL_TREE
4980 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4981 {
4982 /* Handle attribute format. */
4983 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4984 {
4985 attr1 = TREE_VALUE (attr1);
4986 attr2 = TREE_VALUE (attr2);
4987 /* Compare the archetypes (printf/scanf/strftime/...). */
4988 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4989 TREE_VALUE (attr2)))
4990 return false;
4991 /* Archetypes are the same. Compare the rest. */
4992 return (simple_cst_list_equal (TREE_CHAIN (attr1),
4993 TREE_CHAIN (attr2)) == 1);
4994 }
4995 return (simple_cst_list_equal (TREE_VALUE (attr1),
4996 TREE_VALUE (attr2)) == 1);
4997 }
4998
4999 if ((flag_openmp || flag_openmp_simd)
5000 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5001 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5002 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5003 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5004 TREE_VALUE (attr2));
5005
5006 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5007 }
5008
5009 /* Return 0 if the attributes for two types are incompatible, 1 if they
5010 are compatible, and 2 if they are nearly compatible (which causes a
5011 warning to be generated). */
5012 int
5013 comp_type_attributes (const_tree type1, const_tree type2)
5014 {
5015 const_tree a1 = TYPE_ATTRIBUTES (type1);
5016 const_tree a2 = TYPE_ATTRIBUTES (type2);
5017 const_tree a;
5018
5019 if (a1 == a2)
5020 return 1;
5021 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5022 {
5023 const struct attribute_spec *as;
5024 const_tree attr;
5025
5026 as = lookup_attribute_spec (get_attribute_name (a));
5027 if (!as || as->affects_type_identity == false)
5028 continue;
5029
5030 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5031 if (!attr || !attribute_value_equal (a, attr))
5032 break;
5033 }
5034 if (!a)
5035 {
5036 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5037 {
5038 const struct attribute_spec *as;
5039
5040 as = lookup_attribute_spec (get_attribute_name (a));
5041 if (!as || as->affects_type_identity == false)
5042 continue;
5043
5044 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5045 break;
5046 /* We don't need to compare trees again, as we already did this
5047 in the first loop. */
5048 }
5049 /* All attributes that affect type identity are equal, so there
5050 is no need to call the target hook for comparison. */
5051 if (!a)
5052 return 1;
5053 }
5054 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5055 return 0;
5056 /* As some type combinations - like the default calling convention - might
5057 be compatible, we have to call the target hook to get the final result. */
5058 return targetm.comp_type_attributes (type1, type2);
5059 }
5060
5061 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5062 is ATTRIBUTE.
5063
5064 Record such modified types already made so we don't make duplicates. */
5065
5066 tree
5067 build_type_attribute_variant (tree ttype, tree attribute)
5068 {
5069 return build_type_attribute_qual_variant (ttype, attribute,
5070 TYPE_QUALS (ttype));
5071 }
5072
5073
5074 /* Reset the expression *EXPR_P, a size or position.
5075
5076 ??? We could reset all non-constant sizes or positions. But it's cheap
5077 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5078
5079 We need to reset self-referential sizes or positions because they cannot
5080 be gimplified and thus can contain a CALL_EXPR after the gimplification
5081 is finished, which will run afoul of LTO streaming. And they need to be
5082 reset to something essentially dummy but not constant, so as to preserve
5083 the properties of the object they are attached to. */
5084
5085 static inline void
5086 free_lang_data_in_one_sizepos (tree *expr_p)
5087 {
5088 tree expr = *expr_p;
5089 if (CONTAINS_PLACEHOLDER_P (expr))
5090 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5091 }
5092
5093
5094 /* Reset all the fields in a binfo node BINFO. We only keep
5095 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5096
5097 static void
5098 free_lang_data_in_binfo (tree binfo)
5099 {
5100 unsigned i;
5101 tree t;
5102
5103 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5104
5105 BINFO_VIRTUALS (binfo) = NULL_TREE;
5106 BINFO_BASE_ACCESSES (binfo) = NULL;
5107 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5108 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5109
5110 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5111 free_lang_data_in_binfo (t);
5112 }
5113
5114
5115 /* Reset all language specific information still present in TYPE. */
5116
5117 static void
5118 free_lang_data_in_type (tree type)
5119 {
5120 gcc_assert (TYPE_P (type));
5121
5122 /* Give the FE a chance to remove its own data first. */
5123 lang_hooks.free_lang_data (type);
5124
5125 TREE_LANG_FLAG_0 (type) = 0;
5126 TREE_LANG_FLAG_1 (type) = 0;
5127 TREE_LANG_FLAG_2 (type) = 0;
5128 TREE_LANG_FLAG_3 (type) = 0;
5129 TREE_LANG_FLAG_4 (type) = 0;
5130 TREE_LANG_FLAG_5 (type) = 0;
5131 TREE_LANG_FLAG_6 (type) = 0;
5132
5133 if (TREE_CODE (type) == FUNCTION_TYPE)
5134 {
5135 /* Remove the const and volatile qualifiers from arguments. The
5136 C++ front end removes them, but the C front end does not,
5137 leading to false ODR violation errors when merging two
5138 instances of the same function signature compiled by
5139 different front ends. */
5140 tree p;
5141
5142 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5143 {
5144 tree arg_type = TREE_VALUE (p);
5145
5146 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5147 {
5148 int quals = TYPE_QUALS (arg_type)
5149 & ~TYPE_QUAL_CONST
5150 & ~TYPE_QUAL_VOLATILE;
5151 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5152 free_lang_data_in_type (TREE_VALUE (p));
5153 }
5154 /* C++ FE uses TREE_PURPOSE to store initial values. */
5155 TREE_PURPOSE (p) = NULL;
5156 }
5157 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5158 TYPE_MINVAL (type) = NULL;
5159 }
5160 if (TREE_CODE (type) == METHOD_TYPE)
5161 {
5162 tree p;
5163
5164 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5165 {
5166 /* C++ FE uses TREE_PURPOSE to store initial values. */
5167 TREE_PURPOSE (p) = NULL;
5168 }
5169 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5170 TYPE_MINVAL (type) = NULL;
5171 }
5172
5173 /* Remove members that are not actually FIELD_DECLs from the field
5174 list of an aggregate. These occur in C++. */
5175 if (RECORD_OR_UNION_TYPE_P (type))
5176 {
5177 tree prev, member;
5178
5179 /* Note that TYPE_FIELDS can be shared across distinct
5180 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5181 to be removed, we cannot set its TREE_CHAIN to NULL.
5182 Otherwise, we would not be able to find all the other fields
5183 in the other instances of this TREE_TYPE.
5184
5185 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5186 prev = NULL_TREE;
5187 member = TYPE_FIELDS (type);
5188 while (member)
5189 {
5190 if (TREE_CODE (member) == FIELD_DECL
5191 || TREE_CODE (member) == TYPE_DECL)
5192 {
5193 if (prev)
5194 TREE_CHAIN (prev) = member;
5195 else
5196 TYPE_FIELDS (type) = member;
5197 prev = member;
5198 }
5199
5200 member = TREE_CHAIN (member);
5201 }
5202
5203 if (prev)
5204 TREE_CHAIN (prev) = NULL_TREE;
5205 else
5206 TYPE_FIELDS (type) = NULL_TREE;
5207
5208 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5209 and dangles the pointer from time to time. */
5210 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5211 TYPE_VFIELD (type) = NULL_TREE;
5212
5213 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5214 to enable ODR warnings about different method lists, doing so
5215 would impractically increase the size of the streamed LTO data.
5216 Keep the information about whether TYPE_METHODS was non-NULL;
5217 this is used by function.c and the pretty printers. */
5218 if (TYPE_METHODS (type))
5219 TYPE_METHODS (type) = error_mark_node;
5220 if (TYPE_BINFO (type))
5221 {
5222 free_lang_data_in_binfo (TYPE_BINFO (type));
5223 /* We need to preserve link to bases and virtual table for all
5224 polymorphic types to make devirtualization machinery working.
5225 Debug output cares only about bases, but output also
5226 virtual table pointers so merging of -fdevirtualize and
5227 -fno-devirtualize units is easier. */
5228 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5229 || !flag_devirtualize)
5230 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5231 && !BINFO_VTABLE (TYPE_BINFO (type)))
5232 || debug_info_level != DINFO_LEVEL_NONE))
5233 TYPE_BINFO (type) = NULL;
5234 }
5235 }
5236 else
5237 {
5238 /* For non-aggregate types, clear out the language slot (which
5239 overloads TYPE_BINFO). */
5240 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5241
5242 if (INTEGRAL_TYPE_P (type)
5243 || SCALAR_FLOAT_TYPE_P (type)
5244 || FIXED_POINT_TYPE_P (type))
5245 {
5246 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5247 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5248 }
5249 }
5250
5251 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5252 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5253
5254 if (TYPE_CONTEXT (type)
5255 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5256 {
5257 tree ctx = TYPE_CONTEXT (type);
5258 do
5259 {
5260 ctx = BLOCK_SUPERCONTEXT (ctx);
5261 }
5262 while (ctx && TREE_CODE (ctx) == BLOCK);
5263 TYPE_CONTEXT (type) = ctx;
5264 }
5265 }
5266
5267
5268 /* Return true if DECL may need an assembler name to be set. */
5269
5270 static inline bool
5271 need_assembler_name_p (tree decl)
5272 {
5273 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5274 Rule merging. This makes type_odr_p return true on those types during
5275 LTO, and by comparing the mangled names we can tell which types are
5276 intended to be equivalent across compilation units.
5277
5278 We do not store names of type_in_anonymous_namespace_p types.
5279
5280 Record, union and enumeration types have linkage that allows us
5281 to check type_in_anonymous_namespace_p. We do not mangle compound types
5282 that can always be compared structurally.
5283
5284 Similarly for builtin types, we compare properties of their main variant.
5285 A special case is integer types, where mangling does distinguish
5286 between char/signed char/unsigned char etc. Storing names for these lets
5287 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5288 See cp/mangle.c:write_builtin_type for details. */
5289
5290 if (flag_lto_odr_type_mering
5291 && TREE_CODE (decl) == TYPE_DECL
5292 && DECL_NAME (decl)
5293 && decl == TYPE_NAME (TREE_TYPE (decl))
5294 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5295 && (type_with_linkage_p (TREE_TYPE (decl))
5296 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5297 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5298 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5299 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5300 if (TREE_CODE (decl) != FUNCTION_DECL
5301 && TREE_CODE (decl) != VAR_DECL)
5302 return false;
5303
5304 /* If DECL already has its assembler name set, it does not need a
5305 new one. */
5306 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5307 || DECL_ASSEMBLER_NAME_SET_P (decl))
5308 return false;
5309
5310 /* Abstract decls do not need an assembler name. */
5311 if (DECL_ABSTRACT_P (decl))
5312 return false;
5313
5314 /* For VAR_DECLs, only static, public and external symbols need an
5315 assembler name. */
5316 if (TREE_CODE (decl) == VAR_DECL
5317 && !TREE_STATIC (decl)
5318 && !TREE_PUBLIC (decl)
5319 && !DECL_EXTERNAL (decl))
5320 return false;
5321
5322 if (TREE_CODE (decl) == FUNCTION_DECL)
5323 {
5324 /* Do not set assembler name on builtins. Allow RTL expansion to
5325 decide whether to expand inline or via a regular call. */
5326 if (DECL_BUILT_IN (decl)
5327 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5328 return false;
5329
5330 /* Functions represented in the callgraph need an assembler name. */
5331 if (cgraph_node::get (decl) != NULL)
5332 return true;
5333
5334 /* Unused and not public functions don't need an assembler name. */
5335 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5336 return false;
5337 }
5338
5339 return true;
5340 }
5341
5342
5343 /* Reset all language specific information still present in symbol
5344 DECL. */
5345
5346 static void
5347 free_lang_data_in_decl (tree decl)
5348 {
5349 gcc_assert (DECL_P (decl));
5350
5351 /* Give the FE a chance to remove its own data first. */
5352 lang_hooks.free_lang_data (decl);
5353
5354 TREE_LANG_FLAG_0 (decl) = 0;
5355 TREE_LANG_FLAG_1 (decl) = 0;
5356 TREE_LANG_FLAG_2 (decl) = 0;
5357 TREE_LANG_FLAG_3 (decl) = 0;
5358 TREE_LANG_FLAG_4 (decl) = 0;
5359 TREE_LANG_FLAG_5 (decl) = 0;
5360 TREE_LANG_FLAG_6 (decl) = 0;
5361
5362 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5363 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5364 if (TREE_CODE (decl) == FIELD_DECL)
5365 {
5366 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5367 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5368 DECL_QUALIFIER (decl) = NULL_TREE;
5369 }
5370
5371 if (TREE_CODE (decl) == FUNCTION_DECL)
5372 {
5373 struct cgraph_node *node;
5374 if (!(node = cgraph_node::get (decl))
5375 || (!node->definition && !node->clones))
5376 {
5377 if (node)
5378 node->release_body ();
5379 else
5380 {
5381 release_function_body (decl);
5382 DECL_ARGUMENTS (decl) = NULL;
5383 DECL_RESULT (decl) = NULL;
5384 DECL_INITIAL (decl) = error_mark_node;
5385 }
5386 }
5387 if (gimple_has_body_p (decl))
5388 {
5389 tree t;
5390
5391 /* If DECL has a gimple body, then the context for its
5392 arguments must be DECL. Otherwise, it doesn't really
5393 matter, as we will not be emitting any code for DECL. In
5394 general, there may be other instances of DECL created by
5395 the front end and since PARM_DECLs are generally shared,
5396 their DECL_CONTEXT changes as the replicas of DECL are
5397 created. The only time where DECL_CONTEXT is important
5398 is for the FUNCTION_DECLs that have a gimple body (since
5399 the PARM_DECL will be used in the function's body). */
5400 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5401 DECL_CONTEXT (t) = decl;
5402 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5403 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5404 = target_option_default_node;
5405 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5406 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5407 = optimization_default_node;
5408 }
5409
5410 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5411 At this point, it is not needed anymore. */
5412 DECL_SAVED_TREE (decl) = NULL_TREE;
5413
5414 /* Clear the abstract origin if it refers to a method. Otherwise
5415 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5416 origin will not be output correctly. */
5417 if (DECL_ABSTRACT_ORIGIN (decl)
5418 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5419 && RECORD_OR_UNION_TYPE_P
5420 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5421 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5422
5423 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5424 DECL_VINDEX referring to itself into a vtable slot number as it
5425 should. Happens with functions that are copied and then forgotten
5426 about. Just clear it, it won't matter anymore. */
5427 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5428 DECL_VINDEX (decl) = NULL_TREE;
5429 }
5430 else if (TREE_CODE (decl) == VAR_DECL)
5431 {
5432 if ((DECL_EXTERNAL (decl)
5433 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5434 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5435 DECL_INITIAL (decl) = NULL_TREE;
5436 }
5437 else if (TREE_CODE (decl) == TYPE_DECL
5438 || TREE_CODE (decl) == FIELD_DECL)
5439 DECL_INITIAL (decl) = NULL_TREE;
5440 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5441 && DECL_INITIAL (decl)
5442 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5443 {
5444 /* Strip builtins from the translation-unit BLOCK. We still have targets
5445 without builtin_decl_explicit support, and builtins are shared
5446 nodes, so we can't use TREE_CHAIN in multiple lists. */
5447 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5448 while (*nextp)
5449 {
5450 tree var = *nextp;
5451 if (TREE_CODE (var) == FUNCTION_DECL
5452 && DECL_BUILT_IN (var))
5453 *nextp = TREE_CHAIN (var);
5454 else
5455 nextp = &TREE_CHAIN (var);
5456 }
5457 }
5458 }
5459
5460
5461 /* Data used when collecting DECLs and TYPEs for language data removal. */
5462
5463 struct free_lang_data_d
5464 {
5465 /* Worklist to avoid excessive recursion. */
5466 vec<tree> worklist;
5467
5468 /* Set of traversed objects. Used to avoid duplicate visits. */
5469 hash_set<tree> *pset;
5470
5471 /* Array of symbols to process with free_lang_data_in_decl. */
5472 vec<tree> decls;
5473
5474 /* Array of types to process with free_lang_data_in_type. */
5475 vec<tree> types;
5476 };
5477
5478
5479 /* Save all language fields needed to generate proper debug information
5480 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5481
5482 static void
5483 save_debug_info_for_decl (tree t)
5484 {
5485 /*struct saved_debug_info_d *sdi;*/
5486
5487 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5488
5489 /* FIXME. Partial implementation for saving debug info removed. */
5490 }
5491
5492
5493 /* Save all language fields needed to generate proper debug information
5494 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5495
5496 static void
5497 save_debug_info_for_type (tree t)
5498 {
5499 /*struct saved_debug_info_d *sdi;*/
5500
5501 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5502
5503 /* FIXME. Partial implementation for saving debug info removed. */
5504 }
5505
5506
5507 /* Add type or decl T to one of the list of tree nodes that need their
5508 language data removed. The lists are held inside FLD. */
5509
5510 static void
5511 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5512 {
5513 if (DECL_P (t))
5514 {
5515 fld->decls.safe_push (t);
5516 if (debug_info_level > DINFO_LEVEL_TERSE)
5517 save_debug_info_for_decl (t);
5518 }
5519 else if (TYPE_P (t))
5520 {
5521 fld->types.safe_push (t);
5522 if (debug_info_level > DINFO_LEVEL_TERSE)
5523 save_debug_info_for_type (t);
5524 }
5525 else
5526 gcc_unreachable ();
5527 }
5528
5529 /* Push tree node T into FLD->WORKLIST. */
5530
5531 static inline void
5532 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5533 {
5534 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5535 fld->worklist.safe_push (t);
5536 }
5537
5538
5539 /* Operand callback helper for free_lang_data_in_node. *TP is the
5540 subtree operand being considered. */
5541
5542 static tree
5543 find_decls_types_r (tree *tp, int *ws, void *data)
5544 {
5545 tree t = *tp;
5546 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5547
5548 if (TREE_CODE (t) == TREE_LIST)
5549 return NULL_TREE;
5550
5551 /* Language specific nodes will be removed, so there is no need
5552 to gather anything under them. */
5553 if (is_lang_specific (t))
5554 {
5555 *ws = 0;
5556 return NULL_TREE;
5557 }
5558
5559 if (DECL_P (t))
5560 {
5561 /* Note that walk_tree does not traverse every possible field in
5562 decls, so we have to do our own traversals here. */
5563 add_tree_to_fld_list (t, fld);
5564
5565 fld_worklist_push (DECL_NAME (t), fld);
5566 fld_worklist_push (DECL_CONTEXT (t), fld);
5567 fld_worklist_push (DECL_SIZE (t), fld);
5568 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5569
5570 /* We are going to remove everything under DECL_INITIAL for
5571 TYPE_DECLs. No point walking them. */
5572 if (TREE_CODE (t) != TYPE_DECL)
5573 fld_worklist_push (DECL_INITIAL (t), fld);
5574
5575 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5576 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5577
5578 if (TREE_CODE (t) == FUNCTION_DECL)
5579 {
5580 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5581 fld_worklist_push (DECL_RESULT (t), fld);
5582 }
5583 else if (TREE_CODE (t) == TYPE_DECL)
5584 {
5585 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5586 }
5587 else if (TREE_CODE (t) == FIELD_DECL)
5588 {
5589 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5590 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5591 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5592 fld_worklist_push (DECL_FCONTEXT (t), fld);
5593 }
5594
5595 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5596 && DECL_HAS_VALUE_EXPR_P (t))
5597 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5598
5599 if (TREE_CODE (t) != FIELD_DECL
5600 && TREE_CODE (t) != TYPE_DECL)
5601 fld_worklist_push (TREE_CHAIN (t), fld);
5602 *ws = 0;
5603 }
5604 else if (TYPE_P (t))
5605 {
5606 /* Note that walk_tree does not traverse every possible field in
5607 types, so we have to do our own traversals here. */
5608 add_tree_to_fld_list (t, fld);
5609
5610 if (!RECORD_OR_UNION_TYPE_P (t))
5611 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5612 fld_worklist_push (TYPE_SIZE (t), fld);
5613 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5614 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5615 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5616 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5617 fld_worklist_push (TYPE_NAME (t), fld);
5618 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5619 them and thus do not want to reach unused pointer types
5620 this way. */
5621 if (!POINTER_TYPE_P (t))
5622 fld_worklist_push (TYPE_MINVAL (t), fld);
5623 if (!RECORD_OR_UNION_TYPE_P (t))
5624 fld_worklist_push (TYPE_MAXVAL (t), fld);
5625 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5626 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5627 do not want to reach unused variants this way. */
5628 if (TYPE_CONTEXT (t))
5629 {
5630 tree ctx = TYPE_CONTEXT (t);
5631 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5632 So push that instead. */
5633 while (ctx && TREE_CODE (ctx) == BLOCK)
5634 ctx = BLOCK_SUPERCONTEXT (ctx);
5635 fld_worklist_push (ctx, fld);
5636 }
5637 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5638 want to reach unused types this way. */
5639
5640 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5641 {
5642 unsigned i;
5643 tree tem;
5644 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5645 fld_worklist_push (TREE_TYPE (tem), fld);
5646 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5647 if (tem
5648 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5649 && TREE_CODE (tem) == TREE_LIST)
5650 do
5651 {
5652 fld_worklist_push (TREE_VALUE (tem), fld);
5653 tem = TREE_CHAIN (tem);
5654 }
5655 while (tem);
5656 }
5657 if (RECORD_OR_UNION_TYPE_P (t))
5658 {
5659 tree tem;
5660 /* Push all TYPE_FIELDS - there can be interleaved interesting
5661 and non-interesting things. */
5662 tem = TYPE_FIELDS (t);
5663 while (tem)
5664 {
5665 if (TREE_CODE (tem) == FIELD_DECL
5666 || TREE_CODE (tem) == TYPE_DECL)
5667 fld_worklist_push (tem, fld);
5668 tem = TREE_CHAIN (tem);
5669 }
5670 }
5671
5672 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5673 *ws = 0;
5674 }
5675 else if (TREE_CODE (t) == BLOCK)
5676 {
5677 tree tem;
5678 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5679 fld_worklist_push (tem, fld);
5680 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5681 fld_worklist_push (tem, fld);
5682 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5683 }
5684
5685 if (TREE_CODE (t) != IDENTIFIER_NODE
5686 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5687 fld_worklist_push (TREE_TYPE (t), fld);
5688
5689 return NULL_TREE;
5690 }
5691
5692
5693 /* Find decls and types in T. */
5694
5695 static void
5696 find_decls_types (tree t, struct free_lang_data_d *fld)
5697 {
5698 while (1)
5699 {
5700 if (!fld->pset->contains (t))
5701 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5702 if (fld->worklist.is_empty ())
5703 break;
5704 t = fld->worklist.pop ();
5705 }
5706 }
5707
5708 /* Translate all the types in LIST into the corresponding runtime
5709 types. */
5710
5711 static tree
5712 get_eh_types_for_runtime (tree list)
5713 {
5714 tree head, prev;
5715
5716 if (list == NULL_TREE)
5717 return NULL_TREE;
5718
5719 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5720 prev = head;
5721 list = TREE_CHAIN (list);
5722 while (list)
5723 {
5724 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5725 TREE_CHAIN (prev) = n;
5726 prev = TREE_CHAIN (prev);
5727 list = TREE_CHAIN (list);
5728 }
5729
5730 return head;
5731 }
5732
5733
5734 /* Find decls and types referenced in EH region R and store them in
5735 FLD->DECLS and FLD->TYPES. */
5736
5737 static void
5738 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5739 {
5740 switch (r->type)
5741 {
5742 case ERT_CLEANUP:
5743 break;
5744
5745 case ERT_TRY:
5746 {
5747 eh_catch c;
5748
5749 /* The types referenced in each catch must first be changed to the
5750 EH types used at runtime. This removes references to FE types
5751 in the region. */
5752 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5753 {
5754 c->type_list = get_eh_types_for_runtime (c->type_list);
5755 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5756 }
5757 }
5758 break;
5759
5760 case ERT_ALLOWED_EXCEPTIONS:
5761 r->u.allowed.type_list
5762 = get_eh_types_for_runtime (r->u.allowed.type_list);
5763 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5764 break;
5765
5766 case ERT_MUST_NOT_THROW:
5767 walk_tree (&r->u.must_not_throw.failure_decl,
5768 find_decls_types_r, fld, fld->pset);
5769 break;
5770 }
5771 }
5772
5773
5774 /* Find decls and types referenced in cgraph node N and store them in
5775 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5776 look for *every* kind of DECL and TYPE node reachable from N,
5777 including those embedded inside types and decls (i.e., TYPE_DECLs,
5778 NAMESPACE_DECLs, etc). */
5779
5780 static void
5781 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5782 {
5783 basic_block bb;
5784 struct function *fn;
5785 unsigned ix;
5786 tree t;
5787
5788 find_decls_types (n->decl, fld);
5789
5790 if (!gimple_has_body_p (n->decl))
5791 return;
5792
5793 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5794
5795 fn = DECL_STRUCT_FUNCTION (n->decl);
5796
5797 /* Traverse locals. */
5798 FOR_EACH_LOCAL_DECL (fn, ix, t)
5799 find_decls_types (t, fld);
5800
5801 /* Traverse EH regions in FN. */
5802 {
5803 eh_region r;
5804 FOR_ALL_EH_REGION_FN (r, fn)
5805 find_decls_types_in_eh_region (r, fld);
5806 }
5807
5808 /* Traverse every statement in FN. */
5809 FOR_EACH_BB_FN (bb, fn)
5810 {
5811 gphi_iterator psi;
5812 gimple_stmt_iterator si;
5813 unsigned i;
5814
5815 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5816 {
5817 gphi *phi = psi.phi ();
5818
5819 for (i = 0; i < gimple_phi_num_args (phi); i++)
5820 {
5821 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5822 find_decls_types (*arg_p, fld);
5823 }
5824 }
5825
5826 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5827 {
5828 gimple *stmt = gsi_stmt (si);
5829
5830 if (is_gimple_call (stmt))
5831 find_decls_types (gimple_call_fntype (stmt), fld);
5832
5833 for (i = 0; i < gimple_num_ops (stmt); i++)
5834 {
5835 tree arg = gimple_op (stmt, i);
5836 find_decls_types (arg, fld);
5837 }
5838 }
5839 }
5840 }
5841
5842
5843 /* Find decls and types referenced in varpool node N and store them in
5844 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5845 look for *every* kind of DECL and TYPE node reachable from N,
5846 including those embedded inside types and decls (i.e., TYPE_DECLs,
5847 NAMESPACE_DECLs, etc). */
5848
5849 static void
5850 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5851 {
5852 find_decls_types (v->decl, fld);
5853 }
5854
5855 /* If T needs an assembler name, have one created for it. */
5856
5857 void
5858 assign_assembler_name_if_neeeded (tree t)
5859 {
5860 if (need_assembler_name_p (t))
5861 {
5862 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5863 diagnostics that use input_location to show locus
5864 information. The problem here is that, at this point,
5865 input_location is generally anchored to the end of the file
5866 (since the parser is long gone), so we don't have a good
5867 position to pin it to.
5868
5869 To alleviate this problem, this uses the location of T's
5870 declaration. Examples of this are
5871 testsuite/g++.dg/template/cond2.C and
5872 testsuite/g++.dg/template/pr35240.C. */
5873 location_t saved_location = input_location;
5874 input_location = DECL_SOURCE_LOCATION (t);
5875
5876 decl_assembler_name (t);
5877
5878 input_location = saved_location;
5879 }
5880 }
5881
5882
5883 /* Free language specific information for every operand and expression
5884 in every node of the call graph. This process operates in three stages:
5885
5886 1- Every callgraph node and varpool node is traversed looking for
5887 decls and types embedded in them. This is a more exhaustive
5888 search than that done by find_referenced_vars, because it will
5889 also collect individual fields, decls embedded in types, etc.
5890
5891 2- All the decls found are sent to free_lang_data_in_decl.
5892
5893 3- All the types found are sent to free_lang_data_in_type.
5894
5895 The ordering between decls and types is important because
5896 free_lang_data_in_decl sets assembler names, which includes
5897 mangling. So types cannot be freed up until assembler names have
5898 been set up. */
5899
5900 static void
5901 free_lang_data_in_cgraph (void)
5902 {
5903 struct cgraph_node *n;
5904 varpool_node *v;
5905 struct free_lang_data_d fld;
5906 tree t;
5907 unsigned i;
5908 alias_pair *p;
5909
5910 /* Initialize sets and arrays to store referenced decls and types. */
5911 fld.pset = new hash_set<tree>;
5912 fld.worklist.create (0);
5913 fld.decls.create (100);
5914 fld.types.create (100);
5915
5916 /* Find decls and types in the body of every function in the callgraph. */
5917 FOR_EACH_FUNCTION (n)
5918 find_decls_types_in_node (n, &fld);
5919
5920 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5921 find_decls_types (p->decl, &fld);
5922
5923 /* Find decls and types in every varpool symbol. */
5924 FOR_EACH_VARIABLE (v)
5925 find_decls_types_in_var (v, &fld);
5926
5927 /* Set the assembler name on every decl found. We need to do this
5928 now because free_lang_data_in_decl will invalidate data needed
5929 for mangling. This breaks mangling on interdependent decls. */
5930 FOR_EACH_VEC_ELT (fld.decls, i, t)
5931 assign_assembler_name_if_neeeded (t);
5932
5933 /* Traverse every decl found freeing its language data. */
5934 FOR_EACH_VEC_ELT (fld.decls, i, t)
5935 free_lang_data_in_decl (t);
5936
5937 /* Traverse every type found freeing its language data. */
5938 FOR_EACH_VEC_ELT (fld.types, i, t)
5939 free_lang_data_in_type (t);
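/* With checking enabled, verify that the types are still internally
   consistent after their language data has been stripped.  */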
5940 #ifdef ENABLE_CHECKING
5941 FOR_EACH_VEC_ELT (fld.types, i, t)
5942 verify_type (t);
5943 #endif
5944
5945 delete fld.pset;
5946 fld.worklist.release ();
5947 fld.decls.release ();
5948 fld.types.release ();
5949 }
5950
5951
5952 /* Free resources used by the front end but not needed once it is done. */
5953
5954 static unsigned
5955 free_lang_data (void)
5956 {
5957 unsigned i;
5958
5959 /* If we are the LTO frontend we have freed lang-specific data already. */
5960 if (in_lto_p
5961 || (!flag_generate_lto && !flag_generate_offload))
5962 return 0;
5963
5964 /* Allocate and assign alias sets to the standard integer types
5965 while the slots still hold the types as the front ends generated them. */
5966 for (i = 0; i < itk_none; ++i)
5967 if (integer_types[i])
5968 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5969
5970 /* Traverse the IL resetting language specific information for
5971 operands, expressions, etc. */
5972 free_lang_data_in_cgraph ();
5973
5974 /* Create gimple variants for common types. */
5975 ptrdiff_type_node = integer_type_node;
5976 fileptr_type_node = ptr_type_node;
5977
5978 /* Reset some langhooks. Do not reset types_compatible_p, it may
5979 still be used indirectly via the get_alias_set langhook. */
5980 lang_hooks.dwarf_name = lhd_dwarf_name;
5981 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5982 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5983
5984 /* We do not want the default decl_assembler_name implementation,
5985 rather if we have fixed everything we want a wrapper around it
5986 asserting that all non-local symbols already got their assembler
5987 name and only produce assembler names for local symbols. Or rather
5988 make sure we never call decl_assembler_name on local symbols and
5989 devise a separate, middle-end private scheme for it. */
5990
5991 /* Reset diagnostic machinery. */
5992 tree_diagnostics_defaults (global_dc);
5993
5994 return 0;
5995 }
5996
5997
5998 namespace {
5999
6000 const pass_data pass_data_ipa_free_lang_data =
6001 {
6002 SIMPLE_IPA_PASS, /* type */
6003 "*free_lang_data", /* name */
6004 OPTGROUP_NONE, /* optinfo_flags */
6005 TV_IPA_FREE_LANG_DATA, /* tv_id */
6006 0, /* properties_required */
6007 0, /* properties_provided */
6008 0, /* properties_destroyed */
6009 0, /* todo_flags_start */
6010 0, /* todo_flags_finish */
6011 };
6012
6013 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6014 {
6015 public:
6016 pass_ipa_free_lang_data (gcc::context *ctxt)
6017 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6018 {}
6019
6020 /* opt_pass methods: */
6021 virtual unsigned int execute (function *) { return free_lang_data (); }
6022
6023 }; // class pass_ipa_free_lang_data
6024
6025 } // anon namespace
6026
6027 simple_ipa_opt_pass *
6028 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6029 {
6030 return new pass_ipa_free_lang_data (ctxt);
6031 }
6032
6033 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6034 ATTR_NAME. Also used internally by remove_attribute(). */
6035 bool
6036 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6037 {
6038 size_t ident_len = IDENTIFIER_LENGTH (ident);
6039
6040 if (ident_len == attr_len)
6041 {
6042 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6043 return true;
6044 }
6045 else if (ident_len == attr_len + 4)
6046 {
6047 /* There is the possibility that ATTR is 'text' and IDENT is
6048 '__text__'. */
6049 const char *p = IDENTIFIER_POINTER (ident);
6050 if (p[0] == '_' && p[1] == '_'
6051 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6052 && strncmp (attr_name, p + 2, attr_len) == 0)
6053 return true;
6054 }
6055
6056 return false;
6057 }
6058
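/* For illustration, the is_attribute_p wrapper in tree.h funnels into
   private_is_attribute_p above, so both spellings of an attribute
   identifier match the plain name (a usage sketch, not code from any
   particular caller):

     is_attribute_p ("packed", get_identifier ("packed"))      -> true
     is_attribute_p ("packed", get_identifier ("__packed__"))  -> true
     is_attribute_p ("packed", get_identifier ("aligned"))     -> false  */
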
6059 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6060 of ATTR_NAME, and LIST is not NULL_TREE. */
6061 tree
6062 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6063 {
6064 while (list)
6065 {
6066 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6067
6068 if (ident_len == attr_len)
6069 {
6070 if (!strcmp (attr_name,
6071 IDENTIFIER_POINTER (get_attribute_name (list))))
6072 break;
6073 }
6074 /* TODO: If we made sure that attributes were stored in the
6075 canonical form without '__...__' (i.e., as in 'text' as opposed
6076 to '__text__') then we could avoid the following case. */
6077 else if (ident_len == attr_len + 4)
6078 {
6079 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6080 if (p[0] == '_' && p[1] == '_'
6081 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6082 && strncmp (attr_name, p + 2, attr_len) == 0)
6083 break;
6084 }
6085 list = TREE_CHAIN (list);
6086 }
6087
6088 return list;
6089 }
6090
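/* The lookup_attribute wrapper in tree.h calls the routine above with
   strlen of the name.  A typical use (a sketch, assuming DECL already
   carries parsed attributes) retrieves the TREE_LIST node whose
   TREE_VALUE holds the attribute's arguments:

     tree attr = lookup_attribute ("aligned", DECL_ATTRIBUTES (decl));
     if (attr != NULL_TREE)
       {
         tree args = TREE_VALUE (attr);
         ...
       }  */
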
6091 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6092 return the first element of LIST whose attribute name starts with
6093 ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6094 '__text__'). */
6095
6096 tree
6097 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6098 tree list)
6099 {
6100 while (list)
6101 {
6102 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6103
6104 if (attr_len > ident_len)
6105 {
6106 list = TREE_CHAIN (list);
6107 continue;
6108 }
6109
6110 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6111
6112 if (strncmp (attr_name, p, attr_len) == 0)
6113 break;
6114
6115 /* TODO: If we made sure that attributes were stored in the
6116 canonical form without '__...__' (i.e., as in 'text' as opposed
6117 to '__text__') then we could avoid the following case. */
6118 if (p[0] == '_' && p[1] == '_'
6119     && strncmp (attr_name, p + 2, attr_len) == 0)
6120 break;
6121
6122 list = TREE_CHAIN (list);
6123 }
6124
6125 return list;
6126 }
6127
6128
6129 /* A variant of lookup_attribute() that can be used with an identifier
6130 as the first argument, and where the identifier can be either
6131 'text' or '__text__'.
6132
6133 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6134 return a pointer to the attribute's list element if the attribute
6135 is part of the list, or NULL_TREE if not found. If the attribute
6136 appears more than once, this only returns the first occurrence; the
6137 TREE_CHAIN of the return value should be passed back in if further
6138 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6139 can be in the form 'text' or '__text__'. */
6140 static tree
6141 lookup_ident_attribute (tree attr_identifier, tree list)
6142 {
6143 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6144
6145 while (list)
6146 {
6147 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6148 == IDENTIFIER_NODE);
6149
6150 if (cmp_attrib_identifiers (attr_identifier,
6151 get_attribute_name (list)))
6152 /* Found it. */
6153 break;
6154 list = TREE_CHAIN (list);
6155 }
6156
6157 return list;
6158 }
6159
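/* A caller that wants every occurrence of an attribute feeds the
   TREE_CHAIN of each hit back in, the way merge_attributes and
   attribute_list_contained below do (an illustrative sketch; NAME and
   ATTRS stand for any identifier and attribute list):

     for (tree a = lookup_ident_attribute (name, attrs);
          a != NULL_TREE;
          a = lookup_ident_attribute (name, TREE_CHAIN (a)))
       ...process one occurrence of the attribute...  */
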
6160 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6161 modified list. */
6162
6163 tree
6164 remove_attribute (const char *attr_name, tree list)
6165 {
6166 tree *p;
6167 size_t attr_len = strlen (attr_name);
6168
6169 gcc_checking_assert (attr_name[0] != '_');
6170
6171 for (p = &list; *p; )
6172 {
6173 tree l = *p;
6174 /* TODO: If we were storing attributes in normalized form, here
6175 we could use a simple strcmp(). */
6176 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6177 *p = TREE_CHAIN (l);
6178 else
6179 p = &TREE_CHAIN (l);
6180 }
6181
6182 return list;
6183 }
6184
6185 /* Return an attribute list that is the union of A1 and A2. */
6186
6187 tree
6188 merge_attributes (tree a1, tree a2)
6189 {
6190 tree attributes;
6191
6192 /* Either one unset? Take the set one. */
6193
6194 if ((attributes = a1) == 0)
6195 attributes = a2;
6196
6197 /* One that completely contains the other? Take it. */
6198
6199 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6200 {
6201 if (attribute_list_contained (a2, a1))
6202 attributes = a2;
6203 else
6204 {
6205 /* Pick the longest list, and hang on the other list. */
6206
6207 if (list_length (a1) < list_length (a2))
6208 attributes = a2, a2 = a1;
6209
6210 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6211 {
6212 tree a;
6213 for (a = lookup_ident_attribute (get_attribute_name (a2),
6214 attributes);
6215 a != NULL_TREE && !attribute_value_equal (a, a2);
6216 a = lookup_ident_attribute (get_attribute_name (a2),
6217 TREE_CHAIN (a)))
6218 ;
6219 if (a == NULL_TREE)
6220 {
6221 a1 = copy_node (a2);
6222 TREE_CHAIN (a1) = attributes;
6223 attributes = a1;
6224 }
6225 }
6226 }
6227 }
6228 return attributes;
6229 }
6230
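/* For example, merging the attribute lists of two declarations such as

     __attribute__ ((aligned (8))) extern int x;
     __attribute__ ((used, aligned (8))) int x;

   yields a list containing "used" and a single "aligned (8)": attributes
   already present with an equal value are not duplicated.  (Illustrative
   only; the exact lists depend on how the front end built them.)  */
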
6231 /* Given types T1 and T2, merge their attributes and return
6232 the result. */
6233
6234 tree
6235 merge_type_attributes (tree t1, tree t2)
6236 {
6237 return merge_attributes (TYPE_ATTRIBUTES (t1),
6238 TYPE_ATTRIBUTES (t2));
6239 }
6240
6241 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6242 the result. */
6243
6244 tree
6245 merge_decl_attributes (tree olddecl, tree newdecl)
6246 {
6247 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6248 DECL_ATTRIBUTES (newdecl));
6249 }
6250
6251 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6252
6253 /* Specialization of merge_decl_attributes for various Windows targets.
6254
6255 This handles the following situation:
6256
6257 __declspec (dllimport) int foo;
6258 int foo;
6259
6260 The second instance of `foo' nullifies the dllimport. */
6261
6262 tree
6263 merge_dllimport_decl_attributes (tree old, tree new_tree)
6264 {
6265 tree a;
6266 int delete_dllimport_p = 1;
6267
6268 /* What we need to do here is remove from `old' dllimport if it doesn't
6269 appear in `new'. dllimport behaves like extern: if a declaration is
6270 marked dllimport and a definition appears later, then the object
6271 is not dllimport'd. We also remove a `new' dllimport if the old list
6272 contains dllexport: dllexport always overrides dllimport, regardless
6273 of the order of declaration. */
6274 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6275 delete_dllimport_p = 0;
6276 else if (DECL_DLLIMPORT_P (new_tree)
6277 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6278 {
6279 DECL_DLLIMPORT_P (new_tree) = 0;
6280 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6281 "dllimport ignored", new_tree);
6282 }
6283 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6284 {
6285 /* Warn about overriding a symbol that has already been used, e.g.:
6286 extern int __attribute__ ((dllimport)) foo;
6287 int* bar () {return &foo;}
6288 int foo;
6289 */
6290 if (TREE_USED (old))
6291 {
6292 warning (0, "%q+D redeclared without dllimport attribute "
6293 "after being referenced with dll linkage", new_tree);
6294 /* If we have used a variable's address with dllimport linkage,
6295 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6296 decl may already have had TREE_CONSTANT computed.
6297 We still remove the attribute so that assembler code refers
6298 to '&foo' rather than '_imp__foo'. */
6299 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6300 DECL_DLLIMPORT_P (new_tree) = 1;
6301 }
6302
6303 /* Let an inline definition silently override the external reference,
6304 but otherwise warn about attribute inconsistency. */
6305 else if (TREE_CODE (new_tree) == VAR_DECL
6306 || !DECL_DECLARED_INLINE_P (new_tree))
6307 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6308 "previous dllimport ignored", new_tree);
6309 }
6310 else
6311 delete_dllimport_p = 0;
6312
6313 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6314
6315 if (delete_dllimport_p)
6316 a = remove_attribute ("dllimport", a);
6317
6318 return a;
6319 }
6320
6321 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6322 struct attribute_spec.handler. */
6323
6324 tree
6325 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6326 bool *no_add_attrs)
6327 {
6328 tree node = *pnode;
6329 bool is_dllimport;
6330
6331 /* These attributes may apply to structure and union types being created,
6332 but otherwise should pass to the declaration involved. */
6333 if (!DECL_P (node))
6334 {
6335 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6336 | (int) ATTR_FLAG_ARRAY_NEXT))
6337 {
6338 *no_add_attrs = true;
6339 return tree_cons (name, args, NULL_TREE);
6340 }
6341 if (TREE_CODE (node) == RECORD_TYPE
6342 || TREE_CODE (node) == UNION_TYPE)
6343 {
6344 node = TYPE_NAME (node);
6345 if (!node)
6346 return NULL_TREE;
6347 }
6348 else
6349 {
6350 warning (OPT_Wattributes, "%qE attribute ignored",
6351 name);
6352 *no_add_attrs = true;
6353 return NULL_TREE;
6354 }
6355 }
6356
6357 if (TREE_CODE (node) != FUNCTION_DECL
6358 && TREE_CODE (node) != VAR_DECL
6359 && TREE_CODE (node) != TYPE_DECL)
6360 {
6361 *no_add_attrs = true;
6362 warning (OPT_Wattributes, "%qE attribute ignored",
6363 name);
6364 return NULL_TREE;
6365 }
6366
6367 if (TREE_CODE (node) == TYPE_DECL
6368 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6369 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6370 {
6371 *no_add_attrs = true;
6372 warning (OPT_Wattributes, "%qE attribute ignored",
6373 name);
6374 return NULL_TREE;
6375 }
6376
6377 is_dllimport = is_attribute_p ("dllimport", name);
6378
6379 /* Report error on dllimport ambiguities seen now before they cause
6380 any damage. */
6381 if (is_dllimport)
6382 {
6383 /* Honor any target-specific overrides. */
6384 if (!targetm.valid_dllimport_attribute_p (node))
6385 *no_add_attrs = true;
6386
6387 else if (TREE_CODE (node) == FUNCTION_DECL
6388 && DECL_DECLARED_INLINE_P (node))
6389 {
6390 warning (OPT_Wattributes, "inline function %q+D declared as "
6391 " dllimport: attribute ignored", node);
6392 *no_add_attrs = true;
6393 }
6394 /* Like MS, treat definition of dllimported variables and
6395 non-inlined functions on declaration as syntax errors. */
6396 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6397 {
6398 error ("function %q+D definition is marked dllimport", node);
6399 *no_add_attrs = true;
6400 }
6401
6402 else if (TREE_CODE (node) == VAR_DECL)
6403 {
6404 if (DECL_INITIAL (node))
6405 {
6406 error ("variable %q+D definition is marked dllimport",
6407 node);
6408 *no_add_attrs = true;
6409 }
6410
6411 /* `extern' needn't be specified with dllimport.
6412 Specify `extern' now and hope for the best. Sigh. */
6413 DECL_EXTERNAL (node) = 1;
6414 /* Also, implicitly give global scope to dllimport'd variables
6415 declared within a function, unless they are declared static. */
6416 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6417 TREE_PUBLIC (node) = 1;
6418 }
6419
6420 if (*no_add_attrs == false)
6421 DECL_DLLIMPORT_P (node) = 1;
6422 }
6423 else if (TREE_CODE (node) == FUNCTION_DECL
6424 && DECL_DECLARED_INLINE_P (node)
6425 && flag_keep_inline_dllexport)
6426 /* An exported function, even if inline, must be emitted. */
6427 DECL_EXTERNAL (node) = 0;
6428
6429 /* Report error if symbol is not accessible at global scope. */
6430 if (!TREE_PUBLIC (node)
6431 && (TREE_CODE (node) == VAR_DECL
6432 || TREE_CODE (node) == FUNCTION_DECL))
6433 {
6434 error ("external linkage required for symbol %q+D because of "
6435 "%qE attribute", node, name);
6436 *no_add_attrs = true;
6437 }
6438
6439 /* A dllexport'd entity must have default visibility so that other
6440 program units (shared libraries or the main executable) can see
6441 it. A dllimport'd entity must have default visibility so that
6442 the linker knows that undefined references within this program
6443 unit can be resolved by the dynamic linker. */
6444 if (!*no_add_attrs)
6445 {
6446 if (DECL_VISIBILITY_SPECIFIED (node)
6447 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6448 error ("%qE implies default visibility, but %qD has already "
6449 "been declared with a different visibility",
6450 name, node);
6451 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6452 DECL_VISIBILITY_SPECIFIED (node) = 1;
6453 }
6454
6455 return NULL_TREE;
6456 }
6457
6458 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6459 \f
6460 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6461 of the various TYPE_QUAL values. */
6462
6463 static void
6464 set_type_quals (tree type, int type_quals)
6465 {
6466 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6467 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6468 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6469 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6470 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6471 }
6472
6473 /* Returns true iff unqualified CAND and BASE are equivalent. */
6474
6475 bool
6476 check_base_type (const_tree cand, const_tree base)
6477 {
6478 return (TYPE_NAME (cand) == TYPE_NAME (base)
6479 /* Apparently this is needed for Objective-C. */
6480 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6481 /* Check alignment. */
6482 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6483 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6484 TYPE_ATTRIBUTES (base)));
6485 }
6486
6487 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6488
6489 bool
6490 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6491 {
6492 return (TYPE_QUALS (cand) == type_quals
6493 && check_base_type (cand, base));
6494 }
6495
6496 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6497
6498 static bool
6499 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6500 {
6501 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6502 && TYPE_NAME (cand) == TYPE_NAME (base)
6503 /* Apparently this is needed for Objective-C. */
6504 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6505 /* Check alignment. */
6506 && TYPE_ALIGN (cand) == align
6507 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6508 TYPE_ATTRIBUTES (base)));
6509 }
6510
6511 /* This function checks to see if TYPE matches the size of one of the
6512 built-in atomic types, and returns that core atomic type. */
6513
6514 static tree
6515 find_atomic_core_type (tree type)
6516 {
6517 tree base_atomic_type;
6518
6519 /* Only handle complete types. */
6520 if (TYPE_SIZE (type) == NULL_TREE)
6521 return NULL_TREE;
6522
6523 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6524 switch (type_size)
6525 {
6526 case 8:
6527 base_atomic_type = atomicQI_type_node;
6528 break;
6529
6530 case 16:
6531 base_atomic_type = atomicHI_type_node;
6532 break;
6533
6534 case 32:
6535 base_atomic_type = atomicSI_type_node;
6536 break;
6537
6538 case 64:
6539 base_atomic_type = atomicDI_type_node;
6540 break;
6541
6542 case 128:
6543 base_atomic_type = atomicTI_type_node;
6544 break;
6545
6546 default:
6547 base_atomic_type = NULL_TREE;
6548 }
6549
6550 return base_atomic_type;
6551 }
6552
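/* For instance, on a target where int is 32 bits wide, an _Atomic int
   matches atomicSI_type_node here, and build_qualified_type below can
   then raise the variant's alignment to whatever the target requires
   for SImode atomics (a worked example assuming a 32-bit int).  */
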
6553 /* Return a version of the TYPE, qualified as indicated by the
6554 TYPE_QUALS, if one exists. If no qualified version exists yet,
6555 return NULL_TREE. */
6556
6557 tree
6558 get_qualified_type (tree type, int type_quals)
6559 {
6560 tree t;
6561
6562 if (TYPE_QUALS (type) == type_quals)
6563 return type;
6564
6565 /* Search the chain of variants to see if there is already one there just
6566 like the one we need to have. If so, use that existing one. We must
6567 preserve the TYPE_NAME, since there is code that depends on this. */
6568 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6569 if (check_qualified_type (t, type, type_quals))
6570 return t;
6571
6572 return NULL_TREE;
6573 }
6574
6575 /* Like get_qualified_type, but creates the type if it does not
6576 exist. This function never returns NULL_TREE. */
6577
6578 tree
6579 build_qualified_type (tree type, int type_quals)
6580 {
6581 tree t;
6582
6583 /* See if we already have the appropriate qualified variant. */
6584 t = get_qualified_type (type, type_quals);
6585
6586 /* If not, build it. */
6587 if (!t)
6588 {
6589 t = build_variant_type_copy (type);
6590 set_type_quals (t, type_quals);
6591
6592 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6593 {
6594 /* See if this object can map to a basic atomic type. */
6595 tree atomic_type = find_atomic_core_type (type);
6596 if (atomic_type)
6597 {
6598 /* Ensure the alignment of this type is compatible with
6599 the required alignment of the atomic type. */
6600 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6601 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6602 }
6603 }
6604
6605 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6606 /* Propagate structural equality. */
6607 SET_TYPE_STRUCTURAL_EQUALITY (t);
6608 else if (TYPE_CANONICAL (type) != type)
6609 /* Build the underlying canonical type, since it is different
6610 from TYPE. */
6611 {
6612 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6613 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6614 }
6615 else
6616 /* T is its own canonical type. */
6617 TYPE_CANONICAL (t) = t;
6618
6619 }
6620
6621 return t;
6622 }
6623
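/* A minimal usage sketch: asking for a const-qualified variant of char
   either finds an existing variant or creates one sharing the main
   variant chain with char_type_node:

     tree ctype = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
     gcc_assert (TYPE_MAIN_VARIANT (ctype) == char_type_node);
     gcc_assert (TYPE_READONLY (ctype));  */
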
6624 /* Create a variant of TYPE with alignment ALIGN. */
6625
6626 tree
6627 build_aligned_type (tree type, unsigned int align)
6628 {
6629 tree t;
6630
6631 if (TYPE_PACKED (type)
6632 || TYPE_ALIGN (type) == align)
6633 return type;
6634
6635 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6636 if (check_aligned_type (t, type, align))
6637 return t;
6638
6639 t = build_variant_type_copy (type);
6640 TYPE_ALIGN (t) = align;
6641
6642 return t;
6643 }
6644
6645 /* Create a new distinct copy of TYPE. The new type is made its own
6646 MAIN_VARIANT. If TYPE requires structural equality checks, the
6647 resulting type requires structural equality checks; otherwise, its
6648 TYPE_CANONICAL points to itself. */
6649
6650 tree
6651 build_distinct_type_copy (tree type)
6652 {
6653 tree t = copy_node (type);
6654
6655 TYPE_POINTER_TO (t) = 0;
6656 TYPE_REFERENCE_TO (t) = 0;
6657
6658 /* Set the canonical type either to a new equivalence class, or
6659 propagate the need for structural equality checks. */
6660 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6661 SET_TYPE_STRUCTURAL_EQUALITY (t);
6662 else
6663 TYPE_CANONICAL (t) = t;
6664
6665 /* Make it its own variant. */
6666 TYPE_MAIN_VARIANT (t) = t;
6667 TYPE_NEXT_VARIANT (t) = 0;
6668
6669 /* We do not record methods in type copies or variants,
6670 so we do not need to keep them up to date when a new method
6671 is inserted. */
6672 if (RECORD_OR_UNION_TYPE_P (t))
6673 TYPE_METHODS (t) = NULL_TREE;
6674
6675 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6676 whose TREE_TYPE is not t. This can also happen in the Ada
6677 frontend when using subtypes. */
6678
6679 return t;
6680 }
6681
6682 /* Create a new variant of TYPE, equivalent but distinct. This is so
6683 the caller can modify it. TYPE_CANONICAL for the return type will
6684 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6685 are considered equal by the language itself (or that both types
6686 require structural equality checks). */
6687
6688 tree
6689 build_variant_type_copy (tree type)
6690 {
6691 tree t, m = TYPE_MAIN_VARIANT (type);
6692
6693 t = build_distinct_type_copy (type);
6694
6695 /* Since we're building a variant, assume that it is a non-semantic
6696 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6697 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6698
6699 /* Add the new type to the chain of variants of TYPE. */
6700 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6701 TYPE_NEXT_VARIANT (m) = t;
6702 TYPE_MAIN_VARIANT (t) = m;
6703
6704 return t;
6705 }
6706 \f
6707 /* Return true if the from trees in both tree maps are equal. */
6708
6709 int
6710 tree_map_base_eq (const void *va, const void *vb)
6711 {
6712 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6713 *const b = (const struct tree_map_base *) vb;
6714 return (a->from == b->from);
6715 }
6716
6717 /* Hash a from tree in a tree_map_base. */
6718
6719 unsigned int
6720 tree_map_base_hash (const void *item)
6721 {
6722 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6723 }
6724
6725 /* Return true if this tree map structure is marked for garbage collection
6726 purposes. We simply return true if the from tree is marked, so that this
6727 structure goes away when the from tree goes away. */
6728
6729 int
6730 tree_map_base_marked_p (const void *p)
6731 {
6732 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6733 }
6734
6735 /* Hash a from tree in a tree_map. */
6736
6737 unsigned int
6738 tree_map_hash (const void *item)
6739 {
6740 return (((const struct tree_map *) item)->hash);
6741 }
6742
6743 /* Hash a from tree in a tree_decl_map. */
6744
6745 unsigned int
6746 tree_decl_map_hash (const void *item)
6747 {
6748 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6749 }
6750
6751 /* Return the initialization priority for DECL. */
6752
6753 priority_type
6754 decl_init_priority_lookup (tree decl)
6755 {
6756 symtab_node *snode = symtab_node::get (decl);
6757
6758 if (!snode)
6759 return DEFAULT_INIT_PRIORITY;
6760 return
6761 snode->get_init_priority ();
6762 }
6763
6764 /* Return the finalization priority for DECL. */
6765
6766 priority_type
6767 decl_fini_priority_lookup (tree decl)
6768 {
6769 cgraph_node *node = cgraph_node::get (decl);
6770
6771 if (!node)
6772 return DEFAULT_INIT_PRIORITY;
6773 return
6774 node->get_fini_priority ();
6775 }
6776
6777 /* Set the initialization priority for DECL to PRIORITY. */
6778
6779 void
6780 decl_init_priority_insert (tree decl, priority_type priority)
6781 {
6782 struct symtab_node *snode;
6783
6784 if (priority == DEFAULT_INIT_PRIORITY)
6785 {
6786 snode = symtab_node::get (decl);
6787 if (!snode)
6788 return;
6789 }
6790 else if (TREE_CODE (decl) == VAR_DECL)
6791 snode = varpool_node::get_create (decl);
6792 else
6793 snode = cgraph_node::get_create (decl);
6794 snode->set_init_priority (priority);
6795 }
6796
6797 /* Set the finalization priority for DECL to PRIORITY. */
6798
6799 void
6800 decl_fini_priority_insert (tree decl, priority_type priority)
6801 {
6802 struct cgraph_node *node;
6803
6804 if (priority == DEFAULT_INIT_PRIORITY)
6805 {
6806 node = cgraph_node::get (decl);
6807 if (!node)
6808 return;
6809 }
6810 else
6811 node = cgraph_node::get_create (decl);
6812 node->set_fini_priority (priority);
6813 }
6814
6815 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6816
6817 static void
6818 print_debug_expr_statistics (void)
6819 {
6820 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6821 (long) debug_expr_for_decl->size (),
6822 (long) debug_expr_for_decl->elements (),
6823 debug_expr_for_decl->collisions ());
6824 }
6825
6826 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6827
6828 static void
6829 print_value_expr_statistics (void)
6830 {
6831 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6832 (long) value_expr_for_decl->size (),
6833 (long) value_expr_for_decl->elements (),
6834 value_expr_for_decl->collisions ());
6835 }
6836
6837 /* Lookup a debug expression for FROM, and return it if we find one. */
6838
6839 tree
6840 decl_debug_expr_lookup (tree from)
6841 {
6842 struct tree_decl_map *h, in;
6843 in.base.from = from;
6844
6845 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6846 if (h)
6847 return h->to;
6848 return NULL_TREE;
6849 }
6850
6851 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6852
6853 void
6854 decl_debug_expr_insert (tree from, tree to)
6855 {
6856 struct tree_decl_map *h;
6857
6858 h = ggc_alloc<tree_decl_map> ();
6859 h->base.from = from;
6860 h->to = to;
6861 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6862 }
6863
6864 /* Lookup a value expression for FROM, and return it if we find one. */
6865
6866 tree
6867 decl_value_expr_lookup (tree from)
6868 {
6869 struct tree_decl_map *h, in;
6870 in.base.from = from;
6871
6872 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6873 if (h)
6874 return h->to;
6875 return NULL_TREE;
6876 }
6877
6878 /* Insert a mapping FROM->TO in the value expression hashtable. */
6879
6880 void
6881 decl_value_expr_insert (tree from, tree to)
6882 {
6883 struct tree_decl_map *h;
6884
6885 h = ggc_alloc<tree_decl_map> ();
6886 h->base.from = from;
6887 h->to = to;
6888 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6889 }
6890
6891 /* Lookup a vector of debug arguments for FROM, and return it if we
6892 find one. */
6893
6894 vec<tree, va_gc> **
6895 decl_debug_args_lookup (tree from)
6896 {
6897 struct tree_vec_map *h, in;
6898
6899 if (!DECL_HAS_DEBUG_ARGS_P (from))
6900 return NULL;
6901 gcc_checking_assert (debug_args_for_decl != NULL);
6902 in.base.from = from;
6903 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6904 if (h)
6905 return &h->to;
6906 return NULL;
6907 }
6908
6909 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6910 arguments hashtable. */
6911
6912 vec<tree, va_gc> **
6913 decl_debug_args_insert (tree from)
6914 {
6915 struct tree_vec_map *h;
6916 tree_vec_map **loc;
6917
6918 if (DECL_HAS_DEBUG_ARGS_P (from))
6919 return decl_debug_args_lookup (from);
6920 if (debug_args_for_decl == NULL)
6921 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6922 h = ggc_alloc<tree_vec_map> ();
6923 h->base.from = from;
6924 h->to = NULL;
6925 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6926 *loc = h;
6927 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6928 return &h->to;
6929 }
6930
6931 /* Hashing of types so that we don't make duplicates.
6932 The entry point is `type_hash_canon'. */
6933
6934 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6935 with types in the TREE_VALUE slots), by adding the hash codes
6936 of the individual types. */
6937
6938 static void
6939 type_hash_list (const_tree list, inchash::hash &hstate)
6940 {
6941 const_tree tail;
6942
6943 for (tail = list; tail; tail = TREE_CHAIN (tail))
6944 if (TREE_VALUE (tail) != error_mark_node)
6945 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6946 }
6947
6948 /* These are the Hashtable callback functions. */
6949
6950 /* Returns true iff the types are equivalent. */
6951
6952 bool
6953 type_cache_hasher::equal (type_hash *a, type_hash *b)
6954 {
6955 /* First test the things that are the same for all types. */
6956 if (a->hash != b->hash
6957 || TREE_CODE (a->type) != TREE_CODE (b->type)
6958 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6959 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6960 TYPE_ATTRIBUTES (b->type))
6961 || (TREE_CODE (a->type) != COMPLEX_TYPE
6962 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6963 return 0;
6964
6965 /* Be careful about comparing arrays before and after the element type
6966 has been completed; don't compare TYPE_ALIGN unless both types are
6967 complete. */
6968 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6969 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6970 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6971 return 0;
6972
6973 switch (TREE_CODE (a->type))
6974 {
6975 case VOID_TYPE:
6976 case COMPLEX_TYPE:
6977 case POINTER_TYPE:
6978 case REFERENCE_TYPE:
6979 case NULLPTR_TYPE:
6980 return 1;
6981
6982 case VECTOR_TYPE:
6983 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6984
6985 case ENUMERAL_TYPE:
6986 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6987 && !(TYPE_VALUES (a->type)
6988 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6989 && TYPE_VALUES (b->type)
6990 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6991 && type_list_equal (TYPE_VALUES (a->type),
6992 TYPE_VALUES (b->type))))
6993 return 0;
6994
6995 /* ... fall through ... */
6996
6997 case INTEGER_TYPE:
6998 case REAL_TYPE:
6999 case BOOLEAN_TYPE:
7000 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7001 return false;
7002 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7003 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7004 TYPE_MAX_VALUE (b->type)))
7005 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7006 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7007 TYPE_MIN_VALUE (b->type))));
7008
7009 case FIXED_POINT_TYPE:
7010 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7011
7012 case OFFSET_TYPE:
7013 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7014
7015 case METHOD_TYPE:
7016 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7017 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7018 || (TYPE_ARG_TYPES (a->type)
7019 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7020 && TYPE_ARG_TYPES (b->type)
7021 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7022 && type_list_equal (TYPE_ARG_TYPES (a->type),
7023 TYPE_ARG_TYPES (b->type)))))
7024 break;
7025 return 0;
7026 case ARRAY_TYPE:
7027 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7028
7029 case RECORD_TYPE:
7030 case UNION_TYPE:
7031 case QUAL_UNION_TYPE:
7032 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7033 || (TYPE_FIELDS (a->type)
7034 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7035 && TYPE_FIELDS (b->type)
7036 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7037 && type_list_equal (TYPE_FIELDS (a->type),
7038 TYPE_FIELDS (b->type))));
7039
7040 case FUNCTION_TYPE:
7041 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7042 || (TYPE_ARG_TYPES (a->type)
7043 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7044 && TYPE_ARG_TYPES (b->type)
7045 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7046 && type_list_equal (TYPE_ARG_TYPES (a->type),
7047 TYPE_ARG_TYPES (b->type))))
7048 break;
7049 return 0;
7050
7051 default:
7052 return 0;
7053 }
7054
7055 if (lang_hooks.types.type_hash_eq != NULL)
7056 return lang_hooks.types.type_hash_eq (a->type, b->type);
7057
7058 return 1;
7059 }
7060
7061 /* Given TYPE, and HASHCODE its hash code, return the canonical
7062 object for an identical type if one already exists.
7063 Otherwise, return TYPE, and record it as the canonical object.
7064
7065 To use this function, first create a type of the sort you want.
7066 Then compute its hash code from the fields of the type that
7067 make it different from other similar types.
7068 Then call this function and use the value. */
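/* A hedged sketch of that protocol, modeled loosely on how nonstandard
   integer types are built (the hashed fields here are illustrative; real
   callers hash whatever distinguishes their particular type):

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = precision;
     fixup_signed_type (t);
     inchash::hash hstate;
     hstate.add_object (TYPE_PRECISION (t));
     t = type_hash_canon (hstate.end (), t);  */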
7069
7070 tree
7071 type_hash_canon (unsigned int hashcode, tree type)
7072 {
7073 type_hash in;
7074 type_hash **loc;
7075
7076 /* The hash table only contains main variants, so ensure that's what we're
7077 being passed. */
7078 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7079
7080 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7081 must call that routine before comparing TYPE_ALIGNs. */
7082 layout_type (type);
7083
7084 in.hash = hashcode;
7085 in.type = type;
7086
7087 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7088 if (*loc)
7089 {
7090 tree t1 = ((type_hash *) *loc)->type;
7091 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7092 if (GATHER_STATISTICS)
7093 {
7094 tree_code_counts[(int) TREE_CODE (type)]--;
7095 tree_node_counts[(int) t_kind]--;
7096 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7097 }
7098 return t1;
7099 }
7100 else
7101 {
7102 struct type_hash *h;
7103
7104 h = ggc_alloc<type_hash> ();
7105 h->hash = hashcode;
7106 h->type = type;
7107 *loc = h;
7108
7109 return type;
7110 }
7111 }
7112
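/* Print out the statistics for the type hash table.  */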
7113 static void
7114 print_type_hash_statistics (void)
7115 {
7116 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7117 (long) type_hash_table->size (),
7118 (long) type_hash_table->elements (),
7119 type_hash_table->collisions ());
7120 }
7121
7122 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7123 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7124 by adding the hash codes of the individual attributes. */
7125
7126 static void
7127 attribute_hash_list (const_tree list, inchash::hash &hstate)
7128 {
7129 const_tree tail;
7130
7131 for (tail = list; tail; tail = TREE_CHAIN (tail))
7132 /* ??? Do we want to add in TREE_VALUE too? */
7133 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7134 }
7135
7136 /* Given two lists of attributes, return true if list L2 is
7137 equivalent to L1. */
7138
7139 int
7140 attribute_list_equal (const_tree l1, const_tree l2)
7141 {
7142 if (l1 == l2)
7143 return 1;
7144
7145 return attribute_list_contained (l1, l2)
7146 && attribute_list_contained (l2, l1);
7147 }
7148
7149 /* Given two lists of attributes, return true if list L2 is
7150 completely contained within L1. */
7151 /* ??? This would be faster if attribute names were stored in a canonicalized
7152 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7153 must be used to show these elements are equivalent (which they are). */
7154 /* ??? It's not clear that attributes with arguments will always be handled
7155 correctly. */
7156
7157 int
7158 attribute_list_contained (const_tree l1, const_tree l2)
7159 {
7160 const_tree t1, t2;
7161
7162 /* First check the obvious, maybe the lists are identical. */
7163 if (l1 == l2)
7164 return 1;
7165
7166 /* Maybe the lists are similar. */
7167 for (t1 = l1, t2 = l2;
7168 t1 != 0 && t2 != 0
7169 && get_attribute_name (t1) == get_attribute_name (t2)
7170 && TREE_VALUE (t1) == TREE_VALUE (t2);
7171 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7172 ;
7173
7174 /* Maybe the lists are equal. */
7175 if (t1 == 0 && t2 == 0)
7176 return 1;
7177
7178 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7179 {
7180 const_tree attr;
7181 /* This CONST_CAST is okay because lookup_attribute does not
7182 modify its argument and the return value is assigned to a
7183 const_tree. */
7184 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7185 CONST_CAST_TREE (l1));
7186 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7187 attr = lookup_ident_attribute (get_attribute_name (t2),
7188 TREE_CHAIN (attr)))
7189 ;
7190
7191 if (attr == NULL_TREE)
7192 return 0;
7193 }
7194
7195 return 1;
7196 }
7197
7198 /* Given two lists of types
7199 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7200 return 1 if the lists contain the same types in the same order.
7201 Also, the TREE_PURPOSEs must match. */
7202
7203 int
7204 type_list_equal (const_tree l1, const_tree l2)
7205 {
7206 const_tree t1, t2;
7207
7208 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7209 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7210 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7211 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7212 && (TREE_TYPE (TREE_PURPOSE (t1))
7213 == TREE_TYPE (TREE_PURPOSE (t2))))))
7214 return 0;
7215
7216 return t1 == t2;
7217 }
7218
7219 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7220 given by TYPE. If the argument list accepts variable arguments,
7221 then this function counts only the ordinary arguments. */
7222
7223 int
7224 type_num_arguments (const_tree type)
7225 {
7226 int i = 0;
7227 tree t;
7228
7229 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7230 /* If the function does not take a variable number of arguments,
7231 the last element in the list will have type `void'. */
7232 if (VOID_TYPE_P (TREE_VALUE (t)))
7233 break;
7234 else
7235 ++i;
7236
7237 return i;
7238 }
7239
7240 /* Nonzero if integer constants T1 and T2
7241 represent the same constant value. */
7242
7243 int
7244 tree_int_cst_equal (const_tree t1, const_tree t2)
7245 {
7246 if (t1 == t2)
7247 return 1;
7248
7249 if (t1 == 0 || t2 == 0)
7250 return 0;
7251
7252 if (TREE_CODE (t1) == INTEGER_CST
7253 && TREE_CODE (t2) == INTEGER_CST
7254 && wi::to_widest (t1) == wi::to_widest (t2))
7255 return 1;
7256
7257 return 0;
7258 }
7259
7260 /* Return true if T is an INTEGER_CST whose numerical value (extended
7261 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7262
7263 bool
7264 tree_fits_shwi_p (const_tree t)
7265 {
7266 return (t != NULL_TREE
7267 && TREE_CODE (t) == INTEGER_CST
7268 && wi::fits_shwi_p (wi::to_widest (t)));
7269 }
7270
7271 /* Return true if T is an INTEGER_CST whose numerical value (extended
7272 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7273
7274 bool
7275 tree_fits_uhwi_p (const_tree t)
7276 {
7277 return (t != NULL_TREE
7278 && TREE_CODE (t) == INTEGER_CST
7279 && wi::fits_uhwi_p (wi::to_widest (t)));
7280 }
7281
7282 /* T is an INTEGER_CST whose numerical value (extended according to
7283 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7284 HOST_WIDE_INT. */
7285
7286 HOST_WIDE_INT
7287 tree_to_shwi (const_tree t)
7288 {
7289 gcc_assert (tree_fits_shwi_p (t));
7290 return TREE_INT_CST_LOW (t);
7291 }
7292
7293 /* T is an INTEGER_CST whose numerical value (extended according to
7294 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7295 HOST_WIDE_INT. */
7296
7297 unsigned HOST_WIDE_INT
7298 tree_to_uhwi (const_tree t)
7299 {
7300 gcc_assert (tree_fits_uhwi_p (t));
7301 return TREE_INT_CST_LOW (t);
7302 }
7303
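/* The usual protocol is to test with the predicate first and convert only
   on success, for example (a sketch; SIZE stands for any INTEGER_CST):

     if (tree_fits_uhwi_p (size))
       {
         unsigned HOST_WIDE_INT n = tree_to_uhwi (size);
         ...
       }  */
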
7304 /* Return the most significant (sign) bit of T. */
7305
7306 int
7307 tree_int_cst_sign_bit (const_tree t)
7308 {
7309 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7310
7311 return wi::extract_uhwi (t, bitno, 1);
7312 }
7313
7314 /* Return an indication of the sign of the integer constant T.
7315 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7316 Note that -1 will never be returned if T's type is unsigned. */
7317
7318 int
7319 tree_int_cst_sgn (const_tree t)
7320 {
7321 if (wi::eq_p (t, 0))
7322 return 0;
7323 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7324 return 1;
7325 else if (wi::neg_p (t))
7326 return -1;
7327 else
7328 return 1;
7329 }
7330
7331 /* Return the minimum number of bits needed to represent VALUE in a
7332 signed or unsigned type; SGN says which. */
7333
7334 unsigned int
7335 tree_int_cst_min_precision (tree value, signop sgn)
7336 {
7337 /* If the value is negative, compute its negative minus 1. The latter
7338 adjustment is because the absolute value of the largest negative value
7339 is one larger than the largest positive value. This is equivalent to
7340 a bit-wise negation, so use that operation instead. */
7341
7342 if (tree_int_cst_sgn (value) < 0)
7343 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7344
7345 /* Return the number of bits needed, taking into account the fact
7346 that we need one more bit for a signed type than for an unsigned one.
7347 If value is 0 or -1, the minimum precision is 1 no matter
7348 what SGN is. */
7349
7350 if (integer_zerop (value))
7351 return 1;
7352 else
7353 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7354 }
7355
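/* Worked examples: 5 needs 3 bits as UNSIGNED (101) and 4 bits as SIGNED
   (0101), while -3 is first bit-not'ed to 2 and then needs
   tree_floor_log2 (2) + 1 + 1 = 3 bits as SIGNED (101 in two's
   complement).  */
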
7356 /* Return truthvalue of whether T1 is the same tree structure as T2.
7357 Return 1 if they are the same.
7358 Return 0 if they are understandably different.
7359 Return -1 if either contains tree structure not understood by
7360 this function. */
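/* Callers therefore distinguish three outcomes, typically along these
   lines (an illustrative sketch):

     int cmp = simple_cst_equal (t1, t2);
     if (cmp == 1)
       ...known to be the same structure...
     else if (cmp == 0)
       ...known to be different...
     else
       ...cannot tell, so be conservative...  */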
7361
7362 int
7363 simple_cst_equal (const_tree t1, const_tree t2)
7364 {
7365 enum tree_code code1, code2;
7366 int cmp;
7367 int i;
7368
7369 if (t1 == t2)
7370 return 1;
7371 if (t1 == 0 || t2 == 0)
7372 return 0;
7373
7374 code1 = TREE_CODE (t1);
7375 code2 = TREE_CODE (t2);
7376
7377 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7378 {
7379 if (CONVERT_EXPR_CODE_P (code2)
7380 || code2 == NON_LVALUE_EXPR)
7381 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7382 else
7383 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7384 }
7385
7386 else if (CONVERT_EXPR_CODE_P (code2)
7387 || code2 == NON_LVALUE_EXPR)
7388 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7389
7390 if (code1 != code2)
7391 return 0;
7392
7393 switch (code1)
7394 {
7395 case INTEGER_CST:
7396 return wi::to_widest (t1) == wi::to_widest (t2);
7397
7398 case REAL_CST:
7399 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7400
7401 case FIXED_CST:
7402 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7403
7404 case STRING_CST:
7405 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7406 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7407 TREE_STRING_LENGTH (t1)));
7408
7409 case CONSTRUCTOR:
7410 {
7411 unsigned HOST_WIDE_INT idx;
7412 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7413 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7414
7415 if (vec_safe_length (v1) != vec_safe_length (v2))
7416 return false;
7417
7418 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7419 /* ??? Should we handle also fields here? */
7420 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7421 return false;
7422 return true;
7423 }
7424
7425 case SAVE_EXPR:
7426 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7427
7428 case CALL_EXPR:
7429 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7430 if (cmp <= 0)
7431 return cmp;
7432 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7433 return 0;
7434 {
7435 const_tree arg1, arg2;
7436 const_call_expr_arg_iterator iter1, iter2;
7437 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7438 arg2 = first_const_call_expr_arg (t2, &iter2);
7439 arg1 && arg2;
7440 arg1 = next_const_call_expr_arg (&iter1),
7441 arg2 = next_const_call_expr_arg (&iter2))
7442 {
7443 cmp = simple_cst_equal (arg1, arg2);
7444 if (cmp <= 0)
7445 return cmp;
7446 }
7447 return arg1 == arg2;
7448 }
7449
7450 case TARGET_EXPR:
7451 /* Special case: if either target is an unallocated VAR_DECL,
7452 it means that it's going to be unified with whatever the
7453 TARGET_EXPR is really supposed to initialize, so treat it
7454 as being equivalent to anything. */
7455 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7456 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7457 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7458 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7459 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7460 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7461 cmp = 1;
7462 else
7463 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7464
7465 if (cmp <= 0)
7466 return cmp;
7467
7468 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7469
7470 case WITH_CLEANUP_EXPR:
7471 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7472 if (cmp <= 0)
7473 return cmp;
7474
7475 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7476
7477 case COMPONENT_REF:
7478 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7479 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7480
7481 return 0;
7482
7483 case VAR_DECL:
7484 case PARM_DECL:
7485 case CONST_DECL:
7486 case FUNCTION_DECL:
7487 return 0;
7488
7489 default:
7490 break;
7491 }
7492
7493 /* This general rule works for most tree codes. All exceptions should be
7494 handled above. If this is a language-specific tree code, we can't
7495 trust what might be in the operand, so say we don't know
7496 the situation. */
7497 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7498 return -1;
7499
7500 switch (TREE_CODE_CLASS (code1))
7501 {
7502 case tcc_unary:
7503 case tcc_binary:
7504 case tcc_comparison:
7505 case tcc_expression:
7506 case tcc_reference:
7507 case tcc_statement:
7508 cmp = 1;
7509 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7510 {
7511 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7512 if (cmp <= 0)
7513 return cmp;
7514 }
7515
7516 return cmp;
7517
7518 default:
7519 return -1;
7520 }
7521 }
7522
7523 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7524 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7525 than U, respectively. */
7526
7527 int
7528 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7529 {
7530 if (tree_int_cst_sgn (t) < 0)
7531 return -1;
7532 else if (!tree_fits_uhwi_p (t))
7533 return 1;
7534 else if (TREE_INT_CST_LOW (t) == u)
7535 return 0;
7536 else if (TREE_INT_CST_LOW (t) < u)
7537 return -1;
7538 else
7539 return 1;
7540 }
7541
7542 /* Return true if SIZE represents a constant size that is in bounds of
7543 what the middle-end and the backend accepts (covering not more than
7544 half of the address-space). */
7545
7546 bool
7547 valid_constant_size_p (const_tree size)
7548 {
7549 if (! tree_fits_uhwi_p (size)
7550 || TREE_OVERFLOW (size)
7551 || tree_int_cst_sign_bit (size) != 0)
7552 return false;
7553 return true;
7554 }
7555
7556 /* Return the precision of the type, or for a complex or vector type the
7557 precision of the type of its elements. */
7558
7559 unsigned int
7560 element_precision (const_tree type)
7561 {
7562 if (!TYPE_P (type))
7563 type = TREE_TYPE (type);
7564 enum tree_code code = TREE_CODE (type);
7565 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7566 type = TREE_TYPE (type);
7567
7568 return TYPE_PRECISION (type);
7569 }
7570
7571 /* Return true if CODE represents an associative tree code. Otherwise
7572 return false. */
7573 bool
7574 associative_tree_code (enum tree_code code)
7575 {
7576 switch (code)
7577 {
7578 case BIT_IOR_EXPR:
7579 case BIT_AND_EXPR:
7580 case BIT_XOR_EXPR:
7581 case PLUS_EXPR:
7582 case MULT_EXPR:
7583 case MIN_EXPR:
7584 case MAX_EXPR:
7585 return true;
7586
7587 default:
7588 break;
7589 }
7590 return false;
7591 }
7592
7593 /* Return true if CODE represents a commutative tree code. Otherwise
7594 return false. */
7595 bool
7596 commutative_tree_code (enum tree_code code)
7597 {
7598 switch (code)
7599 {
7600 case PLUS_EXPR:
7601 case MULT_EXPR:
7602 case MULT_HIGHPART_EXPR:
7603 case MIN_EXPR:
7604 case MAX_EXPR:
7605 case BIT_IOR_EXPR:
7606 case BIT_XOR_EXPR:
7607 case BIT_AND_EXPR:
7608 case NE_EXPR:
7609 case EQ_EXPR:
7610 case UNORDERED_EXPR:
7611 case ORDERED_EXPR:
7612 case UNEQ_EXPR:
7613 case LTGT_EXPR:
7614 case TRUTH_AND_EXPR:
7615 case TRUTH_XOR_EXPR:
7616 case TRUTH_OR_EXPR:
7617 case WIDEN_MULT_EXPR:
7618 case VEC_WIDEN_MULT_HI_EXPR:
7619 case VEC_WIDEN_MULT_LO_EXPR:
7620 case VEC_WIDEN_MULT_EVEN_EXPR:
7621 case VEC_WIDEN_MULT_ODD_EXPR:
7622 return true;
7623
7624 default:
7625 break;
7626 }
7627 return false;
7628 }
7629
7630 /* Return true if CODE represents a ternary tree code for which the
7631 first two operands are commutative. Otherwise return false. */
7632 bool
7633 commutative_ternary_tree_code (enum tree_code code)
7634 {
7635 switch (code)
7636 {
7637 case WIDEN_MULT_PLUS_EXPR:
7638 case WIDEN_MULT_MINUS_EXPR:
7639 case DOT_PROD_EXPR:
7640 case FMA_EXPR:
7641 return true;
7642
7643 default:
7644 break;
7645 }
7646 return false;
7647 }
7648
7649 /* Returns true if CODE can overflow. */
7650
7651 bool
7652 operation_can_overflow (enum tree_code code)
7653 {
7654 switch (code)
7655 {
7656 case PLUS_EXPR:
7657 case MINUS_EXPR:
7658 case MULT_EXPR:
7659 case LSHIFT_EXPR:
7660 /* Can overflow in various ways. */
7661 return true;
7662 case TRUNC_DIV_EXPR:
7663 case EXACT_DIV_EXPR:
7664 case FLOOR_DIV_EXPR:
7665 case CEIL_DIV_EXPR:
7666 /* For INT_MIN / -1. */
7667 return true;
7668 case NEGATE_EXPR:
7669 case ABS_EXPR:
7670 /* For -INT_MIN. */
7671 return true;
7672 default:
7673 /* These operators cannot overflow. */
7674 return false;
7675 }
7676 }
7677
7678 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7679 -ftrapv doesn't generate trapping insns for CODE. */
7680
7681 bool
7682 operation_no_trapping_overflow (tree type, enum tree_code code)
7683 {
7684 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7685
7686 /* We don't generate instructions that trap on overflow for complex or vector
7687 types. */
7688 if (!INTEGRAL_TYPE_P (type))
7689 return true;
7690
7691 if (!TYPE_OVERFLOW_TRAPS (type))
7692 return true;
7693
7694 switch (code)
7695 {
7696 case PLUS_EXPR:
7697 case MINUS_EXPR:
7698 case MULT_EXPR:
7699 case NEGATE_EXPR:
7700 case ABS_EXPR:
7701 /* These operators can overflow, and -ftrapv generates trapping code for
7702 these. */
7703 return false;
7704 case TRUNC_DIV_EXPR:
7705 case EXACT_DIV_EXPR:
7706 case FLOOR_DIV_EXPR:
7707 case CEIL_DIV_EXPR:
7708 case LSHIFT_EXPR:
7709 /* These operators can overflow, but -ftrapv does not generate trapping
7710 code for these. */
7711 return true;
7712 default:
7713 /* These operators cannot overflow. */
7714 return true;
7715 }
7716 }
7717
7718 namespace inchash
7719 {
7720
7721 /* Generate a hash value for an expression. This can be used iteratively
7722 by passing a previous result as the HSTATE argument.
7723
7724 This function is intended to produce the same hash for expressions which
7725 would compare equal using operand_equal_p. */
7726 void
7727 add_expr (const_tree t, inchash::hash &hstate)
7728 {
7729 int i;
7730 enum tree_code code;
7731 enum tree_code_class tclass;
7732
7733 if (t == NULL_TREE)
7734 {
7735 hstate.merge_hash (0);
7736 return;
7737 }
7738
7739 code = TREE_CODE (t);
7740
7741 switch (code)
7742 {
7743 /* Alas, constants aren't shared, so we can't rely on pointer
7744 identity. */
7745 case VOID_CST:
7746 hstate.merge_hash (0);
7747 return;
7748 case INTEGER_CST:
7749 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7750 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7751 return;
7752 case REAL_CST:
7753 {
7754 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7755 hstate.merge_hash (val2);
7756 return;
7757 }
7758 case FIXED_CST:
7759 {
7760 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7761 hstate.merge_hash (val2);
7762 return;
7763 }
7764 case STRING_CST:
7765 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7766 return;
7767 case COMPLEX_CST:
7768 inchash::add_expr (TREE_REALPART (t), hstate);
7769 inchash::add_expr (TREE_IMAGPART (t), hstate);
7770 return;
7771 case VECTOR_CST:
7772 {
7773 unsigned i;
7774 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7775 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7776 return;
7777 }
7778 case SSA_NAME:
7779 /* We can just compare by pointer. */
7780 hstate.add_wide_int (SSA_NAME_VERSION (t));
7781 return;
7782 case PLACEHOLDER_EXPR:
7783 /* The node itself doesn't matter. */
7784 return;
7785 case TREE_LIST:
7786 /* A list of expressions, for a CALL_EXPR or as the elements of a
7787 VECTOR_CST. */
7788 for (; t; t = TREE_CHAIN (t))
7789 inchash::add_expr (TREE_VALUE (t), hstate);
7790 return;
7791 case CONSTRUCTOR:
7792 {
7793 unsigned HOST_WIDE_INT idx;
7794 tree field, value;
7795 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7796 {
7797 inchash::add_expr (field, hstate);
7798 inchash::add_expr (value, hstate);
7799 }
7800 return;
7801 }
7802 case FUNCTION_DECL:
7803 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7804 Otherwise nodes that compare equal according to operand_equal_p might
7805 get different hash codes. However, don't do this for machine specific
7806 or front end builtins, since the function code is overloaded in those
7807 cases. */
7808 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7809 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7810 {
7811 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7812 code = TREE_CODE (t);
7813 }
7814 /* FALL THROUGH */
7815 default:
7816 tclass = TREE_CODE_CLASS (code);
7817
7818 if (tclass == tcc_declaration)
7819 {
7820 /* DECLs have a unique ID.  */
7821 hstate.add_wide_int (DECL_UID (t));
7822 }
7823 else
7824 {
7825 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7826
7827 hstate.add_object (code);
7828
7829 /* Don't hash the type; that can lead to having nodes which
7830 compare equal according to operand_equal_p, but which
7831 have different hash codes. */
7832 if (CONVERT_EXPR_CODE_P (code)
7833 || code == NON_LVALUE_EXPR)
7834 {
7835 /* Make sure to include signedness in the hash computation. */
7836 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7837 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7838 }
7839
7840 else if (commutative_tree_code (code))
7841 {
7842 /* It's a commutative expression. We want to hash it the same
7843 however it appears. We do this by first hashing both operands
7844 and then rehashing based on the order of their independent
7845 hashes. */
7846 inchash::hash one, two;
7847 inchash::add_expr (TREE_OPERAND (t, 0), one);
7848 inchash::add_expr (TREE_OPERAND (t, 1), two);
7849 hstate.add_commutative (one, two);
7850 }
7851 else
7852 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7853 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7854 }
7855 return;
7856 }
7857 }
7858
7859 }
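/* Illustrative sketch of the contract documented above (a_plus_b and
   b_plus_a are hypothetical trees for "a + b" and "b + a"): because
   PLUS_EXPR is commutative and is hashed via add_commutative, the two
   expressions are expected to produce the same value:

     inchash::hash h1, h2;
     inchash::add_expr (a_plus_b, h1);
     inchash::add_expr (b_plus_a, h2);
     gcc_checking_assert (h1.end () == h2.end ());  */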
7860
7861 /* Constructors for pointer, array and function types.
7862 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7863 constructed by language-dependent code, not here.) */
7864
7865 /* Construct, lay out and return the type of pointers to TO_TYPE with
7866 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7867 reference all of memory. If such a type has already been
7868 constructed, reuse it. */
7869
7870 tree
7871 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7872 bool can_alias_all)
7873 {
7874 tree t;
7875 bool could_alias = can_alias_all;
7876
7877 if (to_type == error_mark_node)
7878 return error_mark_node;
7879
7880 /* If the pointed-to type has the may_alias attribute set, force
7881 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7882 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7883 can_alias_all = true;
7884
7885 /* In some cases, languages will have things that aren't a POINTER_TYPE
7886 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7887 In that case, return that type without regard to the rest of our
7888 operands.
7889
7890 ??? This is a kludge, but consistent with the way this function has
7891 always operated and there doesn't seem to be a good way to avoid this
7892 at the moment. */
7893 if (TYPE_POINTER_TO (to_type) != 0
7894 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7895 return TYPE_POINTER_TO (to_type);
7896
7897 /* First, if we already have a type for pointers to TO_TYPE and it's
7898 the proper mode, use it. */
7899 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7900 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7901 return t;
7902
7903 t = make_node (POINTER_TYPE);
7904
7905 TREE_TYPE (t) = to_type;
7906 SET_TYPE_MODE (t, mode);
7907 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7908 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7909 TYPE_POINTER_TO (to_type) = t;
7910
7911 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7912 SET_TYPE_STRUCTURAL_EQUALITY (t);
7913 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7914 TYPE_CANONICAL (t)
7915 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7916 mode, false);
7917
7918 /* Lay out the type. This function has many callers that are concerned
7919 with expression-construction, and this simplifies them all. */
7920 layout_type (t);
7921
7922 return t;
7923 }
7924
7925 /* By default build pointers in ptr_mode. */
7926
7927 tree
7928 build_pointer_type (tree to_type)
7929 {
7930 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7931 : TYPE_ADDR_SPACE (to_type);
7932 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7933 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7934 }
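/* For example (illustrative only), the C type "int *" would normally be
   obtained with

     tree int_ptr = build_pointer_type (integer_type_node);

   which, via build_pointer_type_for_mode, walks the TYPE_POINTER_TO chain
   and reuses an existing POINTER_TYPE node with the right mode when one
   is already present.  */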
7935
7936 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7937
7938 tree
7939 build_reference_type_for_mode (tree to_type, machine_mode mode,
7940 bool can_alias_all)
7941 {
7942 tree t;
7943 bool could_alias = can_alias_all;
7944
7945 if (to_type == error_mark_node)
7946 return error_mark_node;
7947
7948 /* If the pointed-to type has the may_alias attribute set, force
7949 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7950 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7951 can_alias_all = true;
7952
7953 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7954 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7955 In that case, return that type without regard to the rest of our
7956 operands.
7957
7958 ??? This is a kludge, but consistent with the way this function has
7959 always operated and there doesn't seem to be a good way to avoid this
7960 at the moment. */
7961 if (TYPE_REFERENCE_TO (to_type) != 0
7962 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7963 return TYPE_REFERENCE_TO (to_type);
7964
7965 /* First, if we already have a type for references to TO_TYPE and it's
7966 the proper mode, use it. */
7967 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7968 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7969 return t;
7970
7971 t = make_node (REFERENCE_TYPE);
7972
7973 TREE_TYPE (t) = to_type;
7974 SET_TYPE_MODE (t, mode);
7975 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7976 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7977 TYPE_REFERENCE_TO (to_type) = t;
7978
7979 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7980 SET_TYPE_STRUCTURAL_EQUALITY (t);
7981 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7982 TYPE_CANONICAL (t)
7983 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7984 mode, false);
7985
7986 layout_type (t);
7987
7988 return t;
7989 }
7990
7991
7992 /* Build the node for the type of references-to-TO_TYPE by default
7993 in ptr_mode. */
7994
7995 tree
7996 build_reference_type (tree to_type)
7997 {
7998 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7999 : TYPE_ADDR_SPACE (to_type);
8000 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8001 return build_reference_type_for_mode (to_type, pointer_mode, false);
8002 }
8003
8004 #define MAX_INT_CACHED_PREC \
8005 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8006 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8007
8008 /* Builds a signed or unsigned integer type of precision PRECISION.
8009 Used for C bitfields whose precision does not match that of
8010 built-in target types. */
8011 tree
8012 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8013 int unsignedp)
8014 {
8015 tree itype, ret;
8016
8017 if (unsignedp)
8018 unsignedp = MAX_INT_CACHED_PREC + 1;
8019
8020 if (precision <= MAX_INT_CACHED_PREC)
8021 {
8022 itype = nonstandard_integer_type_cache[precision + unsignedp];
8023 if (itype)
8024 return itype;
8025 }
8026
8027 itype = make_node (INTEGER_TYPE);
8028 TYPE_PRECISION (itype) = precision;
8029
8030 if (unsignedp)
8031 fixup_unsigned_type (itype);
8032 else
8033 fixup_signed_type (itype);
8034
8035 ret = itype;
8036 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8037 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8038 if (precision <= MAX_INT_CACHED_PREC)
8039 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8040
8041 return ret;
8042 }
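/* For instance (illustrative only), a 24-bit unsigned bit-field type could
   be obtained with

     tree uint24 = build_nonstandard_integer_type (24, 1);

   As long as PRECISION does not exceed MAX_INT_CACHED_PREC, repeated calls
   with the same arguments return the same cached node.  */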
8043
8044 #define MAX_BOOL_CACHED_PREC \
8045 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8046 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8047
8048 /* Builds a boolean type of precision PRECISION.
8049 Used for boolean vectors to choose proper vector element size. */
8050 tree
8051 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8052 {
8053 tree type;
8054
8055 if (precision <= MAX_BOOL_CACHED_PREC)
8056 {
8057 type = nonstandard_boolean_type_cache[precision];
8058 if (type)
8059 return type;
8060 }
8061
8062 type = make_node (BOOLEAN_TYPE);
8063 TYPE_PRECISION (type) = precision;
8064 fixup_unsigned_type (type);
8065
8066 if (precision <= MAX_BOOL_CACHED_PREC)
8067 nonstandard_boolean_type_cache[precision] = type;
8068
8069 return type;
8070 }
8071
8072 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8073 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8074 is true, reuse such a type that has already been constructed. */
8075
8076 static tree
8077 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8078 {
8079 tree itype = make_node (INTEGER_TYPE);
8080 inchash::hash hstate;
8081
8082 TREE_TYPE (itype) = type;
8083
8084 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8085 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8086
8087 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8088 SET_TYPE_MODE (itype, TYPE_MODE (type));
8089 TYPE_SIZE (itype) = TYPE_SIZE (type);
8090 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8091 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
8092 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8093
8094 if (!shared)
8095 return itype;
8096
8097 if ((TYPE_MIN_VALUE (itype)
8098 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8099 || (TYPE_MAX_VALUE (itype)
8100 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8101 {
8102 /* Since we cannot reliably merge this type, we need to compare it using
8103 structural equality checks. */
8104 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8105 return itype;
8106 }
8107
8108 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8109 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8110 hstate.merge_hash (TYPE_HASH (type));
8111 itype = type_hash_canon (hstate.end (), itype);
8112
8113 return itype;
8114 }
8115
8116 /* Wrapper around build_range_type_1 with SHARED set to true. */
8117
8118 tree
8119 build_range_type (tree type, tree lowval, tree highval)
8120 {
8121 return build_range_type_1 (type, lowval, highval, true);
8122 }
8123
8124 /* Wrapper around build_range_type_1 with SHARED set to false. */
8125
8126 tree
8127 build_nonshared_range_type (tree type, tree lowval, tree highval)
8128 {
8129 return build_range_type_1 (type, lowval, highval, false);
8130 }
8131
8132 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8133 MAXVAL should be the maximum value in the domain
8134 (one less than the length of the array).
8135
8136 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8137 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8138 The limit exists because the result is a signed type and we don't handle
8139 sizes that use more than one HOST_WIDE_INT. */
8140
8141 tree
8142 build_index_type (tree maxval)
8143 {
8144 return build_range_type (sizetype, size_zero_node, maxval);
8145 }
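/* As an illustration, the TYPE_DOMAIN of a 10-element array would be built
   with

     tree domain = build_index_type (size_int (9));

   i.e. the range [0, 9] in sizetype.  */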
8146
8147 /* Return true if the debug information for TYPE, a subtype, should be emitted
8148 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8149 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8150 debug info and doesn't reflect the source code. */
8151
8152 bool
8153 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8154 {
8155 tree base_type = TREE_TYPE (type), low, high;
8156
8157 /* Subrange types have a base type which is an integral type. */
8158 if (!INTEGRAL_TYPE_P (base_type))
8159 return false;
8160
8161 /* Get the real bounds of the subtype. */
8162 if (lang_hooks.types.get_subrange_bounds)
8163 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8164 else
8165 {
8166 low = TYPE_MIN_VALUE (type);
8167 high = TYPE_MAX_VALUE (type);
8168 }
8169
8170 /* If the type and its base type have the same representation and the same
8171 name, then the type is not a subrange but a copy of the base type. */
8172 if ((TREE_CODE (base_type) == INTEGER_TYPE
8173 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8174 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8175 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8176 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8177 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8178 return false;
8179
8180 if (lowval)
8181 *lowval = low;
8182 if (highval)
8183 *highval = high;
8184 return true;
8185 }
8186
8187 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8188 and number of elements specified by the range of values of INDEX_TYPE.
8189 If SHARED is true, reuse such a type that has already been constructed. */
8190
8191 static tree
8192 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8193 {
8194 tree t;
8195
8196 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8197 {
8198 error ("arrays of functions are not meaningful");
8199 elt_type = integer_type_node;
8200 }
8201
8202 t = make_node (ARRAY_TYPE);
8203 TREE_TYPE (t) = elt_type;
8204 TYPE_DOMAIN (t) = index_type;
8205 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8206 layout_type (t);
8207
8208 /* If the element type is incomplete at this point we get marked for
8209 structural equality. Do not record these types in the canonical
8210 type hashtable. */
8211 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8212 return t;
8213
8214 if (shared)
8215 {
8216 inchash::hash hstate;
8217 hstate.add_object (TYPE_HASH (elt_type));
8218 if (index_type)
8219 hstate.add_object (TYPE_HASH (index_type));
8220 t = type_hash_canon (hstate.end (), t);
8221 }
8222
8223 if (TYPE_CANONICAL (t) == t)
8224 {
8225 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8226 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8227 SET_TYPE_STRUCTURAL_EQUALITY (t);
8228 else if (TYPE_CANONICAL (elt_type) != elt_type
8229 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8230 TYPE_CANONICAL (t)
8231 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8232 index_type
8233 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8234 shared);
8235 }
8236
8237 return t;
8238 }
8239
8240 /* Wrapper around build_array_type_1 with SHARED set to true. */
8241
8242 tree
8243 build_array_type (tree elt_type, tree index_type)
8244 {
8245 return build_array_type_1 (elt_type, index_type, true);
8246 }
8247
8248 /* Wrapper around build_array_type_1 with SHARED set to false. */
8249
8250 tree
8251 build_nonshared_array_type (tree elt_type, tree index_type)
8252 {
8253 return build_array_type_1 (elt_type, index_type, false);
8254 }
8255
8256 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8257 sizetype. */
8258
8259 tree
8260 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8261 {
8262 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8263 }
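/* Illustrative example: the type "int[10]" can be obtained either as

     build_array_type (integer_type_node, build_index_type (size_int (9)));

   or, equivalently, with the convenience wrapper

     build_array_type_nelts (integer_type_node, 10);  */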
8264
8265 /* Recursively examines the array elements of TYPE, until a non-array
8266 element type is found. */
8267
8268 tree
8269 strip_array_types (tree type)
8270 {
8271 while (TREE_CODE (type) == ARRAY_TYPE)
8272 type = TREE_TYPE (type);
8273
8274 return type;
8275 }
8276
8277 /* Computes the canonical argument types from the argument type list
8278 ARGTYPES.
8279
8280 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8281 on entry to this function, or if any of the ARGTYPES are
8282 structural.
8283
8284 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8285 true on entry to this function, or if any of the ARGTYPES are
8286 non-canonical.
8287
8288 Returns a canonical argument list, which may be ARGTYPES when the
8289 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8290 true) or would not differ from ARGTYPES. */
8291
8292 static tree
8293 maybe_canonicalize_argtypes (tree argtypes,
8294 bool *any_structural_p,
8295 bool *any_noncanonical_p)
8296 {
8297 tree arg;
8298 bool any_noncanonical_argtypes_p = false;
8299
8300 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8301 {
8302 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8303 /* Fail gracefully by stating that the type is structural. */
8304 *any_structural_p = true;
8305 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8306 *any_structural_p = true;
8307 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8308 || TREE_PURPOSE (arg))
8309 /* If the argument has a default argument, we consider it
8310 non-canonical even though the type itself is canonical.
8311 That way, different variants of function and method types
8312 with default arguments will all point to the variant with
8313 no defaults as their canonical type. */
8314 any_noncanonical_argtypes_p = true;
8315 }
8316
8317 if (*any_structural_p)
8318 return argtypes;
8319
8320 if (any_noncanonical_argtypes_p)
8321 {
8322 /* Build the canonical list of argument types. */
8323 tree canon_argtypes = NULL_TREE;
8324 bool is_void = false;
8325
8326 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8327 {
8328 if (arg == void_list_node)
8329 is_void = true;
8330 else
8331 canon_argtypes = tree_cons (NULL_TREE,
8332 TYPE_CANONICAL (TREE_VALUE (arg)),
8333 canon_argtypes);
8334 }
8335
8336 canon_argtypes = nreverse (canon_argtypes);
8337 if (is_void)
8338 canon_argtypes = chainon (canon_argtypes, void_list_node);
8339
8340 /* There is a non-canonical type. */
8341 *any_noncanonical_p = true;
8342 return canon_argtypes;
8343 }
8344
8345 /* The canonical argument types are the same as ARGTYPES. */
8346 return argtypes;
8347 }
8348
8349 /* Construct, lay out and return
8350 the type of functions returning type VALUE_TYPE
8351 given arguments of types ARG_TYPES.
8352 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8353 are data type nodes for the arguments of the function.
8354 If such a type has already been constructed, reuse it. */
8355
8356 tree
8357 build_function_type (tree value_type, tree arg_types)
8358 {
8359 tree t;
8360 inchash::hash hstate;
8361 bool any_structural_p, any_noncanonical_p;
8362 tree canon_argtypes;
8363
8364 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8365 {
8366 error ("function return type cannot be function");
8367 value_type = integer_type_node;
8368 }
8369
8370 /* Make a node of the sort we want. */
8371 t = make_node (FUNCTION_TYPE);
8372 TREE_TYPE (t) = value_type;
8373 TYPE_ARG_TYPES (t) = arg_types;
8374
8375 /* If we already have such a type, use the old one. */
8376 hstate.add_object (TYPE_HASH (value_type));
8377 type_hash_list (arg_types, hstate);
8378 t = type_hash_canon (hstate.end (), t);
8379
8380 /* Set up the canonical type. */
8381 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8382 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8383 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8384 &any_structural_p,
8385 &any_noncanonical_p);
8386 if (any_structural_p)
8387 SET_TYPE_STRUCTURAL_EQUALITY (t);
8388 else if (any_noncanonical_p)
8389 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8390 canon_argtypes);
8391
8392 if (!COMPLETE_TYPE_P (t))
8393 layout_type (t);
8394 return t;
8395 }
8396
8397 /* Build a function type. The RETURN_TYPE is the type returned by the
8398 function. If VAARGS is set, no void_type_node is appended to the
8399 list. ARGP must always be terminated by a NULL_TREE. */
8400
8401 static tree
8402 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8403 {
8404 tree t, args, last;
8405
8406 t = va_arg (argp, tree);
8407 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8408 args = tree_cons (NULL_TREE, t, args);
8409
8410 if (vaargs)
8411 {
8412 last = args;
8413 if (args != NULL_TREE)
8414 args = nreverse (args);
8415 gcc_assert (last != void_list_node);
8416 }
8417 else if (args == NULL_TREE)
8418 args = void_list_node;
8419 else
8420 {
8421 last = args;
8422 args = nreverse (args);
8423 TREE_CHAIN (last) = void_list_node;
8424 }
8425 args = build_function_type (return_type, args);
8426
8427 return args;
8428 }
8429
8430 /* Build a function type. The RETURN_TYPE is the type returned by the
8431 function. If additional arguments are provided, they are
8432 additional argument types. The list of argument types must always
8433 be terminated by NULL_TREE. */
8434
8435 tree
8436 build_function_type_list (tree return_type, ...)
8437 {
8438 tree args;
8439 va_list p;
8440
8441 va_start (p, return_type);
8442 args = build_function_type_list_1 (false, return_type, p);
8443 va_end (p);
8444 return args;
8445 }
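/* For example (illustrative only), the type of "int f (double, double)"
   would typically be built as

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node,
                                             double_type_node,
                                             NULL_TREE);

   The terminating NULL_TREE is mandatory; void_list_node is appended
   automatically in the non-varargs case.  */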
8446
8447 /* Build a variable argument function type. The RETURN_TYPE is the
8448 type returned by the function. If additional arguments are provided,
8449 they are additional argument types. The list of argument types must
8450 always be terminated by NULL_TREE. */
8451
8452 tree
8453 build_varargs_function_type_list (tree return_type, ...)
8454 {
8455 tree args;
8456 va_list p;
8457
8458 va_start (p, return_type);
8459 args = build_function_type_list_1 (true, return_type, p);
8460 va_end (p);
8461
8462 return args;
8463 }
8464
8465 /* Build a function type. RETURN_TYPE is the type returned by the
8466 function; VAARGS indicates whether the function takes varargs. The
8467 function takes N named arguments, the types of which are provided in
8468 ARG_TYPES. */
8469
8470 static tree
8471 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8472 tree *arg_types)
8473 {
8474 int i;
8475 tree t = vaargs ? NULL_TREE : void_list_node;
8476
8477 for (i = n - 1; i >= 0; i--)
8478 t = tree_cons (NULL_TREE, arg_types[i], t);
8479
8480 return build_function_type (return_type, t);
8481 }
8482
8483 /* Build a function type. RETURN_TYPE is the type returned by the
8484 function. The function takes N named arguments, the types of which
8485 are provided in ARG_TYPES. */
8486
8487 tree
8488 build_function_type_array (tree return_type, int n, tree *arg_types)
8489 {
8490 return build_function_type_array_1 (false, return_type, n, arg_types);
8491 }
8492
8493 /* Build a variable argument function type. RETURN_TYPE is the type
8494 returned by the function. The function takes N named arguments, the
8495 types of which are provided in ARG_TYPES. */
8496
8497 tree
8498 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8499 {
8500 return build_function_type_array_1 (true, return_type, n, arg_types);
8501 }
8502
8503 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8504 and ARGTYPES (a TREE_LIST) are the return type and argument types
8505 for the method. An implicit additional parameter (of type
8506 pointer-to-BASETYPE) is added to the ARGTYPES. */
8507
8508 tree
8509 build_method_type_directly (tree basetype,
8510 tree rettype,
8511 tree argtypes)
8512 {
8513 tree t;
8514 tree ptype;
8515 inchash::hash hstate;
8516 bool any_structural_p, any_noncanonical_p;
8517 tree canon_argtypes;
8518
8519 /* Make a node of the sort we want. */
8520 t = make_node (METHOD_TYPE);
8521
8522 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8523 TREE_TYPE (t) = rettype;
8524 ptype = build_pointer_type (basetype);
8525
8526 /* The actual arglist for this function includes a "hidden" argument
8527 which is "this". Put it into the list of argument types. */
8528 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8529 TYPE_ARG_TYPES (t) = argtypes;
8530
8531 /* If we already have such a type, use the old one. */
8532 hstate.add_object (TYPE_HASH (basetype));
8533 hstate.add_object (TYPE_HASH (rettype));
8534 type_hash_list (argtypes, hstate);
8535 t = type_hash_canon (hstate.end (), t);
8536
8537 /* Set up the canonical type. */
8538 any_structural_p
8539 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8540 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8541 any_noncanonical_p
8542 = (TYPE_CANONICAL (basetype) != basetype
8543 || TYPE_CANONICAL (rettype) != rettype);
8544 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8545 &any_structural_p,
8546 &any_noncanonical_p);
8547 if (any_structural_p)
8548 SET_TYPE_STRUCTURAL_EQUALITY (t);
8549 else if (any_noncanonical_p)
8550 TYPE_CANONICAL (t)
8551 = build_method_type_directly (TYPE_CANONICAL (basetype),
8552 TYPE_CANONICAL (rettype),
8553 canon_argtypes);
8554 if (!COMPLETE_TYPE_P (t))
8555 layout_type (t);
8556
8557 return t;
8558 }
8559
8560 /* Construct, lay out and return the type of methods belonging to class
8561 BASETYPE and whose arguments and values are described by TYPE.
8562 If that type exists already, reuse it.
8563 TYPE must be a FUNCTION_TYPE node. */
8564
8565 tree
8566 build_method_type (tree basetype, tree type)
8567 {
8568 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8569
8570 return build_method_type_directly (basetype,
8571 TREE_TYPE (type),
8572 TYPE_ARG_TYPES (type));
8573 }
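/* Illustrative sketch: for a member function "int C::f (double)" of a
   hypothetical RECORD_TYPE c_type, a front end would typically call

     build_method_type_directly (c_type, integer_type_node,
                                 tree_cons (NULL_TREE, double_type_node,
                                            void_list_node));

   and the TYPE_ARG_TYPES of the resulting METHOD_TYPE begins with the
   implicit "pointer to C" argument added above.  */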
8574
8575 /* Construct, lay out and return the type of offsets to a value
8576 of type TYPE, within an object of type BASETYPE.
8577 If a suitable offset type exists already, reuse it. */
8578
8579 tree
8580 build_offset_type (tree basetype, tree type)
8581 {
8582 tree t;
8583 inchash::hash hstate;
8584
8585 /* Make a node of the sort we want. */
8586 t = make_node (OFFSET_TYPE);
8587
8588 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8589 TREE_TYPE (t) = type;
8590
8591 /* If we already have such a type, use the old one. */
8592 hstate.add_object (TYPE_HASH (basetype));
8593 hstate.add_object (TYPE_HASH (type));
8594 t = type_hash_canon (hstate.end (), t);
8595
8596 if (!COMPLETE_TYPE_P (t))
8597 layout_type (t);
8598
8599 if (TYPE_CANONICAL (t) == t)
8600 {
8601 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8602 || TYPE_STRUCTURAL_EQUALITY_P (type))
8603 SET_TYPE_STRUCTURAL_EQUALITY (t);
8604 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8605 || TYPE_CANONICAL (type) != type)
8606 TYPE_CANONICAL (t)
8607 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8608 TYPE_CANONICAL (type));
8609 }
8610
8611 return t;
8612 }
8613
8614 /* Create a complex type whose components are COMPONENT_TYPE. */
8615
8616 tree
8617 build_complex_type (tree component_type)
8618 {
8619 tree t;
8620 inchash::hash hstate;
8621
8622 gcc_assert (INTEGRAL_TYPE_P (component_type)
8623 || SCALAR_FLOAT_TYPE_P (component_type)
8624 || FIXED_POINT_TYPE_P (component_type));
8625
8626 /* Make a node of the sort we want. */
8627 t = make_node (COMPLEX_TYPE);
8628
8629 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8630
8631 /* If we already have such a type, use the old one. */
8632 hstate.add_object (TYPE_HASH (component_type));
8633 t = type_hash_canon (hstate.end (), t);
8634
8635 if (!COMPLETE_TYPE_P (t))
8636 layout_type (t);
8637
8638 if (TYPE_CANONICAL (t) == t)
8639 {
8640 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8641 SET_TYPE_STRUCTURAL_EQUALITY (t);
8642 else if (TYPE_CANONICAL (component_type) != component_type)
8643 TYPE_CANONICAL (t)
8644 = build_complex_type (TYPE_CANONICAL (component_type));
8645 }
8646
8647 /* We need to create a name, since complex is a fundamental type. */
8648 if (! TYPE_NAME (t))
8649 {
8650 const char *name;
8651 if (component_type == char_type_node)
8652 name = "complex char";
8653 else if (component_type == signed_char_type_node)
8654 name = "complex signed char";
8655 else if (component_type == unsigned_char_type_node)
8656 name = "complex unsigned char";
8657 else if (component_type == short_integer_type_node)
8658 name = "complex short int";
8659 else if (component_type == short_unsigned_type_node)
8660 name = "complex short unsigned int";
8661 else if (component_type == integer_type_node)
8662 name = "complex int";
8663 else if (component_type == unsigned_type_node)
8664 name = "complex unsigned int";
8665 else if (component_type == long_integer_type_node)
8666 name = "complex long int";
8667 else if (component_type == long_unsigned_type_node)
8668 name = "complex long unsigned int";
8669 else if (component_type == long_long_integer_type_node)
8670 name = "complex long long int";
8671 else if (component_type == long_long_unsigned_type_node)
8672 name = "complex long long unsigned int";
8673 else
8674 name = 0;
8675
8676 if (name != 0)
8677 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8678 get_identifier (name), t);
8679 }
8680
8681 return build_qualified_type (t, TYPE_QUALS (component_type));
8682 }
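/* For instance (illustrative only),

     tree c_double = build_complex_type (double_type_node);

   yields the type "complex double"; the node is shared through the type
   hash table, and the component type's qualifiers are propagated to the
   result via build_qualified_type.  */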
8683
8684 /* If TYPE is a real or complex floating-point type and the target
8685 does not directly support arithmetic on TYPE then return the wider
8686 type to be used for arithmetic on TYPE. Otherwise, return
8687 NULL_TREE. */
8688
8689 tree
8690 excess_precision_type (tree type)
8691 {
8692 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8693 {
8694 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8695 switch (TREE_CODE (type))
8696 {
8697 case REAL_TYPE:
8698 switch (flt_eval_method)
8699 {
8700 case 1:
8701 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8702 return double_type_node;
8703 break;
8704 case 2:
8705 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8706 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8707 return long_double_type_node;
8708 break;
8709 default:
8710 gcc_unreachable ();
8711 }
8712 break;
8713 case COMPLEX_TYPE:
8714 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8715 return NULL_TREE;
8716 switch (flt_eval_method)
8717 {
8718 case 1:
8719 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8720 return complex_double_type_node;
8721 break;
8722 case 2:
8723 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8724 || (TYPE_MODE (TREE_TYPE (type))
8725 == TYPE_MODE (double_type_node)))
8726 return complex_long_double_type_node;
8727 break;
8728 default:
8729 gcc_unreachable ();
8730 }
8731 break;
8732 default:
8733 break;
8734 }
8735 }
8736 return NULL_TREE;
8737 }
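/* For example (illustrative): on a target whose TARGET_FLT_EVAL_METHOD is 2
   (e.g. x87 floating point), with -fexcess-precision=standard in effect,
   excess_precision_type (float_type_node) returns long_double_type_node,
   indicating that float arithmetic should be carried out in long double.  */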
8738 \f
8739 /* Return OP, stripped of any conversions to wider types as much as is safe.
8740 Converting the value back to OP's type makes a value equivalent to OP.
8741
8742 If FOR_TYPE is nonzero, we return a value which, if converted to
8743 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8744
8745 OP must have integer, real or enumeral type. Pointers are not allowed!
8746
8747 There are some cases where the obvious value we could return
8748 would regenerate to OP if converted to OP's type,
8749 but would not extend like OP to wider types.
8750 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8751 For example, if OP is (unsigned short)(signed char)-1,
8752 we avoid returning (signed char)-1 if FOR_TYPE is int,
8753 even though extending that to an unsigned short would regenerate OP,
8754 since the result of extending (signed char)-1 to (int)
8755 is different from (int) OP. */
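/* Concretely (illustrative numbers): OP above has the value 65535, so
   (int) OP is 65535, whereas extending (signed char)-1 to int yields -1;
   hence the narrower operand must not be returned when FOR_TYPE is int.  */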
8756
8757 tree
8758 get_unwidened (tree op, tree for_type)
8759 {
8760 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8761 tree type = TREE_TYPE (op);
8762 unsigned final_prec
8763 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8764 int uns
8765 = (for_type != 0 && for_type != type
8766 && final_prec > TYPE_PRECISION (type)
8767 && TYPE_UNSIGNED (type));
8768 tree win = op;
8769
8770 while (CONVERT_EXPR_P (op))
8771 {
8772 int bitschange;
8773
8774 /* TYPE_PRECISION on vector types has different meaning
8775 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8776 so avoid them here. */
8777 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8778 break;
8779
8780 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8781 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8782
8783 /* Truncations are many-one so cannot be removed,
8784 unless we are later going to truncate down even further. */
8785 if (bitschange < 0
8786 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8787 break;
8788
8789 /* See what's inside this conversion. If we decide to strip it,
8790 we will set WIN. */
8791 op = TREE_OPERAND (op, 0);
8792
8793 /* If we have not stripped any zero-extensions (uns is 0),
8794 we can strip any kind of extension.
8795 If we have previously stripped a zero-extension,
8796 only zero-extensions can safely be stripped.
8797 Any extension can be stripped if the bits it would produce
8798 are all going to be discarded later by truncating to FOR_TYPE. */
8799
8800 if (bitschange > 0)
8801 {
8802 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8803 win = op;
8804 /* TYPE_UNSIGNED says whether this is a zero-extension.
8805 Let's avoid computing it if it does not affect WIN
8806 and if UNS will not be needed again. */
8807 if ((uns
8808 || CONVERT_EXPR_P (op))
8809 && TYPE_UNSIGNED (TREE_TYPE (op)))
8810 {
8811 uns = 1;
8812 win = op;
8813 }
8814 }
8815 }
8816
8817 /* If we finally reach a constant see if it fits in for_type and
8818 in that case convert it. */
8819 if (for_type
8820 && TREE_CODE (win) == INTEGER_CST
8821 && TREE_TYPE (win) != for_type
8822 && int_fits_type_p (win, for_type))
8823 win = fold_convert (for_type, win);
8824
8825 return win;
8826 }
8827 \f
8828 /* Return OP or a simpler expression for a narrower value
8829 which can be sign-extended or zero-extended to give back OP.
8830 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8831 or 0 if the value should be sign-extended. */
8832
8833 tree
8834 get_narrower (tree op, int *unsignedp_ptr)
8835 {
8836 int uns = 0;
8837 int first = 1;
8838 tree win = op;
8839 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8840
8841 while (TREE_CODE (op) == NOP_EXPR)
8842 {
8843 int bitschange
8844 = (TYPE_PRECISION (TREE_TYPE (op))
8845 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8846
8847 /* Truncations are many-one so cannot be removed. */
8848 if (bitschange < 0)
8849 break;
8850
8851 /* See what's inside this conversion. If we decide to strip it,
8852 we will set WIN. */
8853
8854 if (bitschange > 0)
8855 {
8856 op = TREE_OPERAND (op, 0);
8857 /* An extension: the outermost one can be stripped,
8858 but remember whether it is zero or sign extension. */
8859 if (first)
8860 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8861 /* Otherwise, if a sign extension has been stripped,
8862 only sign extensions can now be stripped;
8863 if a zero extension has been stripped, only zero-extensions. */
8864 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8865 break;
8866 first = 0;
8867 }
8868 else /* bitschange == 0 */
8869 {
8870 /* A change in nominal type can always be stripped, but we must
8871 preserve the unsignedness. */
8872 if (first)
8873 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8874 first = 0;
8875 op = TREE_OPERAND (op, 0);
8876 /* Keep trying to narrow, but don't assign op to win if it
8877 would turn an integral type into something else. */
8878 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8879 continue;
8880 }
8881
8882 win = op;
8883 }
8884
8885 if (TREE_CODE (op) == COMPONENT_REF
8886 /* Since type_for_size always gives an integer type. */
8887 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8888 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8889 /* Ensure field is laid out already. */
8890 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8891 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8892 {
8893 unsigned HOST_WIDE_INT innerprec
8894 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8895 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8896 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8897 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8898
8899 /* We can get this structure field in a narrower type that fits it,
8900 but the resulting extension to its nominal type (a fullword type)
8901 must satisfy the same conditions as for other extensions.
8902
8903 Do this only for fields that are aligned (not bit-fields),
8904 because when bit-field insns will be used there is no
8905 advantage in doing this. */
8906
8907 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8908 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8909 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8910 && type != 0)
8911 {
8912 if (first)
8913 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8914 win = fold_convert (type, op);
8915 }
8916 }
8917
8918 *unsignedp_ptr = uns;
8919 return win;
8920 }
8921 \f
8922 /* Returns true if integer constant C has a value that is permissible
8923 for type TYPE (an INTEGER_TYPE). */
8924
8925 bool
8926 int_fits_type_p (const_tree c, const_tree type)
8927 {
8928 tree type_low_bound, type_high_bound;
8929 bool ok_for_low_bound, ok_for_high_bound;
8930 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8931
8932 retry:
8933 type_low_bound = TYPE_MIN_VALUE (type);
8934 type_high_bound = TYPE_MAX_VALUE (type);
8935
8936 /* If at least one bound of the type is a constant integer, we can check
8937 ourselves and maybe make a decision. If no such decision is possible, but
8938 this type is a subtype, try checking against that. Otherwise, use
8939 fits_to_tree_p, which checks against the precision.
8940
8941 Compute the status for each possibly constant bound, and return if we see
8942 one does not match. Use ok_for_xxx_bound for this purpose: it is true
8943 when the corresponding bound is a constant that C is known to satisfy,
8944 and false when that bound is not a constant and so could not be checked. */
8945
8946 /* Check if c >= type_low_bound. */
8947 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8948 {
8949 if (tree_int_cst_lt (c, type_low_bound))
8950 return false;
8951 ok_for_low_bound = true;
8952 }
8953 else
8954 ok_for_low_bound = false;
8955
8956 /* Check if c <= type_high_bound. */
8957 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8958 {
8959 if (tree_int_cst_lt (type_high_bound, c))
8960 return false;
8961 ok_for_high_bound = true;
8962 }
8963 else
8964 ok_for_high_bound = false;
8965
8966 /* If the constant fits both bounds, the result is known. */
8967 if (ok_for_low_bound && ok_for_high_bound)
8968 return true;
8969
8970 /* Perform some generic filtering which may allow making a decision
8971 even if the bounds are not constant. First, negative integers
8972 never fit in unsigned types. */
8973 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8974 return false;
8975
8976 /* Second, narrower types always fit in wider ones. */
8977 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8978 return true;
8979
8980 /* Third, unsigned integers with top bit set never fit signed types. */
8981 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8982 {
8983 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8984 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8985 {
8986 /* When a tree_cst is converted to a wide-int, the precision
8987 is taken from the type. However, if the precision of the
8988 mode underneath the type is smaller than that, it is
8989 possible that the value will not fit. The test below
8990 fails if any bit is set between the sign bit of the
8991 underlying mode and the top bit of the type. */
8992 if (wi::ne_p (wi::zext (c, prec - 1), c))
8993 return false;
8994 }
8995 else if (wi::neg_p (c))
8996 return false;
8997 }
8998
8999 /* If we haven't been able to decide at this point, there is nothing more we
9000 can check ourselves here. Look at the base type if we have one and it
9001 has the same precision. */
9002 if (TREE_CODE (type) == INTEGER_TYPE
9003 && TREE_TYPE (type) != 0
9004 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9005 {
9006 type = TREE_TYPE (type);
9007 goto retry;
9008 }
9009
9010 /* Or to fits_to_tree_p, if nothing else. */
9011 return wi::fits_to_tree_p (c, type);
9012 }
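/* Illustrative examples of the expected results (hypothetical constants):

     int_fits_type_p (build_int_cst (integer_type_node, 255),
                      unsigned_char_type_node)   => true
     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      unsigned_char_type_node)   => false
     int_fits_type_p (build_int_cst (integer_type_node, -1),
                      unsigned_type_node)        => false (negative value,
                                                    unsigned type)  */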
9013
9014 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9015 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9016 represented (assuming two's-complement arithmetic) within the bit
9017 precision of the type are returned instead. */
9018
9019 void
9020 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9021 {
9022 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9023 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9024 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9025 else
9026 {
9027 if (TYPE_UNSIGNED (type))
9028 mpz_set_ui (min, 0);
9029 else
9030 {
9031 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9032 wi::to_mpz (mn, min, SIGNED);
9033 }
9034 }
9035
9036 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9037 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9038 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9039 else
9040 {
9041 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9042 wi::to_mpz (mn, max, TYPE_SIGN (type));
9043 }
9044 }
9045
9046 /* Return true if VAR is an automatic variable defined in function FN. */
9047
9048 bool
9049 auto_var_in_fn_p (const_tree var, const_tree fn)
9050 {
9051 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9052 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9053 || TREE_CODE (var) == PARM_DECL)
9054 && ! TREE_STATIC (var))
9055 || TREE_CODE (var) == LABEL_DECL
9056 || TREE_CODE (var) == RESULT_DECL));
9057 }
9058
9059 /* Subprogram of following function. Called by walk_tree.
9060
9061 Return *TP if it is an automatic variable or parameter of the
9062 function passed in as DATA. */
9063
9064 static tree
9065 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9066 {
9067 tree fn = (tree) data;
9068
9069 if (TYPE_P (*tp))
9070 *walk_subtrees = 0;
9071
9072 else if (DECL_P (*tp)
9073 && auto_var_in_fn_p (*tp, fn))
9074 return *tp;
9075
9076 return NULL_TREE;
9077 }
9078
9079 /* Returns true if T is, contains, or refers to a type with variable
9080 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9081 arguments, but not the return type. If FN is nonzero, only return
9082 true if a modifier of the type or position of FN is a variable or
9083 parameter inside FN.
9084
9085 This concept is more general than that of C99 'variably modified types':
9086 in C99, a struct type is never variably modified because a VLA may not
9087 appear as a structure member. However, in GNU C, code like:
9088
9089 struct S { int i[f()]; };
9090
9091 is valid, and other languages may define similar constructs. */
9092
9093 bool
9094 variably_modified_type_p (tree type, tree fn)
9095 {
9096 tree t;
9097
9098 /* Test if T is either variable (if FN is zero) or an expression containing
9099 a variable in FN. If TYPE isn't gimplified, return true also if
9100 gimplify_one_sizepos would gimplify the expression into a local
9101 variable. */
9102 #define RETURN_TRUE_IF_VAR(T) \
9103 do { tree _t = (T); \
9104 if (_t != NULL_TREE \
9105 && _t != error_mark_node \
9106 && TREE_CODE (_t) != INTEGER_CST \
9107 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9108 && (!fn \
9109 || (!TYPE_SIZES_GIMPLIFIED (type) \
9110 && !is_gimple_sizepos (_t)) \
9111 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9112 return true; } while (0)
9113
9114 if (type == error_mark_node)
9115 return false;
9116
9117 /* If TYPE itself has variable size, it is variably modified. */
9118 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9119 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9120
9121 switch (TREE_CODE (type))
9122 {
9123 case POINTER_TYPE:
9124 case REFERENCE_TYPE:
9125 case VECTOR_TYPE:
9126 if (variably_modified_type_p (TREE_TYPE (type), fn))
9127 return true;
9128 break;
9129
9130 case FUNCTION_TYPE:
9131 case METHOD_TYPE:
9132 /* If TYPE is a function type, it is variably modified if the
9133 return type is variably modified. */
9134 if (variably_modified_type_p (TREE_TYPE (type), fn))
9135 return true;
9136 break;
9137
9138 case INTEGER_TYPE:
9139 case REAL_TYPE:
9140 case FIXED_POINT_TYPE:
9141 case ENUMERAL_TYPE:
9142 case BOOLEAN_TYPE:
9143 /* Scalar types are variably modified if their end points
9144 aren't constant. */
9145 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9146 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9147 break;
9148
9149 case RECORD_TYPE:
9150 case UNION_TYPE:
9151 case QUAL_UNION_TYPE:
9152 /* We can't see if any of the fields are variably-modified by the
9153 definition we normally use, since that would produce infinite
9154 recursion via pointers. */
9155 /* This is variably modified if some field's type is. */
9156 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9157 if (TREE_CODE (t) == FIELD_DECL)
9158 {
9159 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9160 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9161 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9162
9163 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9164 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9165 }
9166 break;
9167
9168 case ARRAY_TYPE:
9169 /* Do not call ourselves to avoid infinite recursion. This is
9170 variably modified if the element type is. */
9171 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9172 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9173 break;
9174
9175 default:
9176 break;
9177 }
9178
9179 /* The current language may have other cases to check, but in general,
9180 all other types are not variably modified. */
9181 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9182
9183 #undef RETURN_TRUE_IF_VAR
9184 }
9185
9186 /* Given a DECL or TYPE, return the scope in which it was declared, or
9187 NULL_TREE if there is no containing scope. */
9188
9189 tree
9190 get_containing_scope (const_tree t)
9191 {
9192 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9193 }
9194
9195 /* Return the innermost context enclosing DECL that is
9196 a FUNCTION_DECL, or zero if none. */
9197
9198 tree
9199 decl_function_context (const_tree decl)
9200 {
9201 tree context;
9202
9203 if (TREE_CODE (decl) == ERROR_MARK)
9204 return 0;
9205
9206 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9207 where we look up the function at runtime. Such functions always take
9208 a first argument of type 'pointer to real context'.
9209
9210 C++ should really be fixed to use DECL_CONTEXT for the real context,
9211 and use something else for the "virtual context". */
9212 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9213 context
9214 = TYPE_MAIN_VARIANT
9215 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9216 else
9217 context = DECL_CONTEXT (decl);
9218
9219 while (context && TREE_CODE (context) != FUNCTION_DECL)
9220 {
9221 if (TREE_CODE (context) == BLOCK)
9222 context = BLOCK_SUPERCONTEXT (context);
9223 else
9224 context = get_containing_scope (context);
9225 }
9226
9227 return context;
9228 }
9229
9230 /* Return the innermost context enclosing DECL that is
9231 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9232 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9233
9234 tree
9235 decl_type_context (const_tree decl)
9236 {
9237 tree context = DECL_CONTEXT (decl);
9238
9239 while (context)
9240 switch (TREE_CODE (context))
9241 {
9242 case NAMESPACE_DECL:
9243 case TRANSLATION_UNIT_DECL:
9244 return NULL_TREE;
9245
9246 case RECORD_TYPE:
9247 case UNION_TYPE:
9248 case QUAL_UNION_TYPE:
9249 return context;
9250
9251 case TYPE_DECL:
9252 case FUNCTION_DECL:
9253 context = DECL_CONTEXT (context);
9254 break;
9255
9256 case BLOCK:
9257 context = BLOCK_SUPERCONTEXT (context);
9258 break;
9259
9260 default:
9261 gcc_unreachable ();
9262 }
9263
9264 return NULL_TREE;
9265 }
9266
9267 /* CALL is a CALL_EXPR. Return the declaration for the function
9268 called, or NULL_TREE if the called function cannot be
9269 determined. */
9270
9271 tree
9272 get_callee_fndecl (const_tree call)
9273 {
9274 tree addr;
9275
9276 if (call == error_mark_node)
9277 return error_mark_node;
9278
9279 /* It's invalid to call this function with anything but a
9280 CALL_EXPR. */
9281 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9282
9283 /* The first operand to the CALL is the address of the function
9284 called. */
9285 addr = CALL_EXPR_FN (call);
9286
9287 /* If there is no function, return early. */
9288 if (addr == NULL_TREE)
9289 return NULL_TREE;
9290
9291 STRIP_NOPS (addr);
9292
9293 /* If this is a readonly function pointer, extract its initial value. */
9294 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9295 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9296 && DECL_INITIAL (addr))
9297 addr = DECL_INITIAL (addr);
9298
9299 /* If the address is just `&f' for some function `f', then we know
9300 that `f' is being called. */
9301 if (TREE_CODE (addr) == ADDR_EXPR
9302 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9303 return TREE_OPERAND (addr, 0);
9304
9305 /* We couldn't figure out what was being called. */
9306 return NULL_TREE;
9307 }
9308
9309 #define TREE_MEM_USAGE_SPACES 40
9310
9311 /* Print debugging information about tree nodes generated during the compile,
9312 and any language-specific information. */
9313
9314 void
9315 dump_tree_statistics (void)
9316 {
9317 if (GATHER_STATISTICS)
9318 {
9319 int i;
9320 int total_nodes, total_bytes;
9321 fprintf (stderr, "\nKind Nodes Bytes\n");
9322 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9323 total_nodes = total_bytes = 0;
9324 for (i = 0; i < (int) all_kinds; i++)
9325 {
9326 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9327 tree_node_counts[i], tree_node_sizes[i]);
9328 total_nodes += tree_node_counts[i];
9329 total_bytes += tree_node_sizes[i];
9330 }
9331 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9332 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9333 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9334 fprintf (stderr, "Code Nodes\n");
9335 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9336 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9337 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9338 tree_code_counts[i]);
9339 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9340 fprintf (stderr, "\n");
9341 ssanames_print_statistics ();
9342 fprintf (stderr, "\n");
9343 phinodes_print_statistics ();
9344 fprintf (stderr, "\n");
9345 }
9346 else
9347 fprintf (stderr, "(No per-node statistics)\n");
9348
9349 print_type_hash_statistics ();
9350 print_debug_expr_statistics ();
9351 print_value_expr_statistics ();
9352 lang_hooks.print_statistics ();
9353 }
9354 \f
9355 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9356
9357 /* Generate a crc32 of a byte. */
9358
9359 static unsigned
9360 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9361 {
9362 unsigned ix;
9363
9364 for (ix = bits; ix--; value <<= 1)
9365 {
9366 unsigned feedback;
9367
9368 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9369 chksum <<= 1;
9370 chksum ^= feedback;
9371 }
9372 return chksum;
9373 }
9374
9375 /* Generate a crc32 of a 32-bit unsigned. */
9376
9377 unsigned
9378 crc32_unsigned (unsigned chksum, unsigned value)
9379 {
9380 return crc32_unsigned_bits (chksum, value, 32);
9381 }
9382
9383 /* Generate a crc32 of a byte. */
9384
9385 unsigned
9386 crc32_byte (unsigned chksum, char byte)
9387 {
9388 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9389 }
9390
9391 /* Generate a crc32 of a string. */
9392
9393 unsigned
9394 crc32_string (unsigned chksum, const char *string)
9395 {
9396 do
9397 {
9398 chksum = crc32_byte (chksum, *string);
9399 }
9400 while (*string++);
9401 return chksum;
9402 }
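/* Note that the do-while loop above also feeds the terminating NUL byte
   into the checksum; for example (illustrative), crc32_string (c, "") is
   equivalent to crc32_byte (c, 0) rather than being the identity on C.  */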
9403
9404 /* P is a string that will be used in a symbol. Mask out any characters
9405 that are not valid in that context. */
9406
9407 void
9408 clean_symbol_name (char *p)
9409 {
9410 for (; *p; p++)
9411 if (! (ISALNUM (*p)
9412 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9413 || *p == '$'
9414 #endif
9415 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9416 || *p == '.'
9417 #endif
9418 ))
9419 *p = '_';
9420 }
9421
9422 /* For anonymous aggregate types, we need some sort of name to
9423 hold on to. In practice, this should not appear, but it should
9424 not be harmful if it does. */
9425 bool
9426 anon_aggrname_p (const_tree id_node)
9427 {
9428 #ifndef NO_DOT_IN_LABEL
9429 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9430 && IDENTIFIER_POINTER (id_node)[1] == '_');
9431 #else /* NO_DOT_IN_LABEL */
9432 #ifndef NO_DOLLAR_IN_LABEL
9433 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9434 && IDENTIFIER_POINTER (id_node)[1] == '_');
9435 #else /* NO_DOLLAR_IN_LABEL */
9436 #define ANON_AGGRNAME_PREFIX "__anon_"
9437 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9438 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9439 #endif /* NO_DOLLAR_IN_LABEL */
9440 #endif /* NO_DOT_IN_LABEL */
9441 }
9442
9443 /* Return a format for an anonymous aggregate name. */
9444 const char *
9445 anon_aggrname_format ()
9446 {
9447 #ifndef NO_DOT_IN_LABEL
9448 return "._%d";
9449 #else /* NO_DOT_IN_LABEL */
9450 #ifndef NO_DOLLAR_IN_LABEL
9451 return "$_%d";
9452 #else /* NO_DOLLAR_IN_LABEL */
9453 return "__anon_%d";
9454 #endif /* NO_DOLLAR_IN_LABEL */
9455 #endif /* NO_DOT_IN_LABEL */
9456 }
9457
9458 /* Generate a name for a special-purpose function.
9459 The generated name may need to be unique across the whole link.
9460 Changes to this function may also require corresponding changes to
9461 xstrdup_mask_random.
9462 TYPE is some string to identify the purpose of this function to the
9463 linker or collect2; it must start with an uppercase letter,
9464 one of:
9465 I - for constructors
9466 D - for destructors
9467 N - for C++ anonymous namespaces
9468 F - for DWARF unwind frame information. */
9469
9470 tree
9471 get_file_function_name (const char *type)
9472 {
9473 char *buf;
9474 const char *p;
9475 char *q;
9476
9477 /* If we already have a name we know to be unique, just use that. */
9478 if (first_global_object_name)
9479 p = q = ASTRDUP (first_global_object_name);
9480 /* If the target is handling the constructors/destructors, they
9481 will be local to this file and the name is only necessary for
9482 debugging purposes.
9483 We also assign sub_I and sub_D suffixes to constructors called from
9484 the global static constructors. These are always local. */
9485 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9486 || (strncmp (type, "sub_", 4) == 0
9487 && (type[4] == 'I' || type[4] == 'D')))
9488 {
9489 const char *file = main_input_filename;
9490 if (! file)
9491 file = LOCATION_FILE (input_location);
9492 /* Just use the file's basename, because the full pathname
9493 might be quite long. */
9494 p = q = ASTRDUP (lbasename (file));
9495 }
9496 else
9497 {
9498 /* Otherwise, the name must be unique across the entire link.
9499 We don't have anything that we know to be unique to this translation
9500 unit, so use what we do have and throw in some randomness. */
9501 unsigned len;
9502 const char *name = weak_global_object_name;
9503 const char *file = main_input_filename;
9504
9505 if (! name)
9506 name = "";
9507 if (! file)
9508 file = LOCATION_FILE (input_location);
9509
9510 len = strlen (file);
9511 q = (char *) alloca (9 + 17 + len + 1);
9512 memcpy (q, file, len + 1);
9513
9514 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9515 crc32_string (0, name), get_random_seed (false));
9516
9517 p = q;
9518 }
9519
9520 clean_symbol_name (q);
9521 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9522 + strlen (type));
9523
9524 /* Set up the name of the file-level functions we may need.
9525 Use a global object (which is already required to be unique over
9526 the program) rather than the file name (which imposes extra
9527 constraints). */
9528 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9529
9530 return get_identifier (buf);
9531 }
9532 \f
9533 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9534
9535 /* Complain that the tree code of NODE does not match the expected 0
9536 terminated list of trailing codes. The trailing code list can be
9537 empty, for a more vague error message. FILE, LINE, and FUNCTION
9538 are of the caller. */
9539
9540 void
9541 tree_check_failed (const_tree node, const char *file,
9542 int line, const char *function, ...)
9543 {
9544 va_list args;
9545 const char *buffer;
9546 unsigned length = 0;
9547 enum tree_code code;
9548
9549 va_start (args, function);
9550 while ((code = (enum tree_code) va_arg (args, int)))
9551 length += 4 + strlen (get_tree_code_name (code));
9552 va_end (args);
9553 if (length)
9554 {
9555 char *tmp;
9556 va_start (args, function);
9557 length += strlen ("expected ");
9558 buffer = tmp = (char *) alloca (length);
9559 length = 0;
9560 while ((code = (enum tree_code) va_arg (args, int)))
9561 {
9562 const char *prefix = length ? " or " : "expected ";
9563
9564 strcpy (tmp + length, prefix);
9565 length += strlen (prefix);
9566 strcpy (tmp + length, get_tree_code_name (code));
9567 length += strlen (get_tree_code_name (code));
9568 }
9569 va_end (args);
9570 }
9571 else
9572 buffer = "unexpected node";
9573
9574 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9575 buffer, get_tree_code_name (TREE_CODE (node)),
9576 function, trim_filename (file), line);
9577 }
9578
9579 /* Complain that the tree code of NODE matches one of the codes in the
9580 0 terminated list of disallowed trailing codes. FILE, LINE, and
9581 FUNCTION are of the caller. */
9582
9583 void
9584 tree_not_check_failed (const_tree node, const char *file,
9585 int line, const char *function, ...)
9586 {
9587 va_list args;
9588 char *buffer;
9589 unsigned length = 0;
9590 enum tree_code code;
9591
9592 va_start (args, function);
9593 while ((code = (enum tree_code) va_arg (args, int)))
9594 length += 4 + strlen (get_tree_code_name (code));
9595 va_end (args);
9596 va_start (args, function);
9597 buffer = (char *) alloca (length);
9598 length = 0;
9599 while ((code = (enum tree_code) va_arg (args, int)))
9600 {
9601 if (length)
9602 {
9603 strcpy (buffer + length, " or ");
9604 length += 4;
9605 }
9606 strcpy (buffer + length, get_tree_code_name (code));
9607 length += strlen (get_tree_code_name (code));
9608 }
9609 va_end (args);
9610
9611 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9612 buffer, get_tree_code_name (TREE_CODE (node)),
9613 function, trim_filename (file), line);
9614 }
9615
9616 /* Similar to tree_check_failed, except that we check for a class of tree
9617 code, given in CL. */
9618
9619 void
9620 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9621 const char *file, int line, const char *function)
9622 {
9623 internal_error
9624 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9625 TREE_CODE_CLASS_STRING (cl),
9626 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9627 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9628 }
9629
9630 /* Similar to tree_check_failed, except that instead of specifying a
9631 dozen codes, use the knowledge that they're all sequential. */
9632
9633 void
9634 tree_range_check_failed (const_tree node, const char *file, int line,
9635 const char *function, enum tree_code c1,
9636 enum tree_code c2)
9637 {
9638 char *buffer;
9639 unsigned length = 0;
9640 unsigned int c;
9641
9642 for (c = c1; c <= c2; ++c)
9643 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9644
9645 length += strlen ("expected ");
9646 buffer = (char *) alloca (length);
9647 length = 0;
9648
9649 for (c = c1; c <= c2; ++c)
9650 {
9651 const char *prefix = length ? " or " : "expected ";
9652
9653 strcpy (buffer + length, prefix);
9654 length += strlen (prefix);
9655 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9656 length += strlen (get_tree_code_name ((enum tree_code) c));
9657 }
9658
9659 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9660 buffer, get_tree_code_name (TREE_CODE (node)),
9661 function, trim_filename (file), line);
9662 }
9663
9664
9665 /* Similar to tree_check_failed, except that we check that a tree does
9666 not belong to the specified code class, given in CL. */
9667
9668 void
9669 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9670 const char *file, int line, const char *function)
9671 {
9672 internal_error
9673 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9674 TREE_CODE_CLASS_STRING (cl),
9675 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9676 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9677 }
9678
9679
9680 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9681
9682 void
9683 omp_clause_check_failed (const_tree node, const char *file, int line,
9684 const char *function, enum omp_clause_code code)
9685 {
9686 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9687 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9688 function, trim_filename (file), line);
9689 }
9690
9691
9692 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9693
9694 void
9695 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9696 const char *function, enum omp_clause_code c1,
9697 enum omp_clause_code c2)
9698 {
9699 char *buffer;
9700 unsigned length = 0;
9701 unsigned int c;
9702
9703 for (c = c1; c <= c2; ++c)
9704 length += 4 + strlen (omp_clause_code_name[c]);
9705
9706 length += strlen ("expected ");
9707 buffer = (char *) alloca (length);
9708 length = 0;
9709
9710 for (c = c1; c <= c2; ++c)
9711 {
9712 const char *prefix = length ? " or " : "expected ";
9713
9714 strcpy (buffer + length, prefix);
9715 length += strlen (prefix);
9716 strcpy (buffer + length, omp_clause_code_name[c]);
9717 length += strlen (omp_clause_code_name[c]);
9718 }
9719
9720 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9721 buffer, omp_clause_code_name[TREE_CODE (node)],
9722 function, trim_filename (file), line);
9723 }
9724
9725
9726 #undef DEFTREESTRUCT
9727 #define DEFTREESTRUCT(VAL, NAME) NAME,
9728
9729 static const char *ts_enum_names[] = {
9730 #include "treestruct.def"
9731 };
9732 #undef DEFTREESTRUCT
9733
9734 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9735
9736 /* Similar to tree_class_check_failed, except that we check whether
9737 the code of NODE contains the tree structure identified by EN. */
9738
9739 void
9740 tree_contains_struct_check_failed (const_tree node,
9741 const enum tree_node_structure_enum en,
9742 const char *file, int line,
9743 const char *function)
9744 {
9745 internal_error
9746 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9747 TS_ENUM_NAME (en),
9748 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9749 }
9750
9751
9752 /* Similar to above, except that the check is for the bounds of a
9753 tree_int_cst's (dynamically sized) element vector. */
9754
9755 void
9756 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9757 const char *function)
9758 {
9759 internal_error
9760 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9761 idx + 1, len, function, trim_filename (file), line);
9762 }
9763
9764 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9765 (dynamically sized) vector. */
9766
9767 void
9768 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9769 const char *function)
9770 {
9771 internal_error
9772 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9773 idx + 1, len, function, trim_filename (file), line);
9774 }
9775
9776 /* Similar to above, except that the check is for the bounds of the operand
9777 vector of an expression node EXP. */
9778
9779 void
9780 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9781 int line, const char *function)
9782 {
9783 enum tree_code code = TREE_CODE (exp);
9784 internal_error
9785 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9786 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9787 function, trim_filename (file), line);
9788 }
9789
9790 /* Similar to above, except that the check is for the number of
9791 operands of an OMP_CLAUSE node. */
9792
9793 void
9794 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9795 int line, const char *function)
9796 {
9797 internal_error
9798 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9799 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9800 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9801 trim_filename (file), line);
9802 }
9803 #endif /* ENABLE_TREE_CHECKING */
9804 \f
9805 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9806 and mapped to the machine mode MODE. Initialize its fields and build
9807 the information necessary for debugging output. */
9808
9809 static tree
9810 make_vector_type (tree innertype, int nunits, machine_mode mode)
9811 {
9812 tree t;
9813 inchash::hash hstate;
9814
9815 t = make_node (VECTOR_TYPE);
9816 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9817 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9818 SET_TYPE_MODE (t, mode);
9819
9820 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9821 SET_TYPE_STRUCTURAL_EQUALITY (t);
9822 else if ((TYPE_CANONICAL (innertype) != innertype
9823 || mode != VOIDmode)
9824 && !VECTOR_BOOLEAN_TYPE_P (t))
9825 TYPE_CANONICAL (t)
9826 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9827
9828 layout_type (t);
9829
9830 hstate.add_wide_int (VECTOR_TYPE);
9831 hstate.add_wide_int (nunits);
9832 hstate.add_wide_int (mode);
9833 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9834 t = type_hash_canon (hstate.end (), t);
9835
9836 /* We have built a main variant, based on the main variant of the
9837 inner type. Use it to build the variant we return. */
9838 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9839 && TREE_TYPE (t) != innertype)
9840 return build_type_attribute_qual_variant (t,
9841 TYPE_ATTRIBUTES (innertype),
9842 TYPE_QUALS (innertype));
9843
9844 return t;
9845 }
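/* Usage sketch: the public entry points further below wrap this routine.
   E.g. build_vector_type (float_type_node, 4) calls make_vector_type with
   VOIDmode and lets layout_type pick the vector mode (V4SFmode on targets
   that provide it), while build_vector_type_for_mode starts from a known
   machine mode and derives the unit count instead.  */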
9846
9847 static tree
9848 make_or_reuse_type (unsigned size, int unsignedp)
9849 {
9850 int i;
9851
9852 if (size == INT_TYPE_SIZE)
9853 return unsignedp ? unsigned_type_node : integer_type_node;
9854 if (size == CHAR_TYPE_SIZE)
9855 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9856 if (size == SHORT_TYPE_SIZE)
9857 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9858 if (size == LONG_TYPE_SIZE)
9859 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9860 if (size == LONG_LONG_TYPE_SIZE)
9861 return (unsignedp ? long_long_unsigned_type_node
9862 : long_long_integer_type_node);
9863
9864 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9865 if (size == int_n_data[i].bitsize
9866 && int_n_enabled_p[i])
9867 return (unsignedp ? int_n_trees[i].unsigned_type
9868 : int_n_trees[i].signed_type);
9869
9870 if (unsignedp)
9871 return make_unsigned_type (size);
9872 else
9873 return make_signed_type (size);
9874 }
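/* For instance, on a target where INT_TYPE_SIZE is 32,
   make_or_reuse_type (32, 1) simply returns unsigned_type_node instead of
   allocating a fresh 32-bit unsigned type node.  */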
9875
9876 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9877
9878 static tree
9879 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9880 {
9881 if (satp)
9882 {
9883 if (size == SHORT_FRACT_TYPE_SIZE)
9884 return unsignedp ? sat_unsigned_short_fract_type_node
9885 : sat_short_fract_type_node;
9886 if (size == FRACT_TYPE_SIZE)
9887 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9888 if (size == LONG_FRACT_TYPE_SIZE)
9889 return unsignedp ? sat_unsigned_long_fract_type_node
9890 : sat_long_fract_type_node;
9891 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9892 return unsignedp ? sat_unsigned_long_long_fract_type_node
9893 : sat_long_long_fract_type_node;
9894 }
9895 else
9896 {
9897 if (size == SHORT_FRACT_TYPE_SIZE)
9898 return unsignedp ? unsigned_short_fract_type_node
9899 : short_fract_type_node;
9900 if (size == FRACT_TYPE_SIZE)
9901 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9902 if (size == LONG_FRACT_TYPE_SIZE)
9903 return unsignedp ? unsigned_long_fract_type_node
9904 : long_fract_type_node;
9905 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9906 return unsignedp ? unsigned_long_long_fract_type_node
9907 : long_long_fract_type_node;
9908 }
9909
9910 return make_fract_type (size, unsignedp, satp);
9911 }
9912
9913 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9914
9915 static tree
9916 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9917 {
9918 if (satp)
9919 {
9920 if (size == SHORT_ACCUM_TYPE_SIZE)
9921 return unsignedp ? sat_unsigned_short_accum_type_node
9922 : sat_short_accum_type_node;
9923 if (size == ACCUM_TYPE_SIZE)
9924 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9925 if (size == LONG_ACCUM_TYPE_SIZE)
9926 return unsignedp ? sat_unsigned_long_accum_type_node
9927 : sat_long_accum_type_node;
9928 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9929 return unsignedp ? sat_unsigned_long_long_accum_type_node
9930 : sat_long_long_accum_type_node;
9931 }
9932 else
9933 {
9934 if (size == SHORT_ACCUM_TYPE_SIZE)
9935 return unsignedp ? unsigned_short_accum_type_node
9936 : short_accum_type_node;
9937 if (size == ACCUM_TYPE_SIZE)
9938 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9939 if (size == LONG_ACCUM_TYPE_SIZE)
9940 return unsignedp ? unsigned_long_accum_type_node
9941 : long_accum_type_node;
9942 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9943 return unsignedp ? unsigned_long_long_accum_type_node
9944 : long_long_accum_type_node;
9945 }
9946
9947 return make_accum_type (size, unsignedp, satp);
9948 }
9949
9950
9951 /* Create an atomic variant node for TYPE. This routine is called
9952 during initialization of data types to create the 5 basic atomic
9953 types. The generic build_variant_type function requires these to
9954 already be set up in order to function properly, so cannot be
9955 called from there. If ALIGN is non-zero, then ensure alignment is
9956 overridden to this value. */
9957
9958 static tree
9959 build_atomic_base (tree type, unsigned int align)
9960 {
9961 tree t;
9962
9963 /* Make sure it's not already registered. */
9964 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9965 return t;
9966
9967 t = build_variant_type_copy (type);
9968 set_type_quals (t, TYPE_QUAL_ATOMIC);
9969
9970 if (align)
9971 TYPE_ALIGN (t) = align;
9972
9973 return t;
9974 }
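/* Typical use, mirroring the initialization in build_common_tree_nodes
   below:
     atomicSI_type_node
       = build_atomic_base (unsigned_intSI_type_node,
                            targetm.atomic_align_for_mode (SImode));  */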
9975
9976 /* Create nodes for all integer types (and error_mark_node) using the sizes
9977 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9978 SHORT_DOUBLE specifies whether double should be of the same precision
9979 as float. */
9980
9981 void
9982 build_common_tree_nodes (bool signed_char, bool short_double)
9983 {
9984 int i;
9985
9986 error_mark_node = make_node (ERROR_MARK);
9987 TREE_TYPE (error_mark_node) = error_mark_node;
9988
9989 initialize_sizetypes ();
9990
9991 /* Define both `signed char' and `unsigned char'. */
9992 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9993 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9994 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9995 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9996
9997 /* Define `char', which is like either `signed char' or `unsigned char'
9998 but not the same as either. */
9999 char_type_node
10000 = (signed_char
10001 ? make_signed_type (CHAR_TYPE_SIZE)
10002 : make_unsigned_type (CHAR_TYPE_SIZE));
10003 TYPE_STRING_FLAG (char_type_node) = 1;
10004
10005 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10006 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10007 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10008 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10009 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10010 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10011 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10012 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10013
10014 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10015 {
10016 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10017 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10018 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10019 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10020
10021 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10022 && int_n_enabled_p[i])
10023 {
10024 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10025 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10026 }
10027 }
10028
10029 /* Define a boolean type. This type only represents boolean values but
10030 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10031 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10032 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10033 TYPE_PRECISION (boolean_type_node) = 1;
10034 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10035
10036 /* Define what type to use for size_t. */
10037 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10038 size_type_node = unsigned_type_node;
10039 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10040 size_type_node = long_unsigned_type_node;
10041 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10042 size_type_node = long_long_unsigned_type_node;
10043 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10044 size_type_node = short_unsigned_type_node;
10045 else
10046 {
10047 int i;
10048
10049 size_type_node = NULL_TREE;
10050 for (i = 0; i < NUM_INT_N_ENTS; i++)
10051 if (int_n_enabled_p[i])
10052 {
10053 char name[50];
10054 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10055
10056 if (strcmp (name, SIZE_TYPE) == 0)
10057 {
10058 size_type_node = int_n_trees[i].unsigned_type;
10059 }
10060 }
10061 if (size_type_node == NULL_TREE)
10062 gcc_unreachable ();
10063 }
10064
10065 /* Fill in the rest of the sized types. Reuse existing type nodes
10066 when possible. */
10067 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10068 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10069 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10070 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10071 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10072
10073 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10074 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10075 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10076 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10077 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10078
10079 /* Don't call build_qualified_type for atomics. That routine does
10080 special processing for atomics, and until they are initialized
10081 it's better not to make that call.
10082
10083 Check to see if there is a target override for atomic types. */
10084
10085 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10086 targetm.atomic_align_for_mode (QImode));
10087 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10088 targetm.atomic_align_for_mode (HImode));
10089 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10090 targetm.atomic_align_for_mode (SImode));
10091 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10092 targetm.atomic_align_for_mode (DImode));
10093 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10094 targetm.atomic_align_for_mode (TImode));
10095
10096 access_public_node = get_identifier ("public");
10097 access_protected_node = get_identifier ("protected");
10098 access_private_node = get_identifier ("private");
10099
10100 /* Define these next since types below may use them. */
10101 integer_zero_node = build_int_cst (integer_type_node, 0);
10102 integer_one_node = build_int_cst (integer_type_node, 1);
10103 integer_three_node = build_int_cst (integer_type_node, 3);
10104 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10105
10106 size_zero_node = size_int (0);
10107 size_one_node = size_int (1);
10108 bitsize_zero_node = bitsize_int (0);
10109 bitsize_one_node = bitsize_int (1);
10110 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10111
10112 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10113 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10114
10115 void_type_node = make_node (VOID_TYPE);
10116 layout_type (void_type_node);
10117
10118 pointer_bounds_type_node = targetm.chkp_bound_type ();
10119
10120 /* We are not going to have real types in C with less than byte alignment,
10121 so we might as well not have any types that claim to have it. */
10122 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10123 TYPE_USER_ALIGN (void_type_node) = 0;
10124
10125 void_node = make_node (VOID_CST);
10126 TREE_TYPE (void_node) = void_type_node;
10127
10128 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10129 layout_type (TREE_TYPE (null_pointer_node));
10130
10131 ptr_type_node = build_pointer_type (void_type_node);
10132 const_ptr_type_node
10133 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10134 fileptr_type_node = ptr_type_node;
10135
10136 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10137
10138 float_type_node = make_node (REAL_TYPE);
10139 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10140 layout_type (float_type_node);
10141
10142 double_type_node = make_node (REAL_TYPE);
10143 if (short_double)
10144 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
10145 else
10146 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10147 layout_type (double_type_node);
10148
10149 long_double_type_node = make_node (REAL_TYPE);
10150 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10151 layout_type (long_double_type_node);
10152
10153 float_ptr_type_node = build_pointer_type (float_type_node);
10154 double_ptr_type_node = build_pointer_type (double_type_node);
10155 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10156 integer_ptr_type_node = build_pointer_type (integer_type_node);
10157
10158 /* Fixed size integer types. */
10159 uint16_type_node = make_or_reuse_type (16, 1);
10160 uint32_type_node = make_or_reuse_type (32, 1);
10161 uint64_type_node = make_or_reuse_type (64, 1);
10162
10163 /* Decimal float types. */
10164 dfloat32_type_node = make_node (REAL_TYPE);
10165 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10166 layout_type (dfloat32_type_node);
10167 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10168 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10169
10170 dfloat64_type_node = make_node (REAL_TYPE);
10171 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10172 layout_type (dfloat64_type_node);
10173 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10174 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10175
10176 dfloat128_type_node = make_node (REAL_TYPE);
10177 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10178 layout_type (dfloat128_type_node);
10179 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10180 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10181
10182 complex_integer_type_node = build_complex_type (integer_type_node);
10183 complex_float_type_node = build_complex_type (float_type_node);
10184 complex_double_type_node = build_complex_type (double_type_node);
10185 complex_long_double_type_node = build_complex_type (long_double_type_node);
10186
10187 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10188 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10189 sat_ ## KIND ## _type_node = \
10190 make_sat_signed_ ## KIND ## _type (SIZE); \
10191 sat_unsigned_ ## KIND ## _type_node = \
10192 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10193 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10194 unsigned_ ## KIND ## _type_node = \
10195 make_unsigned_ ## KIND ## _type (SIZE);
10196
10197 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10198 sat_ ## WIDTH ## KIND ## _type_node = \
10199 make_sat_signed_ ## KIND ## _type (SIZE); \
10200 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10201 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10202 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10203 unsigned_ ## WIDTH ## KIND ## _type_node = \
10204 make_unsigned_ ## KIND ## _type (SIZE);
10205
10206 /* Make fixed-point type nodes based on four different widths. */
10207 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10208 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10209 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10210 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10211 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10212
10213 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10214 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10215 NAME ## _type_node = \
10216 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10217 u ## NAME ## _type_node = \
10218 make_or_reuse_unsigned_ ## KIND ## _type \
10219 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10220 sat_ ## NAME ## _type_node = \
10221 make_or_reuse_sat_signed_ ## KIND ## _type \
10222 (GET_MODE_BITSIZE (MODE ## mode)); \
10223 sat_u ## NAME ## _type_node = \
10224 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10225 (GET_MODE_BITSIZE (U ## MODE ## mode));
10226
10227 /* Fixed-point type and mode nodes. */
10228 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10229 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10230 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10231 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10232 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10233 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10234 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10235 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10236 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10237 MAKE_FIXED_MODE_NODE (accum, da, DA)
10238 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10239
10240 {
10241 tree t = targetm.build_builtin_va_list ();
10242
10243 /* Many back-ends define record types without setting TYPE_NAME.
10244 If we copied the record type here, we'd keep the original
10245 record type without a name. This breaks name mangling. So,
10246 don't copy record types and let c_common_nodes_and_builtins()
10247 declare the type to be __builtin_va_list. */
10248 if (TREE_CODE (t) != RECORD_TYPE)
10249 t = build_variant_type_copy (t);
10250
10251 va_list_type_node = t;
10252 }
10253 }
10254
10255 /* Modify DECL for given flags.
10256 TM_PURE attribute is set only on types, so the function will modify
10257 DECL's type when ECF_TM_PURE is used. */
10258
10259 void
10260 set_call_expr_flags (tree decl, int flags)
10261 {
10262 if (flags & ECF_NOTHROW)
10263 TREE_NOTHROW (decl) = 1;
10264 if (flags & ECF_CONST)
10265 TREE_READONLY (decl) = 1;
10266 if (flags & ECF_PURE)
10267 DECL_PURE_P (decl) = 1;
10268 if (flags & ECF_LOOPING_CONST_OR_PURE)
10269 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10270 if (flags & ECF_NOVOPS)
10271 DECL_IS_NOVOPS (decl) = 1;
10272 if (flags & ECF_NORETURN)
10273 TREE_THIS_VOLATILE (decl) = 1;
10274 if (flags & ECF_MALLOC)
10275 DECL_IS_MALLOC (decl) = 1;
10276 if (flags & ECF_RETURNS_TWICE)
10277 DECL_IS_RETURNS_TWICE (decl) = 1;
10278 if (flags & ECF_LEAF)
10279 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10280 NULL, DECL_ATTRIBUTES (decl));
10281 if ((flags & ECF_TM_PURE) && flag_tm)
10282 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10283 /* Looping const or pure is implied by noreturn.
10284 There is currently no way to declare looping const or looping pure alone. */
10285 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10286 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10287 }
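/* For example, local_define_builtin below marks __builtin_alloca with
   set_call_expr_flags (decl, ECF_MALLOC | ECF_NOTHROW | ECF_LEAF), which
   sets DECL_IS_MALLOC, TREE_NOTHROW and the "leaf" attribute on DECL.  */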
10288
10289
10290 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10291
10292 static void
10293 local_define_builtin (const char *name, tree type, enum built_in_function code,
10294 const char *library_name, int ecf_flags)
10295 {
10296 tree decl;
10297
10298 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10299 library_name, NULL_TREE);
10300 set_call_expr_flags (decl, ecf_flags);
10301
10302 set_builtin_decl (code, decl, true);
10303 }
10304
10305 /* Call this function after instantiating all builtins that the language
10306 front end cares about. This will build the rest of the builtins
10307 and internal functions that are relied upon by the tree optimizers and
10308 the middle-end. */
10309
10310 void
10311 build_common_builtin_nodes (void)
10312 {
10313 tree tmp, ftype;
10314 int ecf_flags;
10315
10316 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10317 {
10318 ftype = build_function_type (void_type_node, void_list_node);
10319 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10320 "__builtin_unreachable",
10321 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10322 | ECF_CONST);
10323 }
10324
10325 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10326 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10327 {
10328 ftype = build_function_type_list (ptr_type_node,
10329 ptr_type_node, const_ptr_type_node,
10330 size_type_node, NULL_TREE);
10331
10332 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10333 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10334 "memcpy", ECF_NOTHROW | ECF_LEAF);
10335 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10336 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10337 "memmove", ECF_NOTHROW | ECF_LEAF);
10338 }
10339
10340 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10341 {
10342 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10343 const_ptr_type_node, size_type_node,
10344 NULL_TREE);
10345 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10346 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10347 }
10348
10349 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10350 {
10351 ftype = build_function_type_list (ptr_type_node,
10352 ptr_type_node, integer_type_node,
10353 size_type_node, NULL_TREE);
10354 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10355 "memset", ECF_NOTHROW | ECF_LEAF);
10356 }
10357
10358 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10359 {
10360 ftype = build_function_type_list (ptr_type_node,
10361 size_type_node, NULL_TREE);
10362 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10363 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10364 }
10365
10366 ftype = build_function_type_list (ptr_type_node, size_type_node,
10367 size_type_node, NULL_TREE);
10368 local_define_builtin ("__builtin_alloca_with_align", ftype,
10369 BUILT_IN_ALLOCA_WITH_ALIGN,
10370 "__builtin_alloca_with_align",
10371 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10372
10373 /* If we're checking the stack, `alloca' can throw. */
10374 if (flag_stack_check)
10375 {
10376 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10377 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10378 }
10379
10380 ftype = build_function_type_list (void_type_node,
10381 ptr_type_node, ptr_type_node,
10382 ptr_type_node, NULL_TREE);
10383 local_define_builtin ("__builtin_init_trampoline", ftype,
10384 BUILT_IN_INIT_TRAMPOLINE,
10385 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10386 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10387 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10388 "__builtin_init_heap_trampoline",
10389 ECF_NOTHROW | ECF_LEAF);
10390
10391 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10392 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10393 BUILT_IN_ADJUST_TRAMPOLINE,
10394 "__builtin_adjust_trampoline",
10395 ECF_CONST | ECF_NOTHROW);
10396
10397 ftype = build_function_type_list (void_type_node,
10398 ptr_type_node, ptr_type_node, NULL_TREE);
10399 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10400 BUILT_IN_NONLOCAL_GOTO,
10401 "__builtin_nonlocal_goto",
10402 ECF_NORETURN | ECF_NOTHROW);
10403
10404 ftype = build_function_type_list (void_type_node,
10405 ptr_type_node, ptr_type_node, NULL_TREE);
10406 local_define_builtin ("__builtin_setjmp_setup", ftype,
10407 BUILT_IN_SETJMP_SETUP,
10408 "__builtin_setjmp_setup", ECF_NOTHROW);
10409
10410 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10411 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10412 BUILT_IN_SETJMP_RECEIVER,
10413 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10414
10415 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10416 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10417 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10418
10419 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10420 local_define_builtin ("__builtin_stack_restore", ftype,
10421 BUILT_IN_STACK_RESTORE,
10422 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10423
10424 /* If there's a possibility that we might use the ARM EABI, build the
10425 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10426 if (targetm.arm_eabi_unwinder)
10427 {
10428 ftype = build_function_type_list (void_type_node, NULL_TREE);
10429 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10430 BUILT_IN_CXA_END_CLEANUP,
10431 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10432 }
10433
10434 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10435 local_define_builtin ("__builtin_unwind_resume", ftype,
10436 BUILT_IN_UNWIND_RESUME,
10437 ((targetm_common.except_unwind_info (&global_options)
10438 == UI_SJLJ)
10439 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10440 ECF_NORETURN);
10441
10442 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10443 {
10444 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10445 NULL_TREE);
10446 local_define_builtin ("__builtin_return_address", ftype,
10447 BUILT_IN_RETURN_ADDRESS,
10448 "__builtin_return_address",
10449 ECF_NOTHROW);
10450 }
10451
10452 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10453 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10454 {
10455 ftype = build_function_type_list (void_type_node, ptr_type_node,
10456 ptr_type_node, NULL_TREE);
10457 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10458 local_define_builtin ("__cyg_profile_func_enter", ftype,
10459 BUILT_IN_PROFILE_FUNC_ENTER,
10460 "__cyg_profile_func_enter", 0);
10461 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10462 local_define_builtin ("__cyg_profile_func_exit", ftype,
10463 BUILT_IN_PROFILE_FUNC_EXIT,
10464 "__cyg_profile_func_exit", 0);
10465 }
10466
10467 /* The exception object and filter values from the runtime. The argument
10468 must be zero before exception lowering, i.e. from the front end. After
10469 exception lowering, it will be the region number for the exception
10470 landing pad. These functions are PURE instead of CONST to prevent
10471 them from being hoisted past the exception edge that will initialize
10472 their values in the landing pad. */
10473 ftype = build_function_type_list (ptr_type_node,
10474 integer_type_node, NULL_TREE);
10475 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10476 /* Only use TM_PURE if we have TM language support. */
10477 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10478 ecf_flags |= ECF_TM_PURE;
10479 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10480 "__builtin_eh_pointer", ecf_flags);
10481
10482 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10483 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10484 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10485 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10486
10487 ftype = build_function_type_list (void_type_node,
10488 integer_type_node, integer_type_node,
10489 NULL_TREE);
10490 local_define_builtin ("__builtin_eh_copy_values", ftype,
10491 BUILT_IN_EH_COPY_VALUES,
10492 "__builtin_eh_copy_values", ECF_NOTHROW);
10493
10494 /* Complex multiplication and division. These are handled as builtins
10495 rather than optabs because emit_library_call_value doesn't support
10496 complex. Further, we can do slightly better with folding these
10497 beasties if the real and imaginary parts of the arguments are separate. */
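/* For SCmode, for instance, the loop below registers __mulsc3 and
   __divsc3 (or __gnu_mulsc3 and __gnu_divsc3 when
   targetm.libfunc_gnu_prefix is set), matching the libgcc entry points.  */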
10498 {
10499 int mode;
10500
10501 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10502 {
10503 char mode_name_buf[4], *q;
10504 const char *p;
10505 enum built_in_function mcode, dcode;
10506 tree type, inner_type;
10507 const char *prefix = "__";
10508
10509 if (targetm.libfunc_gnu_prefix)
10510 prefix = "__gnu_";
10511
10512 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10513 if (type == NULL)
10514 continue;
10515 inner_type = TREE_TYPE (type);
10516
10517 ftype = build_function_type_list (type, inner_type, inner_type,
10518 inner_type, inner_type, NULL_TREE);
10519
10520 mcode = ((enum built_in_function)
10521 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10522 dcode = ((enum built_in_function)
10523 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10524
10525 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10526 *q = TOLOWER (*p);
10527 *q = '\0';
10528
10529 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10530 NULL);
10531 local_define_builtin (built_in_names[mcode], ftype, mcode,
10532 built_in_names[mcode],
10533 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10534
10535 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10536 NULL);
10537 local_define_builtin (built_in_names[dcode], ftype, dcode,
10538 built_in_names[dcode],
10539 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10540 }
10541 }
10542
10543 init_internal_fns ();
10544 }
10545
10546 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10547 better way.
10548
10549 If we requested a pointer to a vector, build up the pointers that
10550 we stripped off while looking for the inner type. Similarly for
10551 return values from functions.
10552
10553 The argument TYPE is the top of the chain, and BOTTOM is the
10554 new type which we will point to. */
10555
10556 tree
10557 reconstruct_complex_type (tree type, tree bottom)
10558 {
10559 tree inner, outer;
10560
10561 if (TREE_CODE (type) == POINTER_TYPE)
10562 {
10563 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10564 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10565 TYPE_REF_CAN_ALIAS_ALL (type));
10566 }
10567 else if (TREE_CODE (type) == REFERENCE_TYPE)
10568 {
10569 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10570 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10571 TYPE_REF_CAN_ALIAS_ALL (type));
10572 }
10573 else if (TREE_CODE (type) == ARRAY_TYPE)
10574 {
10575 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10576 outer = build_array_type (inner, TYPE_DOMAIN (type));
10577 }
10578 else if (TREE_CODE (type) == FUNCTION_TYPE)
10579 {
10580 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10581 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10582 }
10583 else if (TREE_CODE (type) == METHOD_TYPE)
10584 {
10585 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10586 /* The build_method_type_directly() routine prepends 'this' to the argument
10587 list, so we must compensate by getting rid of it here. */
10588 outer
10589 = build_method_type_directly
10590 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10591 inner,
10592 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10593 }
10594 else if (TREE_CODE (type) == OFFSET_TYPE)
10595 {
10596 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10597 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10598 }
10599 else
10600 return bottom;
10601
10602 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10603 TYPE_QUALS (type));
10604 }
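/* Illustrative sketch: if TYPE is "float **" and BOTTOM is a vector type
   of four floats, the result is a pointer to a pointer to that vector
   type, with each level's qualifiers and attributes reapplied via
   build_type_attribute_qual_variant.  */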
10605
10606 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10607 the inner type. */
10608 tree
10609 build_vector_type_for_mode (tree innertype, machine_mode mode)
10610 {
10611 int nunits;
10612
10613 switch (GET_MODE_CLASS (mode))
10614 {
10615 case MODE_VECTOR_INT:
10616 case MODE_VECTOR_FLOAT:
10617 case MODE_VECTOR_FRACT:
10618 case MODE_VECTOR_UFRACT:
10619 case MODE_VECTOR_ACCUM:
10620 case MODE_VECTOR_UACCUM:
10621 nunits = GET_MODE_NUNITS (mode);
10622 break;
10623
10624 case MODE_INT:
10625 /* Check that there are no leftover bits. */
10626 gcc_assert (GET_MODE_BITSIZE (mode)
10627 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10628
10629 nunits = GET_MODE_BITSIZE (mode)
10630 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10631 break;
10632
10633 default:
10634 gcc_unreachable ();
10635 }
10636
10637 return make_vector_type (innertype, nunits, mode);
10638 }
10639
10640 /* Similarly, but takes the inner type and number of units, which must be
10641 a power of two. */
10642
10643 tree
10644 build_vector_type (tree innertype, int nunits)
10645 {
10646 return make_vector_type (innertype, nunits, VOIDmode);
10647 }
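/* E.g. build_vector_type (intSI_type_node, 4) yields a 4 x SImode vector
   type; because the mode argument is VOIDmode, layout_type chooses the
   machine mode (such as V4SImode where the target supports one).  */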
10648
10649 /* Build a truth (boolean) vector type with NUNITS units for a data
vector of VECTOR_SIZE bytes. */
10650
10651 tree
10652 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10653 {
10654 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10655 vector_size);
10656
10657 gcc_assert (mask_mode != VOIDmode);
10658
10659 unsigned HOST_WIDE_INT esize = GET_MODE_BITSIZE (mask_mode) / nunits;
10660 gcc_assert (esize * nunits == GET_MODE_BITSIZE (mask_mode));
10661
10662 tree bool_type = build_nonstandard_boolean_type (esize);
10663
10664 return make_vector_type (bool_type, nunits, mask_mode);
10665 }
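/* The element width is purely arithmetic: if the target's get_mask_mode
   hook returns a 128-bit mode for NUNITS == 4, each boolean element is
   32 bits wide; a 4-bit integer mask mode would give 1-bit elements
   instead.  Which case applies is entirely target-dependent.  */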
10666
10667 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10668
10669 tree
10670 build_same_sized_truth_vector_type (tree vectype)
10671 {
10672 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10673 return vectype;
10674
10675 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10676
10677 if (!size)
10678 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10679
10680 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10681 }
10682
10683 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10684
10685 tree
10686 build_opaque_vector_type (tree innertype, int nunits)
10687 {
10688 tree t = make_vector_type (innertype, nunits, VOIDmode);
10689 tree cand;
10690 /* We always build the non-opaque variant before the opaque one,
10691 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10692 cand = TYPE_NEXT_VARIANT (t);
10693 if (cand
10694 && TYPE_VECTOR_OPAQUE (cand)
10695 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10696 return cand;
10697 /* Otherwise build a variant type and make sure to queue it after
10698 the non-opaque type. */
10699 cand = build_distinct_type_copy (t);
10700 TYPE_VECTOR_OPAQUE (cand) = true;
10701 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10702 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10703 TYPE_NEXT_VARIANT (t) = cand;
10704 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10705 return cand;
10706 }
10707
10708
10709 /* Given an initializer INIT, return TRUE if INIT is zero or some
10710 aggregate of zeros. Otherwise return FALSE. */
10711 bool
10712 initializer_zerop (const_tree init)
10713 {
10714 tree elt;
10715
10716 STRIP_NOPS (init);
10717
10718 switch (TREE_CODE (init))
10719 {
10720 case INTEGER_CST:
10721 return integer_zerop (init);
10722
10723 case REAL_CST:
10724 /* ??? Note that this is not correct for C4X float formats. There,
10725 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10726 negative exponent. */
10727 return real_zerop (init)
10728 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10729
10730 case FIXED_CST:
10731 return fixed_zerop (init);
10732
10733 case COMPLEX_CST:
10734 return integer_zerop (init)
10735 || (real_zerop (init)
10736 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10737 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10738
10739 case VECTOR_CST:
10740 {
10741 unsigned i;
10742 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10743 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10744 return false;
10745 return true;
10746 }
10747
10748 case CONSTRUCTOR:
10749 {
10750 unsigned HOST_WIDE_INT idx;
10751
10752 if (TREE_CLOBBER_P (init))
10753 return false;
10754 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10755 if (!initializer_zerop (elt))
10756 return false;
10757 return true;
10758 }
10759
10760 case STRING_CST:
10761 {
10762 int i;
10763
10764 /* We need to loop through all elements to handle cases like
10765 "\0" and "\0foobar". */
10766 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10767 if (TREE_STRING_POINTER (init)[i] != '\0')
10768 return false;
10769
10770 return true;
10771 }
10772
10773 default:
10774 return false;
10775 }
10776 }
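/* For example, the integer constant 0, the real constant 0.0 (but not
   -0.0), the string "\0\0", and a CONSTRUCTOR whose elements are all
   themselves zero initializers return true; a clobber or the string
   "\0foo" returns false.  */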
10777
10778 /* Check whether vector VEC consists of all equal elements and that
10779 the number of elements corresponds to the type of VEC.
10780 Return the first element of the vector,
10781 or NULL_TREE if the vector is not uniform. */
10782 tree
10783 uniform_vector_p (const_tree vec)
10784 {
10785 tree first, t;
10786 unsigned i;
10787
10788 if (vec == NULL_TREE)
10789 return NULL_TREE;
10790
10791 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10792
10793 if (TREE_CODE (vec) == VECTOR_CST)
10794 {
10795 first = VECTOR_CST_ELT (vec, 0);
10796 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10797 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10798 return NULL_TREE;
10799
10800 return first;
10801 }
10802
10803 else if (TREE_CODE (vec) == CONSTRUCTOR)
10804 {
10805 first = error_mark_node;
10806
10807 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10808 {
10809 if (i == 0)
10810 {
10811 first = t;
10812 continue;
10813 }
10814 if (!operand_equal_p (first, t, 0))
10815 return NULL_TREE;
10816 }
10817 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10818 return NULL_TREE;
10819
10820 return first;
10821 }
10822
10823 return NULL_TREE;
10824 }
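/* E.g. for the VECTOR_CST { 2, 2, 2, 2 } this returns the element 2,
   whereas { 1, 2, 1, 1 } and a CONSTRUCTOR with fewer elements than
   TYPE_VECTOR_SUBPARTS both return NULL_TREE.  */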
10825
10826 /* Build an empty statement at location LOC. */
10827
10828 tree
10829 build_empty_stmt (location_t loc)
10830 {
10831 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10832 SET_EXPR_LOCATION (t, loc);
10833 return t;
10834 }
10835
10836
10837 /* Build an OpenMP clause with code CODE. LOC is the location of the
10838 clause. */
10839
10840 tree
10841 build_omp_clause (location_t loc, enum omp_clause_code code)
10842 {
10843 tree t;
10844 int size, length;
10845
10846 length = omp_clause_num_ops[code];
10847 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10848
10849 record_node_allocation_statistics (OMP_CLAUSE, size);
10850
10851 t = (tree) ggc_internal_alloc (size);
10852 memset (t, 0, size);
10853 TREE_SET_CODE (t, OMP_CLAUSE);
10854 OMP_CLAUSE_SET_CODE (t, code);
10855 OMP_CLAUSE_LOCATION (t) = loc;
10856
10857 return t;
10858 }
10859
10860 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10861 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10862 Except for the CODE and the operand count field, all other storage for
10863 the object is zero-initialized. */
10864
10865 tree
10866 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10867 {
10868 tree t;
10869 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10870
10871 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10872 gcc_assert (len >= 1);
10873
10874 record_node_allocation_statistics (code, length);
10875
10876 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10877
10878 TREE_SET_CODE (t, code);
10879
10880 /* Can't use TREE_OPERAND to store the length because if checking is
10881 enabled, it will try to check the length before we store it. :-P */
10882 t->exp.operands[0] = build_int_cst (sizetype, len);
10883
10884 return t;
10885 }
10886
10887 /* Helper function for build_call_* functions; build a CALL_EXPR with
10888 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10889 the argument slots. */
10890
10891 static tree
10892 build_call_1 (tree return_type, tree fn, int nargs)
10893 {
10894 tree t;
10895
10896 t = build_vl_exp (CALL_EXPR, nargs + 3);
10897 TREE_TYPE (t) = return_type;
10898 CALL_EXPR_FN (t) = fn;
10899 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10900
10901 return t;
10902 }
10903
10904 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10905 FN and a null static chain slot. NARGS is the number of call arguments
10906 which are specified as "..." arguments. */
10907
10908 tree
10909 build_call_nary (tree return_type, tree fn, int nargs, ...)
10910 {
10911 tree ret;
10912 va_list args;
10913 va_start (args, nargs);
10914 ret = build_call_valist (return_type, fn, nargs, args);
10915 va_end (args);
10916 return ret;
10917 }
10918
10919 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10920 FN and a null static chain slot. NARGS is the number of call arguments
10921 which are specified as a va_list ARGS. */
10922
10923 tree
10924 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10925 {
10926 tree t;
10927 int i;
10928
10929 t = build_call_1 (return_type, fn, nargs);
10930 for (i = 0; i < nargs; i++)
10931 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10932 process_call_operands (t);
10933 return t;
10934 }
10935
10936 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10937 FN and a null static chain slot. NARGS is the number of call arguments
10938 which are specified as a tree array ARGS. */
10939
10940 tree
10941 build_call_array_loc (location_t loc, tree return_type, tree fn,
10942 int nargs, const tree *args)
10943 {
10944 tree t;
10945 int i;
10946
10947 t = build_call_1 (return_type, fn, nargs);
10948 for (i = 0; i < nargs; i++)
10949 CALL_EXPR_ARG (t, i) = args[i];
10950 process_call_operands (t);
10951 SET_EXPR_LOCATION (t, loc);
10952 return t;
10953 }
10954
10955 /* Like build_call_array, but takes a vec. */
10956
10957 tree
10958 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10959 {
10960 tree ret, t;
10961 unsigned int ix;
10962
10963 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10964 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10965 CALL_EXPR_ARG (ret, ix) = t;
10966 process_call_operands (ret);
10967 return ret;
10968 }
10969
10970 /* Conveniently construct a function call expression. FNDECL names the
10971 function to be called and N arguments are passed in the array
10972 ARGARRAY. */
10973
10974 tree
10975 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10976 {
10977 tree fntype = TREE_TYPE (fndecl);
10978 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10979
10980 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10981 }
10982
10983 /* Conveniently construct a function call expression. FNDECL names the
10984 function to be called and the arguments are passed in the vector
10985 VEC. */
10986
10987 tree
10988 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10989 {
10990 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10991 vec_safe_address (vec));
10992 }
10993
10994
10995 /* Conveniently construct a function call expression. FNDECL names the
10996 function to be called, N is the number of arguments, and the "..."
10997 parameters are the argument expressions. */
10998
10999 tree
11000 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11001 {
11002 va_list ap;
11003 tree *argarray = XALLOCAVEC (tree, n);
11004 int i;
11005
11006 va_start (ap, n);
11007 for (i = 0; i < n; i++)
11008 argarray[i] = va_arg (ap, tree);
11009 va_end (ap);
11010 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11011 }
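/* A typical use, with DST, SRC and LEN standing for caller-supplied trees:
     tree call = build_call_expr_loc (loc,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY),
                                      3, dst, src, len);  */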
11012
11013 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11014 varargs macros aren't supported by all bootstrap compilers. */
11015
11016 tree
11017 build_call_expr (tree fndecl, int n, ...)
11018 {
11019 va_list ap;
11020 tree *argarray = XALLOCAVEC (tree, n);
11021 int i;
11022
11023 va_start (ap, n);
11024 for (i = 0; i < n; i++)
11025 argarray[i] = va_arg (ap, tree);
11026 va_end (ap);
11027 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11028 }
11029
11030 /* Build an internal call expression. This is just like CALL_EXPR, except
11031 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
11032 internal function call. */
11033
11034 tree
11035 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11036 tree type, int n, ...)
11037 {
11038 va_list ap;
11039 int i;
11040
11041 tree fn = build_call_1 (type, NULL_TREE, n);
11042 va_start (ap, n);
11043 for (i = 0; i < n; i++)
11044 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
11045 va_end (ap);
11046 SET_EXPR_LOCATION (fn, loc);
11047 CALL_EXPR_IFN (fn) = ifn;
11048 return fn;
11049 }
11050
11051 /* Create a new constant string literal and return a char* pointer to it.
11052 The STRING_CST value is the LEN characters at STR. */
11053 tree
11054 build_string_literal (int len, const char *str)
11055 {
11056 tree t, elem, index, type;
11057
11058 t = build_string (len, str);
11059 elem = build_type_variant (char_type_node, 1, 0);
11060 index = build_index_type (size_int (len - 1));
11061 type = build_array_type (elem, index);
11062 TREE_TYPE (t) = type;
11063 TREE_CONSTANT (t) = 1;
11064 TREE_READONLY (t) = 1;
11065 TREE_STATIC (t) = 1;
11066
11067 type = build_pointer_type (elem);
11068 t = build1 (ADDR_EXPR, type,
11069 build4 (ARRAY_REF, elem,
11070 t, integer_zero_node, NULL_TREE, NULL_TREE));
11071 return t;
11072 }
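/* E.g. build_string_literal (strlen ("%d\n") + 1, "%d\n") produces the
   address of a NUL-terminated constant array, suitable for instance as
   the format argument of a generated printf call.  */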
11073
11074
11075
11076 /* Return true if T (assumed to be a DECL) must be assigned a memory
11077 location. */
11078
11079 bool
11080 needs_to_live_in_memory (const_tree t)
11081 {
11082 return (TREE_ADDRESSABLE (t)
11083 || is_global_var (t)
11084 || (TREE_CODE (t) == RESULT_DECL
11085 && !DECL_BY_REFERENCE (t)
11086 && aggregate_value_p (t, current_function_decl)));
11087 }
11088
11089 /* Return the value of the constant X, sign-extended to HOST_WIDE_INT. */
11090
11091 HOST_WIDE_INT
11092 int_cst_value (const_tree x)
11093 {
11094 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11095 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11096
11097 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11098 gcc_assert (cst_and_fits_in_hwi (x));
11099
11100 if (bits < HOST_BITS_PER_WIDE_INT)
11101 {
11102 bool negative = ((val >> (bits - 1)) & 1) != 0;
11103 if (negative)
11104 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11105 else
11106 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11107 }
11108
11109 return val;
11110 }
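/* Note the extension is by the sign bit of the value, independent of the
   type's signedness: an 8-bit constant whose low byte is 0xff yields -1
   here, whether its type is signed char or unsigned char.  */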
11111
11112 /* If TYPE is an integral or pointer type, return an integer type with
11113 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11114 if TYPE is already an integer type of signedness UNSIGNEDP. */
11115
11116 tree
11117 signed_or_unsigned_type_for (int unsignedp, tree type)
11118 {
11119 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11120 return type;
11121
11122 if (TREE_CODE (type) == VECTOR_TYPE)
11123 {
11124 tree inner = TREE_TYPE (type);
11125 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11126 if (!inner2)
11127 return NULL_TREE;
11128 if (inner == inner2)
11129 return type;
11130 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11131 }
11132
11133 if (!INTEGRAL_TYPE_P (type)
11134 && !POINTER_TYPE_P (type)
11135 && TREE_CODE (type) != OFFSET_TYPE)
11136 return NULL_TREE;
11137
11138 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11139 }
11140
11141 /* If TYPE is an integral or pointer type, return an integer type with
11142 the same precision which is unsigned, or itself if TYPE is already an
11143 unsigned integer type. */
11144
11145 tree
11146 unsigned_type_for (tree type)
11147 {
11148 return signed_or_unsigned_type_for (1, type);
11149 }
11150
11151 /* If TYPE is an integral or pointer type, return an integer type with
11152 the same precision which is signed, or itself if TYPE is already a
11153 signed integer type. */
11154
11155 tree
11156 signed_type_for (tree type)
11157 {
11158 return signed_or_unsigned_type_for (0, type);
11159 }
11160
11161 /* If TYPE is a vector type, return a boolean (truth) vector type with the
11162 same number of subparts. Otherwise return boolean_type_node. */
11163
11164 tree
11165 truth_type_for (tree type)
11166 {
11167 if (TREE_CODE (type) == VECTOR_TYPE)
11168 {
11169 if (VECTOR_BOOLEAN_TYPE_P (type))
11170 return type;
11171 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11172 GET_MODE_SIZE (TYPE_MODE (type)));
11173 }
11174 else
11175 return boolean_type_node;
11176 }
11177
11178 /* Returns the largest value obtainable by casting something in INNER type to
11179 OUTER type. */
11180
11181 tree
11182 upper_bound_in_type (tree outer, tree inner)
11183 {
11184 unsigned int det = 0;
11185 unsigned oprec = TYPE_PRECISION (outer);
11186 unsigned iprec = TYPE_PRECISION (inner);
11187 unsigned prec;
11188
11189 /* Compute a unique number for every combination. */
11190 det |= (oprec > iprec) ? 4 : 0;
11191 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11192 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11193
11194 /* Determine the exponent to use. */
11195 switch (det)
11196 {
11197 case 0:
11198 case 1:
11199 /* oprec <= iprec, outer: signed, inner: don't care. */
11200 prec = oprec - 1;
11201 break;
11202 case 2:
11203 case 3:
11204 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11205 prec = oprec;
11206 break;
11207 case 4:
11208 /* oprec > iprec, outer: signed, inner: signed. */
11209 prec = iprec - 1;
11210 break;
11211 case 5:
11212 /* oprec > iprec, outer: signed, inner: unsigned. */
11213 prec = iprec;
11214 break;
11215 case 6:
11216 /* oprec > iprec, outer: unsigned, inner: signed. */
11217 prec = oprec;
11218 break;
11219 case 7:
11220 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11221 prec = iprec;
11222 break;
11223 default:
11224 gcc_unreachable ();
11225 }
11226
11227 return wide_int_to_tree (outer,
11228 wi::mask (prec, false, TYPE_PRECISION (outer)));
11229 }
11230
11231 /* Returns the smallest value obtainable by casting something in INNER type to
11232 OUTER type. */
11233
11234 tree
11235 lower_bound_in_type (tree outer, tree inner)
11236 {
11237 unsigned oprec = TYPE_PRECISION (outer);
11238 unsigned iprec = TYPE_PRECISION (inner);
11239
11240 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11241 and obtain 0. */
11242 if (TYPE_UNSIGNED (outer)
11243 /* If we are widening something of an unsigned type, OUTER type
11244 contains all values of INNER type. In particular, both INNER
11245 and OUTER types have zero in common. */
11246 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11247 return build_int_cst (outer, 0);
11248 else
11249 {
11250 /* If we are widening a signed type to another signed type, we
11251 	 want to obtain -2^(iprec-1). If we are keeping the
11252 precision or narrowing to a signed type, we want to obtain
11253 -2^(oprec-1). */
11254 unsigned prec = oprec > iprec ? iprec : oprec;
11255 return wide_int_to_tree (outer,
11256 wi::mask (prec - 1, true,
11257 TYPE_PRECISION (outer)));
11258 }
11259 }
11260
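/* Worked example for the two functions above: casting from a signed 8-bit
   type (IPREC == 8) to an unsigned 32-bit type (OPREC == 32) gives DET == 6,
   so PREC == 32 and upper_bound_in_type returns the all-ones value
   0xffffffff; negative 8-bit values wrap around to large unsigned numbers,
   so every 32-bit value is reachable. lower_bound_in_type returns 0 for the
   same pair because OUTER is unsigned. */
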
11261 /* Return nonzero if two operands that are suitable for PHI nodes are
11262 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11263 SSA_NAME or invariant. Note that this is strictly an optimization.
11264 That is, callers of this function can directly call operand_equal_p
11265 and get the same result, only slower. */
11266
11267 int
11268 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11269 {
11270 if (arg0 == arg1)
11271 return 1;
11272 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11273 return 0;
11274 return operand_equal_p (arg0, arg1, 0);
11275 }
11276
11277 /* Returns number of zeros at the end of binary representation of X. */
11278
11279 tree
11280 num_ending_zeros (const_tree x)
11281 {
11282 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11283 }
11284
11285
11286 #define WALK_SUBTREE(NODE) \
11287 do \
11288 { \
11289 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11290 if (result) \
11291 return result; \
11292 } \
11293 while (0)
11294
11295 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11296    to be walked whenever a type is seen in the tree. The rest of the operands
11297    and the return value are as for walk_tree. */
11298
11299 static tree
11300 walk_type_fields (tree type, walk_tree_fn func, void *data,
11301 hash_set<tree> *pset, walk_tree_lh lh)
11302 {
11303 tree result = NULL_TREE;
11304
11305 switch (TREE_CODE (type))
11306 {
11307 case POINTER_TYPE:
11308 case REFERENCE_TYPE:
11309 case VECTOR_TYPE:
11310 /* We have to worry about mutually recursive pointers. These can't
11311 be written in C. They can in Ada. It's pathological, but
11312 there's an ACATS test (c38102a) that checks it. Deal with this
11313 by checking if we're pointing to another pointer, that one
11314 points to another pointer, that one does too, and we have no htab.
11315 If so, get a hash table. We check three levels deep to avoid
11316 the cost of the hash table if we don't need one. */
11317 if (POINTER_TYPE_P (TREE_TYPE (type))
11318 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11319 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11320 && !pset)
11321 {
11322 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11323 func, data);
11324 if (result)
11325 return result;
11326
11327 break;
11328 }
11329
11330 /* ... fall through ... */
11331
11332 case COMPLEX_TYPE:
11333 WALK_SUBTREE (TREE_TYPE (type));
11334 break;
11335
11336 case METHOD_TYPE:
11337 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11338
11339 /* Fall through. */
11340
11341 case FUNCTION_TYPE:
11342 WALK_SUBTREE (TREE_TYPE (type));
11343 {
11344 tree arg;
11345
11346 /* We never want to walk into default arguments. */
11347 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11348 WALK_SUBTREE (TREE_VALUE (arg));
11349 }
11350 break;
11351
11352 case ARRAY_TYPE:
11353      /* Don't follow this node's type if it is a pointer, for fear that
11354 we'll have infinite recursion. If we have a PSET, then we
11355 need not fear. */
11356 if (pset
11357 || (!POINTER_TYPE_P (TREE_TYPE (type))
11358 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11359 WALK_SUBTREE (TREE_TYPE (type));
11360 WALK_SUBTREE (TYPE_DOMAIN (type));
11361 break;
11362
11363 case OFFSET_TYPE:
11364 WALK_SUBTREE (TREE_TYPE (type));
11365 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11366 break;
11367
11368 default:
11369 break;
11370 }
11371
11372 return NULL_TREE;
11373 }
11374
11375 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11376 called with the DATA and the address of each sub-tree. If FUNC returns a
11377 non-NULL value, the traversal is stopped, and the value returned by FUNC
11378 is returned. If PSET is non-NULL it is used to record the nodes visited,
11379 and to avoid visiting a node more than once. */
11380
11381 tree
11382 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11383 hash_set<tree> *pset, walk_tree_lh lh)
11384 {
11385 enum tree_code code;
11386 int walk_subtrees;
11387 tree result;
11388
11389 #define WALK_SUBTREE_TAIL(NODE) \
11390 do \
11391 { \
11392 tp = & (NODE); \
11393 goto tail_recurse; \
11394 } \
11395 while (0)
11396
11397 tail_recurse:
11398 /* Skip empty subtrees. */
11399 if (!*tp)
11400 return NULL_TREE;
11401
11402 /* Don't walk the same tree twice, if the user has requested
11403 that we avoid doing so. */
11404 if (pset && pset->add (*tp))
11405 return NULL_TREE;
11406
11407 /* Call the function. */
11408 walk_subtrees = 1;
11409 result = (*func) (tp, &walk_subtrees, data);
11410
11411 /* If we found something, return it. */
11412 if (result)
11413 return result;
11414
11415 code = TREE_CODE (*tp);
11416
11417 /* Even if we didn't, FUNC may have decided that there was nothing
11418 interesting below this point in the tree. */
11419 if (!walk_subtrees)
11420 {
11421 /* But we still need to check our siblings. */
11422 if (code == TREE_LIST)
11423 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11424 else if (code == OMP_CLAUSE)
11425 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11426 else
11427 return NULL_TREE;
11428 }
11429
11430 if (lh)
11431 {
11432 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11433 if (result || !walk_subtrees)
11434 return result;
11435 }
11436
11437 switch (code)
11438 {
11439 case ERROR_MARK:
11440 case IDENTIFIER_NODE:
11441 case INTEGER_CST:
11442 case REAL_CST:
11443 case FIXED_CST:
11444 case VECTOR_CST:
11445 case STRING_CST:
11446 case BLOCK:
11447 case PLACEHOLDER_EXPR:
11448 case SSA_NAME:
11449 case FIELD_DECL:
11450 case RESULT_DECL:
11451 /* None of these have subtrees other than those already walked
11452 above. */
11453 break;
11454
11455 case TREE_LIST:
11456 WALK_SUBTREE (TREE_VALUE (*tp));
11457 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11458 break;
11459
11460 case TREE_VEC:
11461 {
11462 int len = TREE_VEC_LENGTH (*tp);
11463
11464 if (len == 0)
11465 break;
11466
11467 /* Walk all elements but the first. */
11468 while (--len)
11469 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11470
11471 /* Now walk the first one as a tail call. */
11472 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11473 }
11474
11475 case COMPLEX_CST:
11476 WALK_SUBTREE (TREE_REALPART (*tp));
11477 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11478
11479 case CONSTRUCTOR:
11480 {
11481 unsigned HOST_WIDE_INT idx;
11482 constructor_elt *ce;
11483
11484 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11485 idx++)
11486 WALK_SUBTREE (ce->value);
11487 }
11488 break;
11489
11490 case SAVE_EXPR:
11491 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11492
11493 case BIND_EXPR:
11494 {
11495 tree decl;
11496 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11497 {
11498 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11499 into declarations that are just mentioned, rather than
11500 declared; they don't really belong to this part of the tree.
11501 And, we can see cycles: the initializer for a declaration
11502 can refer to the declaration itself. */
11503 WALK_SUBTREE (DECL_INITIAL (decl));
11504 WALK_SUBTREE (DECL_SIZE (decl));
11505 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11506 }
11507 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11508 }
11509
11510 case STATEMENT_LIST:
11511 {
11512 tree_stmt_iterator i;
11513 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11514 WALK_SUBTREE (*tsi_stmt_ptr (i));
11515 }
11516 break;
11517
11518 case OMP_CLAUSE:
11519 switch (OMP_CLAUSE_CODE (*tp))
11520 {
11521 case OMP_CLAUSE_GANG:
11522 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11523 /* FALLTHRU */
11524
11525 case OMP_CLAUSE_DEVICE_RESIDENT:
11526 case OMP_CLAUSE_USE_DEVICE:
11527 case OMP_CLAUSE_ASYNC:
11528 case OMP_CLAUSE_WAIT:
11529 case OMP_CLAUSE_WORKER:
11530 case OMP_CLAUSE_VECTOR:
11531 case OMP_CLAUSE_NUM_GANGS:
11532 case OMP_CLAUSE_NUM_WORKERS:
11533 case OMP_CLAUSE_VECTOR_LENGTH:
11534 case OMP_CLAUSE_PRIVATE:
11535 case OMP_CLAUSE_SHARED:
11536 case OMP_CLAUSE_FIRSTPRIVATE:
11537 case OMP_CLAUSE_COPYIN:
11538 case OMP_CLAUSE_COPYPRIVATE:
11539 case OMP_CLAUSE_FINAL:
11540 case OMP_CLAUSE_IF:
11541 case OMP_CLAUSE_NUM_THREADS:
11542 case OMP_CLAUSE_SCHEDULE:
11543 case OMP_CLAUSE_UNIFORM:
11544 case OMP_CLAUSE_DEPEND:
11545 case OMP_CLAUSE_NUM_TEAMS:
11546 case OMP_CLAUSE_THREAD_LIMIT:
11547 case OMP_CLAUSE_DEVICE:
11548 case OMP_CLAUSE_DIST_SCHEDULE:
11549 case OMP_CLAUSE_SAFELEN:
11550 case OMP_CLAUSE_SIMDLEN:
11551 case OMP_CLAUSE_ORDERED:
11552 case OMP_CLAUSE_PRIORITY:
11553 case OMP_CLAUSE_GRAINSIZE:
11554 case OMP_CLAUSE_NUM_TASKS:
11555 case OMP_CLAUSE_HINT:
11556 case OMP_CLAUSE_TO_DECLARE:
11557 case OMP_CLAUSE_LINK:
11558 case OMP_CLAUSE_USE_DEVICE_PTR:
11559 case OMP_CLAUSE_IS_DEVICE_PTR:
11560 case OMP_CLAUSE__LOOPTEMP_:
11561 case OMP_CLAUSE__SIMDUID_:
11562 case OMP_CLAUSE__CILK_FOR_COUNT_:
11563 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11564 /* FALLTHRU */
11565
11566 case OMP_CLAUSE_INDEPENDENT:
11567 case OMP_CLAUSE_NOWAIT:
11568 case OMP_CLAUSE_DEFAULT:
11569 case OMP_CLAUSE_UNTIED:
11570 case OMP_CLAUSE_MERGEABLE:
11571 case OMP_CLAUSE_PROC_BIND:
11572 case OMP_CLAUSE_INBRANCH:
11573 case OMP_CLAUSE_NOTINBRANCH:
11574 case OMP_CLAUSE_FOR:
11575 case OMP_CLAUSE_PARALLEL:
11576 case OMP_CLAUSE_SECTIONS:
11577 case OMP_CLAUSE_TASKGROUP:
11578 case OMP_CLAUSE_NOGROUP:
11579 case OMP_CLAUSE_THREADS:
11580 case OMP_CLAUSE_SIMD:
11581 case OMP_CLAUSE_DEFAULTMAP:
11582 case OMP_CLAUSE_AUTO:
11583 case OMP_CLAUSE_SEQ:
11584 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11585
11586 case OMP_CLAUSE_LASTPRIVATE:
11587 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11588 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11589 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11590
11591 case OMP_CLAUSE_COLLAPSE:
11592 {
11593 int i;
11594 for (i = 0; i < 3; i++)
11595 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11596 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11597 }
11598
11599 case OMP_CLAUSE_LINEAR:
11600 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11601 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11602 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11603 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11604
11605 case OMP_CLAUSE_ALIGNED:
11606 case OMP_CLAUSE_FROM:
11607 case OMP_CLAUSE_TO:
11608 case OMP_CLAUSE_MAP:
11609 case OMP_CLAUSE__CACHE_:
11610 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11611 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11612 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11613
11614 case OMP_CLAUSE_REDUCTION:
11615 {
11616 int i;
11617 for (i = 0; i < 5; i++)
11618 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11619 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11620 }
11621
11622 default:
11623 gcc_unreachable ();
11624 }
11625 break;
11626
11627 case TARGET_EXPR:
11628 {
11629 int i, len;
11630
11631 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11632 	 But, we only want to walk them once.
11633 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11634 for (i = 0; i < len; ++i)
11635 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11636 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11637 }
11638
11639 case DECL_EXPR:
11640 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11641 defining. We only want to walk into these fields of a type in this
11642 case and not in the general case of a mere reference to the type.
11643
11644 The criterion is as follows: if the field can be an expression, it
11645 must be walked only here. This should be in keeping with the fields
11646 that are directly gimplified in gimplify_type_sizes in order for the
11647 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11648 variable-sized types.
11649
11650 Note that DECLs get walked as part of processing the BIND_EXPR. */
11651 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11652 {
11653 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11654 if (TREE_CODE (*type_p) == ERROR_MARK)
11655 return NULL_TREE;
11656
11657 /* Call the function for the type. See if it returns anything or
11658 doesn't want us to continue. If we are to continue, walk both
11659 the normal fields and those for the declaration case. */
11660 result = (*func) (type_p, &walk_subtrees, data);
11661 if (result || !walk_subtrees)
11662 return result;
11663
11664 /* But do not walk a pointed-to type since it may itself need to
11665 be walked in the declaration case if it isn't anonymous. */
11666 if (!POINTER_TYPE_P (*type_p))
11667 {
11668 result = walk_type_fields (*type_p, func, data, pset, lh);
11669 if (result)
11670 return result;
11671 }
11672
11673 /* If this is a record type, also walk the fields. */
11674 if (RECORD_OR_UNION_TYPE_P (*type_p))
11675 {
11676 tree field;
11677
11678 for (field = TYPE_FIELDS (*type_p); field;
11679 field = DECL_CHAIN (field))
11680 {
11681 /* We'd like to look at the type of the field, but we can
11682 easily get infinite recursion. So assume it's pointed
11683 to elsewhere in the tree. Also, ignore things that
11684 aren't fields. */
11685 if (TREE_CODE (field) != FIELD_DECL)
11686 continue;
11687
11688 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11689 WALK_SUBTREE (DECL_SIZE (field));
11690 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11691 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11692 WALK_SUBTREE (DECL_QUALIFIER (field));
11693 }
11694 }
11695
11696 /* Same for scalar types. */
11697 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11698 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11699 || TREE_CODE (*type_p) == INTEGER_TYPE
11700 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11701 || TREE_CODE (*type_p) == REAL_TYPE)
11702 {
11703 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11704 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11705 }
11706
11707 WALK_SUBTREE (TYPE_SIZE (*type_p));
11708 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11709 }
11710 /* FALLTHRU */
11711
11712 default:
11713 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11714 {
11715 int i, len;
11716
11717 /* Walk over all the sub-trees of this operand. */
11718 len = TREE_OPERAND_LENGTH (*tp);
11719
11720 /* Go through the subtrees. We need to do this in forward order so
11721 that the scope of a FOR_EXPR is handled properly. */
11722 if (len)
11723 {
11724 for (i = 0; i < len - 1; ++i)
11725 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11726 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11727 }
11728 }
11729 /* If this is a type, walk the needed fields in the type. */
11730 else if (TYPE_P (*tp))
11731 return walk_type_fields (*tp, func, data, pset, lh);
11732 break;
11733 }
11734
11735 /* We didn't find what we were looking for. */
11736 return NULL_TREE;
11737
11738 #undef WALK_SUBTREE_TAIL
11739 }
11740 #undef WALK_SUBTREE
11741
11742 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11743
11744 tree
11745 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11746 walk_tree_lh lh)
11747 {
11748 tree result;
11749
11750 hash_set<tree> pset;
11751 result = walk_tree_1 (tp, func, data, &pset, lh);
11752 return result;
11753 }
11754
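/* Usage sketch (hypothetical callback and EXPR variable): callers normally
   go through the walk_tree and walk_tree_without_duplicates macros in
   tree.h, which pass a NULL language-specific hook:

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
	 ++*(int *) data;
       return NULL_TREE;	// NULL_TREE means: keep walking
     }

     int count = 0;
     walk_tree_without_duplicates (&expr, count_calls_r, &count);

   Returning a non-NULL tree from the callback stops the walk and that value
   becomes the result of walk_tree. */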
11755
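/* Return the BLOCK, if any, recorded in the location of expression T.
   T must be an expression node. */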
11756 tree
11757 tree_block (tree t)
11758 {
11759 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11760
11761 if (IS_EXPR_CODE_CLASS (c))
11762 return LOCATION_BLOCK (t->exp.locus);
11763 gcc_unreachable ();
11764 return NULL;
11765 }
11766
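/* Record B as the BLOCK in the location of expression T, or drop any block
   information from the location if B is NULL. T must be an expression
   node. */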
11767 void
11768 tree_set_block (tree t, tree b)
11769 {
11770 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11771
11772 if (IS_EXPR_CODE_CLASS (c))
11773 {
11774 if (b)
11775 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11776 else
11777 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11778 }
11779 else
11780 gcc_unreachable ();
11781 }
11782
11783 /* Create a nameless artificial label and put it in the current
11784 function context. The label has a location of LOC. Returns the
11785 newly created label. */
11786
11787 tree
11788 create_artificial_label (location_t loc)
11789 {
11790 tree lab = build_decl (loc,
11791 LABEL_DECL, NULL_TREE, void_type_node);
11792
11793 DECL_ARTIFICIAL (lab) = 1;
11794 DECL_IGNORED_P (lab) = 1;
11795 DECL_CONTEXT (lab) = current_function_decl;
11796 return lab;
11797 }
11798
11799 /* Given a tree, try to return a useful variable name that we can use
11800 to prefix a temporary that is being assigned the value of the tree.
11801 I.E. given <temp> = &A, return A. */
11802
11803 const char *
11804 get_name (tree t)
11805 {
11806 tree stripped_decl;
11807
11808 stripped_decl = t;
11809 STRIP_NOPS (stripped_decl);
11810 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11811 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11812 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11813 {
11814 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11815 if (!name)
11816 return NULL;
11817 return IDENTIFIER_POINTER (name);
11818 }
11819 else
11820 {
11821 switch (TREE_CODE (stripped_decl))
11822 {
11823 case ADDR_EXPR:
11824 return get_name (TREE_OPERAND (stripped_decl, 0));
11825 default:
11826 return NULL;
11827 }
11828 }
11829 }
11830
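/* Usage sketch (hypothetical RHS variable holding "&a" for some named
   VAR_DECL "a"): the result is typically used as a prefix when naming a
   temporary:

     const char *prefix = get_name (rhs);	// "a", or NULL if unnamed
     tree tmp = create_tmp_var (TREE_TYPE (rhs), prefix);

   create_tmp_var tolerates a NULL prefix, so the result of get_name can be
   passed on unchecked. */
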
11831 /* Return true if FNTYPE has a variable argument list. */
11832
11833 bool
11834 stdarg_p (const_tree fntype)
11835 {
11836 function_args_iterator args_iter;
11837 tree n = NULL_TREE, t;
11838
11839 if (!fntype)
11840 return false;
11841
11842 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11843 {
11844 n = t;
11845 }
11846
11847 return n != NULL_TREE && n != void_type_node;
11848 }
11849
11850 /* Return true if FNTYPE has a prototype. */
11851
11852 bool
11853 prototype_p (const_tree fntype)
11854 {
11855 tree t;
11856
11857 gcc_assert (fntype != NULL_TREE);
11858
11859 t = TYPE_ARG_TYPES (fntype);
11860 return (t != NULL_TREE);
11861 }
11862
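/* Illustrative examples for the two predicates above (the usual C
   declarations assumed): for "int printf (const char *, ...)" the
   TYPE_ARG_TYPES chain does not end in void_type_node, so stdarg_p is true
   and prototype_p is true. For "int f (void)" the chain ends in
   void_type_node, so stdarg_p is false. For an unprototyped "int g ()"
   TYPE_ARG_TYPES is NULL_TREE, so both predicates return false. */
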
11863 /* If BLOCK is inlined from an __attribute__((__artificial__))
11864 routine, return pointer to location from where it has been
11865 called. */
11866 location_t *
11867 block_nonartificial_location (tree block)
11868 {
11869 location_t *ret = NULL;
11870
11871 while (block && TREE_CODE (block) == BLOCK
11872 && BLOCK_ABSTRACT_ORIGIN (block))
11873 {
11874 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11875
11876 while (TREE_CODE (ao) == BLOCK
11877 && BLOCK_ABSTRACT_ORIGIN (ao)
11878 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11879 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11880
11881 if (TREE_CODE (ao) == FUNCTION_DECL)
11882 {
11883 /* If AO is an artificial inline, point RET to the
11884 call site locus at which it has been inlined and continue
11885 the loop, in case AO's caller is also an artificial
11886 inline. */
11887 if (DECL_DECLARED_INLINE_P (ao)
11888 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11889 ret = &BLOCK_SOURCE_LOCATION (block);
11890 else
11891 break;
11892 }
11893 else if (TREE_CODE (ao) != BLOCK)
11894 break;
11895
11896 block = BLOCK_SUPERCONTEXT (block);
11897 }
11898 return ret;
11899 }
11900
11901
11902 /* If EXP is inlined from an __attribute__((__artificial__))
11903 function, return the location of the original call expression. */
11904
11905 location_t
11906 tree_nonartificial_location (tree exp)
11907 {
11908 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11909
11910 if (loc)
11911 return *loc;
11912 else
11913 return EXPR_LOCATION (exp);
11914 }
11915
11916
11917 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11918 nodes. */
11919
11920 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11921
11922 hashval_t
11923 cl_option_hasher::hash (tree x)
11924 {
11925 const_tree const t = x;
11926 const char *p;
11927 size_t i;
11928 size_t len = 0;
11929 hashval_t hash = 0;
11930
11931 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11932 {
11933 p = (const char *)TREE_OPTIMIZATION (t);
11934 len = sizeof (struct cl_optimization);
11935 }
11936
11937 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11938 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11939
11940 else
11941 gcc_unreachable ();
11942
11943 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11944 something else. */
11945 for (i = 0; i < len; i++)
11946 if (p[i])
11947 hash = (hash << 4) ^ ((i << 2) | p[i]);
11948
11949 return hash;
11950 }
11951
11952 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11953    TARGET_OPTION tree node) is the same as that given by *Y, a node of the
11954    same kind. */
11955
11956 bool
11957 cl_option_hasher::equal (tree x, tree y)
11958 {
11959 const_tree const xt = x;
11960 const_tree const yt = y;
11961 const char *xp;
11962 const char *yp;
11963 size_t len;
11964
11965 if (TREE_CODE (xt) != TREE_CODE (yt))
11966 return 0;
11967
11968 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11969 {
11970 xp = (const char *)TREE_OPTIMIZATION (xt);
11971 yp = (const char *)TREE_OPTIMIZATION (yt);
11972 len = sizeof (struct cl_optimization);
11973 }
11974
11975 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11976 {
11977 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11978 TREE_TARGET_OPTION (yt));
11979 }
11980
11981 else
11982 gcc_unreachable ();
11983
11984 return (memcmp (xp, yp, len) == 0);
11985 }
11986
11987 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11988
11989 tree
11990 build_optimization_node (struct gcc_options *opts)
11991 {
11992 tree t;
11993
11994 /* Use the cache of optimization nodes. */
11995
11996 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11997 opts);
11998
11999 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12000 t = *slot;
12001 if (!t)
12002 {
12003 /* Insert this one into the hash table. */
12004 t = cl_optimization_node;
12005 *slot = t;
12006
12007 /* Make a new node for next time round. */
12008 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12009 }
12010
12011 return t;
12012 }
12013
12014 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12015
12016 tree
12017 build_target_option_node (struct gcc_options *opts)
12018 {
12019 tree t;
12020
12021 /* Use the cache of optimization nodes. */
12022
12023 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12024 opts);
12025
12026 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12027 t = *slot;
12028 if (!t)
12029 {
12030 /* Insert this one into the hash table. */
12031 t = cl_target_option_node;
12032 *slot = t;
12033
12034 /* Make a new node for next time round. */
12035 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12036 }
12037
12038 return t;
12039 }
12040
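/* Usage sketch (hypothetical FNDECL variable; the real call sites live in
   the front ends and option handling code): a typical use is to snapshot the
   current command-line state for a function:

     tree opt = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = opt;

   Because of the hash table above, two snapshots with identical flags share
   one OPTIMIZATION_NODE, so pointer comparison is enough to tell whether two
   functions use the same optimization options. */
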
12041 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12042 so that they aren't saved during PCH writing. */
12043
12044 void
12045 prepare_target_option_nodes_for_pch (void)
12046 {
12047 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12048 for (; iter != cl_option_hash_table->end (); ++iter)
12049 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12050 TREE_TARGET_GLOBALS (*iter) = NULL;
12051 }
12052
12053 /* Determine the "ultimate origin" of a block. The block may be an inlined
12054 instance of an inlined instance of a block which is local to an inline
12055 function, so we have to trace all of the way back through the origin chain
12056 to find out what sort of node actually served as the original seed for the
12057 given block. */
12058
12059 tree
12060 block_ultimate_origin (const_tree block)
12061 {
12062 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12063
12064 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12065 we're trying to output the abstract instance of this function. */
12066 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12067 return NULL_TREE;
12068
12069 if (immediate_origin == NULL_TREE)
12070 return NULL_TREE;
12071 else
12072 {
12073 tree ret_val;
12074 tree lookahead = immediate_origin;
12075
12076 do
12077 {
12078 ret_val = lookahead;
12079 lookahead = (TREE_CODE (ret_val) == BLOCK
12080 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12081 }
12082 while (lookahead != NULL && lookahead != ret_val);
12083
12084 /* The block's abstract origin chain may not be the *ultimate* origin of
12085 the block. It could lead to a DECL that has an abstract origin set.
12086 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12087 will give us if it has one). Note that DECL's abstract origins are
12088 supposed to be the most distant ancestor (or so decl_ultimate_origin
12089 claims), so we don't need to loop following the DECL origins. */
12090 if (DECL_P (ret_val))
12091 return DECL_ORIGIN (ret_val);
12092
12093 return ret_val;
12094 }
12095 }
12096
12097 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12098 no instruction. */
12099
12100 bool
12101 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12102 {
12103   /* Use precision rather than machine mode when we can, which gives
12104 the correct answer even for submode (bit-field) types. */
12105 if ((INTEGRAL_TYPE_P (outer_type)
12106 || POINTER_TYPE_P (outer_type)
12107 || TREE_CODE (outer_type) == OFFSET_TYPE)
12108 && (INTEGRAL_TYPE_P (inner_type)
12109 || POINTER_TYPE_P (inner_type)
12110 || TREE_CODE (inner_type) == OFFSET_TYPE))
12111 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12112
12113 /* Otherwise fall back on comparing machine modes (e.g. for
12114 aggregate types, floats). */
12115 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12116 }
12117
12118 /* Return true iff conversion in EXP generates no instruction. Mark
12119 it inline so that we fully inline into the stripping functions even
12120 though we have two uses of this function. */
12121
12122 static inline bool
12123 tree_nop_conversion (const_tree exp)
12124 {
12125 tree outer_type, inner_type;
12126
12127 if (!CONVERT_EXPR_P (exp)
12128 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12129 return false;
12130 if (TREE_OPERAND (exp, 0) == error_mark_node)
12131 return false;
12132
12133 outer_type = TREE_TYPE (exp);
12134 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12135
12136 if (!inner_type)
12137 return false;
12138
12139 return tree_nop_conversion_p (outer_type, inner_type);
12140 }
12141
12142 /* Return true iff conversion in EXP generates no instruction. Don't
12143 consider conversions changing the signedness. */
12144
12145 static bool
12146 tree_sign_nop_conversion (const_tree exp)
12147 {
12148 tree outer_type, inner_type;
12149
12150 if (!tree_nop_conversion (exp))
12151 return false;
12152
12153 outer_type = TREE_TYPE (exp);
12154 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12155
12156 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12157 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12158 }
12159
12160 /* Strip conversions from EXP according to tree_nop_conversion and
12161 return the resulting expression. */
12162
12163 tree
12164 tree_strip_nop_conversions (tree exp)
12165 {
12166 while (tree_nop_conversion (exp))
12167 exp = TREE_OPERAND (exp, 0);
12168 return exp;
12169 }
12170
12171 /* Strip conversions from EXP according to tree_sign_nop_conversion
12172 and return the resulting expression. */
12173
12174 tree
12175 tree_strip_sign_nop_conversions (tree exp)
12176 {
12177 while (tree_sign_nop_conversion (exp))
12178 exp = TREE_OPERAND (exp, 0);
12179 return exp;
12180 }
12181
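/* Usage sketch (hypothetical EXPR variable): these routines back the
   STRIP_NOPS and STRIP_SIGN_NOPS macros in tree.h. Stripping by hand looks
   like:

     tree inner = tree_strip_nop_conversions (expr);

   For "(int) (unsigned) x" with 32-bit int this returns "x", since both
   conversions preserve the precision; tree_strip_sign_nop_conversions leaves
   the expression untouched because the outermost conversion changes the
   signedness. */
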
12182 /* Avoid any floating point extensions from EXP. */
12183 tree
12184 strip_float_extensions (tree exp)
12185 {
12186 tree sub, expt, subt;
12187
12188   /* For a floating point constant, look up the narrowest type that can
12189      hold it properly and handle it like (type)(narrowest_type)constant.
12190      This way we can optimize for instance a=a*2.0 where "a" is float
12191      but 2.0 is a double constant. */
12192 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12193 {
12194 REAL_VALUE_TYPE orig;
12195 tree type = NULL;
12196
12197 orig = TREE_REAL_CST (exp);
12198 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12199 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12200 type = float_type_node;
12201 else if (TYPE_PRECISION (TREE_TYPE (exp))
12202 > TYPE_PRECISION (double_type_node)
12203 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12204 type = double_type_node;
12205 if (type)
12206 return build_real_truncate (type, orig);
12207 }
12208
12209 if (!CONVERT_EXPR_P (exp))
12210 return exp;
12211
12212 sub = TREE_OPERAND (exp, 0);
12213 subt = TREE_TYPE (sub);
12214 expt = TREE_TYPE (exp);
12215
12216 if (!FLOAT_TYPE_P (subt))
12217 return exp;
12218
12219 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12220 return exp;
12221
12222 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12223 return exp;
12224
12225 return strip_float_extensions (sub);
12226 }
12227
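/* Worked example (hypothetical RHS_OP variable): for "a = a * 2.0" where "a"
   is a float, the right-hand side is "(double) a * 2.0". Then

     tree narrowed = strip_float_extensions (rhs_op);

   returns "a" when RHS_OP is the "(double) a" conversion, and returns a
   float REAL_CST when RHS_OP is the constant 2.0, because 2.0 is exactly
   representable in float; a later fold can then do the whole multiplication
   in float. */
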
12228 /* Strip out all handled components that produce invariant
12229 offsets. */
12230
12231 const_tree
12232 strip_invariant_refs (const_tree op)
12233 {
12234 while (handled_component_p (op))
12235 {
12236 switch (TREE_CODE (op))
12237 {
12238 case ARRAY_REF:
12239 case ARRAY_RANGE_REF:
12240 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12241 || TREE_OPERAND (op, 2) != NULL_TREE
12242 || TREE_OPERAND (op, 3) != NULL_TREE)
12243 return NULL;
12244 break;
12245
12246 case COMPONENT_REF:
12247 if (TREE_OPERAND (op, 2) != NULL_TREE)
12248 return NULL;
12249 break;
12250
12251 default:;
12252 }
12253 op = TREE_OPERAND (op, 0);
12254 }
12255
12256 return op;
12257 }
12258
12259 static GTY(()) tree gcc_eh_personality_decl;
12260
12261 /* Return the GCC personality function decl. */
12262
12263 tree
12264 lhd_gcc_personality (void)
12265 {
12266 if (!gcc_eh_personality_decl)
12267 gcc_eh_personality_decl = build_personality_function ("gcc");
12268 return gcc_eh_personality_decl;
12269 }
12270
12271 /* TARGET is the call target of a GIMPLE call statement
12272    (obtained by gimple_call_fn). Return true if it is an
12273    OBJ_TYPE_REF representing a virtual call of a C++ method.
12274    (As opposed to an OBJ_TYPE_REF representing Objective-C calls
12275    through a cast, where the middle-end devirtualization machinery
12276    can't apply.) */
12277
12278 bool
12279 virtual_method_call_p (const_tree target)
12280 {
12281 if (TREE_CODE (target) != OBJ_TYPE_REF)
12282 return false;
12283 tree t = TREE_TYPE (target);
12284 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12285 t = TREE_TYPE (t);
12286 if (TREE_CODE (t) == FUNCTION_TYPE)
12287 return false;
12288 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12289 /* If we do not have BINFO associated, it means that type was built
12290 without devirtualization enabled. Do not consider this a virtual
12291 call. */
12292 if (!TYPE_BINFO (obj_type_ref_class (target)))
12293 return false;
12294 return true;
12295 }
12296
12297 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
12298
12299 tree
12300 obj_type_ref_class (const_tree ref)
12301 {
12302 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12303 ref = TREE_TYPE (ref);
12304 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12305 ref = TREE_TYPE (ref);
12306   /* We look for the type THIS points to. ObjC also builds
12307      OBJ_TYPE_REF for non-method calls; their first parameter
12308      ID however also corresponds to the class type. */
12309 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12310 || TREE_CODE (ref) == FUNCTION_TYPE);
12311 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12312 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12313 return TREE_TYPE (ref);
12314 }
12315
12316 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12317
12318 static tree
12319 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12320 {
12321 unsigned int i;
12322 tree base_binfo, b;
12323
12324 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12325 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12326 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12327 return base_binfo;
12328 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12329 return b;
12330 return NULL;
12331 }
12332
12333 /* Try to find a base info of BINFO that would have its field decl at offset
12334 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12335    found, return it; otherwise return NULL_TREE. */
12336
12337 tree
12338 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12339 {
12340 tree type = BINFO_TYPE (binfo);
12341
12342 while (true)
12343 {
12344 HOST_WIDE_INT pos, size;
12345 tree fld;
12346 int i;
12347
12348 if (types_same_for_odr (type, expected_type))
12349 return binfo;
12350 if (offset < 0)
12351 return NULL_TREE;
12352
12353 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12354 {
12355 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12356 continue;
12357
12358 pos = int_bit_position (fld);
12359 size = tree_to_uhwi (DECL_SIZE (fld));
12360 if (pos <= offset && (pos + size) > offset)
12361 break;
12362 }
12363 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12364 return NULL_TREE;
12365
12366 /* Offset 0 indicates the primary base, whose vtable contents are
12367 represented in the binfo for the derived class. */
12368 else if (offset != 0)
12369 {
12370 tree found_binfo = NULL, base_binfo;
12371 /* Offsets in BINFO are in bytes relative to the whole structure
12372 while POS is in bits relative to the containing field. */
12373 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12374 / BITS_PER_UNIT);
12375
12376 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12377 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12378 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12379 {
12380 found_binfo = base_binfo;
12381 break;
12382 }
12383 if (found_binfo)
12384 binfo = found_binfo;
12385 else
12386 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12387 binfo_offset);
12388 }
12389
12390 type = TREE_TYPE (fld);
12391 offset -= pos;
12392 }
12393 }
12394
12395 /* Returns true if X is a typedef decl. */
12396
12397 bool
12398 is_typedef_decl (const_tree x)
12399 {
12400 return (x && TREE_CODE (x) == TYPE_DECL
12401 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12402 }
12403
12404 /* Returns true iff TYPE is a type variant created for a typedef. */
12405
12406 bool
12407 typedef_variant_p (const_tree type)
12408 {
12409 return is_typedef_decl (TYPE_NAME (type));
12410 }
12411
12412 /* Warn about a use of an identifier which was marked deprecated. */
12413 void
12414 warn_deprecated_use (tree node, tree attr)
12415 {
12416 const char *msg;
12417
12418 if (node == 0 || !warn_deprecated_decl)
12419 return;
12420
12421 if (!attr)
12422 {
12423 if (DECL_P (node))
12424 attr = DECL_ATTRIBUTES (node);
12425 else if (TYPE_P (node))
12426 {
12427 tree decl = TYPE_STUB_DECL (node);
12428 if (decl)
12429 attr = lookup_attribute ("deprecated",
12430 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12431 }
12432 }
12433
12434 if (attr)
12435 attr = lookup_attribute ("deprecated", attr);
12436
12437 if (attr)
12438 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12439 else
12440 msg = NULL;
12441
12442 bool w;
12443 if (DECL_P (node))
12444 {
12445 if (msg)
12446 w = warning (OPT_Wdeprecated_declarations,
12447 "%qD is deprecated: %s", node, msg);
12448 else
12449 w = warning (OPT_Wdeprecated_declarations,
12450 "%qD is deprecated", node);
12451 if (w)
12452 inform (DECL_SOURCE_LOCATION (node), "declared here");
12453 }
12454 else if (TYPE_P (node))
12455 {
12456 tree what = NULL_TREE;
12457 tree decl = TYPE_STUB_DECL (node);
12458
12459 if (TYPE_NAME (node))
12460 {
12461 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12462 what = TYPE_NAME (node);
12463 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12464 && DECL_NAME (TYPE_NAME (node)))
12465 what = DECL_NAME (TYPE_NAME (node));
12466 }
12467
12468 if (decl)
12469 {
12470 if (what)
12471 {
12472 if (msg)
12473 w = warning (OPT_Wdeprecated_declarations,
12474 "%qE is deprecated: %s", what, msg);
12475 else
12476 w = warning (OPT_Wdeprecated_declarations,
12477 "%qE is deprecated", what);
12478 }
12479 else
12480 {
12481 if (msg)
12482 w = warning (OPT_Wdeprecated_declarations,
12483 "type is deprecated: %s", msg);
12484 else
12485 w = warning (OPT_Wdeprecated_declarations,
12486 "type is deprecated");
12487 }
12488 if (w)
12489 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12490 }
12491 else
12492 {
12493 if (what)
12494 {
12495 if (msg)
12496 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12497 what, msg);
12498 else
12499 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12500 }
12501 else
12502 {
12503 if (msg)
12504 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12505 msg);
12506 else
12507 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12508 }
12509 }
12510 }
12511 }
12512
12513 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12514 somewhere in it. */
12515
12516 bool
12517 contains_bitfld_component_ref_p (const_tree ref)
12518 {
12519 while (handled_component_p (ref))
12520 {
12521 if (TREE_CODE (ref) == COMPONENT_REF
12522 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12523 return true;
12524 ref = TREE_OPERAND (ref, 0);
12525 }
12526
12527 return false;
12528 }
12529
12530 /* Try to determine whether a TRY_CATCH expression can fall through.
12531 This is a subroutine of block_may_fallthru. */
12532
12533 static bool
12534 try_catch_may_fallthru (const_tree stmt)
12535 {
12536 tree_stmt_iterator i;
12537
12538 /* If the TRY block can fall through, the whole TRY_CATCH can
12539 fall through. */
12540 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12541 return true;
12542
12543 i = tsi_start (TREE_OPERAND (stmt, 1));
12544 switch (TREE_CODE (tsi_stmt (i)))
12545 {
12546 case CATCH_EXPR:
12547 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12548 catch expression and a body. The whole TRY_CATCH may fall
12549 through iff any of the catch bodies falls through. */
12550 for (; !tsi_end_p (i); tsi_next (&i))
12551 {
12552 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12553 return true;
12554 }
12555 return false;
12556
12557 case EH_FILTER_EXPR:
12558 /* The exception filter expression only matters if there is an
12559 exception. If the exception does not match EH_FILTER_TYPES,
12560 we will execute EH_FILTER_FAILURE, and we will fall through
12561 if that falls through. If the exception does match
12562 EH_FILTER_TYPES, the stack unwinder will continue up the
12563 stack, so we will not fall through. We don't know whether we
12564 will throw an exception which matches EH_FILTER_TYPES or not,
12565 so we just ignore EH_FILTER_TYPES and assume that we might
12566 throw an exception which doesn't match. */
12567 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12568
12569 default:
12570 /* This case represents statements to be executed when an
12571 exception occurs. Those statements are implicitly followed
12572 by a RESX statement to resume execution after the exception.
12573 So in this case the TRY_CATCH never falls through. */
12574 return false;
12575 }
12576 }
12577
12578 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12579 need not be 100% accurate; simply be conservative and return true if we
12580 don't know. This is used only to avoid stupidly generating extra code.
12581 If we're wrong, we'll just delete the extra code later. */
12582
12583 bool
12584 block_may_fallthru (const_tree block)
12585 {
12586 /* This CONST_CAST is okay because expr_last returns its argument
12587 unmodified and we assign it to a const_tree. */
12588 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12589
12590 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12591 {
12592 case GOTO_EXPR:
12593 case RETURN_EXPR:
12594 /* Easy cases. If the last statement of the block implies
12595 control transfer, then we can't fall through. */
12596 return false;
12597
12598 case SWITCH_EXPR:
12599 /* If SWITCH_LABELS is set, this is lowered, and represents a
12600 branch to a selected label and hence can not fall through.
12601 Otherwise SWITCH_BODY is set, and the switch can fall
12602 through. */
12603 return SWITCH_LABELS (stmt) == NULL_TREE;
12604
12605 case COND_EXPR:
12606 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12607 return true;
12608 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12609
12610 case BIND_EXPR:
12611 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12612
12613 case TRY_CATCH_EXPR:
12614 return try_catch_may_fallthru (stmt);
12615
12616 case TRY_FINALLY_EXPR:
12617 /* The finally clause is always executed after the try clause,
12618 so if it does not fall through, then the try-finally will not
12619 fall through. Otherwise, if the try clause does not fall
12620 through, then when the finally clause falls through it will
12621 resume execution wherever the try clause was going. So the
12622 whole try-finally will only fall through if both the try
12623 clause and the finally clause fall through. */
12624 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12625 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12626
12627 case MODIFY_EXPR:
12628 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12629 stmt = TREE_OPERAND (stmt, 1);
12630 else
12631 return true;
12632 /* FALLTHRU */
12633
12634 case CALL_EXPR:
12635 /* Functions that do not return do not fall through. */
12636 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12637
12638 case CLEANUP_POINT_EXPR:
12639 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12640
12641 case TARGET_EXPR:
12642 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12643
12644 case ERROR_MARK:
12645 return true;
12646
12647 default:
12648 return lang_hooks.block_may_fallthru (stmt);
12649 }
12650 }
12651
12652 /* True if we are using EH to handle cleanups. */
12653 static bool using_eh_for_cleanups_flag = false;
12654
12655 /* This routine is called from front ends to indicate eh should be used for
12656 cleanups. */
12657 void
12658 using_eh_for_cleanups (void)
12659 {
12660 using_eh_for_cleanups_flag = true;
12661 }
12662
12663 /* Query whether EH is used for cleanups. */
12664 bool
12665 using_eh_for_cleanups_p (void)
12666 {
12667 return using_eh_for_cleanups_flag;
12668 }
12669
12670 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12671 const char *
12672 get_tree_code_name (enum tree_code code)
12673 {
12674 const char *invalid = "<invalid tree code>";
12675
12676 if (code >= MAX_TREE_CODES)
12677 return invalid;
12678
12679 return tree_code_name[code];
12680 }
12681
12682 /* Drops the TREE_OVERFLOW flag from T. */
12683
12684 tree
12685 drop_tree_overflow (tree t)
12686 {
12687 gcc_checking_assert (TREE_OVERFLOW (t));
12688
12689 /* For tree codes with a sharing machinery re-build the result. */
12690 if (TREE_CODE (t) == INTEGER_CST)
12691 return wide_int_to_tree (TREE_TYPE (t), t);
12692
12693 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12694 and drop the flag. */
12695 t = copy_node (t);
12696 TREE_OVERFLOW (t) = 0;
12697 return t;
12698 }
12699
12700 /* Given a memory reference expression T, return its base address.
12701 The base address of a memory reference expression is the main
12702 object being referenced. For instance, the base address for
12703 'array[i].fld[j]' is 'array'. You can think of this as stripping
12704 away the offset part from a memory address.
12705
12706 This function calls handled_component_p to strip away all the inner
12707 parts of the memory reference until it reaches the base object. */
12708
12709 tree
12710 get_base_address (tree t)
12711 {
12712 while (handled_component_p (t))
12713 t = TREE_OPERAND (t, 0);
12714
12715 if ((TREE_CODE (t) == MEM_REF
12716 || TREE_CODE (t) == TARGET_MEM_REF)
12717 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12718 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12719
12720 /* ??? Either the alias oracle or all callers need to properly deal
12721 with WITH_SIZE_EXPRs before we can look through those. */
12722 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12723 return NULL_TREE;
12724
12725 return t;
12726 }
12727
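/* Illustrative examples (hypothetical REF variable): for "array[i].fld[j]"
   the result is "array"; for a MEM_REF whose address operand is "&x" the
   ADDR_EXPR is looked through and the result is "x" itself:

     tree base = get_base_address (ref);
     if (base && DECL_P (base))
       {
	 // the access is based on a declared object
       }

   A NULL_TREE result (currently only for WITH_SIZE_EXPR) means the base
   could not be determined. */
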
12728 /* Return a tree of sizetype representing the size, in bytes, of the element
12729 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12730
12731 tree
12732 array_ref_element_size (tree exp)
12733 {
12734 tree aligned_size = TREE_OPERAND (exp, 3);
12735 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12736 location_t loc = EXPR_LOCATION (exp);
12737
12738 /* If a size was specified in the ARRAY_REF, it's the size measured
12739 in alignment units of the element type. So multiply by that value. */
12740 if (aligned_size)
12741 {
12742 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12743 sizetype from another type of the same width and signedness. */
12744 if (TREE_TYPE (aligned_size) != sizetype)
12745 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12746 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12747 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12748 }
12749
12750 /* Otherwise, take the size from that of the element type. Substitute
12751 any PLACEHOLDER_EXPR that we have. */
12752 else
12753 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12754 }
12755
12756 /* Return a tree representing the lower bound of the array mentioned in
12757 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12758
12759 tree
12760 array_ref_low_bound (tree exp)
12761 {
12762 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12763
12764 /* If a lower bound is specified in EXP, use it. */
12765 if (TREE_OPERAND (exp, 2))
12766 return TREE_OPERAND (exp, 2);
12767
12768 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12769 substituting for a PLACEHOLDER_EXPR as needed. */
12770 if (domain_type && TYPE_MIN_VALUE (domain_type))
12771 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12772
12773 /* Otherwise, return a zero of the appropriate type. */
12774 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12775 }
12776
12777 /* Return a tree representing the upper bound of the array mentioned in
12778 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12779
12780 tree
12781 array_ref_up_bound (tree exp)
12782 {
12783 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12784
12785 /* If there is a domain type and it has an upper bound, use it, substituting
12786 for a PLACEHOLDER_EXPR as needed. */
12787 if (domain_type && TYPE_MAX_VALUE (domain_type))
12788 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12789
12790 /* Otherwise fail. */
12791 return NULL_TREE;
12792 }
12793
12794 /* Returns true if REF is an array reference to an array at the end of
12795 a structure. If this is the case, the array may be allocated larger
12796 than its upper bound implies. */
12797
12798 bool
12799 array_at_struct_end_p (tree ref)
12800 {
12801 if (TREE_CODE (ref) != ARRAY_REF
12802 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12803 return false;
12804
12805 while (handled_component_p (ref))
12806 {
12807 /* If the reference chain contains a component reference to a
12808 non-union type and there follows another field the reference
12809 is not at the end of a structure. */
12810 if (TREE_CODE (ref) == COMPONENT_REF
12811 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12812 {
12813 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12814 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12815 nextf = DECL_CHAIN (nextf);
12816 if (nextf)
12817 return false;
12818 }
12819
12820 ref = TREE_OPERAND (ref, 0);
12821 }
12822
12823 /* If the reference is based on a declared entity, the size of the array
12824 is constrained by its given domain. */
12825 if (DECL_P (ref))
12826 return false;
12827
12828 return true;
12829 }
12830
12831 /* Return a tree representing the offset, in bytes, of the field referenced
12832 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12833
12834 tree
12835 component_ref_field_offset (tree exp)
12836 {
12837 tree aligned_offset = TREE_OPERAND (exp, 2);
12838 tree field = TREE_OPERAND (exp, 1);
12839 location_t loc = EXPR_LOCATION (exp);
12840
12841 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12842 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12843 value. */
12844 if (aligned_offset)
12845 {
12846 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12847 sizetype from another type of the same width and signedness. */
12848 if (TREE_TYPE (aligned_offset) != sizetype)
12849 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12850 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12851 size_int (DECL_OFFSET_ALIGN (field)
12852 / BITS_PER_UNIT));
12853 }
12854
12855 /* Otherwise, take the offset from that of the field. Substitute
12856 any PLACEHOLDER_EXPR that we have. */
12857 else
12858 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12859 }
12860
12861 /* Return the machine mode of T. For vectors, returns the mode of the
12862 inner type. The main use case is to feed the result to HONOR_NANS,
12863 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12864
12865 machine_mode
12866 element_mode (const_tree t)
12867 {
12868 if (!TYPE_P (t))
12869 t = TREE_TYPE (t);
12870 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12871 t = TREE_TYPE (t);
12872 return TYPE_MODE (t);
12873 }
12874
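/* Illustrative example: for a vector of floats (or a complex float) the
   returned mode is SFmode rather than the vector or complex mode, so a check
   such as

     if (HONOR_NANS (element_mode (type)))
       ...

   asks about the scalar element instead of a vector mode or BLKmode that
   HONOR_NANS is not meant for. */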
12875
12876 /* Verify that the basic properties of T match TV and thus T can be a variant
12877    of TV. TV should be the more specified variant (i.e. the main variant). */
12878
12879 static bool
12880 verify_type_variant (const_tree t, tree tv)
12881 {
12882 /* Type variant can differ by:
12883
12884 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12885 ENCODE_QUAL_ADDR_SPACE.
12886    - the main variant may be COMPLETE_TYPE_P and variant types !COMPLETE_TYPE_P;
12887      in this case some values may not be set in the variant types
12888      (see the COMPLETE_TYPE_P checks).
12889    - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12890    - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12891 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12892 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12893 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12894      this is necessary to make it possible to merge types from different TUs
12895 - arrays, pointers and references may have TREE_TYPE that is a variant
12896 of TREE_TYPE of their main variants.
12897    - aggregates may have a new TYPE_FIELDS list that lists variants of
12898      the main variant's TYPE_FIELDS.
12899 - vector types may differ by TYPE_VECTOR_OPAQUE
12900    - TYPE_METHODS is always NULL for variant types and maintained for
12901 main variant only.
12902 */
12903
12904 /* Convenience macro for matching individual fields. */
12905 #define verify_variant_match(flag) \
12906 do { \
12907 if (flag (tv) != flag (t)) \
12908 { \
12909 error ("type variant differs by " #flag "."); \
12910 debug_tree (tv); \
12911 return false; \
12912 } \
12913 } while (false)
12914
12915 /* tree_base checks. */
12916
12917 verify_variant_match (TREE_CODE);
12918 /* FIXME: Ada builds non-artificial variants of artificial types. */
12919 if (TYPE_ARTIFICIAL (tv) && 0)
12920 verify_variant_match (TYPE_ARTIFICIAL);
12921 if (POINTER_TYPE_P (tv))
12922 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12923   /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12924 verify_variant_match (TYPE_UNSIGNED);
12925 verify_variant_match (TYPE_ALIGN_OK);
12926 verify_variant_match (TYPE_PACKED);
12927 if (TREE_CODE (t) == REFERENCE_TYPE)
12928 verify_variant_match (TYPE_REF_IS_RVALUE);
12929 verify_variant_match (TYPE_SATURATING);
12930   /* FIXME: This check triggers during the libstdc++ build. */
12931 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12932 verify_variant_match (TYPE_FINAL_P);
12933
12934 /* tree_type_common checks. */
12935
12936 if (COMPLETE_TYPE_P (t))
12937 {
12938 verify_variant_match (TYPE_SIZE);
12939 verify_variant_match (TYPE_MODE);
12940 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12941 	  /* FIXME: ideally we should compare pointer equality, but the Java FE
12942 	     produces variants where the size is an INTEGER_CST of a different
12943 	     type (int wrt size_type) during the libjava build. */
12944 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12945 {
12946 error ("type variant has different TYPE_SIZE_UNIT");
12947 debug_tree (tv);
12948 error ("type variant's TYPE_SIZE_UNIT");
12949 debug_tree (TYPE_SIZE_UNIT (tv));
12950 error ("type's TYPE_SIZE_UNIT");
12951 debug_tree (TYPE_SIZE_UNIT (t));
12952 return false;
12953 }
12954 }
12955 verify_variant_match (TYPE_PRECISION);
12956 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12957 if (RECORD_OR_UNION_TYPE_P (t))
12958 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12959 else if (TREE_CODE (t) == ARRAY_TYPE)
12960 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12961   /* During LTO we merge variant lists from different translation units
12962      that may differ by TYPE_CONTEXT, which in turn may point
12963 to TRANSLATION_UNIT_DECL.
12964 Ada also builds variants of types with different TYPE_CONTEXT. */
12965 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12966 verify_variant_match (TYPE_CONTEXT);
12967 verify_variant_match (TYPE_STRING_FLAG);
12968 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12969 verify_variant_match (TYPE_ALIAS_SET);
12970
12971 /* tree_type_non_common checks. */
12972
12973   /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12974      and dangles the pointer from time to time. */
12975 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12976 && (in_lto_p || !TYPE_VFIELD (tv)
12977 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12978 {
12979 error ("type variant has different TYPE_VFIELD");
12980 debug_tree (tv);
12981 return false;
12982 }
12983 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12984 || TREE_CODE (t) == INTEGER_TYPE
12985 || TREE_CODE (t) == BOOLEAN_TYPE
12986 || TREE_CODE (t) == REAL_TYPE
12987 || TREE_CODE (t) == FIXED_POINT_TYPE)
12988 {
12989 verify_variant_match (TYPE_MAX_VALUE);
12990 verify_variant_match (TYPE_MIN_VALUE);
12991 }
12992 if (TREE_CODE (t) == METHOD_TYPE)
12993 verify_variant_match (TYPE_METHOD_BASETYPE);
12994 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12995 {
12996 error ("type variant has TYPE_METHODS");
12997 debug_tree (tv);
12998 return false;
12999 }
13000 if (TREE_CODE (t) == OFFSET_TYPE)
13001 verify_variant_match (TYPE_OFFSET_BASETYPE);
13002 if (TREE_CODE (t) == ARRAY_TYPE)
13003 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13004   /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13005      or even in the type's main variant.  This is needed to make bootstrap pass
13006      and the bug seems to be new in GCC 5.
13007      The C++ FE should be updated to make this consistent and we should check
13008      that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13009      is a match with the main variant.
13010 
13011      Also disable the check for Java for now because of a parser hack that
13012      first builds a dummy BINFO and then sometimes replaces it with the real
13013      BINFO in some of the copies.  */
13014 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13015 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13016      /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13017         Since there is no cheap way to tell a C++ type from a Java type
13018         without LTO, do the checking at LTO time only.  */
13019 && (in_lto_p && odr_type_p (t)))
13020 {
13021 error ("type variant has different TYPE_BINFO");
13022 debug_tree (tv);
13023 error ("type variant's TYPE_BINFO");
13024 debug_tree (TYPE_BINFO (tv));
13025 error ("type's TYPE_BINFO");
13026 debug_tree (TYPE_BINFO (t));
13027 return false;
13028 }
13029
13030 /* Check various uses of TYPE_VALUES_RAW. */
13031 if (TREE_CODE (t) == ENUMERAL_TYPE)
13032 verify_variant_match (TYPE_VALUES);
13033 else if (TREE_CODE (t) == ARRAY_TYPE)
13034 verify_variant_match (TYPE_DOMAIN);
13035   /* Permit incomplete variants of a complete type.  While FEs may complete
13036      all variants, this does not happen for C++ templates in all cases.  */
13037 else if (RECORD_OR_UNION_TYPE_P (t)
13038 && COMPLETE_TYPE_P (t)
13039 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13040 {
13041 tree f1, f2;
13042
13043       /* Fortran builds qualified variants as new records with items of
13044          qualified type.  Verify that they look the same.  */
13045 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13046 f1 && f2;
13047 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13048 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13049 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13050 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13051             /* FIXME: gfc_nonrestricted_type builds all types as variants
13052                with the exception of pointer types.  It deeply copies the type,
13053                which means that we may end up with a variant type
13054                referring to a non-variant pointer.  We may change it to
13055                produce types as variants, too, like
13056                objc_get_protocol_qualified_type does.  */
13057 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13058 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13059 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13060 break;
13061 if (f1 || f2)
13062 {
13063 error ("type variant has different TYPE_FIELDS");
13064 debug_tree (tv);
13065 error ("first mismatch is field");
13066 debug_tree (f1);
13067 error ("and field");
13068 debug_tree (f2);
13069 return false;
13070 }
13071 }
13072 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13073 verify_variant_match (TYPE_ARG_TYPES);
13074   /* For C++ the qualified variant of an array type is really an array type
13075      of the qualified TREE_TYPE.
13076      ObjC builds variants of pointer types where the pointed-to type is a
13077      variant, too, in objc_get_protocol_qualified_type.  */
13078 if (TREE_TYPE (t) != TREE_TYPE (tv)
13079 && ((TREE_CODE (t) != ARRAY_TYPE
13080 && !POINTER_TYPE_P (t))
13081 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13082 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13083 {
13084 error ("type variant has different TREE_TYPE");
13085 debug_tree (tv);
13086 error ("type variant's TREE_TYPE");
13087 debug_tree (TREE_TYPE (tv));
13088 error ("type's TREE_TYPE");
13089 debug_tree (TREE_TYPE (t));
13090 return false;
13091 }
13092 if (type_with_alias_set_p (t)
13093 && !gimple_canonical_types_compatible_p (t, tv, false))
13094 {
13095       error ("type is not compatible with its variant");
13096 debug_tree (tv);
13097 error ("type variant's TREE_TYPE");
13098 debug_tree (TREE_TYPE (tv));
13099 error ("type's TREE_TYPE");
13100 debug_tree (TREE_TYPE (t));
13101 return false;
13102 }
13103 return true;
13104 #undef verify_variant_match
13105 }
13106
13107
13108 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13109 the middle-end types_compatible_p function. It needs to avoid
13110 claiming types are different for types that should be treated
13111 the same with respect to TBAA. Canonical types are also used
13112 for IL consistency checks via the useless_type_conversion_p
13113 predicate which does not handle all type kinds itself but falls
13114 back to pointer-comparison of TYPE_CANONICAL for aggregates
13115 for example. */
13116
13117 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13118 type calculation because we need to allow inter-operability between signed
13119 and unsigned variants. */
13120
13121 bool
13122 type_with_interoperable_signedness (const_tree type)
13123 {
13124   /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
13125      both signed char and unsigned char.  Similarly, the Fortran FE builds
13126      C_SIZE_T as a signed type, while C defines it as unsigned.  */
13127
13128 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13129 == INTEGER_TYPE
13130 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13131 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13132 }
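
/* Illustrative sketch (not part of the original file): assuming the usual
   layout of the narrow character types, both signed char and unsigned char
   satisfy the predicate above, so gimple_canonical_types_compatible_p below
   treats them as compatible even though TYPE_UNSIGNED differs.  Kept under
   "#if 0" so it has no effect on the build.  */
#if 0
static void
example_interoperable_signedness (void)
{
  /* Both narrow character types share the precision of signed char...  */
  gcc_checking_assert (type_with_interoperable_signedness
			 (signed_char_type_node));
  gcc_checking_assert (type_with_interoperable_signedness
			 (unsigned_char_type_node));
  /* ...so a signedness mismatch alone does not make them incompatible.  */
  gcc_checking_assert (gimple_canonical_types_compatible_p
			 (signed_char_type_node, unsigned_char_type_node,
			  false));
}
#endif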
13133
13134 /* Return true iff T1 and T2 are structurally identical as far as TBAA
13135    is concerned.
13136    This function is used both by lto.c canonical type merging and by the
13137    verifier.  If TRUST_TYPE_CANONICAL we do not look into the structure of
13138    types that have TYPE_CANONICAL defined and assume them equivalent.  */
13139
13140 bool
13141 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13142 bool trust_type_canonical)
13143 {
13144   /* Type variants should be the same as the main variant.  When not doing
13145      sanity checking to verify this fact, go to main variants and save some work.  */
13146 if (trust_type_canonical)
13147 {
13148 t1 = TYPE_MAIN_VARIANT (t1);
13149 t2 = TYPE_MAIN_VARIANT (t2);
13150 }
13151
13152 /* Check first for the obvious case of pointer identity. */
13153 if (t1 == t2)
13154 return true;
13155
13156 /* Check that we have two types to compare. */
13157 if (t1 == NULL_TREE || t2 == NULL_TREE)
13158 return false;
13159
13160   /* We consider complete types always compatible with incomplete types.
13161      This does not make sense for canonical type calculation, and thus we
13162      need to ensure that we are never called on them.
13163 
13164      FIXME: For more correctness the function probably should have three modes:
13165      1) a mode assuming that types are complete, matching their structure;
13166      2) a mode allowing incomplete types but producing equivalence classes
13167         and thus ignoring all info from complete types;
13168      3) a mode allowing incomplete types to match complete ones but checking
13169         compatibility between complete types.
13170 
13171      Modes 1 and 2 can be used for canonical type calculation.  Mode 3 is the
13172      real definition of type compatibility that can be used e.g. for warnings
13173      during declaration merging.  */
13174
13175 gcc_assert (!trust_type_canonical
13176 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13177 /* If the types have been previously registered and found equal
13178 they still are. */
13179 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13180 && trust_type_canonical)
13181 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13182
13183 /* Can't be the same type if the types don't have the same code. */
13184 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13185 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13186 return false;
13187
13188 /* Qualifiers do not matter for canonical type comparison purposes. */
13189
13190 /* Void types and nullptr types are always the same. */
13191 if (TREE_CODE (t1) == VOID_TYPE
13192 || TREE_CODE (t1) == NULLPTR_TYPE)
13193 return true;
13194
13195 /* Can't be the same type if they have different mode. */
13196 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13197 return false;
13198
13199 /* Non-aggregate types can be handled cheaply. */
13200 if (INTEGRAL_TYPE_P (t1)
13201 || SCALAR_FLOAT_TYPE_P (t1)
13202 || FIXED_POINT_TYPE_P (t1)
13203 || TREE_CODE (t1) == VECTOR_TYPE
13204 || TREE_CODE (t1) == COMPLEX_TYPE
13205 || TREE_CODE (t1) == OFFSET_TYPE
13206 || POINTER_TYPE_P (t1))
13207 {
13208       /* Can't be the same type if they have different precision.  */
13209 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13210 return false;
13211
13212 /* In some cases the signed and unsigned types are required to be
13213 inter-operable. */
13214 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13215 && !type_with_interoperable_signedness (t1))
13216 return false;
13217
13218 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13219 interoperable with "signed char". Unless all frontends are revisited
13220 to agree on these types, we must ignore the flag completely. */
13221
13222       /* The Fortran standard defines the C_PTR type, which is compatible with
13223          every C pointer.  For this reason we need to glob all pointers into one.
13224          Still, pointers in different address spaces are not compatible.  */
13225 if (POINTER_TYPE_P (t1))
13226 {
13227 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13228 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13229 return false;
13230 }
13231
13232 /* Tail-recurse to components. */
13233 if (TREE_CODE (t1) == VECTOR_TYPE
13234 || TREE_CODE (t1) == COMPLEX_TYPE)
13235 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13236 TREE_TYPE (t2),
13237 trust_type_canonical);
13238
13239 return true;
13240 }
13241
13242 /* Do type-specific comparisons. */
13243 switch (TREE_CODE (t1))
13244 {
13245 case ARRAY_TYPE:
13246       /* Array types are the same if the element types are the same and
13247          the number of elements is the same.  */
13248 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13249 trust_type_canonical)
13250 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13251 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13252 return false;
13253 else
13254 {
13255 tree i1 = TYPE_DOMAIN (t1);
13256 tree i2 = TYPE_DOMAIN (t2);
13257
13258 /* For an incomplete external array, the type domain can be
13259 NULL_TREE. Check this condition also. */
13260 if (i1 == NULL_TREE && i2 == NULL_TREE)
13261 return true;
13262 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13263 return false;
13264 else
13265 {
13266 tree min1 = TYPE_MIN_VALUE (i1);
13267 tree min2 = TYPE_MIN_VALUE (i2);
13268 tree max1 = TYPE_MAX_VALUE (i1);
13269 tree max2 = TYPE_MAX_VALUE (i2);
13270
13271 /* The minimum/maximum values have to be the same. */
13272 if ((min1 == min2
13273 || (min1 && min2
13274 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13275 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13276 || operand_equal_p (min1, min2, 0))))
13277 && (max1 == max2
13278 || (max1 && max2
13279 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13280 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13281 || operand_equal_p (max1, max2, 0)))))
13282 return true;
13283 else
13284 return false;
13285 }
13286 }
13287
13288 case METHOD_TYPE:
13289 case FUNCTION_TYPE:
13290       /* Function types are the same if the return type and argument types
13291          are the same.  */
13292 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13293 trust_type_canonical))
13294 return false;
13295
13296 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13297 return true;
13298 else
13299 {
13300 tree parms1, parms2;
13301
13302 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13303 parms1 && parms2;
13304 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13305 {
13306 if (!gimple_canonical_types_compatible_p
13307 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13308 trust_type_canonical))
13309 return false;
13310 }
13311
13312 if (parms1 || parms2)
13313 return false;
13314
13315 return true;
13316 }
13317
13318 case RECORD_TYPE:
13319 case UNION_TYPE:
13320 case QUAL_UNION_TYPE:
13321 {
13322 tree f1, f2;
13323
13324 /* For aggregate types, all the fields must be the same. */
13325 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13326 f1 || f2;
13327 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13328 {
13329 /* Skip non-fields. */
13330 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13331 f1 = TREE_CHAIN (f1);
13332 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13333 f2 = TREE_CHAIN (f2);
13334 if (!f1 || !f2)
13335 break;
13336 /* The fields must have the same name, offset and type. */
13337 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13338 || !gimple_compare_field_offset (f1, f2)
13339 || !gimple_canonical_types_compatible_p
13340 (TREE_TYPE (f1), TREE_TYPE (f2),
13341 trust_type_canonical))
13342 return false;
13343 }
13344
13345 /* If one aggregate has more fields than the other, they
13346 are not the same. */
13347 if (f1 || f2)
13348 return false;
13349
13350 return true;
13351 }
13352
13353 default:
13354       /* Consider all types with language-specific trees in them mutually
13355          compatible.  This is executed only from verify_type and false
13356          positives can be tolerated.  */
13357 gcc_assert (!in_lto_p);
13358 return true;
13359 }
13360 }
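
/* Illustrative sketch (an assumption about how a caller might use the
   predicate, not code taken from lto.c): canonical type merging conceptually
   makes structurally compatible types share TYPE_CANONICAL, which is what
   the pointer-equality fast path above then relies on.  EXISTING is assumed
   to already be a canonical type.  Hypothetical helper, kept under "#if 0"
   so it has no effect on the build.  */
#if 0
static void
example_register_canonical_type (tree t, tree existing)
{
  /* Reuse a structurally compatible canonical type if one is already
     known; otherwise the type becomes its own canonical type.  */
  if (existing
      && gimple_canonical_types_compatible_p (t, existing, false))
    TYPE_CANONICAL (t) = existing;
  else
    TYPE_CANONICAL (t) = t;
}
#endif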
13361
13362 /* Verify type T. */
13363
13364 void
13365 verify_type (const_tree t)
13366 {
13367 bool error_found = false;
13368 tree mv = TYPE_MAIN_VARIANT (t);
13369 if (!mv)
13370 {
13371 error ("Main variant is not defined");
13372 error_found = true;
13373 }
13374 else if (mv != TYPE_MAIN_VARIANT (mv))
13375 {
13376 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13377 debug_tree (mv);
13378 error_found = true;
13379 }
13380 else if (t != mv && !verify_type_variant (t, mv))
13381 error_found = true;
13382
13383 tree ct = TYPE_CANONICAL (t);
13384 if (!ct)
13385 ;
13386 else if (TYPE_CANONICAL (t) != ct)
13387 {
13388 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13389 debug_tree (ct);
13390 error_found = true;
13391 }
13392   /* Method and function types cannot be used to address memory and thus
13393      TYPE_CANONICAL really matters only for determining useless conversions.
13394 
13395      FIXME: The C++ FE produces declarations of builtin functions that are
13396      not compatible with main variants.  */
13397 else if (TREE_CODE (t) == FUNCTION_TYPE)
13398 ;
13399 else if (t != ct
13400            /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13401               with variably sized arrays because their sizes are possibly
13402               gimplified to different variables.  */
13403 && !variably_modified_type_p (ct, NULL)
13404 && !gimple_canonical_types_compatible_p (t, ct, false))
13405 {
13406 error ("TYPE_CANONICAL is not compatible");
13407 debug_tree (ct);
13408 error_found = true;
13409 }
13410
13411
13412 /* Check various uses of TYPE_MINVAL. */
13413 if (RECORD_OR_UNION_TYPE_P (t))
13414 {
13415       /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13416          and dangles the pointer from time to time.  */
13417 if (TYPE_VFIELD (t)
13418 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13419 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13420 {
13421 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13422 debug_tree (TYPE_VFIELD (t));
13423 error_found = true;
13424 }
13425 }
13426 else if (TREE_CODE (t) == POINTER_TYPE)
13427 {
13428 if (TYPE_NEXT_PTR_TO (t)
13429 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13430 {
13431 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13432 debug_tree (TYPE_NEXT_PTR_TO (t));
13433 error_found = true;
13434 }
13435 }
13436 else if (TREE_CODE (t) == REFERENCE_TYPE)
13437 {
13438 if (TYPE_NEXT_REF_TO (t)
13439 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13440 {
13441 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13442 debug_tree (TYPE_NEXT_REF_TO (t));
13443 error_found = true;
13444 }
13445 }
13446 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13447 || TREE_CODE (t) == FIXED_POINT_TYPE)
13448 {
13449       /* FIXME: The following check should pass:
13450            useless_type_conversion_p (const_cast <tree> (t),
13451                                       TREE_TYPE (TYPE_MIN_VALUE (t)))
13452          but does not for C sizetypes in LTO.  */
13453 }
13454 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13455 else if (TYPE_MINVAL (t)
13456 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13457 || in_lto_p))
13458 {
13459 error ("TYPE_MINVAL non-NULL");
13460 debug_tree (TYPE_MINVAL (t));
13461 error_found = true;
13462 }
13463
13464 /* Check various uses of TYPE_MAXVAL. */
13465 if (RECORD_OR_UNION_TYPE_P (t))
13466 {
13467 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13468 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13469 && TYPE_METHODS (t) != error_mark_node)
13470 {
13471 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13472 debug_tree (TYPE_METHODS (t));
13473 error_found = true;
13474 }
13475 }
13476 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13477 {
13478 if (TYPE_METHOD_BASETYPE (t)
13479 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13480 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13481 {
13482 error ("TYPE_METHOD_BASETYPE is not record nor union");
13483 debug_tree (TYPE_METHOD_BASETYPE (t));
13484 error_found = true;
13485 }
13486 }
13487 else if (TREE_CODE (t) == OFFSET_TYPE)
13488 {
13489 if (TYPE_OFFSET_BASETYPE (t)
13490 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13491 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13492 {
13493 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13494 debug_tree (TYPE_OFFSET_BASETYPE (t));
13495 error_found = true;
13496 }
13497 }
13498 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13499 || TREE_CODE (t) == FIXED_POINT_TYPE)
13500 {
13501       /* FIXME: The following check should pass:
13502            useless_type_conversion_p (const_cast <tree> (t),
13503                                       TREE_TYPE (TYPE_MAX_VALUE (t)))
13504          but does not for C sizetypes in LTO.  */
13505 }
13506 else if (TREE_CODE (t) == ARRAY_TYPE)
13507 {
13508 if (TYPE_ARRAY_MAX_SIZE (t)
13509 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13510 {
13511 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13512 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13513 error_found = true;
13514 }
13515 }
13516 else if (TYPE_MAXVAL (t))
13517 {
13518 error ("TYPE_MAXVAL non-NULL");
13519 debug_tree (TYPE_MAXVAL (t));
13520 error_found = true;
13521 }
13522
13523 /* Check various uses of TYPE_BINFO. */
13524 if (RECORD_OR_UNION_TYPE_P (t))
13525 {
13526 if (!TYPE_BINFO (t))
13527 ;
13528 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13529 {
13530 error ("TYPE_BINFO is not TREE_BINFO");
13531 debug_tree (TYPE_BINFO (t));
13532 error_found = true;
13533 }
13534 /* FIXME: Java builds invalid empty binfos that do not have
13535 TREE_TYPE set. */
13536 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13537 {
13538 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13539 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13540 error_found = true;
13541 }
13542 }
13543 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13544 {
13545 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13546 debug_tree (TYPE_LANG_SLOT_1 (t));
13547 error_found = true;
13548 }
13549
13550 /* Check various uses of TYPE_VALUES_RAW. */
13551 if (TREE_CODE (t) == ENUMERAL_TYPE)
13552 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13553 {
13554 tree value = TREE_VALUE (l);
13555 tree name = TREE_PURPOSE (l);
13556
13557       /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE
13558          uses CONST_DECLs of ENUMERAL_TYPE.  */
13559 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13560 {
13561 error ("Enum value is not CONST_DECL or INTEGER_CST");
13562 debug_tree (value);
13563 debug_tree (name);
13564 error_found = true;
13565 }
13566 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13567 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13568 {
13569 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13570 debug_tree (value);
13571 debug_tree (name);
13572 error_found = true;
13573 }
13574 if (TREE_CODE (name) != IDENTIFIER_NODE)
13575 {
13576 error ("Enum value name is not IDENTIFIER_NODE");
13577 debug_tree (value);
13578 debug_tree (name);
13579 error_found = true;
13580 }
13581 }
13582 else if (TREE_CODE (t) == ARRAY_TYPE)
13583 {
13584 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13585 {
13586 error ("Array TYPE_DOMAIN is not integer type");
13587 debug_tree (TYPE_DOMAIN (t));
13588 error_found = true;
13589 }
13590 }
13591 else if (RECORD_OR_UNION_TYPE_P (t))
13592 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13593 {
13594 /* TODO: verify properties of decls. */
13595 if (TREE_CODE (fld) == FIELD_DECL)
13596 ;
13597 else if (TREE_CODE (fld) == TYPE_DECL)
13598 ;
13599 else if (TREE_CODE (fld) == CONST_DECL)
13600 ;
13601 else if (TREE_CODE (fld) == VAR_DECL)
13602 ;
13603 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13604 ;
13605 else if (TREE_CODE (fld) == USING_DECL)
13606 ;
13607 else
13608 {
13609 error ("Wrong tree in TYPE_FIELDS list");
13610 debug_tree (fld);
13611 error_found = true;
13612 }
13613 }
13614 else if (TREE_CODE (t) == INTEGER_TYPE
13615 || TREE_CODE (t) == BOOLEAN_TYPE
13616 || TREE_CODE (t) == OFFSET_TYPE
13617 || TREE_CODE (t) == REFERENCE_TYPE
13618 || TREE_CODE (t) == NULLPTR_TYPE
13619 || TREE_CODE (t) == POINTER_TYPE)
13620 {
13621 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13622 {
13623 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13624 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13625 error_found = true;
13626 }
13627 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13628 {
13629 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13630 debug_tree (TYPE_CACHED_VALUES (t));
13631 error_found = true;
13632 }
13633       /* Verify just enough of the cache to ensure that no one copied it to a new
13634          type.  All copying should go through copy_node, which should clear it.  */
13635 else if (TYPE_CACHED_VALUES_P (t))
13636 {
13637 int i;
13638 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13639 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13640 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13641 {
13642 error ("wrong TYPE_CACHED_VALUES entry");
13643 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13644 error_found = true;
13645 break;
13646 }
13647 }
13648 }
13649 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13650 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13651 {
13652 /* C++ FE uses TREE_PURPOSE to store initial values. */
13653 if (TREE_PURPOSE (l) && in_lto_p)
13654 {
13655 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13656 debug_tree (l);
13657 error_found = true;
13658 }
13659 if (!TYPE_P (TREE_VALUE (l)))
13660 {
13661 error ("Wrong entry in TYPE_ARG_TYPES list");
13662 debug_tree (l);
13663 error_found = true;
13664 }
13665 }
13666 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13667 {
13668 error ("TYPE_VALUES_RAW field is non-NULL");
13669 debug_tree (TYPE_VALUES_RAW (t));
13670 error_found = true;
13671 }
13672 if (TREE_CODE (t) != INTEGER_TYPE
13673 && TREE_CODE (t) != BOOLEAN_TYPE
13674 && TREE_CODE (t) != OFFSET_TYPE
13675 && TREE_CODE (t) != REFERENCE_TYPE
13676 && TREE_CODE (t) != NULLPTR_TYPE
13677 && TREE_CODE (t) != POINTER_TYPE
13678 && TYPE_CACHED_VALUES_P (t))
13679 {
13680       error ("TYPE_CACHED_VALUES_P is set while it should not be");
13681 error_found = true;
13682 }
13683 if (TYPE_STRING_FLAG (t)
13684 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13685 {
13686 error ("TYPE_STRING_FLAG is set on wrong type code");
13687 error_found = true;
13688 }
13689 else if (TYPE_STRING_FLAG (t))
13690 {
13691 const_tree b = t;
13692 if (TREE_CODE (b) == ARRAY_TYPE)
13693 b = TREE_TYPE (t);
13694       /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
13695          which is 32 bits.  */
13696 if (TREE_CODE (b) != INTEGER_TYPE)
13697 {
13698 error ("TYPE_STRING_FLAG is set on type that does not look like "
13699 "char nor array of chars");
13700 error_found = true;
13701 }
13702 }
13703
13704   /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always the
13705      TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13706      of a type.  */
13707 if (TREE_CODE (t) == METHOD_TYPE
13708 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13709 {
13710 error ("TYPE_METHOD_BASETYPE is not main variant");
13711 error_found = true;
13712 }
13713
13714 if (error_found)
13715 {
13716 debug_tree (const_cast <tree> (t));
13717 internal_error ("verify_type failed");
13718 }
13719 }
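
/* Illustrative sketch (a hypothetical caller, not from this file): since
   verify_type calls internal_error on failure, type-building helpers can
   run it under checking builds only.  Kept under "#if 0" so it has no
   effect on the build.  */
#if 0
static tree
example_build_checked_variant (tree type)
{
  tree v = build_variant_type_copy (type);
#ifdef ENABLE_CHECKING
  verify_type (v);
#endif
  return v;
}
#endif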
13720
13721
13722 /* Return true if ARG is marked with the nonnull attribute in the
13723 current function signature. */
13724
13725 bool
13726 nonnull_arg_p (const_tree arg)
13727 {
13728 tree t, attrs, fntype;
13729 unsigned HOST_WIDE_INT arg_num;
13730
13731 gcc_assert (TREE_CODE (arg) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (arg)));
13732
13733   /* The static chain decl is always non-null.  */
13734 if (arg == cfun->static_chain_decl)
13735 return true;
13736
13737   /* The THIS argument of a method is always non-NULL.  */
13738 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13739 && arg == DECL_ARGUMENTS (cfun->decl)
13740 && flag_delete_null_pointer_checks)
13741 return true;
13742
13743 /* Values passed by reference are always non-NULL. */
13744 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13745 && flag_delete_null_pointer_checks)
13746 return true;
13747
13748 fntype = TREE_TYPE (cfun->decl);
13749 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13750 {
13751 attrs = lookup_attribute ("nonnull", attrs);
13752
13753 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13754 if (attrs == NULL_TREE)
13755 return false;
13756
13757 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13758 if (TREE_VALUE (attrs) == NULL_TREE)
13759 return true;
13760
13761 /* Get the position number for ARG in the function signature. */
13762 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13763 t;
13764 t = DECL_CHAIN (t), arg_num++)
13765 {
13766 if (t == arg)
13767 break;
13768 }
13769
13770 gcc_assert (t == arg);
13771
13772 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13773 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13774 {
13775 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13776 return true;
13777 }
13778 }
13779
13780 return false;
13781 }
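
/* Illustrative sketch (a hypothetical user of the predicate, in the spirit
   of the optimizers that consume it): a pointer PARM_DECL that is known
   non-null in the current function needs no explicit null check.  Kept
   under "#if 0" so it has no effect on the build.  */
#if 0
static bool
example_param_needs_null_check (tree parm)
{
  if (TREE_CODE (parm) == PARM_DECL
      && POINTER_TYPE_P (TREE_TYPE (parm))
      && nonnull_arg_p (parm))
    return false;
  return true;
}
#endif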
13782
13783
13784 #include "gt-tree.h"