1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24    tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
 28    call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "tree.h"
35 #include "gimple.h"
36 #include "rtl.h"
37 #include "ssa.h"
38 #include "flags.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "calls.h"
43 #include "attribs.h"
44 #include "varasm.h"
45 #include "tm_p.h"
46 #include "toplev.h" /* get_random_seed */
47 #include "filenames.h"
48 #include "output.h"
49 #include "target.h"
50 #include "common/common-target.h"
51 #include "langhooks.h"
52 #include "tree-inline.h"
53 #include "tree-iterator.h"
54 #include "internal-fn.h"
55 #include "gimple-iterator.h"
56 #include "gimplify.h"
57 #include "cgraph.h"
58 #include "insn-config.h"
59 #include "expmed.h"
60 #include "dojump.h"
61 #include "explow.h"
62 #include "emit-rtl.h"
63 #include "stmt.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66 #include "params.h"
67 #include "tree-pass.h"
68 #include "langhooks-def.h"
69 #include "diagnostic.h"
70 #include "tree-diagnostic.h"
71 #include "tree-pretty-print.h"
72 #include "except.h"
73 #include "debug.h"
74 #include "intl.h"
75 #include "builtins.h"
76 #include "print-tree.h"
77 #include "ipa-utils.h"
78
79 /* Tree code classes. */
80
81 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
82 #define END_OF_BASE_TREE_CODES tcc_exceptional,
83
84 const enum tree_code_class tree_code_type[] = {
85 #include "all-tree.def"
86 };
87
88 #undef DEFTREECODE
89 #undef END_OF_BASE_TREE_CODES
90
91 /* Table indexed by tree code giving number of expression
92 operands beyond the fixed part of the node structure.
93 Not used for types or decls. */
94
95 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
96 #define END_OF_BASE_TREE_CODES 0,
97
98 const unsigned char tree_code_length[] = {
99 #include "all-tree.def"
100 };
101
102 #undef DEFTREECODE
103 #undef END_OF_BASE_TREE_CODES
104
105 /* Names of tree components.
106 Used for printing out the tree and error messages. */
107 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
108 #define END_OF_BASE_TREE_CODES "@dummy",
109
110 static const char *const tree_code_name[] = {
111 #include "all-tree.def"
112 };
113
114 #undef DEFTREECODE
115 #undef END_OF_BASE_TREE_CODES
116
117 /* Each tree code class has an associated string representation.
118 These must correspond to the tree_code_class entries. */
119
120 const char *const tree_code_class_strings[] =
121 {
122 "exceptional",
123 "constant",
124 "type",
125 "declaration",
126 "reference",
127 "comparison",
128 "unary",
129 "binary",
130 "statement",
131 "vl_exp",
132 "expression"
133 };
134
135 /* obstack.[ch] explicitly declined to prototype this. */
136 extern int _obstack_allocated_p (struct obstack *h, void *obj);
137
138 /* Statistics-gathering stuff. */
139
140 static int tree_code_counts[MAX_TREE_CODES];
141 int tree_node_counts[(int) all_kinds];
142 int tree_node_sizes[(int) all_kinds];
143
144 /* Keep in sync with tree.h:enum tree_node_kind. */
145 static const char * const tree_node_kind_names[] = {
146 "decls",
147 "types",
148 "blocks",
149 "stmts",
150 "refs",
151 "exprs",
152 "constants",
153 "identifiers",
154 "vecs",
155 "binfos",
156 "ssa names",
157 "constructors",
158 "random kinds",
159 "lang_decl kinds",
160 "lang_type kinds",
161 "omp clauses",
162 };
163
164 /* Unique id for next decl created. */
165 static GTY(()) int next_decl_uid;
166 /* Unique id for next type created. */
167 static GTY(()) int next_type_uid = 1;
168 /* Unique id for next debug decl created. Use negative numbers,
169 to catch erroneous uses. */
170 static GTY(()) int next_debug_decl_uid;
171
172 /* Since we cannot rehash a type after it is in the table, we have to
173 keep the hash code. */
174
175 struct GTY((for_user)) type_hash {
176 unsigned long hash;
177 tree type;
178 };
179
180 /* Initial size of the hash table (rounded to next prime). */
181 #define TYPE_HASH_INITIAL_SIZE 1000
182
183 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
184 {
185 static hashval_t hash (type_hash *t) { return t->hash; }
186 static bool equal (type_hash *a, type_hash *b);
187
188 static int
189 keep_cache_entry (type_hash *&t)
190 {
191 return ggc_marked_p (t->type);
192 }
193 };
194
195 /* Now here is the hash table. When recording a type, it is added to
196 the slot whose index is the hash code. Note that the hash table is
197 used for several kinds of types (function types, array types and
198 array index range types, for now). While all these live in the
199 same table, they are completely independent, and the hash code is
200 computed differently for each of these. */
201
202 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
203
204 /* Hash table and temporary node for larger integer const values. */
205 static GTY (()) tree int_cst_node;
206
207 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
208 {
209 static hashval_t hash (tree t);
210 static bool equal (tree x, tree y);
211 };
212
213 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
214
215 /* Hash table for optimization flags and target option flags. Use the same
216 hash table for both sets of options. Nodes for building the current
217 optimization and target option nodes. The assumption is most of the time
218 the options created will already be in the hash table, so we avoid
 219    allocating and freeing up a node repeatedly.  */
220 static GTY (()) tree cl_optimization_node;
221 static GTY (()) tree cl_target_option_node;
222
223 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
224 {
225 static hashval_t hash (tree t);
226 static bool equal (tree x, tree y);
227 };
228
229 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
230
231 /* General tree->tree mapping structure for use in hash tables. */
232
233
234 static GTY ((cache))
235 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
236
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
239
240 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
241 {
242 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
243
244 static bool
245 equal (tree_vec_map *a, tree_vec_map *b)
246 {
247 return a->base.from == b->base.from;
248 }
249
250 static int
251 keep_cache_entry (tree_vec_map *&m)
252 {
253 return ggc_marked_p (m->base.from);
254 }
255 };
256
257 static GTY ((cache))
258 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
259
260 static void set_type_quals (tree, int);
261 static void print_type_hash_statistics (void);
262 static void print_debug_expr_statistics (void);
263 static void print_value_expr_statistics (void);
264 static void type_hash_list (const_tree, inchash::hash &);
265 static void attribute_hash_list (const_tree, inchash::hash &);
266
267 tree global_trees[TI_MAX];
268 tree integer_types[itk_none];
269
270 bool int_n_enabled_p[NUM_INT_N_ENTS];
271 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
272
273 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
274
275 /* Number of operands for each OpenMP clause. */
276 unsigned const char omp_clause_num_ops[] =
277 {
278 0, /* OMP_CLAUSE_ERROR */
279 1, /* OMP_CLAUSE_PRIVATE */
280 1, /* OMP_CLAUSE_SHARED */
281 1, /* OMP_CLAUSE_FIRSTPRIVATE */
282 2, /* OMP_CLAUSE_LASTPRIVATE */
283 4, /* OMP_CLAUSE_REDUCTION */
284 1, /* OMP_CLAUSE_COPYIN */
285 1, /* OMP_CLAUSE_COPYPRIVATE */
286 3, /* OMP_CLAUSE_LINEAR */
287 2, /* OMP_CLAUSE_ALIGNED */
288 1, /* OMP_CLAUSE_DEPEND */
289 1, /* OMP_CLAUSE_UNIFORM */
290 2, /* OMP_CLAUSE_FROM */
291 2, /* OMP_CLAUSE_TO */
292 2, /* OMP_CLAUSE_MAP */
293 2, /* OMP_CLAUSE__CACHE_ */
294 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
295 1, /* OMP_CLAUSE_USE_DEVICE */
296 2, /* OMP_CLAUSE_GANG */
297 1, /* OMP_CLAUSE_ASYNC */
298 1, /* OMP_CLAUSE_WAIT */
299 0, /* OMP_CLAUSE_AUTO */
300 0, /* OMP_CLAUSE_SEQ */
301 1, /* OMP_CLAUSE__LOOPTEMP_ */
302 1, /* OMP_CLAUSE_IF */
303 1, /* OMP_CLAUSE_NUM_THREADS */
304 1, /* OMP_CLAUSE_SCHEDULE */
305 0, /* OMP_CLAUSE_NOWAIT */
306 0, /* OMP_CLAUSE_ORDERED */
307 0, /* OMP_CLAUSE_DEFAULT */
308 3, /* OMP_CLAUSE_COLLAPSE */
309 0, /* OMP_CLAUSE_UNTIED */
310 1, /* OMP_CLAUSE_FINAL */
311 0, /* OMP_CLAUSE_MERGEABLE */
312 1, /* OMP_CLAUSE_DEVICE */
313 1, /* OMP_CLAUSE_DIST_SCHEDULE */
314 0, /* OMP_CLAUSE_INBRANCH */
315 0, /* OMP_CLAUSE_NOTINBRANCH */
316 1, /* OMP_CLAUSE_NUM_TEAMS */
317 1, /* OMP_CLAUSE_THREAD_LIMIT */
318 0, /* OMP_CLAUSE_PROC_BIND */
319 1, /* OMP_CLAUSE_SAFELEN */
320 1, /* OMP_CLAUSE_SIMDLEN */
321 0, /* OMP_CLAUSE_FOR */
322 0, /* OMP_CLAUSE_PARALLEL */
323 0, /* OMP_CLAUSE_SECTIONS */
324 0, /* OMP_CLAUSE_TASKGROUP */
325 1, /* OMP_CLAUSE__SIMDUID_ */
326 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
327 0, /* OMP_CLAUSE_INDEPENDENT */
328 1, /* OMP_CLAUSE_WORKER */
329 1, /* OMP_CLAUSE_VECTOR */
330 1, /* OMP_CLAUSE_NUM_GANGS */
331 1, /* OMP_CLAUSE_NUM_WORKERS */
332 1, /* OMP_CLAUSE_VECTOR_LENGTH */
333 };
334
335 const char * const omp_clause_code_name[] =
336 {
337 "error_clause",
338 "private",
339 "shared",
340 "firstprivate",
341 "lastprivate",
342 "reduction",
343 "copyin",
344 "copyprivate",
345 "linear",
346 "aligned",
347 "depend",
348 "uniform",
349 "from",
350 "to",
351 "map",
352 "_cache_",
353 "device_resident",
354 "use_device",
355 "gang",
356 "async",
357 "wait",
358 "auto",
359 "seq",
360 "_looptemp_",
361 "if",
362 "num_threads",
363 "schedule",
364 "nowait",
365 "ordered",
366 "default",
367 "collapse",
368 "untied",
369 "final",
370 "mergeable",
371 "device",
372 "dist_schedule",
373 "inbranch",
374 "notinbranch",
375 "num_teams",
376 "thread_limit",
377 "proc_bind",
378 "safelen",
379 "simdlen",
380 "for",
381 "parallel",
382 "sections",
383 "taskgroup",
384 "_simduid_",
385 "_Cilk_for_count_",
386 "independent",
387 "worker",
388 "vector",
389 "num_gangs",
390 "num_workers",
391 "vector_length"
392 };
393
394
395 /* Return the tree node structure used by tree code CODE. */
396
397 static inline enum tree_node_structure_enum
398 tree_node_structure_for_code (enum tree_code code)
399 {
400 switch (TREE_CODE_CLASS (code))
401 {
402 case tcc_declaration:
403 {
404 switch (code)
405 {
406 case FIELD_DECL:
407 return TS_FIELD_DECL;
408 case PARM_DECL:
409 return TS_PARM_DECL;
410 case VAR_DECL:
411 return TS_VAR_DECL;
412 case LABEL_DECL:
413 return TS_LABEL_DECL;
414 case RESULT_DECL:
415 return TS_RESULT_DECL;
416 case DEBUG_EXPR_DECL:
417 return TS_DECL_WRTL;
418 case CONST_DECL:
419 return TS_CONST_DECL;
420 case TYPE_DECL:
421 return TS_TYPE_DECL;
422 case FUNCTION_DECL:
423 return TS_FUNCTION_DECL;
424 case TRANSLATION_UNIT_DECL:
425 return TS_TRANSLATION_UNIT_DECL;
426 default:
427 return TS_DECL_NON_COMMON;
428 }
429 }
430 case tcc_type:
431 return TS_TYPE_NON_COMMON;
432 case tcc_reference:
433 case tcc_comparison:
434 case tcc_unary:
435 case tcc_binary:
436 case tcc_expression:
437 case tcc_statement:
438 case tcc_vl_exp:
439 return TS_EXP;
440 default: /* tcc_constant and tcc_exceptional */
441 break;
442 }
443 switch (code)
444 {
445 /* tcc_constant cases. */
446 case VOID_CST: return TS_TYPED;
447 case INTEGER_CST: return TS_INT_CST;
448 case REAL_CST: return TS_REAL_CST;
449 case FIXED_CST: return TS_FIXED_CST;
450 case COMPLEX_CST: return TS_COMPLEX;
451 case VECTOR_CST: return TS_VECTOR;
452 case STRING_CST: return TS_STRING;
453 /* tcc_exceptional cases. */
454 case ERROR_MARK: return TS_COMMON;
455 case IDENTIFIER_NODE: return TS_IDENTIFIER;
456 case TREE_LIST: return TS_LIST;
457 case TREE_VEC: return TS_VEC;
458 case SSA_NAME: return TS_SSA_NAME;
459 case PLACEHOLDER_EXPR: return TS_COMMON;
460 case STATEMENT_LIST: return TS_STATEMENT_LIST;
461 case BLOCK: return TS_BLOCK;
462 case CONSTRUCTOR: return TS_CONSTRUCTOR;
463 case TREE_BINFO: return TS_BINFO;
464 case OMP_CLAUSE: return TS_OMP_CLAUSE;
465 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
466 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
467
468 default:
469 gcc_unreachable ();
470 }
471 }
472
473
474 /* Initialize tree_contains_struct to describe the hierarchy of tree
475 nodes. */
476
477 static void
478 initialize_tree_contains_struct (void)
479 {
480 unsigned i;
481
482 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
483 {
484 enum tree_code code;
485 enum tree_node_structure_enum ts_code;
486
487 code = (enum tree_code) i;
488 ts_code = tree_node_structure_for_code (code);
489
490 /* Mark the TS structure itself. */
491 tree_contains_struct[code][ts_code] = 1;
492
493 /* Mark all the structures that TS is derived from. */
494 switch (ts_code)
495 {
496 case TS_TYPED:
497 case TS_BLOCK:
498 MARK_TS_BASE (code);
499 break;
500
501 case TS_COMMON:
502 case TS_INT_CST:
503 case TS_REAL_CST:
504 case TS_FIXED_CST:
505 case TS_VECTOR:
506 case TS_STRING:
507 case TS_COMPLEX:
508 case TS_SSA_NAME:
509 case TS_CONSTRUCTOR:
510 case TS_EXP:
511 case TS_STATEMENT_LIST:
512 MARK_TS_TYPED (code);
513 break;
514
515 case TS_IDENTIFIER:
516 case TS_DECL_MINIMAL:
517 case TS_TYPE_COMMON:
518 case TS_LIST:
519 case TS_VEC:
520 case TS_BINFO:
521 case TS_OMP_CLAUSE:
522 case TS_OPTIMIZATION:
523 case TS_TARGET_OPTION:
524 MARK_TS_COMMON (code);
525 break;
526
527 case TS_TYPE_WITH_LANG_SPECIFIC:
528 MARK_TS_TYPE_COMMON (code);
529 break;
530
531 case TS_TYPE_NON_COMMON:
532 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
533 break;
534
535 case TS_DECL_COMMON:
536 MARK_TS_DECL_MINIMAL (code);
537 break;
538
539 case TS_DECL_WRTL:
540 case TS_CONST_DECL:
541 MARK_TS_DECL_COMMON (code);
542 break;
543
544 case TS_DECL_NON_COMMON:
545 MARK_TS_DECL_WITH_VIS (code);
546 break;
547
548 case TS_DECL_WITH_VIS:
549 case TS_PARM_DECL:
550 case TS_LABEL_DECL:
551 case TS_RESULT_DECL:
552 MARK_TS_DECL_WRTL (code);
553 break;
554
555 case TS_FIELD_DECL:
556 MARK_TS_DECL_COMMON (code);
557 break;
558
559 case TS_VAR_DECL:
560 MARK_TS_DECL_WITH_VIS (code);
561 break;
562
563 case TS_TYPE_DECL:
564 case TS_FUNCTION_DECL:
565 MARK_TS_DECL_NON_COMMON (code);
566 break;
567
568 case TS_TRANSLATION_UNIT_DECL:
569 MARK_TS_DECL_COMMON (code);
570 break;
571
572 default:
573 gcc_unreachable ();
574 }
575 }
576
577 /* Basic consistency checks for attributes used in fold. */
578 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
579 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
580 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
581 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
582 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
583 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
584 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
585 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
586 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
587 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
588 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
589 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
590 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
591 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
592 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
593 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
594 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
595 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
596 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
597 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
598 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
599 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
600 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
601 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
602 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
603 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
604 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
605 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
606 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
607 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
608 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
609 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
610 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
611 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
612 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
613 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
614 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
615 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
616 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
617 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
618 }
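
/* Illustrative sketch (not part of the original source): once the table
   above is filled in, structure-containment queries drive the checked
   accessors used throughout the compiler, e.g.

     if (CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_WITH_VIS))
       ... fields of decl_with_vis may be accessed on DECL ...

   so a FUNCTION_DECL passes this test while a FIELD_DECL does not,
   exactly as the assertions above verify.  */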
619
620
621 /* Init tree.c. */
622
623 void
624 init_ttree (void)
625 {
626 /* Initialize the hash table of types. */
627 type_hash_table
628 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
629
630 debug_expr_for_decl
631 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
632
633 value_expr_for_decl
634 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
635
636 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
637
638 int_cst_node = make_int_cst (1, 1);
639
640 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
641
642 cl_optimization_node = make_node (OPTIMIZATION_NODE);
643 cl_target_option_node = make_node (TARGET_OPTION_NODE);
644
645 /* Initialize the tree_contains_struct array. */
646 initialize_tree_contains_struct ();
647 lang_hooks.init_ts ();
648 }
649
650 \f
651 /* The name of the object as the assembler will see it (but before any
652 translations made by ASM_OUTPUT_LABELREF). Often this is the same
653 as DECL_NAME. It is an IDENTIFIER_NODE. */
654 tree
655 decl_assembler_name (tree decl)
656 {
657 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
658 lang_hooks.set_decl_assembler_name (decl);
659 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
660 }
661
662 /* When the target supports COMDAT groups, this indicates which group the
663 DECL is associated with. This can be either an IDENTIFIER_NODE or a
664 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
665 tree
666 decl_comdat_group (const_tree node)
667 {
668 struct symtab_node *snode = symtab_node::get (node);
669 if (!snode)
670 return NULL;
671 return snode->get_comdat_group ();
672 }
673
674 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
675 tree
676 decl_comdat_group_id (const_tree node)
677 {
678 struct symtab_node *snode = symtab_node::get (node);
679 if (!snode)
680 return NULL;
681 return snode->get_comdat_group_id ();
682 }
683
 684 /* When the target supports named sections, return the section name of
 685    NODE as a string, or NULL if it is not placed in a section.  */
686 const char *
687 decl_section_name (const_tree node)
688 {
689 struct symtab_node *snode = symtab_node::get (node);
690 if (!snode)
691 return NULL;
692 return snode->get_section ();
693 }
694
 695 /* Set the section name of NODE to the string VALUE, or clear it
 696    if VALUE is NULL.  */
697 void
698 set_decl_section_name (tree node, const char *value)
699 {
700 struct symtab_node *snode;
701
702 if (value == NULL)
703 {
704 snode = symtab_node::get (node);
705 if (!snode)
706 return;
707 }
708 else if (TREE_CODE (node) == VAR_DECL)
709 snode = varpool_node::get_create (node);
710 else
711 snode = cgraph_node::get_create (node);
712 snode->set_section (value);
713 }
714
715 /* Return TLS model of a variable NODE. */
716 enum tls_model
717 decl_tls_model (const_tree node)
718 {
719 struct varpool_node *snode = varpool_node::get (node);
720 if (!snode)
721 return TLS_MODEL_NONE;
722 return snode->tls_model;
723 }
724
725 /* Set TLS model of variable NODE to MODEL. */
726 void
727 set_decl_tls_model (tree node, enum tls_model model)
728 {
729 struct varpool_node *vnode;
730
731 if (model == TLS_MODEL_NONE)
732 {
733 vnode = varpool_node::get (node);
734 if (!vnode)
735 return;
736 }
737 else
738 vnode = varpool_node::get_create (node);
739 vnode->tls_model = model;
740 }
741
742 /* Compute the number of bytes occupied by a tree with code CODE.
743 This function cannot be used for nodes that have variable sizes,
744 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
745 size_t
746 tree_code_size (enum tree_code code)
747 {
748 switch (TREE_CODE_CLASS (code))
749 {
750 case tcc_declaration: /* A decl node */
751 {
752 switch (code)
753 {
754 case FIELD_DECL:
755 return sizeof (struct tree_field_decl);
756 case PARM_DECL:
757 return sizeof (struct tree_parm_decl);
758 case VAR_DECL:
759 return sizeof (struct tree_var_decl);
760 case LABEL_DECL:
761 return sizeof (struct tree_label_decl);
762 case RESULT_DECL:
763 return sizeof (struct tree_result_decl);
764 case CONST_DECL:
765 return sizeof (struct tree_const_decl);
766 case TYPE_DECL:
767 return sizeof (struct tree_type_decl);
768 case FUNCTION_DECL:
769 return sizeof (struct tree_function_decl);
770 case DEBUG_EXPR_DECL:
771 return sizeof (struct tree_decl_with_rtl);
772 case TRANSLATION_UNIT_DECL:
773 return sizeof (struct tree_translation_unit_decl);
774 case NAMESPACE_DECL:
775 case IMPORTED_DECL:
776 case NAMELIST_DECL:
777 return sizeof (struct tree_decl_non_common);
778 default:
779 return lang_hooks.tree_size (code);
780 }
781 }
782
783 case tcc_type: /* a type node */
784 return sizeof (struct tree_type_non_common);
785
786 case tcc_reference: /* a reference */
787 case tcc_expression: /* an expression */
788 case tcc_statement: /* an expression with side effects */
789 case tcc_comparison: /* a comparison expression */
790 case tcc_unary: /* a unary arithmetic expression */
791 case tcc_binary: /* a binary arithmetic expression */
792 return (sizeof (struct tree_exp)
793 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
794
795 case tcc_constant: /* a constant */
796 switch (code)
797 {
798 case VOID_CST: return sizeof (struct tree_typed);
799 case INTEGER_CST: gcc_unreachable ();
800 case REAL_CST: return sizeof (struct tree_real_cst);
801 case FIXED_CST: return sizeof (struct tree_fixed_cst);
802 case COMPLEX_CST: return sizeof (struct tree_complex);
803 case VECTOR_CST: return sizeof (struct tree_vector);
804 case STRING_CST: gcc_unreachable ();
805 default:
806 return lang_hooks.tree_size (code);
807 }
808
809 case tcc_exceptional: /* something random, like an identifier. */
810 switch (code)
811 {
812 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
813 case TREE_LIST: return sizeof (struct tree_list);
814
815 case ERROR_MARK:
816 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
817
818 case TREE_VEC:
819 case OMP_CLAUSE: gcc_unreachable ();
820
821 case SSA_NAME: return sizeof (struct tree_ssa_name);
822
823 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
824 case BLOCK: return sizeof (struct tree_block);
825 case CONSTRUCTOR: return sizeof (struct tree_constructor);
826 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
827 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
828
829 default:
830 return lang_hooks.tree_size (code);
831 }
832
833 default:
834 gcc_unreachable ();
835 }
836 }
837
838 /* Compute the number of bytes occupied by NODE. This routine only
839 looks at TREE_CODE, except for those nodes that have variable sizes. */
840 size_t
841 tree_size (const_tree node)
842 {
843 const enum tree_code code = TREE_CODE (node);
844 switch (code)
845 {
846 case INTEGER_CST:
847 return (sizeof (struct tree_int_cst)
848 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
849
850 case TREE_BINFO:
851 return (offsetof (struct tree_binfo, base_binfos)
852 + vec<tree, va_gc>
853 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
854
855 case TREE_VEC:
856 return (sizeof (struct tree_vec)
857 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
858
859 case VECTOR_CST:
860 return (sizeof (struct tree_vector)
861 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
862
863 case STRING_CST:
864 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
865
866 case OMP_CLAUSE:
867 return (sizeof (struct tree_omp_clause)
868 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
869 * sizeof (tree));
870
871 default:
872 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
873 return (sizeof (struct tree_exp)
874 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
875 else
876 return tree_code_size (code);
877 }
878 }
879
880 /* Record interesting allocation statistics for a tree node with CODE
881 and LENGTH. */
882
883 static void
884 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
885 size_t length ATTRIBUTE_UNUSED)
886 {
887 enum tree_code_class type = TREE_CODE_CLASS (code);
888 tree_node_kind kind;
889
890 if (!GATHER_STATISTICS)
891 return;
892
893 switch (type)
894 {
895 case tcc_declaration: /* A decl node */
896 kind = d_kind;
897 break;
898
899 case tcc_type: /* a type node */
900 kind = t_kind;
901 break;
902
903 case tcc_statement: /* an expression with side effects */
904 kind = s_kind;
905 break;
906
907 case tcc_reference: /* a reference */
908 kind = r_kind;
909 break;
910
911 case tcc_expression: /* an expression */
912 case tcc_comparison: /* a comparison expression */
913 case tcc_unary: /* a unary arithmetic expression */
914 case tcc_binary: /* a binary arithmetic expression */
915 kind = e_kind;
916 break;
917
918 case tcc_constant: /* a constant */
919 kind = c_kind;
920 break;
921
922 case tcc_exceptional: /* something random, like an identifier. */
923 switch (code)
924 {
925 case IDENTIFIER_NODE:
926 kind = id_kind;
927 break;
928
929 case TREE_VEC:
930 kind = vec_kind;
931 break;
932
933 case TREE_BINFO:
934 kind = binfo_kind;
935 break;
936
937 case SSA_NAME:
938 kind = ssa_name_kind;
939 break;
940
941 case BLOCK:
942 kind = b_kind;
943 break;
944
945 case CONSTRUCTOR:
946 kind = constr_kind;
947 break;
948
949 case OMP_CLAUSE:
950 kind = omp_clause_kind;
951 break;
952
953 default:
954 kind = x_kind;
955 break;
956 }
957 break;
958
959 case tcc_vl_exp:
960 kind = e_kind;
961 break;
962
963 default:
964 gcc_unreachable ();
965 }
966
967 tree_code_counts[(int) code]++;
968 tree_node_counts[(int) kind]++;
969 tree_node_sizes[(int) kind] += length;
970 }
971
972 /* Allocate and return a new UID from the DECL_UID namespace. */
973
974 int
975 allocate_decl_uid (void)
976 {
977 return next_decl_uid++;
978 }
979
980 /* Return a newly allocated node of code CODE. For decl and type
981 nodes, some other fields are initialized. The rest of the node is
982 initialized to zero. This function cannot be used for TREE_VEC,
983 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
984 tree_code_size.
985
986 Achoo! I got a code in the node. */
987
988 tree
989 make_node_stat (enum tree_code code MEM_STAT_DECL)
990 {
991 tree t;
992 enum tree_code_class type = TREE_CODE_CLASS (code);
993 size_t length = tree_code_size (code);
994
995 record_node_allocation_statistics (code, length);
996
997 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
998 TREE_SET_CODE (t, code);
999
1000 switch (type)
1001 {
1002 case tcc_statement:
1003 TREE_SIDE_EFFECTS (t) = 1;
1004 break;
1005
1006 case tcc_declaration:
1007 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1008 {
1009 if (code == FUNCTION_DECL)
1010 {
1011 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1012 DECL_MODE (t) = FUNCTION_MODE;
1013 }
1014 else
1015 DECL_ALIGN (t) = 1;
1016 }
1017 DECL_SOURCE_LOCATION (t) = input_location;
1018 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1019 DECL_UID (t) = --next_debug_decl_uid;
1020 else
1021 {
1022 DECL_UID (t) = allocate_decl_uid ();
1023 SET_DECL_PT_UID (t, -1);
1024 }
1025 if (TREE_CODE (t) == LABEL_DECL)
1026 LABEL_DECL_UID (t) = -1;
1027
1028 break;
1029
1030 case tcc_type:
1031 TYPE_UID (t) = next_type_uid++;
1032 TYPE_ALIGN (t) = BITS_PER_UNIT;
1033 TYPE_USER_ALIGN (t) = 0;
1034 TYPE_MAIN_VARIANT (t) = t;
1035 TYPE_CANONICAL (t) = t;
1036
1037 /* Default to no attributes for type, but let target change that. */
1038 TYPE_ATTRIBUTES (t) = NULL_TREE;
1039 targetm.set_default_type_attributes (t);
1040
1041 /* We have not yet computed the alias set for this type. */
1042 TYPE_ALIAS_SET (t) = -1;
1043 break;
1044
1045 case tcc_constant:
1046 TREE_CONSTANT (t) = 1;
1047 break;
1048
1049 case tcc_expression:
1050 switch (code)
1051 {
1052 case INIT_EXPR:
1053 case MODIFY_EXPR:
1054 case VA_ARG_EXPR:
1055 case PREDECREMENT_EXPR:
1056 case PREINCREMENT_EXPR:
1057 case POSTDECREMENT_EXPR:
1058 case POSTINCREMENT_EXPR:
1059 /* All of these have side-effects, no matter what their
1060 operands are. */
1061 TREE_SIDE_EFFECTS (t) = 1;
1062 break;
1063
1064 default:
1065 break;
1066 }
1067 break;
1068
1069 case tcc_exceptional:
1070 switch (code)
1071 {
1072 case TARGET_OPTION_NODE:
1073 TREE_TARGET_OPTION(t)
1074 = ggc_cleared_alloc<struct cl_target_option> ();
1075 break;
1076
1077 case OPTIMIZATION_NODE:
1078 TREE_OPTIMIZATION (t)
1079 = ggc_cleared_alloc<struct cl_optimization> ();
1080 break;
1081
1082 default:
1083 break;
1084 }
1085 break;
1086
1087 default:
1088 /* Other classes need no special treatment. */
1089 break;
1090 }
1091
1092 return t;
1093 }
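
/* Illustrative sketch (assumption, not part of the original file): most
   callers reach this routine through the make_node macro.  For instance

     tree label = make_node (LABEL_DECL);

   yields a node with a fresh DECL_UID, LABEL_DECL_UID of -1 and
   DECL_SOURCE_LOCATION set to the current input_location, as arranged
   by the tcc_declaration case above.  */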
1094 \f
1095 /* Return a new node with the same contents as NODE except that its
1096 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1097
1098 tree
1099 copy_node_stat (tree node MEM_STAT_DECL)
1100 {
1101 tree t;
1102 enum tree_code code = TREE_CODE (node);
1103 size_t length;
1104
1105 gcc_assert (code != STATEMENT_LIST);
1106
1107 length = tree_size (node);
1108 record_node_allocation_statistics (code, length);
1109 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1110 memcpy (t, node, length);
1111
1112 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1113 TREE_CHAIN (t) = 0;
1114 TREE_ASM_WRITTEN (t) = 0;
1115 TREE_VISITED (t) = 0;
1116
1117 if (TREE_CODE_CLASS (code) == tcc_declaration)
1118 {
1119 if (code == DEBUG_EXPR_DECL)
1120 DECL_UID (t) = --next_debug_decl_uid;
1121 else
1122 {
1123 DECL_UID (t) = allocate_decl_uid ();
1124 if (DECL_PT_UID_SET_P (node))
1125 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1126 }
1127 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1128 && DECL_HAS_VALUE_EXPR_P (node))
1129 {
1130 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1131 DECL_HAS_VALUE_EXPR_P (t) = 1;
1132 }
 1133       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1134 if (TREE_CODE (node) == VAR_DECL)
1135 {
1136 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1137 t->decl_with_vis.symtab_node = NULL;
1138 }
1139 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1140 {
1141 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1142 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1143 }
1144 if (TREE_CODE (node) == FUNCTION_DECL)
1145 {
1146 DECL_STRUCT_FUNCTION (t) = NULL;
1147 t->decl_with_vis.symtab_node = NULL;
1148 }
1149 }
1150 else if (TREE_CODE_CLASS (code) == tcc_type)
1151 {
1152 TYPE_UID (t) = next_type_uid++;
1153 /* The following is so that the debug code for
1154 the copy is different from the original type.
1155 The two statements usually duplicate each other
1156 (because they clear fields of the same union),
1157 but the optimizer should catch that. */
1158 TYPE_SYMTAB_POINTER (t) = 0;
1159 TYPE_SYMTAB_ADDRESS (t) = 0;
1160
1161 /* Do not copy the values cache. */
1162 if (TYPE_CACHED_VALUES_P (t))
1163 {
1164 TYPE_CACHED_VALUES_P (t) = 0;
1165 TYPE_CACHED_VALUES (t) = NULL_TREE;
1166 }
1167 }
1168 else if (code == TARGET_OPTION_NODE)
1169 {
1170 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1171 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1172 sizeof (struct cl_target_option));
1173 }
1174 else if (code == OPTIMIZATION_NODE)
1175 {
1176 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1177 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1178 sizeof (struct cl_optimization));
1179 }
1180
1181 return t;
1182 }
1183
1184 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1185 For example, this can copy a list made of TREE_LIST nodes. */
1186
1187 tree
1188 copy_list (tree list)
1189 {
1190 tree head;
1191 tree prev, next;
1192
1193 if (list == 0)
1194 return 0;
1195
1196 head = prev = copy_node (list);
1197 next = TREE_CHAIN (list);
1198 while (next)
1199 {
1200 TREE_CHAIN (prev) = copy_node (next);
1201 prev = TREE_CHAIN (prev);
1202 next = TREE_CHAIN (next);
1203 }
1204 return head;
1205 }
1206
1207 \f
1208 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1209 INTEGER_CST with value CST and type TYPE. */
1210
1211 static unsigned int
1212 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1213 {
1214 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1215 /* We need an extra zero HWI if CST is an unsigned integer with its
1216 upper bit set, and if CST occupies a whole number of HWIs. */
1217 if (TYPE_UNSIGNED (type)
1218 && wi::neg_p (cst)
1219 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1220 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1221 return cst.get_len ();
1222 }
1223
1224 /* Return a new INTEGER_CST with value CST and type TYPE. */
1225
1226 static tree
1227 build_new_int_cst (tree type, const wide_int &cst)
1228 {
1229 unsigned int len = cst.get_len ();
1230 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1231 tree nt = make_int_cst (len, ext_len);
1232
1233 if (len < ext_len)
1234 {
1235 --ext_len;
1236 TREE_INT_CST_ELT (nt, ext_len) = 0;
1237 for (unsigned int i = len; i < ext_len; ++i)
1238 TREE_INT_CST_ELT (nt, i) = -1;
1239 }
1240 else if (TYPE_UNSIGNED (type)
1241 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1242 {
1243 len--;
1244 TREE_INT_CST_ELT (nt, len)
1245 = zext_hwi (cst.elt (len),
1246 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1247 }
1248
1249 for (unsigned int i = 0; i < len; i++)
1250 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1251 TREE_TYPE (nt) = type;
1252 return nt;
1253 }
1254
1255 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1256
1257 tree
1258 build_int_cst (tree type, HOST_WIDE_INT low)
1259 {
1260 /* Support legacy code. */
1261 if (!type)
1262 type = integer_type_node;
1263
1264 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1265 }
1266
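/* Likewise, but create the INT_CST from the unsigned HOST_WIDE_INT CST,
   zero rather than sign extended to TYPE.  */
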
1267 tree
1268 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1269 {
1270 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1271 }
1272
1273 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1274
1275 tree
1276 build_int_cst_type (tree type, HOST_WIDE_INT low)
1277 {
1278 gcc_assert (type);
1279 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1280 }
1281
 1282 /* Construct a tree of type TYPE with the value given by CST.  The signedness
1283 of CST is assumed to be the same as the signedness of TYPE. */
1284
1285 tree
1286 double_int_to_tree (tree type, double_int cst)
1287 {
1288 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1289 }
1290
 1291 /* Force the wide_int CST into the range of the type TYPE by sign- or
 1292    zero-extending it.  OVERFLOWABLE indicates whether we are interested
 1293    in overflow of the value: when >0 we are only interested in signed
 1294    overflow, and when <0 we are interested in any overflow.  OVERFLOWED
 1295    indicates whether overflow has already occurred.  The result's value
 1296    is forced to be within the range of TYPE (by setting to 0 or 1 all
 1297    the bits outside the type's range).  We set TREE_OVERFLOW on the
 1298    result if
 1299         OVERFLOWED is nonzero,
 1300         or OVERFLOWABLE is >0 and signed overflow occurs,
 1301         or OVERFLOWABLE is <0 and any overflow occurs.
 1302    We return a new tree node for the extended wide_int.  The node
 1303    is shared if no overflow flags are set.  */
1304
1305
1306 tree
1307 force_fit_type (tree type, const wide_int_ref &cst,
1308 int overflowable, bool overflowed)
1309 {
1310 signop sign = TYPE_SIGN (type);
1311
1312 /* If we need to set overflow flags, return a new unshared node. */
1313 if (overflowed || !wi::fits_to_tree_p (cst, type))
1314 {
1315 if (overflowed
1316 || overflowable < 0
1317 || (overflowable > 0 && sign == SIGNED))
1318 {
1319 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1320 tree t = build_new_int_cst (type, tmp);
1321 TREE_OVERFLOW (t) = 1;
1322 return t;
1323 }
1324 }
1325
1326 /* Else build a shared node. */
1327 return wide_int_to_tree (type, cst);
1328 }
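
/* Illustrative sketch (assumption, not in the original source): callers
   typically hand the raw wide_int result of a fold to this routine; a
   value that does not fit TYPE comes back as an unshared INTEGER_CST
   with TREE_OVERFLOW set, otherwise a shared node is returned through
   wide_int_to_tree, e.g.

     tree res = force_fit_type (type, wi::shwi (val, prec), 1, false);

   where VAL and PREC stand for the folded value and its precision.  */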
1329
 1330 /* These are the hash table functions for the hash table of shared
 1331    INTEGER_CST nodes.  */
1332
 1333 /* Return the hash code for X, an INTEGER_CST.  */
1334
1335 hashval_t
1336 int_cst_hasher::hash (tree x)
1337 {
1338 const_tree const t = x;
1339 hashval_t code = TYPE_UID (TREE_TYPE (t));
1340 int i;
1341
1342 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1343 code ^= TREE_INT_CST_ELT (t, i);
1344
1345 return code;
1346 }
1347
 1348 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
 1349    is the same as that given by Y, which is also an INTEGER_CST tree node.  */
1350
1351 bool
1352 int_cst_hasher::equal (tree x, tree y)
1353 {
1354 const_tree const xt = x;
1355 const_tree const yt = y;
1356
1357 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1358 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1359 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1360 return false;
1361
1362 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1363 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1364 return false;
1365
1366 return true;
1367 }
1368
1369 /* Create an INT_CST node of TYPE and value CST.
1370 The returned node is always shared. For small integers we use a
1371 per-type vector cache, for larger ones we use a single hash table.
1372 The value is extended from its precision according to the sign of
1373 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1374 the upper bits and ensures that hashing and value equality based
1375 upon the underlying HOST_WIDE_INTs works without masking. */
1376
1377 tree
1378 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1379 {
1380 tree t;
1381 int ix = -1;
1382 int limit = 0;
1383
1384 gcc_assert (type);
1385 unsigned int prec = TYPE_PRECISION (type);
1386 signop sgn = TYPE_SIGN (type);
1387
1388 /* Verify that everything is canonical. */
1389 int l = pcst.get_len ();
1390 if (l > 1)
1391 {
1392 if (pcst.elt (l - 1) == 0)
1393 gcc_checking_assert (pcst.elt (l - 2) < 0);
1394 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1395 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1396 }
1397
1398 wide_int cst = wide_int::from (pcst, prec, sgn);
1399 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1400
1401 if (ext_len == 1)
1402 {
1403 /* We just need to store a single HOST_WIDE_INT. */
1404 HOST_WIDE_INT hwi;
1405 if (TYPE_UNSIGNED (type))
1406 hwi = cst.to_uhwi ();
1407 else
1408 hwi = cst.to_shwi ();
1409
1410 switch (TREE_CODE (type))
1411 {
1412 case NULLPTR_TYPE:
1413 gcc_assert (hwi == 0);
1414 /* Fallthru. */
1415
1416 case POINTER_TYPE:
1417 case REFERENCE_TYPE:
1418 case POINTER_BOUNDS_TYPE:
1419 /* Cache NULL pointer and zero bounds. */
1420 if (hwi == 0)
1421 {
1422 limit = 1;
1423 ix = 0;
1424 }
1425 break;
1426
1427 case BOOLEAN_TYPE:
1428 /* Cache false or true. */
1429 limit = 2;
1430 if (hwi < 2)
1431 ix = hwi;
1432 break;
1433
1434 case INTEGER_TYPE:
1435 case OFFSET_TYPE:
1436 if (TYPE_SIGN (type) == UNSIGNED)
1437 {
1438 /* Cache [0, N). */
1439 limit = INTEGER_SHARE_LIMIT;
1440 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1441 ix = hwi;
1442 }
1443 else
1444 {
1445 /* Cache [-1, N). */
1446 limit = INTEGER_SHARE_LIMIT + 1;
1447 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1448 ix = hwi + 1;
1449 }
1450 break;
1451
1452 case ENUMERAL_TYPE:
1453 break;
1454
1455 default:
1456 gcc_unreachable ();
1457 }
1458
1459 if (ix >= 0)
1460 {
1461 /* Look for it in the type's vector of small shared ints. */
1462 if (!TYPE_CACHED_VALUES_P (type))
1463 {
1464 TYPE_CACHED_VALUES_P (type) = 1;
1465 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1466 }
1467
1468 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1469 if (t)
1470 /* Make sure no one is clobbering the shared constant. */
1471 gcc_checking_assert (TREE_TYPE (t) == type
1472 && TREE_INT_CST_NUNITS (t) == 1
1473 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1474 && TREE_INT_CST_EXT_NUNITS (t) == 1
1475 && TREE_INT_CST_ELT (t, 0) == hwi);
1476 else
1477 {
1478 /* Create a new shared int. */
1479 t = build_new_int_cst (type, cst);
1480 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1481 }
1482 }
1483 else
1484 {
1485 /* Use the cache of larger shared ints, using int_cst_node as
1486 a temporary. */
1487
1488 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1489 TREE_TYPE (int_cst_node) = type;
1490
1491 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1492 t = *slot;
1493 if (!t)
1494 {
1495 /* Insert this one into the hash table. */
1496 t = int_cst_node;
1497 *slot = t;
1498 /* Make a new node for next time round. */
1499 int_cst_node = make_int_cst (1, 1);
1500 }
1501 }
1502 }
1503 else
1504 {
1505 /* The value either hashes properly or we drop it on the floor
1506 for the gc to take care of. There will not be enough of them
1507 to worry about. */
1508
1509 tree nt = build_new_int_cst (type, cst);
1510 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1511 t = *slot;
1512 if (!t)
1513 {
1514 /* Insert this one into the hash table. */
1515 t = nt;
1516 *slot = t;
1517 }
1518 }
1519
1520 return t;
1521 }
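
/* Illustrative sketch (assumption): the sharing scheme above means that
   repeated requests for the same small constant return the identical
   node, so pointer comparison suffices for such values:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);
     gcc_checking_assert (a == b);   // cached in TYPE_CACHED_VALUES

   Larger values go through int_cst_hash_table instead and are likewise
   shared per type and value.  */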
1522
1523 void
1524 cache_integer_cst (tree t)
1525 {
1526 tree type = TREE_TYPE (t);
1527 int ix = -1;
1528 int limit = 0;
1529 int prec = TYPE_PRECISION (type);
1530
1531 gcc_assert (!TREE_OVERFLOW (t));
1532
1533 switch (TREE_CODE (type))
1534 {
1535 case NULLPTR_TYPE:
1536 gcc_assert (integer_zerop (t));
1537 /* Fallthru. */
1538
1539 case POINTER_TYPE:
1540 case REFERENCE_TYPE:
1541 /* Cache NULL pointer. */
1542 if (integer_zerop (t))
1543 {
1544 limit = 1;
1545 ix = 0;
1546 }
1547 break;
1548
1549 case BOOLEAN_TYPE:
1550 /* Cache false or true. */
1551 limit = 2;
1552 if (wi::ltu_p (t, 2))
1553 ix = TREE_INT_CST_ELT (t, 0);
1554 break;
1555
1556 case INTEGER_TYPE:
1557 case OFFSET_TYPE:
1558 if (TYPE_UNSIGNED (type))
1559 {
1560 /* Cache 0..N */
1561 limit = INTEGER_SHARE_LIMIT;
1562
 1563 	  /* This is a little hokey, but if the precision is smaller than
1564 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1565 obvious test will not get the correct answer. */
1566 if (prec < HOST_BITS_PER_WIDE_INT)
1567 {
1568 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1569 ix = tree_to_uhwi (t);
1570 }
1571 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1572 ix = tree_to_uhwi (t);
1573 }
1574 else
1575 {
1576 /* Cache -1..N */
1577 limit = INTEGER_SHARE_LIMIT + 1;
1578
1579 if (integer_minus_onep (t))
1580 ix = 0;
1581 else if (!wi::neg_p (t))
1582 {
1583 if (prec < HOST_BITS_PER_WIDE_INT)
1584 {
1585 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1586 ix = tree_to_shwi (t) + 1;
1587 }
1588 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1589 ix = tree_to_shwi (t) + 1;
1590 }
1591 }
1592 break;
1593
1594 case ENUMERAL_TYPE:
1595 break;
1596
1597 default:
1598 gcc_unreachable ();
1599 }
1600
1601 if (ix >= 0)
1602 {
1603 /* Look for it in the type's vector of small shared ints. */
1604 if (!TYPE_CACHED_VALUES_P (type))
1605 {
1606 TYPE_CACHED_VALUES_P (type) = 1;
1607 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1608 }
1609
1610 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1611 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1612 }
1613 else
1614 {
1615 /* Use the cache of larger shared ints. */
1616 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
 1617       /* If there is already an entry for the number, verify it's the
1618 same. */
1619 if (*slot)
1620 gcc_assert (wi::eq_p (tree (*slot), t));
1621 else
1622 /* Otherwise insert this one into the hash table. */
1623 *slot = t;
1624 }
1625 }
1626
1627
 1628 /* Build an integer constant in TYPE whose lowest BITS bits are ones
1629 and the rest are zeros. */
1630
1631 tree
1632 build_low_bits_mask (tree type, unsigned bits)
1633 {
1634 gcc_assert (bits <= TYPE_PRECISION (type));
1635
1636 return wide_int_to_tree (type, wi::mask (bits, false,
1637 TYPE_PRECISION (type)));
1638 }
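
/* Illustrative sketch (assumption): the mask of the low 4 bits in a
   32-bit unsigned type is simply the constant 15:

     tree m = build_low_bits_mask (unsigned_type_node, 4);   // value 0xf
*/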
1639
 1640 /* Check that X is an integer constant that can be expressed in (unsigned)
1641 HOST_WIDE_INT without loss of precision. */
1642
1643 bool
1644 cst_and_fits_in_hwi (const_tree x)
1645 {
1646 if (TREE_CODE (x) != INTEGER_CST)
1647 return false;
1648
1649 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1650 return false;
1651
1652 return TREE_INT_CST_NUNITS (x) == 1;
1653 }
1654
1655 /* Build a newly constructed VECTOR_CST node of length LEN. */
1656
1657 tree
1658 make_vector_stat (unsigned len MEM_STAT_DECL)
1659 {
1660 tree t;
1661 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1662
1663 record_node_allocation_statistics (VECTOR_CST, length);
1664
1665 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1666
1667 TREE_SET_CODE (t, VECTOR_CST);
1668 TREE_CONSTANT (t) = 1;
1669
1670 return t;
1671 }
1672
1673 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1674 are in a list pointed to by VALS. */
1675
1676 tree
1677 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1678 {
1679 int over = 0;
1680 unsigned cnt = 0;
1681 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1682 TREE_TYPE (v) = type;
1683
1684 /* Iterate through elements and check for overflow. */
1685 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1686 {
1687 tree value = vals[cnt];
1688
1689 VECTOR_CST_ELT (v, cnt) = value;
1690
1691 /* Don't crash if we get an address constant. */
1692 if (!CONSTANT_CLASS_P (value))
1693 continue;
1694
1695 over |= TREE_OVERFLOW (value);
1696 }
1697
1698 TREE_OVERFLOW (v) = over;
1699 return v;
1700 }
1701
1702 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1703 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1704
1705 tree
1706 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1707 {
1708 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1709 unsigned HOST_WIDE_INT idx;
1710 tree value;
1711
1712 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1713 vec[idx] = value;
1714 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1715 vec[idx] = build_zero_cst (TREE_TYPE (type));
1716
1717 return build_vector (type, vec);
1718 }
1719
 1720 /* Build a vector of type VECTYPE whose elements are all equal to SC.  */
1721 tree
1722 build_vector_from_val (tree vectype, tree sc)
1723 {
1724 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1725
1726 if (sc == error_mark_node)
1727 return sc;
1728
1729 /* Verify that the vector type is suitable for SC. Note that there
1730 is some inconsistency in the type-system with respect to restrict
1731 qualifications of pointers. Vector types always have a main-variant
1732 element type and the qualification is applied to the vector-type.
1733 So TREE_TYPE (vector-type) does not return a properly qualified
1734 vector element-type. */
1735 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1736 TREE_TYPE (vectype)));
1737
1738 if (CONSTANT_CLASS_P (sc))
1739 {
1740 tree *v = XALLOCAVEC (tree, nunits);
1741 for (i = 0; i < nunits; ++i)
1742 v[i] = sc;
1743 return build_vector (vectype, v);
1744 }
1745 else
1746 {
1747 vec<constructor_elt, va_gc> *v;
1748 vec_alloc (v, nunits);
1749 for (i = 0; i < nunits; ++i)
1750 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1751 return build_constructor (vectype, v);
1752 }
1753 }
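
/* Illustrative sketch (assumption): splatting a constant scalar over a
   vector type yields a VECTOR_CST, while a non-constant scalar yields a
   CONSTRUCTOR, per the two branches above:

     tree elt = build_int_cst (TREE_TYPE (vectype), 4);
     tree splat = build_vector_from_val (vectype, elt);   // VECTOR_CST

   where VECTYPE stands for some integer vector type.  */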
1754
1755 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1756 are in the vec pointed to by VALS. */
1757 tree
1758 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1759 {
1760 tree c = make_node (CONSTRUCTOR);
1761 unsigned int i;
1762 constructor_elt *elt;
1763 bool constant_p = true;
1764 bool side_effects_p = false;
1765
1766 TREE_TYPE (c) = type;
1767 CONSTRUCTOR_ELTS (c) = vals;
1768
1769 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1770 {
1771 /* Mostly ctors will have elts that don't have side-effects, so
1772 the usual case is to scan all the elements. Hence a single
1773 loop for both const and side effects, rather than one loop
1774 each (with early outs). */
1775 if (!TREE_CONSTANT (elt->value))
1776 constant_p = false;
1777 if (TREE_SIDE_EFFECTS (elt->value))
1778 side_effects_p = true;
1779 }
1780
1781 TREE_SIDE_EFFECTS (c) = side_effects_p;
1782 TREE_CONSTANT (c) = constant_p;
1783
1784 return c;
1785 }
1786
1787 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1788 INDEX and VALUE. */
1789 tree
1790 build_constructor_single (tree type, tree index, tree value)
1791 {
1792 vec<constructor_elt, va_gc> *v;
1793 constructor_elt elt = {index, value};
1794
1795 vec_alloc (v, 1);
1796 v->quick_push (elt);
1797
1798 return build_constructor (type, v);
1799 }
1800
1801
1802 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1803 are in a list pointed to by VALS. */
1804 tree
1805 build_constructor_from_list (tree type, tree vals)
1806 {
1807 tree t;
1808 vec<constructor_elt, va_gc> *v = NULL;
1809
1810 if (vals)
1811 {
1812 vec_alloc (v, list_length (vals));
1813 for (t = vals; t; t = TREE_CHAIN (t))
1814 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1815 }
1816
1817 return build_constructor (type, v);
1818 }
1819
1820 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1821 of elements, provided as index/value pairs. */
1822
1823 tree
1824 build_constructor_va (tree type, int nelts, ...)
1825 {
1826 vec<constructor_elt, va_gc> *v = NULL;
1827 va_list p;
1828
1829 va_start (p, nelts);
1830 vec_alloc (v, nelts);
1831 while (nelts--)
1832 {
1833 tree index = va_arg (p, tree);
1834 tree value = va_arg (p, tree);
1835 CONSTRUCTOR_APPEND_ELT (v, index, value);
1836 }
1837 va_end (p);
1838 return build_constructor (type, v);
1839 }
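
/* Illustrative sketch (assumption): building a two-element aggregate
   initializer with explicit index/value pairs, where FIELD1 and FIELD2
   stand for FIELD_DECLs of TYPE:

     tree ctor = build_constructor_va (type, 2,
				       field1, integer_one_node,
				       field2, integer_zero_node);
*/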
1840
1841 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1842
1843 tree
1844 build_fixed (tree type, FIXED_VALUE_TYPE f)
1845 {
1846 tree v;
1847 FIXED_VALUE_TYPE *fp;
1848
1849 v = make_node (FIXED_CST);
1850 fp = ggc_alloc<fixed_value> ();
1851 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1852
1853 TREE_TYPE (v) = type;
1854 TREE_FIXED_CST_PTR (v) = fp;
1855 return v;
1856 }
1857
1858 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1859
1860 tree
1861 build_real (tree type, REAL_VALUE_TYPE d)
1862 {
1863 tree v;
1864 REAL_VALUE_TYPE *dp;
1865 int overflow = 0;
1866
1867 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1868 Consider doing it via real_convert now. */
1869
1870 v = make_node (REAL_CST);
1871 dp = ggc_alloc<real_value> ();
1872 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1873
1874 TREE_TYPE (v) = type;
1875 TREE_REAL_CST_PTR (v) = dp;
1876 TREE_OVERFLOW (v) = overflow;
1877 return v;
1878 }
1879
 1880 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
 1881    converted to the floating-point type TYPE.  */
1882
1883 REAL_VALUE_TYPE
1884 real_value_from_int_cst (const_tree type, const_tree i)
1885 {
1886 REAL_VALUE_TYPE d;
1887
1888 /* Clear all bits of the real value type so that we can later do
1889 bitwise comparisons to see if two values are the same. */
1890 memset (&d, 0, sizeof d);
1891
1892 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1893 TYPE_SIGN (TREE_TYPE (i)));
1894 return d;
1895 }
1896
1897 /* Given a tree representing an integer constant I, return a tree
1898 representing the same value as a floating-point constant of type TYPE. */
1899
1900 tree
1901 build_real_from_int_cst (tree type, const_tree i)
1902 {
1903 tree v;
1904 int overflow = TREE_OVERFLOW (i);
1905
1906 v = build_real (type, real_value_from_int_cst (type, i));
1907
1908 TREE_OVERFLOW (v) |= overflow;
1909 return v;
1910 }
1911
1912 /* Return a newly constructed STRING_CST node whose value is
1913 the LEN characters at STR.
1914 Note that for a C string literal, LEN should include the trailing NUL.
1915 The TREE_TYPE is not initialized. */
1916
1917 tree
1918 build_string (int len, const char *str)
1919 {
1920 tree s;
1921 size_t length;
1922
1923 /* Do not waste bytes provided by padding of struct tree_string. */
1924 length = len + offsetof (struct tree_string, str) + 1;
1925
1926 record_node_allocation_statistics (STRING_CST, length);
1927
1928 s = (tree) ggc_internal_alloc (length);
1929
1930 memset (s, 0, sizeof (struct tree_typed));
1931 TREE_SET_CODE (s, STRING_CST);
1932 TREE_CONSTANT (s) = 1;
1933 TREE_STRING_LENGTH (s) = len;
1934 memcpy (s->string.str, str, len);
1935 s->string.str[len] = '\0';
1936
1937 return s;
1938 }
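
/* Illustrative sketch (assumption): for the C literal "hi" the length
   passed in includes the trailing NUL, and the caller attaches the type
   afterwards:

     tree s = build_string (3, "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
				       build_index_type (size_int (2)));
*/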
1939
1940 /* Return a newly constructed COMPLEX_CST node whose value is
1941 specified by the real and imaginary parts REAL and IMAG.
1942 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1943 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1944
1945 tree
1946 build_complex (tree type, tree real, tree imag)
1947 {
1948 tree t = make_node (COMPLEX_CST);
1949
1950 TREE_REALPART (t) = real;
1951 TREE_IMAGPART (t) = imag;
1952 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1953 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1954 return t;
1955 }
1956
1957 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
1958 element is set to 1. In particular, this is 1 + i for complex types. */
1959
1960 tree
1961 build_each_one_cst (tree type)
1962 {
1963 if (TREE_CODE (type) == COMPLEX_TYPE)
1964 {
1965 tree scalar = build_one_cst (TREE_TYPE (type));
1966 return build_complex (type, scalar, scalar);
1967 }
1968 else
1969 return build_one_cst (type);
1970 }
1971
1972 /* Return a constant of arithmetic type TYPE which is the
1973 multiplicative identity of the set TYPE. */
1974
1975 tree
1976 build_one_cst (tree type)
1977 {
1978 switch (TREE_CODE (type))
1979 {
1980 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1981 case POINTER_TYPE: case REFERENCE_TYPE:
1982 case OFFSET_TYPE:
1983 return build_int_cst (type, 1);
1984
1985 case REAL_TYPE:
1986 return build_real (type, dconst1);
1987
1988 case FIXED_POINT_TYPE:
1989 /* We can only generate 1 for accum types. */
1990 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1991 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1992
1993 case VECTOR_TYPE:
1994 {
1995 tree scalar = build_one_cst (TREE_TYPE (type));
1996
1997 return build_vector_from_val (type, scalar);
1998 }
1999
2000 case COMPLEX_TYPE:
2001 return build_complex (type,
2002 build_one_cst (TREE_TYPE (type)),
2003 build_zero_cst (TREE_TYPE (type)));
2004
2005 default:
2006 gcc_unreachable ();
2007 }
2008 }
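
/* Clarifying note (not in the original source): unlike build_each_one_cst
   above, for a COMPLEX_TYPE this routine returns the multiplicative
   identity 1 + 0i rather than 1 + 1i.  */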
2009
2010 /* Return an integer of type TYPE containing all 1's in as much precision as
2011 it contains, or a complex or vector whose subparts are such integers. */
2012
2013 tree
2014 build_all_ones_cst (tree type)
2015 {
2016 if (TREE_CODE (type) == COMPLEX_TYPE)
2017 {
2018 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2019 return build_complex (type, scalar, scalar);
2020 }
2021 else
2022 return build_minus_one_cst (type);
2023 }
2024
2025 /* Return a constant of arithmetic type TYPE which is the
2026 opposite of the multiplicative identity of the set TYPE. */
2027
2028 tree
2029 build_minus_one_cst (tree type)
2030 {
2031 switch (TREE_CODE (type))
2032 {
2033 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2034 case POINTER_TYPE: case REFERENCE_TYPE:
2035 case OFFSET_TYPE:
2036 return build_int_cst (type, -1);
2037
2038 case REAL_TYPE:
2039 return build_real (type, dconstm1);
2040
2041 case FIXED_POINT_TYPE:
2042 /* We can only generate -1 for accum types. */
2043 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2044 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2045 TYPE_MODE (type)));
2046
2047 case VECTOR_TYPE:
2048 {
2049 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2050
2051 return build_vector_from_val (type, scalar);
2052 }
2053
2054 case COMPLEX_TYPE:
2055 return build_complex (type,
2056 build_minus_one_cst (TREE_TYPE (type)),
2057 build_zero_cst (TREE_TYPE (type)));
2058
2059 default:
2060 gcc_unreachable ();
2061 }
2062 }
2063
2064 /* Build 0 constant of type TYPE. This is used by constructor folding
2065 and thus the constant should be represented in memory by
2066 zero(es). */
2067
2068 tree
2069 build_zero_cst (tree type)
2070 {
2071 switch (TREE_CODE (type))
2072 {
2073 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2074 case POINTER_TYPE: case REFERENCE_TYPE:
2075 case OFFSET_TYPE: case NULLPTR_TYPE:
2076 return build_int_cst (type, 0);
2077
2078 case REAL_TYPE:
2079 return build_real (type, dconst0);
2080
2081 case FIXED_POINT_TYPE:
2082 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2083
2084 case VECTOR_TYPE:
2085 {
2086 tree scalar = build_zero_cst (TREE_TYPE (type));
2087
2088 return build_vector_from_val (type, scalar);
2089 }
2090
2091 case COMPLEX_TYPE:
2092 {
2093 tree zero = build_zero_cst (TREE_TYPE (type));
2094
2095 return build_complex (type, zero, zero);
2096 }
2097
2098 default:
2099 if (!AGGREGATE_TYPE_P (type))
2100 return fold_convert (type, integer_zero_node);
2101 return build_constructor (type, NULL);
2102 }
2103 }
2104
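/* Illustrative sketch (assumptions noted inline): build_zero_cst is the
   preferred way to get a zero of any type, including aggregates, e.g.

     tree zi = build_zero_cst (integer_type_node);      0
     tree zd = build_zero_cst (double_type_node);       0.0
     tree zs = build_zero_cst (some_record_type);       empty CONSTRUCTOR

   where some_record_type stands for any RECORD_TYPE the caller has built;
   the aggregate case falls through to build_constructor (type, NULL).  */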
2105
2106 /* Build a BINFO with BASE_BINFOS slots for base binfos. */
2107
2108 tree
2109 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2110 {
2111 tree t;
2112 size_t length = (offsetof (struct tree_binfo, base_binfos)
2113 + vec<tree, va_gc>::embedded_size (base_binfos));
2114
2115 record_node_allocation_statistics (TREE_BINFO, length);
2116
2117 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2118
2119 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2120
2121 TREE_SET_CODE (t, TREE_BINFO);
2122
2123 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2124
2125 return t;
2126 }
2127
2128 /* Create a CASE_LABEL_EXPR tree node and return it. */
2129
2130 tree
2131 build_case_label (tree low_value, tree high_value, tree label_decl)
2132 {
2133 tree t = make_node (CASE_LABEL_EXPR);
2134
2135 TREE_TYPE (t) = void_type_node;
2136 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2137
2138 CASE_LOW (t) = low_value;
2139 CASE_HIGH (t) = high_value;
2140 CASE_LABEL (t) = label_decl;
2141 CASE_CHAIN (t) = NULL_TREE;
2142
2143 return t;
2144 }
2145
2146 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2147 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2148 The latter determines the length of the HOST_WIDE_INT vector. */
2149
2150 tree
2151 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2152 {
2153 tree t;
2154 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2155 + sizeof (struct tree_int_cst));
2156
2157 gcc_assert (len);
2158 record_node_allocation_statistics (INTEGER_CST, length);
2159
2160 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2161
2162 TREE_SET_CODE (t, INTEGER_CST);
2163 TREE_INT_CST_NUNITS (t) = len;
2164 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2165 /* to_offset can only be applied to trees that are offset_int-sized
2166 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2167 must be exactly the precision of offset_int and so LEN is correct. */
2168 if (ext_len <= OFFSET_INT_ELTS)
2169 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2170 else
2171 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2172
2173 TREE_CONSTANT (t) = 1;
2174
2175 return t;
2176 }
2177
2178 /* Build a newly constructed TREE_VEC node of length LEN. */
2179
2180 tree
2181 make_tree_vec_stat (int len MEM_STAT_DECL)
2182 {
2183 tree t;
2184 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2185
2186 record_node_allocation_statistics (TREE_VEC, length);
2187
2188 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2189
2190 TREE_SET_CODE (t, TREE_VEC);
2191 TREE_VEC_LENGTH (t) = len;
2192
2193 return t;
2194 }
2195
2196 /* Grow a TREE_VEC node to new length LEN. */
2197
2198 tree
2199 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2200 {
2201 gcc_assert (TREE_CODE (v) == TREE_VEC);
2202
2203 int oldlen = TREE_VEC_LENGTH (v);
2204 gcc_assert (len > oldlen);
2205
2206 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2207 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2208
2209 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2210
2211 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2212
2213 TREE_VEC_LENGTH (v) = len;
2214
2215 return v;
2216 }
2217 \f
2218 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2219 fixed, and scalar, complex or vector. */
2220
2221 int
2222 zerop (const_tree expr)
2223 {
2224 return (integer_zerop (expr)
2225 || real_zerop (expr)
2226 || fixed_zerop (expr));
2227 }
2228
2229 /* Return 1 if EXPR is the integer constant zero or a complex constant
2230 of zero. */
2231
2232 int
2233 integer_zerop (const_tree expr)
2234 {
2235 STRIP_NOPS (expr);
2236
2237 switch (TREE_CODE (expr))
2238 {
2239 case INTEGER_CST:
2240 return wi::eq_p (expr, 0);
2241 case COMPLEX_CST:
2242 return (integer_zerop (TREE_REALPART (expr))
2243 && integer_zerop (TREE_IMAGPART (expr)));
2244 case VECTOR_CST:
2245 {
2246 unsigned i;
2247 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2248 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2249 return false;
2250 return true;
2251 }
2252 default:
2253 return false;
2254 }
2255 }
2256
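/* Illustrative sketch (using the usual tree.h entry points build_int_cst
   and integer_type_node):

     integer_zerop (build_int_cst (integer_type_node, 0))   returns 1
     integer_zerop (build_int_cst (integer_type_node, 4))   returns 0

   The STRIP_NOPS above also lets the predicate look through same-mode
   conversions wrapped around the constant.  */
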
2257 /* Return 1 if EXPR is the integer constant one or the corresponding
2258 complex constant. */
2259
2260 int
2261 integer_onep (const_tree expr)
2262 {
2263 STRIP_NOPS (expr);
2264
2265 switch (TREE_CODE (expr))
2266 {
2267 case INTEGER_CST:
2268 return wi::eq_p (wi::to_widest (expr), 1);
2269 case COMPLEX_CST:
2270 return (integer_onep (TREE_REALPART (expr))
2271 && integer_zerop (TREE_IMAGPART (expr)));
2272 case VECTOR_CST:
2273 {
2274 unsigned i;
2275 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2276 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2277 return false;
2278 return true;
2279 }
2280 default:
2281 return false;
2282 }
2283 }
2284
2285 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2286 return 1 if every piece is the integer constant one. */
2287
2288 int
2289 integer_each_onep (const_tree expr)
2290 {
2291 STRIP_NOPS (expr);
2292
2293 if (TREE_CODE (expr) == COMPLEX_CST)
2294 return (integer_onep (TREE_REALPART (expr))
2295 && integer_onep (TREE_IMAGPART (expr)));
2296 else
2297 return integer_onep (expr);
2298 }
2299
2300 /* Return 1 if EXPR is an integer constant with all bits set in its
2301 precision, or a complex or vector whose subparts are such integers. */
2302
2303 int
2304 integer_all_onesp (const_tree expr)
2305 {
2306 STRIP_NOPS (expr);
2307
2308 if (TREE_CODE (expr) == COMPLEX_CST
2309 && integer_all_onesp (TREE_REALPART (expr))
2310 && integer_all_onesp (TREE_IMAGPART (expr)))
2311 return 1;
2312
2313 else if (TREE_CODE (expr) == VECTOR_CST)
2314 {
2315 unsigned i;
2316 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2317 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2318 return 0;
2319 return 1;
2320 }
2321
2322 else if (TREE_CODE (expr) != INTEGER_CST)
2323 return 0;
2324
2325 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2326 }
2327
2328 /* Return 1 if EXPR is the integer constant minus one. */
2329
2330 int
2331 integer_minus_onep (const_tree expr)
2332 {
2333 STRIP_NOPS (expr);
2334
2335 if (TREE_CODE (expr) == COMPLEX_CST)
2336 return (integer_all_onesp (TREE_REALPART (expr))
2337 && integer_zerop (TREE_IMAGPART (expr)));
2338 else
2339 return integer_all_onesp (expr);
2340 }
2341
2342 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2343 one bit on). */
2344
2345 int
2346 integer_pow2p (const_tree expr)
2347 {
2348 STRIP_NOPS (expr);
2349
2350 if (TREE_CODE (expr) == COMPLEX_CST
2351 && integer_pow2p (TREE_REALPART (expr))
2352 && integer_zerop (TREE_IMAGPART (expr)))
2353 return 1;
2354
2355 if (TREE_CODE (expr) != INTEGER_CST)
2356 return 0;
2357
2358 return wi::popcount (expr) == 1;
2359 }
2360
2361 /* Return 1 if EXPR is an integer constant other than zero or a
2362 complex constant other than zero. */
2363
2364 int
2365 integer_nonzerop (const_tree expr)
2366 {
2367 STRIP_NOPS (expr);
2368
2369 return ((TREE_CODE (expr) == INTEGER_CST
2370 && !wi::eq_p (expr, 0))
2371 || (TREE_CODE (expr) == COMPLEX_CST
2372 && (integer_nonzerop (TREE_REALPART (expr))
2373 || integer_nonzerop (TREE_IMAGPART (expr)))));
2374 }
2375
2376 /* Return 1 if EXPR is the integer constant one. For vector,
2377 return 1 if every piece is the integer constant minus one
2378 (representing the value TRUE). */
2379
2380 int
2381 integer_truep (const_tree expr)
2382 {
2383 STRIP_NOPS (expr);
2384
2385 if (TREE_CODE (expr) == VECTOR_CST)
2386 return integer_all_onesp (expr);
2387 return integer_onep (expr);
2388 }
2389
2390 /* Return 1 if EXPR is the fixed-point constant zero. */
2391
2392 int
2393 fixed_zerop (const_tree expr)
2394 {
2395 return (TREE_CODE (expr) == FIXED_CST
2396 && TREE_FIXED_CST (expr).data.is_zero ());
2397 }
2398
2399 /* Return the power of two represented by a tree node known to be a
2400 power of two. */
2401
2402 int
2403 tree_log2 (const_tree expr)
2404 {
2405 STRIP_NOPS (expr);
2406
2407 if (TREE_CODE (expr) == COMPLEX_CST)
2408 return tree_log2 (TREE_REALPART (expr));
2409
2410 return wi::exact_log2 (expr);
2411 }
2412
2413 /* Similar, but return the largest integer Y such that 2 ** Y is less
2414 than or equal to EXPR. */
2415
2416 int
2417 tree_floor_log2 (const_tree expr)
2418 {
2419 STRIP_NOPS (expr);
2420
2421 if (TREE_CODE (expr) == COMPLEX_CST)
2422 return tree_log2 (TREE_REALPART (expr));
2423
2424 return wi::floor_log2 (expr);
2425 }
2426
2427 /* Return the number of known trailing zero bits in EXPR, or, if the value of
2428 EXPR is known to be zero, the precision of its type. */
2429
2430 unsigned int
2431 tree_ctz (const_tree expr)
2432 {
2433 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2434 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2435 return 0;
2436
2437 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2438 switch (TREE_CODE (expr))
2439 {
2440 case INTEGER_CST:
2441 ret1 = wi::ctz (expr);
2442 return MIN (ret1, prec);
2443 case SSA_NAME:
2444 ret1 = wi::ctz (get_nonzero_bits (expr));
2445 return MIN (ret1, prec);
2446 case PLUS_EXPR:
2447 case MINUS_EXPR:
2448 case BIT_IOR_EXPR:
2449 case BIT_XOR_EXPR:
2450 case MIN_EXPR:
2451 case MAX_EXPR:
2452 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2453 if (ret1 == 0)
2454 return ret1;
2455 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2456 return MIN (ret1, ret2);
2457 case POINTER_PLUS_EXPR:
2458 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2459 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2460 /* Second operand is sizetype, which could in theory be
2461 wider than the pointer's precision. Make sure we never
2462 return more than prec. */
2463 ret2 = MIN (ret2, prec);
2464 return MIN (ret1, ret2);
2465 case BIT_AND_EXPR:
2466 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2467 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2468 return MAX (ret1, ret2);
2469 case MULT_EXPR:
2470 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2471 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2472 return MIN (ret1 + ret2, prec);
2473 case LSHIFT_EXPR:
2474 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2475 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2476 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2477 {
2478 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2479 return MIN (ret1 + ret2, prec);
2480 }
2481 return ret1;
2482 case RSHIFT_EXPR:
2483 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2484 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2485 {
2486 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2487 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2488 if (ret1 > ret2)
2489 return ret1 - ret2;
2490 }
2491 return 0;
2492 case TRUNC_DIV_EXPR:
2493 case CEIL_DIV_EXPR:
2494 case FLOOR_DIV_EXPR:
2495 case ROUND_DIV_EXPR:
2496 case EXACT_DIV_EXPR:
2497 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2498 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2499 {
2500 int l = tree_log2 (TREE_OPERAND (expr, 1));
2501 if (l >= 0)
2502 {
2503 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2504 ret2 = l;
2505 if (ret1 > ret2)
2506 return ret1 - ret2;
2507 }
2508 }
2509 return 0;
2510 CASE_CONVERT:
2511 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2512 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2513 ret1 = prec;
2514 return MIN (ret1, prec);
2515 case SAVE_EXPR:
2516 return tree_ctz (TREE_OPERAND (expr, 0));
2517 case COND_EXPR:
2518 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2519 if (ret1 == 0)
2520 return 0;
2521 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2522 return MIN (ret1, ret2);
2523 case COMPOUND_EXPR:
2524 return tree_ctz (TREE_OPERAND (expr, 1));
2525 case ADDR_EXPR:
2526 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2527 if (ret1 > BITS_PER_UNIT)
2528 {
2529 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2530 return MIN (ret1, prec);
2531 }
2532 return 0;
2533 default:
2534 return 0;
2535 }
2536 }
2537
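/* Worked example (illustrative): for an INTEGER_CST such as
   build_int_cst (integer_type_node, 24), the INTEGER_CST case yields
   wi::ctz = 3, since 24 = 0b11000 has three trailing zero bits.  For
   x * 8 with x of unknown value, the MULT_EXPR case adds
   tree_ctz (x) + tree_ctz (8) and so reports at least 3, capped at the
   type's precision.  */
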
2538 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2539 decimal float constants, so don't return 1 for them. */
2540
2541 int
2542 real_zerop (const_tree expr)
2543 {
2544 STRIP_NOPS (expr);
2545
2546 switch (TREE_CODE (expr))
2547 {
2548 case REAL_CST:
2549 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2550 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2551 case COMPLEX_CST:
2552 return real_zerop (TREE_REALPART (expr))
2553 && real_zerop (TREE_IMAGPART (expr));
2554 case VECTOR_CST:
2555 {
2556 unsigned i;
2557 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2558 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2559 return false;
2560 return true;
2561 }
2562 default:
2563 return false;
2564 }
2565 }
2566
2567 /* Return 1 if EXPR is the real constant one in real or complex form.
2568 Trailing zeroes matter for decimal float constants, so don't return
2569 1 for them. */
2570
2571 int
2572 real_onep (const_tree expr)
2573 {
2574 STRIP_NOPS (expr);
2575
2576 switch (TREE_CODE (expr))
2577 {
2578 case REAL_CST:
2579 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2580 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2581 case COMPLEX_CST:
2582 return real_onep (TREE_REALPART (expr))
2583 && real_zerop (TREE_IMAGPART (expr));
2584 case VECTOR_CST:
2585 {
2586 unsigned i;
2587 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2588 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2589 return false;
2590 return true;
2591 }
2592 default:
2593 return false;
2594 }
2595 }
2596
2597 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2598 matter for decimal float constants, so don't return 1 for them. */
2599
2600 int
2601 real_minus_onep (const_tree expr)
2602 {
2603 STRIP_NOPS (expr);
2604
2605 switch (TREE_CODE (expr))
2606 {
2607 case REAL_CST:
2608 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2609 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2610 case COMPLEX_CST:
2611 return real_minus_onep (TREE_REALPART (expr))
2612 && real_zerop (TREE_IMAGPART (expr));
2613 case VECTOR_CST:
2614 {
2615 unsigned i;
2616 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2617 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2618 return false;
2619 return true;
2620 }
2621 default:
2622 return false;
2623 }
2624 }
2625
2626 /* Nonzero if EXP is a constant or a cast of a constant. */
2627
2628 int
2629 really_constant_p (const_tree exp)
2630 {
2631 /* This is not quite the same as STRIP_NOPS. It does more. */
2632 while (CONVERT_EXPR_P (exp)
2633 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2634 exp = TREE_OPERAND (exp, 0);
2635 return TREE_CONSTANT (exp);
2636 }
2637 \f
2638 /* Return first list element whose TREE_VALUE is ELEM.
2639 Return 0 if ELEM is not in LIST. */
2640
2641 tree
2642 value_member (tree elem, tree list)
2643 {
2644 while (list)
2645 {
2646 if (elem == TREE_VALUE (list))
2647 return list;
2648 list = TREE_CHAIN (list);
2649 }
2650 return NULL_TREE;
2651 }
2652
2653 /* Return first list element whose TREE_PURPOSE is ELEM.
2654 Return 0 if ELEM is not in LIST. */
2655
2656 tree
2657 purpose_member (const_tree elem, tree list)
2658 {
2659 while (list)
2660 {
2661 if (elem == TREE_PURPOSE (list))
2662 return list;
2663 list = TREE_CHAIN (list);
2664 }
2665 return NULL_TREE;
2666 }
2667
2668 /* Return true if ELEM is in V. */
2669
2670 bool
2671 vec_member (const_tree elem, vec<tree, va_gc> *v)
2672 {
2673 unsigned ix;
2674 tree t;
2675 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2676 if (elem == t)
2677 return true;
2678 return false;
2679 }
2680
2681 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2682 NULL_TREE. */
2683
2684 tree
2685 chain_index (int idx, tree chain)
2686 {
2687 for (; chain && idx > 0; --idx)
2688 chain = TREE_CHAIN (chain);
2689 return chain;
2690 }
2691
2692 /* Return nonzero if ELEM is part of the chain CHAIN. */
2693
2694 int
2695 chain_member (const_tree elem, const_tree chain)
2696 {
2697 while (chain)
2698 {
2699 if (elem == chain)
2700 return 1;
2701 chain = DECL_CHAIN (chain);
2702 }
2703
2704 return 0;
2705 }
2706
2707 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2708 We expect a null pointer to mark the end of the chain.
2709 This is the Lisp primitive `length'. */
2710
2711 int
2712 list_length (const_tree t)
2713 {
2714 const_tree p = t;
2715 #ifdef ENABLE_TREE_CHECKING
2716 const_tree q = t;
2717 #endif
2718 int len = 0;
2719
2720 while (p)
2721 {
2722 p = TREE_CHAIN (p);
2723 #ifdef ENABLE_TREE_CHECKING
2724 if (len % 2)
2725 q = TREE_CHAIN (q);
2726 gcc_assert (p != q);
2727 #endif
2728 len++;
2729 }
2730
2731 return len;
2732 }
2733
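/* Illustrative sketch: for a freshly built two-element TREE_LIST chain

     tree l = tree_cons (NULL_TREE, x, tree_cons (NULL_TREE, y, NULL_TREE));
     list_length (l)   returns 2

   where x and y stand for arbitrary trees.  The ENABLE_TREE_CHECKING
   variant above advances a second pointer at half speed (Floyd-style
   cycle detection) and asserts if the two pointers ever meet.  */
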
2734 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2735 UNION_TYPE TYPE, or NULL_TREE if none. */
2736
2737 tree
2738 first_field (const_tree type)
2739 {
2740 tree t = TYPE_FIELDS (type);
2741 while (t && TREE_CODE (t) != FIELD_DECL)
2742 t = TREE_CHAIN (t);
2743 return t;
2744 }
2745
2746 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2747 by modifying the last node in chain 1 to point to chain 2.
2748 This is the Lisp primitive `nconc'. */
2749
2750 tree
2751 chainon (tree op1, tree op2)
2752 {
2753 tree t1;
2754
2755 if (!op1)
2756 return op2;
2757 if (!op2)
2758 return op1;
2759
2760 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2761 continue;
2762 TREE_CHAIN (t1) = op2;
2763
2764 #ifdef ENABLE_TREE_CHECKING
2765 {
2766 tree t2;
2767 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2768 gcc_assert (t2 != t1);
2769 }
2770 #endif
2771
2772 return op1;
2773 }
2774
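/* Illustrative sketch: chainon appends destructively, so for two
   single-element lists a and b built with build_tree_list,

     tree ab = chainon (a, b);      ab == a and TREE_CHAIN (a) == b

   The checking loop above asserts that OP2 does not contain the former
   tail of OP1, which would have created a cycle.  */
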
2775 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2776
2777 tree
2778 tree_last (tree chain)
2779 {
2780 tree next;
2781 if (chain)
2782 while ((next = TREE_CHAIN (chain)))
2783 chain = next;
2784 return chain;
2785 }
2786
2787 /* Reverse the order of elements in the chain T,
2788 and return the new head of the chain (old last element). */
2789
2790 tree
2791 nreverse (tree t)
2792 {
2793 tree prev = 0, decl, next;
2794 for (decl = t; decl; decl = next)
2795 {
2796 /* We shouldn't be using this function to reverse BLOCK chains; we
2797 have blocks_nreverse for that. */
2798 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2799 next = TREE_CHAIN (decl);
2800 TREE_CHAIN (decl) = prev;
2801 prev = decl;
2802 }
2803 return prev;
2804 }
2805 \f
2806 /* Return a newly created TREE_LIST node whose
2807 purpose and value fields are PARM and VALUE. */
2808
2809 tree
2810 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2811 {
2812 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2813 TREE_PURPOSE (t) = parm;
2814 TREE_VALUE (t) = value;
2815 return t;
2816 }
2817
2818 /* Build a chain of TREE_LIST nodes from a vector. */
2819
2820 tree
2821 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2822 {
2823 tree ret = NULL_TREE;
2824 tree *pp = &ret;
2825 unsigned int i;
2826 tree t;
2827 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2828 {
2829 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2830 pp = &TREE_CHAIN (*pp);
2831 }
2832 return ret;
2833 }
2834
2835 /* Return a newly created TREE_LIST node whose
2836 purpose and value fields are PURPOSE and VALUE
2837 and whose TREE_CHAIN is CHAIN. */
2838
2839 tree
2840 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2841 {
2842 tree node;
2843
2844 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2845 memset (node, 0, sizeof (struct tree_common));
2846
2847 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2848
2849 TREE_SET_CODE (node, TREE_LIST);
2850 TREE_CHAIN (node) = chain;
2851 TREE_PURPOSE (node) = purpose;
2852 TREE_VALUE (node) = value;
2853 return node;
2854 }
2855
2856 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2857 trees. */
2858
2859 vec<tree, va_gc> *
2860 ctor_to_vec (tree ctor)
2861 {
2862 vec<tree, va_gc> *vec;
2863 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2864 unsigned int ix;
2865 tree val;
2866
2867 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2868 vec->quick_push (val);
2869
2870 return vec;
2871 }
2872 \f
2873 /* Return the size nominally occupied by an object of type TYPE
2874 when it resides in memory. The value is measured in units of bytes,
2875 and its data type is that normally used for type sizes
2876 (which is the first type created by make_signed_type or
2877 make_unsigned_type). */
2878
2879 tree
2880 size_in_bytes (const_tree type)
2881 {
2882 tree t;
2883
2884 if (type == error_mark_node)
2885 return integer_zero_node;
2886
2887 type = TYPE_MAIN_VARIANT (type);
2888 t = TYPE_SIZE_UNIT (type);
2889
2890 if (t == 0)
2891 {
2892 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2893 return size_zero_node;
2894 }
2895
2896 return t;
2897 }
2898
2899 /* Return the size of TYPE (in bytes) as a wide integer
2900 or return -1 if the size can vary or is larger than an integer. */
2901
2902 HOST_WIDE_INT
2903 int_size_in_bytes (const_tree type)
2904 {
2905 tree t;
2906
2907 if (type == error_mark_node)
2908 return 0;
2909
2910 type = TYPE_MAIN_VARIANT (type);
2911 t = TYPE_SIZE_UNIT (type);
2912
2913 if (t && tree_fits_uhwi_p (t))
2914 return TREE_INT_CST_LOW (t);
2915 else
2916 return -1;
2917 }
2918
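/* Illustrative example: on a target where int occupies 4 bytes,
   int_size_in_bytes (integer_type_node) returns 4, while an incomplete
   type or a variable-length array type yields -1 because TYPE_SIZE_UNIT
   is either missing or not a compile-time constant.  The exact value is
   target-dependent; only the -1 convention is guaranteed.  */
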
2919 /* Return the maximum size of TYPE (in bytes) as a wide integer
2920 or return -1 if the size can vary or is larger than an integer. */
2921
2922 HOST_WIDE_INT
2923 max_int_size_in_bytes (const_tree type)
2924 {
2925 HOST_WIDE_INT size = -1;
2926 tree size_tree;
2927
2928 /* If this is an array type, check for a possible MAX_SIZE attached. */
2929
2930 if (TREE_CODE (type) == ARRAY_TYPE)
2931 {
2932 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2933
2934 if (size_tree && tree_fits_uhwi_p (size_tree))
2935 size = tree_to_uhwi (size_tree);
2936 }
2937
2938 /* If we still haven't been able to get a size, see if the language
2939 can compute a maximum size. */
2940
2941 if (size == -1)
2942 {
2943 size_tree = lang_hooks.types.max_size (type);
2944
2945 if (size_tree && tree_fits_uhwi_p (size_tree))
2946 size = tree_to_uhwi (size_tree);
2947 }
2948
2949 return size;
2950 }
2951 \f
2952 /* Return the bit position of FIELD, in bits from the start of the record.
2953 This is a tree of type bitsizetype. */
2954
2955 tree
2956 bit_position (const_tree field)
2957 {
2958 return bit_from_pos (DECL_FIELD_OFFSET (field),
2959 DECL_FIELD_BIT_OFFSET (field));
2960 }
2961 \f
2962 /* Return the byte position of FIELD, in bytes from the start of the record.
2963 This is a tree of type sizetype. */
2964
2965 tree
2966 byte_position (const_tree field)
2967 {
2968 return byte_from_pos (DECL_FIELD_OFFSET (field),
2969 DECL_FIELD_BIT_OFFSET (field));
2970 }
2971
2972 /* Likewise, but return as an integer. It must be representable in
2973 that way (since it could be a signed value, we don't have the
2974 option of returning -1 like int_size_in_bytes can). */
2975
2976 HOST_WIDE_INT
2977 int_byte_position (const_tree field)
2978 {
2979 return tree_to_shwi (byte_position (field));
2980 }
2981 \f
2982 /* Return the strictest alignment, in bits, that T is known to have. */
2983
2984 unsigned int
2985 expr_align (const_tree t)
2986 {
2987 unsigned int align0, align1;
2988
2989 switch (TREE_CODE (t))
2990 {
2991 CASE_CONVERT: case NON_LVALUE_EXPR:
2992 /* If we have conversions, we know that the alignment of the
2993 object must meet each of the alignments of the types. */
2994 align0 = expr_align (TREE_OPERAND (t, 0));
2995 align1 = TYPE_ALIGN (TREE_TYPE (t));
2996 return MAX (align0, align1);
2997
2998 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2999 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3000 case CLEANUP_POINT_EXPR:
3001 /* These don't change the alignment of an object. */
3002 return expr_align (TREE_OPERAND (t, 0));
3003
3004 case COND_EXPR:
3005 /* The best we can do is say that the alignment is the least aligned
3006 of the two arms. */
3007 align0 = expr_align (TREE_OPERAND (t, 1));
3008 align1 = expr_align (TREE_OPERAND (t, 2));
3009 return MIN (align0, align1);
3010
3011 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3012 meaningfully; it's always 1. */
3013 case LABEL_DECL: case CONST_DECL:
3014 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3015 case FUNCTION_DECL:
3016 gcc_assert (DECL_ALIGN (t) != 0);
3017 return DECL_ALIGN (t);
3018
3019 default:
3020 break;
3021 }
3022
3023 /* Otherwise take the alignment from that of the type. */
3024 return TYPE_ALIGN (TREE_TYPE (t));
3025 }
3026 \f
3027 /* Return, as a tree node, the number of elements for TYPE (which is an
3028 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3029
3030 tree
3031 array_type_nelts (const_tree type)
3032 {
3033 tree index_type, min, max;
3034
3035 /* If they did it with unspecified bounds, then we should have already
3036 given an error about it before we got here. */
3037 if (! TYPE_DOMAIN (type))
3038 return error_mark_node;
3039
3040 index_type = TYPE_DOMAIN (type);
3041 min = TYPE_MIN_VALUE (index_type);
3042 max = TYPE_MAX_VALUE (index_type);
3043
3044 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3045 if (!max)
3046 return error_mark_node;
3047
3048 return (integer_zerop (min)
3049 ? max
3050 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3051 }
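
/* Worked example (illustrative): for an array type built as

     tree domain = build_index_type (size_int (9));
     tree arr = build_array_type (integer_type_node, domain);

   array_type_nelts (arr) returns the INTEGER_CST 9, i.e. the number of
   elements (10) minus one, since the minimum index is zero.  */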
3052 \f
3053 /* If arg is static -- a reference to an object in static storage -- then
3054 return the object. This is not the same as the C meaning of `static'.
3055 If arg isn't static, return NULL. */
3056
3057 tree
3058 staticp (tree arg)
3059 {
3060 switch (TREE_CODE (arg))
3061 {
3062 case FUNCTION_DECL:
3063 /* Nested functions are static, even though taking their address will
3064 involve a trampoline as we unnest the nested function and create
3065 the trampoline on the tree level. */
3066 return arg;
3067
3068 case VAR_DECL:
3069 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3070 && ! DECL_THREAD_LOCAL_P (arg)
3071 && ! DECL_DLLIMPORT_P (arg)
3072 ? arg : NULL);
3073
3074 case CONST_DECL:
3075 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3076 ? arg : NULL);
3077
3078 case CONSTRUCTOR:
3079 return TREE_STATIC (arg) ? arg : NULL;
3080
3081 case LABEL_DECL:
3082 case STRING_CST:
3083 return arg;
3084
3085 case COMPONENT_REF:
3086 /* If the thing being referenced is not a field, then it is
3087 something language specific. */
3088 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3089
3090 /* If we are referencing a bitfield, we can't evaluate an
3091 ADDR_EXPR at compile time and so it isn't a constant. */
3092 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3093 return NULL;
3094
3095 return staticp (TREE_OPERAND (arg, 0));
3096
3097 case BIT_FIELD_REF:
3098 return NULL;
3099
3100 case INDIRECT_REF:
3101 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3102
3103 case ARRAY_REF:
3104 case ARRAY_RANGE_REF:
3105 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3106 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3107 return staticp (TREE_OPERAND (arg, 0));
3108 else
3109 return NULL;
3110
3111 case COMPOUND_LITERAL_EXPR:
3112 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3113
3114 default:
3115 return NULL;
3116 }
3117 }
3118
3119 \f
3120
3121
3122 /* Return whether OP is a DECL whose address is function-invariant. */
3123
3124 bool
3125 decl_address_invariant_p (const_tree op)
3126 {
3127 /* The conditions below are slightly less strict than the one in
3128 staticp. */
3129
3130 switch (TREE_CODE (op))
3131 {
3132 case PARM_DECL:
3133 case RESULT_DECL:
3134 case LABEL_DECL:
3135 case FUNCTION_DECL:
3136 return true;
3137
3138 case VAR_DECL:
3139 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3140 || DECL_THREAD_LOCAL_P (op)
3141 || DECL_CONTEXT (op) == current_function_decl
3142 || decl_function_context (op) == current_function_decl)
3143 return true;
3144 break;
3145
3146 case CONST_DECL:
3147 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3148 || decl_function_context (op) == current_function_decl)
3149 return true;
3150 break;
3151
3152 default:
3153 break;
3154 }
3155
3156 return false;
3157 }
3158
3159 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3160
3161 bool
3162 decl_address_ip_invariant_p (const_tree op)
3163 {
3164 /* The conditions below are slightly less strict than the one in
3165 staticp. */
3166
3167 switch (TREE_CODE (op))
3168 {
3169 case LABEL_DECL:
3170 case FUNCTION_DECL:
3171 case STRING_CST:
3172 return true;
3173
3174 case VAR_DECL:
3175 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3176 && !DECL_DLLIMPORT_P (op))
3177 || DECL_THREAD_LOCAL_P (op))
3178 return true;
3179 break;
3180
3181 case CONST_DECL:
3182 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3183 return true;
3184 break;
3185
3186 default:
3187 break;
3188 }
3189
3190 return false;
3191 }
3192
3193
3194 /* Return true if T is function-invariant (internal function, does
3195 not handle arithmetic; that's handled in skip_simple_arithmetic and
3196 tree_invariant_p). */
3197
3198 static bool tree_invariant_p (tree t);
3199
3200 static bool
3201 tree_invariant_p_1 (tree t)
3202 {
3203 tree op;
3204
3205 if (TREE_CONSTANT (t)
3206 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3207 return true;
3208
3209 switch (TREE_CODE (t))
3210 {
3211 case SAVE_EXPR:
3212 return true;
3213
3214 case ADDR_EXPR:
3215 op = TREE_OPERAND (t, 0);
3216 while (handled_component_p (op))
3217 {
3218 switch (TREE_CODE (op))
3219 {
3220 case ARRAY_REF:
3221 case ARRAY_RANGE_REF:
3222 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3223 || TREE_OPERAND (op, 2) != NULL_TREE
3224 || TREE_OPERAND (op, 3) != NULL_TREE)
3225 return false;
3226 break;
3227
3228 case COMPONENT_REF:
3229 if (TREE_OPERAND (op, 2) != NULL_TREE)
3230 return false;
3231 break;
3232
3233 default:;
3234 }
3235 op = TREE_OPERAND (op, 0);
3236 }
3237
3238 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3239
3240 default:
3241 break;
3242 }
3243
3244 return false;
3245 }
3246
3247 /* Return true if T is function-invariant. */
3248
3249 static bool
3250 tree_invariant_p (tree t)
3251 {
3252 tree inner = skip_simple_arithmetic (t);
3253 return tree_invariant_p_1 (inner);
3254 }
3255
3256 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3257 Do this to any expression which may be used in more than one place,
3258 but must be evaluated only once.
3259
3260 Normally, expand_expr would reevaluate the expression each time.
3261 Calling save_expr produces something that is evaluated and recorded
3262 the first time expand_expr is called on it. Subsequent calls to
3263 expand_expr just reuse the recorded value.
3264
3265 The call to expand_expr that generates code that actually computes
3266 the value is the first call *at compile time*. Subsequent calls
3267 *at compile time* generate code to use the saved value.
3268 This produces the correct result provided that *at run time* control
3269 always flows through the insns made by the first expand_expr
3270 before reaching the other places where the save_expr was evaluated.
3271 You, the caller of save_expr, must make sure this is so.
3272
3273 Constants, and certain read-only nodes, are returned with no
3274 SAVE_EXPR because that is safe. Expressions containing placeholders
3275 are not touched; see tree.def for an explanation of what these
3276 are used for. */
3277
3278 tree
3279 save_expr (tree expr)
3280 {
3281 tree t = fold (expr);
3282 tree inner;
3283
3284 /* If the tree evaluates to a constant, then we don't want to hide that
3285 fact (i.e. this allows further folding, and direct checks for constants).
3286 However, a read-only object that has side effects cannot be bypassed.
3287 Since it is no problem to reevaluate literals, we just return the
3288 literal node. */
3289 inner = skip_simple_arithmetic (t);
3290 if (TREE_CODE (inner) == ERROR_MARK)
3291 return inner;
3292
3293 if (tree_invariant_p_1 (inner))
3294 return t;
3295
3296 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3297 it means that the size or offset of some field of an object depends on
3298 the value within another field.
3299
3300 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3301 and some variable since it would then need to be both evaluated once and
3302 evaluated more than once. Front-ends must ensure this case cannot
3303 happen by surrounding any such subexpressions in their own SAVE_EXPR
3304 and forcing evaluation at the proper time. */
3305 if (contains_placeholder_p (inner))
3306 return t;
3307
3308 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3309 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3310
3311 /* This expression might be placed ahead of a jump to ensure that the
3312 value was computed on both sides of the jump. So make sure it isn't
3313 eliminated as dead. */
3314 TREE_SIDE_EFFECTS (t) = 1;
3315 return t;
3316 }
3317
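/* Usage sketch (illustrative): a front end expanding x*x for some
   side-effecting or expensive expression x would write

     tree saved = save_expr (x);
     tree sq = build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);

   so that x is evaluated once and both operands reuse the SAVE_EXPR.
   If x is already constant or otherwise invariant, save_expr simply
   returns it unwrapped, as the checks above show.  */
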
3318 /* Look inside EXPR into any simple arithmetic operations. Return the
3319 outermost non-arithmetic or non-invariant node. */
3320
3321 tree
3322 skip_simple_arithmetic (tree expr)
3323 {
3324 /* We don't care about whether this can be used as an lvalue in this
3325 context. */
3326 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3327 expr = TREE_OPERAND (expr, 0);
3328
3329 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3330 a constant, it will be more efficient to not make another SAVE_EXPR since
3331 it will allow better simplification and GCSE will be able to merge the
3332 computations if they actually occur. */
3333 while (true)
3334 {
3335 if (UNARY_CLASS_P (expr))
3336 expr = TREE_OPERAND (expr, 0);
3337 else if (BINARY_CLASS_P (expr))
3338 {
3339 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3340 expr = TREE_OPERAND (expr, 0);
3341 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3342 expr = TREE_OPERAND (expr, 1);
3343 else
3344 break;
3345 }
3346 else
3347 break;
3348 }
3349
3350 return expr;
3351 }
3352
3353 /* Look inside EXPR into simple arithmetic operations involving constants.
3354 Return the outermost non-arithmetic or non-constant node. */
3355
3356 tree
3357 skip_simple_constant_arithmetic (tree expr)
3358 {
3359 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3360 expr = TREE_OPERAND (expr, 0);
3361
3362 while (true)
3363 {
3364 if (UNARY_CLASS_P (expr))
3365 expr = TREE_OPERAND (expr, 0);
3366 else if (BINARY_CLASS_P (expr))
3367 {
3368 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3369 expr = TREE_OPERAND (expr, 0);
3370 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3371 expr = TREE_OPERAND (expr, 1);
3372 else
3373 break;
3374 }
3375 else
3376 break;
3377 }
3378
3379 return expr;
3380 }
3381
3382 /* Return which tree structure is used by T. */
3383
3384 enum tree_node_structure_enum
3385 tree_node_structure (const_tree t)
3386 {
3387 const enum tree_code code = TREE_CODE (t);
3388 return tree_node_structure_for_code (code);
3389 }
3390
3391 /* Set various status flags when building a CALL_EXPR object T. */
3392
3393 static void
3394 process_call_operands (tree t)
3395 {
3396 bool side_effects = TREE_SIDE_EFFECTS (t);
3397 bool read_only = false;
3398 int i = call_expr_flags (t);
3399
3400 /* Calls have side-effects, except those to const or pure functions. */
3401 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3402 side_effects = true;
3403 /* Propagate TREE_READONLY of arguments for const functions. */
3404 if (i & ECF_CONST)
3405 read_only = true;
3406
3407 if (!side_effects || read_only)
3408 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3409 {
3410 tree op = TREE_OPERAND (t, i);
3411 if (op && TREE_SIDE_EFFECTS (op))
3412 side_effects = true;
3413 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3414 read_only = false;
3415 }
3416
3417 TREE_SIDE_EFFECTS (t) = side_effects;
3418 TREE_READONLY (t) = read_only;
3419 }
3420 \f
3421 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3422 size or offset that depends on a field within a record. */
3423
3424 bool
3425 contains_placeholder_p (const_tree exp)
3426 {
3427 enum tree_code code;
3428
3429 if (!exp)
3430 return 0;
3431
3432 code = TREE_CODE (exp);
3433 if (code == PLACEHOLDER_EXPR)
3434 return 1;
3435
3436 switch (TREE_CODE_CLASS (code))
3437 {
3438 case tcc_reference:
3439 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3440 position computations since they will be converted into a
3441 WITH_RECORD_EXPR involving the reference, which we assume
3442 here will be valid. */
3443 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3444
3445 case tcc_exceptional:
3446 if (code == TREE_LIST)
3447 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3448 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3449 break;
3450
3451 case tcc_unary:
3452 case tcc_binary:
3453 case tcc_comparison:
3454 case tcc_expression:
3455 switch (code)
3456 {
3457 case COMPOUND_EXPR:
3458 /* Ignoring the first operand isn't quite right, but works best. */
3459 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3460
3461 case COND_EXPR:
3462 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3463 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3464 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3465
3466 case SAVE_EXPR:
3467 /* The save_expr function never wraps anything containing
3468 a PLACEHOLDER_EXPR. */
3469 return 0;
3470
3471 default:
3472 break;
3473 }
3474
3475 switch (TREE_CODE_LENGTH (code))
3476 {
3477 case 1:
3478 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3479 case 2:
3480 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3481 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3482 default:
3483 return 0;
3484 }
3485
3486 case tcc_vl_exp:
3487 switch (code)
3488 {
3489 case CALL_EXPR:
3490 {
3491 const_tree arg;
3492 const_call_expr_arg_iterator iter;
3493 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3494 if (CONTAINS_PLACEHOLDER_P (arg))
3495 return 1;
3496 return 0;
3497 }
3498 default:
3499 return 0;
3500 }
3501
3502 default:
3503 return 0;
3504 }
3505 return 0;
3506 }
3507
3508 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3509 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3510 field positions. */
3511
3512 static bool
3513 type_contains_placeholder_1 (const_tree type)
3514 {
3515 /* If the size contains a placeholder or the parent type (component type in
3516 the case of arrays) involves a placeholder, this type does. */
3517 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3518 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3519 || (!POINTER_TYPE_P (type)
3520 && TREE_TYPE (type)
3521 && type_contains_placeholder_p (TREE_TYPE (type))))
3522 return true;
3523
3524 /* Now do type-specific checks. Note that the last part of the check above
3525 greatly limits what we have to do below. */
3526 switch (TREE_CODE (type))
3527 {
3528 case VOID_TYPE:
3529 case POINTER_BOUNDS_TYPE:
3530 case COMPLEX_TYPE:
3531 case ENUMERAL_TYPE:
3532 case BOOLEAN_TYPE:
3533 case POINTER_TYPE:
3534 case OFFSET_TYPE:
3535 case REFERENCE_TYPE:
3536 case METHOD_TYPE:
3537 case FUNCTION_TYPE:
3538 case VECTOR_TYPE:
3539 case NULLPTR_TYPE:
3540 return false;
3541
3542 case INTEGER_TYPE:
3543 case REAL_TYPE:
3544 case FIXED_POINT_TYPE:
3545 /* Here we just check the bounds. */
3546 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3547 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3548
3549 case ARRAY_TYPE:
3550 /* We have already checked the component type above, so just check the
3551 domain type. */
3552 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3553
3554 case RECORD_TYPE:
3555 case UNION_TYPE:
3556 case QUAL_UNION_TYPE:
3557 {
3558 tree field;
3559
3560 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3561 if (TREE_CODE (field) == FIELD_DECL
3562 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3563 || (TREE_CODE (type) == QUAL_UNION_TYPE
3564 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3565 || type_contains_placeholder_p (TREE_TYPE (field))))
3566 return true;
3567
3568 return false;
3569 }
3570
3571 default:
3572 gcc_unreachable ();
3573 }
3574 }
3575
3576 /* Wrapper around above function used to cache its result. */
3577
3578 bool
3579 type_contains_placeholder_p (tree type)
3580 {
3581 bool result;
3582
3583 /* If the contains_placeholder_bits field has been initialized,
3584 then we know the answer. */
3585 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3586 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3587
3588 /* Indicate that we've seen this type node, and the answer is false.
3589 This is what we want to return if we run into recursion via fields. */
3590 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3591
3592 /* Compute the real value. */
3593 result = type_contains_placeholder_1 (type);
3594
3595 /* Store the real value. */
3596 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3597
3598 return result;
3599 }
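
/* The cache above uses a small encoding (illustrative summary):
   TYPE_CONTAINS_PLACEHOLDER_INTERNAL is 0 while the question has never
   been asked, 1 once a query is in progress or has answered "false",
   and 2 for "true"; hence the "- 1" on the early return and the
   "result + 1" store.  */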
3600 \f
3601 /* Push tree EXP onto vector QUEUE if it is not already present. */
3602
3603 static void
3604 push_without_duplicates (tree exp, vec<tree> *queue)
3605 {
3606 unsigned int i;
3607 tree iter;
3608
3609 FOR_EACH_VEC_ELT (*queue, i, iter)
3610 if (simple_cst_equal (iter, exp) == 1)
3611 break;
3612
3613 if (!iter)
3614 queue->safe_push (exp);
3615 }
3616
3617 /* Given a tree EXP, find all occurrences of references to fields
3618 in a PLACEHOLDER_EXPR and place them in vector REFS without
3619 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3620 we assume here that EXP contains only arithmetic expressions
3621 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3622 argument list. */
3623
3624 void
3625 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3626 {
3627 enum tree_code code = TREE_CODE (exp);
3628 tree inner;
3629 int i;
3630
3631 /* We handle TREE_LIST and COMPONENT_REF separately. */
3632 if (code == TREE_LIST)
3633 {
3634 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3635 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3636 }
3637 else if (code == COMPONENT_REF)
3638 {
3639 for (inner = TREE_OPERAND (exp, 0);
3640 REFERENCE_CLASS_P (inner);
3641 inner = TREE_OPERAND (inner, 0))
3642 ;
3643
3644 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3645 push_without_duplicates (exp, refs);
3646 else
3647 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3648 }
3649 else
3650 switch (TREE_CODE_CLASS (code))
3651 {
3652 case tcc_constant:
3653 break;
3654
3655 case tcc_declaration:
3656 /* Variables allocated to static storage can stay. */
3657 if (!TREE_STATIC (exp))
3658 push_without_duplicates (exp, refs);
3659 break;
3660
3661 case tcc_expression:
3662 /* This is the pattern built in ada/make_aligning_type. */
3663 if (code == ADDR_EXPR
3664 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3665 {
3666 push_without_duplicates (exp, refs);
3667 break;
3668 }
3669
3670 /* Fall through... */
3671
3672 case tcc_exceptional:
3673 case tcc_unary:
3674 case tcc_binary:
3675 case tcc_comparison:
3676 case tcc_reference:
3677 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3678 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3679 break;
3680
3681 case tcc_vl_exp:
3682 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3683 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3684 break;
3685
3686 default:
3687 gcc_unreachable ();
3688 }
3689 }
3690
3691 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3692 return a tree with all occurrences of references to F in a
3693 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3694 CONST_DECLs. Note that we assume here that EXP contains only
3695 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3696 occurring only in their argument list. */
3697
3698 tree
3699 substitute_in_expr (tree exp, tree f, tree r)
3700 {
3701 enum tree_code code = TREE_CODE (exp);
3702 tree op0, op1, op2, op3;
3703 tree new_tree;
3704
3705 /* We handle TREE_LIST and COMPONENT_REF separately. */
3706 if (code == TREE_LIST)
3707 {
3708 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3709 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3710 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3711 return exp;
3712
3713 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3714 }
3715 else if (code == COMPONENT_REF)
3716 {
3717 tree inner;
3718
3719 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3720 and it is the right field, replace it with R. */
3721 for (inner = TREE_OPERAND (exp, 0);
3722 REFERENCE_CLASS_P (inner);
3723 inner = TREE_OPERAND (inner, 0))
3724 ;
3725
3726 /* The field. */
3727 op1 = TREE_OPERAND (exp, 1);
3728
3729 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3730 return r;
3731
3732 /* If this expression hasn't been completed yet, leave it alone. */
3733 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3734 return exp;
3735
3736 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3737 if (op0 == TREE_OPERAND (exp, 0))
3738 return exp;
3739
3740 new_tree
3741 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3742 }
3743 else
3744 switch (TREE_CODE_CLASS (code))
3745 {
3746 case tcc_constant:
3747 return exp;
3748
3749 case tcc_declaration:
3750 if (exp == f)
3751 return r;
3752 else
3753 return exp;
3754
3755 case tcc_expression:
3756 if (exp == f)
3757 return r;
3758
3759 /* Fall through... */
3760
3761 case tcc_exceptional:
3762 case tcc_unary:
3763 case tcc_binary:
3764 case tcc_comparison:
3765 case tcc_reference:
3766 switch (TREE_CODE_LENGTH (code))
3767 {
3768 case 0:
3769 return exp;
3770
3771 case 1:
3772 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3773 if (op0 == TREE_OPERAND (exp, 0))
3774 return exp;
3775
3776 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3777 break;
3778
3779 case 2:
3780 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3781 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3782
3783 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3784 return exp;
3785
3786 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3787 break;
3788
3789 case 3:
3790 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3791 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3792 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3793
3794 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3795 && op2 == TREE_OPERAND (exp, 2))
3796 return exp;
3797
3798 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3799 break;
3800
3801 case 4:
3802 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3803 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3804 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3805 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3806
3807 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3808 && op2 == TREE_OPERAND (exp, 2)
3809 && op3 == TREE_OPERAND (exp, 3))
3810 return exp;
3811
3812 new_tree
3813 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3814 break;
3815
3816 default:
3817 gcc_unreachable ();
3818 }
3819 break;
3820
3821 case tcc_vl_exp:
3822 {
3823 int i;
3824
3825 new_tree = NULL_TREE;
3826
3827 /* If we are trying to replace F with a constant, inline back
3828 functions which do nothing else than computing a value from
3829 the arguments they are passed. This makes it possible to
3830 fold partially or entirely the replacement expression. */
3831 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3832 {
3833 tree t = maybe_inline_call_in_expr (exp);
3834 if (t)
3835 return SUBSTITUTE_IN_EXPR (t, f, r);
3836 }
3837
3838 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3839 {
3840 tree op = TREE_OPERAND (exp, i);
3841 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3842 if (new_op != op)
3843 {
3844 if (!new_tree)
3845 new_tree = copy_node (exp);
3846 TREE_OPERAND (new_tree, i) = new_op;
3847 }
3848 }
3849
3850 if (new_tree)
3851 {
3852 new_tree = fold (new_tree);
3853 if (TREE_CODE (new_tree) == CALL_EXPR)
3854 process_call_operands (new_tree);
3855 }
3856 else
3857 return exp;
3858 }
3859 break;
3860
3861 default:
3862 gcc_unreachable ();
3863 }
3864
3865 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3866
3867 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3868 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3869
3870 return new_tree;
3871 }
3872
3873 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3874 for it within OBJ, a tree that is an object or a chain of references. */
3875
3876 tree
3877 substitute_placeholder_in_expr (tree exp, tree obj)
3878 {
3879 enum tree_code code = TREE_CODE (exp);
3880 tree op0, op1, op2, op3;
3881 tree new_tree;
3882
3883 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3884 in the chain of OBJ. */
3885 if (code == PLACEHOLDER_EXPR)
3886 {
3887 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3888 tree elt;
3889
3890 for (elt = obj; elt != 0;
3891 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3892 || TREE_CODE (elt) == COND_EXPR)
3893 ? TREE_OPERAND (elt, 1)
3894 : (REFERENCE_CLASS_P (elt)
3895 || UNARY_CLASS_P (elt)
3896 || BINARY_CLASS_P (elt)
3897 || VL_EXP_CLASS_P (elt)
3898 || EXPRESSION_CLASS_P (elt))
3899 ? TREE_OPERAND (elt, 0) : 0))
3900 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3901 return elt;
3902
3903 for (elt = obj; elt != 0;
3904 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3905 || TREE_CODE (elt) == COND_EXPR)
3906 ? TREE_OPERAND (elt, 1)
3907 : (REFERENCE_CLASS_P (elt)
3908 || UNARY_CLASS_P (elt)
3909 || BINARY_CLASS_P (elt)
3910 || VL_EXP_CLASS_P (elt)
3911 || EXPRESSION_CLASS_P (elt))
3912 ? TREE_OPERAND (elt, 0) : 0))
3913 if (POINTER_TYPE_P (TREE_TYPE (elt))
3914 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3915 == need_type))
3916 return fold_build1 (INDIRECT_REF, need_type, elt);
3917
3918 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3919 survives until RTL generation, there will be an error. */
3920 return exp;
3921 }
3922
3923 /* TREE_LIST is special because we need to look at TREE_VALUE
3924 and TREE_CHAIN, not TREE_OPERANDS. */
3925 else if (code == TREE_LIST)
3926 {
3927 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3928 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3929 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3930 return exp;
3931
3932 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3933 }
3934 else
3935 switch (TREE_CODE_CLASS (code))
3936 {
3937 case tcc_constant:
3938 case tcc_declaration:
3939 return exp;
3940
3941 case tcc_exceptional:
3942 case tcc_unary:
3943 case tcc_binary:
3944 case tcc_comparison:
3945 case tcc_expression:
3946 case tcc_reference:
3947 case tcc_statement:
3948 switch (TREE_CODE_LENGTH (code))
3949 {
3950 case 0:
3951 return exp;
3952
3953 case 1:
3954 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3955 if (op0 == TREE_OPERAND (exp, 0))
3956 return exp;
3957
3958 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3959 break;
3960
3961 case 2:
3962 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3963 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3964
3965 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3966 return exp;
3967
3968 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3969 break;
3970
3971 case 3:
3972 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3973 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3974 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3975
3976 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3977 && op2 == TREE_OPERAND (exp, 2))
3978 return exp;
3979
3980 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3981 break;
3982
3983 case 4:
3984 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3985 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3986 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3987 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3988
3989 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3990 && op2 == TREE_OPERAND (exp, 2)
3991 && op3 == TREE_OPERAND (exp, 3))
3992 return exp;
3993
3994 new_tree
3995 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3996 break;
3997
3998 default:
3999 gcc_unreachable ();
4000 }
4001 break;
4002
4003 case tcc_vl_exp:
4004 {
4005 int i;
4006
4007 new_tree = NULL_TREE;
4008
4009 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4010 {
4011 tree op = TREE_OPERAND (exp, i);
4012 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4013 if (new_op != op)
4014 {
4015 if (!new_tree)
4016 new_tree = copy_node (exp);
4017 TREE_OPERAND (new_tree, i) = new_op;
4018 }
4019 }
4020
4021 if (new_tree)
4022 {
4023 new_tree = fold (new_tree);
4024 if (TREE_CODE (new_tree) == CALL_EXPR)
4025 process_call_operands (new_tree);
4026 }
4027 else
4028 return exp;
4029 }
4030 break;
4031
4032 default:
4033 gcc_unreachable ();
4034 }
4035
4036 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4037
4038 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4039 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4040
4041 return new_tree;
4042 }
4043 \f
4044
4045 /* Subroutine of stabilize_reference; this is called for subtrees of
4046 references. Any expression with side-effects must be put in a SAVE_EXPR
4047 to ensure that it is only evaluated once.
4048
4049 We don't put SAVE_EXPR nodes around everything, because assigning very
4050 simple expressions to temporaries causes us to miss good opportunities
4051 for optimizations. Among other things, the opportunity to fold in the
4052 addition of a constant into an addressing mode often gets lost, e.g.
4053 "y[i+1] += x;". In general, we take the approach that we should not make
4054 an assignment unless we are forced into it - i.e., that any non-side effect
4055 operator should be allowed, and that cse should take care of coalescing
4056 multiple utterances of the same expression should that prove fruitful. */
4057
4058 static tree
4059 stabilize_reference_1 (tree e)
4060 {
4061 tree result;
4062 enum tree_code code = TREE_CODE (e);
4063
4064 /* We cannot ignore const expressions because they might be references
4065 to a const array whose index contains side-effects. But we can
4066 ignore things that are actual constants or that have already been
4067 handled by this function. */
4068
4069 if (tree_invariant_p (e))
4070 return e;
4071
4072 switch (TREE_CODE_CLASS (code))
4073 {
4074 case tcc_exceptional:
4075 case tcc_type:
4076 case tcc_declaration:
4077 case tcc_comparison:
4078 case tcc_statement:
4079 case tcc_expression:
4080 case tcc_reference:
4081 case tcc_vl_exp:
4082 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4083 so that it will only be evaluated once. */
4084 /* The reference (r) and comparison (<) classes could be handled as
4085 below, but it is generally faster to only evaluate them once. */
4086 if (TREE_SIDE_EFFECTS (e))
4087 return save_expr (e);
4088 return e;
4089
4090 case tcc_constant:
4091 /* Constants need no processing. In fact, we should never reach
4092 here. */
4093 return e;
4094
4095 case tcc_binary:
4096 /* Division is slow and tends to be compiled with jumps,
4097 especially the division by powers of 2 that is often
4098 found inside of an array reference. So do it just once. */
4099 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4100 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4101 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4102 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4103 return save_expr (e);
4104 /* Recursively stabilize each operand. */
4105 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4106 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4107 break;
4108
4109 case tcc_unary:
4110 /* Recursively stabilize each operand. */
4111 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4112 break;
4113
4114 default:
4115 gcc_unreachable ();
4116 }
4117
4118 TREE_TYPE (result) = TREE_TYPE (e);
4119 TREE_READONLY (result) = TREE_READONLY (e);
4120 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4121 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4122
4123 return result;
4124 }
4125
4126 /* Stabilize a reference so that we can use it any number of times
4127 without causing its operands to be evaluated more than once.
4128 Returns the stabilized reference. This works by means of save_expr,
4129 so see the caveats in the comments about save_expr.
4130
4131 Also allows conversion expressions whose operands are references.
4132 Any other kind of expression is returned unchanged. */
4133
4134 tree
4135 stabilize_reference (tree ref)
4136 {
4137 tree result;
4138 enum tree_code code = TREE_CODE (ref);
4139
4140 switch (code)
4141 {
4142 case VAR_DECL:
4143 case PARM_DECL:
4144 case RESULT_DECL:
4145 /* No action is needed in this case. */
4146 return ref;
4147
4148 CASE_CONVERT:
4149 case FLOAT_EXPR:
4150 case FIX_TRUNC_EXPR:
4151 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4152 break;
4153
4154 case INDIRECT_REF:
4155 result = build_nt (INDIRECT_REF,
4156 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4157 break;
4158
4159 case COMPONENT_REF:
4160 result = build_nt (COMPONENT_REF,
4161 stabilize_reference (TREE_OPERAND (ref, 0)),
4162 TREE_OPERAND (ref, 1), NULL_TREE);
4163 break;
4164
4165 case BIT_FIELD_REF:
4166 result = build_nt (BIT_FIELD_REF,
4167 stabilize_reference (TREE_OPERAND (ref, 0)),
4168 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4169 break;
4170
4171 case ARRAY_REF:
4172 result = build_nt (ARRAY_REF,
4173 stabilize_reference (TREE_OPERAND (ref, 0)),
4174 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4175 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4176 break;
4177
4178 case ARRAY_RANGE_REF:
4179 result = build_nt (ARRAY_RANGE_REF,
4180 stabilize_reference (TREE_OPERAND (ref, 0)),
4181 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4182 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4183 break;
4184
4185 case COMPOUND_EXPR:
4186 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4187 it wouldn't be ignored. This matters when dealing with
4188 volatiles. */
4189 return stabilize_reference_1 (ref);
4190
4191 /* If arg isn't a kind of lvalue we recognize, make no change.
4192 Caller should recognize the error for an invalid lvalue. */
4193 default:
4194 return ref;
4195
4196 case ERROR_MARK:
4197 return error_mark_node;
4198 }
4199
4200 TREE_TYPE (result) = TREE_TYPE (ref);
4201 TREE_READONLY (result) = TREE_READONLY (ref);
4202 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4203 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4204
4205 return result;
4206 }
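
/* A minimal usage sketch (kept out of the build with #if 0) of how a
   front end might apply stabilize_reference when expanding a compound
   assignment such as "a[i++] += x": the reference is stabilized once and
   then reused for both the read and the write, so the side-effecting
   index is evaluated only once.  The names `ref' and `value' are
   placeholders for this illustration.  */
#if 0
static tree
example_expand_compound_add (tree ref, tree value)
{
  /* Wrap side-effecting subexpressions of REF in SAVE_EXPRs.  */
  tree stable = stabilize_reference (ref);
  /* Both uses of STABLE below share those SAVE_EXPRs.  */
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (stable), stable, value);
  return build2 (MODIFY_EXPR, TREE_TYPE (stable), stable, sum);
}
#endif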
4207 \f
4208 /* Low-level constructors for expressions. */
4209
4210 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
4211 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4212
4213 void
4214 recompute_tree_invariant_for_addr_expr (tree t)
4215 {
4216 tree node;
4217 bool tc = true, se = false;
4218
4219 /* We started out assuming this address is both invariant and constant, and
4220 that it does not have side effects. Now go down any handled components and see if
4221 any of them involve offsets that are either non-constant or non-invariant.
4222 Also check for side-effects.
4223
4224 ??? Note that this code makes no attempt to deal with the case where
4225 taking the address of something causes a copy due to misalignment. */
4226
4227 #define UPDATE_FLAGS(NODE) \
4228 do { tree _node = (NODE); \
4229 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4230 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4231
4232 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4233 node = TREE_OPERAND (node, 0))
4234 {
4235 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4236 array reference (probably made temporarily by the G++ front end),
4237 so ignore all the operands. */
4238 if ((TREE_CODE (node) == ARRAY_REF
4239 || TREE_CODE (node) == ARRAY_RANGE_REF)
4240 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4241 {
4242 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4243 if (TREE_OPERAND (node, 2))
4244 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4245 if (TREE_OPERAND (node, 3))
4246 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4247 }
4248 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4249 FIELD_DECL, apparently. The G++ front end can put something else
4250 there, at least temporarily. */
4251 else if (TREE_CODE (node) == COMPONENT_REF
4252 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4253 {
4254 if (TREE_OPERAND (node, 2))
4255 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4256 }
4257 }
4258
4259 node = lang_hooks.expr_to_decl (node, &tc, &se);
4260
4261 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4262 the address, since &(*a)->b is a form of addition. If it's a constant, the
4263 address is constant too. If it's a decl, its address is constant if the
4264 decl is static. Everything else is not constant and, furthermore,
4265 taking the address of a volatile variable is not volatile. */
4266 if (TREE_CODE (node) == INDIRECT_REF
4267 || TREE_CODE (node) == MEM_REF)
4268 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4269 else if (CONSTANT_CLASS_P (node))
4270 ;
4271 else if (DECL_P (node))
4272 tc &= (staticp (node) != NULL_TREE);
4273 else
4274 {
4275 tc = false;
4276 se |= TREE_SIDE_EFFECTS (node);
4277 }
4278
4279
4280 TREE_CONSTANT (t) = tc;
4281 TREE_SIDE_EFFECTS (t) = se;
4282 #undef UPDATE_FLAGS
4283 }
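
/* A minimal usage sketch (kept out of the build with #if 0): taking the
   address of a reference and refreshing the flags explicitly.  `ref' is a
   placeholder for some addressable reference tree.  */
#if 0
static tree
example_take_address (tree ref)
{
  tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (ref)), ref);
  /* Recompute TREE_CONSTANT and TREE_SIDE_EFFECTS on the ADDR_EXPR;
     redundant right after build1, which already does this for ADDR_EXPR,
     but needed after editing the operand in place.  */
  recompute_tree_invariant_for_addr_expr (addr);
  return addr;
}
#endif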
4284
4285 /* Build an expression of code CODE, data type TYPE, and operands as
4286 specified. Expressions and reference nodes can be created this way.
4287 Constants, decls, types and misc nodes cannot be.
4288
4289 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4290 enough for all extant tree codes. */
4291
4292 tree
4293 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4294 {
4295 tree t;
4296
4297 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4298
4299 t = make_node_stat (code PASS_MEM_STAT);
4300 TREE_TYPE (t) = tt;
4301
4302 return t;
4303 }
4304
4305 tree
4306 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4307 {
4308 int length = sizeof (struct tree_exp);
4309 tree t;
4310
4311 record_node_allocation_statistics (code, length);
4312
4313 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4314
4315 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4316
4317 memset (t, 0, sizeof (struct tree_common));
4318
4319 TREE_SET_CODE (t, code);
4320
4321 TREE_TYPE (t) = type;
4322 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4323 TREE_OPERAND (t, 0) = node;
4324 if (node && !TYPE_P (node))
4325 {
4326 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4327 TREE_READONLY (t) = TREE_READONLY (node);
4328 }
4329
4330 if (TREE_CODE_CLASS (code) == tcc_statement)
4331 TREE_SIDE_EFFECTS (t) = 1;
4332 else switch (code)
4333 {
4334 case VA_ARG_EXPR:
4335 /* A VA_ARG_EXPR has side effects, no matter what its
4336 operand is. */
4337 TREE_SIDE_EFFECTS (t) = 1;
4338 TREE_READONLY (t) = 0;
4339 break;
4340
4341 case INDIRECT_REF:
4342 /* Whether a dereference is readonly has nothing to do with whether
4343 its operand is readonly. */
4344 TREE_READONLY (t) = 0;
4345 break;
4346
4347 case ADDR_EXPR:
4348 if (node)
4349 recompute_tree_invariant_for_addr_expr (t);
4350 break;
4351
4352 default:
4353 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4354 && node && !TYPE_P (node)
4355 && TREE_CONSTANT (node))
4356 TREE_CONSTANT (t) = 1;
4357 if (TREE_CODE_CLASS (code) == tcc_reference
4358 && node && TREE_THIS_VOLATILE (node))
4359 TREE_THIS_VOLATILE (t) = 1;
4360 break;
4361 }
4362
4363 return t;
4364 }
4365
4366 #define PROCESS_ARG(N) \
4367 do { \
4368 TREE_OPERAND (t, N) = arg##N; \
4369 if (arg##N &&!TYPE_P (arg##N)) \
4370 { \
4371 if (TREE_SIDE_EFFECTS (arg##N)) \
4372 side_effects = 1; \
4373 if (!TREE_READONLY (arg##N) \
4374 && !CONSTANT_CLASS_P (arg##N)) \
4375 (void) (read_only = 0); \
4376 if (!TREE_CONSTANT (arg##N)) \
4377 (void) (constant = 0); \
4378 } \
4379 } while (0)
4380
4381 tree
4382 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4383 {
4384 bool constant, read_only, side_effects;
4385 tree t;
4386
4387 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4388
4389 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4390 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4391 /* When sizetype precision doesn't match that of pointers
4392 we need to be able to build explicit extensions or truncations
4393 of the offset argument. */
4394 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4395 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4396 && TREE_CODE (arg1) == INTEGER_CST);
4397
4398 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4399 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4400 && ptrofftype_p (TREE_TYPE (arg1)));
4401
4402 t = make_node_stat (code PASS_MEM_STAT);
4403 TREE_TYPE (t) = tt;
4404
4405 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4406 result based on those same flags for the arguments. But if the
4407 arguments aren't really even `tree' expressions, we shouldn't be trying
4408 to do this. */
4409
4410 /* Expressions without side effects may be constant if their
4411 arguments are as well. */
4412 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4413 || TREE_CODE_CLASS (code) == tcc_binary);
4414 read_only = 1;
4415 side_effects = TREE_SIDE_EFFECTS (t);
4416
4417 PROCESS_ARG (0);
4418 PROCESS_ARG (1);
4419
4420 TREE_SIDE_EFFECTS (t) = side_effects;
4421 if (code == MEM_REF)
4422 {
4423 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4424 {
4425 tree o = TREE_OPERAND (arg0, 0);
4426 TREE_READONLY (t) = TREE_READONLY (o);
4427 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4428 }
4429 }
4430 else
4431 {
4432 TREE_READONLY (t) = read_only;
4433 TREE_CONSTANT (t) = constant;
4434 TREE_THIS_VOLATILE (t)
4435 = (TREE_CODE_CLASS (code) == tcc_reference
4436 && arg0 && TREE_THIS_VOLATILE (arg0));
4437 }
4438
4439 return t;
4440 }
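
/* A minimal usage sketch (kept out of the build with #if 0) of building a
   POINTER_PLUS_EXPR with build2 while respecting the invariants asserted
   above: the result and the first operand have pointer type and the byte
   offset has sizetype.  `ptr' is a placeholder for a tree of pointer type.  */
#if 0
static tree
example_pointer_plus_four (tree ptr)
{
  tree off = size_int (4);	/* byte offset, of sizetype */
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
}
#endif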
4441
4442
4443 tree
4444 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4445 tree arg2 MEM_STAT_DECL)
4446 {
4447 bool constant, read_only, side_effects;
4448 tree t;
4449
4450 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4451 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4452
4453 t = make_node_stat (code PASS_MEM_STAT);
4454 TREE_TYPE (t) = tt;
4455
4456 read_only = 1;
4457
4458 /* As a special exception, if COND_EXPR has NULL branches, we
4459 assume that it is a gimple statement and always consider
4460 it to have side effects. */
4461 if (code == COND_EXPR
4462 && tt == void_type_node
4463 && arg1 == NULL_TREE
4464 && arg2 == NULL_TREE)
4465 side_effects = true;
4466 else
4467 side_effects = TREE_SIDE_EFFECTS (t);
4468
4469 PROCESS_ARG (0);
4470 PROCESS_ARG (1);
4471 PROCESS_ARG (2);
4472
4473 if (code == COND_EXPR)
4474 TREE_READONLY (t) = read_only;
4475
4476 TREE_SIDE_EFFECTS (t) = side_effects;
4477 TREE_THIS_VOLATILE (t)
4478 = (TREE_CODE_CLASS (code) == tcc_reference
4479 && arg0 && TREE_THIS_VOLATILE (arg0));
4480
4481 return t;
4482 }
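
/* A minimal usage sketch (kept out of the build with #if 0) of build3
   creating a value-producing COND_EXPR "pred ? a : b".  `pred', `a' and
   `b' are placeholders; A and B are assumed to have the same type.  */
#if 0
static tree
example_cond (tree pred, tree a, tree b)
{
  return build3 (COND_EXPR, TREE_TYPE (a), pred, a, b);
}
#endif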
4483
4484 tree
4485 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4486 tree arg2, tree arg3 MEM_STAT_DECL)
4487 {
4488 bool constant, read_only, side_effects;
4489 tree t;
4490
4491 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4492
4493 t = make_node_stat (code PASS_MEM_STAT);
4494 TREE_TYPE (t) = tt;
4495
4496 side_effects = TREE_SIDE_EFFECTS (t);
4497
4498 PROCESS_ARG (0);
4499 PROCESS_ARG (1);
4500 PROCESS_ARG (2);
4501 PROCESS_ARG (3);
4502
4503 TREE_SIDE_EFFECTS (t) = side_effects;
4504 TREE_THIS_VOLATILE (t)
4505 = (TREE_CODE_CLASS (code) == tcc_reference
4506 && arg0 && TREE_THIS_VOLATILE (arg0));
4507
4508 return t;
4509 }
4510
4511 tree
4512 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4513 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4514 {
4515 bool constant, read_only, side_effects;
4516 tree t;
4517
4518 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4519
4520 t = make_node_stat (code PASS_MEM_STAT);
4521 TREE_TYPE (t) = tt;
4522
4523 side_effects = TREE_SIDE_EFFECTS (t);
4524
4525 PROCESS_ARG (0);
4526 PROCESS_ARG (1);
4527 PROCESS_ARG (2);
4528 PROCESS_ARG (3);
4529 PROCESS_ARG (4);
4530
4531 TREE_SIDE_EFFECTS (t) = side_effects;
4532 if (code == TARGET_MEM_REF)
4533 {
4534 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4535 {
4536 tree o = TREE_OPERAND (arg0, 0);
4537 TREE_READONLY (t) = TREE_READONLY (o);
4538 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4539 }
4540 }
4541 else
4542 TREE_THIS_VOLATILE (t)
4543 = (TREE_CODE_CLASS (code) == tcc_reference
4544 && arg0 && TREE_THIS_VOLATILE (arg0));
4545
4546 return t;
4547 }
4548
4549 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4550 on the pointer PTR. */
4551
4552 tree
4553 build_simple_mem_ref_loc (location_t loc, tree ptr)
4554 {
4555 HOST_WIDE_INT offset = 0;
4556 tree ptype = TREE_TYPE (ptr);
4557 tree tem;
4558 /* For convenience allow addresses that collapse to a simple base
4559 and offset. */
4560 if (TREE_CODE (ptr) == ADDR_EXPR
4561 && (handled_component_p (TREE_OPERAND (ptr, 0))
4562 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4563 {
4564 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4565 gcc_assert (ptr);
4566 ptr = build_fold_addr_expr (ptr);
4567 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4568 }
4569 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4570 ptr, build_int_cst (ptype, offset));
4571 SET_EXPR_LOCATION (tem, loc);
4572 return tem;
4573 }
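
/* A minimal usage sketch (kept out of the build with #if 0): dereference a
   pointer as a MEM_REF carrying a source location.  `loc' and `ptr' are
   placeholders.  Since the offset operand of a MEM_REF built this way is a
   constant, mem_ref_offset (below) can read it back as an offset_int.  */
#if 0
static tree
example_deref (location_t loc, tree ptr)
{
  return build_simple_mem_ref_loc (loc, ptr);
}
#endif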
4574
4575 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4576
4577 offset_int
4578 mem_ref_offset (const_tree t)
4579 {
4580 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4581 }
4582
4583 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4584 offsetted by OFFSET units. */
4585
4586 tree
4587 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4588 {
4589 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4590 build_fold_addr_expr (base),
4591 build_int_cst (ptr_type_node, offset));
4592 tree addr = build1 (ADDR_EXPR, type, ref);
4593 recompute_tree_invariant_for_addr_expr (addr);
4594 return addr;
4595 }
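
/* A minimal usage sketch (kept out of the build with #if 0) of
   build_invariant_address forming the invariant address "&base + 16
   bytes", where `base' stands for some aggregate declaration whose
   address is constant.  */
#if 0
static tree
example_invariant_address (tree base)
{
  tree ptype = build_pointer_type (TREE_TYPE (base));
  return build_invariant_address (ptype, base, 16);
}
#endif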
4596
4597 /* Like the buildN functions, except don't specify the TREE_TYPE
4598 and leave the TREE_SIDE_EFFECTS as 0.
4599 It is permissible for arguments to be null,
4600 or even garbage if their values do not matter. */
4601
4602 tree
4603 build_nt (enum tree_code code, ...)
4604 {
4605 tree t;
4606 int length;
4607 int i;
4608 va_list p;
4609
4610 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4611
4612 va_start (p, code);
4613
4614 t = make_node (code);
4615 length = TREE_CODE_LENGTH (code);
4616
4617 for (i = 0; i < length; i++)
4618 TREE_OPERAND (t, i) = va_arg (p, tree);
4619
4620 va_end (p);
4621 return t;
4622 }
4623
4624 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4625 tree vec. */
4626
4627 tree
4628 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4629 {
4630 tree ret, t;
4631 unsigned int ix;
4632
4633 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4634 CALL_EXPR_FN (ret) = fn;
4635 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4636 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4637 CALL_EXPR_ARG (ret, ix) = t;
4638 return ret;
4639 }
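
/* A minimal usage sketch (kept out of the build with #if 0) of
   build_nt_call_vec: collect the arguments in a GC'd vector and build the
   CALL_EXPR without folding.  `fn' and `arg' are placeholders.  */
#if 0
static tree
example_build_call (tree fn, tree arg)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, arg);
  return build_nt_call_vec (fn, args);
}
#endif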
4640 \f
4641 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4642 We do NOT enter this node in any sort of symbol table.
4643
4644 LOC is the location of the decl.
4645
4646 layout_decl is used to set up the decl's storage layout.
4647 Other slots are initialized to 0 or null pointers. */
4648
4649 tree
4650 build_decl_stat (location_t loc, enum tree_code code, tree name,
4651 tree type MEM_STAT_DECL)
4652 {
4653 tree t;
4654
4655 t = make_node_stat (code PASS_MEM_STAT);
4656 DECL_SOURCE_LOCATION (t) = loc;
4657
4658 /* if (type == error_mark_node)
4659 type = integer_type_node; */
4660 /* That is not done, deliberately, so that having error_mark_node
4661 as the type can suppress useless errors in the use of this variable. */
4662
4663 DECL_NAME (t) = name;
4664 TREE_TYPE (t) = type;
4665
4666 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4667 layout_decl (t, 0);
4668
4669 return t;
4670 }
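
/* A minimal usage sketch (kept out of the build with #if 0) of build_decl
   creating an artificial integer variable; the name "__example_tmp" is
   made up for the illustration.  */
#if 0
static tree
example_make_var (void)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("__example_tmp"), integer_type_node);
  DECL_ARTIFICIAL (var) = 1;
  return var;
}
#endif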
4671
4672 /* Builds and returns function declaration with NAME and TYPE. */
4673
4674 tree
4675 build_fn_decl (const char *name, tree type)
4676 {
4677 tree id = get_identifier (name);
4678 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4679
4680 DECL_EXTERNAL (decl) = 1;
4681 TREE_PUBLIC (decl) = 1;
4682 DECL_ARTIFICIAL (decl) = 1;
4683 TREE_NOTHROW (decl) = 1;
4684
4685 return decl;
4686 }
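
/* A minimal usage sketch (kept out of the build with #if 0) of build_fn_decl
   declaring an external helper; the helper name and signature below are
   made up for the example.  */
#if 0
static tree
example_declare_helper (void)
{
  tree fntype = build_function_type_list (void_type_node, ptr_type_node,
					  NULL_TREE);
  return build_fn_decl ("__example_helper", fntype);
}
#endif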
4687
4688 vec<tree, va_gc> *all_translation_units;
4689
4690 /* Builds a new translation-unit decl with name NAME, queues it in the
4691 global list of translation-unit decls and returns it. */
4692
4693 tree
4694 build_translation_unit_decl (tree name)
4695 {
4696 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4697 name, NULL_TREE);
4698 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4699 vec_safe_push (all_translation_units, tu);
4700 return tu;
4701 }
4702
4703 \f
4704 /* BLOCK nodes are used to represent the structure of binding contours
4705 and declarations, once those contours have been exited and their contents
4706 compiled. This information is used for outputting debugging info. */
4707
4708 tree
4709 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4710 {
4711 tree block = make_node (BLOCK);
4712
4713 BLOCK_VARS (block) = vars;
4714 BLOCK_SUBBLOCKS (block) = subblocks;
4715 BLOCK_SUPERCONTEXT (block) = supercontext;
4716 BLOCK_CHAIN (block) = chain;
4717 return block;
4718 }
4719
4720 \f
4721 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4722
4723 LOC is the location to use in tree T. */
4724
4725 void
4726 protected_set_expr_location (tree t, location_t loc)
4727 {
4728 if (CAN_HAVE_LOCATION_P (t))
4729 SET_EXPR_LOCATION (t, loc);
4730 }
4731 \f
4732 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4733 is ATTRIBUTE. */
4734
4735 tree
4736 build_decl_attribute_variant (tree ddecl, tree attribute)
4737 {
4738 DECL_ATTRIBUTES (ddecl) = attribute;
4739 return ddecl;
4740 }
4741
4742 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4743 is ATTRIBUTE and its qualifiers are QUALS.
4744
4745 Record such modified types already made so we don't make duplicates. */
4746
4747 tree
4748 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4749 {
4750 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4751 {
4752 inchash::hash hstate;
4753 tree ntype;
4754 int i;
4755 tree t;
4756 enum tree_code code = TREE_CODE (ttype);
4757
4758 /* Building a distinct copy of a tagged type is inappropriate; it
4759 causes breakage in code that expects there to be a one-to-one
4760 relationship between a struct and its fields.
4761 build_duplicate_type is another solution (as used in
4762 handle_transparent_union_attribute), but that doesn't play well
4763 with the stronger C++ type identity model. */
4764 if (TREE_CODE (ttype) == RECORD_TYPE
4765 || TREE_CODE (ttype) == UNION_TYPE
4766 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4767 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4768 {
4769 warning (OPT_Wattributes,
4770 "ignoring attributes applied to %qT after definition",
4771 TYPE_MAIN_VARIANT (ttype));
4772 return build_qualified_type (ttype, quals);
4773 }
4774
4775 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4776 ntype = build_distinct_type_copy (ttype);
4777
4778 TYPE_ATTRIBUTES (ntype) = attribute;
4779
4780 hstate.add_int (code);
4781 if (TREE_TYPE (ntype))
4782 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4783 attribute_hash_list (attribute, hstate);
4784
4785 switch (TREE_CODE (ntype))
4786 {
4787 case FUNCTION_TYPE:
4788 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4789 break;
4790 case ARRAY_TYPE:
4791 if (TYPE_DOMAIN (ntype))
4792 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4793 break;
4794 case INTEGER_TYPE:
4795 t = TYPE_MAX_VALUE (ntype);
4796 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4797 hstate.add_object (TREE_INT_CST_ELT (t, i));
4798 break;
4799 case REAL_TYPE:
4800 case FIXED_POINT_TYPE:
4801 {
4802 unsigned int precision = TYPE_PRECISION (ntype);
4803 hstate.add_object (precision);
4804 }
4805 break;
4806 default:
4807 break;
4808 }
4809
4810 ntype = type_hash_canon (hstate.end(), ntype);
4811
4812 /* If the target-dependent attributes make NTYPE different from
4813 its canonical type, we will need to use structural equality
4814 checks for this type. */
4815 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4816 || !comp_type_attributes (ntype, ttype))
4817 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4818 else if (TYPE_CANONICAL (ntype) == ntype)
4819 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4820
4821 ttype = build_qualified_type (ntype, quals);
4822 }
4823 else if (TYPE_QUALS (ttype) != quals)
4824 ttype = build_qualified_type (ttype, quals);
4825
4826 return ttype;
4827 }
4828
4829 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4830 the same. */
4831
4832 static bool
4833 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4834 {
4835 tree cl1, cl2;
4836 for (cl1 = clauses1, cl2 = clauses2;
4837 cl1 && cl2;
4838 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4839 {
4840 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4841 return false;
4842 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4843 {
4844 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4845 OMP_CLAUSE_DECL (cl2)) != 1)
4846 return false;
4847 }
4848 switch (OMP_CLAUSE_CODE (cl1))
4849 {
4850 case OMP_CLAUSE_ALIGNED:
4851 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4852 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4853 return false;
4854 break;
4855 case OMP_CLAUSE_LINEAR:
4856 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4857 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4858 return false;
4859 break;
4860 case OMP_CLAUSE_SIMDLEN:
4861 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4862 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4863 return false;
4864 default:
4865 break;
4866 }
4867 }
4868 return true;
4869 }
4870
4871 /* Compare two constructor-element-type constants. Return true if the lists
4872 are known to be equal; otherwise return false. */
4873
4874 static bool
4875 simple_cst_list_equal (const_tree l1, const_tree l2)
4876 {
4877 while (l1 != NULL_TREE && l2 != NULL_TREE)
4878 {
4879 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4880 return false;
4881
4882 l1 = TREE_CHAIN (l1);
4883 l2 = TREE_CHAIN (l2);
4884 }
4885
4886 return l1 == l2;
4887 }
4888
4889 /* Compare two identifier nodes representing attributes. Either one may
4890 be in wrapped __ATTR__ form. Return true if they are the same, false
4891 otherwise. */
4892
4893 static bool
4894 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4895 {
4896 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4897 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4898 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4899
4900 /* Identifiers can be compared directly for equality. */
4901 if (attr1 == attr2)
4902 return true;
4903
4904 /* If they are not equal, they may still be one in the form
4905 'text' while the other one is in the form '__text__'. TODO:
4906 If we were storing attributes in normalized 'text' form, then
4907 this could all go away and we could take full advantage of
4908 the fact that we're comparing identifiers. :-) */
4909 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4910 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4911
4912 if (attr2_len == attr1_len + 4)
4913 {
4914 const char *p = IDENTIFIER_POINTER (attr2);
4915 const char *q = IDENTIFIER_POINTER (attr1);
4916 if (p[0] == '_' && p[1] == '_'
4917 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4918 && strncmp (q, p + 2, attr1_len) == 0)
4919 return true;
4920 }
4921 else if (attr2_len + 4 == attr1_len)
4922 {
4923 const char *p = IDENTIFIER_POINTER (attr2);
4924 const char *q = IDENTIFIER_POINTER (attr1);
4925 if (q[0] == '_' && q[1] == '_'
4926 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4927 && strncmp (q + 2, p, attr2_len) == 0)
4928 return true;
4929 }
4930
4931 return false;
4932 }
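
/* A minimal usage sketch (kept out of the build with #if 0): the plain and
   the underscore-wrapped spelling of an attribute name compare equal.  */
#if 0
static bool
example_attr_names_match (void)
{
  return cmp_attrib_identifiers (get_identifier ("format"),
				 get_identifier ("__format__"));
}
#endif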
4933
4934 /* Compare two attributes for their value identity. Return true if the
4935 attribute values are known to be equal; otherwise return false. */
4936
4937 bool
4938 attribute_value_equal (const_tree attr1, const_tree attr2)
4939 {
4940 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4941 return true;
4942
4943 if (TREE_VALUE (attr1) != NULL_TREE
4944 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4945 && TREE_VALUE (attr2) != NULL_TREE
4946 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4947 {
4948 /* Handle attribute format. */
4949 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4950 {
4951 attr1 = TREE_VALUE (attr1);
4952 attr2 = TREE_VALUE (attr2);
4953 /* Compare the archetypes (printf/scanf/strftime/...). */
4954 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4955 TREE_VALUE (attr2)))
4956 return false;
4957 /* Archetypes are the same. Compare the rest. */
4958 return (simple_cst_list_equal (TREE_CHAIN (attr1),
4959 TREE_CHAIN (attr2)) == 1);
4960 }
4961 return (simple_cst_list_equal (TREE_VALUE (attr1),
4962 TREE_VALUE (attr2)) == 1);
4963 }
4964
4965 if ((flag_openmp || flag_openmp_simd)
4966 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4967 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4968 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4969 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4970 TREE_VALUE (attr2));
4971
4972 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4973 }
4974
4975 /* Return 0 if the attributes for two types are incompatible, 1 if they
4976 are compatible, and 2 if they are nearly compatible (which causes a
4977 warning to be generated). */
4978 int
4979 comp_type_attributes (const_tree type1, const_tree type2)
4980 {
4981 const_tree a1 = TYPE_ATTRIBUTES (type1);
4982 const_tree a2 = TYPE_ATTRIBUTES (type2);
4983 const_tree a;
4984
4985 if (a1 == a2)
4986 return 1;
4987 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4988 {
4989 const struct attribute_spec *as;
4990 const_tree attr;
4991
4992 as = lookup_attribute_spec (get_attribute_name (a));
4993 if (!as || as->affects_type_identity == false)
4994 continue;
4995
4996 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4997 if (!attr || !attribute_value_equal (a, attr))
4998 break;
4999 }
5000 if (!a)
5001 {
5002 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5003 {
5004 const struct attribute_spec *as;
5005
5006 as = lookup_attribute_spec (get_attribute_name (a));
5007 if (!as || as->affects_type_identity == false)
5008 continue;
5009
5010 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5011 break;
5012 /* We don't need to compare trees again, as we did this
5013 already in the first loop. */
5014 }
5015 /* All identity-affecting types are equal, so
5016 there is no need to call the target hook for comparison. */
5017 if (!a)
5018 return 1;
5019 }
5020 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5021 return 0;
5022 /* As some type combinations, like the default calling convention, might
5023 be compatible, we have to call the target hook to get the final result. */
5024 return targetm.comp_type_attributes (type1, type2);
5025 }
5026
5027 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
5028 is ATTRIBUTE.
5029
5030 Record such modified types already made so we don't make duplicates. */
5031
5032 tree
5033 build_type_attribute_variant (tree ttype, tree attribute)
5034 {
5035 return build_type_attribute_qual_variant (ttype, attribute,
5036 TYPE_QUALS (ttype));
5037 }
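
/* A minimal usage sketch (kept out of the build with #if 0): add a
   "may_alias" attribute to a type, reusing a previously recorded variant
   if one exists.  `type' is a placeholder for any non-tagged type.  */
#if 0
static tree
example_add_may_alias (tree type)
{
  tree attr = tree_cons (get_identifier ("may_alias"), NULL_TREE,
			 TYPE_ATTRIBUTES (type));
  return build_type_attribute_variant (type, attr);
}
#endif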
5038
5039
5040 /* Reset the expression *EXPR_P, a size or position.
5041
5042 ??? We could reset all non-constant sizes or positions. But it's cheap
5043 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5044
5045 We need to reset self-referential sizes or positions because they cannot
5046 be gimplified and thus can contain a CALL_EXPR after the gimplification
5047 is finished, which will run afoul of LTO streaming. And they need to be
5048 reset to something essentially dummy but not constant, so as to preserve
5049 the properties of the object they are attached to. */
5050
5051 static inline void
5052 free_lang_data_in_one_sizepos (tree *expr_p)
5053 {
5054 tree expr = *expr_p;
5055 if (CONTAINS_PLACEHOLDER_P (expr))
5056 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5057 }
5058
5059
5060 /* Reset all the fields in a binfo node BINFO. We only keep
5061 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5062
5063 static void
5064 free_lang_data_in_binfo (tree binfo)
5065 {
5066 unsigned i;
5067 tree t;
5068
5069 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5070
5071 BINFO_VIRTUALS (binfo) = NULL_TREE;
5072 BINFO_BASE_ACCESSES (binfo) = NULL;
5073 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5074 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5075
5076 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5077 free_lang_data_in_binfo (t);
5078 }
5079
5080
5081 /* Reset all language specific information still present in TYPE. */
5082
5083 static void
5084 free_lang_data_in_type (tree type)
5085 {
5086 gcc_assert (TYPE_P (type));
5087
5088 /* Give the FE a chance to remove its own data first. */
5089 lang_hooks.free_lang_data (type);
5090
5091 TREE_LANG_FLAG_0 (type) = 0;
5092 TREE_LANG_FLAG_1 (type) = 0;
5093 TREE_LANG_FLAG_2 (type) = 0;
5094 TREE_LANG_FLAG_3 (type) = 0;
5095 TREE_LANG_FLAG_4 (type) = 0;
5096 TREE_LANG_FLAG_5 (type) = 0;
5097 TREE_LANG_FLAG_6 (type) = 0;
5098
5099 if (TREE_CODE (type) == FUNCTION_TYPE)
5100 {
5101 /* Remove the const and volatile qualifiers from arguments. The
5102 C++ front end removes them, but the C front end does not,
5103 leading to false ODR violation errors when merging two
5104 instances of the same function signature compiled by
5105 different front ends. */
5106 tree p;
5107
5108 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5109 {
5110 tree arg_type = TREE_VALUE (p);
5111
5112 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5113 {
5114 int quals = TYPE_QUALS (arg_type)
5115 & ~TYPE_QUAL_CONST
5116 & ~TYPE_QUAL_VOLATILE;
5117 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5118 free_lang_data_in_type (TREE_VALUE (p));
5119 }
5120 /* C++ FE uses TREE_PURPOSE to store initial values. */
5121 TREE_PURPOSE (p) = NULL;
5122 }
5123 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5124 TYPE_MINVAL (type) = NULL;
5125 }
5126 if (TREE_CODE (type) == METHOD_TYPE)
5127 {
5128 tree p;
5129
5130 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5131 {
5132 /* C++ FE uses TREE_PURPOSE to store initial values. */
5133 TREE_PURPOSE (p) = NULL;
5134 }
5135 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5136 TYPE_MINVAL (type) = NULL;
5137 }
5138
5139 /* Remove members that are not FIELD_DECLs or TYPE_DECLs from the field
5140 list of an aggregate. These occur in C++. */
5141 if (RECORD_OR_UNION_TYPE_P (type))
5142 {
5143 tree prev, member;
5144
5145 /* Note that TYPE_FIELDS can be shared across distinct
5146 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5147 to be removed, we cannot set its TREE_CHAIN to NULL.
5148 Otherwise, we would not be able to find all the other fields
5149 in the other instances of this TREE_TYPE.
5150
5151 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5152 prev = NULL_TREE;
5153 member = TYPE_FIELDS (type);
5154 while (member)
5155 {
5156 if (TREE_CODE (member) == FIELD_DECL
5157 || TREE_CODE (member) == TYPE_DECL)
5158 {
5159 if (prev)
5160 TREE_CHAIN (prev) = member;
5161 else
5162 TYPE_FIELDS (type) = member;
5163 prev = member;
5164 }
5165
5166 member = TREE_CHAIN (member);
5167 }
5168
5169 if (prev)
5170 TREE_CHAIN (prev) = NULL_TREE;
5171 else
5172 TYPE_FIELDS (type) = NULL_TREE;
5173
5174 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5175 and dangles the pointer from time to time. */
5176 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5177 TYPE_VFIELD (type) = NULL_TREE;
5178
5179 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5180 to enable ODR warnings about different method lists, doing so
5181 seems to increase the size of the streamed LTO data impractically.
5182 Keep the information about whether TYPE_METHODS was non-NULL; this is
5183 used by function.c and the pretty printers. */
5184 if (TYPE_METHODS (type))
5185 TYPE_METHODS (type) = error_mark_node;
5186 if (TYPE_BINFO (type))
5187 {
5188 free_lang_data_in_binfo (TYPE_BINFO (type));
5189 /* We need to preserve the link to bases and the virtual table for all
5190 polymorphic types to keep the devirtualization machinery working.
5191 Debug output cares only about bases, but we also output the
5192 virtual table pointers so that merging -fdevirtualize and
5193 -fno-devirtualize units is easier. */
5194 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5195 || !flag_devirtualize)
5196 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5197 && !BINFO_VTABLE (TYPE_BINFO (type)))
5198 || debug_info_level != DINFO_LEVEL_NONE))
5199 TYPE_BINFO (type) = NULL;
5200 }
5201 }
5202 else
5203 {
5204 /* For non-aggregate types, clear out the language slot (which
5205 overloads TYPE_BINFO). */
5206 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5207
5208 if (INTEGRAL_TYPE_P (type)
5209 || SCALAR_FLOAT_TYPE_P (type)
5210 || FIXED_POINT_TYPE_P (type))
5211 {
5212 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5213 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5214 }
5215 }
5216
5217 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5218 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5219
5220 if (TYPE_CONTEXT (type)
5221 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5222 {
5223 tree ctx = TYPE_CONTEXT (type);
5224 do
5225 {
5226 ctx = BLOCK_SUPERCONTEXT (ctx);
5227 }
5228 while (ctx && TREE_CODE (ctx) == BLOCK);
5229 TYPE_CONTEXT (type) = ctx;
5230 }
5231 }
5232
5233
5234 /* Return true if DECL may need an assembler name to be set. */
5235
5236 static inline bool
5237 need_assembler_name_p (tree decl)
5238 {
5239 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5240 Rule merging. This makes type_odr_p return true on those types during
5241 LTO, and by comparing the mangled names we can tell which types are intended
5242 to be equivalent across compilation units.
5243
5244 We do not store names of type_in_anonymous_namespace_p.
5245
5246 Record, union and enumeration types have linkage that allows us
5247 to check type_in_anonymous_namespace_p. We do not mangle compound types
5248 that can always be compared structurally.
5249
5250 Similarly for builtin types, we compare properties of their main variant.
5251 A special case is integer types, where mangling does distinguish
5252 between char/signed char/unsigned char etc. Storing the name for these allows
5253 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5254 See cp/mangle.c:write_builtin_type for details. */
5255
5256 if (flag_lto_odr_type_mering
5257 && TREE_CODE (decl) == TYPE_DECL
5258 && DECL_NAME (decl)
5259 && decl == TYPE_NAME (TREE_TYPE (decl))
5260 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5261 && (type_with_linkage_p (TREE_TYPE (decl))
5262 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5263 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5264 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5265 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5266 if (TREE_CODE (decl) != FUNCTION_DECL
5267 && TREE_CODE (decl) != VAR_DECL)
5268 return false;
5269
5270 /* If DECL already has its assembler name set, it does not need a
5271 new one. */
5272 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5273 || DECL_ASSEMBLER_NAME_SET_P (decl))
5274 return false;
5275
5276 /* Abstract decls do not need an assembler name. */
5277 if (DECL_ABSTRACT_P (decl))
5278 return false;
5279
5280 /* For VAR_DECLs, only static, public and external symbols need an
5281 assembler name. */
5282 if (TREE_CODE (decl) == VAR_DECL
5283 && !TREE_STATIC (decl)
5284 && !TREE_PUBLIC (decl)
5285 && !DECL_EXTERNAL (decl))
5286 return false;
5287
5288 if (TREE_CODE (decl) == FUNCTION_DECL)
5289 {
5290 /* Do not set assembler name on builtins. Allow RTL expansion to
5291 decide whether to expand inline or via a regular call. */
5292 if (DECL_BUILT_IN (decl)
5293 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5294 return false;
5295
5296 /* Functions represented in the callgraph need an assembler name. */
5297 if (cgraph_node::get (decl) != NULL)
5298 return true;
5299
5300 /* Unused and not public functions don't need an assembler name. */
5301 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5302 return false;
5303 }
5304
5305 return true;
5306 }
5307
5308
5309 /* Reset all language specific information still present in symbol
5310 DECL. */
5311
5312 static void
5313 free_lang_data_in_decl (tree decl)
5314 {
5315 gcc_assert (DECL_P (decl));
5316
5317 /* Give the FE a chance to remove its own data first. */
5318 lang_hooks.free_lang_data (decl);
5319
5320 TREE_LANG_FLAG_0 (decl) = 0;
5321 TREE_LANG_FLAG_1 (decl) = 0;
5322 TREE_LANG_FLAG_2 (decl) = 0;
5323 TREE_LANG_FLAG_3 (decl) = 0;
5324 TREE_LANG_FLAG_4 (decl) = 0;
5325 TREE_LANG_FLAG_5 (decl) = 0;
5326 TREE_LANG_FLAG_6 (decl) = 0;
5327
5328 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5329 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5330 if (TREE_CODE (decl) == FIELD_DECL)
5331 {
5332 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5333 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5334 DECL_QUALIFIER (decl) = NULL_TREE;
5335 }
5336
5337 if (TREE_CODE (decl) == FUNCTION_DECL)
5338 {
5339 struct cgraph_node *node;
5340 if (!(node = cgraph_node::get (decl))
5341 || (!node->definition && !node->clones))
5342 {
5343 if (node)
5344 node->release_body ();
5345 else
5346 {
5347 release_function_body (decl);
5348 DECL_ARGUMENTS (decl) = NULL;
5349 DECL_RESULT (decl) = NULL;
5350 DECL_INITIAL (decl) = error_mark_node;
5351 }
5352 }
5353 if (gimple_has_body_p (decl))
5354 {
5355 tree t;
5356
5357 /* If DECL has a gimple body, then the context for its
5358 arguments must be DECL. Otherwise, it doesn't really
5359 matter, as we will not be emitting any code for DECL. In
5360 general, there may be other instances of DECL created by
5361 the front end and since PARM_DECLs are generally shared,
5362 their DECL_CONTEXT changes as the replicas of DECL are
5363 created. The only time where DECL_CONTEXT is important
5364 is for the FUNCTION_DECLs that have a gimple body (since
5365 the PARM_DECL will be used in the function's body). */
5366 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5367 DECL_CONTEXT (t) = decl;
5368 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5369 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5370 = target_option_default_node;
5371 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5372 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5373 = optimization_default_node;
5374 }
5375
5376 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5377 At this point, it is not needed anymore. */
5378 DECL_SAVED_TREE (decl) = NULL_TREE;
5379
5380 /* Clear the abstract origin if it refers to a method. Otherwise
5381 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5382 origin will not be output correctly. */
5383 if (DECL_ABSTRACT_ORIGIN (decl)
5384 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5385 && RECORD_OR_UNION_TYPE_P
5386 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5387 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5388
5389 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5390 DECL_VINDEX referring to itself into a vtable slot number as it
5391 should. Happens with functions that are copied and then forgotten
5392 about. Just clear it, it won't matter anymore. */
5393 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5394 DECL_VINDEX (decl) = NULL_TREE;
5395 }
5396 else if (TREE_CODE (decl) == VAR_DECL)
5397 {
5398 if ((DECL_EXTERNAL (decl)
5399 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5400 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5401 DECL_INITIAL (decl) = NULL_TREE;
5402 }
5403 else if (TREE_CODE (decl) == TYPE_DECL
5404 || TREE_CODE (decl) == FIELD_DECL)
5405 DECL_INITIAL (decl) = NULL_TREE;
5406 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5407 && DECL_INITIAL (decl)
5408 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5409 {
5410 /* Strip builtins from the translation-unit BLOCK. We still have targets
5411 without builtin_decl_explicit support and also builtins are shared
5412 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5413 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5414 while (*nextp)
5415 {
5416 tree var = *nextp;
5417 if (TREE_CODE (var) == FUNCTION_DECL
5418 && DECL_BUILT_IN (var))
5419 *nextp = TREE_CHAIN (var);
5420 else
5421 nextp = &TREE_CHAIN (var);
5422 }
5423 }
5424 }
5425
5426
5427 /* Data used when collecting DECLs and TYPEs for language data removal. */
5428
5429 struct free_lang_data_d
5430 {
5431 /* Worklist to avoid excessive recursion. */
5432 vec<tree> worklist;
5433
5434 /* Set of traversed objects. Used to avoid duplicate visits. */
5435 hash_set<tree> *pset;
5436
5437 /* Array of symbols to process with free_lang_data_in_decl. */
5438 vec<tree> decls;
5439
5440 /* Array of types to process with free_lang_data_in_type. */
5441 vec<tree> types;
5442 };
5443
5444
5445 /* Save all language fields needed to generate proper debug information
5446 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5447
5448 static void
5449 save_debug_info_for_decl (tree t)
5450 {
5451 /*struct saved_debug_info_d *sdi;*/
5452
5453 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5454
5455 /* FIXME. Partial implementation for saving debug info removed. */
5456 }
5457
5458
5459 /* Save all language fields needed to generate proper debug information
5460 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5461
5462 static void
5463 save_debug_info_for_type (tree t)
5464 {
5465 /*struct saved_debug_info_d *sdi;*/
5466
5467 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5468
5469 /* FIXME. Partial implementation for saving debug info removed. */
5470 }
5471
5472
5473 /* Add type or decl T to one of the list of tree nodes that need their
5474 language data removed. The lists are held inside FLD. */
5475
5476 static void
5477 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5478 {
5479 if (DECL_P (t))
5480 {
5481 fld->decls.safe_push (t);
5482 if (debug_info_level > DINFO_LEVEL_TERSE)
5483 save_debug_info_for_decl (t);
5484 }
5485 else if (TYPE_P (t))
5486 {
5487 fld->types.safe_push (t);
5488 if (debug_info_level > DINFO_LEVEL_TERSE)
5489 save_debug_info_for_type (t);
5490 }
5491 else
5492 gcc_unreachable ();
5493 }
5494
5495 /* Push tree node T into FLD->WORKLIST. */
5496
5497 static inline void
5498 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5499 {
5500 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5501 fld->worklist.safe_push ((t));
5502 }
5503
5504
5505 /* Operand callback helper for free_lang_data_in_node. *TP is the
5506 subtree operand being considered. */
5507
5508 static tree
5509 find_decls_types_r (tree *tp, int *ws, void *data)
5510 {
5511 tree t = *tp;
5512 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5513
5514 if (TREE_CODE (t) == TREE_LIST)
5515 return NULL_TREE;
5516
5517 /* Language specific nodes will be removed, so there is no need
5518 to gather anything under them. */
5519 if (is_lang_specific (t))
5520 {
5521 *ws = 0;
5522 return NULL_TREE;
5523 }
5524
5525 if (DECL_P (t))
5526 {
5527 /* Note that walk_tree does not traverse every possible field in
5528 decls, so we have to do our own traversals here. */
5529 add_tree_to_fld_list (t, fld);
5530
5531 fld_worklist_push (DECL_NAME (t), fld);
5532 fld_worklist_push (DECL_CONTEXT (t), fld);
5533 fld_worklist_push (DECL_SIZE (t), fld);
5534 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5535
5536 /* We are going to remove everything under DECL_INITIAL for
5537 TYPE_DECLs. No point walking them. */
5538 if (TREE_CODE (t) != TYPE_DECL)
5539 fld_worklist_push (DECL_INITIAL (t), fld);
5540
5541 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5542 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5543
5544 if (TREE_CODE (t) == FUNCTION_DECL)
5545 {
5546 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5547 fld_worklist_push (DECL_RESULT (t), fld);
5548 }
5549 else if (TREE_CODE (t) == TYPE_DECL)
5550 {
5551 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5552 }
5553 else if (TREE_CODE (t) == FIELD_DECL)
5554 {
5555 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5556 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5557 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5558 fld_worklist_push (DECL_FCONTEXT (t), fld);
5559 }
5560
5561 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5562 && DECL_HAS_VALUE_EXPR_P (t))
5563 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5564
5565 if (TREE_CODE (t) != FIELD_DECL
5566 && TREE_CODE (t) != TYPE_DECL)
5567 fld_worklist_push (TREE_CHAIN (t), fld);
5568 *ws = 0;
5569 }
5570 else if (TYPE_P (t))
5571 {
5572 /* Note that walk_tree does not traverse every possible field in
5573 types, so we have to do our own traversals here. */
5574 add_tree_to_fld_list (t, fld);
5575
5576 if (!RECORD_OR_UNION_TYPE_P (t))
5577 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5578 fld_worklist_push (TYPE_SIZE (t), fld);
5579 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5580 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5581 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5582 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5583 fld_worklist_push (TYPE_NAME (t), fld);
5584 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5585 them and thus do not want to reach unused pointer types
5586 this way. */
5587 if (!POINTER_TYPE_P (t))
5588 fld_worklist_push (TYPE_MINVAL (t), fld);
5589 if (!RECORD_OR_UNION_TYPE_P (t))
5590 fld_worklist_push (TYPE_MAXVAL (t), fld);
5591 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5592 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5593 do not want to reach unused variants this way. */
5594 if (TYPE_CONTEXT (t))
5595 {
5596 tree ctx = TYPE_CONTEXT (t);
5597 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5598 So push that instead. */
5599 while (ctx && TREE_CODE (ctx) == BLOCK)
5600 ctx = BLOCK_SUPERCONTEXT (ctx);
5601 fld_worklist_push (ctx, fld);
5602 }
5603 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5604 want to reach unused types this way. */
5605
5606 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5607 {
5608 unsigned i;
5609 tree tem;
5610 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5611 fld_worklist_push (TREE_TYPE (tem), fld);
5612 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5613 if (tem
5614 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5615 && TREE_CODE (tem) == TREE_LIST)
5616 do
5617 {
5618 fld_worklist_push (TREE_VALUE (tem), fld);
5619 tem = TREE_CHAIN (tem);
5620 }
5621 while (tem);
5622 }
5623 if (RECORD_OR_UNION_TYPE_P (t))
5624 {
5625 tree tem;
5626 /* Push all TYPE_FIELDS - there can be interleaving interesting
5627 and non-interesting things. */
5628 tem = TYPE_FIELDS (t);
5629 while (tem)
5630 {
5631 if (TREE_CODE (tem) == FIELD_DECL
5632 || TREE_CODE (tem) == TYPE_DECL)
5633 fld_worklist_push (tem, fld);
5634 tem = TREE_CHAIN (tem);
5635 }
5636 }
5637
5638 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5639 *ws = 0;
5640 }
5641 else if (TREE_CODE (t) == BLOCK)
5642 {
5643 tree tem;
5644 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5645 fld_worklist_push (tem, fld);
5646 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5647 fld_worklist_push (tem, fld);
5648 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5649 }
5650
5651 if (TREE_CODE (t) != IDENTIFIER_NODE
5652 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5653 fld_worklist_push (TREE_TYPE (t), fld);
5654
5655 return NULL_TREE;
5656 }
5657
5658
5659 /* Find decls and types in T. */
5660
5661 static void
5662 find_decls_types (tree t, struct free_lang_data_d *fld)
5663 {
5664 while (1)
5665 {
5666 if (!fld->pset->contains (t))
5667 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5668 if (fld->worklist.is_empty ())
5669 break;
5670 t = fld->worklist.pop ();
5671 }
5672 }
5673
5674 /* Translate all the types in LIST into the corresponding runtime
5675 types. */
5676
5677 static tree
5678 get_eh_types_for_runtime (tree list)
5679 {
5680 tree head, prev;
5681
5682 if (list == NULL_TREE)
5683 return NULL_TREE;
5684
5685 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5686 prev = head;
5687 list = TREE_CHAIN (list);
5688 while (list)
5689 {
5690 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5691 TREE_CHAIN (prev) = n;
5692 prev = TREE_CHAIN (prev);
5693 list = TREE_CHAIN (list);
5694 }
5695
5696 return head;
5697 }
5698
5699
5700 /* Find decls and types referenced in EH region R and store them in
5701 FLD->DECLS and FLD->TYPES. */
5702
5703 static void
5704 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5705 {
5706 switch (r->type)
5707 {
5708 case ERT_CLEANUP:
5709 break;
5710
5711 case ERT_TRY:
5712 {
5713 eh_catch c;
5714
5715 /* The types referenced in each catch must first be changed to the
5716 EH types used at runtime. This removes references to FE types
5717 in the region. */
5718 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5719 {
5720 c->type_list = get_eh_types_for_runtime (c->type_list);
5721 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5722 }
5723 }
5724 break;
5725
5726 case ERT_ALLOWED_EXCEPTIONS:
5727 r->u.allowed.type_list
5728 = get_eh_types_for_runtime (r->u.allowed.type_list);
5729 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5730 break;
5731
5732 case ERT_MUST_NOT_THROW:
5733 walk_tree (&r->u.must_not_throw.failure_decl,
5734 find_decls_types_r, fld, fld->pset);
5735 break;
5736 }
5737 }
5738
5739
5740 /* Find decls and types referenced in cgraph node N and store them in
5741 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5742 look for *every* kind of DECL and TYPE node reachable from N,
5743 including those embedded inside types and decls (i.e., TYPE_DECLs,
5744 NAMESPACE_DECLs, etc). */
5745
5746 static void
5747 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5748 {
5749 basic_block bb;
5750 struct function *fn;
5751 unsigned ix;
5752 tree t;
5753
5754 find_decls_types (n->decl, fld);
5755
5756 if (!gimple_has_body_p (n->decl))
5757 return;
5758
5759 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5760
5761 fn = DECL_STRUCT_FUNCTION (n->decl);
5762
5763 /* Traverse locals. */
5764 FOR_EACH_LOCAL_DECL (fn, ix, t)
5765 find_decls_types (t, fld);
5766
5767 /* Traverse EH regions in FN. */
5768 {
5769 eh_region r;
5770 FOR_ALL_EH_REGION_FN (r, fn)
5771 find_decls_types_in_eh_region (r, fld);
5772 }
5773
5774 /* Traverse every statement in FN. */
5775 FOR_EACH_BB_FN (bb, fn)
5776 {
5777 gphi_iterator psi;
5778 gimple_stmt_iterator si;
5779 unsigned i;
5780
5781 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5782 {
5783 gphi *phi = psi.phi ();
5784
5785 for (i = 0; i < gimple_phi_num_args (phi); i++)
5786 {
5787 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5788 find_decls_types (*arg_p, fld);
5789 }
5790 }
5791
5792 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5793 {
5794 gimple *stmt = gsi_stmt (si);
5795
5796 if (is_gimple_call (stmt))
5797 find_decls_types (gimple_call_fntype (stmt), fld);
5798
5799 for (i = 0; i < gimple_num_ops (stmt); i++)
5800 {
5801 tree arg = gimple_op (stmt, i);
5802 find_decls_types (arg, fld);
5803 }
5804 }
5805 }
5806 }
5807
5808
5809 /* Find decls and types referenced in varpool node N and store them in
5810 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5811 look for *every* kind of DECL and TYPE node reachable from N,
5812 including those embedded inside types and decls (i.e., TYPE_DECLs,
5813 NAMESPACE_DECLs, etc). */
5814
5815 static void
5816 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5817 {
5818 find_decls_types (v->decl, fld);
5819 }
5820
5821 /* If T needs an assembler name, have one created for it. */
5822
5823 void
5824 assign_assembler_name_if_neeeded (tree t)
5825 {
5826 if (need_assembler_name_p (t))
5827 {
5828 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5829 diagnostics that use input_location to show locus
5830 information. The problem here is that, at this point,
5831 input_location is generally anchored to the end of the file
5832 (since the parser is long gone), so we don't have a good
5833 position to pin it to.
5834
5835 To alleviate this problem, this uses the location of T's
5836 declaration. Examples of this are
5837 testsuite/g++.dg/template/cond2.C and
5838 testsuite/g++.dg/template/pr35240.C. */
5839 location_t saved_location = input_location;
5840 input_location = DECL_SOURCE_LOCATION (t);
5841
5842 decl_assembler_name (t);
5843
5844 input_location = saved_location;
5845 }
5846 }
5847
5848
5849 /* Free language specific information for every operand and expression
5850 in every node of the call graph. This process operates in three stages:
5851
5852 1- Every callgraph node and varpool node is traversed looking for
5853 decls and types embedded in them. This is a more exhaustive
5854 search than that done by find_referenced_vars, because it will
5855 also collect individual fields, decls embedded in types, etc.
5856
5857 2- All the decls found are sent to free_lang_data_in_decl.
5858
5859 3- All the types found are sent to free_lang_data_in_type.
5860
5861 The ordering between decls and types is important because
5862 free_lang_data_in_decl sets assembler names, which includes
5863 mangling. So types cannot be freed up until assembler names have
5864 been set up. */
5865
5866 static void
5867 free_lang_data_in_cgraph (void)
5868 {
5869 struct cgraph_node *n;
5870 varpool_node *v;
5871 struct free_lang_data_d fld;
5872 tree t;
5873 unsigned i;
5874 alias_pair *p;
5875
5876 /* Initialize sets and arrays to store referenced decls and types. */
5877 fld.pset = new hash_set<tree>;
5878 fld.worklist.create (0);
5879 fld.decls.create (100);
5880 fld.types.create (100);
5881
5882 /* Find decls and types in the body of every function in the callgraph. */
5883 FOR_EACH_FUNCTION (n)
5884 find_decls_types_in_node (n, &fld);
5885
5886 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5887 find_decls_types (p->decl, &fld);
5888
5889 /* Find decls and types in every varpool symbol. */
5890 FOR_EACH_VARIABLE (v)
5891 find_decls_types_in_var (v, &fld);
5892
5893 /* Set the assembler name on every decl found. We need to do this
5894 now because free_lang_data_in_decl will invalidate data needed
5895 for mangling. This breaks mangling on interdependent decls. */
5896 FOR_EACH_VEC_ELT (fld.decls, i, t)
5897 assign_assembler_name_if_neeeded (t);
5898
5899 /* Traverse every decl found freeing its language data. */
5900 FOR_EACH_VEC_ELT (fld.decls, i, t)
5901 free_lang_data_in_decl (t);
5902
5903 /* Traverse every type found freeing its language data. */
5904 FOR_EACH_VEC_ELT (fld.types, i, t)
5905 free_lang_data_in_type (t);
5906 #ifdef ENABLE_CHECKING
5907 FOR_EACH_VEC_ELT (fld.types, i, t)
5908 verify_type (t);
5909 #endif
5910
5911 delete fld.pset;
5912 fld.worklist.release ();
5913 fld.decls.release ();
5914 fld.types.release ();
5915 }
5916
5917
5918 /* Free resources that are used by the front end but are not needed once it is done. */
5919
5920 static unsigned
5921 free_lang_data (void)
5922 {
5923 unsigned i;
5924
5925 /* If we are the LTO frontend we have freed lang-specific data already. */
5926 if (in_lto_p
5927 || (!flag_generate_lto && !flag_generate_offload))
5928 return 0;
5929
5930 /* Allocate and assign alias sets to the standard integer types
5931 while the slots are still set up the way the front ends generated them. */
5932 for (i = 0; i < itk_none; ++i)
5933 if (integer_types[i])
5934 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5935
5936 /* Traverse the IL resetting language specific information for
5937 operands, expressions, etc. */
5938 free_lang_data_in_cgraph ();
5939
5940 /* Create gimple variants for common types. */
5941 ptrdiff_type_node = integer_type_node;
5942 fileptr_type_node = ptr_type_node;
5943
5944 /* Reset some langhooks. Do not reset types_compatible_p, it may
5945 still be used indirectly via the get_alias_set langhook. */
5946 lang_hooks.dwarf_name = lhd_dwarf_name;
5947 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5948 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5949
5950 /* We do not want the default decl_assembler_name implementation,
5951 rather if we have fixed everything we want a wrapper around it
5952 asserting that all non-local symbols already got their assembler
5953 name and only produce assembler names for local symbols. Or rather
5954 make sure we never call decl_assembler_name on local symbols and
5955 devise a separate, middle-end private scheme for it. */
5956
5957 /* Reset diagnostic machinery. */
5958 tree_diagnostics_defaults (global_dc);
5959
5960 return 0;
5961 }
5962
5963
5964 namespace {
5965
5966 const pass_data pass_data_ipa_free_lang_data =
5967 {
5968 SIMPLE_IPA_PASS, /* type */
5969 "*free_lang_data", /* name */
5970 OPTGROUP_NONE, /* optinfo_flags */
5971 TV_IPA_FREE_LANG_DATA, /* tv_id */
5972 0, /* properties_required */
5973 0, /* properties_provided */
5974 0, /* properties_destroyed */
5975 0, /* todo_flags_start */
5976 0, /* todo_flags_finish */
5977 };
5978
5979 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5980 {
5981 public:
5982 pass_ipa_free_lang_data (gcc::context *ctxt)
5983 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5984 {}
5985
5986 /* opt_pass methods: */
5987 virtual unsigned int execute (function *) { return free_lang_data (); }
5988
5989 }; // class pass_ipa_free_lang_data
5990
5991 } // anon namespace
5992
5993 simple_ipa_opt_pass *
5994 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5995 {
5996 return new pass_ipa_free_lang_data (ctxt);
5997 }
5998
5999 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6000 ATTR_NAME. Also used internally by remove_attribute(). */
6001 bool
6002 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6003 {
6004 size_t ident_len = IDENTIFIER_LENGTH (ident);
6005
6006 if (ident_len == attr_len)
6007 {
6008 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6009 return true;
6010 }
6011 else if (ident_len == attr_len + 4)
6012 {
6013 /* There is the possibility that ATTR is 'text' and IDENT is
6014 '__text__'. */
6015 const char *p = IDENTIFIER_POINTER (ident);
6016 if (p[0] == '_' && p[1] == '_'
6017 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6018 && strncmp (attr_name, p + 2, attr_len) == 0)
6019 return true;
6020 }
6021
6022 return false;
6023 }
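/* Illustrative sketch only (not taken from an actual caller): through
   is_attribute_p, both spellings of an attribute name compare equal.
   NAME1 and NAME2 below are hypothetical identifiers used for
   illustration.

     tree name1 = get_identifier ("packed");
     tree name2 = get_identifier ("__packed__");
     gcc_assert (is_attribute_p ("packed", name1)
                 && is_attribute_p ("packed", name2));  */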
6024
6025 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6026 of ATTR_NAME, and LIST is not NULL_TREE. */
6027 tree
6028 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6029 {
6030 while (list)
6031 {
6032 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6033
6034 if (ident_len == attr_len)
6035 {
6036 if (!strcmp (attr_name,
6037 IDENTIFIER_POINTER (get_attribute_name (list))))
6038 break;
6039 }
6040 /* TODO: If we made sure that attributes were stored in the
6041 canonical form without '__...__' (i.e., as in 'text' as opposed
6042 to '__text__') then we could avoid the following case. */
6043 else if (ident_len == attr_len + 4)
6044 {
6045 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6046 if (p[0] == '_' && p[1] == '_'
6047 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6048 && strncmp (attr_name, p + 2, attr_len) == 0)
6049 break;
6050 }
6051 list = TREE_CHAIN (list);
6052 }
6053
6054 return list;
6055 }
6056
6057 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6058 return a pointer to the first list element whose attribute name
6059 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6060 '__text__'). */
6061
6062 tree
6063 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6064 tree list)
6065 {
6066 while (list)
6067 {
6068 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6069
6070 if (attr_len > ident_len)
6071 {
6072 list = TREE_CHAIN (list);
6073 continue;
6074 }
6075
6076 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6077
6078 if (strncmp (attr_name, p, attr_len) == 0)
6079 break;
6080
6081 /* TODO: If we made sure that attributes were stored in the
6082 canonical form without '__...__' (i.e., as in 'text' as opposed
6083 to '__text__') then we could avoid the following case. */
6084 if (p[0] == '_' && p[1] == '_'
6085 && strncmp (attr_name, p + 2, attr_len) == 0)
6086 break;
6087
6088 list = TREE_CHAIN (list);
6089 }
6090
6091 return list;
6092 }
6093
6094
6095 /* A variant of lookup_attribute() that can be used with an identifier
6096 as the first argument, and where the identifier can be either
6097 'text' or '__text__'.
6098
6099 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6100 return a pointer to the attribute's list element if the attribute
6101 is part of the list, or NULL_TREE if not found. If the attribute
6102 appears more than once, this only returns the first occurrence; the
6103 TREE_CHAIN of the return value should be passed back in if further
6104 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6105 can be in the form 'text' or '__text__'. */
6106 static tree
6107 lookup_ident_attribute (tree attr_identifier, tree list)
6108 {
6109 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6110
6111 while (list)
6112 {
6113 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6114 == IDENTIFIER_NODE);
6115
6116 if (cmp_attrib_identifiers (attr_identifier,
6117 get_attribute_name (list)))
6118 /* Found it. */
6119 break;
6120 list = TREE_CHAIN (list);
6121 }
6122
6123 return list;
6124 }
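/* Illustrative sketch only: walking every occurrence of an attribute by
   feeding the TREE_CHAIN of the previous hit back in, as described above.
   ATTR_NAME, ATTRS and handle_one_occurrence are hypothetical names; the
   loop in merge_attributes below uses this same pattern.

     for (tree a = lookup_ident_attribute (attr_name, attrs);
          a != NULL_TREE;
          a = lookup_ident_attribute (attr_name, TREE_CHAIN (a)))
       handle_one_occurrence (a);  */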
6125
6126 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6127 modified list. */
6128
6129 tree
6130 remove_attribute (const char *attr_name, tree list)
6131 {
6132 tree *p;
6133 size_t attr_len = strlen (attr_name);
6134
6135 gcc_checking_assert (attr_name[0] != '_');
6136
6137 for (p = &list; *p; )
6138 {
6139 tree l = *p;
6140 /* TODO: If we were storing attributes in normalized form, here
6141 we could use a simple strcmp(). */
6142 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6143 *p = TREE_CHAIN (l);
6144 else
6145 p = &TREE_CHAIN (l);
6146 }
6147
6148 return list;
6149 }
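/* Illustrative sketch only; DECL is a hypothetical declaration.  A typical
   use is dropping one named attribute from a decl's attribute list:

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   merge_dllimport_decl_attributes below uses the same idiom to drop
   "dllimport".  */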
6150
6151 /* Return an attribute list that is the union of a1 and a2. */
6152
6153 tree
6154 merge_attributes (tree a1, tree a2)
6155 {
6156 tree attributes;
6157
6158 /* Either one unset? Take the set one. */
6159
6160 if ((attributes = a1) == 0)
6161 attributes = a2;
6162
6163 /* One that completely contains the other? Take it. */
6164
6165 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6166 {
6167 if (attribute_list_contained (a2, a1))
6168 attributes = a2;
6169 else
6170 {
6171 /* Pick the longest list, and hang on the other list. */
6172
6173 if (list_length (a1) < list_length (a2))
6174 attributes = a2, a2 = a1;
6175
6176 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6177 {
6178 tree a;
6179 for (a = lookup_ident_attribute (get_attribute_name (a2),
6180 attributes);
6181 a != NULL_TREE && !attribute_value_equal (a, a2);
6182 a = lookup_ident_attribute (get_attribute_name (a2),
6183 TREE_CHAIN (a)))
6184 ;
6185 if (a == NULL_TREE)
6186 {
6187 a1 = copy_node (a2);
6188 TREE_CHAIN (a1) = attributes;
6189 attributes = a1;
6190 }
6191 }
6192 }
6193 }
6194 return attributes;
6195 }
6196
6197 /* Given types T1 and T2, merge their attributes and return
6198 the result. */
6199
6200 tree
6201 merge_type_attributes (tree t1, tree t2)
6202 {
6203 return merge_attributes (TYPE_ATTRIBUTES (t1),
6204 TYPE_ATTRIBUTES (t2));
6205 }
6206
6207 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6208 the result. */
6209
6210 tree
6211 merge_decl_attributes (tree olddecl, tree newdecl)
6212 {
6213 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6214 DECL_ATTRIBUTES (newdecl));
6215 }
6216
6217 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6218
6219 /* Specialization of merge_decl_attributes for various Windows targets.
6220
6221 This handles the following situation:
6222
6223 __declspec (dllimport) int foo;
6224 int foo;
6225
6226 The second instance of `foo' nullifies the dllimport. */
6227
6228 tree
6229 merge_dllimport_decl_attributes (tree old, tree new_tree)
6230 {
6231 tree a;
6232 int delete_dllimport_p = 1;
6233
6234 /* What we need to do here is remove from `old' dllimport if it doesn't
6235 appear in `new'. dllimport behaves like extern: if a declaration is
6236 marked dllimport and a definition appears later, then the object
6237 is not dllimport'd. We also remove a `new' dllimport if the old list
6238 contains dllexport: dllexport always overrides dllimport, regardless
6239 of the order of declaration. */
6240 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6241 delete_dllimport_p = 0;
6242 else if (DECL_DLLIMPORT_P (new_tree)
6243 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6244 {
6245 DECL_DLLIMPORT_P (new_tree) = 0;
6246 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6247 "dllimport ignored", new_tree);
6248 }
6249 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6250 {
6251 /* Warn about overriding a symbol that has already been used, e.g.:
6252 extern int __attribute__ ((dllimport)) foo;
6253 int* bar () {return &foo;}
6254 int foo;
6255 */
6256 if (TREE_USED (old))
6257 {
6258 warning (0, "%q+D redeclared without dllimport attribute "
6259 "after being referenced with dll linkage", new_tree);
6260 /* If we have used a variable's address with dllimport linkage,
6261 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6262 decl may already have had TREE_CONSTANT computed.
6263 We still remove the attribute so that assembler code refers
6264 to '&foo' rather than '_imp__foo'. */
6265 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6266 DECL_DLLIMPORT_P (new_tree) = 1;
6267 }
6268
6269 /* Let an inline definition silently override the external reference,
6270 but otherwise warn about attribute inconsistency. */
6271 else if (TREE_CODE (new_tree) == VAR_DECL
6272 || !DECL_DECLARED_INLINE_P (new_tree))
6273 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6274 "previous dllimport ignored", new_tree);
6275 }
6276 else
6277 delete_dllimport_p = 0;
6278
6279 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6280
6281 if (delete_dllimport_p)
6282 a = remove_attribute ("dllimport", a);
6283
6284 return a;
6285 }
6286
6287 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6288 struct attribute_spec.handler. */
6289
6290 tree
6291 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6292 bool *no_add_attrs)
6293 {
6294 tree node = *pnode;
6295 bool is_dllimport;
6296
6297 /* These attributes may apply to structure and union types being created,
6298 but otherwise should pass to the declaration involved. */
6299 if (!DECL_P (node))
6300 {
6301 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6302 | (int) ATTR_FLAG_ARRAY_NEXT))
6303 {
6304 *no_add_attrs = true;
6305 return tree_cons (name, args, NULL_TREE);
6306 }
6307 if (TREE_CODE (node) == RECORD_TYPE
6308 || TREE_CODE (node) == UNION_TYPE)
6309 {
6310 node = TYPE_NAME (node);
6311 if (!node)
6312 return NULL_TREE;
6313 }
6314 else
6315 {
6316 warning (OPT_Wattributes, "%qE attribute ignored",
6317 name);
6318 *no_add_attrs = true;
6319 return NULL_TREE;
6320 }
6321 }
6322
6323 if (TREE_CODE (node) != FUNCTION_DECL
6324 && TREE_CODE (node) != VAR_DECL
6325 && TREE_CODE (node) != TYPE_DECL)
6326 {
6327 *no_add_attrs = true;
6328 warning (OPT_Wattributes, "%qE attribute ignored",
6329 name);
6330 return NULL_TREE;
6331 }
6332
6333 if (TREE_CODE (node) == TYPE_DECL
6334 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6335 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6336 {
6337 *no_add_attrs = true;
6338 warning (OPT_Wattributes, "%qE attribute ignored",
6339 name);
6340 return NULL_TREE;
6341 }
6342
6343 is_dllimport = is_attribute_p ("dllimport", name);
6344
6345 /* Report error on dllimport ambiguities seen now before they cause
6346 any damage. */
6347 if (is_dllimport)
6348 {
6349 /* Honor any target-specific overrides. */
6350 if (!targetm.valid_dllimport_attribute_p (node))
6351 *no_add_attrs = true;
6352
6353 else if (TREE_CODE (node) == FUNCTION_DECL
6354 && DECL_DECLARED_INLINE_P (node))
6355 {
6356 warning (OPT_Wattributes, "inline function %q+D declared as "
6357 " dllimport: attribute ignored", node);
6358 *no_add_attrs = true;
6359 }
6360 /* Like MS, treat definition of dllimported variables and
6361 non-inlined functions on declaration as syntax errors. */
6362 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6363 {
6364 error ("function %q+D definition is marked dllimport", node);
6365 *no_add_attrs = true;
6366 }
6367
6368 else if (TREE_CODE (node) == VAR_DECL)
6369 {
6370 if (DECL_INITIAL (node))
6371 {
6372 error ("variable %q+D definition is marked dllimport",
6373 node);
6374 *no_add_attrs = true;
6375 }
6376
6377 /* `extern' needn't be specified with dllimport.
6378 Specify `extern' now and hope for the best. Sigh. */
6379 DECL_EXTERNAL (node) = 1;
6380 /* Also, implicitly give dllimport'd variables declared within
6381 a function global scope, unless declared static. */
6382 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6383 TREE_PUBLIC (node) = 1;
6384 }
6385
6386 if (*no_add_attrs == false)
6387 DECL_DLLIMPORT_P (node) = 1;
6388 }
6389 else if (TREE_CODE (node) == FUNCTION_DECL
6390 && DECL_DECLARED_INLINE_P (node)
6391 && flag_keep_inline_dllexport)
6392 /* An exported function, even if inline, must be emitted. */
6393 DECL_EXTERNAL (node) = 0;
6394
6395 /* Report error if symbol is not accessible at global scope. */
6396 if (!TREE_PUBLIC (node)
6397 && (TREE_CODE (node) == VAR_DECL
6398 || TREE_CODE (node) == FUNCTION_DECL))
6399 {
6400 error ("external linkage required for symbol %q+D because of "
6401 "%qE attribute", node, name);
6402 *no_add_attrs = true;
6403 }
6404
6405 /* A dllexport'd entity must have default visibility so that other
6406 program units (shared libraries or the main executable) can see
6407 it. A dllimport'd entity must have default visibility so that
6408 the linker knows that undefined references within this program
6409 unit can be resolved by the dynamic linker. */
6410 if (!*no_add_attrs)
6411 {
6412 if (DECL_VISIBILITY_SPECIFIED (node)
6413 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6414 error ("%qE implies default visibility, but %qD has already "
6415 "been declared with a different visibility",
6416 name, node);
6417 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6418 DECL_VISIBILITY_SPECIFIED (node) = 1;
6419 }
6420
6421 return NULL_TREE;
6422 }
6423
6424 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6425 \f
6426 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6427 of the various TYPE_QUAL values. */
6428
6429 static void
6430 set_type_quals (tree type, int type_quals)
6431 {
6432 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6433 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6434 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6435 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6436 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6437 }
6438
6439 /* Returns true iff unqualified CAND and BASE are equivalent. */
6440
6441 bool
6442 check_base_type (const_tree cand, const_tree base)
6443 {
6444 return (TYPE_NAME (cand) == TYPE_NAME (base)
6445 /* Apparently this is needed for Objective-C. */
6446 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6447 /* Check alignment. */
6448 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6449 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6450 TYPE_ATTRIBUTES (base)));
6451 }
6452
6453 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6454
6455 bool
6456 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6457 {
6458 return (TYPE_QUALS (cand) == type_quals
6459 && check_base_type (cand, base));
6460 }
6461
6462 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6463
6464 static bool
6465 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6466 {
6467 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6468 && TYPE_NAME (cand) == TYPE_NAME (base)
6469 /* Apparently this is needed for Objective-C. */
6470 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6471 /* Check alignment. */
6472 && TYPE_ALIGN (cand) == align
6473 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6474 TYPE_ATTRIBUTES (base)));
6475 }
6476
6477 /* This function checks to see if TYPE matches the size of one of the built-in
6478 atomic types, and returns that core atomic type. */
6479
6480 static tree
6481 find_atomic_core_type (tree type)
6482 {
6483 tree base_atomic_type;
6484
6485 /* Only handle complete types. */
6486 if (TYPE_SIZE (type) == NULL_TREE)
6487 return NULL_TREE;
6488
6489 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6490 switch (type_size)
6491 {
6492 case 8:
6493 base_atomic_type = atomicQI_type_node;
6494 break;
6495
6496 case 16:
6497 base_atomic_type = atomicHI_type_node;
6498 break;
6499
6500 case 32:
6501 base_atomic_type = atomicSI_type_node;
6502 break;
6503
6504 case 64:
6505 base_atomic_type = atomicDI_type_node;
6506 break;
6507
6508 case 128:
6509 base_atomic_type = atomicTI_type_node;
6510 break;
6511
6512 default:
6513 base_atomic_type = NULL_TREE;
6514 }
6515
6516 return base_atomic_type;
6517 }
6518
6519 /* Return a version of the TYPE, qualified as indicated by the
6520 TYPE_QUALS, if one exists. If no qualified version exists yet,
6521 return NULL_TREE. */
6522
6523 tree
6524 get_qualified_type (tree type, int type_quals)
6525 {
6526 tree t;
6527
6528 if (TYPE_QUALS (type) == type_quals)
6529 return type;
6530
6531 /* Search the chain of variants to see if there is already one there just
6532 like the one we need to have. If so, use that existing one. We must
6533 preserve the TYPE_NAME, since there is code that depends on this. */
6534 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6535 if (check_qualified_type (t, type, type_quals))
6536 return t;
6537
6538 return NULL_TREE;
6539 }
6540
6541 /* Like get_qualified_type, but creates the type if it does not
6542 exist. This function never returns NULL_TREE. */
6543
6544 tree
6545 build_qualified_type (tree type, int type_quals)
6546 {
6547 tree t;
6548
6549 /* See if we already have the appropriate qualified variant. */
6550 t = get_qualified_type (type, type_quals);
6551
6552 /* If not, build it. */
6553 if (!t)
6554 {
6555 t = build_variant_type_copy (type);
6556 set_type_quals (t, type_quals);
6557
6558 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6559 {
6560 /* See if this object can map to a basic atomic type. */
6561 tree atomic_type = find_atomic_core_type (type);
6562 if (atomic_type)
6563 {
6564 /* Ensure the alignment of this type is compatible with
6565 the required alignment of the atomic type. */
6566 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6567 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6568 }
6569 }
6570
6571 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6572 /* Propagate structural equality. */
6573 SET_TYPE_STRUCTURAL_EQUALITY (t);
6574 else if (TYPE_CANONICAL (type) != type)
6575 /* Build the underlying canonical type, since it is different
6576 from TYPE. */
6577 {
6578 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6579 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6580 }
6581 else
6582 /* T is its own canonical type. */
6583 TYPE_CANONICAL (t) = t;
6584
6585 }
6586
6587 return t;
6588 }
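/* Illustrative sketch only; TYPE stands for any existing type node.
   Requesting a const-volatile variant either finds a matching variant on
   TYPE's variant chain or creates a new one:

     tree cv_type
       = build_qualified_type (type,
                               TYPE_QUALS (type)
                               | TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);  */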
6589
6590 /* Create a variant of TYPE with alignment ALIGN. */
6591
6592 tree
6593 build_aligned_type (tree type, unsigned int align)
6594 {
6595 tree t;
6596
6597 if (TYPE_PACKED (type)
6598 || TYPE_ALIGN (type) == align)
6599 return type;
6600
6601 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6602 if (check_aligned_type (t, type, align))
6603 return t;
6604
6605 t = build_variant_type_copy (type);
6606 TYPE_ALIGN (t) = align;
6607
6608 return t;
6609 }
6610
6611 /* Create a new distinct copy of TYPE. The new type is made its own
6612 MAIN_VARIANT. If TYPE requires structural equality checks, the
6613 resulting type requires structural equality checks; otherwise, its
6614 TYPE_CANONICAL points to itself. */
6615
6616 tree
6617 build_distinct_type_copy (tree type)
6618 {
6619 tree t = copy_node (type);
6620
6621 TYPE_POINTER_TO (t) = 0;
6622 TYPE_REFERENCE_TO (t) = 0;
6623
6624 /* Set the canonical type either to a new equivalence class, or
6625 propagate the need for structural equality checks. */
6626 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6627 SET_TYPE_STRUCTURAL_EQUALITY (t);
6628 else
6629 TYPE_CANONICAL (t) = t;
6630
6631 /* Make it its own variant. */
6632 TYPE_MAIN_VARIANT (t) = t;
6633 TYPE_NEXT_VARIANT (t) = 0;
6634
6635 /* We do not record methods in type copies or variants,
6636 so we do not need to keep them up to date when a new method
6637 is inserted. */
6638 if (RECORD_OR_UNION_TYPE_P (t))
6639 TYPE_METHODS (t) = NULL_TREE;
6640
6641 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6642 whose TREE_TYPE is not t. This can also happen in the Ada
6643 frontend when using subtypes. */
6644
6645 return t;
6646 }
6647
6648 /* Create a new variant of TYPE, equivalent but distinct. This is so
6649 the caller can modify it. TYPE_CANONICAL for the return type will
6650 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6651 are considered equal by the language itself (or that both types
6652 require structural equality checks). */
6653
6654 tree
6655 build_variant_type_copy (tree type)
6656 {
6657 tree t, m = TYPE_MAIN_VARIANT (type);
6658
6659 t = build_distinct_type_copy (type);
6660
6661 /* Since we're building a variant, assume that it is a non-semantic
6662 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6663 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6664
6665 /* Add the new type to the chain of variants of TYPE. */
6666 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6667 TYPE_NEXT_VARIANT (m) = t;
6668 TYPE_MAIN_VARIANT (t) = m;
6669
6670 return t;
6671 }
6672 \f
6673 /* Return true if the from trees in both tree maps are equal. */
6674
6675 int
6676 tree_map_base_eq (const void *va, const void *vb)
6677 {
6678 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6679 *const b = (const struct tree_map_base *) vb;
6680 return (a->from == b->from);
6681 }
6682
6683 /* Hash a from tree in a tree_map_base. */
6684
6685 unsigned int
6686 tree_map_base_hash (const void *item)
6687 {
6688 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6689 }
6690
6691 /* Return true if this tree map structure is marked for garbage collection
6692 purposes. We simply return true if the from tree is marked, so that this
6693 structure goes away when the from tree goes away. */
6694
6695 int
6696 tree_map_base_marked_p (const void *p)
6697 {
6698 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6699 }
6700
6701 /* Hash a from tree in a tree_map. */
6702
6703 unsigned int
6704 tree_map_hash (const void *item)
6705 {
6706 return (((const struct tree_map *) item)->hash);
6707 }
6708
6709 /* Hash a from tree in a tree_decl_map. */
6710
6711 unsigned int
6712 tree_decl_map_hash (const void *item)
6713 {
6714 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6715 }
6716
6717 /* Return the initialization priority for DECL. */
6718
6719 priority_type
6720 decl_init_priority_lookup (tree decl)
6721 {
6722 symtab_node *snode = symtab_node::get (decl);
6723
6724 if (!snode)
6725 return DEFAULT_INIT_PRIORITY;
6726 return
6727 snode->get_init_priority ();
6728 }
6729
6730 /* Return the finalization priority for DECL. */
6731
6732 priority_type
6733 decl_fini_priority_lookup (tree decl)
6734 {
6735 cgraph_node *node = cgraph_node::get (decl);
6736
6737 if (!node)
6738 return DEFAULT_INIT_PRIORITY;
6739 return
6740 node->get_fini_priority ();
6741 }
6742
6743 /* Set the initialization priority for DECL to PRIORITY. */
6744
6745 void
6746 decl_init_priority_insert (tree decl, priority_type priority)
6747 {
6748 struct symtab_node *snode;
6749
6750 if (priority == DEFAULT_INIT_PRIORITY)
6751 {
6752 snode = symtab_node::get (decl);
6753 if (!snode)
6754 return;
6755 }
6756 else if (TREE_CODE (decl) == VAR_DECL)
6757 snode = varpool_node::get_create (decl);
6758 else
6759 snode = cgraph_node::get_create (decl);
6760 snode->set_init_priority (priority);
6761 }
6762
6763 /* Set the finalization priority for DECL to PRIORITY. */
6764
6765 void
6766 decl_fini_priority_insert (tree decl, priority_type priority)
6767 {
6768 struct cgraph_node *node;
6769
6770 if (priority == DEFAULT_INIT_PRIORITY)
6771 {
6772 node = cgraph_node::get (decl);
6773 if (!node)
6774 return;
6775 }
6776 else
6777 node = cgraph_node::get_create (decl);
6778 node->set_fini_priority (priority);
6779 }
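/* Illustrative sketch only; FNDECL and PRIO_ARG are hypothetical.  A
   front end handling __attribute__ ((constructor (N))) would record the
   priority roughly like this:

     priority_type prio = (priority_type) tree_to_uhwi (prio_arg);
     decl_init_priority_insert (fndecl, prio);

   Passing DEFAULT_INIT_PRIORITY leaves decls without a symtab node
   untouched, as the function above shows.  */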
6780
6781 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6782
6783 static void
6784 print_debug_expr_statistics (void)
6785 {
6786 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6787 (long) debug_expr_for_decl->size (),
6788 (long) debug_expr_for_decl->elements (),
6789 debug_expr_for_decl->collisions ());
6790 }
6791
6792 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6793
6794 static void
6795 print_value_expr_statistics (void)
6796 {
6797 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6798 (long) value_expr_for_decl->size (),
6799 (long) value_expr_for_decl->elements (),
6800 value_expr_for_decl->collisions ());
6801 }
6802
6803 /* Lookup a debug expression for FROM, and return it if we find one. */
6804
6805 tree
6806 decl_debug_expr_lookup (tree from)
6807 {
6808 struct tree_decl_map *h, in;
6809 in.base.from = from;
6810
6811 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6812 if (h)
6813 return h->to;
6814 return NULL_TREE;
6815 }
6816
6817 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6818
6819 void
6820 decl_debug_expr_insert (tree from, tree to)
6821 {
6822 struct tree_decl_map *h;
6823
6824 h = ggc_alloc<tree_decl_map> ();
6825 h->base.from = from;
6826 h->to = to;
6827 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6828 }
6829
6830 /* Lookup a value expression for FROM, and return it if we find one. */
6831
6832 tree
6833 decl_value_expr_lookup (tree from)
6834 {
6835 struct tree_decl_map *h, in;
6836 in.base.from = from;
6837
6838 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6839 if (h)
6840 return h->to;
6841 return NULL_TREE;
6842 }
6843
6844 /* Insert a mapping FROM->TO in the value expression hashtable. */
6845
6846 void
6847 decl_value_expr_insert (tree from, tree to)
6848 {
6849 struct tree_decl_map *h;
6850
6851 h = ggc_alloc<tree_decl_map> ();
6852 h->base.from = from;
6853 h->to = to;
6854 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6855 }
6856
6857 /* Lookup a vector of debug arguments for FROM, and return it if we
6858 find one. */
6859
6860 vec<tree, va_gc> **
6861 decl_debug_args_lookup (tree from)
6862 {
6863 struct tree_vec_map *h, in;
6864
6865 if (!DECL_HAS_DEBUG_ARGS_P (from))
6866 return NULL;
6867 gcc_checking_assert (debug_args_for_decl != NULL);
6868 in.base.from = from;
6869 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6870 if (h)
6871 return &h->to;
6872 return NULL;
6873 }
6874
6875 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6876 arguments hashtable. */
6877
6878 vec<tree, va_gc> **
6879 decl_debug_args_insert (tree from)
6880 {
6881 struct tree_vec_map *h;
6882 tree_vec_map **loc;
6883
6884 if (DECL_HAS_DEBUG_ARGS_P (from))
6885 return decl_debug_args_lookup (from);
6886 if (debug_args_for_decl == NULL)
6887 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6888 h = ggc_alloc<tree_vec_map> ();
6889 h->base.from = from;
6890 h->to = NULL;
6891 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6892 *loc = h;
6893 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6894 return &h->to;
6895 }
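/* Illustrative sketch only; FNDECL, PARM and DDECL are hypothetical.  The
   inliner records pairs of (origin decl, debug decl) roughly like this:

     vec<tree, va_gc> **debug_args = decl_debug_args_insert (fndecl);
     vec_safe_push (*debug_args, DECL_ORIGIN (parm));
     vec_safe_push (*debug_args, ddecl);  */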
6896
6897 /* Hashing of types so that we don't make duplicates.
6898 The entry point is `type_hash_canon'. */
6899
6900 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6901 with types in the TREE_VALUE slots), by adding the hash codes
6902 of the individual types. */
6903
6904 static void
6905 type_hash_list (const_tree list, inchash::hash &hstate)
6906 {
6907 const_tree tail;
6908
6909 for (tail = list; tail; tail = TREE_CHAIN (tail))
6910 if (TREE_VALUE (tail) != error_mark_node)
6911 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6912 }
6913
6914 /* These are the Hashtable callback functions. */
6915
6916 /* Returns true iff the types are equivalent. */
6917
6918 bool
6919 type_cache_hasher::equal (type_hash *a, type_hash *b)
6920 {
6921 /* First test the things that are the same for all types. */
6922 if (a->hash != b->hash
6923 || TREE_CODE (a->type) != TREE_CODE (b->type)
6924 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6925 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6926 TYPE_ATTRIBUTES (b->type))
6927 || (TREE_CODE (a->type) != COMPLEX_TYPE
6928 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6929 return 0;
6930
6931 /* Be careful about comparing arrays before and after the element type
6932 has been completed; don't compare TYPE_ALIGN unless both types are
6933 complete. */
6934 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6935 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6936 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6937 return 0;
6938
6939 switch (TREE_CODE (a->type))
6940 {
6941 case VOID_TYPE:
6942 case COMPLEX_TYPE:
6943 case POINTER_TYPE:
6944 case REFERENCE_TYPE:
6945 case NULLPTR_TYPE:
6946 return 1;
6947
6948 case VECTOR_TYPE:
6949 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6950
6951 case ENUMERAL_TYPE:
6952 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6953 && !(TYPE_VALUES (a->type)
6954 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6955 && TYPE_VALUES (b->type)
6956 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6957 && type_list_equal (TYPE_VALUES (a->type),
6958 TYPE_VALUES (b->type))))
6959 return 0;
6960
6961 /* ... fall through ... */
6962
6963 case INTEGER_TYPE:
6964 case REAL_TYPE:
6965 case BOOLEAN_TYPE:
6966 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6967 return false;
6968 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6969 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6970 TYPE_MAX_VALUE (b->type)))
6971 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6972 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6973 TYPE_MIN_VALUE (b->type))));
6974
6975 case FIXED_POINT_TYPE:
6976 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6977
6978 case OFFSET_TYPE:
6979 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6980
6981 case METHOD_TYPE:
6982 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6983 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6984 || (TYPE_ARG_TYPES (a->type)
6985 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6986 && TYPE_ARG_TYPES (b->type)
6987 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6988 && type_list_equal (TYPE_ARG_TYPES (a->type),
6989 TYPE_ARG_TYPES (b->type)))))
6990 break;
6991 return 0;
6992 case ARRAY_TYPE:
6993 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6994
6995 case RECORD_TYPE:
6996 case UNION_TYPE:
6997 case QUAL_UNION_TYPE:
6998 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6999 || (TYPE_FIELDS (a->type)
7000 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7001 && TYPE_FIELDS (b->type)
7002 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7003 && type_list_equal (TYPE_FIELDS (a->type),
7004 TYPE_FIELDS (b->type))));
7005
7006 case FUNCTION_TYPE:
7007 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7008 || (TYPE_ARG_TYPES (a->type)
7009 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7010 && TYPE_ARG_TYPES (b->type)
7011 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7012 && type_list_equal (TYPE_ARG_TYPES (a->type),
7013 TYPE_ARG_TYPES (b->type))))
7014 break;
7015 return 0;
7016
7017 default:
7018 return 0;
7019 }
7020
7021 if (lang_hooks.types.type_hash_eq != NULL)
7022 return lang_hooks.types.type_hash_eq (a->type, b->type);
7023
7024 return 1;
7025 }
7026
7027 /* Given TYPE, and HASHCODE its hash code, return the canonical
7028 object for an identical type if one already exists.
7029 Otherwise, return TYPE, and record it as the canonical object.
7030
7031 To use this function, first create a type of the sort you want.
7032 Then compute its hash code from the fields of the type that
7033 make it different from other similar types.
7034 Then call this function and use the value. */
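/* For example (illustrative sketch only, not copied from a specific
   caller), a constructor of function-like types can share identical nodes
   roughly as follows, where VALUE_TYPE, ARG_TYPES and T are hypothetical
   and T must be a main variant as the assertion below enforces:

     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (value_type));
     type_hash_list (arg_types, hstate);
     t = type_hash_canon (hstate.end (), t);  */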
7035
7036 tree
7037 type_hash_canon (unsigned int hashcode, tree type)
7038 {
7039 type_hash in;
7040 type_hash **loc;
7041
7042 /* The hash table only contains main variants, so ensure that's what we're
7043 being passed. */
7044 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7045
7046 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7047 must call that routine before comparing TYPE_ALIGNs. */
7048 layout_type (type);
7049
7050 in.hash = hashcode;
7051 in.type = type;
7052
7053 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7054 if (*loc)
7055 {
7056 tree t1 = ((type_hash *) *loc)->type;
7057 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7058 if (GATHER_STATISTICS)
7059 {
7060 tree_code_counts[(int) TREE_CODE (type)]--;
7061 tree_node_counts[(int) t_kind]--;
7062 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7063 }
7064 return t1;
7065 }
7066 else
7067 {
7068 struct type_hash *h;
7069
7070 h = ggc_alloc<type_hash> ();
7071 h->hash = hashcode;
7072 h->type = type;
7073 *loc = h;
7074
7075 return type;
7076 }
7077 }
7078
7079 static void
7080 print_type_hash_statistics (void)
7081 {
7082 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7083 (long) type_hash_table->size (),
7084 (long) type_hash_table->elements (),
7085 type_hash_table->collisions ());
7086 }
7087
7088 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7089 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7090 by adding the hash codes of the individual attributes. */
7091
7092 static void
7093 attribute_hash_list (const_tree list, inchash::hash &hstate)
7094 {
7095 const_tree tail;
7096
7097 for (tail = list; tail; tail = TREE_CHAIN (tail))
7098 /* ??? Do we want to add in TREE_VALUE too? */
7099 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7100 }
7101
7102 /* Given two lists of attributes, return true if list L2 is
7103 equivalent to L1. */
7104
7105 int
7106 attribute_list_equal (const_tree l1, const_tree l2)
7107 {
7108 if (l1 == l2)
7109 return 1;
7110
7111 return attribute_list_contained (l1, l2)
7112 && attribute_list_contained (l2, l1);
7113 }
7114
7115 /* Given two lists of attributes, return true if list L2 is
7116 completely contained within L1. */
7117 /* ??? This would be faster if attribute names were stored in a canonicalized
7118 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7119 must be used to show these elements are equivalent (which they are). */
7120 /* ??? It's not clear that attributes with arguments will always be handled
7121 correctly. */
7122
7123 int
7124 attribute_list_contained (const_tree l1, const_tree l2)
7125 {
7126 const_tree t1, t2;
7127
7128 /* First check the obvious, maybe the lists are identical. */
7129 if (l1 == l2)
7130 return 1;
7131
7132 /* Maybe the lists are similar. */
7133 for (t1 = l1, t2 = l2;
7134 t1 != 0 && t2 != 0
7135 && get_attribute_name (t1) == get_attribute_name (t2)
7136 && TREE_VALUE (t1) == TREE_VALUE (t2);
7137 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7138 ;
7139
7140 /* Maybe the lists are equal. */
7141 if (t1 == 0 && t2 == 0)
7142 return 1;
7143
7144 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7145 {
7146 const_tree attr;
7147 /* This CONST_CAST is okay because lookup_attribute does not
7148 modify its argument and the return value is assigned to a
7149 const_tree. */
7150 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7151 CONST_CAST_TREE (l1));
7152 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7153 attr = lookup_ident_attribute (get_attribute_name (t2),
7154 TREE_CHAIN (attr)))
7155 ;
7156
7157 if (attr == NULL_TREE)
7158 return 0;
7159 }
7160
7161 return 1;
7162 }
7163
7164 /* Given two lists of types
7165 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7166 return 1 if the lists contain the same types in the same order.
7167 Also, the TREE_PURPOSEs must match. */
7168
7169 int
7170 type_list_equal (const_tree l1, const_tree l2)
7171 {
7172 const_tree t1, t2;
7173
7174 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7175 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7176 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7177 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7178 && (TREE_TYPE (TREE_PURPOSE (t1))
7179 == TREE_TYPE (TREE_PURPOSE (t2))))))
7180 return 0;
7181
7182 return t1 == t2;
7183 }
7184
7185 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7186 given by TYPE. If the argument list accepts variable arguments,
7187 then this function counts only the ordinary arguments. */
7188
7189 int
7190 type_num_arguments (const_tree type)
7191 {
7192 int i = 0;
7193 tree t;
7194
7195 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7196 /* If the function does not take a variable number of arguments,
7197 the last element in the list will have type `void'. */
7198 if (VOID_TYPE_P (TREE_VALUE (t)))
7199 break;
7200 else
7201 ++i;
7202
7203 return i;
7204 }
7205
7206 /* Nonzero if integer constants T1 and T2
7207 represent the same constant value. */
7208
7209 int
7210 tree_int_cst_equal (const_tree t1, const_tree t2)
7211 {
7212 if (t1 == t2)
7213 return 1;
7214
7215 if (t1 == 0 || t2 == 0)
7216 return 0;
7217
7218 if (TREE_CODE (t1) == INTEGER_CST
7219 && TREE_CODE (t2) == INTEGER_CST
7220 && wi::to_widest (t1) == wi::to_widest (t2))
7221 return 1;
7222
7223 return 0;
7224 }
7225
7226 /* Return true if T is an INTEGER_CST whose numerical value (extended
7227 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7228
7229 bool
7230 tree_fits_shwi_p (const_tree t)
7231 {
7232 return (t != NULL_TREE
7233 && TREE_CODE (t) == INTEGER_CST
7234 && wi::fits_shwi_p (wi::to_widest (t)));
7235 }
7236
7237 /* Return true if T is an INTEGER_CST whose numerical value (extended
7238 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7239
7240 bool
7241 tree_fits_uhwi_p (const_tree t)
7242 {
7243 return (t != NULL_TREE
7244 && TREE_CODE (t) == INTEGER_CST
7245 && wi::fits_uhwi_p (wi::to_widest (t)));
7246 }
7247
7248 /* T is an INTEGER_CST whose numerical value (extended according to
7249 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7250 HOST_WIDE_INT. */
7251
7252 HOST_WIDE_INT
7253 tree_to_shwi (const_tree t)
7254 {
7255 gcc_assert (tree_fits_shwi_p (t));
7256 return TREE_INT_CST_LOW (t);
7257 }
7258
7259 /* T is an INTEGER_CST whose numerical value (extended according to
7260 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7261 HOST_WIDE_INT. */
7262
7263 unsigned HOST_WIDE_INT
7264 tree_to_uhwi (const_tree t)
7265 {
7266 gcc_assert (tree_fits_uhwi_p (t));
7267 return TREE_INT_CST_LOW (t);
7268 }
7269
7270 /* Return the most significant (sign) bit of T. */
7271
7272 int
7273 tree_int_cst_sign_bit (const_tree t)
7274 {
7275 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7276
7277 return wi::extract_uhwi (t, bitno, 1);
7278 }
7279
7280 /* Return an indication of the sign of the integer constant T.
7281 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7282 Note that -1 will never be returned if T's type is unsigned. */
7283
7284 int
7285 tree_int_cst_sgn (const_tree t)
7286 {
7287 if (wi::eq_p (t, 0))
7288 return 0;
7289 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7290 return 1;
7291 else if (wi::neg_p (t))
7292 return -1;
7293 else
7294 return 1;
7295 }
7296
7297 /* Return the minimum number of bits needed to represent VALUE in a
7298 signed or unsigned type; SGN says which. */
7299
7300 unsigned int
7301 tree_int_cst_min_precision (tree value, signop sgn)
7302 {
7303 /* If the value is negative, compute its negative minus 1. The latter
7304 adjustment is because the absolute value of the largest negative value
7305 is one larger than the largest positive value. This is equivalent to
7306 a bit-wise negation, so use that operation instead. */
7307
7308 if (tree_int_cst_sgn (value) < 0)
7309 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7310
7311 /* Return the number of bits needed, taking into account the fact
7312 that we need one more bit for a signed than unsigned type.
7313 If value is 0 or -1, the minimum precision is 1 no matter
7314 whether SGN is SIGNED or UNSIGNED. */
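/* Worked example (illustrative): for VALUE = -5 and SGN == SIGNED, the
   bit-wise negation gives ~(-5) = 4, tree_floor_log2 (4) + 1 = 3, and one
   extra sign bit yields 4 -- a 4-bit signed type holds -8 .. 7, so -5
   indeed fits, while 3 bits would not.  */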
7315
7316 if (integer_zerop (value))
7317 return 1;
7318 else
7319 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7320 }
7321
7322 /* Return truthvalue of whether T1 is the same tree structure as T2.
7323 Return 1 if they are the same.
7324 Return 0 if they are understandably different.
7325 Return -1 if either contains tree structure not understood by
7326 this function. */
7327
7328 int
7329 simple_cst_equal (const_tree t1, const_tree t2)
7330 {
7331 enum tree_code code1, code2;
7332 int cmp;
7333 int i;
7334
7335 if (t1 == t2)
7336 return 1;
7337 if (t1 == 0 || t2 == 0)
7338 return 0;
7339
7340 code1 = TREE_CODE (t1);
7341 code2 = TREE_CODE (t2);
7342
7343 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7344 {
7345 if (CONVERT_EXPR_CODE_P (code2)
7346 || code2 == NON_LVALUE_EXPR)
7347 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7348 else
7349 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7350 }
7351
7352 else if (CONVERT_EXPR_CODE_P (code2)
7353 || code2 == NON_LVALUE_EXPR)
7354 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7355
7356 if (code1 != code2)
7357 return 0;
7358
7359 switch (code1)
7360 {
7361 case INTEGER_CST:
7362 return wi::to_widest (t1) == wi::to_widest (t2);
7363
7364 case REAL_CST:
7365 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7366
7367 case FIXED_CST:
7368 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7369
7370 case STRING_CST:
7371 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7372 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7373 TREE_STRING_LENGTH (t1)));
7374
7375 case CONSTRUCTOR:
7376 {
7377 unsigned HOST_WIDE_INT idx;
7378 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7379 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7380
7381 if (vec_safe_length (v1) != vec_safe_length (v2))
7382 return false;
7383
7384 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7385 /* ??? Should we handle also fields here? */
7386 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7387 return false;
7388 return true;
7389 }
7390
7391 case SAVE_EXPR:
7392 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7393
7394 case CALL_EXPR:
7395 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7396 if (cmp <= 0)
7397 return cmp;
7398 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7399 return 0;
7400 {
7401 const_tree arg1, arg2;
7402 const_call_expr_arg_iterator iter1, iter2;
7403 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7404 arg2 = first_const_call_expr_arg (t2, &iter2);
7405 arg1 && arg2;
7406 arg1 = next_const_call_expr_arg (&iter1),
7407 arg2 = next_const_call_expr_arg (&iter2))
7408 {
7409 cmp = simple_cst_equal (arg1, arg2);
7410 if (cmp <= 0)
7411 return cmp;
7412 }
7413 return arg1 == arg2;
7414 }
7415
7416 case TARGET_EXPR:
7417 /* Special case: if either target is an unallocated VAR_DECL,
7418 it means that it's going to be unified with whatever the
7419 TARGET_EXPR is really supposed to initialize, so treat it
7420 as being equivalent to anything. */
7421 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7422 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7423 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7424 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7425 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7426 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7427 cmp = 1;
7428 else
7429 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7430
7431 if (cmp <= 0)
7432 return cmp;
7433
7434 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7435
7436 case WITH_CLEANUP_EXPR:
7437 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7438 if (cmp <= 0)
7439 return cmp;
7440
7441 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7442
7443 case COMPONENT_REF:
7444 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7445 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7446
7447 return 0;
7448
7449 case VAR_DECL:
7450 case PARM_DECL:
7451 case CONST_DECL:
7452 case FUNCTION_DECL:
7453 return 0;
7454
7455 default:
7456 break;
7457 }
7458
7459 /* This general rule works for most tree codes. All exceptions should be
7460 handled above. If this is a language-specific tree code, we can't
7461 trust what might be in the operand, so say we don't know
7462 the situation. */
7463 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7464 return -1;
7465
7466 switch (TREE_CODE_CLASS (code1))
7467 {
7468 case tcc_unary:
7469 case tcc_binary:
7470 case tcc_comparison:
7471 case tcc_expression:
7472 case tcc_reference:
7473 case tcc_statement:
7474 cmp = 1;
7475 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7476 {
7477 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7478 if (cmp <= 0)
7479 return cmp;
7480 }
7481
7482 return cmp;
7483
7484 default:
7485 return -1;
7486 }
7487 }
7488
7489 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7490 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7491 than U, respectively. */
7492
7493 int
7494 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7495 {
7496 if (tree_int_cst_sgn (t) < 0)
7497 return -1;
7498 else if (!tree_fits_uhwi_p (t))
7499 return 1;
7500 else if (TREE_INT_CST_LOW (t) == u)
7501 return 0;
7502 else if (TREE_INT_CST_LOW (t) < u)
7503 return -1;
7504 else
7505 return 1;
7506 }
7507
7508 /* Return true if SIZE represents a constant size that is in bounds of
7509 what the middle-end and the backend accept (covering not more than
7510 half of the address-space). */
7511
7512 bool
7513 valid_constant_size_p (const_tree size)
7514 {
7515 if (! tree_fits_uhwi_p (size)
7516 || TREE_OVERFLOW (size)
7517 || tree_int_cst_sign_bit (size) != 0)
7518 return false;
7519 return true;
7520 }
7521
7522 /* Return the precision of the type, or for a complex or vector type the
7523 precision of the type of its elements. */
7524
7525 unsigned int
7526 element_precision (const_tree type)
7527 {
7528 if (!TYPE_P (type))
7529 type = TREE_TYPE (type);
7530 enum tree_code code = TREE_CODE (type);
7531 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7532 type = TREE_TYPE (type);
7533
7534 return TYPE_PRECISION (type);
7535 }
7536
7537 /* Return true if CODE represents an associative tree code. Otherwise
7538 return false. */
7539 bool
7540 associative_tree_code (enum tree_code code)
7541 {
7542 switch (code)
7543 {
7544 case BIT_IOR_EXPR:
7545 case BIT_AND_EXPR:
7546 case BIT_XOR_EXPR:
7547 case PLUS_EXPR:
7548 case MULT_EXPR:
7549 case MIN_EXPR:
7550 case MAX_EXPR:
7551 return true;
7552
7553 default:
7554 break;
7555 }
7556 return false;
7557 }
7558
7559 /* Return true if CODE represents a commutative tree code. Otherwise
7560 return false. */
7561 bool
7562 commutative_tree_code (enum tree_code code)
7563 {
7564 switch (code)
7565 {
7566 case PLUS_EXPR:
7567 case MULT_EXPR:
7568 case MULT_HIGHPART_EXPR:
7569 case MIN_EXPR:
7570 case MAX_EXPR:
7571 case BIT_IOR_EXPR:
7572 case BIT_XOR_EXPR:
7573 case BIT_AND_EXPR:
7574 case NE_EXPR:
7575 case EQ_EXPR:
7576 case UNORDERED_EXPR:
7577 case ORDERED_EXPR:
7578 case UNEQ_EXPR:
7579 case LTGT_EXPR:
7580 case TRUTH_AND_EXPR:
7581 case TRUTH_XOR_EXPR:
7582 case TRUTH_OR_EXPR:
7583 case WIDEN_MULT_EXPR:
7584 case VEC_WIDEN_MULT_HI_EXPR:
7585 case VEC_WIDEN_MULT_LO_EXPR:
7586 case VEC_WIDEN_MULT_EVEN_EXPR:
7587 case VEC_WIDEN_MULT_ODD_EXPR:
7588 return true;
7589
7590 default:
7591 break;
7592 }
7593 return false;
7594 }
7595
7596 /* Return true if CODE represents a ternary tree code for which the
7597 first two operands are commutative. Otherwise return false. */
7598 bool
7599 commutative_ternary_tree_code (enum tree_code code)
7600 {
7601 switch (code)
7602 {
7603 case WIDEN_MULT_PLUS_EXPR:
7604 case WIDEN_MULT_MINUS_EXPR:
7605 case DOT_PROD_EXPR:
7606 case FMA_EXPR:
7607 return true;
7608
7609 default:
7610 break;
7611 }
7612 return false;
7613 }
7614
7615 /* Returns true if CODE can overflow. */
7616
7617 bool
7618 operation_can_overflow (enum tree_code code)
7619 {
7620 switch (code)
7621 {
7622 case PLUS_EXPR:
7623 case MINUS_EXPR:
7624 case MULT_EXPR:
7625 case LSHIFT_EXPR:
7626 /* Can overflow in various ways. */
7627 return true;
7628 case TRUNC_DIV_EXPR:
7629 case EXACT_DIV_EXPR:
7630 case FLOOR_DIV_EXPR:
7631 case CEIL_DIV_EXPR:
7632 /* For INT_MIN / -1. */
7633 return true;
7634 case NEGATE_EXPR:
7635 case ABS_EXPR:
7636 /* For -INT_MIN. */
7637 return true;
7638 default:
7639 /* These operators cannot overflow. */
7640 return false;
7641 }
7642 }
7643
7644 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7645 -ftrapv doesn't generate trapping insns for CODE. */
7646
7647 bool
7648 operation_no_trapping_overflow (tree type, enum tree_code code)
7649 {
7650 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7651
7652 /* We don't generate instructions that trap on overflow for complex or vector
7653 types. */
7654 if (!INTEGRAL_TYPE_P (type))
7655 return true;
7656
7657 if (!TYPE_OVERFLOW_TRAPS (type))
7658 return true;
7659
7660 switch (code)
7661 {
7662 case PLUS_EXPR:
7663 case MINUS_EXPR:
7664 case MULT_EXPR:
7665 case NEGATE_EXPR:
7666 case ABS_EXPR:
7667 /* These operators can overflow, and -ftrapv generates trapping code for
7668 these. */
7669 return false;
7670 case TRUNC_DIV_EXPR:
7671 case EXACT_DIV_EXPR:
7672 case FLOOR_DIV_EXPR:
7673 case CEIL_DIV_EXPR:
7674 case LSHIFT_EXPR:
7675 /* These operators can overflow, but -ftrapv does not generate trapping
7676 code for these. */
7677 return true;
7678 default:
7679 /* These operators cannot overflow. */
7680 return true;
7681 }
7682 }
7683
7684 namespace inchash
7685 {
7686
7687 /* Generate a hash value for an expression. This can be used iteratively
7688 by passing a previous result as the HSTATE argument.
7689
7690 This function is intended to produce the same hash for expressions which
7691 would compare equal using operand_equal_p. */
7692 void
7693 add_expr (const_tree t, inchash::hash &hstate)
7694 {
7695 int i;
7696 enum tree_code code;
7697 enum tree_code_class tclass;
7698
7699 if (t == NULL_TREE)
7700 {
7701 hstate.merge_hash (0);
7702 return;
7703 }
7704
7705 code = TREE_CODE (t);
7706
7707 switch (code)
7708 {
7709 /* Alas, constants aren't shared, so we can't rely on pointer
7710 identity. */
7711 case VOID_CST:
7712 hstate.merge_hash (0);
7713 return;
7714 case INTEGER_CST:
7715 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7716 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7717 return;
7718 case REAL_CST:
7719 {
7720 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7721 hstate.merge_hash (val2);
7722 return;
7723 }
7724 case FIXED_CST:
7725 {
7726 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7727 hstate.merge_hash (val2);
7728 return;
7729 }
7730 case STRING_CST:
7731 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7732 return;
7733 case COMPLEX_CST:
7734 inchash::add_expr (TREE_REALPART (t), hstate);
7735 inchash::add_expr (TREE_IMAGPART (t), hstate);
7736 return;
7737 case VECTOR_CST:
7738 {
7739 unsigned i;
7740 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7741 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7742 return;
7743 }
7744 case SSA_NAME:
7745 /* We can just compare by pointer. */
7746 hstate.add_wide_int (SSA_NAME_VERSION (t));
7747 return;
7748 case PLACEHOLDER_EXPR:
7749 /* The node itself doesn't matter. */
7750 return;
7751 case TREE_LIST:
7752 /* A list of expressions, for a CALL_EXPR or as the elements of a
7753 VECTOR_CST. */
7754 for (; t; t = TREE_CHAIN (t))
7755 inchash::add_expr (TREE_VALUE (t), hstate);
7756 return;
7757 case CONSTRUCTOR:
7758 {
7759 unsigned HOST_WIDE_INT idx;
7760 tree field, value;
7761 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7762 {
7763 inchash::add_expr (field, hstate);
7764 inchash::add_expr (value, hstate);
7765 }
7766 return;
7767 }
7768 case FUNCTION_DECL:
7769 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7770 Otherwise nodes that compare equal according to operand_equal_p might
7771 get different hash codes. However, don't do this for machine specific
7772 or front end builtins, since the function code is overloaded in those
7773 cases. */
7774 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7775 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7776 {
7777 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7778 code = TREE_CODE (t);
7779 }
7780 /* FALL THROUGH */
7781 default:
7782 tclass = TREE_CODE_CLASS (code);
7783
7784 if (tclass == tcc_declaration)
7785 {
7786 /* DECL's have a unique ID */
7787 hstate.add_wide_int (DECL_UID (t));
7788 }
7789 else
7790 {
7791 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7792
7793 hstate.add_object (code);
7794
7795 /* Don't hash the type, that can lead to having nodes which
7796 compare equal according to operand_equal_p, but which
7797 have different hash codes. */
7798 if (CONVERT_EXPR_CODE_P (code)
7799 || code == NON_LVALUE_EXPR)
7800 {
7801 /* Make sure to include signedness in the hash computation. */
7802 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7803 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7804 }
7805
7806 else if (commutative_tree_code (code))
7807 {
7808 /* It's a commutative expression. We want to hash it the same
7809 however it appears. We do this by first hashing both operands
7810 and then rehashing based on the order of their independent
7811 hashes. */
7812 inchash::hash one, two;
7813 inchash::add_expr (TREE_OPERAND (t, 0), one);
7814 inchash::add_expr (TREE_OPERAND (t, 1), two);
7815 hstate.add_commutative (one, two);
7816 }
7817 else
7818 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7819 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7820 }
7821 return;
7822 }
7823 }
7824
7825 }
7826
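/* Illustration (editorial sketch, not part of the original sources): because
   PLUS_EXPR is a commutative_tree_code, the two trees built below for some
   integer-typed operand trees a and b

     tree sum1 = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree sum2 = build2 (PLUS_EXPR, integer_type_node, b, a);

   receive the same hash from inchash::add_expr, since the operand hashes are
   combined with hstate.add_commutative regardless of their order, mirroring
   the way operand_equal_p treats the two trees as equal.  */
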
7827 /* Constructors for pointer, array and function types.
7828 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7829 constructed by language-dependent code, not here.) */
7830
7831 /* Construct, lay out and return the type of pointers to TO_TYPE with
7832 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7833 reference all of memory. If such a type has already been
7834 constructed, reuse it. */
7835
7836 tree
7837 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7838 bool can_alias_all)
7839 {
7840 tree t;
7841 bool could_alias = can_alias_all;
7842
7843 if (to_type == error_mark_node)
7844 return error_mark_node;
7845
7846 /* If the pointed-to type has the may_alias attribute set, force
7847 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7848 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7849 can_alias_all = true;
7850
7851 /* In some cases, languages will have things that aren't a POINTER_TYPE
7852 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7853 In that case, return that type without regard to the rest of our
7854 operands.
7855
7856 ??? This is a kludge, but consistent with the way this function has
7857 always operated and there doesn't seem to be a good way to avoid this
7858 at the moment. */
7859 if (TYPE_POINTER_TO (to_type) != 0
7860 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7861 return TYPE_POINTER_TO (to_type);
7862
7863 /* First, if we already have a type for pointers to TO_TYPE and it's
7864 the proper mode, use it. */
7865 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7866 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7867 return t;
7868
7869 t = make_node (POINTER_TYPE);
7870
7871 TREE_TYPE (t) = to_type;
7872 SET_TYPE_MODE (t, mode);
7873 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7874 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7875 TYPE_POINTER_TO (to_type) = t;
7876
7877 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7878 SET_TYPE_STRUCTURAL_EQUALITY (t);
7879 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7880 TYPE_CANONICAL (t)
7881 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7882 mode, false);
7883
7884 /* Lay out the type. This function has many callers that are concerned
7885 with expression-construction, and this simplifies them all. */
7886 layout_type (t);
7887
7888 return t;
7889 }
7890
7891 /* By default build pointers in ptr_mode. */
7892
7893 tree
7894 build_pointer_type (tree to_type)
7895 {
7896 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7897 : TYPE_ADDR_SPACE (to_type);
7898 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7899 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7900 }
7901
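/* Illustration (editorial sketch, not part of the original sources):

     tree int_ptr = build_pointer_type (integer_type_node);

   yields the POINTER_TYPE for "int *" in the pointer mode of the generic
   address space; a later identical call returns the same node because it is
   recorded on the TYPE_POINTER_TO chain of integer_type_node.  */
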
7902 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7903
7904 tree
7905 build_reference_type_for_mode (tree to_type, machine_mode mode,
7906 bool can_alias_all)
7907 {
7908 tree t;
7909 bool could_alias = can_alias_all;
7910
7911 if (to_type == error_mark_node)
7912 return error_mark_node;
7913
7914 /* If the pointed-to type has the may_alias attribute set, force
7915 a TYPE_REF_CAN_ALIAS_ALL reference to be generated. */
7916 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7917 can_alias_all = true;
7918
7919 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7920 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7921 In that case, return that type without regard to the rest of our
7922 operands.
7923
7924 ??? This is a kludge, but consistent with the way this function has
7925 always operated and there doesn't seem to be a good way to avoid this
7926 at the moment. */
7927 if (TYPE_REFERENCE_TO (to_type) != 0
7928 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7929 return TYPE_REFERENCE_TO (to_type);
7930
7931 /* First, if we already have a type for references to TO_TYPE and it's
7932 the proper mode, use it. */
7933 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7934 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7935 return t;
7936
7937 t = make_node (REFERENCE_TYPE);
7938
7939 TREE_TYPE (t) = to_type;
7940 SET_TYPE_MODE (t, mode);
7941 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7942 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7943 TYPE_REFERENCE_TO (to_type) = t;
7944
7945 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7946 SET_TYPE_STRUCTURAL_EQUALITY (t);
7947 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7948 TYPE_CANONICAL (t)
7949 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7950 mode, false);
7951
7952 layout_type (t);
7953
7954 return t;
7955 }
7956
7957
7958 /* Build the node for the type of references-to-TO_TYPE by default
7959 in ptr_mode. */
7960
7961 tree
7962 build_reference_type (tree to_type)
7963 {
7964 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7965 : TYPE_ADDR_SPACE (to_type);
7966 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7967 return build_reference_type_for_mode (to_type, pointer_mode, false);
7968 }
7969
7970 #define MAX_INT_CACHED_PREC \
7971 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7972 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7973
7974 /* Builds a signed or unsigned integer type of precision PRECISION.
7975 Used for C bitfields whose precision does not match that of
7976 built-in target types. */
7977 tree
7978 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7979 int unsignedp)
7980 {
7981 tree itype, ret;
7982
7983 if (unsignedp)
7984 unsignedp = MAX_INT_CACHED_PREC + 1;
7985
7986 if (precision <= MAX_INT_CACHED_PREC)
7987 {
7988 itype = nonstandard_integer_type_cache[precision + unsignedp];
7989 if (itype)
7990 return itype;
7991 }
7992
7993 itype = make_node (INTEGER_TYPE);
7994 TYPE_PRECISION (itype) = precision;
7995
7996 if (unsignedp)
7997 fixup_unsigned_type (itype);
7998 else
7999 fixup_signed_type (itype);
8000
8001 ret = itype;
8002 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8003 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8004 if (precision <= MAX_INT_CACHED_PREC)
8005 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8006
8007 return ret;
8008 }
8009
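/* Illustration (editorial sketch, not part of the original sources):

     tree uint24 = build_nonstandard_integer_type (24, 1);

   produces an unsigned INTEGER_TYPE of precision 24, such as is needed for a
   C bit-field declared "unsigned int f : 24"; results for precisions up to
   MAX_INT_CACHED_PREC are memoized in nonstandard_integer_type_cache.  */
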
8010 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8011 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8012 is true, reuse such a type that has already been constructed. */
8013
8014 static tree
8015 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8016 {
8017 tree itype = make_node (INTEGER_TYPE);
8018 inchash::hash hstate;
8019
8020 TREE_TYPE (itype) = type;
8021
8022 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8023 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8024
8025 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8026 SET_TYPE_MODE (itype, TYPE_MODE (type));
8027 TYPE_SIZE (itype) = TYPE_SIZE (type);
8028 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8029 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
8030 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8031
8032 if (!shared)
8033 return itype;
8034
8035 if ((TYPE_MIN_VALUE (itype)
8036 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8037 || (TYPE_MAX_VALUE (itype)
8038 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8039 {
8040 /* Since we cannot reliably merge this type, we need to compare it using
8041 structural equality checks. */
8042 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8043 return itype;
8044 }
8045
8046 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8047 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8048 hstate.merge_hash (TYPE_HASH (type));
8049 itype = type_hash_canon (hstate.end (), itype);
8050
8051 return itype;
8052 }
8053
8054 /* Wrapper around build_range_type_1 with SHARED set to true. */
8055
8056 tree
8057 build_range_type (tree type, tree lowval, tree highval)
8058 {
8059 return build_range_type_1 (type, lowval, highval, true);
8060 }
8061
8062 /* Wrapper around build_range_type_1 with SHARED set to false. */
8063
8064 tree
8065 build_nonshared_range_type (tree type, tree lowval, tree highval)
8066 {
8067 return build_range_type_1 (type, lowval, highval, false);
8068 }
8069
8070 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8071 MAXVAL should be the maximum value in the domain
8072 (one less than the length of the array).
8073
8074 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8075 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8076 The limit exists because the result is a signed type and we don't handle
8077 sizes that use more than one HOST_WIDE_INT. */
8078
8079 tree
8080 build_index_type (tree maxval)
8081 {
8082 return build_range_type (sizetype, size_zero_node, maxval);
8083 }
8084
8085 /* Return true if the debug information for TYPE, a subtype, should be emitted
8086 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8087 high bound, respectively. Return false when emitting a subrange type would
8088 merely obfuscate the debug info without reflecting the source code. */
8089
8090 bool
8091 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8092 {
8093 tree base_type = TREE_TYPE (type), low, high;
8094
8095 /* Subrange types have a base type which is an integral type. */
8096 if (!INTEGRAL_TYPE_P (base_type))
8097 return false;
8098
8099 /* Get the real bounds of the subtype. */
8100 if (lang_hooks.types.get_subrange_bounds)
8101 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8102 else
8103 {
8104 low = TYPE_MIN_VALUE (type);
8105 high = TYPE_MAX_VALUE (type);
8106 }
8107
8108 /* If the type and its base type have the same representation and the same
8109 name, then the type is not a subrange but a copy of the base type. */
8110 if ((TREE_CODE (base_type) == INTEGER_TYPE
8111 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8112 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8113 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8114 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8115 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8116 return false;
8117
8118 if (lowval)
8119 *lowval = low;
8120 if (highval)
8121 *highval = high;
8122 return true;
8123 }
8124
8125 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8126 and number of elements specified by the range of values of INDEX_TYPE.
8127 If SHARED is true, reuse such a type that has already been constructed. */
8128
8129 static tree
8130 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8131 {
8132 tree t;
8133
8134 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8135 {
8136 error ("arrays of functions are not meaningful");
8137 elt_type = integer_type_node;
8138 }
8139
8140 t = make_node (ARRAY_TYPE);
8141 TREE_TYPE (t) = elt_type;
8142 TYPE_DOMAIN (t) = index_type;
8143 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8144 layout_type (t);
8145
8146 /* If the element type is incomplete at this point we get marked for
8147 structural equality. Do not record these types in the canonical
8148 type hashtable. */
8149 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8150 return t;
8151
8152 if (shared)
8153 {
8154 inchash::hash hstate;
8155 hstate.add_object (TYPE_HASH (elt_type));
8156 if (index_type)
8157 hstate.add_object (TYPE_HASH (index_type));
8158 t = type_hash_canon (hstate.end (), t);
8159 }
8160
8161 if (TYPE_CANONICAL (t) == t)
8162 {
8163 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8164 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8165 SET_TYPE_STRUCTURAL_EQUALITY (t);
8166 else if (TYPE_CANONICAL (elt_type) != elt_type
8167 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8168 TYPE_CANONICAL (t)
8169 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8170 index_type
8171 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8172 shared);
8173 }
8174
8175 return t;
8176 }
8177
8178 /* Wrapper around build_array_type_1 with SHARED set to true. */
8179
8180 tree
8181 build_array_type (tree elt_type, tree index_type)
8182 {
8183 return build_array_type_1 (elt_type, index_type, true);
8184 }
8185
8186 /* Wrapper around build_array_type_1 with SHARED set to false. */
8187
8188 tree
8189 build_nonshared_array_type (tree elt_type, tree index_type)
8190 {
8191 return build_array_type_1 (elt_type, index_type, false);
8192 }
8193
8194 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8195 sizetype. */
8196
8197 tree
8198 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8199 {
8200 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8201 }
8202
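/* Illustration (editorial sketch, not part of the original sources):

     tree buf_type = build_array_type_nelts (char_type_node, 10);

   is equivalent to
     build_array_type (char_type_node, build_index_type (size_int (9)));
   i.e. the type "char[10]" with a TYPE_DOMAIN covering [0, 9].  */
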
8203 /* Strip ARRAY_TYPE wrappers from TYPE until a non-array element type is
8204 found, and return that element type. */
8205
8206 tree
8207 strip_array_types (tree type)
8208 {
8209 while (TREE_CODE (type) == ARRAY_TYPE)
8210 type = TREE_TYPE (type);
8211
8212 return type;
8213 }
8214
8215 /* Computes the canonical argument types from the argument type list
8216 ARGTYPES.
8217
8218 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8219 on entry to this function, or if any of the ARGTYPES are
8220 structural.
8221
8222 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8223 true on entry to this function, or if any of the ARGTYPES are
8224 non-canonical.
8225
8226 Returns a canonical argument list, which may be ARGTYPES when the
8227 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8228 true) or would not differ from ARGTYPES. */
8229
8230 static tree
8231 maybe_canonicalize_argtypes (tree argtypes,
8232 bool *any_structural_p,
8233 bool *any_noncanonical_p)
8234 {
8235 tree arg;
8236 bool any_noncanonical_argtypes_p = false;
8237
8238 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8239 {
8240 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8241 /* Fail gracefully by stating that the type is structural. */
8242 *any_structural_p = true;
8243 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8244 *any_structural_p = true;
8245 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8246 || TREE_PURPOSE (arg))
8247 /* If the argument has a default argument, we consider it
8248 non-canonical even though the type itself is canonical.
8249 That way, different variants of function and method types
8250 with default arguments will all point to the variant with
8251 no defaults as their canonical type. */
8252 any_noncanonical_argtypes_p = true;
8253 }
8254
8255 if (*any_structural_p)
8256 return argtypes;
8257
8258 if (any_noncanonical_argtypes_p)
8259 {
8260 /* Build the canonical list of argument types. */
8261 tree canon_argtypes = NULL_TREE;
8262 bool is_void = false;
8263
8264 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8265 {
8266 if (arg == void_list_node)
8267 is_void = true;
8268 else
8269 canon_argtypes = tree_cons (NULL_TREE,
8270 TYPE_CANONICAL (TREE_VALUE (arg)),
8271 canon_argtypes);
8272 }
8273
8274 canon_argtypes = nreverse (canon_argtypes);
8275 if (is_void)
8276 canon_argtypes = chainon (canon_argtypes, void_list_node);
8277
8278 /* There is a non-canonical type. */
8279 *any_noncanonical_p = true;
8280 return canon_argtypes;
8281 }
8282
8283 /* The canonical argument types are the same as ARGTYPES. */
8284 return argtypes;
8285 }
8286
8287 /* Construct, lay out and return
8288 the type of functions returning type VALUE_TYPE
8289 given arguments of types ARG_TYPES.
8290 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8291 are data type nodes for the arguments of the function.
8292 If such a type has already been constructed, reuse it. */
8293
8294 tree
8295 build_function_type (tree value_type, tree arg_types)
8296 {
8297 tree t;
8298 inchash::hash hstate;
8299 bool any_structural_p, any_noncanonical_p;
8300 tree canon_argtypes;
8301
8302 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8303 {
8304 error ("function return type cannot be function");
8305 value_type = integer_type_node;
8306 }
8307
8308 /* Make a node of the sort we want. */
8309 t = make_node (FUNCTION_TYPE);
8310 TREE_TYPE (t) = value_type;
8311 TYPE_ARG_TYPES (t) = arg_types;
8312
8313 /* If we already have such a type, use the old one. */
8314 hstate.add_object (TYPE_HASH (value_type));
8315 type_hash_list (arg_types, hstate);
8316 t = type_hash_canon (hstate.end (), t);
8317
8318 /* Set up the canonical type. */
8319 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8320 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8321 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8322 &any_structural_p,
8323 &any_noncanonical_p);
8324 if (any_structural_p)
8325 SET_TYPE_STRUCTURAL_EQUALITY (t);
8326 else if (any_noncanonical_p)
8327 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8328 canon_argtypes);
8329
8330 if (!COMPLETE_TYPE_P (t))
8331 layout_type (t);
8332 return t;
8333 }
8334
8335 /* Build a function type. The RETURN_TYPE is the type returned by the
8336 function. If VAARGS is set, no void_type_node is appended to the
8337 list. ARGP must always be terminated by a NULL_TREE. */
8338
8339 static tree
8340 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8341 {
8342 tree t, args, last;
8343
8344 t = va_arg (argp, tree);
8345 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8346 args = tree_cons (NULL_TREE, t, args);
8347
8348 if (vaargs)
8349 {
8350 last = args;
8351 if (args != NULL_TREE)
8352 args = nreverse (args);
8353 gcc_assert (last != void_list_node);
8354 }
8355 else if (args == NULL_TREE)
8356 args = void_list_node;
8357 else
8358 {
8359 last = args;
8360 args = nreverse (args);
8361 TREE_CHAIN (last) = void_list_node;
8362 }
8363 args = build_function_type (return_type, args);
8364
8365 return args;
8366 }
8367
8368 /* Build a function type. The RETURN_TYPE is the type returned by the
8369 function. If additional arguments are provided, they are
8370 additional argument types. The list of argument types must always
8371 be terminated by NULL_TREE. */
8372
8373 tree
8374 build_function_type_list (tree return_type, ...)
8375 {
8376 tree args;
8377 va_list p;
8378
8379 va_start (p, return_type);
8380 args = build_function_type_list_1 (false, return_type, p);
8381 va_end (p);
8382 return args;
8383 }
8384
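/* Illustration (editorial sketch, not part of the original sources):

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);

   builds the FUNCTION_TYPE "int (double)".  The terminating NULL_TREE is
   mandatory, and void_list_node is appended automatically so the resulting
   type is not treated as taking a variable number of arguments.  */
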
8385 /* Build a variable argument function type. The RETURN_TYPE is the
8386 type returned by the function. If additional arguments are provided,
8387 they are additional argument types. The list of argument types must
8388 always be terminated by NULL_TREE. */
8389
8390 tree
8391 build_varargs_function_type_list (tree return_type, ...)
8392 {
8393 tree args;
8394 va_list p;
8395
8396 va_start (p, return_type);
8397 args = build_function_type_list_1 (true, return_type, p);
8398 va_end (p);
8399
8400 return args;
8401 }
8402
8403 /* Build a function type. RETURN_TYPE is the type returned by the
8404 function; VAARGS indicates whether the function takes varargs. The
8405 function takes N named arguments, the types of which are provided in
8406 ARG_TYPES. */
8407
8408 static tree
8409 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8410 tree *arg_types)
8411 {
8412 int i;
8413 tree t = vaargs ? NULL_TREE : void_list_node;
8414
8415 for (i = n - 1; i >= 0; i--)
8416 t = tree_cons (NULL_TREE, arg_types[i], t);
8417
8418 return build_function_type (return_type, t);
8419 }
8420
8421 /* Build a function type. RETURN_TYPE is the type returned by the
8422 function. The function takes N named arguments, the types of which
8423 are provided in ARG_TYPES. */
8424
8425 tree
8426 build_function_type_array (tree return_type, int n, tree *arg_types)
8427 {
8428 return build_function_type_array_1 (false, return_type, n, arg_types);
8429 }
8430
8431 /* Build a variable argument function type. RETURN_TYPE is the type
8432 returned by the function. The function takes N named arguments, the
8433 types of which are provided in ARG_TYPES. */
8434
8435 tree
8436 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8437 {
8438 return build_function_type_array_1 (true, return_type, n, arg_types);
8439 }
8440
8441 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8442 and ARGTYPES (a TREE_LIST) are the return type and argument types
8443 for the method. An implicit additional parameter (of type
8444 pointer-to-BASETYPE) is added to the ARGTYPES. */
8445
8446 tree
8447 build_method_type_directly (tree basetype,
8448 tree rettype,
8449 tree argtypes)
8450 {
8451 tree t;
8452 tree ptype;
8453 inchash::hash hstate;
8454 bool any_structural_p, any_noncanonical_p;
8455 tree canon_argtypes;
8456
8457 /* Make a node of the sort we want. */
8458 t = make_node (METHOD_TYPE);
8459
8460 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8461 TREE_TYPE (t) = rettype;
8462 ptype = build_pointer_type (basetype);
8463
8464 /* The actual arglist for this function includes a "hidden" argument
8465 which is "this". Put it into the list of argument types. */
8466 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8467 TYPE_ARG_TYPES (t) = argtypes;
8468
8469 /* If we already have such a type, use the old one. */
8470 hstate.add_object (TYPE_HASH (basetype));
8471 hstate.add_object (TYPE_HASH (rettype));
8472 type_hash_list (argtypes, hstate);
8473 t = type_hash_canon (hstate.end (), t);
8474
8475 /* Set up the canonical type. */
8476 any_structural_p
8477 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8478 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8479 any_noncanonical_p
8480 = (TYPE_CANONICAL (basetype) != basetype
8481 || TYPE_CANONICAL (rettype) != rettype);
8482 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8483 &any_structural_p,
8484 &any_noncanonical_p);
8485 if (any_structural_p)
8486 SET_TYPE_STRUCTURAL_EQUALITY (t);
8487 else if (any_noncanonical_p)
8488 TYPE_CANONICAL (t)
8489 = build_method_type_directly (TYPE_CANONICAL (basetype),
8490 TYPE_CANONICAL (rettype),
8491 canon_argtypes);
8492 if (!COMPLETE_TYPE_P (t))
8493 layout_type (t);
8494
8495 return t;
8496 }
8497
8498 /* Construct, lay out and return the type of methods belonging to class
8499 BASETYPE and whose arguments and values are described by TYPE.
8500 If that type exists already, reuse it.
8501 TYPE must be a FUNCTION_TYPE node. */
8502
8503 tree
8504 build_method_type (tree basetype, tree type)
8505 {
8506 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8507
8508 return build_method_type_directly (basetype,
8509 TREE_TYPE (type),
8510 TYPE_ARG_TYPES (type));
8511 }
8512
8513 /* Construct, lay out and return the type of offsets to a value
8514 of type TYPE, within an object of type BASETYPE.
8515 If a suitable offset type exists already, reuse it. */
8516
8517 tree
8518 build_offset_type (tree basetype, tree type)
8519 {
8520 tree t;
8521 inchash::hash hstate;
8522
8523 /* Make a node of the sort we want. */
8524 t = make_node (OFFSET_TYPE);
8525
8526 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8527 TREE_TYPE (t) = type;
8528
8529 /* If we already have such a type, use the old one. */
8530 hstate.add_object (TYPE_HASH (basetype));
8531 hstate.add_object (TYPE_HASH (type));
8532 t = type_hash_canon (hstate.end (), t);
8533
8534 if (!COMPLETE_TYPE_P (t))
8535 layout_type (t);
8536
8537 if (TYPE_CANONICAL (t) == t)
8538 {
8539 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8540 || TYPE_STRUCTURAL_EQUALITY_P (type))
8541 SET_TYPE_STRUCTURAL_EQUALITY (t);
8542 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8543 || TYPE_CANONICAL (type) != type)
8544 TYPE_CANONICAL (t)
8545 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8546 TYPE_CANONICAL (type));
8547 }
8548
8549 return t;
8550 }
8551
8552 /* Create a complex type whose components are COMPONENT_TYPE. */
8553
8554 tree
8555 build_complex_type (tree component_type)
8556 {
8557 tree t;
8558 inchash::hash hstate;
8559
8560 gcc_assert (INTEGRAL_TYPE_P (component_type)
8561 || SCALAR_FLOAT_TYPE_P (component_type)
8562 || FIXED_POINT_TYPE_P (component_type));
8563
8564 /* Make a node of the sort we want. */
8565 t = make_node (COMPLEX_TYPE);
8566
8567 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8568
8569 /* If we already have such a type, use the old one. */
8570 hstate.add_object (TYPE_HASH (component_type));
8571 t = type_hash_canon (hstate.end (), t);
8572
8573 if (!COMPLETE_TYPE_P (t))
8574 layout_type (t);
8575
8576 if (TYPE_CANONICAL (t) == t)
8577 {
8578 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8579 SET_TYPE_STRUCTURAL_EQUALITY (t);
8580 else if (TYPE_CANONICAL (component_type) != component_type)
8581 TYPE_CANONICAL (t)
8582 = build_complex_type (TYPE_CANONICAL (component_type));
8583 }
8584
8585 /* We need to create a name, since complex is a fundamental type. */
8586 if (! TYPE_NAME (t))
8587 {
8588 const char *name;
8589 if (component_type == char_type_node)
8590 name = "complex char";
8591 else if (component_type == signed_char_type_node)
8592 name = "complex signed char";
8593 else if (component_type == unsigned_char_type_node)
8594 name = "complex unsigned char";
8595 else if (component_type == short_integer_type_node)
8596 name = "complex short int";
8597 else if (component_type == short_unsigned_type_node)
8598 name = "complex short unsigned int";
8599 else if (component_type == integer_type_node)
8600 name = "complex int";
8601 else if (component_type == unsigned_type_node)
8602 name = "complex unsigned int";
8603 else if (component_type == long_integer_type_node)
8604 name = "complex long int";
8605 else if (component_type == long_unsigned_type_node)
8606 name = "complex long unsigned int";
8607 else if (component_type == long_long_integer_type_node)
8608 name = "complex long long int";
8609 else if (component_type == long_long_unsigned_type_node)
8610 name = "complex long long unsigned int";
8611 else
8612 name = 0;
8613
8614 if (name != 0)
8615 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8616 get_identifier (name), t);
8617 }
8618
8619 return build_qualified_type (t, TYPE_QUALS (component_type));
8620 }
8621
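/* Illustration (editorial sketch, not part of the original sources):

     tree cint = build_complex_type (integer_type_node);

   yields the COMPLEX_TYPE named "complex int" (GNU C's "_Complex int"),
   reusing any node already recorded in the type hash table for that
   component type.  */
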
8622 /* If TYPE is a real or complex floating-point type and the target
8623 does not directly support arithmetic on TYPE then return the wider
8624 type to be used for arithmetic on TYPE. Otherwise, return
8625 NULL_TREE. */
8626
8627 tree
8628 excess_precision_type (tree type)
8629 {
8630 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8631 {
8632 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8633 switch (TREE_CODE (type))
8634 {
8635 case REAL_TYPE:
8636 switch (flt_eval_method)
8637 {
8638 case 1:
8639 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8640 return double_type_node;
8641 break;
8642 case 2:
8643 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8644 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8645 return long_double_type_node;
8646 break;
8647 default:
8648 gcc_unreachable ();
8649 }
8650 break;
8651 case COMPLEX_TYPE:
8652 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8653 return NULL_TREE;
8654 switch (flt_eval_method)
8655 {
8656 case 1:
8657 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8658 return complex_double_type_node;
8659 break;
8660 case 2:
8661 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8662 || (TYPE_MODE (TREE_TYPE (type))
8663 == TYPE_MODE (double_type_node)))
8664 return complex_long_double_type_node;
8665 break;
8666 default:
8667 gcc_unreachable ();
8668 }
8669 break;
8670 default:
8671 break;
8672 }
8673 }
8674 return NULL_TREE;
8675 }
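
/* Illustration (editorial sketch, not part of the original sources): on a
   target whose FLT_EVAL_METHOD is 2 (for example x87 floating point), and
   with -fexcess-precision=standard in effect,

     excess_precision_type (float_type_node)

   returns long_double_type_node, directing the front end to carry out
   float arithmetic in long double.  */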
8676 \f
8677 /* Return OP, stripped of any conversions to wider types as much as is safe.
8678 Converting the value back to OP's type makes a value equivalent to OP.
8679
8680 If FOR_TYPE is nonzero, we return a value which, if converted to
8681 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8682
8683 OP must have integer, real or enumeral type. Pointers are not allowed!
8684
8685 There are some cases where the obvious value we could return
8686 would regenerate to OP if converted to OP's type,
8687 but would not extend like OP to wider types.
8688 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8689 For example, if OP is (unsigned short)(signed char)-1,
8690 we avoid returning (signed char)-1 if FOR_TYPE is int,
8691 even though extending that to an unsigned short would regenerate OP,
8692 since the result of extending (signed char)-1 to (int)
8693 is different from (int) OP. */
8694
8695 tree
8696 get_unwidened (tree op, tree for_type)
8697 {
8698 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8699 tree type = TREE_TYPE (op);
8700 unsigned final_prec
8701 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8702 int uns
8703 = (for_type != 0 && for_type != type
8704 && final_prec > TYPE_PRECISION (type)
8705 && TYPE_UNSIGNED (type));
8706 tree win = op;
8707
8708 while (CONVERT_EXPR_P (op))
8709 {
8710 int bitschange;
8711
8712 /* TYPE_PRECISION on vector types has different meaning
8713 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8714 so avoid them here. */
8715 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8716 break;
8717
8718 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8719 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8720
8721 /* Truncations are many-to-one and so cannot be removed,
8722 unless we are later going to truncate down even further. */
8723 if (bitschange < 0
8724 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8725 break;
8726
8727 /* See what's inside this conversion. If we decide to strip it,
8728 we will set WIN. */
8729 op = TREE_OPERAND (op, 0);
8730
8731 /* If we have not stripped any zero-extensions (uns is 0),
8732 we can strip any kind of extension.
8733 If we have previously stripped a zero-extension,
8734 only zero-extensions can safely be stripped.
8735 Any extension can be stripped if the bits it would produce
8736 are all going to be discarded later by truncating to FOR_TYPE. */
8737
8738 if (bitschange > 0)
8739 {
8740 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8741 win = op;
8742 /* TYPE_UNSIGNED says whether this is a zero-extension.
8743 Let's avoid computing it if it does not affect WIN
8744 and if UNS will not be needed again. */
8745 if ((uns
8746 || CONVERT_EXPR_P (op))
8747 && TYPE_UNSIGNED (TREE_TYPE (op)))
8748 {
8749 uns = 1;
8750 win = op;
8751 }
8752 }
8753 }
8754
8755 /* If we finally reach a constant see if it fits in for_type and
8756 in that case convert it. */
8757 if (for_type
8758 && TREE_CODE (win) == INTEGER_CST
8759 && TREE_TYPE (win) != for_type
8760 && int_fits_type_p (win, for_type))
8761 win = fold_convert (for_type, win);
8762
8763 return win;
8764 }
8765 \f
8766 /* Return OP or a simpler expression for a narrower value
8767 which can be sign-extended or zero-extended to give back OP.
8768 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8769 or 0 if the value should be sign-extended. */
8770
8771 tree
8772 get_narrower (tree op, int *unsignedp_ptr)
8773 {
8774 int uns = 0;
8775 int first = 1;
8776 tree win = op;
8777 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8778
8779 while (TREE_CODE (op) == NOP_EXPR)
8780 {
8781 int bitschange
8782 = (TYPE_PRECISION (TREE_TYPE (op))
8783 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8784
8785 /* Truncations are many-to-one and so cannot be removed. */
8786 if (bitschange < 0)
8787 break;
8788
8789 /* See what's inside this conversion. If we decide to strip it,
8790 we will set WIN. */
8791
8792 if (bitschange > 0)
8793 {
8794 op = TREE_OPERAND (op, 0);
8795 /* An extension: the outermost one can be stripped,
8796 but remember whether it is zero or sign extension. */
8797 if (first)
8798 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8799 /* Otherwise, if a sign extension has been stripped,
8800 only sign extensions can now be stripped;
8801 if a zero extension has been stripped, only zero-extensions. */
8802 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8803 break;
8804 first = 0;
8805 }
8806 else /* bitschange == 0 */
8807 {
8808 /* A change in nominal type can always be stripped, but we must
8809 preserve the unsignedness. */
8810 if (first)
8811 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8812 first = 0;
8813 op = TREE_OPERAND (op, 0);
8814 /* Keep trying to narrow, but don't assign op to win if it
8815 would turn an integral type into something else. */
8816 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8817 continue;
8818 }
8819
8820 win = op;
8821 }
8822
8823 if (TREE_CODE (op) == COMPONENT_REF
8824 /* Exclude non-integer types, since type_for_size always gives an integer type. */
8825 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8826 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8827 /* Ensure field is laid out already. */
8828 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8829 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8830 {
8831 unsigned HOST_WIDE_INT innerprec
8832 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8833 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8834 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8835 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8836
8837 /* We can get this structure field in a narrower type that fits it,
8838 but the resulting extension to its nominal type (a fullword type)
8839 must satisfy the same conditions as for other extensions.
8840
8841 Do this only for fields that are aligned (not bit-fields),
8842 because when bit-field insns will be used there is no
8843 advantage in doing this. */
8844
8845 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8846 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8847 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8848 && type != 0)
8849 {
8850 if (first)
8851 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8852 win = fold_convert (type, op);
8853 }
8854 }
8855
8856 *unsignedp_ptr = uns;
8857 return win;
8858 }
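
/* Illustration (editorial sketch, not part of the original sources): for a
   signed short variable S and the widening tree OP = (int) S, the call

     int unsignedp;
     tree narrow = get_narrower (op, &unsignedp);

   strips the NOP_EXPR and returns S, with *UNSIGNEDP_PTR set to 0 because
   the stripped extension was a sign extension.  */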
8859 \f
8860 /* Returns true if integer constant C has a value that is permissible
8861 for type TYPE (an INTEGER_TYPE). */
8862
8863 bool
8864 int_fits_type_p (const_tree c, const_tree type)
8865 {
8866 tree type_low_bound, type_high_bound;
8867 bool ok_for_low_bound, ok_for_high_bound;
8868 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8869
8870 retry:
8871 type_low_bound = TYPE_MIN_VALUE (type);
8872 type_high_bound = TYPE_MAX_VALUE (type);
8873
8874 /* If at least one bound of the type is a constant integer, we can check
8875 ourselves and maybe make a decision. If no such decision is possible, but
8876 this type is a subtype, try checking against that. Otherwise, use
8877 fits_to_tree_p, which checks against the precision.
8878
8879 Compute the status for each possibly constant bound, and return if we see
8880 one does not match. Use ok_for_xxx_bound for this purpose: it is false when
8881 we could not check against that bound (it is not a constant), and true when
8882 the constant is known to fit that bound. */
8883
8884 /* Check if c >= type_low_bound. */
8885 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8886 {
8887 if (tree_int_cst_lt (c, type_low_bound))
8888 return false;
8889 ok_for_low_bound = true;
8890 }
8891 else
8892 ok_for_low_bound = false;
8893
8894 /* Check if c <= type_high_bound. */
8895 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8896 {
8897 if (tree_int_cst_lt (type_high_bound, c))
8898 return false;
8899 ok_for_high_bound = true;
8900 }
8901 else
8902 ok_for_high_bound = false;
8903
8904 /* If the constant fits both bounds, the result is known. */
8905 if (ok_for_low_bound && ok_for_high_bound)
8906 return true;
8907
8908 /* Perform some generic filtering which may allow making a decision
8909 even if the bounds are not constant. First, negative integers
8910 never fit in unsigned types. */
8911 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8912 return false;
8913
8914 /* Second, narrower types always fit in wider ones. */
8915 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8916 return true;
8917
8918 /* Third, unsigned integers with top bit set never fit signed types. */
8919 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8920 {
8921 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8922 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8923 {
8924 /* When a tree_cst is converted to a wide-int, the precision
8925 is taken from the type. However, if the precision of the
8926 mode underneath the type is smaller than that, it is
8927 possible that the value will not fit. The test below
8928 fails if any bit is set between the sign bit of the
8929 underlying mode and the top bit of the type. */
8930 if (wi::ne_p (wi::zext (c, prec - 1), c))
8931 return false;
8932 }
8933 else if (wi::neg_p (c))
8934 return false;
8935 }
8936
8937 /* If we haven't been able to decide at this point, there is nothing more we
8938 can check ourselves here. Look at the base type if we have one and it
8939 has the same precision. */
8940 if (TREE_CODE (type) == INTEGER_TYPE
8941 && TREE_TYPE (type) != 0
8942 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8943 {
8944 type = TREE_TYPE (type);
8945 goto retry;
8946 }
8947
8948 /* Or to fits_to_tree_p, if nothing else. */
8949 return wi::fits_to_tree_p (c, type);
8950 }
8951
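/* Illustration (editorial sketch, not part of the original sources): on a
   typical target where unsigned char is 8 bits wide,

     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      unsigned_char_type_node)

   returns false because 300 exceeds the constant high bound 255, whereas the
   same constant checked against short_integer_type_node fits and the
   function returns true.  */
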
8952 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8953 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8954 represented (assuming two's-complement arithmetic) within the bit
8955 precision of the type are returned instead. */
8956
8957 void
8958 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8959 {
8960 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8961 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8962 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8963 else
8964 {
8965 if (TYPE_UNSIGNED (type))
8966 mpz_set_ui (min, 0);
8967 else
8968 {
8969 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8970 wi::to_mpz (mn, min, SIGNED);
8971 }
8972 }
8973
8974 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8975 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8976 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8977 else
8978 {
8979 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8980 wi::to_mpz (mn, max, TYPE_SIGN (type));
8981 }
8982 }
8983
8984 /* Return true if VAR is an automatic variable defined in function FN. */
8985
8986 bool
8987 auto_var_in_fn_p (const_tree var, const_tree fn)
8988 {
8989 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8990 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8991 || TREE_CODE (var) == PARM_DECL)
8992 && ! TREE_STATIC (var))
8993 || TREE_CODE (var) == LABEL_DECL
8994 || TREE_CODE (var) == RESULT_DECL));
8995 }
8996
8997 /* Subprogram of following function. Called by walk_tree.
8998
8999 Return *TP if it is an automatic variable or parameter of the
9000 function passed in as DATA. */
9001
9002 static tree
9003 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9004 {
9005 tree fn = (tree) data;
9006
9007 if (TYPE_P (*tp))
9008 *walk_subtrees = 0;
9009
9010 else if (DECL_P (*tp)
9011 && auto_var_in_fn_p (*tp, fn))
9012 return *tp;
9013
9014 return NULL_TREE;
9015 }
9016
9017 /* Returns true if TYPE is, contains, or refers to a type with variable
9018 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9019 arguments, but not the return type. If FN is nonzero, only return
9020 true if a modifier of the type or position of FN is a variable or
9021 parameter inside FN.
9022
9023 This concept is more general than that of C99 'variably modified types':
9024 in C99, a struct type is never variably modified because a VLA may not
9025 appear as a structure member. However, in GNU C, code like:
9026
9027 struct S { int i[f()]; };
9028
9029 is valid, and other languages may define similar constructs. */
9030
9031 bool
9032 variably_modified_type_p (tree type, tree fn)
9033 {
9034 tree t;
9035
9036 /* Test if T is either variable (if FN is zero) or an expression containing
9037 a variable in FN. If TYPE isn't gimplified, return true also if
9038 gimplify_one_sizepos would gimplify the expression into a local
9039 variable. */
9040 #define RETURN_TRUE_IF_VAR(T) \
9041 do { tree _t = (T); \
9042 if (_t != NULL_TREE \
9043 && _t != error_mark_node \
9044 && TREE_CODE (_t) != INTEGER_CST \
9045 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9046 && (!fn \
9047 || (!TYPE_SIZES_GIMPLIFIED (type) \
9048 && !is_gimple_sizepos (_t)) \
9049 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9050 return true; } while (0)
9051
9052 if (type == error_mark_node)
9053 return false;
9054
9055 /* If TYPE itself has variable size, it is variably modified. */
9056 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9057 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9058
9059 switch (TREE_CODE (type))
9060 {
9061 case POINTER_TYPE:
9062 case REFERENCE_TYPE:
9063 case VECTOR_TYPE:
9064 if (variably_modified_type_p (TREE_TYPE (type), fn))
9065 return true;
9066 break;
9067
9068 case FUNCTION_TYPE:
9069 case METHOD_TYPE:
9070 /* If TYPE is a function type, it is variably modified if the
9071 return type is variably modified. */
9072 if (variably_modified_type_p (TREE_TYPE (type), fn))
9073 return true;
9074 break;
9075
9076 case INTEGER_TYPE:
9077 case REAL_TYPE:
9078 case FIXED_POINT_TYPE:
9079 case ENUMERAL_TYPE:
9080 case BOOLEAN_TYPE:
9081 /* Scalar types are variably modified if their end points
9082 aren't constant. */
9083 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9084 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9085 break;
9086
9087 case RECORD_TYPE:
9088 case UNION_TYPE:
9089 case QUAL_UNION_TYPE:
9090 /* We can't see if any of the fields are variably-modified by the
9091 definition we normally use, since that would produce infinite
9092 recursion via pointers. */
9093 /* This is variably modified if some field's type is. */
9094 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9095 if (TREE_CODE (t) == FIELD_DECL)
9096 {
9097 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9098 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9099 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9100
9101 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9102 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9103 }
9104 break;
9105
9106 case ARRAY_TYPE:
9107 /* Do not call ourselves to avoid infinite recursion. This is
9108 variably modified if the element type is. */
9109 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9110 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9111 break;
9112
9113 default:
9114 break;
9115 }
9116
9117 /* The current language may have other cases to check, but in general,
9118 all other types are not variably modified. */
9119 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9120
9121 #undef RETURN_TRUE_IF_VAR
9122 }
9123
9124 /* Given a DECL or TYPE, return the scope in which it was declared, or
9125 NULL_TREE if there is no containing scope. */
9126
9127 tree
9128 get_containing_scope (const_tree t)
9129 {
9130 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9131 }
9132
9133 /* Return the innermost context enclosing DECL that is
9134 a FUNCTION_DECL, or zero if none. */
9135
9136 tree
9137 decl_function_context (const_tree decl)
9138 {
9139 tree context;
9140
9141 if (TREE_CODE (decl) == ERROR_MARK)
9142 return 0;
9143
9144 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9145 where we look up the function at runtime. Such functions always take
9146 a first argument of type 'pointer to real context'.
9147
9148 C++ should really be fixed to use DECL_CONTEXT for the real context,
9149 and use something else for the "virtual context". */
9150 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9151 context
9152 = TYPE_MAIN_VARIANT
9153 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9154 else
9155 context = DECL_CONTEXT (decl);
9156
9157 while (context && TREE_CODE (context) != FUNCTION_DECL)
9158 {
9159 if (TREE_CODE (context) == BLOCK)
9160 context = BLOCK_SUPERCONTEXT (context);
9161 else
9162 context = get_containing_scope (context);
9163 }
9164
9165 return context;
9166 }
9167
9168 /* Return the innermost context enclosing DECL that is
9169 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9170 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9171
9172 tree
9173 decl_type_context (const_tree decl)
9174 {
9175 tree context = DECL_CONTEXT (decl);
9176
9177 while (context)
9178 switch (TREE_CODE (context))
9179 {
9180 case NAMESPACE_DECL:
9181 case TRANSLATION_UNIT_DECL:
9182 return NULL_TREE;
9183
9184 case RECORD_TYPE:
9185 case UNION_TYPE:
9186 case QUAL_UNION_TYPE:
9187 return context;
9188
9189 case TYPE_DECL:
9190 case FUNCTION_DECL:
9191 context = DECL_CONTEXT (context);
9192 break;
9193
9194 case BLOCK:
9195 context = BLOCK_SUPERCONTEXT (context);
9196 break;
9197
9198 default:
9199 gcc_unreachable ();
9200 }
9201
9202 return NULL_TREE;
9203 }
9204
9205 /* CALL is a CALL_EXPR. Return the declaration for the function
9206 called, or NULL_TREE if the called function cannot be
9207 determined. */
9208
9209 tree
9210 get_callee_fndecl (const_tree call)
9211 {
9212 tree addr;
9213
9214 if (call == error_mark_node)
9215 return error_mark_node;
9216
9217 /* It's invalid to call this function with anything but a
9218 CALL_EXPR. */
9219 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9220
9221 /* The first operand to the CALL is the address of the function
9222 called. */
9223 addr = CALL_EXPR_FN (call);
9224
9225 /* If there is no function, return early. */
9226 if (addr == NULL_TREE)
9227 return NULL_TREE;
9228
9229 STRIP_NOPS (addr);
9230
9231 /* If this is a readonly function pointer, extract its initial value. */
9232 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9233 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9234 && DECL_INITIAL (addr))
9235 addr = DECL_INITIAL (addr);
9236
9237 /* If the address is just `&f' for some function `f', then we know
9238 that `f' is being called. */
9239 if (TREE_CODE (addr) == ADDR_EXPR
9240 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9241 return TREE_OPERAND (addr, 0);
9242
9243 /* We couldn't figure out what was being called. */
9244 return NULL_TREE;
9245 }
9246
9247 #define TREE_MEM_USAGE_SPACES 40
9248
9249 /* Print debugging information about tree nodes generated during the compile,
9250 and any language-specific information. */
9251
9252 void
9253 dump_tree_statistics (void)
9254 {
9255 if (GATHER_STATISTICS)
9256 {
9257 int i;
9258 int total_nodes, total_bytes;
9259 fprintf (stderr, "\nKind Nodes Bytes\n");
9260 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9261 total_nodes = total_bytes = 0;
9262 for (i = 0; i < (int) all_kinds; i++)
9263 {
9264 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9265 tree_node_counts[i], tree_node_sizes[i]);
9266 total_nodes += tree_node_counts[i];
9267 total_bytes += tree_node_sizes[i];
9268 }
9269 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9270 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9271 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9272 fprintf (stderr, "Code Nodes\n");
9273 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9274 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9275 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9276 tree_code_counts[i]);
9277 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9278 fprintf (stderr, "\n");
9279 ssanames_print_statistics ();
9280 fprintf (stderr, "\n");
9281 phinodes_print_statistics ();
9282 fprintf (stderr, "\n");
9283 }
9284 else
9285 fprintf (stderr, "(No per-node statistics)\n");
9286
9287 print_type_hash_statistics ();
9288 print_debug_expr_statistics ();
9289 print_value_expr_statistics ();
9290 lang_hooks.print_statistics ();
9291 }
9292 \f
9293 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9294
9295 /* Fold the most significant BITS bits of VALUE into the crc32 CHKSUM. */
9296
9297 static unsigned
9298 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9299 {
9300 unsigned ix;
9301
9302 for (ix = bits; ix--; value <<= 1)
9303 {
9304 unsigned feedback;
9305
9306 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9307 chksum <<= 1;
9308 chksum ^= feedback;
9309 }
9310 return chksum;
9311 }
9312
9313 /* Generate a crc32 of a 32-bit unsigned. */
9314
9315 unsigned
9316 crc32_unsigned (unsigned chksum, unsigned value)
9317 {
9318 return crc32_unsigned_bits (chksum, value, 32);
9319 }
9320
9321 /* Generate a crc32 of a byte. */
9322
9323 unsigned
9324 crc32_byte (unsigned chksum, char byte)
9325 {
9326 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9327 }
9328
9329 /* Generate a crc32 of a string. */
9330
9331 unsigned
9332 crc32_string (unsigned chksum, const char *string)
9333 {
9334 do
9335 {
9336 chksum = crc32_byte (chksum, *string);
9337 }
9338 while (*string++);
9339 return chksum;
9340 }
9341
9342 /* P is a string that will be used in a symbol. Mask out any characters
9343 that are not valid in that context. */
9344
9345 void
9346 clean_symbol_name (char *p)
9347 {
9348 for (; *p; p++)
9349 if (! (ISALNUM (*p)
9350 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9351 || *p == '$'
9352 #endif
9353 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9354 || *p == '.'
9355 #endif
9356 ))
9357 *p = '_';
9358 }
9359
9360 /* For anonymous aggregate types, we need some sort of name to
9361 hold on to. In practice, this should not appear, but it should
9362 not be harmful if it does. */
9363 bool
9364 anon_aggrname_p (const_tree id_node)
9365 {
9366 #ifndef NO_DOT_IN_LABEL
9367 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9368 && IDENTIFIER_POINTER (id_node)[1] == '_');
9369 #else /* NO_DOT_IN_LABEL */
9370 #ifndef NO_DOLLAR_IN_LABEL
9371 return (IDENTIFIER_POINTER (id_node)[0] == '$'
9372 && IDENTIFIER_POINTER (id_node)[1] == '_');
9373 #else /* NO_DOLLAR_IN_LABEL */
9374 #define ANON_AGGRNAME_PREFIX "__anon_"
9375 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9376 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9377 #endif /* NO_DOLLAR_IN_LABEL */
9378 #endif /* NO_DOT_IN_LABEL */
9379 }
9380
9381 /* Return a format for an anonymous aggregate name. */
9382 const char *
9383 anon_aggrname_format ()
9384 {
9385 #ifndef NO_DOT_IN_LABEL
9386 return "._%d";
9387 #else /* NO_DOT_IN_LABEL */
9388 #ifndef NO_DOLLAR_IN_LABEL
9389 return "$_%d";
9390 #else /* NO_DOLLAR_IN_LABEL */
9391 return "__anon_%d";
9392 #endif /* NO_DOLLAR_IN_LABEL */
9393 #endif /* NO_DOT_IN_LABEL */
9394 }
9395
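/* Illustration (editorial sketch, not part of the original sources): on a
   host where neither NO_DOT_IN_LABEL nor NO_DOLLAR_IN_LABEL is defined, a
   front end can create an anonymous aggregate name with

     char buf[32];
     sprintf (buf, anon_aggrname_format (), 0);

   which produces "._0", and anon_aggrname_p (get_identifier (buf)) then
   returns true.  */
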
9396 /* Generate a name for a special-purpose function.
9397 The generated name may need to be unique across the whole link.
9398 Changes to this function may also require corresponding changes to
9399 xstrdup_mask_random.
9400 TYPE is some string to identify the purpose of this function to the
9401 linker or collect2; it must start with an uppercase letter,
9402 one of:
9403 I - for constructors
9404 D - for destructors
9405 N - for C++ anonymous namespaces
9406 F - for DWARF unwind frame information. */
9407
9408 tree
9409 get_file_function_name (const char *type)
9410 {
9411 char *buf;
9412 const char *p;
9413 char *q;
9414
9415 /* If we already have a name we know to be unique, just use that. */
9416 if (first_global_object_name)
9417 p = q = ASTRDUP (first_global_object_name);
9418 /* If the target is handling the constructors/destructors, they
9419 will be local to this file and the name is only necessary for
9420 debugging purposes.
9421 We also assign sub_I and sub_D suffixes to constructors called from
9422 the global static constructors. These are always local. */
9423 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9424 || (strncmp (type, "sub_", 4) == 0
9425 && (type[4] == 'I' || type[4] == 'D')))
9426 {
9427 const char *file = main_input_filename;
9428 if (! file)
9429 file = LOCATION_FILE (input_location);
9430 /* Just use the file's basename, because the full pathname
9431 might be quite long. */
9432 p = q = ASTRDUP (lbasename (file));
9433 }
9434 else
9435 {
9436 /* Otherwise, the name must be unique across the entire link.
9437 We don't have anything that we know to be unique to this translation
9438 unit, so use what we do have and throw in some randomness. */
9439 unsigned len;
9440 const char *name = weak_global_object_name;
9441 const char *file = main_input_filename;
9442
9443 if (! name)
9444 name = "";
9445 if (! file)
9446 file = LOCATION_FILE (input_location);
9447
9448 len = strlen (file);
9449 q = (char *) alloca (9 + 17 + len + 1);
9450 memcpy (q, file, len + 1);
9451
9452 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9453 crc32_string (0, name), get_random_seed (false));
9454
9455 p = q;
9456 }
9457
9458 clean_symbol_name (q);
9459 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9460 + strlen (type));
9461
9462 /* Set up the name of the file-level functions we may need.
9463 Use a global object (which is already required to be unique over
9464 the program) rather than the file name (which imposes extra
9465 constraints). */
9466 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9467
9468 return get_identifier (buf);
9469 }
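
/* Illustration (editorial sketch, not part of the original sources): when
   first_global_object_name is "main",

     tree ctor_name = get_file_function_name ("I");

   yields the identifier "_GLOBAL__I_main" via FILE_FUNCTION_FORMAT, the
   familiar naming scheme for file-level static constructor functions.  */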
9470 \f
9471 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9472
9473 /* Complain that the tree code of NODE does not match the expected 0
9474 terminated list of trailing codes. The trailing code list can be
9475 empty, for a more vague error message. FILE, LINE, and FUNCTION
9476 are of the caller. */
9477
9478 void
9479 tree_check_failed (const_tree node, const char *file,
9480 int line, const char *function, ...)
9481 {
9482 va_list args;
9483 const char *buffer;
9484 unsigned length = 0;
9485 enum tree_code code;
9486
9487 va_start (args, function);
9488 while ((code = (enum tree_code) va_arg (args, int)))
9489 length += 4 + strlen (get_tree_code_name (code));
9490 va_end (args);
9491 if (length)
9492 {
9493 char *tmp;
9494 va_start (args, function);
9495 length += strlen ("expected ");
9496 buffer = tmp = (char *) alloca (length);
9497 length = 0;
9498 while ((code = (enum tree_code) va_arg (args, int)))
9499 {
9500 const char *prefix = length ? " or " : "expected ";
9501
9502 strcpy (tmp + length, prefix);
9503 length += strlen (prefix);
9504 strcpy (tmp + length, get_tree_code_name (code));
9505 length += strlen (get_tree_code_name (code));
9506 }
9507 va_end (args);
9508 }
9509 else
9510 buffer = "unexpected node";
9511
9512 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9513 buffer, get_tree_code_name (TREE_CODE (node)),
9514 function, trim_filename (file), line);
9515 }
9516
9517 /* Complain that the tree code of NODE does match the expected 0
9518 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9519 the caller. */
9520
9521 void
9522 tree_not_check_failed (const_tree node, const char *file,
9523 int line, const char *function, ...)
9524 {
9525 va_list args;
9526 char *buffer;
9527 unsigned length = 0;
9528 enum tree_code code;
9529
9530 va_start (args, function);
9531 while ((code = (enum tree_code) va_arg (args, int)))
9532 length += 4 + strlen (get_tree_code_name (code));
9533 va_end (args);
9534 va_start (args, function);
9535 buffer = (char *) alloca (length);
9536 length = 0;
9537 while ((code = (enum tree_code) va_arg (args, int)))
9538 {
9539 if (length)
9540 {
9541 strcpy (buffer + length, " or ");
9542 length += 4;
9543 }
9544 strcpy (buffer + length, get_tree_code_name (code));
9545 length += strlen (get_tree_code_name (code));
9546 }
9547 va_end (args);
9548
9549 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9550 buffer, get_tree_code_name (TREE_CODE (node)),
9551 function, trim_filename (file), line);
9552 }
9553
9554 /* Similar to tree_check_failed, except that we check for a class of tree
9555 code, given in CL. */
9556
9557 void
9558 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9559 const char *file, int line, const char *function)
9560 {
9561 internal_error
9562 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9563 TREE_CODE_CLASS_STRING (cl),
9564 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9565 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9566 }
9567
9568 /* Similar to tree_check_failed, except that instead of specifying a
9569 dozen codes, use the knowledge that they're all sequential. */
9570
9571 void
9572 tree_range_check_failed (const_tree node, const char *file, int line,
9573 const char *function, enum tree_code c1,
9574 enum tree_code c2)
9575 {
9576 char *buffer;
9577 unsigned length = 0;
9578 unsigned int c;
9579
9580 for (c = c1; c <= c2; ++c)
9581 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9582
9583 length += strlen ("expected ");
9584 buffer = (char *) alloca (length);
9585 length = 0;
9586
9587 for (c = c1; c <= c2; ++c)
9588 {
9589 const char *prefix = length ? " or " : "expected ";
9590
9591 strcpy (buffer + length, prefix);
9592 length += strlen (prefix);
9593 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9594 length += strlen (get_tree_code_name ((enum tree_code) c));
9595 }
9596
9597 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9598 buffer, get_tree_code_name (TREE_CODE (node)),
9599 function, trim_filename (file), line);
9600 }
9601
9602
9603 /* Similar to tree_check_failed, except that we check that a tree does
9604 not belong to the specified class of tree code, given in CL. */
9605
9606 void
9607 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9608 const char *file, int line, const char *function)
9609 {
9610 internal_error
9611 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9612 TREE_CODE_CLASS_STRING (cl),
9613 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9614 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9615 }
9616
9617
9618 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9619
9620 void
9621 omp_clause_check_failed (const_tree node, const char *file, int line,
9622 const char *function, enum omp_clause_code code)
9623 {
9624 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9625 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9626 function, trim_filename (file), line);
9627 }
9628
9629
9630 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9631
9632 void
9633 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9634 const char *function, enum omp_clause_code c1,
9635 enum omp_clause_code c2)
9636 {
9637 char *buffer;
9638 unsigned length = 0;
9639 unsigned int c;
9640
9641 for (c = c1; c <= c2; ++c)
9642 length += 4 + strlen (omp_clause_code_name[c]);
9643
9644 length += strlen ("expected ");
9645 buffer = (char *) alloca (length);
9646 length = 0;
9647
9648 for (c = c1; c <= c2; ++c)
9649 {
9650 const char *prefix = length ? " or " : "expected ";
9651
9652 strcpy (buffer + length, prefix);
9653 length += strlen (prefix);
9654 strcpy (buffer + length, omp_clause_code_name[c]);
9655 length += strlen (omp_clause_code_name[c]);
9656 }
9657
9658 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9659 buffer, omp_clause_code_name[TREE_CODE (node)],
9660 function, trim_filename (file), line);
9661 }
9662
9663
9664 #undef DEFTREESTRUCT
9665 #define DEFTREESTRUCT(VAL, NAME) NAME,
9666
9667 static const char *ts_enum_names[] = {
9668 #include "treestruct.def"
9669 };
9670 #undef DEFTREESTRUCT
9671
9672 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9673
9674 /* Similar to tree_class_check_failed, except that we check whether the
9675 code of NODE contains the tree structure identified by EN. */
9676
9677 void
9678 tree_contains_struct_check_failed (const_tree node,
9679 const enum tree_node_structure_enum en,
9680 const char *file, int line,
9681 const char *function)
9682 {
9683 internal_error
9684 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9685 TS_ENUM_NAME (en),
9686 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9687 }
9688
9689
9690 /* Similar to above, except that the check is for the bounds of a
9691 tree_int_cst's (dynamically sized) array of elements. */
9692
9693 void
9694 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9695 const char *function)
9696 {
9697 internal_error
9698 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9699 idx + 1, len, function, trim_filename (file), line);
9700 }
9701
9702 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9703 (dynamically sized) vector. */
9704
9705 void
9706 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9707 const char *function)
9708 {
9709 internal_error
9710 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9711 idx + 1, len, function, trim_filename (file), line);
9712 }
9713
9714 /* Similar to above, except that the check is for the bounds of the operand
9715 vector of an expression node EXP. */
9716
9717 void
9718 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9719 int line, const char *function)
9720 {
9721 enum tree_code code = TREE_CODE (exp);
9722 internal_error
9723 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9724 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9725 function, trim_filename (file), line);
9726 }
9727
9728 /* Similar to above, except that the check is for the number of
9729 operands of an OMP_CLAUSE node. */
9730
9731 void
9732 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9733 int line, const char *function)
9734 {
9735 internal_error
9736 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9737 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9738 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9739 trim_filename (file), line);
9740 }
9741 #endif /* ENABLE_TREE_CHECKING */
9742 \f
9743 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9744 and mapped to the machine mode MODE. Initialize its fields and build
9745 the information necessary for debugging output. */
9746
9747 static tree
9748 make_vector_type (tree innertype, int nunits, machine_mode mode)
9749 {
9750 tree t;
9751 inchash::hash hstate;
9752
9753 t = make_node (VECTOR_TYPE);
9754 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9755 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9756 SET_TYPE_MODE (t, mode);
9757
9758 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9759 SET_TYPE_STRUCTURAL_EQUALITY (t);
9760 else if (TYPE_CANONICAL (innertype) != innertype
9761 || mode != VOIDmode)
9762 TYPE_CANONICAL (t)
9763 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9764
9765 layout_type (t);
9766
9767 hstate.add_wide_int (VECTOR_TYPE);
9768 hstate.add_wide_int (nunits);
9769 hstate.add_wide_int (mode);
9770 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9771 t = type_hash_canon (hstate.end (), t);
9772
9773 /* We have built a main variant, based on the main variant of the
9774 inner type. Use it to build the variant we return. */
9775 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9776 && TREE_TYPE (t) != innertype)
9777 return build_type_attribute_qual_variant (t,
9778 TYPE_ATTRIBUTES (innertype),
9779 TYPE_QUALS (innertype));
9780
9781 return t;
9782 }
9783
9784 static tree
9785 make_or_reuse_type (unsigned size, int unsignedp)
9786 {
9787 int i;
9788
9789 if (size == INT_TYPE_SIZE)
9790 return unsignedp ? unsigned_type_node : integer_type_node;
9791 if (size == CHAR_TYPE_SIZE)
9792 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9793 if (size == SHORT_TYPE_SIZE)
9794 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9795 if (size == LONG_TYPE_SIZE)
9796 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9797 if (size == LONG_LONG_TYPE_SIZE)
9798 return (unsignedp ? long_long_unsigned_type_node
9799 : long_long_integer_type_node);
9800
9801 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9802 if (size == int_n_data[i].bitsize
9803 && int_n_enabled_p[i])
9804 return (unsignedp ? int_n_trees[i].unsigned_type
9805 : int_n_trees[i].signed_type);
9806
9807 if (unsignedp)
9808 return make_unsigned_type (size);
9809 else
9810 return make_signed_type (size);
9811 }
9812
9813 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9814
9815 static tree
9816 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9817 {
9818 if (satp)
9819 {
9820 if (size == SHORT_FRACT_TYPE_SIZE)
9821 return unsignedp ? sat_unsigned_short_fract_type_node
9822 : sat_short_fract_type_node;
9823 if (size == FRACT_TYPE_SIZE)
9824 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9825 if (size == LONG_FRACT_TYPE_SIZE)
9826 return unsignedp ? sat_unsigned_long_fract_type_node
9827 : sat_long_fract_type_node;
9828 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9829 return unsignedp ? sat_unsigned_long_long_fract_type_node
9830 : sat_long_long_fract_type_node;
9831 }
9832 else
9833 {
9834 if (size == SHORT_FRACT_TYPE_SIZE)
9835 return unsignedp ? unsigned_short_fract_type_node
9836 : short_fract_type_node;
9837 if (size == FRACT_TYPE_SIZE)
9838 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9839 if (size == LONG_FRACT_TYPE_SIZE)
9840 return unsignedp ? unsigned_long_fract_type_node
9841 : long_fract_type_node;
9842 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9843 return unsignedp ? unsigned_long_long_fract_type_node
9844 : long_long_fract_type_node;
9845 }
9846
9847 return make_fract_type (size, unsignedp, satp);
9848 }
9849
9850 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9851
9852 static tree
9853 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9854 {
9855 if (satp)
9856 {
9857 if (size == SHORT_ACCUM_TYPE_SIZE)
9858 return unsignedp ? sat_unsigned_short_accum_type_node
9859 : sat_short_accum_type_node;
9860 if (size == ACCUM_TYPE_SIZE)
9861 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9862 if (size == LONG_ACCUM_TYPE_SIZE)
9863 return unsignedp ? sat_unsigned_long_accum_type_node
9864 : sat_long_accum_type_node;
9865 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9866 return unsignedp ? sat_unsigned_long_long_accum_type_node
9867 : sat_long_long_accum_type_node;
9868 }
9869 else
9870 {
9871 if (size == SHORT_ACCUM_TYPE_SIZE)
9872 return unsignedp ? unsigned_short_accum_type_node
9873 : short_accum_type_node;
9874 if (size == ACCUM_TYPE_SIZE)
9875 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9876 if (size == LONG_ACCUM_TYPE_SIZE)
9877 return unsignedp ? unsigned_long_accum_type_node
9878 : long_accum_type_node;
9879 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9880 return unsignedp ? unsigned_long_long_accum_type_node
9881 : long_long_accum_type_node;
9882 }
9883
9884 return make_accum_type (size, unsignedp, satp);
9885 }
9886
9887
9888 /* Create an atomic variant node for TYPE. This routine is called
9889 during initialization of data types to create the 5 basic atomic
9890 types. The generic build_qualified_type machinery requires these to
9891 already be set up in order to function properly, so cannot be
9892 called from there. If ALIGN is non-zero, then ensure alignment is
9893 overridden to this value. */
9894
9895 static tree
9896 build_atomic_base (tree type, unsigned int align)
9897 {
9898 tree t;
9899
9900 /* Make sure it's not already registered. */
9901 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9902 return t;
9903
9904 t = build_variant_type_copy (type);
9905 set_type_quals (t, TYPE_QUAL_ATOMIC);
9906
9907 if (align)
9908 TYPE_ALIGN (t) = align;
9909
9910 return t;
9911 }
9912
9913 /* Create nodes for all integer types (and error_mark_node) using the sizes
9914 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9915 SHORT_DOUBLE specifies whether double should be of the same precision
9916 as float. */
9917
9918 void
9919 build_common_tree_nodes (bool signed_char, bool short_double)
9920 {
9921 int i;
9922
9923 error_mark_node = make_node (ERROR_MARK);
9924 TREE_TYPE (error_mark_node) = error_mark_node;
9925
9926 initialize_sizetypes ();
9927
9928 /* Define both `signed char' and `unsigned char'. */
9929 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9930 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9931 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9932 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9933
9934 /* Define `char', which is like either `signed char' or `unsigned char'
9935 but not the same as either. */
9936 char_type_node
9937 = (signed_char
9938 ? make_signed_type (CHAR_TYPE_SIZE)
9939 : make_unsigned_type (CHAR_TYPE_SIZE));
9940 TYPE_STRING_FLAG (char_type_node) = 1;
9941
9942 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9943 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9944 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9945 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9946 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9947 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9948 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9949 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9950
9951 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9952 {
9953 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9954 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9955 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9956 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9957
9958 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9959 && int_n_enabled_p[i])
9960 {
9961 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9962 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9963 }
9964 }
9965
9966 /* Define a boolean type. This type only represents boolean values but
9967 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9968 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9969 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9970 TYPE_PRECISION (boolean_type_node) = 1;
9971 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9972
9973 /* Define what type to use for size_t. */
9974 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9975 size_type_node = unsigned_type_node;
9976 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9977 size_type_node = long_unsigned_type_node;
9978 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9979 size_type_node = long_long_unsigned_type_node;
9980 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9981 size_type_node = short_unsigned_type_node;
9982 else
9983 {
9984 int i;
9985
9986 size_type_node = NULL_TREE;
9987 for (i = 0; i < NUM_INT_N_ENTS; i++)
9988 if (int_n_enabled_p[i])
9989 {
9990 char name[50];
9991 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9992
9993 if (strcmp (name, SIZE_TYPE) == 0)
9994 {
9995 size_type_node = int_n_trees[i].unsigned_type;
9996 }
9997 }
9998 if (size_type_node == NULL_TREE)
9999 gcc_unreachable ();
10000 }
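  /* For instance, a typical LP64 target defines SIZE_TYPE as
     "long unsigned int", so size_type_node becomes long_unsigned_type_node
     above; only targets whose size_t is one of the extended __intN types
     reach the loop matching names of the form "__intN unsigned".  */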
10001
10002 /* Fill in the rest of the sized types. Reuse existing type nodes
10003 when possible. */
10004 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10005 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10006 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10007 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10008 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10009
10010 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10011 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10012 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10013 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10014 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10015
10016 /* Don't call build_qualified_type for atomics. That routine does
10017 special processing for atomics, and until they are initialized
10018 it's better not to make that call.
10019
10020 Check to see if there is a target override for atomic types. */
10021
10022 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10023 targetm.atomic_align_for_mode (QImode));
10024 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10025 targetm.atomic_align_for_mode (HImode));
10026 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10027 targetm.atomic_align_for_mode (SImode));
10028 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10029 targetm.atomic_align_for_mode (DImode));
10030 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10031 targetm.atomic_align_for_mode (TImode));
10032
10033 access_public_node = get_identifier ("public");
10034 access_protected_node = get_identifier ("protected");
10035 access_private_node = get_identifier ("private");
10036
10037 /* Define these next since types below may use them. */
10038 integer_zero_node = build_int_cst (integer_type_node, 0);
10039 integer_one_node = build_int_cst (integer_type_node, 1);
10040 integer_three_node = build_int_cst (integer_type_node, 3);
10041 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10042
10043 size_zero_node = size_int (0);
10044 size_one_node = size_int (1);
10045 bitsize_zero_node = bitsize_int (0);
10046 bitsize_one_node = bitsize_int (1);
10047 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10048
10049 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10050 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10051
10052 void_type_node = make_node (VOID_TYPE);
10053 layout_type (void_type_node);
10054
10055 pointer_bounds_type_node = targetm.chkp_bound_type ();
10056
10057 /* We are not going to have real types in C with less than byte alignment,
10058 so we might as well not have any types that claim to have it. */
10059 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10060 TYPE_USER_ALIGN (void_type_node) = 0;
10061
10062 void_node = make_node (VOID_CST);
10063 TREE_TYPE (void_node) = void_type_node;
10064
10065 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10066 layout_type (TREE_TYPE (null_pointer_node));
10067
10068 ptr_type_node = build_pointer_type (void_type_node);
10069 const_ptr_type_node
10070 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10071 fileptr_type_node = ptr_type_node;
10072
10073 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10074
10075 float_type_node = make_node (REAL_TYPE);
10076 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10077 layout_type (float_type_node);
10078
10079 double_type_node = make_node (REAL_TYPE);
10080 if (short_double)
10081 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
10082 else
10083 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10084 layout_type (double_type_node);
10085
10086 long_double_type_node = make_node (REAL_TYPE);
10087 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10088 layout_type (long_double_type_node);
10089
10090 float_ptr_type_node = build_pointer_type (float_type_node);
10091 double_ptr_type_node = build_pointer_type (double_type_node);
10092 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10093 integer_ptr_type_node = build_pointer_type (integer_type_node);
10094
10095 /* Fixed size integer types. */
10096 uint16_type_node = make_or_reuse_type (16, 1);
10097 uint32_type_node = make_or_reuse_type (32, 1);
10098 uint64_type_node = make_or_reuse_type (64, 1);
10099
10100 /* Decimal float types. */
10101 dfloat32_type_node = make_node (REAL_TYPE);
10102 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10103 layout_type (dfloat32_type_node);
10104 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10105 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10106
10107 dfloat64_type_node = make_node (REAL_TYPE);
10108 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10109 layout_type (dfloat64_type_node);
10110 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10111 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10112
10113 dfloat128_type_node = make_node (REAL_TYPE);
10114 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10115 layout_type (dfloat128_type_node);
10116 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10117 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10118
10119 complex_integer_type_node = build_complex_type (integer_type_node);
10120 complex_float_type_node = build_complex_type (float_type_node);
10121 complex_double_type_node = build_complex_type (double_type_node);
10122 complex_long_double_type_node = build_complex_type (long_double_type_node);
10123
10124 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10125 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10126 sat_ ## KIND ## _type_node = \
10127 make_sat_signed_ ## KIND ## _type (SIZE); \
10128 sat_unsigned_ ## KIND ## _type_node = \
10129 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10130 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10131 unsigned_ ## KIND ## _type_node = \
10132 make_unsigned_ ## KIND ## _type (SIZE);
10133
10134 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10135 sat_ ## WIDTH ## KIND ## _type_node = \
10136 make_sat_signed_ ## KIND ## _type (SIZE); \
10137 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10138 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10139 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10140 unsigned_ ## WIDTH ## KIND ## _type_node = \
10141 make_unsigned_ ## KIND ## _type (SIZE);
10142
10143 /* Make fixed-point type nodes based on four different widths. */
10144 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10145 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10146 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10147 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10148 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10149
10150 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10151 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10152 NAME ## _type_node = \
10153 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10154 u ## NAME ## _type_node = \
10155 make_or_reuse_unsigned_ ## KIND ## _type \
10156 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10157 sat_ ## NAME ## _type_node = \
10158 make_or_reuse_sat_signed_ ## KIND ## _type \
10159 (GET_MODE_BITSIZE (MODE ## mode)); \
10160 sat_u ## NAME ## _type_node = \
10161 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10162 (GET_MODE_BITSIZE (U ## MODE ## mode));
10163
10164 /* Fixed-point type and mode nodes. */
10165 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10166 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10167 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10168 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10169 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10170 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10171 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10172 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10173 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10174 MAKE_FIXED_MODE_NODE (accum, da, DA)
10175 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10176
10177 {
10178 tree t = targetm.build_builtin_va_list ();
10179
10180 /* Many back-ends define record types without setting TYPE_NAME.
10181 If we copied the record type here, we'd keep the original
10182 record type without a name. This breaks name mangling. So,
10183 don't copy record types and let c_common_nodes_and_builtins()
10184 declare the type to be __builtin_va_list. */
10185 if (TREE_CODE (t) != RECORD_TYPE)
10186 t = build_variant_type_copy (t);
10187
10188 va_list_type_node = t;
10189 }
10190 }
10191
10192 /* Modify DECL for given flags.
10193 TM_PURE attribute is set only on types, so the function will modify
10194 DECL's type when ECF_TM_PURE is used. */
10195
10196 void
10197 set_call_expr_flags (tree decl, int flags)
10198 {
10199 if (flags & ECF_NOTHROW)
10200 TREE_NOTHROW (decl) = 1;
10201 if (flags & ECF_CONST)
10202 TREE_READONLY (decl) = 1;
10203 if (flags & ECF_PURE)
10204 DECL_PURE_P (decl) = 1;
10205 if (flags & ECF_LOOPING_CONST_OR_PURE)
10206 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10207 if (flags & ECF_NOVOPS)
10208 DECL_IS_NOVOPS (decl) = 1;
10209 if (flags & ECF_NORETURN)
10210 TREE_THIS_VOLATILE (decl) = 1;
10211 if (flags & ECF_MALLOC)
10212 DECL_IS_MALLOC (decl) = 1;
10213 if (flags & ECF_RETURNS_TWICE)
10214 DECL_IS_RETURNS_TWICE (decl) = 1;
10215 if (flags & ECF_LEAF)
10216 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10217 NULL, DECL_ATTRIBUTES (decl));
10218 if ((flags & ECF_TM_PURE) && flag_tm)
10219 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10220 /* Looping const or pure is implied by noreturn.
10221 There is currently no way to declare looping const or looping pure alone. */
10222 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10223 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10224 }
10225
10226
10227 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10228
10229 static void
10230 local_define_builtin (const char *name, tree type, enum built_in_function code,
10231 const char *library_name, int ecf_flags)
10232 {
10233 tree decl;
10234
10235 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10236 library_name, NULL_TREE);
10237 set_call_expr_flags (decl, ecf_flags);
10238
10239 set_builtin_decl (code, decl, true);
10240 }
10241
10242 /* Call this function after instantiating all builtins that the language
10243 front end cares about. This will build the rest of the builtins
10244 and internal functions that are relied upon by the tree optimizers and
10245 the middle-end. */
10246
10247 void
10248 build_common_builtin_nodes (void)
10249 {
10250 tree tmp, ftype;
10251 int ecf_flags;
10252
10253 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10254 {
10255 ftype = build_function_type (void_type_node, void_list_node);
10256 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10257 "__builtin_unreachable",
10258 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10259 | ECF_CONST);
10260 }
10261
10262 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10263 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10264 {
10265 ftype = build_function_type_list (ptr_type_node,
10266 ptr_type_node, const_ptr_type_node,
10267 size_type_node, NULL_TREE);
10268
10269 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10270 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10271 "memcpy", ECF_NOTHROW | ECF_LEAF);
10272 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10273 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10274 "memmove", ECF_NOTHROW | ECF_LEAF);
10275 }
10276
10277 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10278 {
10279 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10280 const_ptr_type_node, size_type_node,
10281 NULL_TREE);
10282 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10283 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10284 }
10285
10286 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10287 {
10288 ftype = build_function_type_list (ptr_type_node,
10289 ptr_type_node, integer_type_node,
10290 size_type_node, NULL_TREE);
10291 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10292 "memset", ECF_NOTHROW | ECF_LEAF);
10293 }
10294
10295 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10296 {
10297 ftype = build_function_type_list (ptr_type_node,
10298 size_type_node, NULL_TREE);
10299 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10300 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10301 }
10302
10303 ftype = build_function_type_list (ptr_type_node, size_type_node,
10304 size_type_node, NULL_TREE);
10305 local_define_builtin ("__builtin_alloca_with_align", ftype,
10306 BUILT_IN_ALLOCA_WITH_ALIGN,
10307 "__builtin_alloca_with_align",
10308 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10309
10310 /* If we're checking the stack, `alloca' can throw. */
10311 if (flag_stack_check)
10312 {
10313 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10314 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10315 }
10316
10317 ftype = build_function_type_list (void_type_node,
10318 ptr_type_node, ptr_type_node,
10319 ptr_type_node, NULL_TREE);
10320 local_define_builtin ("__builtin_init_trampoline", ftype,
10321 BUILT_IN_INIT_TRAMPOLINE,
10322 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10323 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10324 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10325 "__builtin_init_heap_trampoline",
10326 ECF_NOTHROW | ECF_LEAF);
10327
10328 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10329 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10330 BUILT_IN_ADJUST_TRAMPOLINE,
10331 "__builtin_adjust_trampoline",
10332 ECF_CONST | ECF_NOTHROW);
10333
10334 ftype = build_function_type_list (void_type_node,
10335 ptr_type_node, ptr_type_node, NULL_TREE);
10336 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10337 BUILT_IN_NONLOCAL_GOTO,
10338 "__builtin_nonlocal_goto",
10339 ECF_NORETURN | ECF_NOTHROW);
10340
10341 ftype = build_function_type_list (void_type_node,
10342 ptr_type_node, ptr_type_node, NULL_TREE);
10343 local_define_builtin ("__builtin_setjmp_setup", ftype,
10344 BUILT_IN_SETJMP_SETUP,
10345 "__builtin_setjmp_setup", ECF_NOTHROW);
10346
10347 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10348 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10349 BUILT_IN_SETJMP_RECEIVER,
10350 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10351
10352 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10353 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10354 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10355
10356 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10357 local_define_builtin ("__builtin_stack_restore", ftype,
10358 BUILT_IN_STACK_RESTORE,
10359 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10360
10361 /* If there's a possibility that we might use the ARM EABI, build the
10362 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10363 if (targetm.arm_eabi_unwinder)
10364 {
10365 ftype = build_function_type_list (void_type_node, NULL_TREE);
10366 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10367 BUILT_IN_CXA_END_CLEANUP,
10368 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10369 }
10370
10371 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10372 local_define_builtin ("__builtin_unwind_resume", ftype,
10373 BUILT_IN_UNWIND_RESUME,
10374 ((targetm_common.except_unwind_info (&global_options)
10375 == UI_SJLJ)
10376 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10377 ECF_NORETURN);
10378
10379 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10380 {
10381 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10382 NULL_TREE);
10383 local_define_builtin ("__builtin_return_address", ftype,
10384 BUILT_IN_RETURN_ADDRESS,
10385 "__builtin_return_address",
10386 ECF_NOTHROW);
10387 }
10388
10389 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10390 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10391 {
10392 ftype = build_function_type_list (void_type_node, ptr_type_node,
10393 ptr_type_node, NULL_TREE);
10394 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10395 local_define_builtin ("__cyg_profile_func_enter", ftype,
10396 BUILT_IN_PROFILE_FUNC_ENTER,
10397 "__cyg_profile_func_enter", 0);
10398 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10399 local_define_builtin ("__cyg_profile_func_exit", ftype,
10400 BUILT_IN_PROFILE_FUNC_EXIT,
10401 "__cyg_profile_func_exit", 0);
10402 }
10403
10404 /* The exception object and filter values from the runtime. The argument
10405 must be zero before exception lowering, i.e. from the front end. After
10406 exception lowering, it will be the region number for the exception
10407 landing pad. These functions are PURE instead of CONST to prevent
10408 them from being hoisted past the exception edge that will initialize
10409 its value in the landing pad. */
10410 ftype = build_function_type_list (ptr_type_node,
10411 integer_type_node, NULL_TREE);
10412 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10413 /* Only use TM_PURE if we have TM language support. */
10414 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10415 ecf_flags |= ECF_TM_PURE;
10416 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10417 "__builtin_eh_pointer", ecf_flags);
10418
10419 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10420 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10421 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10422 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10423
10424 ftype = build_function_type_list (void_type_node,
10425 integer_type_node, integer_type_node,
10426 NULL_TREE);
10427 local_define_builtin ("__builtin_eh_copy_values", ftype,
10428 BUILT_IN_EH_COPY_VALUES,
10429 "__builtin_eh_copy_values", ECF_NOTHROW);
10430
10431 /* Complex multiplication and division. These are handled as builtins
10432 rather than optabs because emit_library_call_value doesn't support
10433 complex. Further, we can do slightly better with folding these
10434 beasties if the real and imaginary parts of the arguments are separate. */
10435 {
10436 int mode;
10437
10438 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10439 {
10440 char mode_name_buf[4], *q;
10441 const char *p;
10442 enum built_in_function mcode, dcode;
10443 tree type, inner_type;
10444 const char *prefix = "__";
10445
10446 if (targetm.libfunc_gnu_prefix)
10447 prefix = "__gnu_";
10448
10449 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10450 if (type == NULL)
10451 continue;
10452 inner_type = TREE_TYPE (type);
10453
10454 ftype = build_function_type_list (type, inner_type, inner_type,
10455 inner_type, inner_type, NULL_TREE);
10456
10457 mcode = ((enum built_in_function)
10458 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10459 dcode = ((enum built_in_function)
10460 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10461
10462 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10463 *q = TOLOWER (*p);
10464 *q = '\0';
10465
10466 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10467 NULL);
10468 local_define_builtin (built_in_names[mcode], ftype, mcode,
10469 built_in_names[mcode],
10470 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10471
10472 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10473 NULL);
10474 local_define_builtin (built_in_names[dcode], ftype, dcode,
10475 built_in_names[dcode],
10476 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10477 }
10478 }
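  /* For instance, for SCmode the loop above registers "__mulsc3" and
     "__divsc3" (or "__gnu_mulsc3" and "__gnu_divsc3" when the target asks
     for the GNU prefix), the libgcc routines that multiply and divide
     complex floats given their real and imaginary parts separately.  */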
10479
10480 init_internal_fns ();
10481 }
10482
10483 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10484 better way.
10485
10486 If we requested a pointer to a vector, build up the pointers that
10487 we stripped off while looking for the inner type. Similarly for
10488 return values from functions.
10489
10490 The argument TYPE is the top of the chain, and BOTTOM is the
10491 new type which we will point to. */
10492
10493 tree
10494 reconstruct_complex_type (tree type, tree bottom)
10495 {
10496 tree inner, outer;
10497
10498 if (TREE_CODE (type) == POINTER_TYPE)
10499 {
10500 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10501 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10502 TYPE_REF_CAN_ALIAS_ALL (type));
10503 }
10504 else if (TREE_CODE (type) == REFERENCE_TYPE)
10505 {
10506 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10507 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10508 TYPE_REF_CAN_ALIAS_ALL (type));
10509 }
10510 else if (TREE_CODE (type) == ARRAY_TYPE)
10511 {
10512 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10513 outer = build_array_type (inner, TYPE_DOMAIN (type));
10514 }
10515 else if (TREE_CODE (type) == FUNCTION_TYPE)
10516 {
10517 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10518 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10519 }
10520 else if (TREE_CODE (type) == METHOD_TYPE)
10521 {
10522 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10523 /* The build_method_type_directly() routine prepends 'this' to the argument
10524 list, so we must compensate by getting rid of it. */
10525 outer
10526 = build_method_type_directly
10527 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10528 inner,
10529 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10530 }
10531 else if (TREE_CODE (type) == OFFSET_TYPE)
10532 {
10533 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10534 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10535 }
10536 else
10537 return bottom;
10538
10539 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10540 TYPE_QUALS (type));
10541 }
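/* For instance, if TYPE is `float *' and BOTTOM is a vector-of-four-floats
   type, the result is a pointer to that vector type: the POINTER_TYPE layer
   stripped from TYPE is rebuilt on top of BOTTOM and the original qualifiers
   and attributes are reapplied.  */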
10542
10543 /* Returns a vector tree node given a mode (an integer or vector mode) and
10544 the inner type. */
10545 tree
10546 build_vector_type_for_mode (tree innertype, machine_mode mode)
10547 {
10548 int nunits;
10549
10550 switch (GET_MODE_CLASS (mode))
10551 {
10552 case MODE_VECTOR_INT:
10553 case MODE_VECTOR_FLOAT:
10554 case MODE_VECTOR_FRACT:
10555 case MODE_VECTOR_UFRACT:
10556 case MODE_VECTOR_ACCUM:
10557 case MODE_VECTOR_UACCUM:
10558 nunits = GET_MODE_NUNITS (mode);
10559 break;
10560
10561 case MODE_INT:
10562 /* Check that there are no leftover bits. */
10563 gcc_assert (GET_MODE_BITSIZE (mode)
10564 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10565
10566 nunits = GET_MODE_BITSIZE (mode)
10567 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10568 break;
10569
10570 default:
10571 gcc_unreachable ();
10572 }
10573
10574 return make_vector_type (innertype, nunits, mode);
10575 }
10576
10577 /* Similarly, but takes the inner type and number of units, which must be
10578 a power of two. */
10579
10580 tree
10581 build_vector_type (tree innertype, int nunits)
10582 {
10583 return make_vector_type (innertype, nunits, VOIDmode);
10584 }
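/* For instance, build_vector_type (float_type_node, 4) yields the type of a
   vector of four floats; since VOIDmode is passed down, layout_type chooses
   the machine mode, typically V4SFmode on targets that provide one.  */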
10585
10586 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10587
10588 tree
10589 build_opaque_vector_type (tree innertype, int nunits)
10590 {
10591 tree t = make_vector_type (innertype, nunits, VOIDmode);
10592 tree cand;
10593 /* We always build the non-opaque variant before the opaque one,
10594 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10595 cand = TYPE_NEXT_VARIANT (t);
10596 if (cand
10597 && TYPE_VECTOR_OPAQUE (cand)
10598 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10599 return cand;
10600 /* Otherwise build a variant type and make sure to queue it after
10601 the non-opaque type. */
10602 cand = build_distinct_type_copy (t);
10603 TYPE_VECTOR_OPAQUE (cand) = true;
10604 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10605 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10606 TYPE_NEXT_VARIANT (t) = cand;
10607 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10608 return cand;
10609 }
10610
10611
10612 /* Given an initializer INIT, return TRUE if INIT is zero or some
10613 aggregate of zeros. Otherwise return FALSE. */
10614 bool
10615 initializer_zerop (const_tree init)
10616 {
10617 tree elt;
10618
10619 STRIP_NOPS (init);
10620
10621 switch (TREE_CODE (init))
10622 {
10623 case INTEGER_CST:
10624 return integer_zerop (init);
10625
10626 case REAL_CST:
10627 /* ??? Note that this is not correct for C4X float formats. There,
10628 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10629 negative exponent. */
10630 return real_zerop (init)
10631 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10632
10633 case FIXED_CST:
10634 return fixed_zerop (init);
10635
10636 case COMPLEX_CST:
10637 return integer_zerop (init)
10638 || (real_zerop (init)
10639 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10640 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10641
10642 case VECTOR_CST:
10643 {
10644 unsigned i;
10645 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10646 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10647 return false;
10648 return true;
10649 }
10650
10651 case CONSTRUCTOR:
10652 {
10653 unsigned HOST_WIDE_INT idx;
10654
10655 if (TREE_CLOBBER_P (init))
10656 return false;
10657 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10658 if (!initializer_zerop (elt))
10659 return false;
10660 return true;
10661 }
10662
10663 case STRING_CST:
10664 {
10665 int i;
10666
10667 /* We need to loop through all elements to handle cases like
10668 "\0" and "\0foobar". */
10669 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10670 if (TREE_STRING_POINTER (init)[i] != '\0')
10671 return false;
10672
10673 return true;
10674 }
10675
10676 default:
10677 return false;
10678 }
10679 }
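/* For instance, the initializers 0, 0.0, "" and { 0, { 0.0 } } all satisfy
   this predicate, whereas -0.0 does not: its sign bit means the object is
   not an all-zero-bits value.  */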
10680
10681 /* Check if vector VEC consists of all equal elements and that the
10682 number of elements corresponds to the type of VEC. The function
10683 returns the first element of the vector, or NULL_TREE if the
10684 vector is not uniform. */
10685 tree
10686 uniform_vector_p (const_tree vec)
10687 {
10688 tree first, t;
10689 unsigned i;
10690
10691 if (vec == NULL_TREE)
10692 return NULL_TREE;
10693
10694 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10695
10696 if (TREE_CODE (vec) == VECTOR_CST)
10697 {
10698 first = VECTOR_CST_ELT (vec, 0);
10699 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10700 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10701 return NULL_TREE;
10702
10703 return first;
10704 }
10705
10706 else if (TREE_CODE (vec) == CONSTRUCTOR)
10707 {
10708 first = error_mark_node;
10709
10710 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10711 {
10712 if (i == 0)
10713 {
10714 first = t;
10715 continue;
10716 }
10717 if (!operand_equal_p (first, t, 0))
10718 return NULL_TREE;
10719 }
10720 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10721 return NULL_TREE;
10722
10723 return first;
10724 }
10725
10726 return NULL_TREE;
10727 }
10728
10729 /* Build an empty statement at location LOC. */
10730
10731 tree
10732 build_empty_stmt (location_t loc)
10733 {
10734 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10735 SET_EXPR_LOCATION (t, loc);
10736 return t;
10737 }
10738
10739
10740 /* Build an OpenMP clause with code CODE. LOC is the location of the
10741 clause. */
10742
10743 tree
10744 build_omp_clause (location_t loc, enum omp_clause_code code)
10745 {
10746 tree t;
10747 int size, length;
10748
10749 length = omp_clause_num_ops[code];
10750 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10751
10752 record_node_allocation_statistics (OMP_CLAUSE, size);
10753
10754 t = (tree) ggc_internal_alloc (size);
10755 memset (t, 0, size);
10756 TREE_SET_CODE (t, OMP_CLAUSE);
10757 OMP_CLAUSE_SET_CODE (t, code);
10758 OMP_CLAUSE_LOCATION (t) = loc;
10759
10760 return t;
10761 }
10762
10763 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10764 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10765 Except for the CODE and operand count field, other storage for the
10766 object is initialized to zeros. */
10767
10768 tree
10769 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10770 {
10771 tree t;
10772 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10773
10774 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10775 gcc_assert (len >= 1);
10776
10777 record_node_allocation_statistics (code, length);
10778
10779 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10780
10781 TREE_SET_CODE (t, code);
10782
10783 /* Can't use TREE_OPERAND to store the length because if checking is
10784 enabled, it will try to check the length before we store it. :-P */
10785 t->exp.operands[0] = build_int_cst (sizetype, len);
10786
10787 return t;
10788 }
10789
10790 /* Helper function for build_call_* functions; build a CALL_EXPR with
10791 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10792 the argument slots. */
10793
10794 static tree
10795 build_call_1 (tree return_type, tree fn, int nargs)
10796 {
10797 tree t;
10798
10799 t = build_vl_exp (CALL_EXPR, nargs + 3);
10800 TREE_TYPE (t) = return_type;
10801 CALL_EXPR_FN (t) = fn;
10802 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10803
10804 return t;
10805 }
10806
10807 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10808 FN and a null static chain slot. NARGS is the number of call arguments
10809 which are specified as "..." arguments. */
10810
10811 tree
10812 build_call_nary (tree return_type, tree fn, int nargs, ...)
10813 {
10814 tree ret;
10815 va_list args;
10816 va_start (args, nargs);
10817 ret = build_call_valist (return_type, fn, nargs, args);
10818 va_end (args);
10819 return ret;
10820 }
10821
10822 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10823 FN and a null static chain slot. NARGS is the number of call arguments
10824 which are specified as a va_list ARGS. */
10825
10826 tree
10827 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10828 {
10829 tree t;
10830 int i;
10831
10832 t = build_call_1 (return_type, fn, nargs);
10833 for (i = 0; i < nargs; i++)
10834 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10835 process_call_operands (t);
10836 return t;
10837 }
10838
10839 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10840 FN and a null static chain slot. NARGS is the number of call arguments
10841 which are specified as a tree array ARGS. */
10842
10843 tree
10844 build_call_array_loc (location_t loc, tree return_type, tree fn,
10845 int nargs, const tree *args)
10846 {
10847 tree t;
10848 int i;
10849
10850 t = build_call_1 (return_type, fn, nargs);
10851 for (i = 0; i < nargs; i++)
10852 CALL_EXPR_ARG (t, i) = args[i];
10853 process_call_operands (t);
10854 SET_EXPR_LOCATION (t, loc);
10855 return t;
10856 }
10857
10858 /* Like build_call_array, but takes a vec. */
10859
10860 tree
10861 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10862 {
10863 tree ret, t;
10864 unsigned int ix;
10865
10866 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10867 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10868 CALL_EXPR_ARG (ret, ix) = t;
10869 process_call_operands (ret);
10870 return ret;
10871 }
10872
10873 /* Conveniently construct a function call expression. FNDECL names the
10874 function to be called and N arguments are passed in the array
10875 ARGARRAY. */
10876
10877 tree
10878 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10879 {
10880 tree fntype = TREE_TYPE (fndecl);
10881 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10882
10883 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10884 }
10885
10886 /* Conveniently construct a function call expression. FNDECL names the
10887 function to be called and the arguments are passed in the vector
10888 VEC. */
10889
10890 tree
10891 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10892 {
10893 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10894 vec_safe_address (vec));
10895 }
10896
10897
10898 /* Conveniently construct a function call expression. FNDECL names the
10899 function to be called, N is the number of arguments, and the "..."
10900 parameters are the argument expressions. */
10901
10902 tree
10903 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10904 {
10905 va_list ap;
10906 tree *argarray = XALLOCAVEC (tree, n);
10907 int i;
10908
10909 va_start (ap, n);
10910 for (i = 0; i < n; i++)
10911 argarray[i] = va_arg (ap, tree);
10912 va_end (ap);
10913 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10914 }
10915
10916 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10917 varargs macros aren't supported by all bootstrap compilers. */
10918
10919 tree
10920 build_call_expr (tree fndecl, int n, ...)
10921 {
10922 va_list ap;
10923 tree *argarray = XALLOCAVEC (tree, n);
10924 int i;
10925
10926 va_start (ap, n);
10927 for (i = 0; i < n; i++)
10928 argarray[i] = va_arg (ap, tree);
10929 va_end (ap);
10930 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10931 }
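/* For instance (an illustrative sketch, where DST, SRC and NBYTES stand for
   trees the caller already has, of pointer and size types):

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dst, src, nbytes);

   builds a CALL_EXPR invoking memcpy with its three argument slots filled in
   and no source location attached.  */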
10932
10933 /* Build an internal call expression. This is just like a CALL_EXPR, except
10934 that its CALL_EXPR_FN is NULL. It will be gimplified later into an
10935 ordinary call to an internal function. */
10936
10937 tree
10938 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10939 tree type, int n, ...)
10940 {
10941 va_list ap;
10942 int i;
10943
10944 tree fn = build_call_1 (type, NULL_TREE, n);
10945 va_start (ap, n);
10946 for (i = 0; i < n; i++)
10947 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10948 va_end (ap);
10949 SET_EXPR_LOCATION (fn, loc);
10950 CALL_EXPR_IFN (fn) = ifn;
10951 return fn;
10952 }
10953
10954 /* Create a new constant string literal and return a char* pointer to it.
10955 The STRING_CST value is the LEN characters at STR. */
10956 tree
10957 build_string_literal (int len, const char *str)
10958 {
10959 tree t, elem, index, type;
10960
10961 t = build_string (len, str);
10962 elem = build_type_variant (char_type_node, 1, 0);
10963 index = build_index_type (size_int (len - 1));
10964 type = build_array_type (elem, index);
10965 TREE_TYPE (t) = type;
10966 TREE_CONSTANT (t) = 1;
10967 TREE_READONLY (t) = 1;
10968 TREE_STATIC (t) = 1;
10969
10970 type = build_pointer_type (elem);
10971 t = build1 (ADDR_EXPR, type,
10972 build4 (ARRAY_REF, elem,
10973 t, integer_zero_node, NULL_TREE, NULL_TREE));
10974 return t;
10975 }
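/* For instance, callers typically pass the length including the terminating
   NUL, as in build_string_literal (strlen (str) + 1, str), so the STRING_CST
   holds a proper C string; the result is the address of element zero of that
   constant array.  */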
10976
10977
10978
10979 /* Return true if T (assumed to be a DECL) must be assigned a memory
10980 location. */
10981
10982 bool
10983 needs_to_live_in_memory (const_tree t)
10984 {
10985 return (TREE_ADDRESSABLE (t)
10986 || is_global_var (t)
10987 || (TREE_CODE (t) == RESULT_DECL
10988 && !DECL_BY_REFERENCE (t)
10989 && aggregate_value_p (t, current_function_decl)));
10990 }
10991
10992 /* Return the value of constant X, sign-extended from the precision of its type. */
10993
10994 HOST_WIDE_INT
10995 int_cst_value (const_tree x)
10996 {
10997 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10998 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10999
11000 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11001 gcc_assert (cst_and_fits_in_hwi (x));
11002
11003 if (bits < HOST_BITS_PER_WIDE_INT)
11004 {
11005 bool negative = ((val >> (bits - 1)) & 1) != 0;
11006 if (negative)
11007 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11008 else
11009 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11010 }
11011
11012 return val;
11013 }
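/* For instance, for an INTEGER_CST of an 8-bit type whose low bits are 0xff,
   BITS is 8 and the topmost of those bits is set, so the value is
   sign-extended to all ones and the function returns -1 rather than 255.  */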
11014
11015 /* If TYPE is an integral or pointer type, return an integer type with
11016 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11017 if TYPE is already an integer type of signedness UNSIGNEDP. */
11018
11019 tree
11020 signed_or_unsigned_type_for (int unsignedp, tree type)
11021 {
11022 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11023 return type;
11024
11025 if (TREE_CODE (type) == VECTOR_TYPE)
11026 {
11027 tree inner = TREE_TYPE (type);
11028 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11029 if (!inner2)
11030 return NULL_TREE;
11031 if (inner == inner2)
11032 return type;
11033 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11034 }
11035
11036 if (!INTEGRAL_TYPE_P (type)
11037 && !POINTER_TYPE_P (type)
11038 && TREE_CODE (type) != OFFSET_TYPE)
11039 return NULL_TREE;
11040
11041 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11042 }
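/* For instance, applied with UNSIGNEDP set to a pointer type on a 64-bit
   target this returns a 64-bit unsigned integer type, and applied to a
   vector of signed ints it rebuilds a vector of unsigned ints with the same
   number of subparts.  */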
11043
11044 /* If TYPE is an integral or pointer type, return an integer type with
11045 the same precision which is unsigned, or itself if TYPE is already an
11046 unsigned integer type. */
11047
11048 tree
11049 unsigned_type_for (tree type)
11050 {
11051 return signed_or_unsigned_type_for (1, type);
11052 }
11053
11054 /* If TYPE is an integral or pointer type, return an integer type with
11055 the same precision which is signed, or itself if TYPE is already a
11056 signed integer type. */
11057
11058 tree
11059 signed_type_for (tree type)
11060 {
11061 return signed_or_unsigned_type_for (0, type);
11062 }
11063
11064 /* If TYPE is a vector type, return a signed integer vector type with the
11065 same width and number of subparts. Otherwise return boolean_type_node. */
11066
11067 tree
11068 truth_type_for (tree type)
11069 {
11070 if (TREE_CODE (type) == VECTOR_TYPE)
11071 {
11072 tree elem = lang_hooks.types.type_for_size
11073 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
11074 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
11075 }
11076 else
11077 return boolean_type_node;
11078 }
11079
11080 /* Returns the largest value obtainable by casting something in INNER type to
11081 OUTER type. */
11082
11083 tree
11084 upper_bound_in_type (tree outer, tree inner)
11085 {
11086 unsigned int det = 0;
11087 unsigned oprec = TYPE_PRECISION (outer);
11088 unsigned iprec = TYPE_PRECISION (inner);
11089 unsigned prec;
11090
11091 /* Compute a unique number for every combination. */
11092 det |= (oprec > iprec) ? 4 : 0;
11093 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11094 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11095
11096 /* Determine the exponent to use. */
11097 switch (det)
11098 {
11099 case 0:
11100 case 1:
11101 /* oprec <= iprec, outer: signed, inner: don't care. */
11102 prec = oprec - 1;
11103 break;
11104 case 2:
11105 case 3:
11106 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11107 prec = oprec;
11108 break;
11109 case 4:
11110 /* oprec > iprec, outer: signed, inner: signed. */
11111 prec = iprec - 1;
11112 break;
11113 case 5:
11114 /* oprec > iprec, outer: signed, inner: unsigned. */
11115 prec = iprec;
11116 break;
11117 case 6:
11118 /* oprec > iprec, outer: unsigned, inner: signed. */
11119 prec = oprec;
11120 break;
11121 case 7:
11122 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11123 prec = iprec;
11124 break;
11125 default:
11126 gcc_unreachable ();
11127 }
11128
11129 return wide_int_to_tree (outer,
11130 wi::mask (prec, false, TYPE_PRECISION (outer)));
11131 }
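/* For instance, with OUTER a 16-bit unsigned type and INNER an 8-bit signed
   type, DET is 4|2 = 6, so PREC is the outer precision and the result is
   0xffff; indeed, casting the 8-bit value -1 to the 16-bit unsigned type
   yields 65535, the largest value obtainable.  */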
11132
11133 /* Returns the smallest value obtainable by casting something in INNER type to
11134 OUTER type. */
11135
11136 tree
11137 lower_bound_in_type (tree outer, tree inner)
11138 {
11139 unsigned oprec = TYPE_PRECISION (outer);
11140 unsigned iprec = TYPE_PRECISION (inner);
11141
11142 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11143 and obtain 0. */
11144 if (TYPE_UNSIGNED (outer)
11145 /* If we are widening something of an unsigned type, OUTER type
11146 contains all values of INNER type. In particular, both INNER
11147 and OUTER types have zero in common. */
11148 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11149 return build_int_cst (outer, 0);
11150 else
11151 {
11152 /* If we are widening a signed type to another signed type, we
11153 want to obtain -2^(iprec-1). If we are keeping the
11154 precision or narrowing to a signed type, we want to obtain
11155 -2^(oprec-1). */
11156 unsigned prec = oprec > iprec ? iprec : oprec;
11157 return wide_int_to_tree (outer,
11158 wi::mask (prec - 1, true,
11159 TYPE_PRECISION (outer)));
11160 }
11161 }
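/* For instance, widening an 8-bit signed type to a 16-bit signed type gives
   -128, the smallest value representable in the inner type, while narrowing
   a 32-bit signed type to an 8-bit signed type also gives -128, the smallest
   value of the outer type.  */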
11162
11163 /* Return nonzero if two operands that are suitable for PHI nodes are
11164 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11165 SSA_NAME or invariant. Note that this is strictly an optimization.
11166 That is, callers of this function can directly call operand_equal_p
11167 and get the same result, only slower. */
11168
11169 int
11170 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11171 {
11172 if (arg0 == arg1)
11173 return 1;
11174 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11175 return 0;
11176 return operand_equal_p (arg0, arg1, 0);
11177 }
11178
11179 /* Returns the number of zeros at the end of the binary representation of X. */
11180
11181 tree
11182 num_ending_zeros (const_tree x)
11183 {
11184 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11185 }
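
/* For instance, num_ending_zeros of an INTEGER_CST with value 40
   (binary 101000) returns an INTEGER_CST with value 3.  */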
11186
11187
11188 #define WALK_SUBTREE(NODE) \
11189 do \
11190 { \
11191 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11192 if (result) \
11193 return result; \
11194 } \
11195 while (0)
11196
11197 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11198 to be walked whenever a type is seen in the tree. The rest of the operands
11199 and the return value are as for walk_tree. */
11200
11201 static tree
11202 walk_type_fields (tree type, walk_tree_fn func, void *data,
11203 hash_set<tree> *pset, walk_tree_lh lh)
11204 {
11205 tree result = NULL_TREE;
11206
11207 switch (TREE_CODE (type))
11208 {
11209 case POINTER_TYPE:
11210 case REFERENCE_TYPE:
11211 case VECTOR_TYPE:
11212 /* We have to worry about mutually recursive pointers. These can't
11213 be written in C. They can in Ada. It's pathological, but
11214 there's an ACATS test (c38102a) that checks it. Deal with this
11215 by checking if we're pointing to another pointer, that one
11216 points to another pointer, that one does too, and we have no htab.
11217 If so, get a hash table. We check three levels deep to avoid
11218 the cost of the hash table if we don't need one. */
11219 if (POINTER_TYPE_P (TREE_TYPE (type))
11220 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11221 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11222 && !pset)
11223 {
11224 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11225 func, data);
11226 if (result)
11227 return result;
11228
11229 break;
11230 }
11231
11232 /* ... fall through ... */
11233
11234 case COMPLEX_TYPE:
11235 WALK_SUBTREE (TREE_TYPE (type));
11236 break;
11237
11238 case METHOD_TYPE:
11239 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11240
11241 /* Fall through. */
11242
11243 case FUNCTION_TYPE:
11244 WALK_SUBTREE (TREE_TYPE (type));
11245 {
11246 tree arg;
11247
11248 /* We never want to walk into default arguments. */
11249 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11250 WALK_SUBTREE (TREE_VALUE (arg));
11251 }
11252 break;
11253
11254 case ARRAY_TYPE:
11255 /* Don't follow this node's type if it is a pointer, for fear that
11256 we'll have infinite recursion. If we have a PSET, then we
11257 need not fear. */
11258 if (pset
11259 || (!POINTER_TYPE_P (TREE_TYPE (type))
11260 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11261 WALK_SUBTREE (TREE_TYPE (type));
11262 WALK_SUBTREE (TYPE_DOMAIN (type));
11263 break;
11264
11265 case OFFSET_TYPE:
11266 WALK_SUBTREE (TREE_TYPE (type));
11267 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11268 break;
11269
11270 default:
11271 break;
11272 }
11273
11274 return NULL_TREE;
11275 }
11276
11277 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11278 called with the DATA and the address of each sub-tree. If FUNC returns a
11279 non-NULL value, the traversal is stopped, and the value returned by FUNC
11280 is returned. If PSET is non-NULL it is used to record the nodes visited,
11281 and to avoid visiting a node more than once. */
11282
11283 tree
11284 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11285 hash_set<tree> *pset, walk_tree_lh lh)
11286 {
11287 enum tree_code code;
11288 int walk_subtrees;
11289 tree result;
11290
11291 #define WALK_SUBTREE_TAIL(NODE) \
11292 do \
11293 { \
11294 tp = & (NODE); \
11295 goto tail_recurse; \
11296 } \
11297 while (0)
11298
11299 tail_recurse:
11300 /* Skip empty subtrees. */
11301 if (!*tp)
11302 return NULL_TREE;
11303
11304 /* Don't walk the same tree twice, if the user has requested
11305 that we avoid doing so. */
11306 if (pset && pset->add (*tp))
11307 return NULL_TREE;
11308
11309 /* Call the function. */
11310 walk_subtrees = 1;
11311 result = (*func) (tp, &walk_subtrees, data);
11312
11313 /* If we found something, return it. */
11314 if (result)
11315 return result;
11316
11317 code = TREE_CODE (*tp);
11318
11319 /* Even if we didn't, FUNC may have decided that there was nothing
11320 interesting below this point in the tree. */
11321 if (!walk_subtrees)
11322 {
11323 /* But we still need to check our siblings. */
11324 if (code == TREE_LIST)
11325 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11326 else if (code == OMP_CLAUSE)
11327 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11328 else
11329 return NULL_TREE;
11330 }
11331
11332 if (lh)
11333 {
11334 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11335 if (result || !walk_subtrees)
11336 return result;
11337 }
11338
11339 switch (code)
11340 {
11341 case ERROR_MARK:
11342 case IDENTIFIER_NODE:
11343 case INTEGER_CST:
11344 case REAL_CST:
11345 case FIXED_CST:
11346 case VECTOR_CST:
11347 case STRING_CST:
11348 case BLOCK:
11349 case PLACEHOLDER_EXPR:
11350 case SSA_NAME:
11351 case FIELD_DECL:
11352 case RESULT_DECL:
11353 /* None of these have subtrees other than those already walked
11354 above. */
11355 break;
11356
11357 case TREE_LIST:
11358 WALK_SUBTREE (TREE_VALUE (*tp));
11359 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11360 break;
11361
11362 case TREE_VEC:
11363 {
11364 int len = TREE_VEC_LENGTH (*tp);
11365
11366 if (len == 0)
11367 break;
11368
11369 /* Walk all elements but the first. */
11370 while (--len)
11371 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11372
11373 /* Now walk the first one as a tail call. */
11374 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11375 }
11376
11377 case COMPLEX_CST:
11378 WALK_SUBTREE (TREE_REALPART (*tp));
11379 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11380
11381 case CONSTRUCTOR:
11382 {
11383 unsigned HOST_WIDE_INT idx;
11384 constructor_elt *ce;
11385
11386 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11387 idx++)
11388 WALK_SUBTREE (ce->value);
11389 }
11390 break;
11391
11392 case SAVE_EXPR:
11393 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11394
11395 case BIND_EXPR:
11396 {
11397 tree decl;
11398 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11399 {
11400 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11401 into declarations that are just mentioned, rather than
11402 declared; they don't really belong to this part of the tree.
11403 And, we can see cycles: the initializer for a declaration
11404 can refer to the declaration itself. */
11405 WALK_SUBTREE (DECL_INITIAL (decl));
11406 WALK_SUBTREE (DECL_SIZE (decl));
11407 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11408 }
11409 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11410 }
11411
11412 case STATEMENT_LIST:
11413 {
11414 tree_stmt_iterator i;
11415 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11416 WALK_SUBTREE (*tsi_stmt_ptr (i));
11417 }
11418 break;
11419
11420 case OMP_CLAUSE:
11421 switch (OMP_CLAUSE_CODE (*tp))
11422 {
11423 case OMP_CLAUSE_GANG:
11424 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11425 /* FALLTHRU */
11426
11427 case OMP_CLAUSE_DEVICE_RESIDENT:
11428 case OMP_CLAUSE_USE_DEVICE:
11429 case OMP_CLAUSE_ASYNC:
11430 case OMP_CLAUSE_WAIT:
11431 case OMP_CLAUSE_WORKER:
11432 case OMP_CLAUSE_VECTOR:
11433 case OMP_CLAUSE_NUM_GANGS:
11434 case OMP_CLAUSE_NUM_WORKERS:
11435 case OMP_CLAUSE_VECTOR_LENGTH:
11436 case OMP_CLAUSE_PRIVATE:
11437 case OMP_CLAUSE_SHARED:
11438 case OMP_CLAUSE_FIRSTPRIVATE:
11439 case OMP_CLAUSE_COPYIN:
11440 case OMP_CLAUSE_COPYPRIVATE:
11441 case OMP_CLAUSE_FINAL:
11442 case OMP_CLAUSE_IF:
11443 case OMP_CLAUSE_NUM_THREADS:
11444 case OMP_CLAUSE_SCHEDULE:
11445 case OMP_CLAUSE_UNIFORM:
11446 case OMP_CLAUSE_DEPEND:
11447 case OMP_CLAUSE_NUM_TEAMS:
11448 case OMP_CLAUSE_THREAD_LIMIT:
11449 case OMP_CLAUSE_DEVICE:
11450 case OMP_CLAUSE_DIST_SCHEDULE:
11451 case OMP_CLAUSE_SAFELEN:
11452 case OMP_CLAUSE_SIMDLEN:
11453 case OMP_CLAUSE__LOOPTEMP_:
11454 case OMP_CLAUSE__SIMDUID_:
11455 case OMP_CLAUSE__CILK_FOR_COUNT_:
11456 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11457 /* FALLTHRU */
11458
11459 case OMP_CLAUSE_INDEPENDENT:
11460 case OMP_CLAUSE_NOWAIT:
11461 case OMP_CLAUSE_ORDERED:
11462 case OMP_CLAUSE_DEFAULT:
11463 case OMP_CLAUSE_UNTIED:
11464 case OMP_CLAUSE_MERGEABLE:
11465 case OMP_CLAUSE_PROC_BIND:
11466 case OMP_CLAUSE_INBRANCH:
11467 case OMP_CLAUSE_NOTINBRANCH:
11468 case OMP_CLAUSE_FOR:
11469 case OMP_CLAUSE_PARALLEL:
11470 case OMP_CLAUSE_SECTIONS:
11471 case OMP_CLAUSE_TASKGROUP:
11472 case OMP_CLAUSE_AUTO:
11473 case OMP_CLAUSE_SEQ:
11474 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11475
11476 case OMP_CLAUSE_LASTPRIVATE:
11477 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11478 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11479 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11480
11481 case OMP_CLAUSE_COLLAPSE:
11482 {
11483 int i;
11484 for (i = 0; i < 3; i++)
11485 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11486 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11487 }
11488
11489 case OMP_CLAUSE_LINEAR:
11490 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11491 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11492 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11493 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11494
11495 case OMP_CLAUSE_ALIGNED:
11496 case OMP_CLAUSE_FROM:
11497 case OMP_CLAUSE_TO:
11498 case OMP_CLAUSE_MAP:
11499 case OMP_CLAUSE__CACHE_:
11500 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11501 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11502 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11503
11504 case OMP_CLAUSE_REDUCTION:
11505 {
11506 int i;
11507 for (i = 0; i < 4; i++)
11508 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11509 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11510 }
11511
11512 default:
11513 gcc_unreachable ();
11514 }
11515 break;
11516
11517 case TARGET_EXPR:
11518 {
11519 int i, len;
11520
11521 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11522 But, we only want to walk once. */
11523 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11524 for (i = 0; i < len; ++i)
11525 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11526 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11527 }
11528
11529 case DECL_EXPR:
11530 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11531 defining. We only want to walk into these fields of a type in this
11532 case and not in the general case of a mere reference to the type.
11533
11534 The criterion is as follows: if the field can be an expression, it
11535 must be walked only here. This should be in keeping with the fields
11536 that are directly gimplified in gimplify_type_sizes in order for the
11537 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11538 variable-sized types.
11539
11540 Note that DECLs get walked as part of processing the BIND_EXPR. */
11541 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11542 {
11543 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11544 if (TREE_CODE (*type_p) == ERROR_MARK)
11545 return NULL_TREE;
11546
11547 /* Call the function for the type. See if it returns anything or
11548 doesn't want us to continue. If we are to continue, walk both
11549 the normal fields and those for the declaration case. */
11550 result = (*func) (type_p, &walk_subtrees, data);
11551 if (result || !walk_subtrees)
11552 return result;
11553
11554 /* But do not walk a pointed-to type since it may itself need to
11555 be walked in the declaration case if it isn't anonymous. */
11556 if (!POINTER_TYPE_P (*type_p))
11557 {
11558 result = walk_type_fields (*type_p, func, data, pset, lh);
11559 if (result)
11560 return result;
11561 }
11562
11563 /* If this is a record type, also walk the fields. */
11564 if (RECORD_OR_UNION_TYPE_P (*type_p))
11565 {
11566 tree field;
11567
11568 for (field = TYPE_FIELDS (*type_p); field;
11569 field = DECL_CHAIN (field))
11570 {
11571 /* We'd like to look at the type of the field, but we can
11572 easily get infinite recursion. So assume it's pointed
11573 to elsewhere in the tree. Also, ignore things that
11574 aren't fields. */
11575 if (TREE_CODE (field) != FIELD_DECL)
11576 continue;
11577
11578 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11579 WALK_SUBTREE (DECL_SIZE (field));
11580 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11581 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11582 WALK_SUBTREE (DECL_QUALIFIER (field));
11583 }
11584 }
11585
11586 /* Same for scalar types. */
11587 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11588 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11589 || TREE_CODE (*type_p) == INTEGER_TYPE
11590 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11591 || TREE_CODE (*type_p) == REAL_TYPE)
11592 {
11593 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11594 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11595 }
11596
11597 WALK_SUBTREE (TYPE_SIZE (*type_p));
11598 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11599 }
11600 /* FALLTHRU */
11601
11602 default:
11603 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11604 {
11605 int i, len;
11606
11607 /* Walk over all the sub-trees of this operand. */
11608 len = TREE_OPERAND_LENGTH (*tp);
11609
11610 /* Go through the subtrees. We need to do this in forward order so
11611 that the scope of a FOR_EXPR is handled properly. */
11612 if (len)
11613 {
11614 for (i = 0; i < len - 1; ++i)
11615 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11616 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11617 }
11618 }
11619 /* If this is a type, walk the needed fields in the type. */
11620 else if (TYPE_P (*tp))
11621 return walk_type_fields (*tp, func, data, pset, lh);
11622 break;
11623 }
11624
11625 /* We didn't find what we were looking for. */
11626 return NULL_TREE;
11627
11628 #undef WALK_SUBTREE_TAIL
11629 }
11630 #undef WALK_SUBTREE
11631
11632 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11633
11634 tree
11635 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11636 walk_tree_lh lh)
11637 {
11638 tree result;
11639
11640 hash_set<tree> pset;
11641 result = walk_tree_1 (tp, func, data, &pset, lh);
11642 return result;
11643 }
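
/* A minimal sketch of how these walkers are typically used.  The callback
   below is only an illustration, not a function defined in this file; it
   counts CALL_EXPR nodes and prunes the walk at types:

       static tree
       count_calls_r (tree *tp, int *walk_subtrees, void *data)
       {
         if (TREE_CODE (*tp) == CALL_EXPR)
           (*(int *) data)++;
         if (TYPE_P (*tp))
           *walk_subtrees = 0;
         return NULL_TREE;
       }

       int count = 0;
       walk_tree_without_duplicates (&expr, count_calls_r, &count);

   Returning NULL_TREE keeps the walk going; returning any other tree stops
   the walk and makes it return that value.  */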
11644
11645
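/* Return the BLOCK, if any, recorded in the location of expression T.
   T must be of an expression code class.  */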
11646 tree
11647 tree_block (tree t)
11648 {
11649 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11650
11651 if (IS_EXPR_CODE_CLASS (c))
11652 return LOCATION_BLOCK (t->exp.locus);
11653 gcc_unreachable ();
11654 return NULL;
11655 }
11656
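/* Record block B in the location of expression T, or drop any block
   from the location when B is NULL.  T must be of an expression code
   class.  */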
11657 void
11658 tree_set_block (tree t, tree b)
11659 {
11660 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11661
11662 if (IS_EXPR_CODE_CLASS (c))
11663 {
11664 if (b)
11665 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11666 else
11667 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11668 }
11669 else
11670 gcc_unreachable ();
11671 }
11672
11673 /* Create a nameless artificial label and put it in the current
11674 function context. The label has a location of LOC. Returns the
11675 newly created label. */
11676
11677 tree
11678 create_artificial_label (location_t loc)
11679 {
11680 tree lab = build_decl (loc,
11681 LABEL_DECL, NULL_TREE, void_type_node);
11682
11683 DECL_ARTIFICIAL (lab) = 1;
11684 DECL_IGNORED_P (lab) = 1;
11685 DECL_CONTEXT (lab) = current_function_decl;
11686 return lab;
11687 }
11688
11689 /* Given a tree, try to return a useful variable name that we can use
11690 to prefix a temporary that is being assigned the value of the tree.
11691 E.g. given <temp> = &A, return A. */
11692
11693 const char *
11694 get_name (tree t)
11695 {
11696 tree stripped_decl;
11697
11698 stripped_decl = t;
11699 STRIP_NOPS (stripped_decl);
11700 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11701 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11702 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11703 {
11704 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11705 if (!name)
11706 return NULL;
11707 return IDENTIFIER_POINTER (name);
11708 }
11709 else
11710 {
11711 switch (TREE_CODE (stripped_decl))
11712 {
11713 case ADDR_EXPR:
11714 return get_name (TREE_OPERAND (stripped_decl, 0));
11715 default:
11716 return NULL;
11717 }
11718 }
11719 }
11720
11721 /* Return true if FNTYPE has a variable argument list. */
11722
11723 bool
11724 stdarg_p (const_tree fntype)
11725 {
11726 function_args_iterator args_iter;
11727 tree n = NULL_TREE, t;
11728
11729 if (!fntype)
11730 return false;
11731
11732 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11733 {
11734 n = t;
11735 }
11736
11737 return n != NULL_TREE && n != void_type_node;
11738 }
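
/* In other words, stdarg_p holds for a prototyped function type whose
   argument list does not end in void_type_node, e.g. 'int f (int, ...)',
   and is false both for 'int f (int)' and 'int f (void)' and for
   unprototyped types, whose TYPE_ARG_TYPES is NULL.  */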
11739
11740 /* Return true if FNTYPE has a prototype. */
11741
11742 bool
11743 prototype_p (const_tree fntype)
11744 {
11745 tree t;
11746
11747 gcc_assert (fntype != NULL_TREE);
11748
11749 t = TYPE_ARG_TYPES (fntype);
11750 return (t != NULL_TREE);
11751 }
11752
11753 /* If BLOCK is inlined from an __attribute__((__artificial__))
11754 routine, return a pointer to the location from which it has been
11755 called. */
11756 location_t *
11757 block_nonartificial_location (tree block)
11758 {
11759 location_t *ret = NULL;
11760
11761 while (block && TREE_CODE (block) == BLOCK
11762 && BLOCK_ABSTRACT_ORIGIN (block))
11763 {
11764 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11765
11766 while (TREE_CODE (ao) == BLOCK
11767 && BLOCK_ABSTRACT_ORIGIN (ao)
11768 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11769 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11770
11771 if (TREE_CODE (ao) == FUNCTION_DECL)
11772 {
11773 /* If AO is an artificial inline, point RET to the
11774 call site locus at which it has been inlined and continue
11775 the loop, in case AO's caller is also an artificial
11776 inline. */
11777 if (DECL_DECLARED_INLINE_P (ao)
11778 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11779 ret = &BLOCK_SOURCE_LOCATION (block);
11780 else
11781 break;
11782 }
11783 else if (TREE_CODE (ao) != BLOCK)
11784 break;
11785
11786 block = BLOCK_SUPERCONTEXT (block);
11787 }
11788 return ret;
11789 }
11790
11791
11792 /* If EXP is inlined from an __attribute__((__artificial__))
11793 function, return the location of the original call expression. */
11794
11795 location_t
11796 tree_nonartificial_location (tree exp)
11797 {
11798 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11799
11800 if (loc)
11801 return *loc;
11802 else
11803 return EXPR_LOCATION (exp);
11804 }
11805
11806
11807 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11808 nodes. */
11809
11810 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11811
11812 hashval_t
11813 cl_option_hasher::hash (tree x)
11814 {
11815 const_tree const t = x;
11816 const char *p;
11817 size_t i;
11818 size_t len = 0;
11819 hashval_t hash = 0;
11820
11821 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11822 {
11823 p = (const char *)TREE_OPTIMIZATION (t);
11824 len = sizeof (struct cl_optimization);
11825 }
11826
11827 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11828 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11829
11830 else
11831 gcc_unreachable ();
11832
11833 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11834 something else. */
11835 for (i = 0; i < len; i++)
11836 if (p[i])
11837 hash = (hash << 4) ^ ((i << 2) | p[i]);
11838
11839 return hash;
11840 }
11841
11842 /* Return nonzero if the value represented by X (an OPTIMIZATION or
11843 TARGET_OPTION tree node) is the same as that given by Y, which is a
11844 node of the same kind. */
11845
11846 bool
11847 cl_option_hasher::equal (tree x, tree y)
11848 {
11849 const_tree const xt = x;
11850 const_tree const yt = y;
11851 const char *xp;
11852 const char *yp;
11853 size_t len;
11854
11855 if (TREE_CODE (xt) != TREE_CODE (yt))
11856 return 0;
11857
11858 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11859 {
11860 xp = (const char *)TREE_OPTIMIZATION (xt);
11861 yp = (const char *)TREE_OPTIMIZATION (yt);
11862 len = sizeof (struct cl_optimization);
11863 }
11864
11865 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11866 {
11867 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11868 TREE_TARGET_OPTION (yt));
11869 }
11870
11871 else
11872 gcc_unreachable ();
11873
11874 return (memcmp (xp, yp, len) == 0);
11875 }
11876
11877 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11878
11879 tree
11880 build_optimization_node (struct gcc_options *opts)
11881 {
11882 tree t;
11883
11884 /* Use the cache of optimization nodes. */
11885
11886 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11887 opts);
11888
11889 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11890 t = *slot;
11891 if (!t)
11892 {
11893 /* Insert this one into the hash table. */
11894 t = cl_optimization_node;
11895 *slot = t;
11896
11897 /* Make a new node for next time round. */
11898 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11899 }
11900
11901 return t;
11902 }
11903
11904 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11905
11906 tree
11907 build_target_option_node (struct gcc_options *opts)
11908 {
11909 tree t;
11910
11911 /* Use the cache of target option nodes. */
11912
11913 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11914 opts);
11915
11916 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11917 t = *slot;
11918 if (!t)
11919 {
11920 /* Insert this one into the hash table. */
11921 t = cl_target_option_node;
11922 *slot = t;
11923
11924 /* Make a new node for next time round. */
11925 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11926 }
11927
11928 return t;
11929 }
11930
11931 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11932 so that they aren't saved during PCH writing. */
11933
11934 void
11935 prepare_target_option_nodes_for_pch (void)
11936 {
11937 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11938 for (; iter != cl_option_hash_table->end (); ++iter)
11939 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11940 TREE_TARGET_GLOBALS (*iter) = NULL;
11941 }
11942
11943 /* Determine the "ultimate origin" of a block. The block may be an inlined
11944 instance of an inlined instance of a block which is local to an inline
11945 function, so we have to trace all the way back through the origin chain
11946 to find out what sort of node actually served as the original seed for the
11947 given block. */
11948
11949 tree
11950 block_ultimate_origin (const_tree block)
11951 {
11952 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11953
11954 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11955 we're trying to output the abstract instance of this function. */
11956 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11957 return NULL_TREE;
11958
11959 if (immediate_origin == NULL_TREE)
11960 return NULL_TREE;
11961 else
11962 {
11963 tree ret_val;
11964 tree lookahead = immediate_origin;
11965
11966 do
11967 {
11968 ret_val = lookahead;
11969 lookahead = (TREE_CODE (ret_val) == BLOCK
11970 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11971 }
11972 while (lookahead != NULL && lookahead != ret_val);
11973
11974 /* The block's abstract origin chain may not be the *ultimate* origin of
11975 the block. It could lead to a DECL that has an abstract origin set.
11976 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11977 will give us if it has one). Note that DECL's abstract origins are
11978 supposed to be the most distant ancestor (or so decl_ultimate_origin
11979 claims), so we don't need to loop following the DECL origins. */
11980 if (DECL_P (ret_val))
11981 return DECL_ORIGIN (ret_val);
11982
11983 return ret_val;
11984 }
11985 }
11986
11987 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11988 no instruction. */
11989
11990 bool
11991 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11992 {
11993 /* Use precision rather than machine mode when we can, which gives
11994 the correct answer even for submode (bit-field) types. */
11995 if ((INTEGRAL_TYPE_P (outer_type)
11996 || POINTER_TYPE_P (outer_type)
11997 || TREE_CODE (outer_type) == OFFSET_TYPE)
11998 && (INTEGRAL_TYPE_P (inner_type)
11999 || POINTER_TYPE_P (inner_type)
12000 || TREE_CODE (inner_type) == OFFSET_TYPE))
12001 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12002
12003 /* Otherwise fall back on comparing machine modes (e.g. for
12004 aggregate types, floats). */
12005 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12006 }
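
/* For example, a conversion between 'int' and 'unsigned int' (same
   precision) is a nop in this sense, as is one between 'int *' and
   'char *'; a conversion from 'int' to a wider 'long' is not.  */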
12007
12008 /* Return true iff conversion in EXP generates no instruction. Mark
12009 it inline so that we fully inline into the stripping functions even
12010 though we have two uses of this function. */
12011
12012 static inline bool
12013 tree_nop_conversion (const_tree exp)
12014 {
12015 tree outer_type, inner_type;
12016
12017 if (!CONVERT_EXPR_P (exp)
12018 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12019 return false;
12020 if (TREE_OPERAND (exp, 0) == error_mark_node)
12021 return false;
12022
12023 outer_type = TREE_TYPE (exp);
12024 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12025
12026 if (!inner_type)
12027 return false;
12028
12029 return tree_nop_conversion_p (outer_type, inner_type);
12030 }
12031
12032 /* Return true iff conversion in EXP generates no instruction. Don't
12033 consider conversions changing the signedness. */
12034
12035 static bool
12036 tree_sign_nop_conversion (const_tree exp)
12037 {
12038 tree outer_type, inner_type;
12039
12040 if (!tree_nop_conversion (exp))
12041 return false;
12042
12043 outer_type = TREE_TYPE (exp);
12044 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12045
12046 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12047 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12048 }
12049
12050 /* Strip conversions from EXP according to tree_nop_conversion and
12051 return the resulting expression. */
12052
12053 tree
12054 tree_strip_nop_conversions (tree exp)
12055 {
12056 while (tree_nop_conversion (exp))
12057 exp = TREE_OPERAND (exp, 0);
12058 return exp;
12059 }
12060
12061 /* Strip conversions from EXP according to tree_sign_nop_conversion
12062 and return the resulting expression. */
12063
12064 tree
12065 tree_strip_sign_nop_conversions (tree exp)
12066 {
12067 while (tree_sign_nop_conversion (exp))
12068 exp = TREE_OPERAND (exp, 0);
12069 return exp;
12070 }
12071
12072 /* Avoid any floating point extensions from EXP. */
12073 tree
12074 strip_float_extensions (tree exp)
12075 {
12076 tree sub, expt, subt;
12077
12078 /* For a floating point constant, look up the narrowest type that can hold
12079 it properly and handle it like (type)(narrowest_type)constant.
12080 This way we can optimize for instance a=a*2.0 where "a" is float
12081 but 2.0 is a double constant. */
12082 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12083 {
12084 REAL_VALUE_TYPE orig;
12085 tree type = NULL;
12086
12087 orig = TREE_REAL_CST (exp);
12088 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12089 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12090 type = float_type_node;
12091 else if (TYPE_PRECISION (TREE_TYPE (exp))
12092 > TYPE_PRECISION (double_type_node)
12093 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12094 type = double_type_node;
12095 if (type)
12096 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
12097 }
12098
12099 if (!CONVERT_EXPR_P (exp))
12100 return exp;
12101
12102 sub = TREE_OPERAND (exp, 0);
12103 subt = TREE_TYPE (sub);
12104 expt = TREE_TYPE (exp);
12105
12106 if (!FLOAT_TYPE_P (subt))
12107 return exp;
12108
12109 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12110 return exp;
12111
12112 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12113 return exp;
12114
12115 return strip_float_extensions (sub);
12116 }
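
/* For example, strip_float_extensions of '(double) f' where 'f' is a float
   returns 'f'; for the double REAL_CST 2.0 it returns the float constant
   2.0f, since that value is exactly representable in float.  */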
12117
12118 /* Strip out all handled components that produce invariant
12119 offsets. */
12120
12121 const_tree
12122 strip_invariant_refs (const_tree op)
12123 {
12124 while (handled_component_p (op))
12125 {
12126 switch (TREE_CODE (op))
12127 {
12128 case ARRAY_REF:
12129 case ARRAY_RANGE_REF:
12130 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12131 || TREE_OPERAND (op, 2) != NULL_TREE
12132 || TREE_OPERAND (op, 3) != NULL_TREE)
12133 return NULL;
12134 break;
12135
12136 case COMPONENT_REF:
12137 if (TREE_OPERAND (op, 2) != NULL_TREE)
12138 return NULL;
12139 break;
12140
12141 default:;
12142 }
12143 op = TREE_OPERAND (op, 0);
12144 }
12145
12146 return op;
12147 }
12148
12149 static GTY(()) tree gcc_eh_personality_decl;
12150
12151 /* Return the GCC personality function decl. */
12152
12153 tree
12154 lhd_gcc_personality (void)
12155 {
12156 if (!gcc_eh_personality_decl)
12157 gcc_eh_personality_decl = build_personality_function ("gcc");
12158 return gcc_eh_personality_decl;
12159 }
12160
12161 /* TARGET is a call target of a GIMPLE call statement
12162 (obtained by gimple_call_fn). Return true if it is an
12163 OBJ_TYPE_REF representing a virtual call to a C++ method.
12164 (As opposed to OBJ_TYPE_REF representing objc calls
12165 through a cast where middle-end devirtualization machinery
12166 can't apply.) */
12167
12168 bool
12169 virtual_method_call_p (const_tree target)
12170 {
12171 if (TREE_CODE (target) != OBJ_TYPE_REF)
12172 return false;
12173 tree t = TREE_TYPE (target);
12174 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12175 t = TREE_TYPE (t);
12176 if (TREE_CODE (t) == FUNCTION_TYPE)
12177 return false;
12178 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12179 /* If we do not have BINFO associated, it means that type was built
12180 without devirtualization enabled. Do not consider this a virtual
12181 call. */
12182 if (!TYPE_BINFO (obj_type_ref_class (target)))
12183 return false;
12184 return true;
12185 }
12186
12187 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12188
12189 tree
12190 obj_type_ref_class (const_tree ref)
12191 {
12192 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12193 ref = TREE_TYPE (ref);
12194 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12195 ref = TREE_TYPE (ref);
12196 /* We look for the type THIS points to. ObjC also builds
12197 OBJ_TYPE_REF with non-method calls; their first parameter
12198 ID however also corresponds to the class type. */
12199 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12200 || TREE_CODE (ref) == FUNCTION_TYPE);
12201 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12202 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12203 return TREE_TYPE (ref);
12204 }
12205
12206 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12207
12208 static tree
12209 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12210 {
12211 unsigned int i;
12212 tree base_binfo, b;
12213
12214 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12215 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12216 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12217 return base_binfo;
12218 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12219 return b;
12220 return NULL;
12221 }
12222
12223 /* Try to find a base info of BINFO that would have its field decl at offset
12224 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12225 found, return it, otherwise return NULL_TREE. */
12226
12227 tree
12228 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12229 {
12230 tree type = BINFO_TYPE (binfo);
12231
12232 while (true)
12233 {
12234 HOST_WIDE_INT pos, size;
12235 tree fld;
12236 int i;
12237
12238 if (types_same_for_odr (type, expected_type))
12239 return binfo;
12240 if (offset < 0)
12241 return NULL_TREE;
12242
12243 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12244 {
12245 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12246 continue;
12247
12248 pos = int_bit_position (fld);
12249 size = tree_to_uhwi (DECL_SIZE (fld));
12250 if (pos <= offset && (pos + size) > offset)
12251 break;
12252 }
12253 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12254 return NULL_TREE;
12255
12256 /* Offset 0 indicates the primary base, whose vtable contents are
12257 represented in the binfo for the derived class. */
12258 else if (offset != 0)
12259 {
12260 tree found_binfo = NULL, base_binfo;
12261 /* Offsets in BINFO are in bytes relative to the whole structure
12262 while POS is in bits relative to the containing field. */
12263 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12264 / BITS_PER_UNIT);
12265
12266 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12267 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12268 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12269 {
12270 found_binfo = base_binfo;
12271 break;
12272 }
12273 if (found_binfo)
12274 binfo = found_binfo;
12275 else
12276 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12277 binfo_offset);
12278 }
12279
12280 type = TREE_TYPE (fld);
12281 offset -= pos;
12282 }
12283 }
12284
12285 /* Returns true if X is a typedef decl. */
12286
12287 bool
12288 is_typedef_decl (const_tree x)
12289 {
12290 return (x && TREE_CODE (x) == TYPE_DECL
12291 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12292 }
12293
12294 /* Returns true iff TYPE is a type variant created for a typedef. */
12295
12296 bool
12297 typedef_variant_p (const_tree type)
12298 {
12299 return is_typedef_decl (TYPE_NAME (type));
12300 }
12301
12302 /* Warn about a use of an identifier which was marked deprecated. */
12303 void
12304 warn_deprecated_use (tree node, tree attr)
12305 {
12306 const char *msg;
12307
12308 if (node == 0 || !warn_deprecated_decl)
12309 return;
12310
12311 if (!attr)
12312 {
12313 if (DECL_P (node))
12314 attr = DECL_ATTRIBUTES (node);
12315 else if (TYPE_P (node))
12316 {
12317 tree decl = TYPE_STUB_DECL (node);
12318 if (decl)
12319 attr = lookup_attribute ("deprecated",
12320 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12321 }
12322 }
12323
12324 if (attr)
12325 attr = lookup_attribute ("deprecated", attr);
12326
12327 if (attr)
12328 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12329 else
12330 msg = NULL;
12331
12332 bool w;
12333 if (DECL_P (node))
12334 {
12335 if (msg)
12336 w = warning (OPT_Wdeprecated_declarations,
12337 "%qD is deprecated: %s", node, msg);
12338 else
12339 w = warning (OPT_Wdeprecated_declarations,
12340 "%qD is deprecated", node);
12341 if (w)
12342 inform (DECL_SOURCE_LOCATION (node), "declared here");
12343 }
12344 else if (TYPE_P (node))
12345 {
12346 tree what = NULL_TREE;
12347 tree decl = TYPE_STUB_DECL (node);
12348
12349 if (TYPE_NAME (node))
12350 {
12351 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12352 what = TYPE_NAME (node);
12353 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12354 && DECL_NAME (TYPE_NAME (node)))
12355 what = DECL_NAME (TYPE_NAME (node));
12356 }
12357
12358 if (decl)
12359 {
12360 if (what)
12361 {
12362 if (msg)
12363 w = warning (OPT_Wdeprecated_declarations,
12364 "%qE is deprecated: %s", what, msg);
12365 else
12366 w = warning (OPT_Wdeprecated_declarations,
12367 "%qE is deprecated", what);
12368 }
12369 else
12370 {
12371 if (msg)
12372 w = warning (OPT_Wdeprecated_declarations,
12373 "type is deprecated: %s", msg);
12374 else
12375 w = warning (OPT_Wdeprecated_declarations,
12376 "type is deprecated");
12377 }
12378 if (w)
12379 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12380 }
12381 else
12382 {
12383 if (what)
12384 {
12385 if (msg)
12386 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12387 what, msg);
12388 else
12389 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12390 }
12391 else
12392 {
12393 if (msg)
12394 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12395 msg);
12396 else
12397 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12398 }
12399 }
12400 }
12401 }
12402
12403 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12404 somewhere in it. */
12405
12406 bool
12407 contains_bitfld_component_ref_p (const_tree ref)
12408 {
12409 while (handled_component_p (ref))
12410 {
12411 if (TREE_CODE (ref) == COMPONENT_REF
12412 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12413 return true;
12414 ref = TREE_OPERAND (ref, 0);
12415 }
12416
12417 return false;
12418 }
12419
12420 /* Try to determine whether a TRY_CATCH expression can fall through.
12421 This is a subroutine of block_may_fallthru. */
12422
12423 static bool
12424 try_catch_may_fallthru (const_tree stmt)
12425 {
12426 tree_stmt_iterator i;
12427
12428 /* If the TRY block can fall through, the whole TRY_CATCH can
12429 fall through. */
12430 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12431 return true;
12432
12433 i = tsi_start (TREE_OPERAND (stmt, 1));
12434 switch (TREE_CODE (tsi_stmt (i)))
12435 {
12436 case CATCH_EXPR:
12437 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12438 catch expression and a body. The whole TRY_CATCH may fall
12439 through iff any of the catch bodies falls through. */
12440 for (; !tsi_end_p (i); tsi_next (&i))
12441 {
12442 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12443 return true;
12444 }
12445 return false;
12446
12447 case EH_FILTER_EXPR:
12448 /* The exception filter expression only matters if there is an
12449 exception. If the exception does not match EH_FILTER_TYPES,
12450 we will execute EH_FILTER_FAILURE, and we will fall through
12451 if that falls through. If the exception does match
12452 EH_FILTER_TYPES, the stack unwinder will continue up the
12453 stack, so we will not fall through. We don't know whether we
12454 will throw an exception which matches EH_FILTER_TYPES or not,
12455 so we just ignore EH_FILTER_TYPES and assume that we might
12456 throw an exception which doesn't match. */
12457 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12458
12459 default:
12460 /* This case represents statements to be executed when an
12461 exception occurs. Those statements are implicitly followed
12462 by a RESX statement to resume execution after the exception.
12463 So in this case the TRY_CATCH never falls through. */
12464 return false;
12465 }
12466 }
12467
12468 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12469 need not be 100% accurate; simply be conservative and return true if we
12470 don't know. This is used only to avoid stupidly generating extra code.
12471 If we're wrong, we'll just delete the extra code later. */
12472
12473 bool
12474 block_may_fallthru (const_tree block)
12475 {
12476 /* This CONST_CAST is okay because expr_last returns its argument
12477 unmodified and we assign it to a const_tree. */
12478 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12479
12480 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12481 {
12482 case GOTO_EXPR:
12483 case RETURN_EXPR:
12484 /* Easy cases. If the last statement of the block implies
12485 control transfer, then we can't fall through. */
12486 return false;
12487
12488 case SWITCH_EXPR:
12489 /* If SWITCH_LABELS is set, this is lowered, and represents a
12490 branch to a selected label and hence can not fall through.
12491 Otherwise SWITCH_BODY is set, and the switch can fall
12492 through. */
12493 return SWITCH_LABELS (stmt) == NULL_TREE;
12494
12495 case COND_EXPR:
12496 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12497 return true;
12498 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12499
12500 case BIND_EXPR:
12501 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12502
12503 case TRY_CATCH_EXPR:
12504 return try_catch_may_fallthru (stmt);
12505
12506 case TRY_FINALLY_EXPR:
12507 /* The finally clause is always executed after the try clause,
12508 so if it does not fall through, then the try-finally will not
12509 fall through. Otherwise, if the try clause does not fall
12510 through, then when the finally clause falls through it will
12511 resume execution wherever the try clause was going. So the
12512 whole try-finally will only fall through if both the try
12513 clause and the finally clause fall through. */
12514 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12515 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12516
12517 case MODIFY_EXPR:
12518 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12519 stmt = TREE_OPERAND (stmt, 1);
12520 else
12521 return true;
12522 /* FALLTHRU */
12523
12524 case CALL_EXPR:
12525 /* Functions that do not return do not fall through. */
12526 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12527
12528 case CLEANUP_POINT_EXPR:
12529 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12530
12531 case TARGET_EXPR:
12532 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12533
12534 case ERROR_MARK:
12535 return true;
12536
12537 default:
12538 return lang_hooks.block_may_fallthru (stmt);
12539 }
12540 }
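
/* For instance, a block whose last statement is 'return x;' or a call to a
   noreturn function such as abort () cannot fall through, while a block
   ending in an ordinary assignment may.  */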
12541
12542 /* True if we are using EH to handle cleanups. */
12543 static bool using_eh_for_cleanups_flag = false;
12544
12545 /* This routine is called from front ends to indicate eh should be used for
12546 cleanups. */
12547 void
12548 using_eh_for_cleanups (void)
12549 {
12550 using_eh_for_cleanups_flag = true;
12551 }
12552
12553 /* Query whether EH is used for cleanups. */
12554 bool
12555 using_eh_for_cleanups_p (void)
12556 {
12557 return using_eh_for_cleanups_flag;
12558 }
12559
12560 /* Wrapper for tree_code_name to ensure that tree code is valid */
12561 const char *
12562 get_tree_code_name (enum tree_code code)
12563 {
12564 const char *invalid = "<invalid tree code>";
12565
12566 if (code >= MAX_TREE_CODES)
12567 return invalid;
12568
12569 return tree_code_name[code];
12570 }
12571
12572 /* Drops the TREE_OVERFLOW flag from T. */
12573
12574 tree
12575 drop_tree_overflow (tree t)
12576 {
12577 gcc_checking_assert (TREE_OVERFLOW (t));
12578
12579 /* For tree codes with a sharing machinery re-build the result. */
12580 if (TREE_CODE (t) == INTEGER_CST)
12581 return wide_int_to_tree (TREE_TYPE (t), t);
12582
12583 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12584 and drop the flag. */
12585 t = copy_node (t);
12586 TREE_OVERFLOW (t) = 0;
12587 return t;
12588 }
12589
12590 /* Given a memory reference expression T, return its base address.
12591 The base address of a memory reference expression is the main
12592 object being referenced. For instance, the base address for
12593 'array[i].fld[j]' is 'array'. You can think of this as stripping
12594 away the offset part from a memory address.
12595
12596 This function calls handled_component_p to strip away all the inner
12597 parts of the memory reference until it reaches the base object. */
12598
12599 tree
12600 get_base_address (tree t)
12601 {
12602 while (handled_component_p (t))
12603 t = TREE_OPERAND (t, 0);
12604
12605 if ((TREE_CODE (t) == MEM_REF
12606 || TREE_CODE (t) == TARGET_MEM_REF)
12607 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12608 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12609
12610 /* ??? Either the alias oracle or all callers need to properly deal
12611 with WITH_SIZE_EXPRs before we can look through those. */
12612 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12613 return NULL_TREE;
12614
12615 return t;
12616 }
12617
12618 /* Return a tree of sizetype representing the size, in bytes, of the element
12619 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12620
12621 tree
12622 array_ref_element_size (tree exp)
12623 {
12624 tree aligned_size = TREE_OPERAND (exp, 3);
12625 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12626 location_t loc = EXPR_LOCATION (exp);
12627
12628 /* If a size was specified in the ARRAY_REF, it's the size measured
12629 in alignment units of the element type. So multiply by that value. */
12630 if (aligned_size)
12631 {
12632 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12633 sizetype from another type of the same width and signedness. */
12634 if (TREE_TYPE (aligned_size) != sizetype)
12635 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12636 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12637 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12638 }
12639
12640 /* Otherwise, take the size from that of the element type. Substitute
12641 any PLACEHOLDER_EXPR that we have. */
12642 else
12643 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12644 }
12645
12646 /* Return a tree representing the lower bound of the array mentioned in
12647 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12648
12649 tree
12650 array_ref_low_bound (tree exp)
12651 {
12652 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12653
12654 /* If a lower bound is specified in EXP, use it. */
12655 if (TREE_OPERAND (exp, 2))
12656 return TREE_OPERAND (exp, 2);
12657
12658 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12659 substituting for a PLACEHOLDER_EXPR as needed. */
12660 if (domain_type && TYPE_MIN_VALUE (domain_type))
12661 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12662
12663 /* Otherwise, return a zero of the appropriate type. */
12664 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12665 }
12666
12667 /* Return a tree representing the upper bound of the array mentioned in
12668 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12669
12670 tree
12671 array_ref_up_bound (tree exp)
12672 {
12673 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12674
12675 /* If there is a domain type and it has an upper bound, use it, substituting
12676 for a PLACEHOLDER_EXPR as needed. */
12677 if (domain_type && TYPE_MAX_VALUE (domain_type))
12678 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12679
12680 /* Otherwise fail. */
12681 return NULL_TREE;
12682 }
12683
12684 /* Returns true if REF is an array reference to an array at the end of
12685 a structure. If this is the case, the array may be allocated larger
12686 than its upper bound implies. */
12687
12688 bool
12689 array_at_struct_end_p (tree ref)
12690 {
12691 if (TREE_CODE (ref) != ARRAY_REF
12692 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12693 return false;
12694
12695 while (handled_component_p (ref))
12696 {
12697 /* If the reference chain contains a component reference to a
12698 non-union type and another field follows, the reference
12699 is not at the end of a structure. */
12700 if (TREE_CODE (ref) == COMPONENT_REF
12701 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12702 {
12703 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12704 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12705 nextf = DECL_CHAIN (nextf);
12706 if (nextf)
12707 return false;
12708 }
12709
12710 ref = TREE_OPERAND (ref, 0);
12711 }
12712
12713 /* If the reference is based on a declared entity, the size of the array
12714 is constrained by its given domain. */
12715 if (DECL_P (ref))
12716 return false;
12717
12718 return true;
12719 }
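
/* As an example, assuming the usual layout of a trailing array member:
   given 'struct s { int n; int data[1]; }' and a pointer 'p' to it, the
   reference 'p->data[i]' satisfies array_at_struct_end_p, because 'data'
   is the last field and the access is not based on a declared object,
   whereas the same reference through a declared variable 's.data[i]'
   does not.  */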
12720
12721 /* Return a tree representing the offset, in bytes, of the field referenced
12722 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12723
12724 tree
12725 component_ref_field_offset (tree exp)
12726 {
12727 tree aligned_offset = TREE_OPERAND (exp, 2);
12728 tree field = TREE_OPERAND (exp, 1);
12729 location_t loc = EXPR_LOCATION (exp);
12730
12731 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12732 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12733 value. */
12734 if (aligned_offset)
12735 {
12736 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12737 sizetype from another type of the same width and signedness. */
12738 if (TREE_TYPE (aligned_offset) != sizetype)
12739 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12740 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12741 size_int (DECL_OFFSET_ALIGN (field)
12742 / BITS_PER_UNIT));
12743 }
12744
12745 /* Otherwise, take the offset from that of the field. Substitute
12746 any PLACEHOLDER_EXPR that we have. */
12747 else
12748 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12749 }
12750
12751 /* Return the machine mode of T. For vectors, returns the mode of the
12752 inner type. The main use case is to feed the result to HONOR_NANS,
12753 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12754
12755 machine_mode
12756 element_mode (const_tree t)
12757 {
12758 if (!TYPE_P (t))
12759 t = TREE_TYPE (t);
12760 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12761 t = TREE_TYPE (t);
12762 return TYPE_MODE (t);
12763 }
12764
12765
12766 /* Verify that basic properties of T match TV and thus T can be a variant of
12767 TV. TV should be the more specified variant (i.e. the main variant). */
12768
12769 static bool
12770 verify_type_variant (const_tree t, tree tv)
12771 {
12772 /* Type variant can differ by:
12773
12774 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12775 ENCODE_QUAL_ADDR_SPACE.
12776 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12777 in this case some values may not be set in the variant types
12778 (see TYPE_COMPLETE_P checks).
12779 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12780 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12781 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12782 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12783 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12784 this is necessary to make it possible to merge types from different TUs
12785 - arrays, pointers and references may have TREE_TYPE that is a variant
12786 of TREE_TYPE of their main variants.
12787 - aggregates may have a new TYPE_FIELDS list that lists variants of
12788 the main variant's TYPE_FIELDS.
12789 - vector types may differ by TYPE_VECTOR_OPAQUE
12790 - TYPE_METHODS is always NULL for variant types and maintained for
12791 the main variant only.
12792 */
12793
12794 /* Convenience macro for matching individual fields. */
12795 #define verify_variant_match(flag) \
12796 do { \
12797 if (flag (tv) != flag (t)) \
12798 { \
12799 error ("type variant differs by " #flag "."); \
12800 debug_tree (tv); \
12801 return false; \
12802 } \
12803 } while (false)
12804
12805 /* tree_base checks. */
12806
12807 verify_variant_match (TREE_CODE);
12808 /* FIXME: Ada builds non-artificial variants of artificial types. */
12809 if (TYPE_ARTIFICIAL (tv) && 0)
12810 verify_variant_match (TYPE_ARTIFICIAL);
12811 if (POINTER_TYPE_P (tv))
12812 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12813 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for the Ada build. */
12814 verify_variant_match (TYPE_UNSIGNED);
12815 verify_variant_match (TYPE_ALIGN_OK);
12816 verify_variant_match (TYPE_PACKED);
12817 if (TREE_CODE (t) == REFERENCE_TYPE)
12818 verify_variant_match (TYPE_REF_IS_RVALUE);
12819 verify_variant_match (TYPE_SATURATING);
12820 /* FIXME: This check triggers during the libstdc++ build. */
12821 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12822 verify_variant_match (TYPE_FINAL_P);
12823
12824 /* tree_type_common checks. */
12825
12826 if (COMPLETE_TYPE_P (t))
12827 {
12828 verify_variant_match (TYPE_SIZE);
12829 verify_variant_match (TYPE_MODE);
12830 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12831 /* FIXME: ideally we should compare pointer equality, but the Java FE
12832 produces variants where the size is an INTEGER_CST of a different type
12833 (int wrt size_type) during the libjava build. */
12834 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12835 {
12836 error ("type variant has different TYPE_SIZE_UNIT");
12837 debug_tree (tv);
12838 error ("type variant's TYPE_SIZE_UNIT");
12839 debug_tree (TYPE_SIZE_UNIT (tv));
12840 error ("type's TYPE_SIZE_UNIT");
12841 debug_tree (TYPE_SIZE_UNIT (t));
12842 return false;
12843 }
12844 }
12845 verify_variant_match (TYPE_PRECISION);
12846 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12847 if (RECORD_OR_UNION_TYPE_P (t))
12848 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12849 else if (TREE_CODE (t) == ARRAY_TYPE)
12850 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12851 /* During LTO we merge variant lists from different translation units
12852 that may differ by TYPE_CONTEXT, which in turn may point
12853 to a TRANSLATION_UNIT_DECL.
12854 Ada also builds variants of types with different TYPE_CONTEXT. */
12855 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12856 verify_variant_match (TYPE_CONTEXT);
12857 verify_variant_match (TYPE_STRING_FLAG);
12858 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12859 verify_variant_match (TYPE_ALIAS_SET);
12860
12861 /* tree_type_non_common checks. */
12862
12863 /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12864 and dangles the pointer from time to time. */
12865 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12866 && (in_lto_p || !TYPE_VFIELD (tv)
12867 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12868 {
12869 error ("type variant has different TYPE_VFIELD");
12870 debug_tree (tv);
12871 return false;
12872 }
12873 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12874 || TREE_CODE (t) == INTEGER_TYPE
12875 || TREE_CODE (t) == BOOLEAN_TYPE
12876 || TREE_CODE (t) == REAL_TYPE
12877 || TREE_CODE (t) == FIXED_POINT_TYPE)
12878 {
12879 verify_variant_match (TYPE_MAX_VALUE);
12880 verify_variant_match (TYPE_MIN_VALUE);
12881 }
12882 if (TREE_CODE (t) == METHOD_TYPE)
12883 verify_variant_match (TYPE_METHOD_BASETYPE);
12884 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12885 {
12886 error ("type variant has TYPE_METHODS");
12887 debug_tree (tv);
12888 return false;
12889 }
12890 if (TREE_CODE (t) == OFFSET_TYPE)
12891 verify_variant_match (TYPE_OFFSET_BASETYPE);
12892 if (TREE_CODE (t) == ARRAY_TYPE)
12893 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12894 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12895 or even the type's main variant. This is needed to make bootstrap pass
12896 and the bug seems new in GCC 5.
12897 C++ FE should be updated to make this consistent and we should check
12898 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12899 is a match with main variant.
12900
12901 Also disable the check for Java for now because of a parser hack that
12902 first builds a dummy BINFO and then sometimes replaces it by the real
12903 BINFO in some of the copies. */
12904 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12905 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12906 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12907 Since there is no cheap way to tell a C++ type from a Java type w/o LTO,
12908 do the checking at LTO time only. */
12909 && (in_lto_p && odr_type_p (t)))
12910 {
12911 error ("type variant has different TYPE_BINFO");
12912 debug_tree (tv);
12913 error ("type variant's TYPE_BINFO");
12914 debug_tree (TYPE_BINFO (tv));
12915 error ("type's TYPE_BINFO");
12916 debug_tree (TYPE_BINFO (t));
12917 return false;
12918 }
12919
12920 /* Check various uses of TYPE_VALUES_RAW. */
12921 if (TREE_CODE (t) == ENUMERAL_TYPE)
12922 verify_variant_match (TYPE_VALUES);
12923 else if (TREE_CODE (t) == ARRAY_TYPE)
12924 verify_variant_match (TYPE_DOMAIN);
12925 /* Permit incomplete variants of complete type. While FEs may complete
12926 all variants, this does not happen for C++ templates in all cases. */
12927 else if (RECORD_OR_UNION_TYPE_P (t)
12928 && COMPLETE_TYPE_P (t)
12929 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12930 {
12931 tree f1, f2;
12932
12933 /* Fortran builds qualified variants as new records with items of
12934 qualified type. Verify that they look the same. */
12935 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12936 f1 && f2;
12937 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12938 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12939 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12940 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12941 /* FIXME: gfc_nonrestricted_type builds all types as variants
12942 with the exception of pointer types. It deeply copies the type
12943 which means that we may end up with a variant type
12944 referring to a non-variant pointer. We may change it to
12945 produce types as variants, too, like
12946 objc_get_protocol_qualified_type does. */
12947 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12948 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12949 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12950 break;
12951 if (f1 || f2)
12952 {
12953 error ("type variant has different TYPE_FIELDS");
12954 debug_tree (tv);
12955 error ("first mismatch is field");
12956 debug_tree (f1);
12957 error ("and field");
12958 debug_tree (f2);
12959 return false;
12960 }
12961 }
12962 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12963 verify_variant_match (TYPE_ARG_TYPES);
12964   /* For C++ the qualified variant of an array type is really an array type
12965      of the qualified TREE_TYPE.
12966      ObjC builds variants of pointer types where the pointed-to type is a
12967      variant, too, in objc_get_protocol_qualified_type.  */
12968 if (TREE_TYPE (t) != TREE_TYPE (tv)
12969 && ((TREE_CODE (t) != ARRAY_TYPE
12970 && !POINTER_TYPE_P (t))
12971 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12972 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12973 {
12974 error ("type variant has different TREE_TYPE");
12975 debug_tree (tv);
12976 error ("type variant's TREE_TYPE");
12977 debug_tree (TREE_TYPE (tv));
12978 error ("type's TREE_TYPE");
12979 debug_tree (TREE_TYPE (t));
12980 return false;
12981 }
12982 if (type_with_alias_set_p (t)
12983 && !gimple_canonical_types_compatible_p (t, tv, false))
12984 {
12985 error ("type is not compatible with its vairant");
12986 debug_tree (tv);
12987 error ("type variant's TREE_TYPE");
12988 debug_tree (TREE_TYPE (tv));
12989 error ("type's TREE_TYPE");
12990 debug_tree (TREE_TYPE (t));
12991 return false;
12992 }
12993 return true;
12994 #undef verify_variant_match
12995 }
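
/* Illustrative sketch (an assumption about typical front-end use, not code
   taken from this file): a qualified variant is normally created with
   build_qualified_type, e.g.

     tree cv = build_qualified_type (t, TYPE_QUAL_CONST);

   and verify_type_variant above checks that such a variant agrees with its
   TYPE_MAIN_VARIANT on properties that all variants must share, such as
   TYPE_FIELDS, TYPE_ARG_TYPES, TREE_TYPE and TYPE_BINFO.  */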
12996
12997
12998 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12999 the middle-end types_compatible_p function. It needs to avoid
13000 claiming types are different for types that should be treated
13001 the same with respect to TBAA. Canonical types are also used
13002 for IL consistency checks via the useless_type_conversion_p
13003 predicate which does not handle all type kinds itself but falls
13004 back to pointer-comparison of TYPE_CANONICAL for aggregates
13005 for example. */
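
/* Illustrative sketch of the intended use (not a definition from this file):
   once canonical types have been computed and can be trusted, a TBAA
   equivalence query on two aggregate types reduces to a pointer comparison,
   e.g.

     if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2))
       equivalent = (TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2));

   which is exactly the shortcut gimple_canonical_types_compatible_p below
   takes when TRUST_TYPE_CANONICAL is set.  */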
13006
13007 /* Return true iff T1 and T2 are structurally identical as far as
13008    TBAA is concerned.
13009    This function is used both by lto.c canonical type merging and by the
13010    verifier.  If TRUST_TYPE_CANONICAL is set, we do not look into the structure
13011    of types that have TYPE_CANONICAL defined and assume them equivalent.  */
13012
13013 bool
13014 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13015 bool trust_type_canonical)
13016 {
13017   /* Type variants should be the same as the main variant.  When not doing sanity
13018 checking to verify this fact, go to main variants and save some work. */
13019 if (trust_type_canonical)
13020 {
13021 t1 = TYPE_MAIN_VARIANT (t1);
13022 t2 = TYPE_MAIN_VARIANT (t2);
13023 }
13024
13025 /* Check first for the obvious case of pointer identity. */
13026 if (t1 == t2)
13027 return true;
13028
13029 /* Check that we have two types to compare. */
13030 if (t1 == NULL_TREE || t2 == NULL_TREE)
13031 return false;
13032
13033   /* We consider complete types always compatible with incomplete types.
13034      This does not make sense for canonical type calculation and thus we
13035      need to ensure that we are never called on incomplete types here.
13036
13037      FIXME: For more correctness the function probably should have three modes
13038 	1) a mode assuming that types are complete and matching their structure
13039 	2) a mode allowing incomplete types but producing equivalence classes
13040 	   and thus ignoring all info from complete types
13041 	3) a mode allowing incomplete types to match complete ones but checking
13042 	   compatibility between complete types.
13043
13044      1 and 2 can be used for canonical type calculation.  3 is the real
13045      definition of type compatibility that can be used e.g. for warnings during
13046      declaration merging.  */
13047
13048 gcc_assert (!trust_type_canonical
13049 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13050 /* If the types have been previously registered and found equal
13051 they still are. */
13052 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13053 && trust_type_canonical)
13054 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13055
13056 /* Can't be the same type if the types don't have the same code. */
13057 if (tree_code_for_canonical_type_merging (TREE_CODE (t1))
13058 != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13059 return false;
13060
13061 /* Qualifiers do not matter for canonical type comparison purposes. */
13062
13063 /* Void types and nullptr types are always the same. */
13064 if (TREE_CODE (t1) == VOID_TYPE
13065 || TREE_CODE (t1) == NULLPTR_TYPE)
13066 return true;
13067
13068 /* Can't be the same type if they have different mode. */
13069 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13070 return false;
13071
13072 /* Non-aggregate types can be handled cheaply. */
13073 if (INTEGRAL_TYPE_P (t1)
13074 || SCALAR_FLOAT_TYPE_P (t1)
13075 || FIXED_POINT_TYPE_P (t1)
13076 || TREE_CODE (t1) == VECTOR_TYPE
13077 || TREE_CODE (t1) == COMPLEX_TYPE
13078 || TREE_CODE (t1) == OFFSET_TYPE
13079 || POINTER_TYPE_P (t1))
13080 {
13081 /* Can't be the same type if they have different sign or precision. */
13082 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
13083 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
13084 return false;
13085
13086 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13087 interoperable with "signed char". Unless all frontends are revisited
13088 to agree on these types, we must ignore the flag completely. */
13089
13090       /* The Fortran standard defines the C_PTR type to be compatible with every
13091 	 C pointer.  For this reason we need to glob all pointers into one class.
13092 	 Still, pointers in different address spaces are not compatible.  */
13093 if (POINTER_TYPE_P (t1))
13094 {
13095 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13096 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13097 return false;
13098 }
13099
13100 /* Tail-recurse to components. */
13101 if (TREE_CODE (t1) == VECTOR_TYPE
13102 || TREE_CODE (t1) == COMPLEX_TYPE)
13103 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13104 TREE_TYPE (t2),
13105 trust_type_canonical);
13106
13107 return true;
13108 }
13109
13110 /* Do type-specific comparisons. */
13111 switch (TREE_CODE (t1))
13112 {
13113 case ARRAY_TYPE:
13114 /* Array types are the same if the element types are the same and
13115 	 the number of elements is the same.  */
13116 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13117 trust_type_canonical)
13118 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13119 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13120 return false;
13121 else
13122 {
13123 tree i1 = TYPE_DOMAIN (t1);
13124 tree i2 = TYPE_DOMAIN (t2);
13125
13126 /* For an incomplete external array, the type domain can be
13127 NULL_TREE. Check this condition also. */
13128 if (i1 == NULL_TREE && i2 == NULL_TREE)
13129 return true;
13130 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13131 return false;
13132 else
13133 {
13134 tree min1 = TYPE_MIN_VALUE (i1);
13135 tree min2 = TYPE_MIN_VALUE (i2);
13136 tree max1 = TYPE_MAX_VALUE (i1);
13137 tree max2 = TYPE_MAX_VALUE (i2);
13138
13139 /* The minimum/maximum values have to be the same. */
13140 if ((min1 == min2
13141 || (min1 && min2
13142 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13143 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13144 || operand_equal_p (min1, min2, 0))))
13145 && (max1 == max2
13146 || (max1 && max2
13147 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13148 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13149 || operand_equal_p (max1, max2, 0)))))
13150 return true;
13151 else
13152 return false;
13153 }
13154 }
13155
13156 case METHOD_TYPE:
13157 case FUNCTION_TYPE:
13158       /* Function types are the same if the return type and argument types
13159 are the same. */
13160 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13161 trust_type_canonical))
13162 return false;
13163
13164 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13165 return true;
13166 else
13167 {
13168 tree parms1, parms2;
13169
13170 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13171 parms1 && parms2;
13172 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13173 {
13174 if (!gimple_canonical_types_compatible_p
13175 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13176 trust_type_canonical))
13177 return false;
13178 }
13179
13180 if (parms1 || parms2)
13181 return false;
13182
13183 return true;
13184 }
13185
13186 case RECORD_TYPE:
13187 case UNION_TYPE:
13188 case QUAL_UNION_TYPE:
13189 {
13190 tree f1, f2;
13191
13192 /* For aggregate types, all the fields must be the same. */
13193 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13194 f1 || f2;
13195 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13196 {
13197 /* Skip non-fields. */
13198 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13199 f1 = TREE_CHAIN (f1);
13200 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13201 f2 = TREE_CHAIN (f2);
13202 if (!f1 || !f2)
13203 break;
13204 	    /* The fields must have the same non-addressability, offset and compatible type; names are not compared.  */
13205 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13206 || !gimple_compare_field_offset (f1, f2)
13207 || !gimple_canonical_types_compatible_p
13208 (TREE_TYPE (f1), TREE_TYPE (f2),
13209 trust_type_canonical))
13210 return false;
13211 }
13212
13213 /* If one aggregate has more fields than the other, they
13214 are not the same. */
13215 if (f1 || f2)
13216 return false;
13217
13218 return true;
13219 }
13220
13221 default:
13222 /* Consider all types with language specific trees in them mutually
13223 compatible. This is executed only from verify_type and false
13224 positives can be tolerated. */
13225 gcc_assert (!in_lto_p);
13226 return true;
13227 }
13228 }
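
/* For example (an illustrative assumption about typical input, not code from
   this file): because the comparison above is purely structural for records,
   two independently declared types such as

     struct a { int i; float f; };
     struct b { int i; float f; };

   are considered canonically compatible: their fields have matching offsets,
   matching non-addressability and canonically compatible types, and field
   names play no role in the comparison.  */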
13229
13230 /* Verify the internal consistency of type T; on failure dump the offending trees and abort with an internal error.  */
13231
13232 void
13233 verify_type (const_tree t)
13234 {
13235 bool error_found = false;
13236 tree mv = TYPE_MAIN_VARIANT (t);
13237 if (!mv)
13238 {
13239 error ("Main variant is not defined");
13240 error_found = true;
13241 }
13242 else if (mv != TYPE_MAIN_VARIANT (mv))
13243 {
13244 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13245 debug_tree (mv);
13246 error_found = true;
13247 }
13248 else if (t != mv && !verify_type_variant (t, mv))
13249 error_found = true;
13250
13251 tree ct = TYPE_CANONICAL (t);
13252 if (!ct)
13253 ;
13254   else if (TYPE_CANONICAL (ct) != ct)
13255 {
13256 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13257 debug_tree (ct);
13258 error_found = true;
13259 }
13260   /* Method and function types cannot be used to address memory and thus
13261      TYPE_CANONICAL really matters only for determining useless conversions.
13262
13263      FIXME: The C++ FE produces declarations of builtin functions that are not
13264      compatible with main variants.  */
13265 else if (TREE_CODE (t) == FUNCTION_TYPE)
13266 ;
13267 else if (t != ct
13268 	   /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13269 	      with variably sized arrays because their sizes are possibly
13270 	      gimplified to different variables.  */
13271 && !variably_modified_type_p (ct, NULL)
13272 && !gimple_canonical_types_compatible_p (t, ct, false))
13273 {
13274 error ("TYPE_CANONICAL is not compatible");
13275 debug_tree (ct);
13276 error_found = true;
13277 }
13278
13279
13280 /* Check various uses of TYPE_MINVAL. */
13281 if (RECORD_OR_UNION_TYPE_P (t))
13282 {
13283 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13284 	 and dangles the pointer from time to time.  */
13285 if (TYPE_VFIELD (t)
13286 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13287 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13288 {
13289 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13290 debug_tree (TYPE_VFIELD (t));
13291 error_found = true;
13292 }
13293 }
13294 else if (TREE_CODE (t) == POINTER_TYPE)
13295 {
13296 if (TYPE_NEXT_PTR_TO (t)
13297 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13298 {
13299 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13300 debug_tree (TYPE_NEXT_PTR_TO (t));
13301 error_found = true;
13302 }
13303 }
13304 else if (TREE_CODE (t) == REFERENCE_TYPE)
13305 {
13306 if (TYPE_NEXT_REF_TO (t)
13307 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13308 {
13309 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13310 debug_tree (TYPE_NEXT_REF_TO (t));
13311 error_found = true;
13312 }
13313 }
13314 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13315 || TREE_CODE (t) == FIXED_POINT_TYPE)
13316 {
13317 /* FIXME: The following check should pass:
13318 useless_type_conversion_p (const_cast <tree> (t),
13319 	 TREE_TYPE (TYPE_MIN_VALUE (t)))
13320 but does not for C sizetypes in LTO. */
13321 }
13322 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13323 else if (TYPE_MINVAL (t)
13324 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13325 || in_lto_p))
13326 {
13327 error ("TYPE_MINVAL non-NULL");
13328 debug_tree (TYPE_MINVAL (t));
13329 error_found = true;
13330 }
13331
13332 /* Check various uses of TYPE_MAXVAL. */
13333 if (RECORD_OR_UNION_TYPE_P (t))
13334 {
13335 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13336 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13337 && TYPE_METHODS (t) != error_mark_node)
13338 {
13339 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13340 debug_tree (TYPE_METHODS (t));
13341 error_found = true;
13342 }
13343 }
13344 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13345 {
13346 if (TYPE_METHOD_BASETYPE (t)
13347 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13348 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13349 {
13350 error ("TYPE_METHOD_BASETYPE is not record nor union");
13351 debug_tree (TYPE_METHOD_BASETYPE (t));
13352 error_found = true;
13353 }
13354 }
13355 else if (TREE_CODE (t) == OFFSET_TYPE)
13356 {
13357 if (TYPE_OFFSET_BASETYPE (t)
13358 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13359 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13360 {
13361 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13362 debug_tree (TYPE_OFFSET_BASETYPE (t));
13363 error_found = true;
13364 }
13365 }
13366 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13367 || TREE_CODE (t) == FIXED_POINT_TYPE)
13368 {
13369 /* FIXME: The following check should pass:
13370 useless_type_conversion_p (const_cast <tree> (t),
13371 	 TREE_TYPE (TYPE_MAX_VALUE (t)))
13372 but does not for C sizetypes in LTO. */
13373 }
13374 else if (TREE_CODE (t) == ARRAY_TYPE)
13375 {
13376 if (TYPE_ARRAY_MAX_SIZE (t)
13377 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13378 {
13379 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13380 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13381 error_found = true;
13382 }
13383 }
13384 else if (TYPE_MAXVAL (t))
13385 {
13386 error ("TYPE_MAXVAL non-NULL");
13387 debug_tree (TYPE_MAXVAL (t));
13388 error_found = true;
13389 }
13390
13391 /* Check various uses of TYPE_BINFO. */
13392 if (RECORD_OR_UNION_TYPE_P (t))
13393 {
13394 if (!TYPE_BINFO (t))
13395 ;
13396 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13397 {
13398 error ("TYPE_BINFO is not TREE_BINFO");
13399 debug_tree (TYPE_BINFO (t));
13400 error_found = true;
13401 }
13402 /* FIXME: Java builds invalid empty binfos that do not have
13403 TREE_TYPE set. */
13404 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13405 {
13406 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13407 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13408 error_found = true;
13409 }
13410 }
13411 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13412 {
13413 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13414 debug_tree (TYPE_LANG_SLOT_1 (t));
13415 error_found = true;
13416 }
13417
13418 /* Check various uses of TYPE_VALUES_RAW. */
13419 if (TREE_CODE (t) == ENUMERAL_TYPE)
13420 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13421 {
13422 tree value = TREE_VALUE (l);
13423 tree name = TREE_PURPOSE (l);
13424
13425 	/* C FE produces INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13426 	   CONST_DECL of ENUMERAL_TYPE.  */
13427 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13428 {
13429 error ("Enum value is not CONST_DECL or INTEGER_CST");
13430 debug_tree (value);
13431 debug_tree (name);
13432 error_found = true;
13433 }
13434 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13435 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13436 {
13437 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13438 debug_tree (value);
13439 debug_tree (name);
13440 error_found = true;
13441 }
13442 if (TREE_CODE (name) != IDENTIFIER_NODE)
13443 {
13444 error ("Enum value name is not IDENTIFIER_NODE");
13445 debug_tree (value);
13446 debug_tree (name);
13447 error_found = true;
13448 }
13449 }
13450 else if (TREE_CODE (t) == ARRAY_TYPE)
13451 {
13452 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13453 {
13454 error ("Array TYPE_DOMAIN is not integer type");
13455 debug_tree (TYPE_DOMAIN (t));
13456 error_found = true;
13457 }
13458 }
13459 else if (RECORD_OR_UNION_TYPE_P (t))
13460 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13461 {
13462 /* TODO: verify properties of decls. */
13463 if (TREE_CODE (fld) == FIELD_DECL)
13464 ;
13465 else if (TREE_CODE (fld) == TYPE_DECL)
13466 ;
13467 else if (TREE_CODE (fld) == CONST_DECL)
13468 ;
13469 else if (TREE_CODE (fld) == VAR_DECL)
13470 ;
13471 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13472 ;
13473 else if (TREE_CODE (fld) == USING_DECL)
13474 ;
13475 else
13476 {
13477 error ("Wrong tree in TYPE_FIELDS list");
13478 debug_tree (fld);
13479 error_found = true;
13480 }
13481 }
13482 else if (TREE_CODE (t) == INTEGER_TYPE
13483 || TREE_CODE (t) == BOOLEAN_TYPE
13484 || TREE_CODE (t) == OFFSET_TYPE
13485 || TREE_CODE (t) == REFERENCE_TYPE
13486 || TREE_CODE (t) == NULLPTR_TYPE
13487 || TREE_CODE (t) == POINTER_TYPE)
13488 {
13489 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13490 {
13491 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13492 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13493 error_found = true;
13494 }
13495 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13496 {
13497 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13498 debug_tree (TYPE_CACHED_VALUES (t));
13499 error_found = true;
13500 }
13501       /* Verify just enough of the cache to ensure that no one copied it to a new
13502 	 type.  All copying should go through copy_node, which should clear it.  */
13503 else if (TYPE_CACHED_VALUES_P (t))
13504 {
13505 int i;
13506 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13507 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13508 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13509 {
13510 error ("wrong TYPE_CACHED_VALUES entry");
13511 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13512 error_found = true;
13513 break;
13514 }
13515 }
13516 }
13517 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13518 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13519 {
13520 /* C++ FE uses TREE_PURPOSE to store initial values. */
13521 if (TREE_PURPOSE (l) && in_lto_p)
13522 {
13523 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13524 debug_tree (l);
13525 error_found = true;
13526 }
13527 if (!TYPE_P (TREE_VALUE (l)))
13528 {
13529 error ("Wrong entry in TYPE_ARG_TYPES list");
13530 debug_tree (l);
13531 error_found = true;
13532 }
13533 }
13534 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13535 {
13536 error ("TYPE_VALUES_RAW field is non-NULL");
13537 debug_tree (TYPE_VALUES_RAW (t));
13538 error_found = true;
13539 }
13540 if (TREE_CODE (t) != INTEGER_TYPE
13541 && TREE_CODE (t) != BOOLEAN_TYPE
13542 && TREE_CODE (t) != OFFSET_TYPE
13543 && TREE_CODE (t) != REFERENCE_TYPE
13544 && TREE_CODE (t) != NULLPTR_TYPE
13545 && TREE_CODE (t) != POINTER_TYPE
13546 && TYPE_CACHED_VALUES_P (t))
13547 {
13548 error ("TYPE_CACHED_VALUES_P is set while it should not");
13549 error_found = true;
13550 }
13551 if (TYPE_STRING_FLAG (t)
13552 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13553 {
13554 error ("TYPE_STRING_FLAG is set on wrong type code");
13555 error_found = true;
13556 }
13557 else if (TYPE_STRING_FLAG (t))
13558 {
13559 const_tree b = t;
13560 if (TREE_CODE (b) == ARRAY_TYPE)
13561 b = TREE_TYPE (t);
13562 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13563 	 which is 32 bits.  */
13564 if (TREE_CODE (b) != INTEGER_TYPE)
13565 {
13566 error ("TYPE_STRING_FLAG is set on type that does not look like "
13567 "char nor array of chars");
13568 error_found = true;
13569 }
13570 }
13571
13572 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13573      the TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13574      of a type.  */
13575 if (TREE_CODE (t) == METHOD_TYPE
13576 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13577 {
13578 error ("TYPE_METHOD_BASETYPE is not main variant");
13579 error_found = true;
13580 }
13581
13582 if (error_found)
13583 {
13584 debug_tree (const_cast <tree> (t));
13585 internal_error ("verify_type failed");
13586 }
13587 }
13588
13589
13590 /* Return true if ARG is marked with the nonnull attribute in the
13591 current function signature. */
13592
13593 bool
13594 nonnull_arg_p (const_tree arg)
13595 {
13596 tree t, attrs, fntype;
13597 unsigned HOST_WIDE_INT arg_num;
13598
13599 gcc_assert (TREE_CODE (arg) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (arg)));
13600
13601   /* The static chain decl is always non-null.  */
13602 if (arg == cfun->static_chain_decl)
13603 return true;
13604
13605   /* The THIS argument of a method is always non-NULL.  */
13606 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13607 && arg == DECL_ARGUMENTS (cfun->decl)
13608 && flag_delete_null_pointer_checks)
13609 return true;
13610
13611 /* Values passed by reference are always non-NULL. */
13612 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13613 && flag_delete_null_pointer_checks)
13614 return true;
13615
13616 fntype = TREE_TYPE (cfun->decl);
13617 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13618 {
13619 attrs = lookup_attribute ("nonnull", attrs);
13620
13621 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13622 if (attrs == NULL_TREE)
13623 return false;
13624
13625 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13626 if (TREE_VALUE (attrs) == NULL_TREE)
13627 return true;
13628
13629 /* Get the position number for ARG in the function signature. */
13630 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13631 t;
13632 t = DECL_CHAIN (t), arg_num++)
13633 {
13634 if (t == arg)
13635 break;
13636 }
13637
13638 gcc_assert (t == arg);
13639
13640 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13641 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13642 {
13643 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13644 return true;
13645 }
13646 }
13647
13648 return false;
13649 }
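
/* A minimal usage sketch (an assumption about a typical caller, not code
   from this file; it requires cfun to be set up for the function being
   compiled).  Given a declaration using the standard "nonnull" attribute:

     extern void consume (void *p, void *q) __attribute__ ((nonnull (1)));

   a pass compiling the body of consume can ask, for each pointer PARM_DECL,
   whether it may be assumed non-NULL on entry:

     for (tree parm = DECL_ARGUMENTS (cfun->decl); parm;
	  parm = DECL_CHAIN (parm))
       if (POINTER_TYPE_P (TREE_TYPE (parm)) && nonnull_arg_p (parm))
	 mark_parm_as_nonnull (parm);

   Here mark_parm_as_nonnull is a hypothetical helper; nonnull_arg_p itself
   asserts that its argument is a PARM_DECL of pointer type, so the
   POINTER_TYPE_P guard above is part of correct usage.  */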
13650
13651
13652 #include "gt-tree.h"