1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "tree.h"
35 #include "gimple.h"
36 #include "rtl.h"
37 #include "ssa.h"
38 #include "flags.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "calls.h"
43 #include "attribs.h"
44 #include "varasm.h"
45 #include "tm_p.h"
46 #include "toplev.h" /* get_random_seed */
47 #include "filenames.h"
48 #include "output.h"
49 #include "target.h"
50 #include "common/common-target.h"
51 #include "langhooks.h"
52 #include "tree-inline.h"
53 #include "tree-iterator.h"
54 #include "internal-fn.h"
55 #include "gimple-iterator.h"
56 #include "gimplify.h"
57 #include "cgraph.h"
58 #include "insn-config.h"
59 #include "expmed.h"
60 #include "dojump.h"
61 #include "explow.h"
62 #include "emit-rtl.h"
63 #include "stmt.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66 #include "params.h"
67 #include "tree-pass.h"
68 #include "langhooks-def.h"
69 #include "diagnostic.h"
70 #include "tree-diagnostic.h"
71 #include "tree-pretty-print.h"
72 #include "except.h"
73 #include "debug.h"
74 #include "intl.h"
75 #include "builtins.h"
76 #include "print-tree.h"
77 #include "ipa-utils.h"
78
79 /* Tree code classes. */
80
81 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
82 #define END_OF_BASE_TREE_CODES tcc_exceptional,
83
84 const enum tree_code_class tree_code_type[] = {
85 #include "all-tree.def"
86 };
87
88 #undef DEFTREECODE
89 #undef END_OF_BASE_TREE_CODES
90
91 /* Table indexed by tree code giving number of expression
92 operands beyond the fixed part of the node structure.
93 Not used for types or decls. */
94
95 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
96 #define END_OF_BASE_TREE_CODES 0,
97
98 const unsigned char tree_code_length[] = {
99 #include "all-tree.def"
100 };
101
102 #undef DEFTREECODE
103 #undef END_OF_BASE_TREE_CODES
104
105 /* Names of tree components.
106 Used for printing out the tree and error messages. */
107 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
108 #define END_OF_BASE_TREE_CODES "@dummy",
109
110 static const char *const tree_code_name[] = {
111 #include "all-tree.def"
112 };
113
114 #undef DEFTREECODE
115 #undef END_OF_BASE_TREE_CODES
116
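/* Illustrative sketch (the entry shown is the usual tree.def form, quoted
   here only as an example): a definition such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   expands through the three macro definitions above so that
   tree_code_type[PLUS_EXPR] is tcc_binary,
   tree_code_length[PLUS_EXPR] is 2, and
   tree_code_name[PLUS_EXPR] is "plus_expr".  */
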
117 /* Each tree code class has an associated string representation.
118 These must correspond to the tree_code_class entries. */
119
120 const char *const tree_code_class_strings[] =
121 {
122 "exceptional",
123 "constant",
124 "type",
125 "declaration",
126 "reference",
127 "comparison",
128 "unary",
129 "binary",
130 "statement",
131 "vl_exp",
132 "expression"
133 };
134
135 /* obstack.[ch] explicitly declined to prototype this. */
136 extern int _obstack_allocated_p (struct obstack *h, void *obj);
137
138 /* Statistics-gathering stuff. */
139
140 static int tree_code_counts[MAX_TREE_CODES];
141 int tree_node_counts[(int) all_kinds];
142 int tree_node_sizes[(int) all_kinds];
143
144 /* Keep in sync with tree.h:enum tree_node_kind. */
145 static const char * const tree_node_kind_names[] = {
146 "decls",
147 "types",
148 "blocks",
149 "stmts",
150 "refs",
151 "exprs",
152 "constants",
153 "identifiers",
154 "vecs",
155 "binfos",
156 "ssa names",
157 "constructors",
158 "random kinds",
159 "lang_decl kinds",
160 "lang_type kinds",
161 "omp clauses",
162 };
163
164 /* Unique id for next decl created. */
165 static GTY(()) int next_decl_uid;
166 /* Unique id for next type created. */
167 static GTY(()) int next_type_uid = 1;
168 /* Unique id for next debug decl created. Use negative numbers,
169 to catch erroneous uses. */
170 static GTY(()) int next_debug_decl_uid;
171
172 /* Since we cannot rehash a type after it is in the table, we have to
173 keep the hash code. */
174
175 struct GTY((for_user)) type_hash {
176 unsigned long hash;
177 tree type;
178 };
179
180 /* Initial size of the hash table (rounded to next prime). */
181 #define TYPE_HASH_INITIAL_SIZE 1000
182
183 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
184 {
185 static hashval_t hash (type_hash *t) { return t->hash; }
186 static bool equal (type_hash *a, type_hash *b);
187
188 static int
189 keep_cache_entry (type_hash *&t)
190 {
191 return ggc_marked_p (t->type);
192 }
193 };
194
195 /* Now here is the hash table. When recording a type, it is added to
196 the slot whose index is the hash code. Note that the hash table is
197 used for several kinds of types (function types, array types and
198 array index range types, for now). While all these live in the
199 same table, they are completely independent, and the hash code is
200 computed differently for each of these. */
201
202 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
203
204 /* Hash table and temporary node for larger integer const values. */
205 static GTY (()) tree int_cst_node;
206
207 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
208 {
209 static hashval_t hash (tree t);
210 static bool equal (tree x, tree y);
211 };
212
213 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
214
215 /* Hash table for optimization flags and target option flags. Use the same
216 hash table for both sets of options. Nodes for building the current
217 optimization and target option nodes. The assumption is most of the time
218 the options created will already be in the hash table, so we avoid
219 allocating and freeing up a node repeatedly. */
220 static GTY (()) tree cl_optimization_node;
221 static GTY (()) tree cl_target_option_node;
222
223 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
224 {
225 static hashval_t hash (tree t);
226 static bool equal (tree x, tree y);
227 };
228
229 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
230
231 /* General tree->tree mapping structure for use in hash tables. */
232
233
234 static GTY ((cache))
235 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
236
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
239
240 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
241 {
242 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
243
244 static bool
245 equal (tree_vec_map *a, tree_vec_map *b)
246 {
247 return a->base.from == b->base.from;
248 }
249
250 static int
251 keep_cache_entry (tree_vec_map *&m)
252 {
253 return ggc_marked_p (m->base.from);
254 }
255 };
256
257 static GTY ((cache))
258 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
259
260 static void set_type_quals (tree, int);
261 static void print_type_hash_statistics (void);
262 static void print_debug_expr_statistics (void);
263 static void print_value_expr_statistics (void);
264 static void type_hash_list (const_tree, inchash::hash &);
265 static void attribute_hash_list (const_tree, inchash::hash &);
266
267 tree global_trees[TI_MAX];
268 tree integer_types[itk_none];
269
270 bool int_n_enabled_p[NUM_INT_N_ENTS];
271 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
272
273 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
274
275 /* Number of operands for each OpenMP clause. */
276 unsigned const char omp_clause_num_ops[] =
277 {
278 0, /* OMP_CLAUSE_ERROR */
279 1, /* OMP_CLAUSE_PRIVATE */
280 1, /* OMP_CLAUSE_SHARED */
281 1, /* OMP_CLAUSE_FIRSTPRIVATE */
282 2, /* OMP_CLAUSE_LASTPRIVATE */
283 5, /* OMP_CLAUSE_REDUCTION */
284 1, /* OMP_CLAUSE_COPYIN */
285 1, /* OMP_CLAUSE_COPYPRIVATE */
286 3, /* OMP_CLAUSE_LINEAR */
287 2, /* OMP_CLAUSE_ALIGNED */
288 1, /* OMP_CLAUSE_DEPEND */
289 1, /* OMP_CLAUSE_UNIFORM */
290 1, /* OMP_CLAUSE_TO_DECLARE */
291 1, /* OMP_CLAUSE_LINK */
292 2, /* OMP_CLAUSE_FROM */
293 2, /* OMP_CLAUSE_TO */
294 2, /* OMP_CLAUSE_MAP */
295 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
296 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
297 2, /* OMP_CLAUSE__CACHE_ */
298 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
299 1, /* OMP_CLAUSE_USE_DEVICE */
300 2, /* OMP_CLAUSE_GANG */
301 1, /* OMP_CLAUSE_ASYNC */
302 1, /* OMP_CLAUSE_WAIT */
303 0, /* OMP_CLAUSE_AUTO */
304 0, /* OMP_CLAUSE_SEQ */
305 1, /* OMP_CLAUSE__LOOPTEMP_ */
306 1, /* OMP_CLAUSE_IF */
307 1, /* OMP_CLAUSE_NUM_THREADS */
308 1, /* OMP_CLAUSE_SCHEDULE */
309 0, /* OMP_CLAUSE_NOWAIT */
310 1, /* OMP_CLAUSE_ORDERED */
311 0, /* OMP_CLAUSE_DEFAULT */
312 3, /* OMP_CLAUSE_COLLAPSE */
313 0, /* OMP_CLAUSE_UNTIED */
314 1, /* OMP_CLAUSE_FINAL */
315 0, /* OMP_CLAUSE_MERGEABLE */
316 1, /* OMP_CLAUSE_DEVICE */
317 1, /* OMP_CLAUSE_DIST_SCHEDULE */
318 0, /* OMP_CLAUSE_INBRANCH */
319 0, /* OMP_CLAUSE_NOTINBRANCH */
320 1, /* OMP_CLAUSE_NUM_TEAMS */
321 1, /* OMP_CLAUSE_THREAD_LIMIT */
322 0, /* OMP_CLAUSE_PROC_BIND */
323 1, /* OMP_CLAUSE_SAFELEN */
324 1, /* OMP_CLAUSE_SIMDLEN */
325 0, /* OMP_CLAUSE_FOR */
326 0, /* OMP_CLAUSE_PARALLEL */
327 0, /* OMP_CLAUSE_SECTIONS */
328 0, /* OMP_CLAUSE_TASKGROUP */
329 1, /* OMP_CLAUSE_PRIORITY */
330 1, /* OMP_CLAUSE_GRAINSIZE */
331 1, /* OMP_CLAUSE_NUM_TASKS */
332 0, /* OMP_CLAUSE_NOGROUP */
333 0, /* OMP_CLAUSE_THREADS */
334 0, /* OMP_CLAUSE_SIMD */
335 1, /* OMP_CLAUSE_HINT */
336 0, /* OMP_CLAUSE_DEFAULTMAP */
337 1, /* OMP_CLAUSE__SIMDUID_ */
338 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
339 0, /* OMP_CLAUSE_INDEPENDENT */
340 1, /* OMP_CLAUSE_WORKER */
341 1, /* OMP_CLAUSE_VECTOR */
342 1, /* OMP_CLAUSE_NUM_GANGS */
343 1, /* OMP_CLAUSE_NUM_WORKERS */
344 1, /* OMP_CLAUSE_VECTOR_LENGTH */
345 };
346
347 const char * const omp_clause_code_name[] =
348 {
349 "error_clause",
350 "private",
351 "shared",
352 "firstprivate",
353 "lastprivate",
354 "reduction",
355 "copyin",
356 "copyprivate",
357 "linear",
358 "aligned",
359 "depend",
360 "uniform",
361 "to",
362 "link",
363 "from",
364 "to",
365 "map",
366 "use_device_ptr",
367 "is_device_ptr",
368 "_cache_",
369 "device_resident",
370 "use_device",
371 "gang",
372 "async",
373 "wait",
374 "auto",
375 "seq",
376 "_looptemp_",
377 "if",
378 "num_threads",
379 "schedule",
380 "nowait",
381 "ordered",
382 "default",
383 "collapse",
384 "untied",
385 "final",
386 "mergeable",
387 "device",
388 "dist_schedule",
389 "inbranch",
390 "notinbranch",
391 "num_teams",
392 "thread_limit",
393 "proc_bind",
394 "safelen",
395 "simdlen",
396 "for",
397 "parallel",
398 "sections",
399 "taskgroup",
400 "priority",
401 "grainsize",
402 "num_tasks",
403 "nogroup",
404 "threads",
405 "simd",
406 "hint",
407 "defaultmap",
408 "_simduid_",
409 "_Cilk_for_count_",
410 "independent",
411 "worker",
412 "vector",
413 "num_gangs",
414 "num_workers",
415 "vector_length"
416 };
417
418
419 /* Return the tree node structure used by tree code CODE. */
420
421 static inline enum tree_node_structure_enum
422 tree_node_structure_for_code (enum tree_code code)
423 {
424 switch (TREE_CODE_CLASS (code))
425 {
426 case tcc_declaration:
427 {
428 switch (code)
429 {
430 case FIELD_DECL:
431 return TS_FIELD_DECL;
432 case PARM_DECL:
433 return TS_PARM_DECL;
434 case VAR_DECL:
435 return TS_VAR_DECL;
436 case LABEL_DECL:
437 return TS_LABEL_DECL;
438 case RESULT_DECL:
439 return TS_RESULT_DECL;
440 case DEBUG_EXPR_DECL:
441 return TS_DECL_WRTL;
442 case CONST_DECL:
443 return TS_CONST_DECL;
444 case TYPE_DECL:
445 return TS_TYPE_DECL;
446 case FUNCTION_DECL:
447 return TS_FUNCTION_DECL;
448 case TRANSLATION_UNIT_DECL:
449 return TS_TRANSLATION_UNIT_DECL;
450 default:
451 return TS_DECL_NON_COMMON;
452 }
453 }
454 case tcc_type:
455 return TS_TYPE_NON_COMMON;
456 case tcc_reference:
457 case tcc_comparison:
458 case tcc_unary:
459 case tcc_binary:
460 case tcc_expression:
461 case tcc_statement:
462 case tcc_vl_exp:
463 return TS_EXP;
464 default: /* tcc_constant and tcc_exceptional */
465 break;
466 }
467 switch (code)
468 {
469 /* tcc_constant cases. */
470 case VOID_CST: return TS_TYPED;
471 case INTEGER_CST: return TS_INT_CST;
472 case REAL_CST: return TS_REAL_CST;
473 case FIXED_CST: return TS_FIXED_CST;
474 case COMPLEX_CST: return TS_COMPLEX;
475 case VECTOR_CST: return TS_VECTOR;
476 case STRING_CST: return TS_STRING;
477 /* tcc_exceptional cases. */
478 case ERROR_MARK: return TS_COMMON;
479 case IDENTIFIER_NODE: return TS_IDENTIFIER;
480 case TREE_LIST: return TS_LIST;
481 case TREE_VEC: return TS_VEC;
482 case SSA_NAME: return TS_SSA_NAME;
483 case PLACEHOLDER_EXPR: return TS_COMMON;
484 case STATEMENT_LIST: return TS_STATEMENT_LIST;
485 case BLOCK: return TS_BLOCK;
486 case CONSTRUCTOR: return TS_CONSTRUCTOR;
487 case TREE_BINFO: return TS_BINFO;
488 case OMP_CLAUSE: return TS_OMP_CLAUSE;
489 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
490 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
491
492 default:
493 gcc_unreachable ();
494 }
495 }
496
497
498 /* Initialize tree_contains_struct to describe the hierarchy of tree
499 nodes. */
500
501 static void
502 initialize_tree_contains_struct (void)
503 {
504 unsigned i;
505
506 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
507 {
508 enum tree_code code;
509 enum tree_node_structure_enum ts_code;
510
511 code = (enum tree_code) i;
512 ts_code = tree_node_structure_for_code (code);
513
514 /* Mark the TS structure itself. */
515 tree_contains_struct[code][ts_code] = 1;
516
517 /* Mark all the structures that TS is derived from. */
518 switch (ts_code)
519 {
520 case TS_TYPED:
521 case TS_BLOCK:
522 MARK_TS_BASE (code);
523 break;
524
525 case TS_COMMON:
526 case TS_INT_CST:
527 case TS_REAL_CST:
528 case TS_FIXED_CST:
529 case TS_VECTOR:
530 case TS_STRING:
531 case TS_COMPLEX:
532 case TS_SSA_NAME:
533 case TS_CONSTRUCTOR:
534 case TS_EXP:
535 case TS_STATEMENT_LIST:
536 MARK_TS_TYPED (code);
537 break;
538
539 case TS_IDENTIFIER:
540 case TS_DECL_MINIMAL:
541 case TS_TYPE_COMMON:
542 case TS_LIST:
543 case TS_VEC:
544 case TS_BINFO:
545 case TS_OMP_CLAUSE:
546 case TS_OPTIMIZATION:
547 case TS_TARGET_OPTION:
548 MARK_TS_COMMON (code);
549 break;
550
551 case TS_TYPE_WITH_LANG_SPECIFIC:
552 MARK_TS_TYPE_COMMON (code);
553 break;
554
555 case TS_TYPE_NON_COMMON:
556 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
557 break;
558
559 case TS_DECL_COMMON:
560 MARK_TS_DECL_MINIMAL (code);
561 break;
562
563 case TS_DECL_WRTL:
564 case TS_CONST_DECL:
565 MARK_TS_DECL_COMMON (code);
566 break;
567
568 case TS_DECL_NON_COMMON:
569 MARK_TS_DECL_WITH_VIS (code);
570 break;
571
572 case TS_DECL_WITH_VIS:
573 case TS_PARM_DECL:
574 case TS_LABEL_DECL:
575 case TS_RESULT_DECL:
576 MARK_TS_DECL_WRTL (code);
577 break;
578
579 case TS_FIELD_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
582
583 case TS_VAR_DECL:
584 MARK_TS_DECL_WITH_VIS (code);
585 break;
586
587 case TS_TYPE_DECL:
588 case TS_FUNCTION_DECL:
589 MARK_TS_DECL_NON_COMMON (code);
590 break;
591
592 case TS_TRANSLATION_UNIT_DECL:
593 MARK_TS_DECL_COMMON (code);
594 break;
595
596 default:
597 gcc_unreachable ();
598 }
599 }
600
601 /* Basic consistency checks for attributes used in fold. */
602 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
603 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
604 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
605 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
606 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
607 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
608 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
609 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
610 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
611 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
612 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
613 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
614 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
615 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
616 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
617 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
618 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
619 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
620 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
621 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
622 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
623 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
624 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
625 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
627 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
628 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
629 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
630 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
631 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
632 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
633 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
634 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
636 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
637 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
638 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
642 }
643
644
645 /* Init tree.c. */
646
647 void
648 init_ttree (void)
649 {
650 /* Initialize the hash table of types. */
651 type_hash_table
652 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
653
654 debug_expr_for_decl
655 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
656
657 value_expr_for_decl
658 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
659
660 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
661
662 int_cst_node = make_int_cst (1, 1);
663
664 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
665
666 cl_optimization_node = make_node (OPTIMIZATION_NODE);
667 cl_target_option_node = make_node (TARGET_OPTION_NODE);
668
669 /* Initialize the tree_contains_struct array. */
670 initialize_tree_contains_struct ();
671 lang_hooks.init_ts ();
672 }
673
674 \f
675 /* The name of the object as the assembler will see it (but before any
676 translations made by ASM_OUTPUT_LABELREF). Often this is the same
677 as DECL_NAME. It is an IDENTIFIER_NODE. */
678 tree
679 decl_assembler_name (tree decl)
680 {
681 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
682 lang_hooks.set_decl_assembler_name (decl);
683 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
684 }
685
686 /* When the target supports COMDAT groups, this indicates which group the
687 DECL is associated with. This can be either an IDENTIFIER_NODE or a
688 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
689 tree
690 decl_comdat_group (const_tree node)
691 {
692 struct symtab_node *snode = symtab_node::get (node);
693 if (!snode)
694 return NULL;
695 return snode->get_comdat_group ();
696 }
697
698 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
699 tree
700 decl_comdat_group_id (const_tree node)
701 {
702 struct symtab_node *snode = symtab_node::get (node);
703 if (!snode)
704 return NULL;
705 return snode->get_comdat_group_id ();
706 }
707
708 /* When the target supports named sections, return the section name of
709 NODE as a string, or NULL if it is in no section. */
710 const char *
711 decl_section_name (const_tree node)
712 {
713 struct symtab_node *snode = symtab_node::get (node);
714 if (!snode)
715 return NULL;
716 return snode->get_section ();
717 }
718
719 /* Set the section name of NODE to the string VALUE, or clear the
720 section if VALUE is NULL. */
721 void
722 set_decl_section_name (tree node, const char *value)
723 {
724 struct symtab_node *snode;
725
726 if (value == NULL)
727 {
728 snode = symtab_node::get (node);
729 if (!snode)
730 return;
731 }
732 else if (TREE_CODE (node) == VAR_DECL)
733 snode = varpool_node::get_create (node);
734 else
735 snode = cgraph_node::get_create (node);
736 snode->set_section (value);
737 }
738
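/* Illustrative sketch (DECL stands for some VAR_DECL or FUNCTION_DECL the
   caller already has):

     set_decl_section_name (decl, ".rodata.mydata");
     const char *sec = decl_section_name (decl);

   after which SEC is ".rodata.mydata".  The setter creates the varpool or
   cgraph node on demand; passing NULL instead clears the section and is a
   no-op when no symtab node exists yet.  */
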
739 /* Return TLS model of a variable NODE. */
740 enum tls_model
741 decl_tls_model (const_tree node)
742 {
743 struct varpool_node *snode = varpool_node::get (node);
744 if (!snode)
745 return TLS_MODEL_NONE;
746 return snode->tls_model;
747 }
748
749 /* Set TLS model of variable NODE to MODEL. */
750 void
751 set_decl_tls_model (tree node, enum tls_model model)
752 {
753 struct varpool_node *vnode;
754
755 if (model == TLS_MODEL_NONE)
756 {
757 vnode = varpool_node::get (node);
758 if (!vnode)
759 return;
760 }
761 else
762 vnode = varpool_node::get_create (node);
763 vnode->tls_model = model;
764 }
765
766 /* Compute the number of bytes occupied by a tree with code CODE.
767 This function cannot be used for nodes that have variable sizes,
768 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
769 size_t
770 tree_code_size (enum tree_code code)
771 {
772 switch (TREE_CODE_CLASS (code))
773 {
774 case tcc_declaration: /* A decl node */
775 {
776 switch (code)
777 {
778 case FIELD_DECL:
779 return sizeof (struct tree_field_decl);
780 case PARM_DECL:
781 return sizeof (struct tree_parm_decl);
782 case VAR_DECL:
783 return sizeof (struct tree_var_decl);
784 case LABEL_DECL:
785 return sizeof (struct tree_label_decl);
786 case RESULT_DECL:
787 return sizeof (struct tree_result_decl);
788 case CONST_DECL:
789 return sizeof (struct tree_const_decl);
790 case TYPE_DECL:
791 return sizeof (struct tree_type_decl);
792 case FUNCTION_DECL:
793 return sizeof (struct tree_function_decl);
794 case DEBUG_EXPR_DECL:
795 return sizeof (struct tree_decl_with_rtl);
796 case TRANSLATION_UNIT_DECL:
797 return sizeof (struct tree_translation_unit_decl);
798 case NAMESPACE_DECL:
799 case IMPORTED_DECL:
800 case NAMELIST_DECL:
801 return sizeof (struct tree_decl_non_common);
802 default:
803 return lang_hooks.tree_size (code);
804 }
805 }
806
807 case tcc_type: /* a type node */
808 return sizeof (struct tree_type_non_common);
809
810 case tcc_reference: /* a reference */
811 case tcc_expression: /* an expression */
812 case tcc_statement: /* an expression with side effects */
813 case tcc_comparison: /* a comparison expression */
814 case tcc_unary: /* a unary arithmetic expression */
815 case tcc_binary: /* a binary arithmetic expression */
816 return (sizeof (struct tree_exp)
817 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
818
819 case tcc_constant: /* a constant */
820 switch (code)
821 {
822 case VOID_CST: return sizeof (struct tree_typed);
823 case INTEGER_CST: gcc_unreachable ();
824 case REAL_CST: return sizeof (struct tree_real_cst);
825 case FIXED_CST: return sizeof (struct tree_fixed_cst);
826 case COMPLEX_CST: return sizeof (struct tree_complex);
827 case VECTOR_CST: return sizeof (struct tree_vector);
828 case STRING_CST: gcc_unreachable ();
829 default:
830 return lang_hooks.tree_size (code);
831 }
832
833 case tcc_exceptional: /* something random, like an identifier. */
834 switch (code)
835 {
836 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
837 case TREE_LIST: return sizeof (struct tree_list);
838
839 case ERROR_MARK:
840 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
841
842 case TREE_VEC:
843 case OMP_CLAUSE: gcc_unreachable ();
844
845 case SSA_NAME: return sizeof (struct tree_ssa_name);
846
847 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
848 case BLOCK: return sizeof (struct tree_block);
849 case CONSTRUCTOR: return sizeof (struct tree_constructor);
850 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
851 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
852
853 default:
854 return lang_hooks.tree_size (code);
855 }
856
857 default:
858 gcc_unreachable ();
859 }
860 }
861
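/* Worked example (for illustration only): PLUS_EXPR is tcc_binary with a
   TREE_CODE_LENGTH of 2, so tree_code_size (PLUS_EXPR) is

     sizeof (struct tree_exp) + (2 - 1) * sizeof (tree)

   i.e. the fixed expression header plus room for one operand beyond the
   first, which struct tree_exp already contains.  */
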
862 /* Compute the number of bytes occupied by NODE. This routine only
863 looks at TREE_CODE, except for those nodes that have variable sizes. */
864 size_t
865 tree_size (const_tree node)
866 {
867 const enum tree_code code = TREE_CODE (node);
868 switch (code)
869 {
870 case INTEGER_CST:
871 return (sizeof (struct tree_int_cst)
872 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
873
874 case TREE_BINFO:
875 return (offsetof (struct tree_binfo, base_binfos)
876 + vec<tree, va_gc>
877 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
878
879 case TREE_VEC:
880 return (sizeof (struct tree_vec)
881 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
882
883 case VECTOR_CST:
884 return (sizeof (struct tree_vector)
885 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
886
887 case STRING_CST:
888 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
889
890 case OMP_CLAUSE:
891 return (sizeof (struct tree_omp_clause)
892 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
893 * sizeof (tree));
894
895 default:
896 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
897 return (sizeof (struct tree_exp)
898 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
899 else
900 return tree_code_size (code);
901 }
902 }
903
904 /* Record interesting allocation statistics for a tree node with CODE
905 and LENGTH. */
906
907 static void
908 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
909 size_t length ATTRIBUTE_UNUSED)
910 {
911 enum tree_code_class type = TREE_CODE_CLASS (code);
912 tree_node_kind kind;
913
914 if (!GATHER_STATISTICS)
915 return;
916
917 switch (type)
918 {
919 case tcc_declaration: /* A decl node */
920 kind = d_kind;
921 break;
922
923 case tcc_type: /* a type node */
924 kind = t_kind;
925 break;
926
927 case tcc_statement: /* an expression with side effects */
928 kind = s_kind;
929 break;
930
931 case tcc_reference: /* a reference */
932 kind = r_kind;
933 break;
934
935 case tcc_expression: /* an expression */
936 case tcc_comparison: /* a comparison expression */
937 case tcc_unary: /* a unary arithmetic expression */
938 case tcc_binary: /* a binary arithmetic expression */
939 kind = e_kind;
940 break;
941
942 case tcc_constant: /* a constant */
943 kind = c_kind;
944 break;
945
946 case tcc_exceptional: /* something random, like an identifier. */
947 switch (code)
948 {
949 case IDENTIFIER_NODE:
950 kind = id_kind;
951 break;
952
953 case TREE_VEC:
954 kind = vec_kind;
955 break;
956
957 case TREE_BINFO:
958 kind = binfo_kind;
959 break;
960
961 case SSA_NAME:
962 kind = ssa_name_kind;
963 break;
964
965 case BLOCK:
966 kind = b_kind;
967 break;
968
969 case CONSTRUCTOR:
970 kind = constr_kind;
971 break;
972
973 case OMP_CLAUSE:
974 kind = omp_clause_kind;
975 break;
976
977 default:
978 kind = x_kind;
979 break;
980 }
981 break;
982
983 case tcc_vl_exp:
984 kind = e_kind;
985 break;
986
987 default:
988 gcc_unreachable ();
989 }
990
991 tree_code_counts[(int) code]++;
992 tree_node_counts[(int) kind]++;
993 tree_node_sizes[(int) kind] += length;
994 }
995
996 /* Allocate and return a new UID from the DECL_UID namespace. */
997
998 int
999 allocate_decl_uid (void)
1000 {
1001 return next_decl_uid++;
1002 }
1003
1004 /* Return a newly allocated node of code CODE. For decl and type
1005 nodes, some other fields are initialized. The rest of the node is
1006 initialized to zero. This function cannot be used for TREE_VEC,
1007 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1008 tree_code_size.
1009
1010 Achoo! I got a code in the node. */
1011
1012 tree
1013 make_node_stat (enum tree_code code MEM_STAT_DECL)
1014 {
1015 tree t;
1016 enum tree_code_class type = TREE_CODE_CLASS (code);
1017 size_t length = tree_code_size (code);
1018
1019 record_node_allocation_statistics (code, length);
1020
1021 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1022 TREE_SET_CODE (t, code);
1023
1024 switch (type)
1025 {
1026 case tcc_statement:
1027 TREE_SIDE_EFFECTS (t) = 1;
1028 break;
1029
1030 case tcc_declaration:
1031 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1032 {
1033 if (code == FUNCTION_DECL)
1034 {
1035 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1036 DECL_MODE (t) = FUNCTION_MODE;
1037 }
1038 else
1039 DECL_ALIGN (t) = 1;
1040 }
1041 DECL_SOURCE_LOCATION (t) = input_location;
1042 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1043 DECL_UID (t) = --next_debug_decl_uid;
1044 else
1045 {
1046 DECL_UID (t) = allocate_decl_uid ();
1047 SET_DECL_PT_UID (t, -1);
1048 }
1049 if (TREE_CODE (t) == LABEL_DECL)
1050 LABEL_DECL_UID (t) = -1;
1051
1052 break;
1053
1054 case tcc_type:
1055 TYPE_UID (t) = next_type_uid++;
1056 TYPE_ALIGN (t) = BITS_PER_UNIT;
1057 TYPE_USER_ALIGN (t) = 0;
1058 TYPE_MAIN_VARIANT (t) = t;
1059 TYPE_CANONICAL (t) = t;
1060
1061 /* Default to no attributes for type, but let target change that. */
1062 TYPE_ATTRIBUTES (t) = NULL_TREE;
1063 targetm.set_default_type_attributes (t);
1064
1065 /* We have not yet computed the alias set for this type. */
1066 TYPE_ALIAS_SET (t) = -1;
1067 break;
1068
1069 case tcc_constant:
1070 TREE_CONSTANT (t) = 1;
1071 break;
1072
1073 case tcc_expression:
1074 switch (code)
1075 {
1076 case INIT_EXPR:
1077 case MODIFY_EXPR:
1078 case VA_ARG_EXPR:
1079 case PREDECREMENT_EXPR:
1080 case PREINCREMENT_EXPR:
1081 case POSTDECREMENT_EXPR:
1082 case POSTINCREMENT_EXPR:
1083 /* All of these have side-effects, no matter what their
1084 operands are. */
1085 TREE_SIDE_EFFECTS (t) = 1;
1086 break;
1087
1088 default:
1089 break;
1090 }
1091 break;
1092
1093 case tcc_exceptional:
1094 switch (code)
1095 {
1096 case TARGET_OPTION_NODE:
1097 TREE_TARGET_OPTION(t)
1098 = ggc_cleared_alloc<struct cl_target_option> ();
1099 break;
1100
1101 case OPTIMIZATION_NODE:
1102 TREE_OPTIMIZATION (t)
1103 = ggc_cleared_alloc<struct cl_optimization> ();
1104 break;
1105
1106 default:
1107 break;
1108 }
1109 break;
1110
1111 default:
1112 /* Other classes need no special treatment. */
1113 break;
1114 }
1115
1116 return t;
1117 }
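
/* Minimal usage sketch (not exhaustive): make_node is the usual entry point
   wrapping make_node_stat, e.g.

     tree list = make_node (TREE_LIST);
     tree label = make_node (LABEL_DECL);

   LIST comes back zeroed apart from its code; LABEL additionally gets a
   fresh DECL_UID, input_location as its DECL_SOURCE_LOCATION and a
   LABEL_DECL_UID of -1, as set up above.  */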
1118 \f
1119 /* Return a new node with the same contents as NODE except that its
1120 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1121
1122 tree
1123 copy_node_stat (tree node MEM_STAT_DECL)
1124 {
1125 tree t;
1126 enum tree_code code = TREE_CODE (node);
1127 size_t length;
1128
1129 gcc_assert (code != STATEMENT_LIST);
1130
1131 length = tree_size (node);
1132 record_node_allocation_statistics (code, length);
1133 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1134 memcpy (t, node, length);
1135
1136 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1137 TREE_CHAIN (t) = 0;
1138 TREE_ASM_WRITTEN (t) = 0;
1139 TREE_VISITED (t) = 0;
1140
1141 if (TREE_CODE_CLASS (code) == tcc_declaration)
1142 {
1143 if (code == DEBUG_EXPR_DECL)
1144 DECL_UID (t) = --next_debug_decl_uid;
1145 else
1146 {
1147 DECL_UID (t) = allocate_decl_uid ();
1148 if (DECL_PT_UID_SET_P (node))
1149 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1150 }
1151 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1152 && DECL_HAS_VALUE_EXPR_P (node))
1153 {
1154 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1155 DECL_HAS_VALUE_EXPR_P (t) = 1;
1156 }
1157 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1158 if (TREE_CODE (node) == VAR_DECL)
1159 {
1160 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1161 t->decl_with_vis.symtab_node = NULL;
1162 }
1163 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1164 {
1165 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1166 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1167 }
1168 if (TREE_CODE (node) == FUNCTION_DECL)
1169 {
1170 DECL_STRUCT_FUNCTION (t) = NULL;
1171 t->decl_with_vis.symtab_node = NULL;
1172 }
1173 }
1174 else if (TREE_CODE_CLASS (code) == tcc_type)
1175 {
1176 TYPE_UID (t) = next_type_uid++;
1177 /* The following is so that the debug code for
1178 the copy is different from the original type.
1179 The two statements usually duplicate each other
1180 (because they clear fields of the same union),
1181 but the optimizer should catch that. */
1182 TYPE_SYMTAB_POINTER (t) = 0;
1183 TYPE_SYMTAB_ADDRESS (t) = 0;
1184
1185 /* Do not copy the values cache. */
1186 if (TYPE_CACHED_VALUES_P (t))
1187 {
1188 TYPE_CACHED_VALUES_P (t) = 0;
1189 TYPE_CACHED_VALUES (t) = NULL_TREE;
1190 }
1191 }
1192 else if (code == TARGET_OPTION_NODE)
1193 {
1194 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1195 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1196 sizeof (struct cl_target_option));
1197 }
1198 else if (code == OPTIMIZATION_NODE)
1199 {
1200 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1201 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1202 sizeof (struct cl_optimization));
1203 }
1204
1205 return t;
1206 }
1207
1208 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1209 For example, this can copy a list made of TREE_LIST nodes. */
1210
1211 tree
1212 copy_list (tree list)
1213 {
1214 tree head;
1215 tree prev, next;
1216
1217 if (list == 0)
1218 return 0;
1219
1220 head = prev = copy_node (list);
1221 next = TREE_CHAIN (list);
1222 while (next)
1223 {
1224 TREE_CHAIN (prev) = copy_node (next);
1225 prev = TREE_CHAIN (prev);
1226 next = TREE_CHAIN (next);
1227 }
1228 return head;
1229 }
1230
1231 \f
1232 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1233 INTEGER_CST with value CST and type TYPE. */
1234
1235 static unsigned int
1236 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1237 {
1238 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1239 /* We need an extra zero HWI if CST is an unsigned integer with its
1240 upper bit set, and if CST occupies a whole number of HWIs. */
1241 if (TYPE_UNSIGNED (type)
1242 && wi::neg_p (cst)
1243 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1244 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1245 return cst.get_len ();
1246 }
1247
1248 /* Return a new INTEGER_CST with value CST and type TYPE. */
1249
1250 static tree
1251 build_new_int_cst (tree type, const wide_int &cst)
1252 {
1253 unsigned int len = cst.get_len ();
1254 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1255 tree nt = make_int_cst (len, ext_len);
1256
1257 if (len < ext_len)
1258 {
1259 --ext_len;
1260 TREE_INT_CST_ELT (nt, ext_len) = 0;
1261 for (unsigned int i = len; i < ext_len; ++i)
1262 TREE_INT_CST_ELT (nt, i) = -1;
1263 }
1264 else if (TYPE_UNSIGNED (type)
1265 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1266 {
1267 len--;
1268 TREE_INT_CST_ELT (nt, len)
1269 = zext_hwi (cst.elt (len),
1270 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1271 }
1272
1273 for (unsigned int i = 0; i < len; i++)
1274 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1275 TREE_TYPE (nt) = type;
1276 return nt;
1277 }
1278
1279 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1280
1281 tree
1282 build_int_cst (tree type, HOST_WIDE_INT low)
1283 {
1284 /* Support legacy code. */
1285 if (!type)
1286 type = integer_type_node;
1287
1288 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1289 }
1290
1291 tree
1292 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1293 {
1294 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1295 }
1296
1297 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1298
1299 tree
1300 build_int_cst_type (tree type, HOST_WIDE_INT low)
1301 {
1302 gcc_assert (type);
1303 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1304 }
1305
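/* Usage sketch (illustrative): the two entry points differ only in how the
   HOST_WIDE_INT argument is extended to the precision of TYPE:

     tree a = build_int_cst (integer_type_node, -1);
     tree b = build_int_cstu (size_type_node, 7);

   A is sign extended, B is zero extended; both go through wide_int_to_tree
   below, so small values come back as shared nodes.  */
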
1306 /* Construct a tree of type TYPE with the value given by CST. The signedness
1307 of CST is assumed to be the same as the signedness of TYPE. */
1308
1309 tree
1310 double_int_to_tree (tree type, double_int cst)
1311 {
1312 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1313 }
1314
1315 /* Force the wide_int CST into the range of the type TYPE by sign- or
1316 zero-extending it. OVERFLOWABLE indicates whether we are interested
1317 in overflow of the value: when >0 we are only interested in signed
1318 overflow, for <0 we are interested in any overflow. OVERFLOWED
1319 indicates whether overflow has already occurred. We force the value
1320 of CST to be within the range of TYPE (by setting to 0 or 1 all the
1321 bits outside the type's range). We set TREE_OVERFLOW on the
1322 returned node if
1323 OVERFLOWED is nonzero,
1324 or OVERFLOWABLE is >0 and signed overflow occurs,
1325 or OVERFLOWABLE is <0 and any overflow occurs.
1326 We return a new tree node for the extended wide_int. The node
1327 is shared if no overflow flags are set. */
1328
1329
1330 tree
1331 force_fit_type (tree type, const wide_int_ref &cst,
1332 int overflowable, bool overflowed)
1333 {
1334 signop sign = TYPE_SIGN (type);
1335
1336 /* If we need to set overflow flags, return a new unshared node. */
1337 if (overflowed || !wi::fits_to_tree_p (cst, type))
1338 {
1339 if (overflowed
1340 || overflowable < 0
1341 || (overflowable > 0 && sign == SIGNED))
1342 {
1343 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1344 tree t = build_new_int_cst (type, tmp);
1345 TREE_OVERFLOW (t) = 1;
1346 return t;
1347 }
1348 }
1349
1350 /* Else build a shared node. */
1351 return wide_int_to_tree (type, cst);
1352 }
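
/* Illustrative sketch of the overflow behavior (values chosen purely for
   exposition):

     wide_int w = wi::shwi (300, 16);
     tree t = force_fit_type (signed_char_type_node, w, -1, false);

   300 does not fit in signed char and OVERFLOWABLE < 0 asks to record any
   overflow, so T is a fresh, unshared INTEGER_CST with TREE_OVERFLOW set.  */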
1353
1354 /* These are the hash table functions for the hash table of INTEGER_CST
1355 nodes of a sizetype. */
1356
1357 /* Return the hash code for X, an INTEGER_CST. */
1358
1359 hashval_t
1360 int_cst_hasher::hash (tree x)
1361 {
1362 const_tree const t = x;
1363 hashval_t code = TYPE_UID (TREE_TYPE (t));
1364 int i;
1365
1366 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1367 code ^= TREE_INT_CST_ELT (t, i);
1368
1369 return code;
1370 }
1371
1372 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1373 is the same as that represented by Y, also an INTEGER_CST tree node. */
1374
1375 bool
1376 int_cst_hasher::equal (tree x, tree y)
1377 {
1378 const_tree const xt = x;
1379 const_tree const yt = y;
1380
1381 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1382 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1383 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1384 return false;
1385
1386 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1387 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1388 return false;
1389
1390 return true;
1391 }
1392
1393 /* Create an INT_CST node of TYPE and value CST.
1394 The returned node is always shared. For small integers we use a
1395 per-type vector cache, for larger ones we use a single hash table.
1396 The value is extended from its precision according to the sign of
1397 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1398 the upper bits and ensures that hashing and value equality based
1399 upon the underlying HOST_WIDE_INTs work without masking. */
1400
1401 tree
1402 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1403 {
1404 tree t;
1405 int ix = -1;
1406 int limit = 0;
1407
1408 gcc_assert (type);
1409 unsigned int prec = TYPE_PRECISION (type);
1410 signop sgn = TYPE_SIGN (type);
1411
1412 /* Verify that everything is canonical. */
1413 int l = pcst.get_len ();
1414 if (l > 1)
1415 {
1416 if (pcst.elt (l - 1) == 0)
1417 gcc_checking_assert (pcst.elt (l - 2) < 0);
1418 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1419 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1420 }
1421
1422 wide_int cst = wide_int::from (pcst, prec, sgn);
1423 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1424
1425 if (ext_len == 1)
1426 {
1427 /* We just need to store a single HOST_WIDE_INT. */
1428 HOST_WIDE_INT hwi;
1429 if (TYPE_UNSIGNED (type))
1430 hwi = cst.to_uhwi ();
1431 else
1432 hwi = cst.to_shwi ();
1433
1434 switch (TREE_CODE (type))
1435 {
1436 case NULLPTR_TYPE:
1437 gcc_assert (hwi == 0);
1438 /* Fallthru. */
1439
1440 case POINTER_TYPE:
1441 case REFERENCE_TYPE:
1442 case POINTER_BOUNDS_TYPE:
1443 /* Cache NULL pointer and zero bounds. */
1444 if (hwi == 0)
1445 {
1446 limit = 1;
1447 ix = 0;
1448 }
1449 break;
1450
1451 case BOOLEAN_TYPE:
1452 /* Cache false or true. */
1453 limit = 2;
1454 if (hwi < 2)
1455 ix = hwi;
1456 break;
1457
1458 case INTEGER_TYPE:
1459 case OFFSET_TYPE:
1460 if (TYPE_SIGN (type) == UNSIGNED)
1461 {
1462 /* Cache [0, N). */
1463 limit = INTEGER_SHARE_LIMIT;
1464 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1465 ix = hwi;
1466 }
1467 else
1468 {
1469 /* Cache [-1, N). */
1470 limit = INTEGER_SHARE_LIMIT + 1;
1471 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1472 ix = hwi + 1;
1473 }
1474 break;
1475
1476 case ENUMERAL_TYPE:
1477 break;
1478
1479 default:
1480 gcc_unreachable ();
1481 }
1482
1483 if (ix >= 0)
1484 {
1485 /* Look for it in the type's vector of small shared ints. */
1486 if (!TYPE_CACHED_VALUES_P (type))
1487 {
1488 TYPE_CACHED_VALUES_P (type) = 1;
1489 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1490 }
1491
1492 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1493 if (t)
1494 /* Make sure no one is clobbering the shared constant. */
1495 gcc_checking_assert (TREE_TYPE (t) == type
1496 && TREE_INT_CST_NUNITS (t) == 1
1497 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1498 && TREE_INT_CST_EXT_NUNITS (t) == 1
1499 && TREE_INT_CST_ELT (t, 0) == hwi);
1500 else
1501 {
1502 /* Create a new shared int. */
1503 t = build_new_int_cst (type, cst);
1504 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1505 }
1506 }
1507 else
1508 {
1509 /* Use the cache of larger shared ints, using int_cst_node as
1510 a temporary. */
1511
1512 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1513 TREE_TYPE (int_cst_node) = type;
1514
1515 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1516 t = *slot;
1517 if (!t)
1518 {
1519 /* Insert this one into the hash table. */
1520 t = int_cst_node;
1521 *slot = t;
1522 /* Make a new node for next time round. */
1523 int_cst_node = make_int_cst (1, 1);
1524 }
1525 }
1526 }
1527 else
1528 {
1529 /* The value either hashes properly or we drop it on the floor
1530 for the gc to take care of. There will not be enough of them
1531 to worry about. */
1532
1533 tree nt = build_new_int_cst (type, cst);
1534 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1535 t = *slot;
1536 if (!t)
1537 {
1538 /* Insert this one into the hash table. */
1539 t = nt;
1540 *slot = t;
1541 }
1542 }
1543
1544 return t;
1545 }
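
/* Sharing in practice (illustrative sketch): because small values live in
   the per-type cache and larger ones in int_cst_hash_table, rebuilding the
   same constant yields the same node:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);
     gcc_checking_assert (a == b);  */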
1546
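/* Insert INTEGER_CST T into the caches used by wide_int_to_tree above:
   either the per-type vector of small shared values or the hash table of
   larger ones.  T must not have TREE_OVERFLOW set.  */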
1547 void
1548 cache_integer_cst (tree t)
1549 {
1550 tree type = TREE_TYPE (t);
1551 int ix = -1;
1552 int limit = 0;
1553 int prec = TYPE_PRECISION (type);
1554
1555 gcc_assert (!TREE_OVERFLOW (t));
1556
1557 switch (TREE_CODE (type))
1558 {
1559 case NULLPTR_TYPE:
1560 gcc_assert (integer_zerop (t));
1561 /* Fallthru. */
1562
1563 case POINTER_TYPE:
1564 case REFERENCE_TYPE:
1565 /* Cache NULL pointer. */
1566 if (integer_zerop (t))
1567 {
1568 limit = 1;
1569 ix = 0;
1570 }
1571 break;
1572
1573 case BOOLEAN_TYPE:
1574 /* Cache false or true. */
1575 limit = 2;
1576 if (wi::ltu_p (t, 2))
1577 ix = TREE_INT_CST_ELT (t, 0);
1578 break;
1579
1580 case INTEGER_TYPE:
1581 case OFFSET_TYPE:
1582 if (TYPE_UNSIGNED (type))
1583 {
1584 /* Cache 0..N */
1585 limit = INTEGER_SHARE_LIMIT;
1586
1587 /* This is a little hokey, but if the prec is smaller than
1588 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1589 obvious test will not get the correct answer. */
1590 if (prec < HOST_BITS_PER_WIDE_INT)
1591 {
1592 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1593 ix = tree_to_uhwi (t);
1594 }
1595 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1596 ix = tree_to_uhwi (t);
1597 }
1598 else
1599 {
1600 /* Cache -1..N */
1601 limit = INTEGER_SHARE_LIMIT + 1;
1602
1603 if (integer_minus_onep (t))
1604 ix = 0;
1605 else if (!wi::neg_p (t))
1606 {
1607 if (prec < HOST_BITS_PER_WIDE_INT)
1608 {
1609 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1610 ix = tree_to_shwi (t) + 1;
1611 }
1612 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1613 ix = tree_to_shwi (t) + 1;
1614 }
1615 }
1616 break;
1617
1618 case ENUMERAL_TYPE:
1619 break;
1620
1621 default:
1622 gcc_unreachable ();
1623 }
1624
1625 if (ix >= 0)
1626 {
1627 /* Look for it in the type's vector of small shared ints. */
1628 if (!TYPE_CACHED_VALUES_P (type))
1629 {
1630 TYPE_CACHED_VALUES_P (type) = 1;
1631 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1632 }
1633
1634 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1635 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1636 }
1637 else
1638 {
1639 /* Use the cache of larger shared ints. */
1640 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1641 /* If there is already an entry for the number verify it's the
1642 same. */
1643 if (*slot)
1644 gcc_assert (wi::eq_p (tree (*slot), t));
1645 else
1646 /* Otherwise insert this one into the hash table. */
1647 *slot = t;
1648 }
1649 }
1650
1651
1652 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1653 and the rest are zeros. */
1654
1655 tree
1656 build_low_bits_mask (tree type, unsigned bits)
1657 {
1658 gcc_assert (bits <= TYPE_PRECISION (type));
1659
1660 return wide_int_to_tree (type, wi::mask (bits, false,
1661 TYPE_PRECISION (type)));
1662 }
1663
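/* For instance (illustrative only):

     tree m = build_low_bits_mask (unsigned_type_node, 4);

   yields the unsigned int constant 0xf.  */
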
1664 /* Return true if X is an integer constant that can be expressed in (unsigned)
1665 HOST_WIDE_INT without loss of precision. */
1666
1667 bool
1668 cst_and_fits_in_hwi (const_tree x)
1669 {
1670 if (TREE_CODE (x) != INTEGER_CST)
1671 return false;
1672
1673 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1674 return false;
1675
1676 return TREE_INT_CST_NUNITS (x) == 1;
1677 }
1678
1679 /* Build a newly constructed VECTOR_CST node of length LEN. */
1680
1681 tree
1682 make_vector_stat (unsigned len MEM_STAT_DECL)
1683 {
1684 tree t;
1685 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1686
1687 record_node_allocation_statistics (VECTOR_CST, length);
1688
1689 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1690
1691 TREE_SET_CODE (t, VECTOR_CST);
1692 TREE_CONSTANT (t) = 1;
1693
1694 return t;
1695 }
1696
1697 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1698 are given by the array VALS. */
1699
1700 tree
1701 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1702 {
1703 int over = 0;
1704 unsigned cnt = 0;
1705 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1706 TREE_TYPE (v) = type;
1707
1708 /* Iterate through elements and check for overflow. */
1709 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1710 {
1711 tree value = vals[cnt];
1712
1713 VECTOR_CST_ELT (v, cnt) = value;
1714
1715 /* Don't crash if we get an address constant. */
1716 if (!CONSTANT_CLASS_P (value))
1717 continue;
1718
1719 over |= TREE_OVERFLOW (value);
1720 }
1721
1722 TREE_OVERFLOW (v) = over;
1723 return v;
1724 }
1725
1726 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1727 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1728
1729 tree
1730 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1731 {
1732 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1733 unsigned HOST_WIDE_INT idx;
1734 tree value;
1735
1736 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1737 vec[idx] = value;
1738 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1739 vec[idx] = build_zero_cst (TREE_TYPE (type));
1740
1741 return build_vector (type, vec);
1742 }
1743
1744 /* Build a vector of type VECTYPE where all the elements are SCs. */
1745 tree
1746 build_vector_from_val (tree vectype, tree sc)
1747 {
1748 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1749
1750 if (sc == error_mark_node)
1751 return sc;
1752
1753 /* Verify that the vector type is suitable for SC. Note that there
1754 is some inconsistency in the type-system with respect to restrict
1755 qualifications of pointers. Vector types always have a main-variant
1756 element type and the qualification is applied to the vector-type.
1757 So TREE_TYPE (vector-type) does not return a properly qualified
1758 vector element-type. */
1759 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1760 TREE_TYPE (vectype)));
1761
1762 if (CONSTANT_CLASS_P (sc))
1763 {
1764 tree *v = XALLOCAVEC (tree, nunits);
1765 for (i = 0; i < nunits; ++i)
1766 v[i] = sc;
1767 return build_vector (vectype, v);
1768 }
1769 else
1770 {
1771 vec<constructor_elt, va_gc> *v;
1772 vec_alloc (v, nunits);
1773 for (i = 0; i < nunits; ++i)
1774 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1775 return build_constructor (vectype, v);
1776 }
1777 }
1778
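/* Usage sketch (V4SI is built here just for the example):

     tree v4si = build_vector_type (integer_type_node, 4);
     tree one = build_one_cst (integer_type_node);
     tree ones = build_vector_from_val (v4si, one);

   Because the element is a constant, ONES is a VECTOR_CST of four 1s; a
   non-constant SC would instead produce a CONSTRUCTOR.  */
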
1779 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1780 are in the vec pointed to by VALS. */
1781 tree
1782 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1783 {
1784 tree c = make_node (CONSTRUCTOR);
1785 unsigned int i;
1786 constructor_elt *elt;
1787 bool constant_p = true;
1788 bool side_effects_p = false;
1789
1790 TREE_TYPE (c) = type;
1791 CONSTRUCTOR_ELTS (c) = vals;
1792
1793 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1794 {
1795 /* Mostly ctors will have elts that don't have side-effects, so
1796 the usual case is to scan all the elements. Hence a single
1797 loop for both const and side effects, rather than one loop
1798 each (with early outs). */
1799 if (!TREE_CONSTANT (elt->value))
1800 constant_p = false;
1801 if (TREE_SIDE_EFFECTS (elt->value))
1802 side_effects_p = true;
1803 }
1804
1805 TREE_SIDE_EFFECTS (c) = side_effects_p;
1806 TREE_CONSTANT (c) = constant_p;
1807
1808 return c;
1809 }
1810
1811 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1812 INDEX and VALUE. */
1813 tree
1814 build_constructor_single (tree type, tree index, tree value)
1815 {
1816 vec<constructor_elt, va_gc> *v;
1817 constructor_elt elt = {index, value};
1818
1819 vec_alloc (v, 1);
1820 v->quick_push (elt);
1821
1822 return build_constructor (type, v);
1823 }
1824
1825
1826 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1827 are in a list pointed to by VALS. */
1828 tree
1829 build_constructor_from_list (tree type, tree vals)
1830 {
1831 tree t;
1832 vec<constructor_elt, va_gc> *v = NULL;
1833
1834 if (vals)
1835 {
1836 vec_alloc (v, list_length (vals));
1837 for (t = vals; t; t = TREE_CHAIN (t))
1838 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1839 }
1840
1841 return build_constructor (type, v);
1842 }
1843
1844 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1845 of elements, provided as index/value pairs. */
1846
1847 tree
1848 build_constructor_va (tree type, int nelts, ...)
1849 {
1850 vec<constructor_elt, va_gc> *v = NULL;
1851 va_list p;
1852
1853 va_start (p, nelts);
1854 vec_alloc (v, nelts);
1855 while (nelts--)
1856 {
1857 tree index = va_arg (p, tree);
1858 tree value = va_arg (p, tree);
1859 CONSTRUCTOR_APPEND_ELT (v, index, value);
1860 }
1861 va_end (p);
1862 return build_constructor (type, v);
1863 }
1864
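/* Usage sketch (ARR_TYPE stands for some array-of-int type the caller
   already has): building the initializer { 10, 20 } from index/value pairs:

     tree ctor
       = build_constructor_va (arr_type, 2,
                               size_int (0), build_int_cst (integer_type_node, 10),
                               size_int (1), build_int_cst (integer_type_node, 20));  */
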
1865 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1866
1867 tree
1868 build_fixed (tree type, FIXED_VALUE_TYPE f)
1869 {
1870 tree v;
1871 FIXED_VALUE_TYPE *fp;
1872
1873 v = make_node (FIXED_CST);
1874 fp = ggc_alloc<fixed_value> ();
1875 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1876
1877 TREE_TYPE (v) = type;
1878 TREE_FIXED_CST_PTR (v) = fp;
1879 return v;
1880 }
1881
1882 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1883
1884 tree
1885 build_real (tree type, REAL_VALUE_TYPE d)
1886 {
1887 tree v;
1888 REAL_VALUE_TYPE *dp;
1889 int overflow = 0;
1890
1891 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1892 Consider doing it via real_convert now. */
1893
1894 v = make_node (REAL_CST);
1895 dp = ggc_alloc<real_value> ();
1896 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1897
1898 TREE_TYPE (v) = type;
1899 TREE_REAL_CST_PTR (v) = dp;
1900 TREE_OVERFLOW (v) = overflow;
1901 return v;
1902 }
1903
1904 /* Like build_real, but first truncate D to the type. */
1905
1906 tree
1907 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1908 {
1909 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1910 }
1911
1912 /* Return a REAL_VALUE_TYPE corresponding to the integer value of the
1913 INTEGER_CST node I, using the mode of TYPE (VOIDmode if TYPE is null). */
1914
1915 REAL_VALUE_TYPE
1916 real_value_from_int_cst (const_tree type, const_tree i)
1917 {
1918 REAL_VALUE_TYPE d;
1919
1920 /* Clear all bits of the real value type so that we can later do
1921 bitwise comparisons to see if two values are the same. */
1922 memset (&d, 0, sizeof d);
1923
1924 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1925 TYPE_SIGN (TREE_TYPE (i)));
1926 return d;
1927 }
1928
1929 /* Given a tree representing an integer constant I, return a tree
1930 representing the same value as a floating-point constant of type TYPE. */
1931
1932 tree
1933 build_real_from_int_cst (tree type, const_tree i)
1934 {
1935 tree v;
1936 int overflow = TREE_OVERFLOW (i);
1937
1938 v = build_real (type, real_value_from_int_cst (type, i));
1939
1940 TREE_OVERFLOW (v) |= overflow;
1941 return v;
1942 }
1943
1944 /* Return a newly constructed STRING_CST node whose value is
1945 the LEN characters at STR.
1946 Note that for a C string literal, LEN should include the trailing NUL.
1947 The TREE_TYPE is not initialized. */
1948
1949 tree
1950 build_string (int len, const char *str)
1951 {
1952 tree s;
1953 size_t length;
1954
1955 /* Do not waste bytes provided by padding of struct tree_string. */
1956 length = len + offsetof (struct tree_string, str) + 1;
1957
1958 record_node_allocation_statistics (STRING_CST, length);
1959
1960 s = (tree) ggc_internal_alloc (length);
1961
1962 memset (s, 0, sizeof (struct tree_typed));
1963 TREE_SET_CODE (s, STRING_CST);
1964 TREE_CONSTANT (s) = 1;
1965 TREE_STRING_LENGTH (s) = len;
1966 memcpy (s->string.str, str, len);
1967 s->string.str[len] = '\0';
1968
1969 return s;
1970 }
1971
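/* Usage sketch (the array type shown follows the usual front-end convention
   and is only illustrative): for the C literal "hi", LEN counts the trailing
   NUL and the caller supplies the type:

     tree s = build_string (3, "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (2)));  */
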
1972 /* Return a newly constructed COMPLEX_CST node whose value is
1973 specified by the real and imaginary parts REAL and IMAG.
1974 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1975 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1976
1977 tree
1978 build_complex (tree type, tree real, tree imag)
1979 {
1980 tree t = make_node (COMPLEX_CST);
1981
1982 TREE_REALPART (t) = real;
1983 TREE_IMAGPART (t) = imag;
1984 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1985 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1986 return t;
1987 }
1988
1989 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
1990 element is set to 1. In particular, this is 1 + i for complex types. */
1991
1992 tree
1993 build_each_one_cst (tree type)
1994 {
1995 if (TREE_CODE (type) == COMPLEX_TYPE)
1996 {
1997 tree scalar = build_one_cst (TREE_TYPE (type));
1998 return build_complex (type, scalar, scalar);
1999 }
2000 else
2001 return build_one_cst (type);
2002 }
2003
2004 /* Return a constant of arithmetic type TYPE which is the
2005 multiplicative identity of the set TYPE. */
2006
2007 tree
2008 build_one_cst (tree type)
2009 {
2010 switch (TREE_CODE (type))
2011 {
2012 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2013 case POINTER_TYPE: case REFERENCE_TYPE:
2014 case OFFSET_TYPE:
2015 return build_int_cst (type, 1);
2016
2017 case REAL_TYPE:
2018 return build_real (type, dconst1);
2019
2020 case FIXED_POINT_TYPE:
2021 /* We can only generate 1 for accum types. */
2022 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2023 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2024
2025 case VECTOR_TYPE:
2026 {
2027 tree scalar = build_one_cst (TREE_TYPE (type));
2028
2029 return build_vector_from_val (type, scalar);
2030 }
2031
2032 case COMPLEX_TYPE:
2033 return build_complex (type,
2034 build_one_cst (TREE_TYPE (type)),
2035 build_zero_cst (TREE_TYPE (type)));
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040 }
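
/* Illustrative sketch contrasting the two "one" builders above: for a
   complex type, build_one_cst yields the multiplicative identity 1 + 0i,
   while build_each_one_cst yields 1 + 1i.  The helper name
   example_complex_one is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_complex_one (void)
{
  tree ctype = build_complex_type (double_type_node);
  tree mult_identity = build_one_cst (ctype);	/* 1.0 + 0.0i  */
  tree each_one = build_each_one_cst (ctype);	/* 1.0 + 1.0i  */
  /* real_onep only accepts a complex constant with a zero imaginary part.  */
  gcc_assert (real_onep (mult_identity) && !real_onep (each_one));
  return mult_identity;
}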
2041
2042 /* Return an integer of type TYPE containing all 1's in as much precision as
2043 it contains, or a complex or vector whose subparts are such integers. */
2044
2045 tree
2046 build_all_ones_cst (tree type)
2047 {
2048 if (TREE_CODE (type) == COMPLEX_TYPE)
2049 {
2050 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2051 return build_complex (type, scalar, scalar);
2052 }
2053 else
2054 return build_minus_one_cst (type);
2055 }
2056
2057 /* Return a constant of arithmetic type TYPE which is the
2058 opposite of the multiplicative identity of the set TYPE. */
2059
2060 tree
2061 build_minus_one_cst (tree type)
2062 {
2063 switch (TREE_CODE (type))
2064 {
2065 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2066 case POINTER_TYPE: case REFERENCE_TYPE:
2067 case OFFSET_TYPE:
2068 return build_int_cst (type, -1);
2069
2070 case REAL_TYPE:
2071 return build_real (type, dconstm1);
2072
2073 case FIXED_POINT_TYPE:
2074       /* As in build_one_cst, we can only generate this for accum types.  */
2075 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2076 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2077 TYPE_MODE (type)));
2078
2079 case VECTOR_TYPE:
2080 {
2081 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2082
2083 return build_vector_from_val (type, scalar);
2084 }
2085
2086 case COMPLEX_TYPE:
2087 return build_complex (type,
2088 build_minus_one_cst (TREE_TYPE (type)),
2089 build_zero_cst (TREE_TYPE (type)));
2090
2091 default:
2092 gcc_unreachable ();
2093 }
2094 }
2095
2096 /* Build 0 constant of type TYPE. This is used by constructor folding
2097 and thus the constant should be represented in memory by
2098 zero(es). */
2099
2100 tree
2101 build_zero_cst (tree type)
2102 {
2103 switch (TREE_CODE (type))
2104 {
2105 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2106 case POINTER_TYPE: case REFERENCE_TYPE:
2107 case OFFSET_TYPE: case NULLPTR_TYPE:
2108 return build_int_cst (type, 0);
2109
2110 case REAL_TYPE:
2111 return build_real (type, dconst0);
2112
2113 case FIXED_POINT_TYPE:
2114 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2115
2116 case VECTOR_TYPE:
2117 {
2118 tree scalar = build_zero_cst (TREE_TYPE (type));
2119
2120 return build_vector_from_val (type, scalar);
2121 }
2122
2123 case COMPLEX_TYPE:
2124 {
2125 tree zero = build_zero_cst (TREE_TYPE (type));
2126
2127 return build_complex (type, zero, zero);
2128 }
2129
2130 default:
2131 if (!AGGREGATE_TYPE_P (type))
2132 return fold_convert (type, integer_zero_node);
2133 return build_constructor (type, NULL);
2134 }
2135 }
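
/* Illustrative sketch of the fallback above: scalar types get the usual
   zero constant, while an aggregate gets an empty CONSTRUCTOR, which the
   middle end treats as an all-zero initializer.  The helper name
   example_zero_cst is hypothetical and AGGREGATE_TYPE is assumed to be a
   RECORD_TYPE or similar aggregate.  */

static void ATTRIBUTE_UNUSED
example_zero_cst (tree aggregate_type)
{
  tree int_zero = build_zero_cst (integer_type_node);
  tree agg_zero = build_zero_cst (aggregate_type);
  gcc_assert (integer_zerop (int_zero));
  gcc_assert (TREE_CODE (agg_zero) == CONSTRUCTOR
	      || !AGGREGATE_TYPE_P (aggregate_type));
}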
2136
2137
2138 /* Build a BINFO with LEN language slots. */
2139
2140 tree
2141 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2142 {
2143 tree t;
2144 size_t length = (offsetof (struct tree_binfo, base_binfos)
2145 + vec<tree, va_gc>::embedded_size (base_binfos));
2146
2147 record_node_allocation_statistics (TREE_BINFO, length);
2148
2149 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2150
2151 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2152
2153 TREE_SET_CODE (t, TREE_BINFO);
2154
2155 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2156
2157 return t;
2158 }
2159
2160 /* Create a CASE_LABEL_EXPR tree node and return it. */
2161
2162 tree
2163 build_case_label (tree low_value, tree high_value, tree label_decl)
2164 {
2165 tree t = make_node (CASE_LABEL_EXPR);
2166
2167 TREE_TYPE (t) = void_type_node;
2168 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2169
2170 CASE_LOW (t) = low_value;
2171 CASE_HIGH (t) = high_value;
2172 CASE_LABEL (t) = label_decl;
2173 CASE_CHAIN (t) = NULL_TREE;
2174
2175 return t;
2176 }
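
/* Illustrative sketch: a `case 3:' for LABEL_DECL is built by passing the
   constant as LOW and leaving HIGH null; a `default:' label would pass
   NULL_TREE for both.  The helper name example_case_three is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_case_three (tree label_decl)
{
  tree three = build_int_cst (integer_type_node, 3);
  return build_case_label (three, NULL_TREE, label_decl);
}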
2177
2178 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2179 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2180 The latter determines the length of the HOST_WIDE_INT vector. */
2181
2182 tree
2183 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2184 {
2185 tree t;
2186 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2187 + sizeof (struct tree_int_cst));
2188
2189 gcc_assert (len);
2190 record_node_allocation_statistics (INTEGER_CST, length);
2191
2192 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2193
2194 TREE_SET_CODE (t, INTEGER_CST);
2195 TREE_INT_CST_NUNITS (t) = len;
2196 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2197 /* to_offset can only be applied to trees that are offset_int-sized
2198 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2199 must be exactly the precision of offset_int and so LEN is correct. */
2200 if (ext_len <= OFFSET_INT_ELTS)
2201 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2202 else
2203 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2204
2205 TREE_CONSTANT (t) = 1;
2206
2207 return t;
2208 }
2209
2210 /* Build a newly constructed TREE_VEC node of length LEN. */
2211
2212 tree
2213 make_tree_vec_stat (int len MEM_STAT_DECL)
2214 {
2215 tree t;
2216 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2217
2218 record_node_allocation_statistics (TREE_VEC, length);
2219
2220 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2221
2222 TREE_SET_CODE (t, TREE_VEC);
2223 TREE_VEC_LENGTH (t) = len;
2224
2225 return t;
2226 }
2227
2228 /* Grow a TREE_VEC node to new length LEN. */
2229
2230 tree
2231 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2232 {
2233 gcc_assert (TREE_CODE (v) == TREE_VEC);
2234
2235 int oldlen = TREE_VEC_LENGTH (v);
2236 gcc_assert (len > oldlen);
2237
2238 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2239 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2240
2241 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2242
2243 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2244
2245 TREE_VEC_LENGTH (v) = len;
2246
2247 return v;
2248 }
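
/* Illustrative sketch of the TREE_VEC helpers above: allocate a two-element
   vector, then grow it to four; the old elements are preserved and the new
   slots are filled in by the caller.  The helper name
   example_grow_tree_vec is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_grow_tree_vec (void)
{
  tree v = make_tree_vec (2);
  TREE_VEC_ELT (v, 0) = integer_zero_node;
  TREE_VEC_ELT (v, 1) = integer_one_node;
  v = grow_tree_vec (v, 4);	/* Existing elements are kept.  */
  TREE_VEC_ELT (v, 2) = integer_zero_node;
  TREE_VEC_ELT (v, 3) = integer_one_node;
  gcc_assert (TREE_VEC_LENGTH (v) == 4);
  return v;
}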
2249 \f
2250 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2251 fixed, and scalar, complex or vector. */
2252
2253 int
2254 zerop (const_tree expr)
2255 {
2256 return (integer_zerop (expr)
2257 || real_zerop (expr)
2258 || fixed_zerop (expr));
2259 }
2260
2261 /* Return 1 if EXPR is the integer constant zero or a complex constant
2262 of zero. */
2263
2264 int
2265 integer_zerop (const_tree expr)
2266 {
2267 STRIP_NOPS (expr);
2268
2269 switch (TREE_CODE (expr))
2270 {
2271 case INTEGER_CST:
2272 return wi::eq_p (expr, 0);
2273 case COMPLEX_CST:
2274 return (integer_zerop (TREE_REALPART (expr))
2275 && integer_zerop (TREE_IMAGPART (expr)));
2276 case VECTOR_CST:
2277 {
2278 unsigned i;
2279 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2280 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2281 return false;
2282 return true;
2283 }
2284 default:
2285 return false;
2286 }
2287 }
2288
2289 /* Return 1 if EXPR is the integer constant one or the corresponding
2290 complex constant. */
2291
2292 int
2293 integer_onep (const_tree expr)
2294 {
2295 STRIP_NOPS (expr);
2296
2297 switch (TREE_CODE (expr))
2298 {
2299 case INTEGER_CST:
2300 return wi::eq_p (wi::to_widest (expr), 1);
2301 case COMPLEX_CST:
2302 return (integer_onep (TREE_REALPART (expr))
2303 && integer_zerop (TREE_IMAGPART (expr)));
2304 case VECTOR_CST:
2305 {
2306 unsigned i;
2307 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2308 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2309 return false;
2310 return true;
2311 }
2312 default:
2313 return false;
2314 }
2315 }
2316
2317 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2318 return 1 if every piece is the integer constant one. */
2319
2320 int
2321 integer_each_onep (const_tree expr)
2322 {
2323 STRIP_NOPS (expr);
2324
2325 if (TREE_CODE (expr) == COMPLEX_CST)
2326 return (integer_onep (TREE_REALPART (expr))
2327 && integer_onep (TREE_IMAGPART (expr)));
2328 else
2329 return integer_onep (expr);
2330 }
2331
2332 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2333 it contains, or a complex or vector whose subparts are such integers. */
2334
2335 int
2336 integer_all_onesp (const_tree expr)
2337 {
2338 STRIP_NOPS (expr);
2339
2340 if (TREE_CODE (expr) == COMPLEX_CST
2341 && integer_all_onesp (TREE_REALPART (expr))
2342 && integer_all_onesp (TREE_IMAGPART (expr)))
2343 return 1;
2344
2345 else if (TREE_CODE (expr) == VECTOR_CST)
2346 {
2347 unsigned i;
2348 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2349 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2350 return 0;
2351 return 1;
2352 }
2353
2354 else if (TREE_CODE (expr) != INTEGER_CST)
2355 return 0;
2356
2357 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2358 }
2359
2360 /* Return 1 if EXPR is the integer constant minus one. */
2361
2362 int
2363 integer_minus_onep (const_tree expr)
2364 {
2365 STRIP_NOPS (expr);
2366
2367 if (TREE_CODE (expr) == COMPLEX_CST)
2368 return (integer_all_onesp (TREE_REALPART (expr))
2369 && integer_zerop (TREE_IMAGPART (expr)));
2370 else
2371 return integer_all_onesp (expr);
2372 }
2373
2374 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2375 one bit on). */
2376
2377 int
2378 integer_pow2p (const_tree expr)
2379 {
2380 STRIP_NOPS (expr);
2381
2382 if (TREE_CODE (expr) == COMPLEX_CST
2383 && integer_pow2p (TREE_REALPART (expr))
2384 && integer_zerop (TREE_IMAGPART (expr)))
2385 return 1;
2386
2387 if (TREE_CODE (expr) != INTEGER_CST)
2388 return 0;
2389
2390 return wi::popcount (expr) == 1;
2391 }
2392
2393 /* Return 1 if EXPR is an integer constant other than zero or a
2394 complex constant other than zero. */
2395
2396 int
2397 integer_nonzerop (const_tree expr)
2398 {
2399 STRIP_NOPS (expr);
2400
2401 return ((TREE_CODE (expr) == INTEGER_CST
2402 && !wi::eq_p (expr, 0))
2403 || (TREE_CODE (expr) == COMPLEX_CST
2404 && (integer_nonzerop (TREE_REALPART (expr))
2405 || integer_nonzerop (TREE_IMAGPART (expr)))));
2406 }
2407
2408 /* Return 1 if EXPR is the integer constant one. For vector,
2409 return 1 if every piece is the integer constant minus one
2410 (representing the value TRUE). */
2411
2412 int
2413 integer_truep (const_tree expr)
2414 {
2415 STRIP_NOPS (expr);
2416
2417 if (TREE_CODE (expr) == VECTOR_CST)
2418 return integer_all_onesp (expr);
2419 return integer_onep (expr);
2420 }
2421
2422 /* Return 1 if EXPR is the fixed-point constant zero. */
2423
2424 int
2425 fixed_zerop (const_tree expr)
2426 {
2427 return (TREE_CODE (expr) == FIXED_CST
2428 && TREE_FIXED_CST (expr).data.is_zero ());
2429 }
2430
2431 /* Return the power of two represented by a tree node known to be a
2432 power of two. */
2433
2434 int
2435 tree_log2 (const_tree expr)
2436 {
2437 STRIP_NOPS (expr);
2438
2439 if (TREE_CODE (expr) == COMPLEX_CST)
2440 return tree_log2 (TREE_REALPART (expr));
2441
2442 return wi::exact_log2 (expr);
2443 }
2444
2445 /* Similar, but return the largest integer Y such that 2 ** Y is less
2446 than or equal to EXPR. */
2447
2448 int
2449 tree_floor_log2 (const_tree expr)
2450 {
2451 STRIP_NOPS (expr);
2452
2453 if (TREE_CODE (expr) == COMPLEX_CST)
2454 return tree_log2 (TREE_REALPART (expr));
2455
2456 return wi::floor_log2 (expr);
2457 }
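
/* Illustrative sketch of the power-of-two helpers above: for the constant
   8, integer_pow2p holds and tree_log2 gives the exact exponent; for 10
   only tree_floor_log2 is meaningful.  The helper name
   example_log2_helpers is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_log2_helpers (void)
{
  tree eight = build_int_cst (integer_type_node, 8);
  tree ten = build_int_cst (integer_type_node, 10);
  gcc_assert (integer_pow2p (eight) && tree_log2 (eight) == 3);
  gcc_assert (!integer_pow2p (ten) && tree_floor_log2 (ten) == 3);
}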
2458
2459 /* Return number of known trailing zero bits in EXPR, or, if the value of
2460    EXPR is known to be zero, the precision of its type.  */
2461
2462 unsigned int
2463 tree_ctz (const_tree expr)
2464 {
2465 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2466 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2467 return 0;
2468
2469 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2470 switch (TREE_CODE (expr))
2471 {
2472 case INTEGER_CST:
2473 ret1 = wi::ctz (expr);
2474 return MIN (ret1, prec);
2475 case SSA_NAME:
2476 ret1 = wi::ctz (get_nonzero_bits (expr));
2477 return MIN (ret1, prec);
2478 case PLUS_EXPR:
2479 case MINUS_EXPR:
2480 case BIT_IOR_EXPR:
2481 case BIT_XOR_EXPR:
2482 case MIN_EXPR:
2483 case MAX_EXPR:
2484 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2485 if (ret1 == 0)
2486 return ret1;
2487 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2488 return MIN (ret1, ret2);
2489 case POINTER_PLUS_EXPR:
2490 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2491 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2492       /* The second operand is sizetype, which could in theory be
2493 	 wider than the pointer's precision.  Make sure we never
2494 	 return more than prec.  */
2495 ret2 = MIN (ret2, prec);
2496 return MIN (ret1, ret2);
2497 case BIT_AND_EXPR:
2498 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2499 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2500 return MAX (ret1, ret2);
2501 case MULT_EXPR:
2502 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2503 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2504 return MIN (ret1 + ret2, prec);
2505 case LSHIFT_EXPR:
2506 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2507 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2508 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2509 {
2510 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2511 return MIN (ret1 + ret2, prec);
2512 }
2513 return ret1;
2514 case RSHIFT_EXPR:
2515 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2516 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2517 {
2518 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2519 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2520 if (ret1 > ret2)
2521 return ret1 - ret2;
2522 }
2523 return 0;
2524 case TRUNC_DIV_EXPR:
2525 case CEIL_DIV_EXPR:
2526 case FLOOR_DIV_EXPR:
2527 case ROUND_DIV_EXPR:
2528 case EXACT_DIV_EXPR:
2529 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2530 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2531 {
2532 int l = tree_log2 (TREE_OPERAND (expr, 1));
2533 if (l >= 0)
2534 {
2535 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2536 ret2 = l;
2537 if (ret1 > ret2)
2538 return ret1 - ret2;
2539 }
2540 }
2541 return 0;
2542 CASE_CONVERT:
2543 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2544 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2545 ret1 = prec;
2546 return MIN (ret1, prec);
2547 case SAVE_EXPR:
2548 return tree_ctz (TREE_OPERAND (expr, 0));
2549 case COND_EXPR:
2550 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2551 if (ret1 == 0)
2552 return 0;
2553 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2554 return MIN (ret1, ret2);
2555 case COMPOUND_EXPR:
2556 return tree_ctz (TREE_OPERAND (expr, 1));
2557 case ADDR_EXPR:
2558 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2559 if (ret1 > BITS_PER_UNIT)
2560 {
2561 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2562 return MIN (ret1, prec);
2563 }
2564 return 0;
2565 default:
2566 return 0;
2567 }
2568 }
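
/* Illustrative sketch of tree_ctz on a small expression: 12 has two
   trailing zero bits, and shifting it left by one adds another, matching
   the LSHIFT_EXPR case above.  The helper name example_tree_ctz is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
example_tree_ctz (void)
{
  tree twelve = build_int_cst (integer_type_node, 12);
  tree shifted = build2 (LSHIFT_EXPR, integer_type_node, twelve,
			 build_int_cst (integer_type_node, 1));
  gcc_assert (tree_ctz (twelve) == 2);
  gcc_assert (tree_ctz (shifted) == 3);
}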
2569
2570 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2571 decimal float constants, so don't return 1 for them. */
2572
2573 int
2574 real_zerop (const_tree expr)
2575 {
2576 STRIP_NOPS (expr);
2577
2578 switch (TREE_CODE (expr))
2579 {
2580 case REAL_CST:
2581 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2582 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2583 case COMPLEX_CST:
2584 return real_zerop (TREE_REALPART (expr))
2585 && real_zerop (TREE_IMAGPART (expr));
2586 case VECTOR_CST:
2587 {
2588 unsigned i;
2589 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2590 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2591 return false;
2592 return true;
2593 }
2594 default:
2595 return false;
2596 }
2597 }
2598
2599 /* Return 1 if EXPR is the real constant one in real or complex form.
2600 Trailing zeroes matter for decimal float constants, so don't return
2601 1 for them. */
2602
2603 int
2604 real_onep (const_tree expr)
2605 {
2606 STRIP_NOPS (expr);
2607
2608 switch (TREE_CODE (expr))
2609 {
2610 case REAL_CST:
2611 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2612 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2613 case COMPLEX_CST:
2614 return real_onep (TREE_REALPART (expr))
2615 && real_zerop (TREE_IMAGPART (expr));
2616 case VECTOR_CST:
2617 {
2618 unsigned i;
2619 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2620 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2621 return false;
2622 return true;
2623 }
2624 default:
2625 return false;
2626 }
2627 }
2628
2629 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2630 matter for decimal float constants, so don't return 1 for them. */
2631
2632 int
2633 real_minus_onep (const_tree expr)
2634 {
2635 STRIP_NOPS (expr);
2636
2637 switch (TREE_CODE (expr))
2638 {
2639 case REAL_CST:
2640 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2641 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2642 case COMPLEX_CST:
2643 return real_minus_onep (TREE_REALPART (expr))
2644 && real_zerop (TREE_IMAGPART (expr));
2645 case VECTOR_CST:
2646 {
2647 unsigned i;
2648 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2649 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2650 return false;
2651 return true;
2652 }
2653 default:
2654 return false;
2655 }
2656 }
2657
2658 /* Nonzero if EXP is a constant or a cast of a constant. */
2659
2660 int
2661 really_constant_p (const_tree exp)
2662 {
2663 /* This is not quite the same as STRIP_NOPS. It does more. */
2664 while (CONVERT_EXPR_P (exp)
2665 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2666 exp = TREE_OPERAND (exp, 0);
2667 return TREE_CONSTANT (exp);
2668 }
2669 \f
2670 /* Return first list element whose TREE_VALUE is ELEM.
2671 Return 0 if ELEM is not in LIST. */
2672
2673 tree
2674 value_member (tree elem, tree list)
2675 {
2676 while (list)
2677 {
2678 if (elem == TREE_VALUE (list))
2679 return list;
2680 list = TREE_CHAIN (list);
2681 }
2682 return NULL_TREE;
2683 }
2684
2685 /* Return first list element whose TREE_PURPOSE is ELEM.
2686 Return 0 if ELEM is not in LIST. */
2687
2688 tree
2689 purpose_member (const_tree elem, tree list)
2690 {
2691 while (list)
2692 {
2693 if (elem == TREE_PURPOSE (list))
2694 return list;
2695 list = TREE_CHAIN (list);
2696 }
2697 return NULL_TREE;
2698 }
2699
2700 /* Return true if ELEM is in V. */
2701
2702 bool
2703 vec_member (const_tree elem, vec<tree, va_gc> *v)
2704 {
2705 unsigned ix;
2706 tree t;
2707 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2708 if (elem == t)
2709 return true;
2710 return false;
2711 }
2712
2713 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2714 NULL_TREE. */
2715
2716 tree
2717 chain_index (int idx, tree chain)
2718 {
2719 for (; chain && idx > 0; --idx)
2720 chain = TREE_CHAIN (chain);
2721 return chain;
2722 }
2723
2724 /* Return nonzero if ELEM is part of the chain CHAIN. */
2725
2726 int
2727 chain_member (const_tree elem, const_tree chain)
2728 {
2729 while (chain)
2730 {
2731 if (elem == chain)
2732 return 1;
2733 chain = DECL_CHAIN (chain);
2734 }
2735
2736 return 0;
2737 }
2738
2739 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2740 We expect a null pointer to mark the end of the chain.
2741 This is the Lisp primitive `length'. */
2742
2743 int
2744 list_length (const_tree t)
2745 {
2746 const_tree p = t;
2747 #ifdef ENABLE_TREE_CHECKING
2748 const_tree q = t;
2749 #endif
2750 int len = 0;
2751
2752 while (p)
2753 {
2754 p = TREE_CHAIN (p);
2755 #ifdef ENABLE_TREE_CHECKING
2756 if (len % 2)
2757 q = TREE_CHAIN (q);
2758 gcc_assert (p != q);
2759 #endif
2760 len++;
2761 }
2762
2763 return len;
2764 }
2765
2766 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2767 UNION_TYPE TYPE, or NULL_TREE if none. */
2768
2769 tree
2770 first_field (const_tree type)
2771 {
2772 tree t = TYPE_FIELDS (type);
2773 while (t && TREE_CODE (t) != FIELD_DECL)
2774 t = TREE_CHAIN (t);
2775 return t;
2776 }
2777
2778 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2779 by modifying the last node in chain 1 to point to chain 2.
2780 This is the Lisp primitive `nconc'. */
2781
2782 tree
2783 chainon (tree op1, tree op2)
2784 {
2785 tree t1;
2786
2787 if (!op1)
2788 return op2;
2789 if (!op2)
2790 return op1;
2791
2792 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2793 continue;
2794 TREE_CHAIN (t1) = op2;
2795
2796 #ifdef ENABLE_TREE_CHECKING
2797 {
2798 tree t2;
2799 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2800 gcc_assert (t2 != t1);
2801 }
2802 #endif
2803
2804 return op1;
2805 }
2806
2807 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2808
2809 tree
2810 tree_last (tree chain)
2811 {
2812 tree next;
2813 if (chain)
2814 while ((next = TREE_CHAIN (chain)))
2815 chain = next;
2816 return chain;
2817 }
2818
2819 /* Reverse the order of elements in the chain T,
2820 and return the new head of the chain (old last element). */
2821
2822 tree
2823 nreverse (tree t)
2824 {
2825 tree prev = 0, decl, next;
2826 for (decl = t; decl; decl = next)
2827 {
2828 /* We shouldn't be using this function to reverse BLOCK chains; we
2829 have blocks_nreverse for that. */
2830 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2831 next = TREE_CHAIN (decl);
2832 TREE_CHAIN (decl) = prev;
2833 prev = decl;
2834 }
2835 return prev;
2836 }
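
/* Illustrative sketch of the TREE_LIST helpers above: build a two-element
   list with tree_cons, append a third element with chainon, query it with
   list_length and tree_last, then reverse it in place.  The helper name
   example_tree_list_chain is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_tree_list_chain (void)
{
  tree list = tree_cons (NULL_TREE, integer_zero_node,
			 build_tree_list (NULL_TREE, integer_one_node));
  list = chainon (list, build_tree_list (NULL_TREE, integer_zero_node));
  gcc_assert (list_length (list) == 3);
  gcc_assert (TREE_VALUE (tree_last (list)) == integer_zero_node);
  return nreverse (list);
}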
2837 \f
2838 /* Return a newly created TREE_LIST node whose
2839 purpose and value fields are PARM and VALUE. */
2840
2841 tree
2842 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2843 {
2844 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2845 TREE_PURPOSE (t) = parm;
2846 TREE_VALUE (t) = value;
2847 return t;
2848 }
2849
2850 /* Build a chain of TREE_LIST nodes from a vector. */
2851
2852 tree
2853 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2854 {
2855 tree ret = NULL_TREE;
2856 tree *pp = &ret;
2857 unsigned int i;
2858 tree t;
2859 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2860 {
2861 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2862 pp = &TREE_CHAIN (*pp);
2863 }
2864 return ret;
2865 }
2866
2867 /* Return a newly created TREE_LIST node whose
2868 purpose and value fields are PURPOSE and VALUE
2869 and whose TREE_CHAIN is CHAIN. */
2870
2871 tree
2872 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2873 {
2874 tree node;
2875
2876 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2877 memset (node, 0, sizeof (struct tree_common));
2878
2879 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2880
2881 TREE_SET_CODE (node, TREE_LIST);
2882 TREE_CHAIN (node) = chain;
2883 TREE_PURPOSE (node) = purpose;
2884 TREE_VALUE (node) = value;
2885 return node;
2886 }
2887
2888 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2889 trees. */
2890
2891 vec<tree, va_gc> *
2892 ctor_to_vec (tree ctor)
2893 {
2894 vec<tree, va_gc> *vec;
2895 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2896 unsigned int ix;
2897 tree val;
2898
2899 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2900 vec->quick_push (val);
2901
2902 return vec;
2903 }
2904 \f
2905 /* Return the size nominally occupied by an object of type TYPE
2906 when it resides in memory. The value is measured in units of bytes,
2907 and its data type is that normally used for type sizes
2908 (which is the first type created by make_signed_type or
2909 make_unsigned_type). */
2910
2911 tree
2912 size_in_bytes (const_tree type)
2913 {
2914 tree t;
2915
2916 if (type == error_mark_node)
2917 return integer_zero_node;
2918
2919 type = TYPE_MAIN_VARIANT (type);
2920 t = TYPE_SIZE_UNIT (type);
2921
2922 if (t == 0)
2923 {
2924 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2925 return size_zero_node;
2926 }
2927
2928 return t;
2929 }
2930
2931 /* Return the size of TYPE (in bytes) as a wide integer
2932 or return -1 if the size can vary or is larger than an integer. */
2933
2934 HOST_WIDE_INT
2935 int_size_in_bytes (const_tree type)
2936 {
2937 tree t;
2938
2939 if (type == error_mark_node)
2940 return 0;
2941
2942 type = TYPE_MAIN_VARIANT (type);
2943 t = TYPE_SIZE_UNIT (type);
2944
2945 if (t && tree_fits_uhwi_p (t))
2946 return TREE_INT_CST_LOW (t);
2947 else
2948 return -1;
2949 }
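
/* Illustrative sketch of the two size queries above: int_size_in_bytes
   gives a host integer when the size is a known constant that fits, and
   -1 otherwise, while size_in_bytes always returns a tree.  The helper
   name example_size_queries is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_size_queries (tree some_type)
{
  HOST_WIDE_INT isize = int_size_in_bytes (some_type);
  tree tsize = size_in_bytes (some_type);
  if (isize != -1)
    /* The two views agree for constant-sized types.  */
    gcc_assert (tree_fits_uhwi_p (tsize)
		&& (HOST_WIDE_INT) tree_to_uhwi (tsize) == isize);
}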
2950
2951 /* Return the maximum size of TYPE (in bytes) as a wide integer
2952 or return -1 if the size can vary or is larger than an integer. */
2953
2954 HOST_WIDE_INT
2955 max_int_size_in_bytes (const_tree type)
2956 {
2957 HOST_WIDE_INT size = -1;
2958 tree size_tree;
2959
2960 /* If this is an array type, check for a possible MAX_SIZE attached. */
2961
2962 if (TREE_CODE (type) == ARRAY_TYPE)
2963 {
2964 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2965
2966 if (size_tree && tree_fits_uhwi_p (size_tree))
2967 size = tree_to_uhwi (size_tree);
2968 }
2969
2970 /* If we still haven't been able to get a size, see if the language
2971 can compute a maximum size. */
2972
2973 if (size == -1)
2974 {
2975 size_tree = lang_hooks.types.max_size (type);
2976
2977 if (size_tree && tree_fits_uhwi_p (size_tree))
2978 size = tree_to_uhwi (size_tree);
2979 }
2980
2981 return size;
2982 }
2983 \f
2984 /* Return the bit position of FIELD, in bits from the start of the record.
2985 This is a tree of type bitsizetype. */
2986
2987 tree
2988 bit_position (const_tree field)
2989 {
2990 return bit_from_pos (DECL_FIELD_OFFSET (field),
2991 DECL_FIELD_BIT_OFFSET (field));
2992 }
2993 \f
2994 /* Return the byte position of FIELD, in bytes from the start of the record.
2995 This is a tree of type sizetype. */
2996
2997 tree
2998 byte_position (const_tree field)
2999 {
3000 return byte_from_pos (DECL_FIELD_OFFSET (field),
3001 DECL_FIELD_BIT_OFFSET (field));
3002 }
3003
3004 /* Likewise, but return as an integer. It must be representable in
3005 that way (since it could be a signed value, we don't have the
3006    option of returning -1 like int_size_in_bytes can).  */
3007
3008 HOST_WIDE_INT
3009 int_byte_position (const_tree field)
3010 {
3011 return tree_to_shwi (byte_position (field));
3012 }
3013 \f
3014 /* Return the strictest alignment, in bits, that T is known to have. */
3015
3016 unsigned int
3017 expr_align (const_tree t)
3018 {
3019 unsigned int align0, align1;
3020
3021 switch (TREE_CODE (t))
3022 {
3023 CASE_CONVERT: case NON_LVALUE_EXPR:
3024 /* If we have conversions, we know that the alignment of the
3025 object must meet each of the alignments of the types. */
3026 align0 = expr_align (TREE_OPERAND (t, 0));
3027 align1 = TYPE_ALIGN (TREE_TYPE (t));
3028 return MAX (align0, align1);
3029
3030 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3031 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3032 case CLEANUP_POINT_EXPR:
3033 /* These don't change the alignment of an object. */
3034 return expr_align (TREE_OPERAND (t, 0));
3035
3036 case COND_EXPR:
3037 /* The best we can do is say that the alignment is the least aligned
3038 of the two arms. */
3039 align0 = expr_align (TREE_OPERAND (t, 1));
3040 align1 = expr_align (TREE_OPERAND (t, 2));
3041 return MIN (align0, align1);
3042
3043 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3044 meaningfully, it's always 1. */
3045 case LABEL_DECL: case CONST_DECL:
3046 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3047 case FUNCTION_DECL:
3048 gcc_assert (DECL_ALIGN (t) != 0);
3049 return DECL_ALIGN (t);
3050
3051 default:
3052 break;
3053 }
3054
3055 /* Otherwise take the alignment from that of the type. */
3056 return TYPE_ALIGN (TREE_TYPE (t));
3057 }
3058 \f
3059 /* Return, as a tree node, the number of elements for TYPE (which is an
3060 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3061
3062 tree
3063 array_type_nelts (const_tree type)
3064 {
3065 tree index_type, min, max;
3066
3067 /* If they did it with unspecified bounds, then we should have already
3068 given an error about it before we got here. */
3069 if (! TYPE_DOMAIN (type))
3070 return error_mark_node;
3071
3072 index_type = TYPE_DOMAIN (type);
3073 min = TYPE_MIN_VALUE (index_type);
3074 max = TYPE_MAX_VALUE (index_type);
3075
3076 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3077 if (!max)
3078 return error_mark_node;
3079
3080 return (integer_zerop (min)
3081 ? max
3082 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3083 }
3084 \f
3085 /* If arg is static -- a reference to an object in static storage -- then
3086 return the object. This is not the same as the C meaning of `static'.
3087 If arg isn't static, return NULL. */
3088
3089 tree
3090 staticp (tree arg)
3091 {
3092 switch (TREE_CODE (arg))
3093 {
3094 case FUNCTION_DECL:
3095 /* Nested functions are static, even though taking their address will
3096 involve a trampoline as we unnest the nested function and create
3097 the trampoline on the tree level. */
3098 return arg;
3099
3100 case VAR_DECL:
3101 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3102 && ! DECL_THREAD_LOCAL_P (arg)
3103 && ! DECL_DLLIMPORT_P (arg)
3104 ? arg : NULL);
3105
3106 case CONST_DECL:
3107 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3108 ? arg : NULL);
3109
3110 case CONSTRUCTOR:
3111 return TREE_STATIC (arg) ? arg : NULL;
3112
3113 case LABEL_DECL:
3114 case STRING_CST:
3115 return arg;
3116
3117 case COMPONENT_REF:
3118 /* If the thing being referenced is not a field, then it is
3119 something language specific. */
3120 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3121
3122 /* If we are referencing a bitfield, we can't evaluate an
3123 ADDR_EXPR at compile time and so it isn't a constant. */
3124 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3125 return NULL;
3126
3127 return staticp (TREE_OPERAND (arg, 0));
3128
3129 case BIT_FIELD_REF:
3130 return NULL;
3131
3132 case INDIRECT_REF:
3133 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3134
3135 case ARRAY_REF:
3136 case ARRAY_RANGE_REF:
3137 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3138 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3139 return staticp (TREE_OPERAND (arg, 0));
3140 else
3141 return NULL;
3142
3143 case COMPOUND_LITERAL_EXPR:
3144 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3145
3146 default:
3147 return NULL;
3148 }
3149 }
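
/* Illustrative sketch of the staticp convention above: the function answers
   with the static object itself rather than a boolean, so a string literal
   is its own answer.  The helper name example_staticp_string is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
example_staticp_string (void)
{
  tree str = build_string (4, "abc");
  /* String literals live in static storage.  */
  gcc_assert (staticp (str) == str);
}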
3150
3151 \f
3152
3153
3154 /* Return whether OP is a DECL whose address is function-invariant. */
3155
3156 bool
3157 decl_address_invariant_p (const_tree op)
3158 {
3159 /* The conditions below are slightly less strict than the one in
3160 staticp. */
3161
3162 switch (TREE_CODE (op))
3163 {
3164 case PARM_DECL:
3165 case RESULT_DECL:
3166 case LABEL_DECL:
3167 case FUNCTION_DECL:
3168 return true;
3169
3170 case VAR_DECL:
3171 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3172 || DECL_THREAD_LOCAL_P (op)
3173 || DECL_CONTEXT (op) == current_function_decl
3174 || decl_function_context (op) == current_function_decl)
3175 return true;
3176 break;
3177
3178 case CONST_DECL:
3179 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3180 || decl_function_context (op) == current_function_decl)
3181 return true;
3182 break;
3183
3184 default:
3185 break;
3186 }
3187
3188 return false;
3189 }
3190
3191 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3192
3193 bool
3194 decl_address_ip_invariant_p (const_tree op)
3195 {
3196 /* The conditions below are slightly less strict than the one in
3197 staticp. */
3198
3199 switch (TREE_CODE (op))
3200 {
3201 case LABEL_DECL:
3202 case FUNCTION_DECL:
3203 case STRING_CST:
3204 return true;
3205
3206 case VAR_DECL:
3207 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3208 && !DECL_DLLIMPORT_P (op))
3209 || DECL_THREAD_LOCAL_P (op))
3210 return true;
3211 break;
3212
3213 case CONST_DECL:
3214 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3215 return true;
3216 break;
3217
3218 default:
3219 break;
3220 }
3221
3222 return false;
3223 }
3224
3225
3226 /* Return true if T is function-invariant (internal function, does
3227 not handle arithmetic; that's handled in skip_simple_arithmetic and
3228 tree_invariant_p). */
3229
3230 static bool tree_invariant_p (tree t);
3231
3232 static bool
3233 tree_invariant_p_1 (tree t)
3234 {
3235 tree op;
3236
3237 if (TREE_CONSTANT (t)
3238 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3239 return true;
3240
3241 switch (TREE_CODE (t))
3242 {
3243 case SAVE_EXPR:
3244 return true;
3245
3246 case ADDR_EXPR:
3247 op = TREE_OPERAND (t, 0);
3248 while (handled_component_p (op))
3249 {
3250 switch (TREE_CODE (op))
3251 {
3252 case ARRAY_REF:
3253 case ARRAY_RANGE_REF:
3254 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3255 || TREE_OPERAND (op, 2) != NULL_TREE
3256 || TREE_OPERAND (op, 3) != NULL_TREE)
3257 return false;
3258 break;
3259
3260 case COMPONENT_REF:
3261 if (TREE_OPERAND (op, 2) != NULL_TREE)
3262 return false;
3263 break;
3264
3265 default:;
3266 }
3267 op = TREE_OPERAND (op, 0);
3268 }
3269
3270 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3271
3272 default:
3273 break;
3274 }
3275
3276 return false;
3277 }
3278
3279 /* Return true if T is function-invariant. */
3280
3281 static bool
3282 tree_invariant_p (tree t)
3283 {
3284 tree inner = skip_simple_arithmetic (t);
3285 return tree_invariant_p_1 (inner);
3286 }
3287
3288 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3289 Do this to any expression which may be used in more than one place,
3290 but must be evaluated only once.
3291
3292 Normally, expand_expr would reevaluate the expression each time.
3293 Calling save_expr produces something that is evaluated and recorded
3294 the first time expand_expr is called on it. Subsequent calls to
3295 expand_expr just reuse the recorded value.
3296
3297 The call to expand_expr that generates code that actually computes
3298 the value is the first call *at compile time*. Subsequent calls
3299 *at compile time* generate code to use the saved value.
3300 This produces correct result provided that *at run time* control
3301 always flows through the insns made by the first expand_expr
3302 before reaching the other places where the save_expr was evaluated.
3303 You, the caller of save_expr, must make sure this is so.
3304
3305 Constants, and certain read-only nodes, are returned with no
3306 SAVE_EXPR because that is safe. Expressions containing placeholders
3307 are not touched; see tree.def for an explanation of what these
3308 are used for. */
3309
3310 tree
3311 save_expr (tree expr)
3312 {
3313 tree t = fold (expr);
3314 tree inner;
3315
3316 /* If the tree evaluates to a constant, then we don't want to hide that
3317 fact (i.e. this allows further folding, and direct checks for constants).
3318 However, a read-only object that has side effects cannot be bypassed.
3319 Since it is no problem to reevaluate literals, we just return the
3320 literal node. */
3321 inner = skip_simple_arithmetic (t);
3322 if (TREE_CODE (inner) == ERROR_MARK)
3323 return inner;
3324
3325 if (tree_invariant_p_1 (inner))
3326 return t;
3327
3328 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3329 it means that the size or offset of some field of an object depends on
3330 the value within another field.
3331
3332 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3333 and some variable since it would then need to be both evaluated once and
3334 evaluated more than once. Front-ends must assure this case cannot
3335 happen by surrounding any such subexpressions in their own SAVE_EXPR
3336 and forcing evaluation at the proper time. */
3337 if (contains_placeholder_p (inner))
3338 return t;
3339
3340 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3341 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3342
3343 /* This expression might be placed ahead of a jump to ensure that the
3344 value was computed on both sides of the jump. So make sure it isn't
3345 eliminated as dead. */
3346 TREE_SIDE_EFFECTS (t) = 1;
3347 return t;
3348 }
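
/* Illustrative sketch of the save_expr contract described above: constants
   come back untouched, while an ordinary variable reference is wrapped in
   a SAVE_EXPR so it is evaluated only once.  The helper name
   example_save_expr and the variable name "tmp" are hypothetical.  */

static void ATTRIBUTE_UNUSED
example_save_expr (void)
{
  tree cst = build_int_cst (integer_type_node, 42);
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("tmp"), integer_type_node);
  gcc_assert (save_expr (cst) == cst);
  gcc_assert (TREE_CODE (save_expr (var)) == SAVE_EXPR);
}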
3349
3350 /* Look inside EXPR into any simple arithmetic operations. Return the
3351 outermost non-arithmetic or non-invariant node. */
3352
3353 tree
3354 skip_simple_arithmetic (tree expr)
3355 {
3356 /* We don't care about whether this can be used as an lvalue in this
3357 context. */
3358 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3359 expr = TREE_OPERAND (expr, 0);
3360
3361 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3362 a constant, it will be more efficient to not make another SAVE_EXPR since
3363 it will allow better simplification and GCSE will be able to merge the
3364 computations if they actually occur. */
3365 while (true)
3366 {
3367 if (UNARY_CLASS_P (expr))
3368 expr = TREE_OPERAND (expr, 0);
3369 else if (BINARY_CLASS_P (expr))
3370 {
3371 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3372 expr = TREE_OPERAND (expr, 0);
3373 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3374 expr = TREE_OPERAND (expr, 1);
3375 else
3376 break;
3377 }
3378 else
3379 break;
3380 }
3381
3382 return expr;
3383 }
3384
3385 /* Look inside EXPR into simple arithmetic operations involving constants.
3386 Return the outermost non-arithmetic or non-constant node. */
3387
3388 tree
3389 skip_simple_constant_arithmetic (tree expr)
3390 {
3391 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3392 expr = TREE_OPERAND (expr, 0);
3393
3394 while (true)
3395 {
3396 if (UNARY_CLASS_P (expr))
3397 expr = TREE_OPERAND (expr, 0);
3398 else if (BINARY_CLASS_P (expr))
3399 {
3400 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3401 expr = TREE_OPERAND (expr, 0);
3402 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3403 expr = TREE_OPERAND (expr, 1);
3404 else
3405 break;
3406 }
3407 else
3408 break;
3409 }
3410
3411 return expr;
3412 }
3413
3414 /* Return which tree structure is used by T. */
3415
3416 enum tree_node_structure_enum
3417 tree_node_structure (const_tree t)
3418 {
3419 const enum tree_code code = TREE_CODE (t);
3420 return tree_node_structure_for_code (code);
3421 }
3422
3423 /* Set various status flags when building a CALL_EXPR object T. */
3424
3425 static void
3426 process_call_operands (tree t)
3427 {
3428 bool side_effects = TREE_SIDE_EFFECTS (t);
3429 bool read_only = false;
3430 int i = call_expr_flags (t);
3431
3432 /* Calls have side-effects, except those to const or pure functions. */
3433 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3434 side_effects = true;
3435 /* Propagate TREE_READONLY of arguments for const functions. */
3436 if (i & ECF_CONST)
3437 read_only = true;
3438
3439 if (!side_effects || read_only)
3440 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3441 {
3442 tree op = TREE_OPERAND (t, i);
3443 if (op && TREE_SIDE_EFFECTS (op))
3444 side_effects = true;
3445 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3446 read_only = false;
3447 }
3448
3449 TREE_SIDE_EFFECTS (t) = side_effects;
3450 TREE_READONLY (t) = read_only;
3451 }
3452 \f
3453 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3454 size or offset that depends on a field within a record. */
3455
3456 bool
3457 contains_placeholder_p (const_tree exp)
3458 {
3459 enum tree_code code;
3460
3461 if (!exp)
3462 return 0;
3463
3464 code = TREE_CODE (exp);
3465 if (code == PLACEHOLDER_EXPR)
3466 return 1;
3467
3468 switch (TREE_CODE_CLASS (code))
3469 {
3470 case tcc_reference:
3471 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3472 position computations since they will be converted into a
3473 	 WITH_RECORD_EXPR involving the reference, which we assume
3474 	 here will be valid.  */
3475 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3476
3477 case tcc_exceptional:
3478 if (code == TREE_LIST)
3479 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3480 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3481 break;
3482
3483 case tcc_unary:
3484 case tcc_binary:
3485 case tcc_comparison:
3486 case tcc_expression:
3487 switch (code)
3488 {
3489 case COMPOUND_EXPR:
3490 /* Ignoring the first operand isn't quite right, but works best. */
3491 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3492
3493 case COND_EXPR:
3494 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3495 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3496 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3497
3498 case SAVE_EXPR:
3499 /* The save_expr function never wraps anything containing
3500 a PLACEHOLDER_EXPR. */
3501 return 0;
3502
3503 default:
3504 break;
3505 }
3506
3507 switch (TREE_CODE_LENGTH (code))
3508 {
3509 case 1:
3510 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3511 case 2:
3512 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3513 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3514 default:
3515 return 0;
3516 }
3517
3518 case tcc_vl_exp:
3519 switch (code)
3520 {
3521 case CALL_EXPR:
3522 {
3523 const_tree arg;
3524 const_call_expr_arg_iterator iter;
3525 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3526 if (CONTAINS_PLACEHOLDER_P (arg))
3527 return 1;
3528 return 0;
3529 }
3530 default:
3531 return 0;
3532 }
3533
3534 default:
3535 return 0;
3536 }
3537 return 0;
3538 }
3539
3540 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3541 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3542 field positions. */
3543
3544 static bool
3545 type_contains_placeholder_1 (const_tree type)
3546 {
3547 /* If the size contains a placeholder or the parent type (component type in
3548 the case of arrays) type involves a placeholder, this type does. */
3549 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3550 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3551 || (!POINTER_TYPE_P (type)
3552 && TREE_TYPE (type)
3553 && type_contains_placeholder_p (TREE_TYPE (type))))
3554 return true;
3555
3556 /* Now do type-specific checks. Note that the last part of the check above
3557 greatly limits what we have to do below. */
3558 switch (TREE_CODE (type))
3559 {
3560 case VOID_TYPE:
3561 case POINTER_BOUNDS_TYPE:
3562 case COMPLEX_TYPE:
3563 case ENUMERAL_TYPE:
3564 case BOOLEAN_TYPE:
3565 case POINTER_TYPE:
3566 case OFFSET_TYPE:
3567 case REFERENCE_TYPE:
3568 case METHOD_TYPE:
3569 case FUNCTION_TYPE:
3570 case VECTOR_TYPE:
3571 case NULLPTR_TYPE:
3572 return false;
3573
3574 case INTEGER_TYPE:
3575 case REAL_TYPE:
3576 case FIXED_POINT_TYPE:
3577 /* Here we just check the bounds. */
3578 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3579 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3580
3581 case ARRAY_TYPE:
3582 /* We have already checked the component type above, so just check the
3583 domain type. */
3584 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3585
3586 case RECORD_TYPE:
3587 case UNION_TYPE:
3588 case QUAL_UNION_TYPE:
3589 {
3590 tree field;
3591
3592 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3593 if (TREE_CODE (field) == FIELD_DECL
3594 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3595 || (TREE_CODE (type) == QUAL_UNION_TYPE
3596 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3597 || type_contains_placeholder_p (TREE_TYPE (field))))
3598 return true;
3599
3600 return false;
3601 }
3602
3603 default:
3604 gcc_unreachable ();
3605 }
3606 }
3607
3608 /* Wrapper around above function used to cache its result. */
3609
3610 bool
3611 type_contains_placeholder_p (tree type)
3612 {
3613 bool result;
3614
3615 /* If the contains_placeholder_bits field has been initialized,
3616 then we know the answer. */
3617 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3618 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3619
3620 /* Indicate that we've seen this type node, and the answer is false.
3621 This is what we want to return if we run into recursion via fields. */
3622 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3623
3624 /* Compute the real value. */
3625 result = type_contains_placeholder_1 (type);
3626
3627 /* Store the real value. */
3628 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3629
3630 return result;
3631 }
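
/* Illustrative sketch of the caching scheme above: the internal field holds
   0 for "not computed yet" and otherwise the cached answer plus one, so a
   repeated query is answered without recomputation.  The helper name
   example_cached_placeholder_query is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_cached_placeholder_query (tree type)
{
  bool first = type_contains_placeholder_p (type);
  /* The second call reads TYPE_CONTAINS_PLACEHOLDER_INTERNAL directly.  */
  bool second = type_contains_placeholder_p (type);
  gcc_assert (first == second);
  return first;
}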
3632 \f
3633 /* Push tree EXP onto vector QUEUE if it is not already present. */
3634
3635 static void
3636 push_without_duplicates (tree exp, vec<tree> *queue)
3637 {
3638 unsigned int i;
3639 tree iter;
3640
3641 FOR_EACH_VEC_ELT (*queue, i, iter)
3642 if (simple_cst_equal (iter, exp) == 1)
3643 break;
3644
3645 if (!iter)
3646 queue->safe_push (exp);
3647 }
3648
3649 /* Given a tree EXP, find all occurrences of references to fields
3650 in a PLACEHOLDER_EXPR and place them in vector REFS without
3651 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3652 we assume here that EXP contains only arithmetic expressions
3653 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3654 argument list. */
3655
3656 void
3657 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3658 {
3659 enum tree_code code = TREE_CODE (exp);
3660 tree inner;
3661 int i;
3662
3663 /* We handle TREE_LIST and COMPONENT_REF separately. */
3664 if (code == TREE_LIST)
3665 {
3666 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3667 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3668 }
3669 else if (code == COMPONENT_REF)
3670 {
3671 for (inner = TREE_OPERAND (exp, 0);
3672 REFERENCE_CLASS_P (inner);
3673 inner = TREE_OPERAND (inner, 0))
3674 ;
3675
3676 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3677 push_without_duplicates (exp, refs);
3678 else
3679 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3680 }
3681 else
3682 switch (TREE_CODE_CLASS (code))
3683 {
3684 case tcc_constant:
3685 break;
3686
3687 case tcc_declaration:
3688 /* Variables allocated to static storage can stay. */
3689 if (!TREE_STATIC (exp))
3690 push_without_duplicates (exp, refs);
3691 break;
3692
3693 case tcc_expression:
3694 /* This is the pattern built in ada/make_aligning_type. */
3695 if (code == ADDR_EXPR
3696 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3697 {
3698 push_without_duplicates (exp, refs);
3699 break;
3700 }
3701
3702 /* Fall through... */
3703
3704 case tcc_exceptional:
3705 case tcc_unary:
3706 case tcc_binary:
3707 case tcc_comparison:
3708 case tcc_reference:
3709 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3710 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3711 break;
3712
3713 case tcc_vl_exp:
3714 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3715 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3716 break;
3717
3718 default:
3719 gcc_unreachable ();
3720 }
3721 }
3722
3723 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3724 return a tree with all occurrences of references to F in a
3725 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3726 CONST_DECLs. Note that we assume here that EXP contains only
3727 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3728 occurring only in their argument list. */
3729
3730 tree
3731 substitute_in_expr (tree exp, tree f, tree r)
3732 {
3733 enum tree_code code = TREE_CODE (exp);
3734 tree op0, op1, op2, op3;
3735 tree new_tree;
3736
3737 /* We handle TREE_LIST and COMPONENT_REF separately. */
3738 if (code == TREE_LIST)
3739 {
3740 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3741 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3742 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3743 return exp;
3744
3745 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3746 }
3747 else if (code == COMPONENT_REF)
3748 {
3749 tree inner;
3750
3751 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3752 and it is the right field, replace it with R. */
3753 for (inner = TREE_OPERAND (exp, 0);
3754 REFERENCE_CLASS_P (inner);
3755 inner = TREE_OPERAND (inner, 0))
3756 ;
3757
3758 /* The field. */
3759 op1 = TREE_OPERAND (exp, 1);
3760
3761 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3762 return r;
3763
3764       /* If this expression hasn't been completed yet, leave it alone.  */
3765 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3766 return exp;
3767
3768 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3769 if (op0 == TREE_OPERAND (exp, 0))
3770 return exp;
3771
3772 new_tree
3773 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3774 }
3775 else
3776 switch (TREE_CODE_CLASS (code))
3777 {
3778 case tcc_constant:
3779 return exp;
3780
3781 case tcc_declaration:
3782 if (exp == f)
3783 return r;
3784 else
3785 return exp;
3786
3787 case tcc_expression:
3788 if (exp == f)
3789 return r;
3790
3791 /* Fall through... */
3792
3793 case tcc_exceptional:
3794 case tcc_unary:
3795 case tcc_binary:
3796 case tcc_comparison:
3797 case tcc_reference:
3798 switch (TREE_CODE_LENGTH (code))
3799 {
3800 case 0:
3801 return exp;
3802
3803 case 1:
3804 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3805 if (op0 == TREE_OPERAND (exp, 0))
3806 return exp;
3807
3808 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3809 break;
3810
3811 case 2:
3812 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3813 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3814
3815 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3816 return exp;
3817
3818 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3819 break;
3820
3821 case 3:
3822 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3823 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3824 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3825
3826 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3827 && op2 == TREE_OPERAND (exp, 2))
3828 return exp;
3829
3830 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3831 break;
3832
3833 case 4:
3834 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3835 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3836 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3837 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3838
3839 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3840 && op2 == TREE_OPERAND (exp, 2)
3841 && op3 == TREE_OPERAND (exp, 3))
3842 return exp;
3843
3844 new_tree
3845 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3846 break;
3847
3848 default:
3849 gcc_unreachable ();
3850 }
3851 break;
3852
3853 case tcc_vl_exp:
3854 {
3855 int i;
3856
3857 new_tree = NULL_TREE;
3858
3859 	/* If we are trying to replace F with a constant, inline calls to
3860 	   functions that do nothing other than compute a value from the
3861 	   arguments they are passed.  This makes it possible to fold the
3862 	   replacement expression partially or entirely.  */
3863 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3864 {
3865 tree t = maybe_inline_call_in_expr (exp);
3866 if (t)
3867 return SUBSTITUTE_IN_EXPR (t, f, r);
3868 }
3869
3870 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3871 {
3872 tree op = TREE_OPERAND (exp, i);
3873 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3874 if (new_op != op)
3875 {
3876 if (!new_tree)
3877 new_tree = copy_node (exp);
3878 TREE_OPERAND (new_tree, i) = new_op;
3879 }
3880 }
3881
3882 if (new_tree)
3883 {
3884 new_tree = fold (new_tree);
3885 if (TREE_CODE (new_tree) == CALL_EXPR)
3886 process_call_operands (new_tree);
3887 }
3888 else
3889 return exp;
3890 }
3891 break;
3892
3893 default:
3894 gcc_unreachable ();
3895 }
3896
3897 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3898
3899 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3900 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3901
3902 return new_tree;
3903 }
3904
3905 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3906 for it within OBJ, a tree that is an object or a chain of references. */
3907
3908 tree
3909 substitute_placeholder_in_expr (tree exp, tree obj)
3910 {
3911 enum tree_code code = TREE_CODE (exp);
3912 tree op0, op1, op2, op3;
3913 tree new_tree;
3914
3915 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3916 in the chain of OBJ. */
3917 if (code == PLACEHOLDER_EXPR)
3918 {
3919 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3920 tree elt;
3921
3922 for (elt = obj; elt != 0;
3923 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3924 || TREE_CODE (elt) == COND_EXPR)
3925 ? TREE_OPERAND (elt, 1)
3926 : (REFERENCE_CLASS_P (elt)
3927 || UNARY_CLASS_P (elt)
3928 || BINARY_CLASS_P (elt)
3929 || VL_EXP_CLASS_P (elt)
3930 || EXPRESSION_CLASS_P (elt))
3931 ? TREE_OPERAND (elt, 0) : 0))
3932 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3933 return elt;
3934
3935 for (elt = obj; elt != 0;
3936 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3937 || TREE_CODE (elt) == COND_EXPR)
3938 ? TREE_OPERAND (elt, 1)
3939 : (REFERENCE_CLASS_P (elt)
3940 || UNARY_CLASS_P (elt)
3941 || BINARY_CLASS_P (elt)
3942 || VL_EXP_CLASS_P (elt)
3943 || EXPRESSION_CLASS_P (elt))
3944 ? TREE_OPERAND (elt, 0) : 0))
3945 if (POINTER_TYPE_P (TREE_TYPE (elt))
3946 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3947 == need_type))
3948 return fold_build1 (INDIRECT_REF, need_type, elt);
3949
3950 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3951 survives until RTL generation, there will be an error. */
3952 return exp;
3953 }
3954
3955 /* TREE_LIST is special because we need to look at TREE_VALUE
3956 and TREE_CHAIN, not TREE_OPERANDS. */
3957 else if (code == TREE_LIST)
3958 {
3959 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3960 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3961 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3962 return exp;
3963
3964 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3965 }
3966 else
3967 switch (TREE_CODE_CLASS (code))
3968 {
3969 case tcc_constant:
3970 case tcc_declaration:
3971 return exp;
3972
3973 case tcc_exceptional:
3974 case tcc_unary:
3975 case tcc_binary:
3976 case tcc_comparison:
3977 case tcc_expression:
3978 case tcc_reference:
3979 case tcc_statement:
3980 switch (TREE_CODE_LENGTH (code))
3981 {
3982 case 0:
3983 return exp;
3984
3985 case 1:
3986 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3987 if (op0 == TREE_OPERAND (exp, 0))
3988 return exp;
3989
3990 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3991 break;
3992
3993 case 2:
3994 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3995 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3996
3997 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3998 return exp;
3999
4000 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4001 break;
4002
4003 case 3:
4004 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4005 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4006 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4007
4008 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4009 && op2 == TREE_OPERAND (exp, 2))
4010 return exp;
4011
4012 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4013 break;
4014
4015 case 4:
4016 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4017 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4018 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4019 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4020
4021 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4022 && op2 == TREE_OPERAND (exp, 2)
4023 && op3 == TREE_OPERAND (exp, 3))
4024 return exp;
4025
4026 new_tree
4027 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4028 break;
4029
4030 default:
4031 gcc_unreachable ();
4032 }
4033 break;
4034
4035 case tcc_vl_exp:
4036 {
4037 int i;
4038
4039 new_tree = NULL_TREE;
4040
4041 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4042 {
4043 tree op = TREE_OPERAND (exp, i);
4044 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4045 if (new_op != op)
4046 {
4047 if (!new_tree)
4048 new_tree = copy_node (exp);
4049 TREE_OPERAND (new_tree, i) = new_op;
4050 }
4051 }
4052
4053 if (new_tree)
4054 {
4055 new_tree = fold (new_tree);
4056 if (TREE_CODE (new_tree) == CALL_EXPR)
4057 process_call_operands (new_tree);
4058 }
4059 else
4060 return exp;
4061 }
4062 break;
4063
4064 default:
4065 gcc_unreachable ();
4066 }
4067
4068 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4069
4070 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4071 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4072
4073 return new_tree;
4074 }
4075 \f
4076
4077 /* Subroutine of stabilize_reference; this is called for subtrees of
4078 references. Any expression with side-effects must be put in a SAVE_EXPR
4079 to ensure that it is only evaluated once.
4080
4081 We don't put SAVE_EXPR nodes around everything, because assigning very
4082 simple expressions to temporaries causes us to miss good opportunities
4083 for optimizations. Among other things, the opportunity to fold in the
4084 addition of a constant into an addressing mode often gets lost, e.g.
4085 "y[i+1] += x;". In general, we take the approach that we should not make
4086 an assignment unless we are forced into it - i.e., that any non-side effect
4087 operator should be allowed, and that cse should take care of coalescing
4088 multiple utterances of the same expression should that prove fruitful. */
4089
4090 static tree
4091 stabilize_reference_1 (tree e)
4092 {
4093 tree result;
4094 enum tree_code code = TREE_CODE (e);
4095
4096 /* We cannot ignore const expressions because the expression might be a
4097 reference to a const array whose index contains side-effects. But we
4098 can ignore things that are actual constants or that have already been
4099 handled by this function. */
4100
4101 if (tree_invariant_p (e))
4102 return e;
4103
4104 switch (TREE_CODE_CLASS (code))
4105 {
4106 case tcc_exceptional:
4107 case tcc_type:
4108 case tcc_declaration:
4109 case tcc_comparison:
4110 case tcc_statement:
4111 case tcc_expression:
4112 case tcc_reference:
4113 case tcc_vl_exp:
4114 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4115 so that it will only be evaluated once. */
4116 /* The reference (r) and comparison (<) classes could be handled as
4117 below, but it is generally faster to only evaluate them once. */
4118 if (TREE_SIDE_EFFECTS (e))
4119 return save_expr (e);
4120 return e;
4121
4122 case tcc_constant:
4123 /* Constants need no processing. In fact, we should never reach
4124 here. */
4125 return e;
4126
4127 case tcc_binary:
4128 /* Division is slow and tends to be compiled with jumps,
4129 especially the division by powers of 2 that is often
4130 found inside of an array reference. So do it just once. */
4131 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4132 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4133 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4134 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4135 return save_expr (e);
4136 /* Recursively stabilize each operand. */
4137 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4138 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4139 break;
4140
4141 case tcc_unary:
4142 /* Recursively stabilize each operand. */
4143 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4144 break;
4145
4146 default:
4147 gcc_unreachable ();
4148 }
4149
4150 TREE_TYPE (result) = TREE_TYPE (e);
4151 TREE_READONLY (result) = TREE_READONLY (e);
4152 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4153 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4154
4155 return result;
4156 }
4157
4158 /* Stabilize a reference so that we can use it any number of times
4159 without causing its operands to be evaluated more than once.
4160 Returns the stabilized reference. This works by means of save_expr,
4161 so see the caveats in the comments about save_expr.
4162
4163 Also allows conversion expressions whose operands are references.
4164 Any other kind of expression is returned unchanged. */
4165
4166 tree
4167 stabilize_reference (tree ref)
4168 {
4169 tree result;
4170 enum tree_code code = TREE_CODE (ref);
4171
4172 switch (code)
4173 {
4174 case VAR_DECL:
4175 case PARM_DECL:
4176 case RESULT_DECL:
4177 /* No action is needed in this case. */
4178 return ref;
4179
4180 CASE_CONVERT:
4181 case FLOAT_EXPR:
4182 case FIX_TRUNC_EXPR:
4183 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4184 break;
4185
4186 case INDIRECT_REF:
4187 result = build_nt (INDIRECT_REF,
4188 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4189 break;
4190
4191 case COMPONENT_REF:
4192 result = build_nt (COMPONENT_REF,
4193 stabilize_reference (TREE_OPERAND (ref, 0)),
4194 TREE_OPERAND (ref, 1), NULL_TREE);
4195 break;
4196
4197 case BIT_FIELD_REF:
4198 result = build_nt (BIT_FIELD_REF,
4199 stabilize_reference (TREE_OPERAND (ref, 0)),
4200 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4201 break;
4202
4203 case ARRAY_REF:
4204 result = build_nt (ARRAY_REF,
4205 stabilize_reference (TREE_OPERAND (ref, 0)),
4206 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4207 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4208 break;
4209
4210 case ARRAY_RANGE_REF:
4211 result = build_nt (ARRAY_RANGE_REF,
4212 stabilize_reference (TREE_OPERAND (ref, 0)),
4213 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4214 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4215 break;
4216
4217 case COMPOUND_EXPR:
4218 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4219 it wouldn't be ignored. This matters when dealing with
4220 volatiles. */
4221 return stabilize_reference_1 (ref);
4222
4223 /* If arg isn't a kind of lvalue we recognize, make no change.
4224 Caller should recognize the error for an invalid lvalue. */
4225 default:
4226 return ref;
4227
4228 case ERROR_MARK:
4229 return error_mark_node;
4230 }
4231
4232 TREE_TYPE (result) = TREE_TYPE (ref);
4233 TREE_READONLY (result) = TREE_READONLY (ref);
4234 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4235 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4236
4237 return result;
4238 }
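/* Editorial illustration, not part of the original sources; ARR, IDX and
   ELT_TYPE are hypothetical trees.  Given an ARRAY_REF whose index has
   side effects, stabilize_reference keeps the reference shape but routes
   the index through stabilize_reference_1, so the index is typically
   wrapped in a SAVE_EXPR and evaluated only once however many times the
   result is reused:

     tree ref = build4 (ARRAY_REF, elt_type, arr, idx, NULL_TREE, NULL_TREE);
     tree stable = stabilize_reference (ref);

   When IDX has TREE_SIDE_EFFECTS set, TREE_OPERAND (stable, 1) would
   normally be a SAVE_EXPR around it.  */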
4239 \f
4240 /* Low-level constructors for expressions. */
4241
4242 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
4243 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4244
4245 void
4246 recompute_tree_invariant_for_addr_expr (tree t)
4247 {
4248 tree node;
4249 bool tc = true, se = false;
4250
4251 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4252
4253 /* We started out assuming this address is both invariant and constant, and
4254 does not have side effects. Now go down any handled components and see if
4255 any of them involve offsets that are either non-constant or non-invariant.
4256 Also check for side-effects.
4257
4258 ??? Note that this code makes no attempt to deal with the case where
4259 taking the address of something causes a copy due to misalignment. */
4260
4261 #define UPDATE_FLAGS(NODE) \
4262 do { tree _node = (NODE); \
4263 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4264 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4265
4266 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4267 node = TREE_OPERAND (node, 0))
4268 {
4269 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4270 array reference (probably made temporarily by the G++ front end),
4271 so ignore all the operands. */
4272 if ((TREE_CODE (node) == ARRAY_REF
4273 || TREE_CODE (node) == ARRAY_RANGE_REF)
4274 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4275 {
4276 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4277 if (TREE_OPERAND (node, 2))
4278 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4279 if (TREE_OPERAND (node, 3))
4280 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4281 }
4282 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4283 FIELD_DECL, apparently. The G++ front end can put something else
4284 there, at least temporarily. */
4285 else if (TREE_CODE (node) == COMPONENT_REF
4286 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4287 {
4288 if (TREE_OPERAND (node, 2))
4289 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4290 }
4291 }
4292
4293 node = lang_hooks.expr_to_decl (node, &tc, &se);
4294
4295 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4296 the address, since &(*a)->b is a form of addition. If it's a constant, the
4297 address is constant too. If it's a decl, its address is constant if the
4298 decl is static. Everything else is not constant and, furthermore,
4299 taking the address of a volatile variable is not volatile. */
4300 if (TREE_CODE (node) == INDIRECT_REF
4301 || TREE_CODE (node) == MEM_REF)
4302 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4303 else if (CONSTANT_CLASS_P (node))
4304 ;
4305 else if (DECL_P (node))
4306 tc &= (staticp (node) != NULL_TREE);
4307 else
4308 {
4309 tc = false;
4310 se |= TREE_SIDE_EFFECTS (node);
4311 }
4312
4313
4314 TREE_CONSTANT (t) = tc;
4315 TREE_SIDE_EFFECTS (t) = se;
4316 #undef UPDATE_FLAGS
4317 }
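/* Editorial illustration, not part of the original sources; VAR is a
   hypothetical VAR_DECL.  Taking its address with build1 runs the function
   above (see the ADDR_EXPR case in build1_stat below), so

     tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)), var);

   ends up with TREE_CONSTANT (addr) set only when staticp (var) is
   non-NULL, i.e. for variables with static storage, and never for an
   automatic local.  */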
4318
4319 /* Build an expression of code CODE, data type TYPE, and operands as
4320 specified. Expressions and reference nodes can be created this way.
4321 Constants, decls, types and misc nodes cannot be.
4322
4323 We define six non-variadic functions, from 0 to 5 arguments. This is
4324 enough for all extant tree codes. */
4325
4326 tree
4327 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4328 {
4329 tree t;
4330
4331 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4332
4333 t = make_node_stat (code PASS_MEM_STAT);
4334 TREE_TYPE (t) = tt;
4335
4336 return t;
4337 }
4338
4339 tree
4340 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4341 {
4342 int length = sizeof (struct tree_exp);
4343 tree t;
4344
4345 record_node_allocation_statistics (code, length);
4346
4347 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4348
4349 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4350
4351 memset (t, 0, sizeof (struct tree_common));
4352
4353 TREE_SET_CODE (t, code);
4354
4355 TREE_TYPE (t) = type;
4356 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4357 TREE_OPERAND (t, 0) = node;
4358 if (node && !TYPE_P (node))
4359 {
4360 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4361 TREE_READONLY (t) = TREE_READONLY (node);
4362 }
4363
4364 if (TREE_CODE_CLASS (code) == tcc_statement)
4365 TREE_SIDE_EFFECTS (t) = 1;
4366 else switch (code)
4367 {
4368 case VA_ARG_EXPR:
4369 /* All of these have side-effects, no matter what their
4370 operands are. */
4371 TREE_SIDE_EFFECTS (t) = 1;
4372 TREE_READONLY (t) = 0;
4373 break;
4374
4375 case INDIRECT_REF:
4376 /* Whether a dereference is readonly has nothing to do with whether
4377 its operand is readonly. */
4378 TREE_READONLY (t) = 0;
4379 break;
4380
4381 case ADDR_EXPR:
4382 if (node)
4383 recompute_tree_invariant_for_addr_expr (t);
4384 break;
4385
4386 default:
4387 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4388 && node && !TYPE_P (node)
4389 && TREE_CONSTANT (node))
4390 TREE_CONSTANT (t) = 1;
4391 if (TREE_CODE_CLASS (code) == tcc_reference
4392 && node && TREE_THIS_VOLATILE (node))
4393 TREE_THIS_VOLATILE (t) = 1;
4394 break;
4395 }
4396
4397 return t;
4398 }
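/* Editorial illustration, not part of the original sources; OP is a
   hypothetical integer-typed operand.  A simple unary expression is built
   with the build1 wrapper macro, e.g.

     tree neg = build1 (NEGATE_EXPR, integer_type_node, op);

   TREE_SIDE_EFFECTS and TREE_READONLY are copied from OP, and because
   NEGATE_EXPR is a tcc_unary code, TREE_CONSTANT (neg) is set whenever
   TREE_CONSTANT (op) is set.  */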
4399
4400 #define PROCESS_ARG(N) \
4401 do { \
4402 TREE_OPERAND (t, N) = arg##N; \
4403 if (arg##N && !TYPE_P (arg##N)) \
4404 { \
4405 if (TREE_SIDE_EFFECTS (arg##N)) \
4406 side_effects = 1; \
4407 if (!TREE_READONLY (arg##N) \
4408 && !CONSTANT_CLASS_P (arg##N)) \
4409 (void) (read_only = 0); \
4410 if (!TREE_CONSTANT (arg##N)) \
4411 (void) (constant = 0); \
4412 } \
4413 } while (0)
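/* Editorial note, not part of the original sources: for a given operand
   slot N, PROCESS_ARG (N) stores argN into the node being built and folds
   the operand's flags into the local accumulators, roughly (for N == 0)

     TREE_OPERAND (t, 0) = arg0;
     if (arg0 && !TYPE_P (arg0))
       {
         side_effects |= TREE_SIDE_EFFECTS (arg0);
         read_only &= TREE_READONLY (arg0) || CONSTANT_CLASS_P (arg0);
         constant &= TREE_CONSTANT (arg0);
       }

   The buildN_stat functions below then copy the accumulated values back
   into the node's flags.  */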
4414
4415 tree
4416 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4417 {
4418 bool constant, read_only, side_effects;
4419 tree t;
4420
4421 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4422
4423 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4424 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4425 /* When sizetype precision doesn't match that of pointers
4426 we need to be able to build explicit extensions or truncations
4427 of the offset argument. */
4428 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4429 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4430 && TREE_CODE (arg1) == INTEGER_CST);
4431
4432 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4433 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4434 && ptrofftype_p (TREE_TYPE (arg1)));
4435
4436 t = make_node_stat (code PASS_MEM_STAT);
4437 TREE_TYPE (t) = tt;
4438
4439 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4440 result based on those same flags for the arguments. But if the
4441 arguments aren't really even `tree' expressions, we shouldn't be trying
4442 to do this. */
4443
4444 /* Expressions without side effects may be constant if their
4445 arguments are as well. */
4446 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4447 || TREE_CODE_CLASS (code) == tcc_binary);
4448 read_only = 1;
4449 side_effects = TREE_SIDE_EFFECTS (t);
4450
4451 PROCESS_ARG (0);
4452 PROCESS_ARG (1);
4453
4454 TREE_SIDE_EFFECTS (t) = side_effects;
4455 if (code == MEM_REF)
4456 {
4457 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4458 {
4459 tree o = TREE_OPERAND (arg0, 0);
4460 TREE_READONLY (t) = TREE_READONLY (o);
4461 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4462 }
4463 }
4464 else
4465 {
4466 TREE_READONLY (t) = read_only;
4467 TREE_CONSTANT (t) = constant;
4468 TREE_THIS_VOLATILE (t)
4469 = (TREE_CODE_CLASS (code) == tcc_reference
4470 && arg0 && TREE_THIS_VOLATILE (arg0));
4471 }
4472
4473 return t;
4474 }
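/* Editorial illustration, not part of the original sources; PTR is a
   hypothetical pointer-typed tree.  A pointer offset by four bytes is
   built with the build2 wrapper as

     tree off = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                        ptr, size_int (4));

   which satisfies the assertion above because size_int produces a
   sizetype constant and ptrofftype_p (sizetype) holds.  */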
4475
4476
4477 tree
4478 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4479 tree arg2 MEM_STAT_DECL)
4480 {
4481 bool constant, read_only, side_effects;
4482 tree t;
4483
4484 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4485 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4486
4487 t = make_node_stat (code PASS_MEM_STAT);
4488 TREE_TYPE (t) = tt;
4489
4490 read_only = 1;
4491
4492 /* As a special exception, if COND_EXPR has NULL branches, we
4493 assume that it is a gimple statement and always consider
4494 it to have side effects. */
4495 if (code == COND_EXPR
4496 && tt == void_type_node
4497 && arg1 == NULL_TREE
4498 && arg2 == NULL_TREE)
4499 side_effects = true;
4500 else
4501 side_effects = TREE_SIDE_EFFECTS (t);
4502
4503 PROCESS_ARG (0);
4504 PROCESS_ARG (1);
4505 PROCESS_ARG (2);
4506
4507 if (code == COND_EXPR)
4508 TREE_READONLY (t) = read_only;
4509
4510 TREE_SIDE_EFFECTS (t) = side_effects;
4511 TREE_THIS_VOLATILE (t)
4512 = (TREE_CODE_CLASS (code) == tcc_reference
4513 && arg0 && TREE_THIS_VOLATILE (arg0));
4514
4515 return t;
4516 }
4517
4518 tree
4519 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4520 tree arg2, tree arg3 MEM_STAT_DECL)
4521 {
4522 bool constant, read_only, side_effects;
4523 tree t;
4524
4525 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4526
4527 t = make_node_stat (code PASS_MEM_STAT);
4528 TREE_TYPE (t) = tt;
4529
4530 side_effects = TREE_SIDE_EFFECTS (t);
4531
4532 PROCESS_ARG (0);
4533 PROCESS_ARG (1);
4534 PROCESS_ARG (2);
4535 PROCESS_ARG (3);
4536
4537 TREE_SIDE_EFFECTS (t) = side_effects;
4538 TREE_THIS_VOLATILE (t)
4539 = (TREE_CODE_CLASS (code) == tcc_reference
4540 && arg0 && TREE_THIS_VOLATILE (arg0));
4541
4542 return t;
4543 }
4544
4545 tree
4546 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4547 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4548 {
4549 bool constant, read_only, side_effects;
4550 tree t;
4551
4552 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4553
4554 t = make_node_stat (code PASS_MEM_STAT);
4555 TREE_TYPE (t) = tt;
4556
4557 side_effects = TREE_SIDE_EFFECTS (t);
4558
4559 PROCESS_ARG (0);
4560 PROCESS_ARG (1);
4561 PROCESS_ARG (2);
4562 PROCESS_ARG (3);
4563 PROCESS_ARG (4);
4564
4565 TREE_SIDE_EFFECTS (t) = side_effects;
4566 if (code == TARGET_MEM_REF)
4567 {
4568 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4569 {
4570 tree o = TREE_OPERAND (arg0, 0);
4571 TREE_READONLY (t) = TREE_READONLY (o);
4572 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4573 }
4574 }
4575 else
4576 TREE_THIS_VOLATILE (t)
4577 = (TREE_CODE_CLASS (code) == tcc_reference
4578 && arg0 && TREE_THIS_VOLATILE (arg0));
4579
4580 return t;
4581 }
4582
4583 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4584 on the pointer PTR. */
4585
4586 tree
4587 build_simple_mem_ref_loc (location_t loc, tree ptr)
4588 {
4589 HOST_WIDE_INT offset = 0;
4590 tree ptype = TREE_TYPE (ptr);
4591 tree tem;
4592 /* For convenience allow addresses that collapse to a simple base
4593 and offset. */
4594 if (TREE_CODE (ptr) == ADDR_EXPR
4595 && (handled_component_p (TREE_OPERAND (ptr, 0))
4596 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4597 {
4598 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4599 gcc_assert (ptr);
4600 ptr = build_fold_addr_expr (ptr);
4601 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4602 }
4603 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4604 ptr, build_int_cst (ptype, offset));
4605 SET_EXPR_LOCATION (tem, loc);
4606 return tem;
4607 }
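/* Editorial illustration, not part of the original sources; PTR is a
   hypothetical pointer SSA name or invariant address.  A plain
   dereference "*ptr" is obtained with

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   and when PTR is an ADDR_EXPR of a handled component, the address is
   first collapsed to a simple base plus constant offset as done above.  */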
4608
4609 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4610
4611 offset_int
4612 mem_ref_offset (const_tree t)
4613 {
4614 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4615 }
4616
4617 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4618 offsetted by OFFSET units. */
4619
4620 tree
4621 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4622 {
4623 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4624 build_fold_addr_expr (base),
4625 build_int_cst (ptr_type_node, offset));
4626 tree addr = build1 (ADDR_EXPR, type, ref);
4627 recompute_tree_invariant_for_addr_expr (addr);
4628 return addr;
4629 }
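/* Editorial illustration, not part of the original sources; BASE is a
   hypothetical static VAR_DECL and PTYPE a pointer type.  An invariant
   address eight bytes past BASE can be produced by

     tree addr = build_invariant_address (ptype, base, 8);

   build_invariant_address then calls recompute_tree_invariant_for_addr_expr,
   and since the inner address of a static decl is itself constant, the
   resulting ADDR_EXPR comes out TREE_CONSTANT.  */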
4630
4631 /* Similar to the build functions above, except don't specify the TREE_TYPE
4632 and leave the TREE_SIDE_EFFECTS as 0.
4633 It is permissible for arguments to be null,
4634 or even garbage if their values do not matter. */
4635
4636 tree
4637 build_nt (enum tree_code code, ...)
4638 {
4639 tree t;
4640 int length;
4641 int i;
4642 va_list p;
4643
4644 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4645
4646 va_start (p, code);
4647
4648 t = make_node (code);
4649 length = TREE_CODE_LENGTH (code);
4650
4651 for (i = 0; i < length; i++)
4652 TREE_OPERAND (t, i) = va_arg (p, tree);
4653
4654 va_end (p);
4655 return t;
4656 }
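/* Editorial illustration, not part of the original sources: build_nt is
   what stabilize_reference above uses to rebuild references without
   recomputing flags; e.g. a raw COMPONENT_REF (OBJ and FIELD being
   hypothetical trees) can be spelled

     tree ref = build_nt (COMPONENT_REF, obj, field, NULL_TREE);

   leaving TREE_TYPE and TREE_SIDE_EFFECTS for the caller to fill in.  */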
4657
4658 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4659 tree vec. */
4660
4661 tree
4662 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4663 {
4664 tree ret, t;
4665 unsigned int ix;
4666
4667 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4668 CALL_EXPR_FN (ret) = fn;
4669 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4670 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4671 CALL_EXPR_ARG (ret, ix) = t;
4672 return ret;
4673 }
4674 \f
4675 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4676 We do NOT enter this node in any sort of symbol table.
4677
4678 LOC is the location of the decl.
4679
4680 layout_decl is used to set up the decl's storage layout.
4681 Other slots are initialized to 0 or null pointers. */
4682
4683 tree
4684 build_decl_stat (location_t loc, enum tree_code code, tree name,
4685 tree type MEM_STAT_DECL)
4686 {
4687 tree t;
4688
4689 t = make_node_stat (code PASS_MEM_STAT);
4690 DECL_SOURCE_LOCATION (t) = loc;
4691
4692 /* if (type == error_mark_node)
4693 type = integer_type_node; */
4694 /* That is not done, deliberately, so that having error_mark_node
4695 as the type can suppress useless errors in the use of this variable. */
4696
4697 DECL_NAME (t) = name;
4698 TREE_TYPE (t) = type;
4699
4700 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4701 layout_decl (t, 0);
4702
4703 return t;
4704 }
4705
4706 /* Builds and returns function declaration with NAME and TYPE. */
4707
4708 tree
4709 build_fn_decl (const char *name, tree type)
4710 {
4711 tree id = get_identifier (name);
4712 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4713
4714 DECL_EXTERNAL (decl) = 1;
4715 TREE_PUBLIC (decl) = 1;
4716 DECL_ARTIFICIAL (decl) = 1;
4717 TREE_NOTHROW (decl) = 1;
4718
4719 return decl;
4720 }
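/* Editorial illustration, not part of the original sources; the function
   name "__sanitizer_hook" is made up.  A middle-end helper that needs a
   declaration for an external function taking no arguments and returning
   void could use

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__sanitizer_hook", fntype);

   and the declaration comes back public, external, artificial and
   nothrow, as set above.  */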
4721
4722 vec<tree, va_gc> *all_translation_units;
4723
4724 /* Builds a new translation-unit decl with name NAME, queues it in the
4725 global list of translation-unit decls and returns it. */
4726
4727 tree
4728 build_translation_unit_decl (tree name)
4729 {
4730 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4731 name, NULL_TREE);
4732 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4733 vec_safe_push (all_translation_units, tu);
4734 return tu;
4735 }
4736
4737 \f
4738 /* BLOCK nodes are used to represent the structure of binding contours
4739 and declarations, once those contours have been exited and their contents
4740 compiled. This information is used for outputting debugging info. */
4741
4742 tree
4743 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4744 {
4745 tree block = make_node (BLOCK);
4746
4747 BLOCK_VARS (block) = vars;
4748 BLOCK_SUBBLOCKS (block) = subblocks;
4749 BLOCK_SUPERCONTEXT (block) = supercontext;
4750 BLOCK_CHAIN (block) = chain;
4751 return block;
4752 }
4753
4754 \f
4755 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4756
4757 LOC is the location to use in tree T. */
4758
4759 void
4760 protected_set_expr_location (tree t, location_t loc)
4761 {
4762 if (CAN_HAVE_LOCATION_P (t))
4763 SET_EXPR_LOCATION (t, loc);
4764 }
4765 \f
4766 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4767 is ATTRIBUTE. */
4768
4769 tree
4770 build_decl_attribute_variant (tree ddecl, tree attribute)
4771 {
4772 DECL_ATTRIBUTES (ddecl) = attribute;
4773 return ddecl;
4774 }
4775
4776 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4777 is ATTRIBUTE and its qualifiers are QUALS.
4778
4779 Record such modified types already made so we don't make duplicates. */
4780
4781 tree
4782 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4783 {
4784 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4785 {
4786 inchash::hash hstate;
4787 tree ntype;
4788 int i;
4789 tree t;
4790 enum tree_code code = TREE_CODE (ttype);
4791
4792 /* Building a distinct copy of a tagged type is inappropriate; it
4793 causes breakage in code that expects there to be a one-to-one
4794 relationship between a struct and its fields.
4795 build_duplicate_type is another solution (as used in
4796 handle_transparent_union_attribute), but that doesn't play well
4797 with the stronger C++ type identity model. */
4798 if (TREE_CODE (ttype) == RECORD_TYPE
4799 || TREE_CODE (ttype) == UNION_TYPE
4800 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4801 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4802 {
4803 warning (OPT_Wattributes,
4804 "ignoring attributes applied to %qT after definition",
4805 TYPE_MAIN_VARIANT (ttype));
4806 return build_qualified_type (ttype, quals);
4807 }
4808
4809 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4810 ntype = build_distinct_type_copy (ttype);
4811
4812 TYPE_ATTRIBUTES (ntype) = attribute;
4813
4814 hstate.add_int (code);
4815 if (TREE_TYPE (ntype))
4816 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4817 attribute_hash_list (attribute, hstate);
4818
4819 switch (TREE_CODE (ntype))
4820 {
4821 case FUNCTION_TYPE:
4822 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4823 break;
4824 case ARRAY_TYPE:
4825 if (TYPE_DOMAIN (ntype))
4826 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4827 break;
4828 case INTEGER_TYPE:
4829 t = TYPE_MAX_VALUE (ntype);
4830 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4831 hstate.add_object (TREE_INT_CST_ELT (t, i));
4832 break;
4833 case REAL_TYPE:
4834 case FIXED_POINT_TYPE:
4835 {
4836 unsigned int precision = TYPE_PRECISION (ntype);
4837 hstate.add_object (precision);
4838 }
4839 break;
4840 default:
4841 break;
4842 }
4843
4844 ntype = type_hash_canon (hstate.end (), ntype);
4845
4846 /* If the target-dependent attributes make NTYPE different from
4847 its canonical type, we will need to use structural equality
4848 checks for this type. */
4849 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4850 || !comp_type_attributes (ntype, ttype))
4851 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4852 else if (TYPE_CANONICAL (ntype) == ntype)
4853 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4854
4855 ttype = build_qualified_type (ntype, quals);
4856 }
4857 else if (TYPE_QUALS (ttype) != quals)
4858 ttype = build_qualified_type (ttype, quals);
4859
4860 return ttype;
4861 }
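/* Editorial illustration, not part of the original sources; T is a
   hypothetical type.  Attaching a single argument-less attribute usually
   goes through the build_type_attribute_variant wrapper below, e.g.

     tree attr = tree_cons (get_identifier ("unused"), NULL_TREE, NULL_TREE);
     tree variant = build_type_attribute_variant (t, attr);

   For a tagged type (RECORD_TYPE, UNION_TYPE, ...) this path warns that
   attributes applied after the definition are ignored and only the
   qualifiers are applied, as coded above.  */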
4862
4863 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4864 the same. */
4865
4866 static bool
4867 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4868 {
4869 tree cl1, cl2;
4870 for (cl1 = clauses1, cl2 = clauses2;
4871 cl1 && cl2;
4872 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4873 {
4874 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4875 return false;
4876 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4877 {
4878 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4879 OMP_CLAUSE_DECL (cl2)) != 1)
4880 return false;
4881 }
4882 switch (OMP_CLAUSE_CODE (cl1))
4883 {
4884 case OMP_CLAUSE_ALIGNED:
4885 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4886 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4887 return false;
4888 break;
4889 case OMP_CLAUSE_LINEAR:
4890 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4891 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4892 return false;
4893 break;
4894 case OMP_CLAUSE_SIMDLEN:
4895 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4896 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4897 return false;
4898 default:
4899 break;
4900 }
4901 }
4902 return true;
4903 }
4904
4905 /* Compare two constructor-element-type constants. Return true if the lists
4906 are known to be equal; otherwise return false. */
4907
4908 static bool
4909 simple_cst_list_equal (const_tree l1, const_tree l2)
4910 {
4911 while (l1 != NULL_TREE && l2 != NULL_TREE)
4912 {
4913 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4914 return false;
4915
4916 l1 = TREE_CHAIN (l1);
4917 l2 = TREE_CHAIN (l2);
4918 }
4919
4920 return l1 == l2;
4921 }
4922
4923 /* Compare two identifier nodes representing attributes. Either one may
4924 be in wrapped __ATTR__ form. Return true if they are the same, false
4925 otherwise. */
4926
4927 static bool
4928 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4929 {
4930 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4931 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4932 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4933
4934 /* Identifiers can be compared directly for equality. */
4935 if (attr1 == attr2)
4936 return true;
4937
4938 /* If they are not equal, one may still be in the form 'text' while
4939 the other is in the form '__text__'. TODO: If we were storing
4940 attributes in normalized 'text' form, then this could all go away
4941 and we could take full advantage of the fact that we're comparing
4942 identifiers. :-) */
4943 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4944 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4945
4946 if (attr2_len == attr1_len + 4)
4947 {
4948 const char *p = IDENTIFIER_POINTER (attr2);
4949 const char *q = IDENTIFIER_POINTER (attr1);
4950 if (p[0] == '_' && p[1] == '_'
4951 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4952 && strncmp (q, p + 2, attr1_len) == 0)
4953 return true;
4954 }
4955 else if (attr2_len + 4 == attr1_len)
4956 {
4957 const char *p = IDENTIFIER_POINTER (attr2);
4958 const char *q = IDENTIFIER_POINTER (attr1);
4959 if (q[0] == '_' && q[1] == '_'
4960 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4961 && strncmp (q + 2, p, attr2_len) == 0)
4962 return true;
4963 }
4964
4965 return false;
4966 }
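/* Editorial illustration, not part of the original sources: the two
   spellings of an attribute name compare equal here, so

     cmp_attrib_identifiers (get_identifier ("format"),
                             get_identifier ("__format__"))

   returns true, while two unrelated identifiers return false.  */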
4967
4968 /* Compare two attributes for their value identity. Return true if the
4969 attribute values are known to be equal; otherwise return false. */
4970
4971 bool
4972 attribute_value_equal (const_tree attr1, const_tree attr2)
4973 {
4974 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4975 return true;
4976
4977 if (TREE_VALUE (attr1) != NULL_TREE
4978 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4979 && TREE_VALUE (attr2) != NULL_TREE
4980 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4981 {
4982 /* Handle attribute format. */
4983 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4984 {
4985 attr1 = TREE_VALUE (attr1);
4986 attr2 = TREE_VALUE (attr2);
4987 /* Compare the archetypes (printf/scanf/strftime/...). */
4988 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4989 TREE_VALUE (attr2)))
4990 return false;
4991 /* Archetypes are the same. Compare the rest. */
4992 return (simple_cst_list_equal (TREE_CHAIN (attr1),
4993 TREE_CHAIN (attr2)) == 1);
4994 }
4995 return (simple_cst_list_equal (TREE_VALUE (attr1),
4996 TREE_VALUE (attr2)) == 1);
4997 }
4998
4999 if ((flag_openmp || flag_openmp_simd)
5000 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5001 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5002 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5003 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5004 TREE_VALUE (attr2));
5005
5006 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5007 }
5008
5009 /* Return 0 if the attributes for two types are incompatible, 1 if they
5010 are compatible, and 2 if they are nearly compatible (which causes a
5011 warning to be generated). */
5012 int
5013 comp_type_attributes (const_tree type1, const_tree type2)
5014 {
5015 const_tree a1 = TYPE_ATTRIBUTES (type1);
5016 const_tree a2 = TYPE_ATTRIBUTES (type2);
5017 const_tree a;
5018
5019 if (a1 == a2)
5020 return 1;
5021 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5022 {
5023 const struct attribute_spec *as;
5024 const_tree attr;
5025
5026 as = lookup_attribute_spec (get_attribute_name (a));
5027 if (!as || as->affects_type_identity == false)
5028 continue;
5029
5030 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5031 if (!attr || !attribute_value_equal (a, attr))
5032 break;
5033 }
5034 if (!a)
5035 {
5036 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5037 {
5038 const struct attribute_spec *as;
5039
5040 as = lookup_attribute_spec (get_attribute_name (a));
5041 if (!as || as->affects_type_identity == false)
5042 continue;
5043
5044 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5045 break;
5046 /* We don't need to compare trees again, as we did this
5047 already in the first loop. */
5048 }
5049 /* All identity-affecting attributes are equal, so there
5050 is no need to call the target hook for comparison. */
5051 if (!a)
5052 return 1;
5053 }
5054 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5055 return 0;
5056 /* As some type combinations - like default calling-convention - might
5057 be compatible, we have to call the target hook to get the final result. */
5058 return targetm.comp_type_attributes (type1, type2);
5059 }
5060
5061 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5062 is ATTRIBUTE.
5063
5064 Record such modified types already made so we don't make duplicates. */
5065
5066 tree
5067 build_type_attribute_variant (tree ttype, tree attribute)
5068 {
5069 return build_type_attribute_qual_variant (ttype, attribute,
5070 TYPE_QUALS (ttype));
5071 }
5072
5073
5074 /* Reset the expression *EXPR_P, a size or position.
5075
5076 ??? We could reset all non-constant sizes or positions. But it's cheap
5077 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5078
5079 We need to reset self-referential sizes or positions because they cannot
5080 be gimplified and thus can contain a CALL_EXPR after the gimplification
5081 is finished, which will run afoul of LTO streaming. And they need to be
5082 reset to something essentially dummy but not constant, so as to preserve
5083 the properties of the object they are attached to. */
5084
5085 static inline void
5086 free_lang_data_in_one_sizepos (tree *expr_p)
5087 {
5088 tree expr = *expr_p;
5089 if (CONTAINS_PLACEHOLDER_P (expr))
5090 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5091 }
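/* Editorial illustration, not part of the original sources; FIELD is a
   hypothetical FIELD_DECL whose DECL_SIZE is self-referential, i.e.
   contains a PLACEHOLDER_EXPR.  The call

     free_lang_data_in_one_sizepos (&DECL_SIZE (field));

   replaces the whole size expression with a bare PLACEHOLDER_EXPR of the
   same type, which is non-constant but trivially streamable.  */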
5092
5093
5094 /* Reset all the fields in a binfo node BINFO. We only keep
5095 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5096
5097 static void
5098 free_lang_data_in_binfo (tree binfo)
5099 {
5100 unsigned i;
5101 tree t;
5102
5103 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5104
5105 BINFO_VIRTUALS (binfo) = NULL_TREE;
5106 BINFO_BASE_ACCESSES (binfo) = NULL;
5107 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5108 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5109
5110 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5111 free_lang_data_in_binfo (t);
5112 }
5113
5114
5115 /* Reset all language specific information still present in TYPE. */
5116
5117 static void
5118 free_lang_data_in_type (tree type)
5119 {
5120 gcc_assert (TYPE_P (type));
5121
5122 /* Give the FE a chance to remove its own data first. */
5123 lang_hooks.free_lang_data (type);
5124
5125 TREE_LANG_FLAG_0 (type) = 0;
5126 TREE_LANG_FLAG_1 (type) = 0;
5127 TREE_LANG_FLAG_2 (type) = 0;
5128 TREE_LANG_FLAG_3 (type) = 0;
5129 TREE_LANG_FLAG_4 (type) = 0;
5130 TREE_LANG_FLAG_5 (type) = 0;
5131 TREE_LANG_FLAG_6 (type) = 0;
5132
5133 if (TREE_CODE (type) == FUNCTION_TYPE)
5134 {
5135 /* Remove the const and volatile qualifiers from arguments. The
5136 C++ front end removes them, but the C front end does not,
5137 leading to false ODR violation errors when merging two
5138 instances of the same function signature compiled by
5139 different front ends. */
5140 tree p;
5141
5142 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5143 {
5144 tree arg_type = TREE_VALUE (p);
5145
5146 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5147 {
5148 int quals = TYPE_QUALS (arg_type)
5149 & ~TYPE_QUAL_CONST
5150 & ~TYPE_QUAL_VOLATILE;
5151 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5152 free_lang_data_in_type (TREE_VALUE (p));
5153 }
5154 /* C++ FE uses TREE_PURPOSE to store initial values. */
5155 TREE_PURPOSE (p) = NULL;
5156 }
5157 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5158 TYPE_MINVAL (type) = NULL;
5159 }
5160 if (TREE_CODE (type) == METHOD_TYPE)
5161 {
5162 tree p;
5163
5164 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5165 {
5166 /* C++ FE uses TREE_PURPOSE to store initial values. */
5167 TREE_PURPOSE (p) = NULL;
5168 }
5169 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5170 TYPE_MINVAL (type) = NULL;
5171 }
5172
5173 /* Remove members that are not actually FIELD_DECLs from the field
5174 list of an aggregate. These occur in C++. */
5175 if (RECORD_OR_UNION_TYPE_P (type))
5176 {
5177 tree prev, member;
5178
5179 /* Note that TYPE_FIELDS can be shared across distinct
5180 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5181 to be removed, we cannot set its TREE_CHAIN to NULL.
5182 Otherwise, we would not be able to find all the other fields
5183 in the other instances of this TREE_TYPE.
5184
5185 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5186 prev = NULL_TREE;
5187 member = TYPE_FIELDS (type);
5188 while (member)
5189 {
5190 if (TREE_CODE (member) == FIELD_DECL
5191 || TREE_CODE (member) == TYPE_DECL)
5192 {
5193 if (prev)
5194 TREE_CHAIN (prev) = member;
5195 else
5196 TYPE_FIELDS (type) = member;
5197 prev = member;
5198 }
5199
5200 member = TREE_CHAIN (member);
5201 }
5202
5203 if (prev)
5204 TREE_CHAIN (prev) = NULL_TREE;
5205 else
5206 TYPE_FIELDS (type) = NULL_TREE;
5207
5208 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5209 and dangle the pointer from time to time. */
5210 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5211 TYPE_VFIELD (type) = NULL_TREE;
5212
5213 /* Remove TYPE_METHODS list. While it would be nice to keep it
5214 to enable ODR warnings about different method lists, doing so
5215 seems to increase the size of the streamed LTO data impractically.
5216 Keep the information that TYPE_METHODS was non-NULL. This is used
5217 by function.c and the pretty printers. */
5218 if (TYPE_METHODS (type))
5219 TYPE_METHODS (type) = error_mark_node;
5220 if (TYPE_BINFO (type))
5221 {
5222 free_lang_data_in_binfo (TYPE_BINFO (type));
5223 /* We need to preserve link to bases and virtual table for all
5224 polymorphic types to make devirtualization machinery working.
5225 Debug output cares only about bases, but output also
5226 virtual table pointers so merging of -fdevirtualize and
5227 -fno-devirtualize units is easier. */
5228 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5229 || !flag_devirtualize)
5230 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5231 && !BINFO_VTABLE (TYPE_BINFO (type)))
5232 || debug_info_level != DINFO_LEVEL_NONE))
5233 TYPE_BINFO (type) = NULL;
5234 }
5235 }
5236 else
5237 {
5238 /* For non-aggregate types, clear out the language slot (which
5239 overloads TYPE_BINFO). */
5240 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5241
5242 if (INTEGRAL_TYPE_P (type)
5243 || SCALAR_FLOAT_TYPE_P (type)
5244 || FIXED_POINT_TYPE_P (type))
5245 {
5246 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5247 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5248 }
5249 }
5250
5251 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5252 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5253
5254 if (TYPE_CONTEXT (type)
5255 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5256 {
5257 tree ctx = TYPE_CONTEXT (type);
5258 do
5259 {
5260 ctx = BLOCK_SUPERCONTEXT (ctx);
5261 }
5262 while (ctx && TREE_CODE (ctx) == BLOCK);
5263 TYPE_CONTEXT (type) = ctx;
5264 }
5265 }
5266
5267
5268 /* Return true if DECL may need an assembler name to be set. */
5269
5270 static inline bool
5271 need_assembler_name_p (tree decl)
5272 {
5273 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5274 Rule merging. This makes type_odr_p return true on those types during
5275 LTO and, by comparing the mangled names, we can tell which types are
5276 intended to be equivalent across compilation units.
5277
5278 We do not store names of type_in_anonymous_namespace_p.
5279
5280 Record, union and enumeration types have linkage that allows us
5281 to check type_in_anonymous_namespace_p. We do not mangle compound types
5282 that can always be compared structurally.
5283
5284 Similarly for builtin types, we compare properties of their main variant.
5285 A special case is integer types, where mangling does distinguish
5286 char/signed char/unsigned char etc. Storing names for these lets
5287 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5288 See cp/mangle.c:write_builtin_type for details. */
5289
5290 if (flag_lto_odr_type_mering
5291 && TREE_CODE (decl) == TYPE_DECL
5292 && DECL_NAME (decl)
5293 && decl == TYPE_NAME (TREE_TYPE (decl))
5294 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5295 && (type_with_linkage_p (TREE_TYPE (decl))
5296 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5297 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5298 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5299 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5300 if (TREE_CODE (decl) != FUNCTION_DECL
5301 && TREE_CODE (decl) != VAR_DECL)
5302 return false;
5303
5304 /* If DECL already has its assembler name set, it does not need a
5305 new one. */
5306 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5307 || DECL_ASSEMBLER_NAME_SET_P (decl))
5308 return false;
5309
5310 /* Abstract decls do not need an assembler name. */
5311 if (DECL_ABSTRACT_P (decl))
5312 return false;
5313
5314 /* For VAR_DECLs, only static, public and external symbols need an
5315 assembler name. */
5316 if (TREE_CODE (decl) == VAR_DECL
5317 && !TREE_STATIC (decl)
5318 && !TREE_PUBLIC (decl)
5319 && !DECL_EXTERNAL (decl))
5320 return false;
5321
5322 if (TREE_CODE (decl) == FUNCTION_DECL)
5323 {
5324 /* Do not set assembler name on builtins. Allow RTL expansion to
5325 decide whether to expand inline or via a regular call. */
5326 if (DECL_BUILT_IN (decl)
5327 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5328 return false;
5329
5330 /* Functions represented in the callgraph need an assembler name. */
5331 if (cgraph_node::get (decl) != NULL)
5332 return true;
5333
5334 /* Unused and not public functions don't need an assembler name. */
5335 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5336 return false;
5337 }
5338
5339 return true;
5340 }
5341
5342
5343 /* Reset all language specific information still present in symbol
5344 DECL. */
5345
5346 static void
5347 free_lang_data_in_decl (tree decl)
5348 {
5349 gcc_assert (DECL_P (decl));
5350
5351 /* Give the FE a chance to remove its own data first. */
5352 lang_hooks.free_lang_data (decl);
5353
5354 TREE_LANG_FLAG_0 (decl) = 0;
5355 TREE_LANG_FLAG_1 (decl) = 0;
5356 TREE_LANG_FLAG_2 (decl) = 0;
5357 TREE_LANG_FLAG_3 (decl) = 0;
5358 TREE_LANG_FLAG_4 (decl) = 0;
5359 TREE_LANG_FLAG_5 (decl) = 0;
5360 TREE_LANG_FLAG_6 (decl) = 0;
5361
5362 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5363 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5364 if (TREE_CODE (decl) == FIELD_DECL)
5365 {
5366 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5367 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5368 DECL_QUALIFIER (decl) = NULL_TREE;
5369 }
5370
5371 if (TREE_CODE (decl) == FUNCTION_DECL)
5372 {
5373 struct cgraph_node *node;
5374 if (!(node = cgraph_node::get (decl))
5375 || (!node->definition && !node->clones))
5376 {
5377 if (node)
5378 node->release_body ();
5379 else
5380 {
5381 release_function_body (decl);
5382 DECL_ARGUMENTS (decl) = NULL;
5383 DECL_RESULT (decl) = NULL;
5384 DECL_INITIAL (decl) = error_mark_node;
5385 }
5386 }
5387 if (gimple_has_body_p (decl))
5388 {
5389 tree t;
5390
5391 /* If DECL has a gimple body, then the context for its
5392 arguments must be DECL. Otherwise, it doesn't really
5393 matter, as we will not be emitting any code for DECL. In
5394 general, there may be other instances of DECL created by
5395 the front end and since PARM_DECLs are generally shared,
5396 their DECL_CONTEXT changes as the replicas of DECL are
5397 created. The only time where DECL_CONTEXT is important
5398 is for the FUNCTION_DECLs that have a gimple body (since
5399 the PARM_DECL will be used in the function's body). */
5400 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5401 DECL_CONTEXT (t) = decl;
5402 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5403 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5404 = target_option_default_node;
5405 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5406 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5407 = optimization_default_node;
5408 }
5409
5410 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5411 At this point, it is not needed anymore. */
5412 DECL_SAVED_TREE (decl) = NULL_TREE;
5413
5414 /* Clear the abstract origin if it refers to a method. Otherwise
5415 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5416 origin will not be output correctly. */
5417 if (DECL_ABSTRACT_ORIGIN (decl)
5418 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5419 && RECORD_OR_UNION_TYPE_P
5420 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5421 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5422
5423 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5424 DECL_VINDEX referring to itself into a vtable slot number as it
5425 should. Happens with functions that are copied and then forgotten
5426 about. Just clear it, it won't matter anymore. */
5427 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5428 DECL_VINDEX (decl) = NULL_TREE;
5429 }
5430 else if (TREE_CODE (decl) == VAR_DECL)
5431 {
5432 if ((DECL_EXTERNAL (decl)
5433 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5434 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5435 DECL_INITIAL (decl) = NULL_TREE;
5436 }
5437 else if (TREE_CODE (decl) == TYPE_DECL
5438 || TREE_CODE (decl) == FIELD_DECL)
5439 DECL_INITIAL (decl) = NULL_TREE;
5440 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5441 && DECL_INITIAL (decl)
5442 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5443 {
5444 /* Strip builtins from the translation-unit BLOCK. We still have targets
5445 without builtin_decl_explicit support and also builtins are shared
5446 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5447 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5448 while (*nextp)
5449 {
5450 tree var = *nextp;
5451 if (TREE_CODE (var) == FUNCTION_DECL
5452 && DECL_BUILT_IN (var))
5453 *nextp = TREE_CHAIN (var);
5454 else
5455 nextp = &TREE_CHAIN (var);
5456 }
5457 }
5458 }
5459
5460
5461 /* Data used when collecting DECLs and TYPEs for language data removal. */
5462
5463 struct free_lang_data_d
5464 {
5465 /* Worklist to avoid excessive recursion. */
5466 vec<tree> worklist;
5467
5468 /* Set of traversed objects. Used to avoid duplicate visits. */
5469 hash_set<tree> *pset;
5470
5471 /* Array of symbols to process with free_lang_data_in_decl. */
5472 vec<tree> decls;
5473
5474 /* Array of types to process with free_lang_data_in_type. */
5475 vec<tree> types;
5476 };
5477
5478
5479 /* Save all language fields needed to generate proper debug information
5480 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5481
5482 static void
5483 save_debug_info_for_decl (tree t)
5484 {
5485 /*struct saved_debug_info_d *sdi;*/
5486
5487 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5488
5489 /* FIXME. Partial implementation for saving debug info removed. */
5490 }
5491
5492
5493 /* Save all language fields needed to generate proper debug information
5494 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5495
5496 static void
5497 save_debug_info_for_type (tree t)
5498 {
5499 /*struct saved_debug_info_d *sdi;*/
5500
5501 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5502
5503 /* FIXME. Partial implementation for saving debug info removed. */
5504 }
5505
5506
5507 /* Add type or decl T to one of the list of tree nodes that need their
5508 language data removed. The lists are held inside FLD. */
5509
5510 static void
5511 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5512 {
5513 if (DECL_P (t))
5514 {
5515 fld->decls.safe_push (t);
5516 if (debug_info_level > DINFO_LEVEL_TERSE)
5517 save_debug_info_for_decl (t);
5518 }
5519 else if (TYPE_P (t))
5520 {
5521 fld->types.safe_push (t);
5522 if (debug_info_level > DINFO_LEVEL_TERSE)
5523 save_debug_info_for_type (t);
5524 }
5525 else
5526 gcc_unreachable ();
5527 }
5528
5529 /* Push tree node T into FLD->WORKLIST. */
5530
5531 static inline void
5532 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5533 {
5534 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5535 fld->worklist.safe_push (t);
5536 }
5537
5538
5539 /* Operand callback helper for free_lang_data_in_node. *TP is the
5540 subtree operand being considered. */
5541
5542 static tree
5543 find_decls_types_r (tree *tp, int *ws, void *data)
5544 {
5545 tree t = *tp;
5546 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5547
5548 if (TREE_CODE (t) == TREE_LIST)
5549 return NULL_TREE;
5550
5551 /* Language specific nodes will be removed, so there is no need
5552 to gather anything under them. */
5553 if (is_lang_specific (t))
5554 {
5555 *ws = 0;
5556 return NULL_TREE;
5557 }
5558
5559 if (DECL_P (t))
5560 {
5561 /* Note that walk_tree does not traverse every possible field in
5562 decls, so we have to do our own traversals here. */
5563 add_tree_to_fld_list (t, fld);
5564
5565 fld_worklist_push (DECL_NAME (t), fld);
5566 fld_worklist_push (DECL_CONTEXT (t), fld);
5567 fld_worklist_push (DECL_SIZE (t), fld);
5568 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5569
5570 /* We are going to remove everything under DECL_INITIAL for
5571 TYPE_DECLs. No point walking them. */
5572 if (TREE_CODE (t) != TYPE_DECL)
5573 fld_worklist_push (DECL_INITIAL (t), fld);
5574
5575 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5576 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5577
5578 if (TREE_CODE (t) == FUNCTION_DECL)
5579 {
5580 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5581 fld_worklist_push (DECL_RESULT (t), fld);
5582 }
5583 else if (TREE_CODE (t) == TYPE_DECL)
5584 {
5585 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5586 }
5587 else if (TREE_CODE (t) == FIELD_DECL)
5588 {
5589 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5590 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5591 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5592 fld_worklist_push (DECL_FCONTEXT (t), fld);
5593 }
5594
5595 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5596 && DECL_HAS_VALUE_EXPR_P (t))
5597 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5598
5599 if (TREE_CODE (t) != FIELD_DECL
5600 && TREE_CODE (t) != TYPE_DECL)
5601 fld_worklist_push (TREE_CHAIN (t), fld);
5602 *ws = 0;
5603 }
5604 else if (TYPE_P (t))
5605 {
5606 /* Note that walk_tree does not traverse every possible field in
5607 types, so we have to do our own traversals here. */
5608 add_tree_to_fld_list (t, fld);
5609
5610 if (!RECORD_OR_UNION_TYPE_P (t))
5611 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5612 fld_worklist_push (TYPE_SIZE (t), fld);
5613 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5614 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5615 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5616 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5617 fld_worklist_push (TYPE_NAME (t), fld);
5618 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5619 them and thus do not want to reach unused pointer types
5620 this way. */
5621 if (!POINTER_TYPE_P (t))
5622 fld_worklist_push (TYPE_MINVAL (t), fld);
5623 if (!RECORD_OR_UNION_TYPE_P (t))
5624 fld_worklist_push (TYPE_MAXVAL (t), fld);
5625 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5626 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5627 do not want to reach unused variants this way. */
5628 if (TYPE_CONTEXT (t))
5629 {
5630 tree ctx = TYPE_CONTEXT (t);
5631 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5632 So push that instead. */
5633 while (ctx && TREE_CODE (ctx) == BLOCK)
5634 ctx = BLOCK_SUPERCONTEXT (ctx);
5635 fld_worklist_push (ctx, fld);
5636 }
5637 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5638 want to reach unused types this way. */
5639
5640 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5641 {
5642 unsigned i;
5643 tree tem;
5644 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5645 fld_worklist_push (TREE_TYPE (tem), fld);
5646 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5647 if (tem
5648 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5649 && TREE_CODE (tem) == TREE_LIST)
5650 do
5651 {
5652 fld_worklist_push (TREE_VALUE (tem), fld);
5653 tem = TREE_CHAIN (tem);
5654 }
5655 while (tem);
5656 }
5657 if (RECORD_OR_UNION_TYPE_P (t))
5658 {
5659 tree tem;
5660 /* Push all TYPE_FIELDS - interesting and uninteresting entries
5661 can be interleaved. */
5662 tem = TYPE_FIELDS (t);
5663 while (tem)
5664 {
5665 if (TREE_CODE (tem) == FIELD_DECL
5666 || TREE_CODE (tem) == TYPE_DECL)
5667 fld_worklist_push (tem, fld);
5668 tem = TREE_CHAIN (tem);
5669 }
5670 }
5671
5672 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5673 *ws = 0;
5674 }
5675 else if (TREE_CODE (t) == BLOCK)
5676 {
5677 tree tem;
5678 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5679 fld_worklist_push (tem, fld);
5680 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5681 fld_worklist_push (tem, fld);
5682 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5683 }
5684
5685 if (TREE_CODE (t) != IDENTIFIER_NODE
5686 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5687 fld_worklist_push (TREE_TYPE (t), fld);
5688
5689 return NULL_TREE;
5690 }
5691
5692
5693 /* Find decls and types in T. */
5694
5695 static void
5696 find_decls_types (tree t, struct free_lang_data_d *fld)
5697 {
5698 while (1)
5699 {
5700 if (!fld->pset->contains (t))
5701 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5702 if (fld->worklist.is_empty ())
5703 break;
5704 t = fld->worklist.pop ();
5705 }
5706 }
5707
5708 /* Translate all the types in LIST with the corresponding runtime
5709 types. */
5710
5711 static tree
5712 get_eh_types_for_runtime (tree list)
5713 {
5714 tree head, prev;
5715
5716 if (list == NULL_TREE)
5717 return NULL_TREE;
5718
5719 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5720 prev = head;
5721 list = TREE_CHAIN (list);
5722 while (list)
5723 {
5724 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5725 TREE_CHAIN (prev) = n;
5726 prev = TREE_CHAIN (prev);
5727 list = TREE_CHAIN (list);
5728 }
5729
5730 return head;
5731 }
5732
5733
5734 /* Find decls and types referenced in EH region R and store them in
5735 FLD->DECLS and FLD->TYPES. */
5736
5737 static void
5738 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5739 {
5740 switch (r->type)
5741 {
5742 case ERT_CLEANUP:
5743 break;
5744
5745 case ERT_TRY:
5746 {
5747 eh_catch c;
5748
5749 /* The types referenced in each catch must first be changed to the
5750 EH types used at runtime. This removes references to FE types
5751 in the region. */
5752 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5753 {
5754 c->type_list = get_eh_types_for_runtime (c->type_list);
5755 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5756 }
5757 }
5758 break;
5759
5760 case ERT_ALLOWED_EXCEPTIONS:
5761 r->u.allowed.type_list
5762 = get_eh_types_for_runtime (r->u.allowed.type_list);
5763 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5764 break;
5765
5766 case ERT_MUST_NOT_THROW:
5767 walk_tree (&r->u.must_not_throw.failure_decl,
5768 find_decls_types_r, fld, fld->pset);
5769 break;
5770 }
5771 }
5772
5773
5774 /* Find decls and types referenced in cgraph node N and store them in
5775 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5776 look for *every* kind of DECL and TYPE node reachable from N,
5777 including those embedded inside types and decls (i.e., TYPE_DECLs,
5778 NAMESPACE_DECLs, etc). */
5779
5780 static void
5781 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5782 {
5783 basic_block bb;
5784 struct function *fn;
5785 unsigned ix;
5786 tree t;
5787
5788 find_decls_types (n->decl, fld);
5789
5790 if (!gimple_has_body_p (n->decl))
5791 return;
5792
5793 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5794
5795 fn = DECL_STRUCT_FUNCTION (n->decl);
5796
5797 /* Traverse locals. */
5798 FOR_EACH_LOCAL_DECL (fn, ix, t)
5799 find_decls_types (t, fld);
5800
5801 /* Traverse EH regions in FN. */
5802 {
5803 eh_region r;
5804 FOR_ALL_EH_REGION_FN (r, fn)
5805 find_decls_types_in_eh_region (r, fld);
5806 }
5807
5808 /* Traverse every statement in FN. */
5809 FOR_EACH_BB_FN (bb, fn)
5810 {
5811 gphi_iterator psi;
5812 gimple_stmt_iterator si;
5813 unsigned i;
5814
5815 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5816 {
5817 gphi *phi = psi.phi ();
5818
5819 for (i = 0; i < gimple_phi_num_args (phi); i++)
5820 {
5821 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5822 find_decls_types (*arg_p, fld);
5823 }
5824 }
5825
5826 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5827 {
5828 gimple *stmt = gsi_stmt (si);
5829
5830 if (is_gimple_call (stmt))
5831 find_decls_types (gimple_call_fntype (stmt), fld);
5832
5833 for (i = 0; i < gimple_num_ops (stmt); i++)
5834 {
5835 tree arg = gimple_op (stmt, i);
5836 find_decls_types (arg, fld);
5837 }
5838 }
5839 }
5840 }
5841
5842
5843 /* Find decls and types referenced in varpool node N and store them in
5844 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5845 look for *every* kind of DECL and TYPE node reachable from N,
5846 including those embedded inside types and decls (i.e., TYPE_DECLs,
5847 NAMESPACE_DECLs, etc). */
5848
5849 static void
5850 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5851 {
5852 find_decls_types (v->decl, fld);
5853 }
5854
5855 /* If T needs an assembler name, have one created for it. */
5856
5857 void
5858 assign_assembler_name_if_neeeded (tree t)
5859 {
5860 if (need_assembler_name_p (t))
5861 {
5862 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5863 diagnostics that use input_location to show locus
5864 information. The problem here is that, at this point,
5865 input_location is generally anchored to the end of the file
5866 (since the parser is long gone), so we don't have a good
5867 position to pin it to.
5868
5869 To alleviate this problem, this uses the location of T's
5870 declaration. Examples of this are
5871 testsuite/g++.dg/template/cond2.C and
5872 testsuite/g++.dg/template/pr35240.C. */
5873 location_t saved_location = input_location;
5874 input_location = DECL_SOURCE_LOCATION (t);
5875
5876 decl_assembler_name (t);
5877
5878 input_location = saved_location;
5879 }
5880 }
5881
5882
5883 /* Free language specific information for every operand and expression
5884 in every node of the call graph. This process operates in three stages:
5885
5886 1- Every callgraph node and varpool node is traversed looking for
5887 decls and types embedded in them. This is a more exhaustive
5888 search than that done by find_referenced_vars, because it will
5889 also collect individual fields, decls embedded in types, etc.
5890
5891 2- All the decls found are sent to free_lang_data_in_decl.
5892
5893 3- All the types found are sent to free_lang_data_in_type.
5894
5895 The ordering between decls and types is important because
5896 free_lang_data_in_decl sets assembler names, which includes
5897 mangling. So types cannot be freed up until assembler names have
5898 been set up. */
5899
5900 static void
5901 free_lang_data_in_cgraph (void)
5902 {
5903 struct cgraph_node *n;
5904 varpool_node *v;
5905 struct free_lang_data_d fld;
5906 tree t;
5907 unsigned i;
5908 alias_pair *p;
5909
5910 /* Initialize sets and arrays to store referenced decls and types. */
5911 fld.pset = new hash_set<tree>;
5912 fld.worklist.create (0);
5913 fld.decls.create (100);
5914 fld.types.create (100);
5915
5916 /* Find decls and types in the body of every function in the callgraph. */
5917 FOR_EACH_FUNCTION (n)
5918 find_decls_types_in_node (n, &fld);
5919
5920 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5921 find_decls_types (p->decl, &fld);
5922
5923 /* Find decls and types in every varpool symbol. */
5924 FOR_EACH_VARIABLE (v)
5925 find_decls_types_in_var (v, &fld);
5926
5927 /* Set the assembler name on every decl found. We need to do this
5928 now because free_lang_data_in_decl will invalidate data needed
5929 for mangling, which would break mangling of interdependent decls. */
5930 FOR_EACH_VEC_ELT (fld.decls, i, t)
5931 assign_assembler_name_if_neeeded (t);
5932
5933 /* Traverse every decl found freeing its language data. */
5934 FOR_EACH_VEC_ELT (fld.decls, i, t)
5935 free_lang_data_in_decl (t);
5936
5937 /* Traverse every type found freeing its language data. */
5938 FOR_EACH_VEC_ELT (fld.types, i, t)
5939 free_lang_data_in_type (t);
5940 #ifdef ENABLE_CHECKING
5941 FOR_EACH_VEC_ELT (fld.types, i, t)
5942 verify_type (t);
5943 #endif
5944
5945 delete fld.pset;
5946 fld.worklist.release ();
5947 fld.decls.release ();
5948 fld.types.release ();
5949 }
5950
5951
5952 /* Free resources that are used by the front end but are no longer needed once it is done. */
5953
5954 static unsigned
5955 free_lang_data (void)
5956 {
5957 unsigned i;
5958
5959 /* If we are the LTO frontend we have freed lang-specific data already. */
5960 if (in_lto_p
5961 || (!flag_generate_lto && !flag_generate_offload))
5962 return 0;
5963
5964 /* Allocate and assign alias sets to the standard integer types
5965 while the slots still hold the nodes the way the front ends generated them. */
5966 for (i = 0; i < itk_none; ++i)
5967 if (integer_types[i])
5968 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5969
5970 /* Traverse the IL resetting language specific information for
5971 operands, expressions, etc. */
5972 free_lang_data_in_cgraph ();
5973
5974 /* Create gimple variants for common types. */
5975 ptrdiff_type_node = integer_type_node;
5976 fileptr_type_node = ptr_type_node;
5977
5978 /* Reset some langhooks. Do not reset types_compatible_p, it may
5979 still be used indirectly via the get_alias_set langhook. */
5980 lang_hooks.dwarf_name = lhd_dwarf_name;
5981 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5982 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5983
5984 /* We do not want the default decl_assembler_name implementation.
5985 Rather, once everything is fixed, we want a wrapper around it that
5986 asserts all non-local symbols already got their assembler name and
5987 that only produces assembler names for local symbols. Or, better,
5988 make sure we never call decl_assembler_name on local symbols and
5989 devise a separate, middle-end private scheme for it. */
5990
5991 /* Reset diagnostic machinery. */
5992 tree_diagnostics_defaults (global_dc);
5993
5994 return 0;
5995 }
5996
5997
5998 namespace {
5999
6000 const pass_data pass_data_ipa_free_lang_data =
6001 {
6002 SIMPLE_IPA_PASS, /* type */
6003 "*free_lang_data", /* name */
6004 OPTGROUP_NONE, /* optinfo_flags */
6005 TV_IPA_FREE_LANG_DATA, /* tv_id */
6006 0, /* properties_required */
6007 0, /* properties_provided */
6008 0, /* properties_destroyed */
6009 0, /* todo_flags_start */
6010 0, /* todo_flags_finish */
6011 };
6012
6013 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6014 {
6015 public:
6016 pass_ipa_free_lang_data (gcc::context *ctxt)
6017 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6018 {}
6019
6020 /* opt_pass methods: */
6021 virtual unsigned int execute (function *) { return free_lang_data (); }
6022
6023 }; // class pass_ipa_free_lang_data
6024
6025 } // anon namespace
6026
6027 simple_ipa_opt_pass *
6028 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6029 {
6030 return new pass_ipa_free_lang_data (ctxt);
6031 }
6032
6033 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6034 ATTR_NAME. Also used internally by remove_attribute(). */
6035 bool
6036 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6037 {
6038 size_t ident_len = IDENTIFIER_LENGTH (ident);
6039
6040 if (ident_len == attr_len)
6041 {
6042 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6043 return true;
6044 }
6045 else if (ident_len == attr_len + 4)
6046 {
6047 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
6048 '__text__'. */
6049 const char *p = IDENTIFIER_POINTER (ident);
6050 if (p[0] == '_' && p[1] == '_'
6051 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6052 && strncmp (attr_name, p + 2, attr_len) == 0)
6053 return true;
6054 }
6055
6056 return false;
6057 }
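
/* Illustrative examples of the matching above (a sketch, using the real
   get_identifier interface):

     private_is_attribute_p ("packed", 6, get_identifier ("packed"))
       => true
     private_is_attribute_p ("packed", 6, get_identifier ("__packed__"))
       => true
     private_is_attribute_p ("packed", 6, get_identifier ("aligned"))
       => false

   Callers normally reach this through the is_attribute_p wrapper in
   tree.h, which supplies strlen (ATTR_NAME) itself.  */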
6058
6059 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6060 of ATTR_NAME, and LIST is not NULL_TREE. */
6061 tree
6062 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6063 {
6064 while (list)
6065 {
6066 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6067
6068 if (ident_len == attr_len)
6069 {
6070 if (!strcmp (attr_name,
6071 IDENTIFIER_POINTER (get_attribute_name (list))))
6072 break;
6073 }
6074 /* TODO: If we made sure that attributes were stored in the
6075 canonical form without '__...__' (i.e., as in 'text' as opposed
6076 to '__text__') then we could avoid the following case. */
6077 else if (ident_len == attr_len + 4)
6078 {
6079 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6080 if (p[0] == '_' && p[1] == '_'
6081 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6082 && strncmp (attr_name, p + 2, attr_len) == 0)
6083 break;
6084 }
6085 list = TREE_CHAIN (list);
6086 }
6087
6088 return list;
6089 }
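
/* Usage sketch for the function above, with FNDECL standing for some
   function declaration of interest:

     tree attrs = DECL_ATTRIBUTES (fndecl);
     if (attrs
         && private_lookup_attribute ("noinline", strlen ("noinline"), attrs))
       ... the decl carries 'noinline' or '__noinline__' ...

   In practice callers go through the lookup_attribute wrapper in tree.h,
   which handles the empty-list case before calling this function.  */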
6090
6091 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6092 return a pointer to the first list element whose attribute name
6093 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6094 '__text__'). */
6095
6096 tree
6097 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6098 tree list)
6099 {
6100 while (list)
6101 {
6102 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6103
6104 if (attr_len > ident_len)
6105 {
6106 list = TREE_CHAIN (list);
6107 continue;
6108 }
6109
6110 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6111
6112 if (strncmp (attr_name, p, attr_len) == 0)
6113 break;
6114
6115 /* TODO: If we made sure that attributes were stored in the
6116 canonical form without '__...__' (i.e., as in 'text' as opposed
6117 to '__text__') then we could avoid the following case. */
6118 if (p[0] == '_' && p[1] == '_'
6119 && strncmp (attr_name, p + 2, attr_len) == 0)
6120 break;
6121
6122 list = TREE_CHAIN (list);
6123 }
6124
6125 return list;
6126 }
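
/* Rough example of the prefix matching above, with FNDECL a placeholder
   declaration:

     private_lookup_attribute_by_prefix ("omp declare",
                                         strlen ("omp declare"),
                                         DECL_ATTRIBUTES (fndecl))

   returns the first list element whose attribute name starts with
   "omp declare" (e.g. "omp declare simd"), also accepting the
   '__...__' spelling of the name.  */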
6127
6128
6129 /* A variant of lookup_attribute() that can be used with an identifier
6130 as the first argument, and where the identifier can be either
6131 'text' or '__text__'.
6132
6133 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6134 return a pointer to the attribute's list element if the attribute
6135 is part of the list, or NULL_TREE if not found. If the attribute
6136 appears more than once, this only returns the first occurrence; the
6137 TREE_CHAIN of the return value should be passed back in if further
6138 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6139 can be in the form 'text' or '__text__'. */
6140 static tree
6141 lookup_ident_attribute (tree attr_identifier, tree list)
6142 {
6143 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6144
6145 while (list)
6146 {
6147 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6148 == IDENTIFIER_NODE);
6149
6150 if (cmp_attrib_identifiers (attr_identifier,
6151 get_attribute_name (list)))
6152 /* Found it. */
6153 break;
6154 list = TREE_CHAIN (list);
6155 }
6156
6157 return list;
6158 }
6159
6160 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6161 modified list. */
6162
6163 tree
6164 remove_attribute (const char *attr_name, tree list)
6165 {
6166 tree *p;
6167 size_t attr_len = strlen (attr_name);
6168
6169 gcc_checking_assert (attr_name[0] != '_');
6170
6171 for (p = &list; *p; )
6172 {
6173 tree l = *p;
6174 /* TODO: If we were storing attributes in normalized form, here
6175 we could use a simple strcmp(). */
6176 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6177 *p = TREE_CHAIN (l);
6178 else
6179 p = &TREE_CHAIN (l);
6180 }
6181
6182 return list;
6183 }
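
/* Usage sketch: remove_attribute returns the (possibly shortened) list,
   so the result must be stored back, e.g.

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   with DECL standing for some declaration.  Per the assert above,
   ATTR_NAME must be given in its plain 'text' spelling.  */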
6184
6185 /* Return an attribute list that is the union of A1 and A2. */
6186
6187 tree
6188 merge_attributes (tree a1, tree a2)
6189 {
6190 tree attributes;
6191
6192 /* Either one unset? Take the set one. */
6193
6194 if ((attributes = a1) == 0)
6195 attributes = a2;
6196
6197 /* One that completely contains the other? Take it. */
6198
6199 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6200 {
6201 if (attribute_list_contained (a2, a1))
6202 attributes = a2;
6203 else
6204 {
6205 /* Pick the longest list, and hang on the other list. */
6206
6207 if (list_length (a1) < list_length (a2))
6208 attributes = a2, a2 = a1;
6209
6210 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6211 {
6212 tree a;
6213 for (a = lookup_ident_attribute (get_attribute_name (a2),
6214 attributes);
6215 a != NULL_TREE && !attribute_value_equal (a, a2);
6216 a = lookup_ident_attribute (get_attribute_name (a2),
6217 TREE_CHAIN (a)))
6218 ;
6219 if (a == NULL_TREE)
6220 {
6221 a1 = copy_node (a2);
6222 TREE_CHAIN (a1) = attributes;
6223 attributes = a1;
6224 }
6225 }
6226 }
6227 }
6228 return attributes;
6229 }
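
/* A small worked example of the union above, by attribute name only:
   merging the lists {aligned, packed} and {packed, unused} yields a list
   containing aligned, packed and unused, where entries that already have
   an equal value (per attribute_value_equal) are not duplicated.  */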
6230
6231 /* Given types T1 and T2, merge their attributes and return
6232 the result. */
6233
6234 tree
6235 merge_type_attributes (tree t1, tree t2)
6236 {
6237 return merge_attributes (TYPE_ATTRIBUTES (t1),
6238 TYPE_ATTRIBUTES (t2));
6239 }
6240
6241 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6242 the result. */
6243
6244 tree
6245 merge_decl_attributes (tree olddecl, tree newdecl)
6246 {
6247 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6248 DECL_ATTRIBUTES (newdecl));
6249 }
6250
6251 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6252
6253 /* Specialization of merge_decl_attributes for various Windows targets.
6254
6255 This handles the following situation:
6256
6257 __declspec (dllimport) int foo;
6258 int foo;
6259
6260 The second instance of `foo' nullifies the dllimport. */
6261
6262 tree
6263 merge_dllimport_decl_attributes (tree old, tree new_tree)
6264 {
6265 tree a;
6266 int delete_dllimport_p = 1;
6267
6268 /* What we need to do here is remove from `old' dllimport if it doesn't
6269 appear in `new'. dllimport behaves like extern: if a declaration is
6270 marked dllimport and a definition appears later, then the object
6271 is not dllimport'd. We also remove a `new' dllimport if the old list
6272 contains dllexport: dllexport always overrides dllimport, regardless
6273 of the order of declaration. */
6274 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6275 delete_dllimport_p = 0;
6276 else if (DECL_DLLIMPORT_P (new_tree)
6277 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6278 {
6279 DECL_DLLIMPORT_P (new_tree) = 0;
6280 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6281 "dllimport ignored", new_tree);
6282 }
6283 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6284 {
6285 /* Warn about overriding a symbol that has already been used, e.g.:
6286 extern int __attribute__ ((dllimport)) foo;
6287 int* bar () {return &foo;}
6288 int foo;
6289 */
6290 if (TREE_USED (old))
6291 {
6292 warning (0, "%q+D redeclared without dllimport attribute "
6293 "after being referenced with dll linkage", new_tree);
6294 /* If we have used a variable's address with dllimport linkage,
6295 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6296 decl may already have had TREE_CONSTANT computed.
6297 We still remove the attribute so that assembler code refers
6298 to '&foo' rather than '_imp__foo'. */
6299 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6300 DECL_DLLIMPORT_P (new_tree) = 1;
6301 }
6302
6303 /* Let an inline definition silently override the external reference,
6304 but otherwise warn about attribute inconsistency. */
6305 else if (TREE_CODE (new_tree) == VAR_DECL
6306 || !DECL_DECLARED_INLINE_P (new_tree))
6307 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6308 "previous dllimport ignored", new_tree);
6309 }
6310 else
6311 delete_dllimport_p = 0;
6312
6313 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6314
6315 if (delete_dllimport_p)
6316 a = remove_attribute ("dllimport", a);
6317
6318 return a;
6319 }
6320
6321 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6322 struct attribute_spec.handler. */
6323
6324 tree
6325 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6326 bool *no_add_attrs)
6327 {
6328 tree node = *pnode;
6329 bool is_dllimport;
6330
6331 /* These attributes may apply to structure and union types being created,
6332 but otherwise should pass to the declaration involved. */
6333 if (!DECL_P (node))
6334 {
6335 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6336 | (int) ATTR_FLAG_ARRAY_NEXT))
6337 {
6338 *no_add_attrs = true;
6339 return tree_cons (name, args, NULL_TREE);
6340 }
6341 if (TREE_CODE (node) == RECORD_TYPE
6342 || TREE_CODE (node) == UNION_TYPE)
6343 {
6344 node = TYPE_NAME (node);
6345 if (!node)
6346 return NULL_TREE;
6347 }
6348 else
6349 {
6350 warning (OPT_Wattributes, "%qE attribute ignored",
6351 name);
6352 *no_add_attrs = true;
6353 return NULL_TREE;
6354 }
6355 }
6356
6357 if (TREE_CODE (node) != FUNCTION_DECL
6358 && TREE_CODE (node) != VAR_DECL
6359 && TREE_CODE (node) != TYPE_DECL)
6360 {
6361 *no_add_attrs = true;
6362 warning (OPT_Wattributes, "%qE attribute ignored",
6363 name);
6364 return NULL_TREE;
6365 }
6366
6367 if (TREE_CODE (node) == TYPE_DECL
6368 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6369 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6370 {
6371 *no_add_attrs = true;
6372 warning (OPT_Wattributes, "%qE attribute ignored",
6373 name);
6374 return NULL_TREE;
6375 }
6376
6377 is_dllimport = is_attribute_p ("dllimport", name);
6378
6379 /* Report error on dllimport ambiguities seen now before they cause
6380 any damage. */
6381 if (is_dllimport)
6382 {
6383 /* Honor any target-specific overrides. */
6384 if (!targetm.valid_dllimport_attribute_p (node))
6385 *no_add_attrs = true;
6386
6387 else if (TREE_CODE (node) == FUNCTION_DECL
6388 && DECL_DECLARED_INLINE_P (node))
6389 {
6390 warning (OPT_Wattributes, "inline function %q+D declared as "
6391 "dllimport: attribute ignored", node);
6392 *no_add_attrs = true;
6393 }
6394 /* Like MS, treat definition of dllimported variables and
6395 non-inlined functions on declaration as syntax errors. */
6396 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6397 {
6398 error ("function %q+D definition is marked dllimport", node);
6399 *no_add_attrs = true;
6400 }
6401
6402 else if (TREE_CODE (node) == VAR_DECL)
6403 {
6404 if (DECL_INITIAL (node))
6405 {
6406 error ("variable %q+D definition is marked dllimport",
6407 node);
6408 *no_add_attrs = true;
6409 }
6410
6411 /* `extern' needn't be specified with dllimport.
6412 Specify `extern' now and hope for the best. Sigh. */
6413 DECL_EXTERNAL (node) = 1;
6414 /* Also, implicitly give global scope to dllimport'd variables
6415 declared within a function, unless declared static. */
6416 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6417 TREE_PUBLIC (node) = 1;
6418 }
6419
6420 if (*no_add_attrs == false)
6421 DECL_DLLIMPORT_P (node) = 1;
6422 }
6423 else if (TREE_CODE (node) == FUNCTION_DECL
6424 && DECL_DECLARED_INLINE_P (node)
6425 && flag_keep_inline_dllexport)
6426 /* An exported function, even if inline, must be emitted. */
6427 DECL_EXTERNAL (node) = 0;
6428
6429 /* Report error if symbol is not accessible at global scope. */
6430 if (!TREE_PUBLIC (node)
6431 && (TREE_CODE (node) == VAR_DECL
6432 || TREE_CODE (node) == FUNCTION_DECL))
6433 {
6434 error ("external linkage required for symbol %q+D because of "
6435 "%qE attribute", node, name);
6436 *no_add_attrs = true;
6437 }
6438
6439 /* A dllexport'd entity must have default visibility so that other
6440 program units (shared libraries or the main executable) can see
6441 it. A dllimport'd entity must have default visibility so that
6442 the linker knows that undefined references within this program
6443 unit can be resolved by the dynamic linker. */
6444 if (!*no_add_attrs)
6445 {
6446 if (DECL_VISIBILITY_SPECIFIED (node)
6447 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6448 error ("%qE implies default visibility, but %qD has already "
6449 "been declared with a different visibility",
6450 name, node);
6451 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6452 DECL_VISIBILITY_SPECIFIED (node) = 1;
6453 }
6454
6455 return NULL_TREE;
6456 }
6457
6458 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6459 \f
6460 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6461 of the various TYPE_QUAL values. */
6462
6463 static void
6464 set_type_quals (tree type, int type_quals)
6465 {
6466 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6467 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6468 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6469 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6470 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6471 }
6472
6473 /* Returns true iff unqualified CAND and BASE are equivalent. */
6474
6475 bool
6476 check_base_type (const_tree cand, const_tree base)
6477 {
6478 return (TYPE_NAME (cand) == TYPE_NAME (base)
6479 /* Apparently this is needed for Objective-C. */
6480 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6481 /* Check alignment. */
6482 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6483 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6484 TYPE_ATTRIBUTES (base)));
6485 }
6486
6487 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6488
6489 bool
6490 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6491 {
6492 return (TYPE_QUALS (cand) == type_quals
6493 && check_base_type (cand, base));
6494 }
6495
6496 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6497
6498 static bool
6499 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6500 {
6501 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6502 && TYPE_NAME (cand) == TYPE_NAME (base)
6503 /* Apparently this is needed for Objective-C. */
6504 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6505 /* Check alignment. */
6506 && TYPE_ALIGN (cand) == align
6507 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6508 TYPE_ATTRIBUTES (base)));
6509 }
6510
6511 /* This function checks to see if TYPE matches the size of one of the
6512 built-in atomic types, and returns that core atomic type. */
6513
6514 static tree
6515 find_atomic_core_type (tree type)
6516 {
6517 tree base_atomic_type;
6518
6519 /* Only handle complete types. */
6520 if (TYPE_SIZE (type) == NULL_TREE)
6521 return NULL_TREE;
6522
6523 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6524 switch (type_size)
6525 {
6526 case 8:
6527 base_atomic_type = atomicQI_type_node;
6528 break;
6529
6530 case 16:
6531 base_atomic_type = atomicHI_type_node;
6532 break;
6533
6534 case 32:
6535 base_atomic_type = atomicSI_type_node;
6536 break;
6537
6538 case 64:
6539 base_atomic_type = atomicDI_type_node;
6540 break;
6541
6542 case 128:
6543 base_atomic_type = atomicTI_type_node;
6544 break;
6545
6546 default:
6547 base_atomic_type = NULL_TREE;
6548 }
6549
6550 return base_atomic_type;
6551 }
6552
6553 /* Return a version of the TYPE, qualified as indicated by the
6554 TYPE_QUALS, if one exists. If no qualified version exists yet,
6555 return NULL_TREE. */
6556
6557 tree
6558 get_qualified_type (tree type, int type_quals)
6559 {
6560 tree t;
6561
6562 if (TYPE_QUALS (type) == type_quals)
6563 return type;
6564
6565 /* Search the chain of variants to see if there is already one there just
6566 like the one we need to have. If so, use that existing one. We must
6567 preserve the TYPE_NAME, since there is code that depends on this. */
6568 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6569 if (check_qualified_type (t, type, type_quals))
6570 return t;
6571
6572 return NULL_TREE;
6573 }
6574
6575 /* Like get_qualified_type, but creates the type if it does not
6576 exist. This function never returns NULL_TREE. */
6577
6578 tree
6579 build_qualified_type (tree type, int type_quals)
6580 {
6581 tree t;
6582
6583 /* See if we already have the appropriate qualified variant. */
6584 t = get_qualified_type (type, type_quals);
6585
6586 /* If not, build it. */
6587 if (!t)
6588 {
6589 t = build_variant_type_copy (type);
6590 set_type_quals (t, type_quals);
6591
6592 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6593 {
6594 /* See if this object can map to a basic atomic type. */
6595 tree atomic_type = find_atomic_core_type (type);
6596 if (atomic_type)
6597 {
6598 /* Ensure the alignment of this type is compatible with
6599 the required alignment of the atomic type. */
6600 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6601 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6602 }
6603 }
6604
6605 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6606 /* Propagate structural equality. */
6607 SET_TYPE_STRUCTURAL_EQUALITY (t);
6608 else if (TYPE_CANONICAL (type) != type)
6609 /* Build the underlying canonical type, since it is different
6610 from TYPE. */
6611 {
6612 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6613 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6614 }
6615 else
6616 /* T is its own canonical type. */
6617 TYPE_CANONICAL (t) = t;
6618
6619 }
6620
6621 return t;
6622 }
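
/* Usage sketch: a 'const volatile int' type node can be obtained with

     tree cv_int
       = build_qualified_type (integer_type_node,
                               TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   Calling this again with the same arguments returns the same node,
   because the variant chain of integer_type_node is searched first via
   get_qualified_type.  */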
6623
6624 /* Create a variant of TYPE with alignment ALIGN. */
6625
6626 tree
6627 build_aligned_type (tree type, unsigned int align)
6628 {
6629 tree t;
6630
6631 if (TYPE_PACKED (type)
6632 || TYPE_ALIGN (type) == align)
6633 return type;
6634
6635 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6636 if (check_aligned_type (t, type, align))
6637 return t;
6638
6639 t = build_variant_type_copy (type);
6640 TYPE_ALIGN (t) = align;
6641
6642 return t;
6643 }
6644
6645 /* Create a new distinct copy of TYPE. The new type is made its own
6646 MAIN_VARIANT. If TYPE requires structural equality checks, the
6647 resulting type requires structural equality checks; otherwise, its
6648 TYPE_CANONICAL points to itself. */
6649
6650 tree
6651 build_distinct_type_copy (tree type)
6652 {
6653 tree t = copy_node (type);
6654
6655 TYPE_POINTER_TO (t) = 0;
6656 TYPE_REFERENCE_TO (t) = 0;
6657
6658 /* Set the canonical type either to a new equivalence class, or
6659 propagate the need for structural equality checks. */
6660 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6661 SET_TYPE_STRUCTURAL_EQUALITY (t);
6662 else
6663 TYPE_CANONICAL (t) = t;
6664
6665 /* Make it its own variant. */
6666 TYPE_MAIN_VARIANT (t) = t;
6667 TYPE_NEXT_VARIANT (t) = 0;
6668
6669 /* We do not record methods in type copies or variants, so we
6670 do not need to keep them up to date when a new method
6671 is inserted. */
6672 if (RECORD_OR_UNION_TYPE_P (t))
6673 TYPE_METHODS (t) = NULL_TREE;
6674
6675 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6676 whose TREE_TYPE is not t. This can also happen in the Ada
6677 frontend when using subtypes. */
6678
6679 return t;
6680 }
6681
6682 /* Create a new variant of TYPE, equivalent but distinct. This is so
6683 the caller can modify it. TYPE_CANONICAL for the return type will
6684 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6685 are considered equal by the language itself (or that both types
6686 require structural equality checks). */
6687
6688 tree
6689 build_variant_type_copy (tree type)
6690 {
6691 tree t, m = TYPE_MAIN_VARIANT (type);
6692
6693 t = build_distinct_type_copy (type);
6694
6695 /* Since we're building a variant, assume that it is a non-semantic
6696 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6697 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6698
6699 /* Add the new type to the chain of variants of TYPE. */
6700 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6701 TYPE_NEXT_VARIANT (m) = t;
6702 TYPE_MAIN_VARIANT (t) = m;
6703
6704 return t;
6705 }
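
/* Rough contrast between the two copies above: for a variant copy V of T,
   TYPE_MAIN_VARIANT (V) == TYPE_MAIN_VARIANT (T), i.e. the middle end
   still treats V as the same type (useful for attaching different
   qualifiers, alignment or attributes).  For a distinct copy D,
   TYPE_MAIN_VARIANT (D) == D and D gets its own canonical type, so it is
   a genuinely different type that merely starts out with the same
   layout.  */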
6706 \f
6707 /* Return true if the from trees in both tree maps are equal. */
6708
6709 int
6710 tree_map_base_eq (const void *va, const void *vb)
6711 {
6712 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6713 *const b = (const struct tree_map_base *) vb;
6714 return (a->from == b->from);
6715 }
6716
6717 /* Hash a from tree in a tree_map_base. */
6718
6719 unsigned int
6720 tree_map_base_hash (const void *item)
6721 {
6722 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6723 }
6724
6725 /* Return true if this tree map structure is marked for garbage collection
6726 purposes. We simply return true if the from tree is marked, so that this
6727 structure goes away when the from tree goes away. */
6728
6729 int
6730 tree_map_base_marked_p (const void *p)
6731 {
6732 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6733 }
6734
6735 /* Hash a from tree in a tree_map. */
6736
6737 unsigned int
6738 tree_map_hash (const void *item)
6739 {
6740 return (((const struct tree_map *) item)->hash);
6741 }
6742
6743 /* Hash a from tree in a tree_decl_map. */
6744
6745 unsigned int
6746 tree_decl_map_hash (const void *item)
6747 {
6748 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6749 }
6750
6751 /* Return the initialization priority for DECL. */
6752
6753 priority_type
6754 decl_init_priority_lookup (tree decl)
6755 {
6756 symtab_node *snode = symtab_node::get (decl);
6757
6758 if (!snode)
6759 return DEFAULT_INIT_PRIORITY;
6760 return
6761 snode->get_init_priority ();
6762 }
6763
6764 /* Return the finalization priority for DECL. */
6765
6766 priority_type
6767 decl_fini_priority_lookup (tree decl)
6768 {
6769 cgraph_node *node = cgraph_node::get (decl);
6770
6771 if (!node)
6772 return DEFAULT_INIT_PRIORITY;
6773 return
6774 node->get_fini_priority ();
6775 }
6776
6777 /* Set the initialization priority for DECL to PRIORITY. */
6778
6779 void
6780 decl_init_priority_insert (tree decl, priority_type priority)
6781 {
6782 struct symtab_node *snode;
6783
6784 if (priority == DEFAULT_INIT_PRIORITY)
6785 {
6786 snode = symtab_node::get (decl);
6787 if (!snode)
6788 return;
6789 }
6790 else if (TREE_CODE (decl) == VAR_DECL)
6791 snode = varpool_node::get_create (decl);
6792 else
6793 snode = cgraph_node::get_create (decl);
6794 snode->set_init_priority (priority);
6795 }
6796
6797 /* Set the finalization priority for DECL to PRIORITY. */
6798
6799 void
6800 decl_fini_priority_insert (tree decl, priority_type priority)
6801 {
6802 struct cgraph_node *node;
6803
6804 if (priority == DEFAULT_INIT_PRIORITY)
6805 {
6806 node = cgraph_node::get (decl);
6807 if (!node)
6808 return;
6809 }
6810 else
6811 node = cgraph_node::get_create (decl);
6812 node->set_fini_priority (priority);
6813 }
6814
6815 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6816
6817 static void
6818 print_debug_expr_statistics (void)
6819 {
6820 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6821 (long) debug_expr_for_decl->size (),
6822 (long) debug_expr_for_decl->elements (),
6823 debug_expr_for_decl->collisions ());
6824 }
6825
6826 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6827
6828 static void
6829 print_value_expr_statistics (void)
6830 {
6831 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6832 (long) value_expr_for_decl->size (),
6833 (long) value_expr_for_decl->elements (),
6834 value_expr_for_decl->collisions ());
6835 }
6836
6837 /* Lookup a debug expression for FROM, and return it if we find one. */
6838
6839 tree
6840 decl_debug_expr_lookup (tree from)
6841 {
6842 struct tree_decl_map *h, in;
6843 in.base.from = from;
6844
6845 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6846 if (h)
6847 return h->to;
6848 return NULL_TREE;
6849 }
6850
6851 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6852
6853 void
6854 decl_debug_expr_insert (tree from, tree to)
6855 {
6856 struct tree_decl_map *h;
6857
6858 h = ggc_alloc<tree_decl_map> ();
6859 h->base.from = from;
6860 h->to = to;
6861 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6862 }
6863
6864 /* Lookup a value expression for FROM, and return it if we find one. */
6865
6866 tree
6867 decl_value_expr_lookup (tree from)
6868 {
6869 struct tree_decl_map *h, in;
6870 in.base.from = from;
6871
6872 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6873 if (h)
6874 return h->to;
6875 return NULL_TREE;
6876 }
6877
6878 /* Insert a mapping FROM->TO in the value expression hashtable. */
6879
6880 void
6881 decl_value_expr_insert (tree from, tree to)
6882 {
6883 struct tree_decl_map *h;
6884
6885 h = ggc_alloc<tree_decl_map> ();
6886 h->base.from = from;
6887 h->to = to;
6888 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6889 }
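
/* Usage sketch for the two tables above, with DECL and EXPR as
   placeholders:

     decl_value_expr_insert (decl, expr);
     DECL_HAS_VALUE_EXPR_P (decl) = 1;
     ...
     tree e = decl_value_expr_lookup (decl);

   after which E is EXPR again.  Most code uses the DECL_VALUE_EXPR and
   SET_DECL_VALUE_EXPR macros from tree.h together with the
   DECL_HAS_VALUE_EXPR_P flag rather than calling these functions
   directly.  */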
6890
6891 /* Lookup a vector of debug arguments for FROM, and return it if we
6892 find one. */
6893
6894 vec<tree, va_gc> **
6895 decl_debug_args_lookup (tree from)
6896 {
6897 struct tree_vec_map *h, in;
6898
6899 if (!DECL_HAS_DEBUG_ARGS_P (from))
6900 return NULL;
6901 gcc_checking_assert (debug_args_for_decl != NULL);
6902 in.base.from = from;
6903 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6904 if (h)
6905 return &h->to;
6906 return NULL;
6907 }
6908
6909 /* Insert a mapping FROM->empty vector of debug arguments in the value
6910 expression hashtable. */
6911
6912 vec<tree, va_gc> **
6913 decl_debug_args_insert (tree from)
6914 {
6915 struct tree_vec_map *h;
6916 tree_vec_map **loc;
6917
6918 if (DECL_HAS_DEBUG_ARGS_P (from))
6919 return decl_debug_args_lookup (from);
6920 if (debug_args_for_decl == NULL)
6921 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6922 h = ggc_alloc<tree_vec_map> ();
6923 h->base.from = from;
6924 h->to = NULL;
6925 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6926 *loc = h;
6927 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6928 return &h->to;
6929 }
6930
6931 /* Hashing of types so that we don't make duplicates.
6932 The entry point is `type_hash_canon'. */
6933
6934 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6935 with types in the TREE_VALUE slots), by adding the hash codes
6936 of the individual types. */
6937
6938 static void
6939 type_hash_list (const_tree list, inchash::hash &hstate)
6940 {
6941 const_tree tail;
6942
6943 for (tail = list; tail; tail = TREE_CHAIN (tail))
6944 if (TREE_VALUE (tail) != error_mark_node)
6945 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6946 }
6947
6948 /* These are the Hashtable callback functions. */
6949
6950 /* Returns true iff the types are equivalent. */
6951
6952 bool
6953 type_cache_hasher::equal (type_hash *a, type_hash *b)
6954 {
6955 /* First test the things that are the same for all types. */
6956 if (a->hash != b->hash
6957 || TREE_CODE (a->type) != TREE_CODE (b->type)
6958 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6959 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6960 TYPE_ATTRIBUTES (b->type))
6961 || (TREE_CODE (a->type) != COMPLEX_TYPE
6962 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6963 return 0;
6964
6965 /* Be careful about comparing arrays before and after the element type
6966 has been completed; don't compare TYPE_ALIGN unless both types are
6967 complete. */
6968 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6969 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6970 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6971 return 0;
6972
6973 switch (TREE_CODE (a->type))
6974 {
6975 case VOID_TYPE:
6976 case COMPLEX_TYPE:
6977 case POINTER_TYPE:
6978 case REFERENCE_TYPE:
6979 case NULLPTR_TYPE:
6980 return 1;
6981
6982 case VECTOR_TYPE:
6983 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6984
6985 case ENUMERAL_TYPE:
6986 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6987 && !(TYPE_VALUES (a->type)
6988 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6989 && TYPE_VALUES (b->type)
6990 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6991 && type_list_equal (TYPE_VALUES (a->type),
6992 TYPE_VALUES (b->type))))
6993 return 0;
6994
6995 /* ... fall through ... */
6996
6997 case INTEGER_TYPE:
6998 case REAL_TYPE:
6999 case BOOLEAN_TYPE:
7000 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7001 return false;
7002 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7003 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7004 TYPE_MAX_VALUE (b->type)))
7005 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7006 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7007 TYPE_MIN_VALUE (b->type))));
7008
7009 case FIXED_POINT_TYPE:
7010 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7011
7012 case OFFSET_TYPE:
7013 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7014
7015 case METHOD_TYPE:
7016 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7017 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7018 || (TYPE_ARG_TYPES (a->type)
7019 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7020 && TYPE_ARG_TYPES (b->type)
7021 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7022 && type_list_equal (TYPE_ARG_TYPES (a->type),
7023 TYPE_ARG_TYPES (b->type)))))
7024 break;
7025 return 0;
7026 case ARRAY_TYPE:
7027 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7028
7029 case RECORD_TYPE:
7030 case UNION_TYPE:
7031 case QUAL_UNION_TYPE:
7032 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7033 || (TYPE_FIELDS (a->type)
7034 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7035 && TYPE_FIELDS (b->type)
7036 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7037 && type_list_equal (TYPE_FIELDS (a->type),
7038 TYPE_FIELDS (b->type))));
7039
7040 case FUNCTION_TYPE:
7041 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7042 || (TYPE_ARG_TYPES (a->type)
7043 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7044 && TYPE_ARG_TYPES (b->type)
7045 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7046 && type_list_equal (TYPE_ARG_TYPES (a->type),
7047 TYPE_ARG_TYPES (b->type))))
7048 break;
7049 return 0;
7050
7051 default:
7052 return 0;
7053 }
7054
7055 if (lang_hooks.types.type_hash_eq != NULL)
7056 return lang_hooks.types.type_hash_eq (a->type, b->type);
7057
7058 return 1;
7059 }
7060
7061 /* Given TYPE, and HASHCODE its hash code, return the canonical
7062 object for an identical type if one already exists.
7063 Otherwise, return TYPE, and record it as the canonical object.
7064
7065 To use this function, first create a type of the sort you want.
7066 Then compute its hash code from the fields of the type that
7067 make it different from other similar types.
7068 Then call this function and use the value. */
7069
7070 tree
7071 type_hash_canon (unsigned int hashcode, tree type)
7072 {
7073 type_hash in;
7074 type_hash **loc;
7075
7076 /* The hash table only contains main variants, so ensure that's what we're
7077 being passed. */
7078 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7079
7080 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7081 must call that routine before comparing TYPE_ALIGNs. */
7082 layout_type (type);
7083
7084 in.hash = hashcode;
7085 in.type = type;
7086
7087 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7088 if (*loc)
7089 {
7090 tree t1 = ((type_hash *) *loc)->type;
7091 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7092 if (GATHER_STATISTICS)
7093 {
7094 tree_code_counts[(int) TREE_CODE (type)]--;
7095 tree_node_counts[(int) t_kind]--;
7096 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7097 }
7098 return t1;
7099 }
7100 else
7101 {
7102 struct type_hash *h;
7103
7104 h = ggc_alloc<type_hash> ();
7105 h->hash = hashcode;
7106 h->type = type;
7107 *loc = h;
7108
7109 return type;
7110 }
7111 }
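
/* A sketch of the protocol described above, along the lines of what
   build_function_type does: build the candidate node, hash the fields
   that distinguish it, then canonicalize (RETURN_TYPE and ARG_TYPES are
   placeholders here):

     tree t = make_node (FUNCTION_TYPE);
     TREE_TYPE (t) = return_type;
     TYPE_ARG_TYPES (t) = arg_types;
     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (return_type));
     type_hash_list (arg_types, hstate);
     t = type_hash_canon (hstate.end (), t);

   If an identical type was already interned, the freshly built node is
   dropped and the existing one is returned instead.  */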
7112
7113 static void
7114 print_type_hash_statistics (void)
7115 {
7116 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7117 (long) type_hash_table->size (),
7118 (long) type_hash_table->elements (),
7119 type_hash_table->collisions ());
7120 }
7121
7122 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7123 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7124 by adding the hash codes of the individual attributes. */
7125
7126 static void
7127 attribute_hash_list (const_tree list, inchash::hash &hstate)
7128 {
7129 const_tree tail;
7130
7131 for (tail = list; tail; tail = TREE_CHAIN (tail))
7132 /* ??? Do we want to add in TREE_VALUE too? */
7133 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7134 }
7135
7136 /* Given two lists of attributes, return true if list l2 is
7137 equivalent to l1. */
7138
7139 int
7140 attribute_list_equal (const_tree l1, const_tree l2)
7141 {
7142 if (l1 == l2)
7143 return 1;
7144
7145 return attribute_list_contained (l1, l2)
7146 && attribute_list_contained (l2, l1);
7147 }
7148
7149 /* Given two lists of attributes, return true if list L2 is
7150 completely contained within L1. */
7151 /* ??? This would be faster if attribute names were stored in a canonicalized
7152 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7153 must be used to show these elements are equivalent (which they are). */
7154 /* ??? It's not clear that attributes with arguments will always be handled
7155 correctly. */
7156
7157 int
7158 attribute_list_contained (const_tree l1, const_tree l2)
7159 {
7160 const_tree t1, t2;
7161
7162 /* First check the obvious, maybe the lists are identical. */
7163 if (l1 == l2)
7164 return 1;
7165
7166 /* Maybe the lists are similar. */
7167 for (t1 = l1, t2 = l2;
7168 t1 != 0 && t2 != 0
7169 && get_attribute_name (t1) == get_attribute_name (t2)
7170 && TREE_VALUE (t1) == TREE_VALUE (t2);
7171 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7172 ;
7173
7174 /* Maybe the lists are equal. */
7175 if (t1 == 0 && t2 == 0)
7176 return 1;
7177
7178 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7179 {
7180 const_tree attr;
7181 /* This CONST_CAST is okay because lookup_attribute does not
7182 modify its argument and the return value is assigned to a
7183 const_tree. */
7184 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7185 CONST_CAST_TREE (l1));
7186 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7187 attr = lookup_ident_attribute (get_attribute_name (t2),
7188 TREE_CHAIN (attr)))
7189 ;
7190
7191 if (attr == NULL_TREE)
7192 return 0;
7193 }
7194
7195 return 1;
7196 }
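
/* Small worked example of the containment test above: a list
   {aligned(8), packed, unused} contains {packed, aligned(8)} (order is
   irrelevant and values are compared with attribute_value_equal), but it
   does not contain {aligned(4)}.  */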
7197
7198 /* Given two lists of types
7199 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7200 return 1 if the lists contain the same types in the same order.
7201 Also, the TREE_PURPOSEs must match. */
7202
7203 int
7204 type_list_equal (const_tree l1, const_tree l2)
7205 {
7206 const_tree t1, t2;
7207
7208 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7209 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7210 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7211 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7212 && (TREE_TYPE (TREE_PURPOSE (t1))
7213 == TREE_TYPE (TREE_PURPOSE (t2))))))
7214 return 0;
7215
7216 return t1 == t2;
7217 }
7218
7219 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7220 given by TYPE. If the argument list accepts variable arguments,
7221 then this function counts only the ordinary arguments. */
7222
7223 int
7224 type_num_arguments (const_tree type)
7225 {
7226 int i = 0;
7227 tree t;
7228
7229 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7230 /* If the function does not take a variable number of arguments,
7231 the last element in the list will have type `void'. */
7232 if (VOID_TYPE_P (TREE_VALUE (t)))
7233 break;
7234 else
7235 ++i;
7236
7237 return i;
7238 }
7239
7240 /* Nonzero if integer constants T1 and T2
7241 represent the same constant value. */
7242
7243 int
7244 tree_int_cst_equal (const_tree t1, const_tree t2)
7245 {
7246 if (t1 == t2)
7247 return 1;
7248
7249 if (t1 == 0 || t2 == 0)
7250 return 0;
7251
7252 if (TREE_CODE (t1) == INTEGER_CST
7253 && TREE_CODE (t2) == INTEGER_CST
7254 && wi::to_widest (t1) == wi::to_widest (t2))
7255 return 1;
7256
7257 return 0;
7258 }
7259
7260 /* Return true if T is an INTEGER_CST whose numerical value (extended
7261 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7262
7263 bool
7264 tree_fits_shwi_p (const_tree t)
7265 {
7266 return (t != NULL_TREE
7267 && TREE_CODE (t) == INTEGER_CST
7268 && wi::fits_shwi_p (wi::to_widest (t)));
7269 }
7270
7271 /* Return true if T is an INTEGER_CST whose numerical value (extended
7272 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7273
7274 bool
7275 tree_fits_uhwi_p (const_tree t)
7276 {
7277 return (t != NULL_TREE
7278 && TREE_CODE (t) == INTEGER_CST
7279 && wi::fits_uhwi_p (wi::to_widest (t)));
7280 }
7281
7282 /* T is an INTEGER_CST whose numerical value (extended according to
7283 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7284 HOST_WIDE_INT. */
7285
7286 HOST_WIDE_INT
7287 tree_to_shwi (const_tree t)
7288 {
7289 gcc_assert (tree_fits_shwi_p (t));
7290 return TREE_INT_CST_LOW (t);
7291 }
7292
7293 /* T is an INTEGER_CST whose numerical value (extended according to
7294 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7295 HOST_WIDE_INT. */
7296
7297 unsigned HOST_WIDE_INT
7298 tree_to_uhwi (const_tree t)
7299 {
7300 gcc_assert (tree_fits_uhwi_p (t));
7301 return TREE_INT_CST_LOW (t);
7302 }
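
/* The usual idiom for the predicates and accessors above is to test
   before converting, for example:

     if (tree_fits_uhwi_p (TYPE_SIZE (type)))
       {
         unsigned HOST_WIDE_INT bits = tree_to_uhwi (TYPE_SIZE (type));
         ...
       }

   Calling tree_to_uhwi on a value that does not fit trips the assert
   above.  */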
7303
7304 /* Return the most significant (sign) bit of T. */
7305
7306 int
7307 tree_int_cst_sign_bit (const_tree t)
7308 {
7309 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7310
7311 return wi::extract_uhwi (t, bitno, 1);
7312 }
7313
7314 /* Return an indication of the sign of the integer constant T.
7315 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7316 Note that -1 will never be returned if T's type is unsigned. */
7317
7318 int
7319 tree_int_cst_sgn (const_tree t)
7320 {
7321 if (wi::eq_p (t, 0))
7322 return 0;
7323 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7324 return 1;
7325 else if (wi::neg_p (t))
7326 return -1;
7327 else
7328 return 1;
7329 }
7330
7331 /* Return the minimum number of bits needed to represent VALUE in a
7332 signed or unsigned type; SGN says which. */
7333
7334 unsigned int
7335 tree_int_cst_min_precision (tree value, signop sgn)
7336 {
7337 /* If the value is negative, compute its negative minus 1. The latter
7338 adjustment is because the absolute value of the largest negative value
7339 is one larger than the largest positive value. This is equivalent to
7340 a bit-wise negation, so use that operation instead. */
7341
7342 if (tree_int_cst_sgn (value) < 0)
7343 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7344
7345 /* Return the number of bits needed, taking into account the fact
7346 that we need one more bit for a signed than unsigned type.
7347 If VALUE is 0 or -1, the minimum precision is 1 no matter
7348 whether SGN is SIGNED or UNSIGNED. */
7349
7350 if (integer_zerop (value))
7351 return 1;
7352 else
7353 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7354 }
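
/* Worked example for the function above: for VALUE == -5 and SGN ==
   SIGNED, the bit-wise negation gives ~(-5) == 4, tree_floor_log2 of 4
   is 2, so the result is 2 + 1 + 1 == 4 bits, enough for the two's
   complement range [-8, 7] that contains -5.  */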
7355
7356 /* Return truthvalue of whether T1 is the same tree structure as T2.
7357 Return 1 if they are the same.
7358 Return 0 if they are understandably different.
7359 Return -1 if either contains tree structure not understood by
7360 this function. */
7361
7362 int
7363 simple_cst_equal (const_tree t1, const_tree t2)
7364 {
7365 enum tree_code code1, code2;
7366 int cmp;
7367 int i;
7368
7369 if (t1 == t2)
7370 return 1;
7371 if (t1 == 0 || t2 == 0)
7372 return 0;
7373
7374 code1 = TREE_CODE (t1);
7375 code2 = TREE_CODE (t2);
7376
7377 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7378 {
7379 if (CONVERT_EXPR_CODE_P (code2)
7380 || code2 == NON_LVALUE_EXPR)
7381 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7382 else
7383 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7384 }
7385
7386 else if (CONVERT_EXPR_CODE_P (code2)
7387 || code2 == NON_LVALUE_EXPR)
7388 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7389
7390 if (code1 != code2)
7391 return 0;
7392
7393 switch (code1)
7394 {
7395 case INTEGER_CST:
7396 return wi::to_widest (t1) == wi::to_widest (t2);
7397
7398 case REAL_CST:
7399 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7400
7401 case FIXED_CST:
7402 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7403
7404 case STRING_CST:
7405 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7406 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7407 TREE_STRING_LENGTH (t1)));
7408
7409 case CONSTRUCTOR:
7410 {
7411 unsigned HOST_WIDE_INT idx;
7412 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7413 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7414
7415 if (vec_safe_length (v1) != vec_safe_length (v2))
7416 return false;
7417
7418 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7419 /* ??? Should we handle also fields here? */
7420 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7421 return false;
7422 return true;
7423 }
7424
7425 case SAVE_EXPR:
7426 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7427
7428 case CALL_EXPR:
7429 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7430 if (cmp <= 0)
7431 return cmp;
7432 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7433 return 0;
7434 {
7435 const_tree arg1, arg2;
7436 const_call_expr_arg_iterator iter1, iter2;
7437 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7438 arg2 = first_const_call_expr_arg (t2, &iter2);
7439 arg1 && arg2;
7440 arg1 = next_const_call_expr_arg (&iter1),
7441 arg2 = next_const_call_expr_arg (&iter2))
7442 {
7443 cmp = simple_cst_equal (arg1, arg2);
7444 if (cmp <= 0)
7445 return cmp;
7446 }
7447 return arg1 == arg2;
7448 }
7449
7450 case TARGET_EXPR:
7451 /* Special case: if either target is an unallocated VAR_DECL,
7452 it means that it's going to be unified with whatever the
7453 TARGET_EXPR is really supposed to initialize, so treat it
7454 as being equivalent to anything. */
7455 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7456 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7457 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7458 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7459 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7460 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7461 cmp = 1;
7462 else
7463 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7464
7465 if (cmp <= 0)
7466 return cmp;
7467
7468 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7469
7470 case WITH_CLEANUP_EXPR:
7471 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7472 if (cmp <= 0)
7473 return cmp;
7474
7475 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7476
7477 case COMPONENT_REF:
7478 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7479 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7480
7481 return 0;
7482
7483 case VAR_DECL:
7484 case PARM_DECL:
7485 case CONST_DECL:
7486 case FUNCTION_DECL:
7487 return 0;
7488
7489 default:
7490 break;
7491 }
7492
7493 /* This general rule works for most tree codes. All exceptions should be
7494 handled above. If this is a language-specific tree code, we can't
7495 trust what might be in the operand, so say we don't know
7496 the situation. */
7497 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7498 return -1;
7499
7500 switch (TREE_CODE_CLASS (code1))
7501 {
7502 case tcc_unary:
7503 case tcc_binary:
7504 case tcc_comparison:
7505 case tcc_expression:
7506 case tcc_reference:
7507 case tcc_statement:
7508 cmp = 1;
7509 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7510 {
7511 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7512 if (cmp <= 0)
7513 return cmp;
7514 }
7515
7516 return cmp;
7517
7518 default:
7519 return -1;
7520 }
7521 }
7522
7523 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7524 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7525 than U, respectively. */
7526
7527 int
7528 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7529 {
7530 if (tree_int_cst_sgn (t) < 0)
7531 return -1;
7532 else if (!tree_fits_uhwi_p (t))
7533 return 1;
7534 else if (TREE_INT_CST_LOW (t) == u)
7535 return 0;
7536 else if (TREE_INT_CST_LOW (t) < u)
7537 return -1;
7538 else
7539 return 1;
7540 }
7541
7542 /* Return true if SIZE represents a constant size that is in bounds of
7543 what the middle-end and the backend accept (covering not more than
7544 half of the address-space). */
7545
7546 bool
7547 valid_constant_size_p (const_tree size)
7548 {
7549 if (! tree_fits_uhwi_p (size)
7550 || TREE_OVERFLOW (size)
7551 || tree_int_cst_sign_bit (size) != 0)
7552 return false;
7553 return true;
7554 }
7555
7556 /* Return the precision of the type, or for a complex or vector type the
7557 precision of the type of its elements. */
7558
7559 unsigned int
7560 element_precision (const_tree type)
7561 {
7562 if (!TYPE_P (type))
7563 type = TREE_TYPE (type);
7564 enum tree_code code = TREE_CODE (type);
7565 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7566 type = TREE_TYPE (type);
7567
7568 return TYPE_PRECISION (type);
7569 }
7570
7571 /* Return true if CODE represents an associative tree code. Otherwise
7572 return false. */
7573 bool
7574 associative_tree_code (enum tree_code code)
7575 {
7576 switch (code)
7577 {
7578 case BIT_IOR_EXPR:
7579 case BIT_AND_EXPR:
7580 case BIT_XOR_EXPR:
7581 case PLUS_EXPR:
7582 case MULT_EXPR:
7583 case MIN_EXPR:
7584 case MAX_EXPR:
7585 return true;
7586
7587 default:
7588 break;
7589 }
7590 return false;
7591 }
7592
7593 /* Return true if CODE represents a commutative tree code. Otherwise
7594 return false. */
7595 bool
7596 commutative_tree_code (enum tree_code code)
7597 {
7598 switch (code)
7599 {
7600 case PLUS_EXPR:
7601 case MULT_EXPR:
7602 case MULT_HIGHPART_EXPR:
7603 case MIN_EXPR:
7604 case MAX_EXPR:
7605 case BIT_IOR_EXPR:
7606 case BIT_XOR_EXPR:
7607 case BIT_AND_EXPR:
7608 case NE_EXPR:
7609 case EQ_EXPR:
7610 case UNORDERED_EXPR:
7611 case ORDERED_EXPR:
7612 case UNEQ_EXPR:
7613 case LTGT_EXPR:
7614 case TRUTH_AND_EXPR:
7615 case TRUTH_XOR_EXPR:
7616 case TRUTH_OR_EXPR:
7617 case WIDEN_MULT_EXPR:
7618 case VEC_WIDEN_MULT_HI_EXPR:
7619 case VEC_WIDEN_MULT_LO_EXPR:
7620 case VEC_WIDEN_MULT_EVEN_EXPR:
7621 case VEC_WIDEN_MULT_ODD_EXPR:
7622 return true;
7623
7624 default:
7625 break;
7626 }
7627 return false;
7628 }
7629
7630 /* Return true if CODE represents a ternary tree code for which the
7631 first two operands are commutative. Otherwise return false. */
7632 bool
7633 commutative_ternary_tree_code (enum tree_code code)
7634 {
7635 switch (code)
7636 {
7637 case WIDEN_MULT_PLUS_EXPR:
7638 case WIDEN_MULT_MINUS_EXPR:
7639 case DOT_PROD_EXPR:
7640 case FMA_EXPR:
7641 return true;
7642
7643 default:
7644 break;
7645 }
7646 return false;
7647 }
7648
7649 /* Returns true if CODE can overflow. */
7650
7651 bool
7652 operation_can_overflow (enum tree_code code)
7653 {
7654 switch (code)
7655 {
7656 case PLUS_EXPR:
7657 case MINUS_EXPR:
7658 case MULT_EXPR:
7659 case LSHIFT_EXPR:
7660 /* Can overflow in various ways. */
7661 return true;
7662 case TRUNC_DIV_EXPR:
7663 case EXACT_DIV_EXPR:
7664 case FLOOR_DIV_EXPR:
7665 case CEIL_DIV_EXPR:
7666 /* For INT_MIN / -1. */
7667 return true;
7668 case NEGATE_EXPR:
7669 case ABS_EXPR:
7670 /* For -INT_MIN. */
7671 return true;
7672 default:
7673 /* These operators cannot overflow. */
7674 return false;
7675 }
7676 }
7677
7678 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7679 -ftrapv doesn't generate trapping insns for CODE. */
7680
7681 bool
7682 operation_no_trapping_overflow (tree type, enum tree_code code)
7683 {
7684 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7685
7686 /* We don't generate instructions that trap on overflow for complex or vector
7687 types. */
7688 if (!INTEGRAL_TYPE_P (type))
7689 return true;
7690
7691 if (!TYPE_OVERFLOW_TRAPS (type))
7692 return true;
7693
7694 switch (code)
7695 {
7696 case PLUS_EXPR:
7697 case MINUS_EXPR:
7698 case MULT_EXPR:
7699 case NEGATE_EXPR:
7700 case ABS_EXPR:
7701 /* These operators can overflow, and -ftrapv generates trapping code for
7702 these. */
7703 return false;
7704 case TRUNC_DIV_EXPR:
7705 case EXACT_DIV_EXPR:
7706 case FLOOR_DIV_EXPR:
7707 case CEIL_DIV_EXPR:
7708 case LSHIFT_EXPR:
7709 /* These operators can overflow, but -ftrapv does not generate trapping
7710 code for these. */
7711 return true;
7712 default:
7713 /* These operators cannot overflow. */
7714 return true;
7715 }
7716 }
7717
7718 namespace inchash
7719 {
7720
7721 /* Generate a hash value for an expression. This can be used iteratively
7722 by passing a previous result as the HSTATE argument.
7723
7724 This function is intended to produce the same hash for expressions which
7725 would compare equal using operand_equal_p. */
7726 void
7727 add_expr (const_tree t, inchash::hash &hstate)
7728 {
7729 int i;
7730 enum tree_code code;
7731 enum tree_code_class tclass;
7732
7733 if (t == NULL_TREE)
7734 {
7735 hstate.merge_hash (0);
7736 return;
7737 }
7738
7739 code = TREE_CODE (t);
7740
7741 switch (code)
7742 {
7743 /* Alas, constants aren't shared, so we can't rely on pointer
7744 identity. */
7745 case VOID_CST:
7746 hstate.merge_hash (0);
7747 return;
7748 case INTEGER_CST:
7749 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7750 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7751 return;
7752 case REAL_CST:
7753 {
7754 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7755 hstate.merge_hash (val2);
7756 return;
7757 }
7758 case FIXED_CST:
7759 {
7760 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7761 hstate.merge_hash (val2);
7762 return;
7763 }
7764 case STRING_CST:
7765 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7766 return;
7767 case COMPLEX_CST:
7768 inchash::add_expr (TREE_REALPART (t), hstate);
7769 inchash::add_expr (TREE_IMAGPART (t), hstate);
7770 return;
7771 case VECTOR_CST:
7772 {
7773 unsigned i;
7774 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7775 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7776 return;
7777 }
7778 case SSA_NAME:
7779 /* We can just compare by pointer. */
7780 hstate.add_wide_int (SSA_NAME_VERSION (t));
7781 return;
7782 case PLACEHOLDER_EXPR:
7783 /* The node itself doesn't matter. */
7784 return;
7785 case TREE_LIST:
7786 /* A list of expressions, for a CALL_EXPR or as the elements of a
7787 VECTOR_CST. */
7788 for (; t; t = TREE_CHAIN (t))
7789 inchash::add_expr (TREE_VALUE (t), hstate);
7790 return;
7791 case CONSTRUCTOR:
7792 {
7793 unsigned HOST_WIDE_INT idx;
7794 tree field, value;
7795 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7796 {
7797 inchash::add_expr (field, hstate);
7798 inchash::add_expr (value, hstate);
7799 }
7800 return;
7801 }
7802 case FUNCTION_DECL:
7803 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7804 Otherwise nodes that compare equal according to operand_equal_p might
7805 get different hash codes. However, don't do this for machine specific
7806 or front end builtins, since the function code is overloaded in those
7807 cases. */
7808 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7809 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7810 {
7811 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7812 code = TREE_CODE (t);
7813 }
7814 /* FALL THROUGH */
7815 default:
7816 tclass = TREE_CODE_CLASS (code);
7817
7818 if (tclass == tcc_declaration)
7819 {
7820 /* DECLs have a unique ID. */
7821 hstate.add_wide_int (DECL_UID (t));
7822 }
7823 else
7824 {
7825 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7826
7827 hstate.add_object (code);
7828
7829 /* Don't hash the type, that can lead to having nodes which
7830 compare equal according to operand_equal_p, but which
7831 have different hash codes. */
7832 if (CONVERT_EXPR_CODE_P (code)
7833 || code == NON_LVALUE_EXPR)
7834 {
7835 /* Make sure to include signedness in the hash computation. */
7836 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7837 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7838 }
7839
7840 else if (commutative_tree_code (code))
7841 {
7842 /* It's a commutative expression. We want it to hash the same
7843 no matter in which order its operands appear. We do this by first
7844 hashing both operands and then rehashing based on the order of
7845 their independent hashes. */
7846 inchash::hash one, two;
7847 inchash::add_expr (TREE_OPERAND (t, 0), one);
7848 inchash::add_expr (TREE_OPERAND (t, 1), two);
7849 hstate.add_commutative (one, two);
7850 }
7851 else
7852 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7853 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7854 }
7855 return;
7856 }
7857 }
7858
7859 }
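
/* Editorial illustration (not part of GCC): a caller typically seeds an
   inchash::hash object, folds one or more expressions into it and reads
   the result back with end ().  Expressions that operand_equal_p would
   consider equal are intended to hash identically.  The example_* helper
   is hypothetical.  */
#if 0
static hashval_t
example_hash_expr (tree expr)
{
  inchash::hash hstate;
  inchash::add_expr (expr, hstate);
  return hstate.end ();
}
#endif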
7860
7861 /* Constructors for pointer, array and function types.
7862 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7863 constructed by language-dependent code, not here.) */
7864
7865 /* Construct, lay out and return the type of pointers to TO_TYPE with
7866 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7867 reference all of memory. If such a type has already been
7868 constructed, reuse it. */
7869
7870 tree
7871 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7872 bool can_alias_all)
7873 {
7874 tree t;
7875 bool could_alias = can_alias_all;
7876
7877 if (to_type == error_mark_node)
7878 return error_mark_node;
7879
7880 /* If the pointed-to type has the may_alias attribute set, force
7881 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7882 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7883 can_alias_all = true;
7884
7885 /* In some cases, languages will have things that aren't a POINTER_TYPE
7886 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7887 In that case, return that type without regard to the rest of our
7888 operands.
7889
7890 ??? This is a kludge, but consistent with the way this function has
7891 always operated and there doesn't seem to be a good way to avoid this
7892 at the moment. */
7893 if (TYPE_POINTER_TO (to_type) != 0
7894 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7895 return TYPE_POINTER_TO (to_type);
7896
7897 /* First, if we already have a type for pointers to TO_TYPE and it's
7898 the proper mode, use it. */
7899 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7900 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7901 return t;
7902
7903 t = make_node (POINTER_TYPE);
7904
7905 TREE_TYPE (t) = to_type;
7906 SET_TYPE_MODE (t, mode);
7907 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7908 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7909 TYPE_POINTER_TO (to_type) = t;
7910
7911 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7912 SET_TYPE_STRUCTURAL_EQUALITY (t);
7913 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7914 TYPE_CANONICAL (t)
7915 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7916 mode, false);
7917
7918 /* Lay out the type. This function has many callers that are concerned
7919 with expression-construction, and this simplifies them all. */
7920 layout_type (t);
7921
7922 return t;
7923 }
7924
7925 /* By default build pointers in ptr_mode. */
7926
7927 tree
7928 build_pointer_type (tree to_type)
7929 {
7930 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7931 : TYPE_ADDR_SPACE (to_type);
7932 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7933 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7934 }
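
/* Editorial illustration (not part of GCC): building "int *" and "int **".
   Repeated calls return the same shared node, located through the
   TYPE_POINTER_TO chain walked above.  The example_* helper is
   hypothetical.  */
#if 0
static tree
example_pointer_to_pointer_to_int (void)
{
  tree int_ptr = build_pointer_type (integer_type_node);  /* int *  */
  return build_pointer_type (int_ptr);                    /* int ** */
}
#endif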
7935
7936 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7937
7938 tree
7939 build_reference_type_for_mode (tree to_type, machine_mode mode,
7940 bool can_alias_all)
7941 {
7942 tree t;
7943 bool could_alias = can_alias_all;
7944
7945 if (to_type == error_mark_node)
7946 return error_mark_node;
7947
7948 /* If the pointed-to type has the may_alias attribute set, force
7949 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7950 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7951 can_alias_all = true;
7952
7953 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7954 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7955 In that case, return that type without regard to the rest of our
7956 operands.
7957
7958 ??? This is a kludge, but consistent with the way this function has
7959 always operated and there doesn't seem to be a good way to avoid this
7960 at the moment. */
7961 if (TYPE_REFERENCE_TO (to_type) != 0
7962 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7963 return TYPE_REFERENCE_TO (to_type);
7964
7965 /* First, if we already have a type for pointers to TO_TYPE and it's
7966 the proper mode, use it. */
7967 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7968 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7969 return t;
7970
7971 t = make_node (REFERENCE_TYPE);
7972
7973 TREE_TYPE (t) = to_type;
7974 SET_TYPE_MODE (t, mode);
7975 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7976 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7977 TYPE_REFERENCE_TO (to_type) = t;
7978
7979 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7980 SET_TYPE_STRUCTURAL_EQUALITY (t);
7981 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7982 TYPE_CANONICAL (t)
7983 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7984 mode, false);
7985
7986 layout_type (t);
7987
7988 return t;
7989 }
7990
7991
7992 /* Build the node for the type of references-to-TO_TYPE by default
7993 in ptr_mode. */
7994
7995 tree
7996 build_reference_type (tree to_type)
7997 {
7998 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7999 : TYPE_ADDR_SPACE (to_type);
8000 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8001 return build_reference_type_for_mode (to_type, pointer_mode, false);
8002 }
8003
8004 #define MAX_INT_CACHED_PREC \
8005 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8006 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8007
8008 /* Builds a signed or unsigned integer type of precision PRECISION.
8009 Used for C bitfields whose precision does not match that of
8010 built-in target types. */
8011 tree
8012 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8013 int unsignedp)
8014 {
8015 tree itype, ret;
8016
8017 if (unsignedp)
8018 unsignedp = MAX_INT_CACHED_PREC + 1;
8019
8020 if (precision <= MAX_INT_CACHED_PREC)
8021 {
8022 itype = nonstandard_integer_type_cache[precision + unsignedp];
8023 if (itype)
8024 return itype;
8025 }
8026
8027 itype = make_node (INTEGER_TYPE);
8028 TYPE_PRECISION (itype) = precision;
8029
8030 if (unsignedp)
8031 fixup_unsigned_type (itype);
8032 else
8033 fixup_signed_type (itype);
8034
8035 ret = itype;
8036 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8037 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8038 if (precision <= MAX_INT_CACHED_PREC)
8039 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8040
8041 return ret;
8042 }
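
/* Editorial illustration (not part of GCC): a 24-bit unsigned type such as
   a front end might need for the bit-field "unsigned x : 24;".  The
   example_* helper is hypothetical.  */
#if 0
static tree
example_uint24_type (void)
{
  return build_nonstandard_integer_type (24, /*unsignedp=*/1);
}
#endif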
8043
8044 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8045 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8046 is true, reuse such a type that has already been constructed. */
8047
8048 static tree
8049 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8050 {
8051 tree itype = make_node (INTEGER_TYPE);
8052 inchash::hash hstate;
8053
8054 TREE_TYPE (itype) = type;
8055
8056 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8057 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8058
8059 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8060 SET_TYPE_MODE (itype, TYPE_MODE (type));
8061 TYPE_SIZE (itype) = TYPE_SIZE (type);
8062 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8063 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
8064 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8065
8066 if (!shared)
8067 return itype;
8068
8069 if ((TYPE_MIN_VALUE (itype)
8070 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8071 || (TYPE_MAX_VALUE (itype)
8072 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8073 {
8074 /* Since we cannot reliably merge this type, we need to compare it using
8075 structural equality checks. */
8076 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8077 return itype;
8078 }
8079
8080 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8081 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8082 hstate.merge_hash (TYPE_HASH (type));
8083 itype = type_hash_canon (hstate.end (), itype);
8084
8085 return itype;
8086 }
8087
8088 /* Wrapper around build_range_type_1 with SHARED set to true. */
8089
8090 tree
8091 build_range_type (tree type, tree lowval, tree highval)
8092 {
8093 return build_range_type_1 (type, lowval, highval, true);
8094 }
8095
8096 /* Wrapper around build_range_type_1 with SHARED set to false. */
8097
8098 tree
8099 build_nonshared_range_type (tree type, tree lowval, tree highval)
8100 {
8101 return build_range_type_1 (type, lowval, highval, false);
8102 }
8103
8104 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8105 MAXVAL should be the maximum value in the domain
8106 (one less than the length of the array).
8107
8108 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8109 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8110 The limit exists because the result is a signed type and we don't handle
8111 sizes that use more than one HOST_WIDE_INT. */
8112
8113 tree
8114 build_index_type (tree maxval)
8115 {
8116 return build_range_type (sizetype, size_zero_node, maxval);
8117 }
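
/* Editorial illustration (not part of GCC): the domain of a ten-element
   array is the sizetype range [0, 9].  The example_* helper is
   hypothetical; size_int comes from tree.h.  */
#if 0
static tree
example_ten_element_domain (void)
{
  return build_index_type (size_int (9));
}
#endif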
8118
8119 /* Return true if the debug information for TYPE, a subtype, should be emitted
8120 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8121 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8122 debug info and doesn't reflect the source code. */
8123
8124 bool
8125 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8126 {
8127 tree base_type = TREE_TYPE (type), low, high;
8128
8129 /* Subrange types have a base type which is an integral type. */
8130 if (!INTEGRAL_TYPE_P (base_type))
8131 return false;
8132
8133 /* Get the real bounds of the subtype. */
8134 if (lang_hooks.types.get_subrange_bounds)
8135 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8136 else
8137 {
8138 low = TYPE_MIN_VALUE (type);
8139 high = TYPE_MAX_VALUE (type);
8140 }
8141
8142 /* If the type and its base type have the same representation and the same
8143 name, then the type is not a subrange but a copy of the base type. */
8144 if ((TREE_CODE (base_type) == INTEGER_TYPE
8145 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8146 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8147 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8148 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8149 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8150 return false;
8151
8152 if (lowval)
8153 *lowval = low;
8154 if (highval)
8155 *highval = high;
8156 return true;
8157 }
8158
8159 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8160 and number of elements specified by the range of values of INDEX_TYPE.
8161 If SHARED is true, reuse such a type that has already been constructed. */
8162
8163 static tree
8164 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8165 {
8166 tree t;
8167
8168 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8169 {
8170 error ("arrays of functions are not meaningful");
8171 elt_type = integer_type_node;
8172 }
8173
8174 t = make_node (ARRAY_TYPE);
8175 TREE_TYPE (t) = elt_type;
8176 TYPE_DOMAIN (t) = index_type;
8177 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8178 layout_type (t);
8179
8180 /* If the element type is incomplete at this point we get marked for
8181 structural equality. Do not record these types in the canonical
8182 type hashtable. */
8183 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8184 return t;
8185
8186 if (shared)
8187 {
8188 inchash::hash hstate;
8189 hstate.add_object (TYPE_HASH (elt_type));
8190 if (index_type)
8191 hstate.add_object (TYPE_HASH (index_type));
8192 t = type_hash_canon (hstate.end (), t);
8193 }
8194
8195 if (TYPE_CANONICAL (t) == t)
8196 {
8197 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8198 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8199 SET_TYPE_STRUCTURAL_EQUALITY (t);
8200 else if (TYPE_CANONICAL (elt_type) != elt_type
8201 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8202 TYPE_CANONICAL (t)
8203 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8204 index_type
8205 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8206 shared);
8207 }
8208
8209 return t;
8210 }
8211
8212 /* Wrapper around build_array_type_1 with SHARED set to true. */
8213
8214 tree
8215 build_array_type (tree elt_type, tree index_type)
8216 {
8217 return build_array_type_1 (elt_type, index_type, true);
8218 }
8219
8220 /* Wrapper around build_array_type_1 with SHARED set to false. */
8221
8222 tree
8223 build_nonshared_array_type (tree elt_type, tree index_type)
8224 {
8225 return build_array_type_1 (elt_type, index_type, false);
8226 }
8227
8228 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8229 sizetype. */
8230
8231 tree
8232 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8233 {
8234 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8235 }
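
/* Editorial illustration (not part of GCC): building "int[10]" directly
   from the element count.  The example_* helper is hypothetical.  */
#if 0
static tree
example_int_array_10 (void)
{
  /* Equivalent to build_array_type (integer_type_node,
     build_index_type (size_int (9))).  */
  return build_array_type_nelts (integer_type_node, 10);
}
#endif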
8236
8237 /* Strip ARRAY_TYPE wrappers from TYPE until a non-array element
8238 type is found, and return that element type. */
8239
8240 tree
8241 strip_array_types (tree type)
8242 {
8243 while (TREE_CODE (type) == ARRAY_TYPE)
8244 type = TREE_TYPE (type);
8245
8246 return type;
8247 }
8248
8249 /* Computes the canonical argument types from the argument type list
8250 ARGTYPES.
8251
8252 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8253 on entry to this function, or if any of the ARGTYPES are
8254 structural.
8255
8256 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8257 true on entry to this function, or if any of the ARGTYPES are
8258 non-canonical.
8259
8260 Returns a canonical argument list, which may be ARGTYPES when the
8261 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8262 true) or would not differ from ARGTYPES. */
8263
8264 static tree
8265 maybe_canonicalize_argtypes (tree argtypes,
8266 bool *any_structural_p,
8267 bool *any_noncanonical_p)
8268 {
8269 tree arg;
8270 bool any_noncanonical_argtypes_p = false;
8271
8272 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8273 {
8274 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8275 /* Fail gracefully by stating that the type is structural. */
8276 *any_structural_p = true;
8277 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8278 *any_structural_p = true;
8279 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8280 || TREE_PURPOSE (arg))
8281 /* If the argument has a default argument, we consider it
8282 non-canonical even though the type itself is canonical.
8283 That way, different variants of function and method types
8284 with default arguments will all point to the variant with
8285 no defaults as their canonical type. */
8286 any_noncanonical_argtypes_p = true;
8287 }
8288
8289 if (*any_structural_p)
8290 return argtypes;
8291
8292 if (any_noncanonical_argtypes_p)
8293 {
8294 /* Build the canonical list of argument types. */
8295 tree canon_argtypes = NULL_TREE;
8296 bool is_void = false;
8297
8298 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8299 {
8300 if (arg == void_list_node)
8301 is_void = true;
8302 else
8303 canon_argtypes = tree_cons (NULL_TREE,
8304 TYPE_CANONICAL (TREE_VALUE (arg)),
8305 canon_argtypes);
8306 }
8307
8308 canon_argtypes = nreverse (canon_argtypes);
8309 if (is_void)
8310 canon_argtypes = chainon (canon_argtypes, void_list_node);
8311
8312 /* There is a non-canonical type. */
8313 *any_noncanonical_p = true;
8314 return canon_argtypes;
8315 }
8316
8317 /* The canonical argument types are the same as ARGTYPES. */
8318 return argtypes;
8319 }
8320
8321 /* Construct, lay out and return
8322 the type of functions returning type VALUE_TYPE
8323 given arguments of types ARG_TYPES.
8324 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8325 are data type nodes for the arguments of the function.
8326 If such a type has already been constructed, reuse it. */
8327
8328 tree
8329 build_function_type (tree value_type, tree arg_types)
8330 {
8331 tree t;
8332 inchash::hash hstate;
8333 bool any_structural_p, any_noncanonical_p;
8334 tree canon_argtypes;
8335
8336 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8337 {
8338 error ("function return type cannot be function");
8339 value_type = integer_type_node;
8340 }
8341
8342 /* Make a node of the sort we want. */
8343 t = make_node (FUNCTION_TYPE);
8344 TREE_TYPE (t) = value_type;
8345 TYPE_ARG_TYPES (t) = arg_types;
8346
8347 /* If we already have such a type, use the old one. */
8348 hstate.add_object (TYPE_HASH (value_type));
8349 type_hash_list (arg_types, hstate);
8350 t = type_hash_canon (hstate.end (), t);
8351
8352 /* Set up the canonical type. */
8353 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8354 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8355 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8356 &any_structural_p,
8357 &any_noncanonical_p);
8358 if (any_structural_p)
8359 SET_TYPE_STRUCTURAL_EQUALITY (t);
8360 else if (any_noncanonical_p)
8361 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8362 canon_argtypes);
8363
8364 if (!COMPLETE_TYPE_P (t))
8365 layout_type (t);
8366 return t;
8367 }
8368
8369 /* Build a function type. The RETURN_TYPE is the type returned by the
8370 function. If VAARGS is set, no void_type_node is appended to the
8371 list. ARGP must always be terminated by a NULL_TREE. */
8372
8373 static tree
8374 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8375 {
8376 tree t, args, last;
8377
8378 t = va_arg (argp, tree);
8379 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8380 args = tree_cons (NULL_TREE, t, args);
8381
8382 if (vaargs)
8383 {
8384 last = args;
8385 if (args != NULL_TREE)
8386 args = nreverse (args);
8387 gcc_assert (last != void_list_node);
8388 }
8389 else if (args == NULL_TREE)
8390 args = void_list_node;
8391 else
8392 {
8393 last = args;
8394 args = nreverse (args);
8395 TREE_CHAIN (last) = void_list_node;
8396 }
8397 args = build_function_type (return_type, args);
8398
8399 return args;
8400 }
8401
8402 /* Build a function type. The RETURN_TYPE is the type returned by the
8403 function. If additional arguments are provided, they are
8404 additional argument types. The list of argument types must always
8405 be terminated by NULL_TREE. */
8406
8407 tree
8408 build_function_type_list (tree return_type, ...)
8409 {
8410 tree args;
8411 va_list p;
8412
8413 va_start (p, return_type);
8414 args = build_function_type_list_1 (false, return_type, p);
8415 va_end (p);
8416 return args;
8417 }
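
/* Editorial illustration (not part of GCC): the type of a function
   "int f (double, char *)".  The argument list is terminated by NULL_TREE
   as required; the example_* helper is hypothetical.  */
#if 0
static tree
example_function_type (void)
{
  return build_function_type_list (integer_type_node,
                                   double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);
}
#endif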
8418
8419 /* Build a variable argument function type. The RETURN_TYPE is the
8420 type returned by the function. If additional arguments are provided,
8421 they are additional argument types. The list of argument types must
8422 always be terminated by NULL_TREE. */
8423
8424 tree
8425 build_varargs_function_type_list (tree return_type, ...)
8426 {
8427 tree args;
8428 va_list p;
8429
8430 va_start (p, return_type);
8431 args = build_function_type_list_1 (true, return_type, p);
8432 va_end (p);
8433
8434 return args;
8435 }
8436
8437 /* Build a function type. RETURN_TYPE is the type returned by the
8438 function; VAARGS indicates whether the function takes varargs. The
8439 function takes N named arguments, the types of which are provided in
8440 ARG_TYPES. */
8441
8442 static tree
8443 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8444 tree *arg_types)
8445 {
8446 int i;
8447 tree t = vaargs ? NULL_TREE : void_list_node;
8448
8449 for (i = n - 1; i >= 0; i--)
8450 t = tree_cons (NULL_TREE, arg_types[i], t);
8451
8452 return build_function_type (return_type, t);
8453 }
8454
8455 /* Build a function type. RETURN_TYPE is the type returned by the
8456 function. The function takes N named arguments, the types of which
8457 are provided in ARG_TYPES. */
8458
8459 tree
8460 build_function_type_array (tree return_type, int n, tree *arg_types)
8461 {
8462 return build_function_type_array_1 (false, return_type, n, arg_types);
8463 }
8464
8465 /* Build a variable argument function type. RETURN_TYPE is the type
8466 returned by the function. The function takes N named arguments, the
8467 types of which are provided in ARG_TYPES. */
8468
8469 tree
8470 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8471 {
8472 return build_function_type_array_1 (true, return_type, n, arg_types);
8473 }
8474
8475 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8476 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8477 for the method. An implicit additional parameter (of type
8478 pointer-to-BASETYPE) is added to the ARGTYPES. */
8479
8480 tree
8481 build_method_type_directly (tree basetype,
8482 tree rettype,
8483 tree argtypes)
8484 {
8485 tree t;
8486 tree ptype;
8487 inchash::hash hstate;
8488 bool any_structural_p, any_noncanonical_p;
8489 tree canon_argtypes;
8490
8491 /* Make a node of the sort we want. */
8492 t = make_node (METHOD_TYPE);
8493
8494 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8495 TREE_TYPE (t) = rettype;
8496 ptype = build_pointer_type (basetype);
8497
8498 /* The actual arglist for this function includes a "hidden" argument
8499 which is "this". Put it into the list of argument types. */
8500 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8501 TYPE_ARG_TYPES (t) = argtypes;
8502
8503 /* If we already have such a type, use the old one. */
8504 hstate.add_object (TYPE_HASH (basetype));
8505 hstate.add_object (TYPE_HASH (rettype));
8506 type_hash_list (argtypes, hstate);
8507 t = type_hash_canon (hstate.end (), t);
8508
8509 /* Set up the canonical type. */
8510 any_structural_p
8511 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8512 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8513 any_noncanonical_p
8514 = (TYPE_CANONICAL (basetype) != basetype
8515 || TYPE_CANONICAL (rettype) != rettype);
8516 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8517 &any_structural_p,
8518 &any_noncanonical_p);
8519 if (any_structural_p)
8520 SET_TYPE_STRUCTURAL_EQUALITY (t);
8521 else if (any_noncanonical_p)
8522 TYPE_CANONICAL (t)
8523 = build_method_type_directly (TYPE_CANONICAL (basetype),
8524 TYPE_CANONICAL (rettype),
8525 canon_argtypes);
8526 if (!COMPLETE_TYPE_P (t))
8527 layout_type (t);
8528
8529 return t;
8530 }
8531
8532 /* Construct, lay out and return the type of methods belonging to class
8533 BASETYPE and whose arguments and values are described by TYPE.
8534 If that type exists already, reuse it.
8535 TYPE must be a FUNCTION_TYPE node. */
8536
8537 tree
8538 build_method_type (tree basetype, tree type)
8539 {
8540 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8541
8542 return build_method_type_directly (basetype,
8543 TREE_TYPE (type),
8544 TYPE_ARG_TYPES (type));
8545 }
8546
8547 /* Construct, lay out and return the type of offsets to a value
8548 of type TYPE, within an object of type BASETYPE.
8549 If a suitable offset type exists already, reuse it. */
8550
8551 tree
8552 build_offset_type (tree basetype, tree type)
8553 {
8554 tree t;
8555 inchash::hash hstate;
8556
8557 /* Make a node of the sort we want. */
8558 t = make_node (OFFSET_TYPE);
8559
8560 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8561 TREE_TYPE (t) = type;
8562
8563 /* If we already have such a type, use the old one. */
8564 hstate.add_object (TYPE_HASH (basetype));
8565 hstate.add_object (TYPE_HASH (type));
8566 t = type_hash_canon (hstate.end (), t);
8567
8568 if (!COMPLETE_TYPE_P (t))
8569 layout_type (t);
8570
8571 if (TYPE_CANONICAL (t) == t)
8572 {
8573 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8574 || TYPE_STRUCTURAL_EQUALITY_P (type))
8575 SET_TYPE_STRUCTURAL_EQUALITY (t);
8576 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8577 || TYPE_CANONICAL (type) != type)
8578 TYPE_CANONICAL (t)
8579 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8580 TYPE_CANONICAL (type));
8581 }
8582
8583 return t;
8584 }
8585
8586 /* Create a complex type whose components are COMPONENT_TYPE. */
8587
8588 tree
8589 build_complex_type (tree component_type)
8590 {
8591 tree t;
8592 inchash::hash hstate;
8593
8594 gcc_assert (INTEGRAL_TYPE_P (component_type)
8595 || SCALAR_FLOAT_TYPE_P (component_type)
8596 || FIXED_POINT_TYPE_P (component_type));
8597
8598 /* Make a node of the sort we want. */
8599 t = make_node (COMPLEX_TYPE);
8600
8601 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8602
8603 /* If we already have such a type, use the old one. */
8604 hstate.add_object (TYPE_HASH (component_type));
8605 t = type_hash_canon (hstate.end (), t);
8606
8607 if (!COMPLETE_TYPE_P (t))
8608 layout_type (t);
8609
8610 if (TYPE_CANONICAL (t) == t)
8611 {
8612 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8613 SET_TYPE_STRUCTURAL_EQUALITY (t);
8614 else if (TYPE_CANONICAL (component_type) != component_type)
8615 TYPE_CANONICAL (t)
8616 = build_complex_type (TYPE_CANONICAL (component_type));
8617 }
8618
8619 /* We need to create a name, since complex is a fundamental type. */
8620 if (! TYPE_NAME (t))
8621 {
8622 const char *name;
8623 if (component_type == char_type_node)
8624 name = "complex char";
8625 else if (component_type == signed_char_type_node)
8626 name = "complex signed char";
8627 else if (component_type == unsigned_char_type_node)
8628 name = "complex unsigned char";
8629 else if (component_type == short_integer_type_node)
8630 name = "complex short int";
8631 else if (component_type == short_unsigned_type_node)
8632 name = "complex short unsigned int";
8633 else if (component_type == integer_type_node)
8634 name = "complex int";
8635 else if (component_type == unsigned_type_node)
8636 name = "complex unsigned int";
8637 else if (component_type == long_integer_type_node)
8638 name = "complex long int";
8639 else if (component_type == long_unsigned_type_node)
8640 name = "complex long unsigned int";
8641 else if (component_type == long_long_integer_type_node)
8642 name = "complex long long int";
8643 else if (component_type == long_long_unsigned_type_node)
8644 name = "complex long long unsigned int";
8645 else
8646 name = 0;
8647
8648 if (name != 0)
8649 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8650 get_identifier (name), t);
8651 }
8652
8653 return build_qualified_type (t, TYPE_QUALS (component_type));
8654 }
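
/* Editorial illustration (not part of GCC): "_Complex int", which the code
   above also names "complex int" for debug output.  The example_* helper
   is hypothetical.  */
#if 0
static tree
example_complex_int (void)
{
  return build_complex_type (integer_type_node);
}
#endif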
8655
8656 /* If TYPE is a real or complex floating-point type and the target
8657 does not directly support arithmetic on TYPE then return the wider
8658 type to be used for arithmetic on TYPE. Otherwise, return
8659 NULL_TREE. */
8660
8661 tree
8662 excess_precision_type (tree type)
8663 {
8664 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8665 {
8666 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8667 switch (TREE_CODE (type))
8668 {
8669 case REAL_TYPE:
8670 switch (flt_eval_method)
8671 {
8672 case 1:
8673 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8674 return double_type_node;
8675 break;
8676 case 2:
8677 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8678 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8679 return long_double_type_node;
8680 break;
8681 default:
8682 gcc_unreachable ();
8683 }
8684 break;
8685 case COMPLEX_TYPE:
8686 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8687 return NULL_TREE;
8688 switch (flt_eval_method)
8689 {
8690 case 1:
8691 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8692 return complex_double_type_node;
8693 break;
8694 case 2:
8695 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8696 || (TYPE_MODE (TREE_TYPE (type))
8697 == TYPE_MODE (double_type_node)))
8698 return complex_long_double_type_node;
8699 break;
8700 default:
8701 gcc_unreachable ();
8702 }
8703 break;
8704 default:
8705 break;
8706 }
8707 }
8708 return NULL_TREE;
8709 }
8710 \f
8711 /* Return OP, stripped of any conversions to wider types as much as is safe.
8712 Converting the value back to OP's type makes a value equivalent to OP.
8713
8714 If FOR_TYPE is nonzero, we return a value which, if converted to
8715 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8716
8717 OP must have integer, real or enumeral type. Pointers are not allowed!
8718
8719 There are some cases where the obvious value we could return
8720 would regenerate to OP if converted to OP's type,
8721 but would not extend like OP to wider types.
8722 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8723 For example, if OP is (unsigned short)(signed char)-1,
8724 we avoid returning (signed char)-1 if FOR_TYPE is int,
8725 even though extending that to an unsigned short would regenerate OP,
8726 since the result of extending (signed char)-1 to (int)
8727 is different from (int) OP. */
8728
8729 tree
8730 get_unwidened (tree op, tree for_type)
8731 {
8732 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8733 tree type = TREE_TYPE (op);
8734 unsigned final_prec
8735 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8736 int uns
8737 = (for_type != 0 && for_type != type
8738 && final_prec > TYPE_PRECISION (type)
8739 && TYPE_UNSIGNED (type));
8740 tree win = op;
8741
8742 while (CONVERT_EXPR_P (op))
8743 {
8744 int bitschange;
8745
8746 /* TYPE_PRECISION on vector types has different meaning
8747 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8748 so avoid them here. */
8749 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8750 break;
8751
8752 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8753 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8754
8755 /* Truncations are many-one so cannot be removed, unless we are
8756 later going to truncate down even further. */
8757 if (bitschange < 0
8758 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8759 break;
8760
8761 /* See what's inside this conversion. If we decide to strip it,
8762 we will set WIN. */
8763 op = TREE_OPERAND (op, 0);
8764
8765 /* If we have not stripped any zero-extensions (uns is 0),
8766 we can strip any kind of extension.
8767 If we have previously stripped a zero-extension,
8768 only zero-extensions can safely be stripped.
8769 Any extension can be stripped if the bits it would produce
8770 are all going to be discarded later by truncating to FOR_TYPE. */
8771
8772 if (bitschange > 0)
8773 {
8774 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8775 win = op;
8776 /* TYPE_UNSIGNED says whether this is a zero-extension.
8777 Let's avoid computing it if it does not affect WIN
8778 and if UNS will not be needed again. */
8779 if ((uns
8780 || CONVERT_EXPR_P (op))
8781 && TYPE_UNSIGNED (TREE_TYPE (op)))
8782 {
8783 uns = 1;
8784 win = op;
8785 }
8786 }
8787 }
8788
8789 /* If we finally reach a constant, see if it fits in FOR_TYPE and,
8790 if so, convert it. */
8791 if (for_type
8792 && TREE_CODE (win) == INTEGER_CST
8793 && TREE_TYPE (win) != for_type
8794 && int_fits_type_p (win, for_type))
8795 win = fold_convert (for_type, win);
8796
8797 return win;
8798 }
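
/* Editorial illustration (not part of GCC): for OP = (int) c, where c has
   type signed char, passing FOR_TYPE = NULL_TREE strips the widening
   conversion and returns c itself.  The example_* helper is
   hypothetical.  */
#if 0
static tree
example_unwiden (tree op)
{
  return get_unwidened (op, NULL_TREE);
}
#endif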
8799 \f
8800 /* Return OP or a simpler expression for a narrower value
8801 which can be sign-extended or zero-extended to give back OP.
8802 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8803 or 0 if the value should be sign-extended. */
8804
8805 tree
8806 get_narrower (tree op, int *unsignedp_ptr)
8807 {
8808 int uns = 0;
8809 int first = 1;
8810 tree win = op;
8811 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8812
8813 while (TREE_CODE (op) == NOP_EXPR)
8814 {
8815 int bitschange
8816 = (TYPE_PRECISION (TREE_TYPE (op))
8817 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8818
8819 /* Truncations are many-one so cannot be removed. */
8820 if (bitschange < 0)
8821 break;
8822
8823 /* See what's inside this conversion. If we decide to strip it,
8824 we will set WIN. */
8825
8826 if (bitschange > 0)
8827 {
8828 op = TREE_OPERAND (op, 0);
8829 /* An extension: the outermost one can be stripped,
8830 but remember whether it is zero or sign extension. */
8831 if (first)
8832 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8833 /* Otherwise, if a sign extension has been stripped,
8834 only sign extensions can now be stripped;
8835 if a zero extension has been stripped, only zero-extensions. */
8836 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8837 break;
8838 first = 0;
8839 }
8840 else /* bitschange == 0 */
8841 {
8842 /* A change in nominal type can always be stripped, but we must
8843 preserve the unsignedness. */
8844 if (first)
8845 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8846 first = 0;
8847 op = TREE_OPERAND (op, 0);
8848 /* Keep trying to narrow, but don't assign op to win if it
8849 would turn an integral type into something else. */
8850 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8851 continue;
8852 }
8853
8854 win = op;
8855 }
8856
8857 if (TREE_CODE (op) == COMPONENT_REF
8858 /* Since type_for_size always gives an integer type. */
8859 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8860 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8861 /* Ensure field is laid out already. */
8862 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8863 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8864 {
8865 unsigned HOST_WIDE_INT innerprec
8866 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8867 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8868 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8869 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8870
8871 /* We can get this structure field in a narrower type that fits it,
8872 but the resulting extension to its nominal type (a fullword type)
8873 must satisfy the same conditions as for other extensions.
8874
8875 Do this only for fields that are aligned (not bit-fields),
8876 because there is no advantage in doing this when bit-field insns
8877 will be used anyway. */
8878
8879 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8880 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8881 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8882 && type != 0)
8883 {
8884 if (first)
8885 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8886 win = fold_convert (type, op);
8887 }
8888 }
8889
8890 *unsignedp_ptr = uns;
8891 return win;
8892 }
8893 \f
8894 /* Returns true if integer constant C has a value that is permissible
8895 for type TYPE (an INTEGER_TYPE). */
8896
8897 bool
8898 int_fits_type_p (const_tree c, const_tree type)
8899 {
8900 tree type_low_bound, type_high_bound;
8901 bool ok_for_low_bound, ok_for_high_bound;
8902 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8903
8904 retry:
8905 type_low_bound = TYPE_MIN_VALUE (type);
8906 type_high_bound = TYPE_MAX_VALUE (type);
8907
8908 /* If at least one bound of the type is a constant integer, we can check
8909 ourselves and maybe make a decision. If no such decision is possible, but
8910 this type is a subtype, try checking against that. Otherwise, use
8911 fits_to_tree_p, which checks against the precision.
8912
8913 Compute the status for each possibly constant bound, and return if we see
8914 one does not match. Use ok_for_xxx_bound for this purpose, setting it to
8915 false for "unknown whether the constant fits" and to true for "constant
8916 known to fit"; if a constant is known *not* to fit, we return at once. */
8917
8918 /* Check if c >= type_low_bound. */
8919 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8920 {
8921 if (tree_int_cst_lt (c, type_low_bound))
8922 return false;
8923 ok_for_low_bound = true;
8924 }
8925 else
8926 ok_for_low_bound = false;
8927
8928 /* Check if c <= type_high_bound. */
8929 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8930 {
8931 if (tree_int_cst_lt (type_high_bound, c))
8932 return false;
8933 ok_for_high_bound = true;
8934 }
8935 else
8936 ok_for_high_bound = false;
8937
8938 /* If the constant fits both bounds, the result is known. */
8939 if (ok_for_low_bound && ok_for_high_bound)
8940 return true;
8941
8942 /* Perform some generic filtering which may allow making a decision
8943 even if the bounds are not constant. First, negative integers
8944 never fit in unsigned types. */
8945 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8946 return false;
8947
8948 /* Second, narrower types always fit in wider ones. */
8949 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8950 return true;
8951
8952 /* Third, unsigned integers with the top bit set never fit in signed types. */
8953 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8954 {
8955 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8956 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8957 {
8958 /* When a tree_cst is converted to a wide-int, the precision
8959 is taken from the type. However, if the precision of the
8960 mode underneath the type is smaller than that, it is
8961 possible that the value will not fit. The test below
8962 fails if any bit is set between the sign bit of the
8963 underlying mode and the top bit of the type. */
8964 if (wi::ne_p (wi::zext (c, prec - 1), c))
8965 return false;
8966 }
8967 else if (wi::neg_p (c))
8968 return false;
8969 }
8970
8971 /* If we haven't been able to decide at this point, there is nothing more we
8972 can check ourselves here. Look at the base type if we have one and it
8973 has the same precision. */
8974 if (TREE_CODE (type) == INTEGER_TYPE
8975 && TREE_TYPE (type) != 0
8976 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8977 {
8978 type = TREE_TYPE (type);
8979 goto retry;
8980 }
8981
8982 /* Or to fits_to_tree_p, if nothing else. */
8983 return wi::fits_to_tree_p (c, type);
8984 }
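
/* Editorial illustration (not part of GCC): 255 fits in unsigned char,
   while 300 and -1 do not.  The example_* helper is hypothetical;
   build_int_cst and unsigned_char_type_node come from tree.h.  */
#if 0
static void
example_int_fits (void)
{
  tree t255 = build_int_cst (integer_type_node, 255);
  tree t300 = build_int_cst (integer_type_node, 300);
  tree m1 = build_int_cst (integer_type_node, -1);
  gcc_assert (int_fits_type_p (t255, unsigned_char_type_node));
  gcc_assert (!int_fits_type_p (t300, unsigned_char_type_node));
  gcc_assert (!int_fits_type_p (m1, unsigned_char_type_node));
}
#endif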
8985
8986 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8987 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8988 represented (assuming two's-complement arithmetic) within the bit
8989 precision of the type are returned instead. */
8990
8991 void
8992 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8993 {
8994 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8995 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8996 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8997 else
8998 {
8999 if (TYPE_UNSIGNED (type))
9000 mpz_set_ui (min, 0);
9001 else
9002 {
9003 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9004 wi::to_mpz (mn, min, SIGNED);
9005 }
9006 }
9007
9008 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9009 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9010 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9011 else
9012 {
9013 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9014 wi::to_mpz (mn, max, TYPE_SIGN (type));
9015 }
9016 }
9017
9018 /* Return true if VAR is an automatic variable defined in function FN. */
9019
9020 bool
9021 auto_var_in_fn_p (const_tree var, const_tree fn)
9022 {
9023 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9024 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9025 || TREE_CODE (var) == PARM_DECL)
9026 && ! TREE_STATIC (var))
9027 || TREE_CODE (var) == LABEL_DECL
9028 || TREE_CODE (var) == RESULT_DECL));
9029 }
9030
9031 /* Subprogram of following function. Called by walk_tree.
9032
9033 Return *TP if it is an automatic variable or parameter of the
9034 function passed in as DATA. */
9035
9036 static tree
9037 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9038 {
9039 tree fn = (tree) data;
9040
9041 if (TYPE_P (*tp))
9042 *walk_subtrees = 0;
9043
9044 else if (DECL_P (*tp)
9045 && auto_var_in_fn_p (*tp, fn))
9046 return *tp;
9047
9048 return NULL_TREE;
9049 }
9050
9051 /* Returns true if T is, contains, or refers to a type with variable
9052 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9053 arguments, but not the return type. If FN is nonzero, only return
9054 true if a modifier of the type or position of FN is a variable or
9055 parameter inside FN.
9056
9057 This concept is more general than that of C99 'variably modified types':
9058 in C99, a struct type is never variably modified because a VLA may not
9059 appear as a structure member. However, in GNU C, code like:
9060
9061 struct S { int i[f()]; };
9062
9063 is valid, and other languages may define similar constructs. */
9064
9065 bool
9066 variably_modified_type_p (tree type, tree fn)
9067 {
9068 tree t;
9069
9070 /* Test if T is either variable (if FN is zero) or an expression containing
9071 a variable in FN. If TYPE isn't gimplified, return true also if
9072 gimplify_one_sizepos would gimplify the expression into a local
9073 variable. */
9074 #define RETURN_TRUE_IF_VAR(T) \
9075 do { tree _t = (T); \
9076 if (_t != NULL_TREE \
9077 && _t != error_mark_node \
9078 && TREE_CODE (_t) != INTEGER_CST \
9079 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9080 && (!fn \
9081 || (!TYPE_SIZES_GIMPLIFIED (type) \
9082 && !is_gimple_sizepos (_t)) \
9083 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9084 return true; } while (0)
9085
9086 if (type == error_mark_node)
9087 return false;
9088
9089 /* If TYPE itself has variable size, it is variably modified. */
9090 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9091 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9092
9093 switch (TREE_CODE (type))
9094 {
9095 case POINTER_TYPE:
9096 case REFERENCE_TYPE:
9097 case VECTOR_TYPE:
9098 if (variably_modified_type_p (TREE_TYPE (type), fn))
9099 return true;
9100 break;
9101
9102 case FUNCTION_TYPE:
9103 case METHOD_TYPE:
9104 /* If TYPE is a function type, it is variably modified if the
9105 return type is variably modified. */
9106 if (variably_modified_type_p (TREE_TYPE (type), fn))
9107 return true;
9108 break;
9109
9110 case INTEGER_TYPE:
9111 case REAL_TYPE:
9112 case FIXED_POINT_TYPE:
9113 case ENUMERAL_TYPE:
9114 case BOOLEAN_TYPE:
9115 /* Scalar types are variably modified if their end points
9116 aren't constant. */
9117 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9118 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9119 break;
9120
9121 case RECORD_TYPE:
9122 case UNION_TYPE:
9123 case QUAL_UNION_TYPE:
9124 /* We can't see if any of the fields are variably-modified by the
9125 definition we normally use, since that would produce infinite
9126 recursion via pointers. */
9127 /* This is variably modified if some field's type is. */
9128 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9129 if (TREE_CODE (t) == FIELD_DECL)
9130 {
9131 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9132 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9133 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9134
9135 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9136 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9137 }
9138 break;
9139
9140 case ARRAY_TYPE:
9141 /* Do not call ourselves to avoid infinite recursion. This is
9142 variably modified if the element type is. */
9143 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9144 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9145 break;
9146
9147 default:
9148 break;
9149 }
9150
9151 /* The current language may have other cases to check, but in general,
9152 all other types are not variably modified. */
9153 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9154
9155 #undef RETURN_TRUE_IF_VAR
9156 }
9157
9158 /* Given a DECL or TYPE, return the scope in which it was declared, or
9159 NULL_TREE if there is no containing scope. */
9160
9161 tree
9162 get_containing_scope (const_tree t)
9163 {
9164 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9165 }
9166
9167 /* Return the innermost context enclosing DECL that is
9168 a FUNCTION_DECL, or zero if none. */
9169
9170 tree
9171 decl_function_context (const_tree decl)
9172 {
9173 tree context;
9174
9175 if (TREE_CODE (decl) == ERROR_MARK)
9176 return 0;
9177
9178 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9179 where we look up the function at runtime. Such functions always take
9180 a first argument of type 'pointer to real context'.
9181
9182 C++ should really be fixed to use DECL_CONTEXT for the real context,
9183 and use something else for the "virtual context". */
9184 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9185 context
9186 = TYPE_MAIN_VARIANT
9187 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9188 else
9189 context = DECL_CONTEXT (decl);
9190
9191 while (context && TREE_CODE (context) != FUNCTION_DECL)
9192 {
9193 if (TREE_CODE (context) == BLOCK)
9194 context = BLOCK_SUPERCONTEXT (context);
9195 else
9196 context = get_containing_scope (context);
9197 }
9198
9199 return context;
9200 }
9201
9202 /* Return the innermost context enclosing DECL that is
9203 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9204 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9205
9206 tree
9207 decl_type_context (const_tree decl)
9208 {
9209 tree context = DECL_CONTEXT (decl);
9210
9211 while (context)
9212 switch (TREE_CODE (context))
9213 {
9214 case NAMESPACE_DECL:
9215 case TRANSLATION_UNIT_DECL:
9216 return NULL_TREE;
9217
9218 case RECORD_TYPE:
9219 case UNION_TYPE:
9220 case QUAL_UNION_TYPE:
9221 return context;
9222
9223 case TYPE_DECL:
9224 case FUNCTION_DECL:
9225 context = DECL_CONTEXT (context);
9226 break;
9227
9228 case BLOCK:
9229 context = BLOCK_SUPERCONTEXT (context);
9230 break;
9231
9232 default:
9233 gcc_unreachable ();
9234 }
9235
9236 return NULL_TREE;
9237 }
9238
9239 /* CALL is a CALL_EXPR. Return the declaration for the function
9240 called, or NULL_TREE if the called function cannot be
9241 determined. */
9242
9243 tree
9244 get_callee_fndecl (const_tree call)
9245 {
9246 tree addr;
9247
9248 if (call == error_mark_node)
9249 return error_mark_node;
9250
9251 /* It's invalid to call this function with anything but a
9252 CALL_EXPR. */
9253 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9254
9255 /* The first operand to the CALL is the address of the function
9256 called. */
9257 addr = CALL_EXPR_FN (call);
9258
9259 /* If there is no function, return early. */
9260 if (addr == NULL_TREE)
9261 return NULL_TREE;
9262
9263 STRIP_NOPS (addr);
9264
9265 /* If this is a readonly function pointer, extract its initial value. */
9266 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9267 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9268 && DECL_INITIAL (addr))
9269 addr = DECL_INITIAL (addr);
9270
9271 /* If the address is just `&f' for some function `f', then we know
9272 that `f' is being called. */
9273 if (TREE_CODE (addr) == ADDR_EXPR
9274 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9275 return TREE_OPERAND (addr, 0);
9276
9277 /* We couldn't figure out what was being called. */
9278 return NULL_TREE;
9279 }
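
/* Editorial illustration (not part of GCC): for a direct call such as
   "f (x)" the CALL_EXPR's function operand is &f and the FUNCTION_DECL of
   f is returned, so the predicate below is true; for an indirect call
   through an arbitrary pointer it returns false.  The example_* helper is
   hypothetical.  */
#if 0
static bool
example_is_direct_call (tree call)
{
  return get_callee_fndecl (call) != NULL_TREE;
}
#endif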
9280
9281 #define TREE_MEM_USAGE_SPACES 40
9282
9283 /* Print debugging information about tree nodes generated during the compile,
9284 and any language-specific information. */
9285
9286 void
9287 dump_tree_statistics (void)
9288 {
9289 if (GATHER_STATISTICS)
9290 {
9291 int i;
9292 int total_nodes, total_bytes;
9293 fprintf (stderr, "\nKind Nodes Bytes\n");
9294 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9295 total_nodes = total_bytes = 0;
9296 for (i = 0; i < (int) all_kinds; i++)
9297 {
9298 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9299 tree_node_counts[i], tree_node_sizes[i]);
9300 total_nodes += tree_node_counts[i];
9301 total_bytes += tree_node_sizes[i];
9302 }
9303 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9304 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9305 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9306 fprintf (stderr, "Code Nodes\n");
9307 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9308 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9309 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9310 tree_code_counts[i]);
9311 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9312 fprintf (stderr, "\n");
9313 ssanames_print_statistics ();
9314 fprintf (stderr, "\n");
9315 phinodes_print_statistics ();
9316 fprintf (stderr, "\n");
9317 }
9318 else
9319 fprintf (stderr, "(No per-node statistics)\n");
9320
9321 print_type_hash_statistics ();
9322 print_debug_expr_statistics ();
9323 print_value_expr_statistics ();
9324 lang_hooks.print_statistics ();
9325 }
9326 \f
9327 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9328
9329 /* Generate a crc32 of the most significant BITS bits of VALUE. */
9330
9331 static unsigned
9332 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9333 {
9334 unsigned ix;
9335
9336 for (ix = bits; ix--; value <<= 1)
9337 {
9338 unsigned feedback;
9339
9340 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9341 chksum <<= 1;
9342 chksum ^= feedback;
9343 }
9344 return chksum;
9345 }
9346
9347 /* Generate a crc32 of a 32-bit unsigned. */
9348
9349 unsigned
9350 crc32_unsigned (unsigned chksum, unsigned value)
9351 {
9352 return crc32_unsigned_bits (chksum, value, 32);
9353 }
9354
9355 /* Generate a crc32 of a byte. */
9356
9357 unsigned
9358 crc32_byte (unsigned chksum, char byte)
9359 {
9360 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9361 }
9362
9363 /* Generate a crc32 of a string. */
9364
9365 unsigned
9366 crc32_string (unsigned chksum, const char *string)
9367 {
9368 do
9369 {
9370 chksum = crc32_byte (chksum, *string);
9371 }
9372 while (*string++);
9373 return chksum;
9374 }
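
/* Editorial illustration (not part of GCC): checksums chain, so several
   strings can be folded into one CRC a piece at a time.  The example_*
   helper is hypothetical.  */
#if 0
static unsigned
example_crc_of_two_strings (void)
{
  unsigned chksum = crc32_string (0, "foo");
  return crc32_string (chksum, "bar");
}
#endif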
9375
9376 /* P is a string that will be used in a symbol. Mask out any characters
9377 that are not valid in that context. */
9378
9379 void
9380 clean_symbol_name (char *p)
9381 {
9382 for (; *p; p++)
9383 if (! (ISALNUM (*p)
9384 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9385 || *p == '$'
9386 #endif
9387 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9388 || *p == '.'
9389 #endif
9390 ))
9391 *p = '_';
9392 }
9393
9394 /* For anonymous aggregate types, we need some sort of name to hold on to.
9395 Return true if ID_NODE is such a generated name. In practice, this
9396 should not appear, but it should not be harmful if it does. */
9397 bool
9398 anon_aggrname_p(const_tree id_node)
9399 {
9400 #ifndef NO_DOT_IN_LABEL
9401 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9402 && IDENTIFIER_POINTER (id_node)[1] == '_');
9403 #else /* NO_DOT_IN_LABEL */
9404 #ifndef NO_DOLLAR_IN_LABEL
9405 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9406 && IDENTIFIER_POINTER (id_node)[1] == '_');
9407 #else /* NO_DOLLAR_IN_LABEL */
9408 #define ANON_AGGRNAME_PREFIX "__anon_"
9409 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9410 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9411 #endif /* NO_DOLLAR_IN_LABEL */
9412 #endif /* NO_DOT_IN_LABEL */
9413 }
9414
9415 /* Return a format for an anonymous aggregate name. */
9416 const char *
9417 anon_aggrname_format()
9418 {
9419 #ifndef NO_DOT_IN_LABEL
9420 return "._%d";
9421 #else /* NO_DOT_IN_LABEL */
9422 #ifndef NO_DOLLAR_IN_LABEL
9423 return "$_%d";
9424 #else /* NO_DOLLAR_IN_LABEL */
9425 return "__anon_%d";
9426 #endif /* NO_DOLLAR_IN_LABEL */
9427 #endif /* NO_DOT_IN_LABEL */
9428 }
9429
9430 /* Generate a name for a special-purpose function.
9431 The generated name may need to be unique across the whole link.
9432 Changes to this function may also require corresponding changes to
9433 xstrdup_mask_random.
9434 TYPE is some string to identify the purpose of this function to the
9435 linker or collect2; it must start with an uppercase letter,
9436 one of:
9437 I - for constructors
9438 D - for destructors
9439 N - for C++ anonymous namespaces
9440 F - for DWARF unwind frame information. */
9441
9442 tree
9443 get_file_function_name (const char *type)
9444 {
9445 char *buf;
9446 const char *p;
9447 char *q;
9448
9449 /* If we already have a name we know to be unique, just use that. */
9450 if (first_global_object_name)
9451 p = q = ASTRDUP (first_global_object_name);
9452 /* If the target is handling the constructors/destructors, they
9453 will be local to this file and the name is only necessary for
9454 debugging purposes.
9455 We also assign sub_I and sub_D suffixes to constructors called from
9456 the global static constructors. These are always local. */
9457 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9458 || (strncmp (type, "sub_", 4) == 0
9459 && (type[4] == 'I' || type[4] == 'D')))
9460 {
9461 const char *file = main_input_filename;
9462 if (! file)
9463 file = LOCATION_FILE (input_location);
9464 /* Just use the file's basename, because the full pathname
9465 might be quite long. */
9466 p = q = ASTRDUP (lbasename (file));
9467 }
9468 else
9469 {
9470 /* Otherwise, the name must be unique across the entire link.
9471 We don't have anything that we know to be unique to this translation
9472 unit, so use what we do have and throw in some randomness. */
9473 unsigned len;
9474 const char *name = weak_global_object_name;
9475 const char *file = main_input_filename;
9476
9477 if (! name)
9478 name = "";
9479 if (! file)
9480 file = LOCATION_FILE (input_location);
9481
9482 len = strlen (file);
9483 q = (char *) alloca (9 + 17 + len + 1);
9484 memcpy (q, file, len + 1);
9485
9486 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9487 crc32_string (0, name), get_random_seed (false));
9488
9489 p = q;
9490 }
9491
9492 clean_symbol_name (q);
9493 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9494 + strlen (type));
9495
9496 /* Set up the name of the file-level functions we may need.
9497 Use a global object (which is already required to be unique over
9498 the program) rather than the file name (which imposes extra
9499 constraints). */
9500 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9501
9502 return get_identifier (buf);
9503 }
9504 \f
9505 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9506
9507 /* Complain that the tree code of NODE does not match the expected 0
9508 terminated list of trailing codes. The trailing code list can be
9509 empty, for a more vague error message. FILE, LINE, and FUNCTION
9510 are of the caller. */
9511
9512 void
9513 tree_check_failed (const_tree node, const char *file,
9514 int line, const char *function, ...)
9515 {
9516 va_list args;
9517 const char *buffer;
9518 unsigned length = 0;
9519 enum tree_code code;
9520
9521 va_start (args, function);
9522 while ((code = (enum tree_code) va_arg (args, int)))
9523 length += 4 + strlen (get_tree_code_name (code));
9524 va_end (args);
9525 if (length)
9526 {
9527 char *tmp;
9528 va_start (args, function);
9529 length += strlen ("expected ");
9530 buffer = tmp = (char *) alloca (length);
9531 length = 0;
9532 while ((code = (enum tree_code) va_arg (args, int)))
9533 {
9534 const char *prefix = length ? " or " : "expected ";
9535
9536 strcpy (tmp + length, prefix);
9537 length += strlen (prefix);
9538 strcpy (tmp + length, get_tree_code_name (code));
9539 length += strlen (get_tree_code_name (code));
9540 }
9541 va_end (args);
9542 }
9543 else
9544 buffer = "unexpected node";
9545
9546 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9547 buffer, get_tree_code_name (TREE_CODE (node)),
9548 function, trim_filename (file), line);
9549 }
9550
9551 /* Complain that the tree code of NODE does match one of the disallowed
9552 codes in the 0-terminated trailing list. FILE, LINE, and FUNCTION are of
9553 the caller. */
9554
9555 void
9556 tree_not_check_failed (const_tree node, const char *file,
9557 int line, const char *function, ...)
9558 {
9559 va_list args;
9560 char *buffer;
9561 unsigned length = 0;
9562 enum tree_code code;
9563
9564 va_start (args, function);
9565 while ((code = (enum tree_code) va_arg (args, int)))
9566 length += 4 + strlen (get_tree_code_name (code));
9567 va_end (args);
9568 va_start (args, function);
9569 buffer = (char *) alloca (length);
9570 length = 0;
9571 while ((code = (enum tree_code) va_arg (args, int)))
9572 {
9573 if (length)
9574 {
9575 strcpy (buffer + length, " or ");
9576 length += 4;
9577 }
9578 strcpy (buffer + length, get_tree_code_name (code));
9579 length += strlen (get_tree_code_name (code));
9580 }
9581 va_end (args);
9582
9583 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9584 buffer, get_tree_code_name (TREE_CODE (node)),
9585 function, trim_filename (file), line);
9586 }
9587
9588 /* Similar to tree_check_failed, except that we check for a class of tree
9589 code, given in CL. */
9590
9591 void
9592 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9593 const char *file, int line, const char *function)
9594 {
9595 internal_error
9596 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9597 TREE_CODE_CLASS_STRING (cl),
9598 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9599 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9600 }
9601
9602 /* Similar to tree_check_failed, except that instead of specifying a
9603 dozen codes, use the knowledge that they're all sequential. */
9604
9605 void
9606 tree_range_check_failed (const_tree node, const char *file, int line,
9607 const char *function, enum tree_code c1,
9608 enum tree_code c2)
9609 {
9610 char *buffer;
9611 unsigned length = 0;
9612 unsigned int c;
9613
9614 for (c = c1; c <= c2; ++c)
9615 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9616
9617 length += strlen ("expected ");
9618 buffer = (char *) alloca (length);
9619 length = 0;
9620
9621 for (c = c1; c <= c2; ++c)
9622 {
9623 const char *prefix = length ? " or " : "expected ";
9624
9625 strcpy (buffer + length, prefix);
9626 length += strlen (prefix);
9627 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9628 length += strlen (get_tree_code_name ((enum tree_code) c));
9629 }
9630
9631 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9632 buffer, get_tree_code_name (TREE_CODE (node)),
9633 function, trim_filename (file), line);
9634 }
9635
9636
9637 /* Similar to tree_check_failed, except that we check that a tree does
9638 not belong to the specified class, given in CL. */
9639
9640 void
9641 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9642 const char *file, int line, const char *function)
9643 {
9644 internal_error
9645 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9646 TREE_CODE_CLASS_STRING (cl),
9647 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9648 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9649 }
9650
9651
9652 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9653
9654 void
9655 omp_clause_check_failed (const_tree node, const char *file, int line,
9656 const char *function, enum omp_clause_code code)
9657 {
9658 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9659 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9660 function, trim_filename (file), line);
9661 }
9662
9663
9664 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9665
9666 void
9667 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9668 const char *function, enum omp_clause_code c1,
9669 enum omp_clause_code c2)
9670 {
9671 char *buffer;
9672 unsigned length = 0;
9673 unsigned int c;
9674
9675 for (c = c1; c <= c2; ++c)
9676 length += 4 + strlen (omp_clause_code_name[c]);
9677
9678 length += strlen ("expected ");
9679 buffer = (char *) alloca (length);
9680 length = 0;
9681
9682 for (c = c1; c <= c2; ++c)
9683 {
9684 const char *prefix = length ? " or " : "expected ";
9685
9686 strcpy (buffer + length, prefix);
9687 length += strlen (prefix);
9688 strcpy (buffer + length, omp_clause_code_name[c]);
9689 length += strlen (omp_clause_code_name[c]);
9690 }
9691
9692 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9693 buffer, omp_clause_code_name[TREE_CODE (node)],
9694 function, trim_filename (file), line);
9695 }
9696
9697
9698 #undef DEFTREESTRUCT
9699 #define DEFTREESTRUCT(VAL, NAME) NAME,
9700
9701 static const char *ts_enum_names[] = {
9702 #include "treestruct.def"
9703 };
9704 #undef DEFTREESTRUCT
9705
9706 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9707
9708 /* Similar to tree_class_check_failed, except that we check for
9709 whether CODE contains the tree structure identified by EN. */
9710
9711 void
9712 tree_contains_struct_check_failed (const_tree node,
9713 const enum tree_node_structure_enum en,
9714 const char *file, int line,
9715 const char *function)
9716 {
9717 internal_error
9718 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9719 TS_ENUM_NAME (en),
9720 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9721 }
9722
9723
9724 /* Similar to above, except that the check is for the bounds of a
9725 TREE_INT_CST's (dynamically sized) vector of elements. */
9726
9727 void
9728 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9729 const char *function)
9730 {
9731 internal_error
9732 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9733 idx + 1, len, function, trim_filename (file), line);
9734 }
9735
9736 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9737 (dynamically sized) vector. */
9738
9739 void
9740 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9741 const char *function)
9742 {
9743 internal_error
9744 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9745 idx + 1, len, function, trim_filename (file), line);
9746 }
9747
9748 /* Similar to above, except that the check is for the bounds of the operand
9749 vector of an expression node EXP. */
9750
9751 void
9752 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9753 int line, const char *function)
9754 {
9755 enum tree_code code = TREE_CODE (exp);
9756 internal_error
9757 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9758 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9759 function, trim_filename (file), line);
9760 }
9761
9762 /* Similar to above, except that the check is for the number of
9763 operands of an OMP_CLAUSE node. */
9764
9765 void
9766 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9767 int line, const char *function)
9768 {
9769 internal_error
9770 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9771 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9772 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9773 trim_filename (file), line);
9774 }
9775 #endif /* ENABLE_TREE_CHECKING */
9776 \f
9777 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9778 and mapped to the machine mode MODE. Initialize its fields and build
9779 the information necessary for debugging output. */
9780
9781 static tree
9782 make_vector_type (tree innertype, int nunits, machine_mode mode)
9783 {
9784 tree t;
9785 inchash::hash hstate;
9786
9787 t = make_node (VECTOR_TYPE);
9788 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9789 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9790 SET_TYPE_MODE (t, mode);
9791
9792 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9793 SET_TYPE_STRUCTURAL_EQUALITY (t);
9794 else if (TYPE_CANONICAL (innertype) != innertype
9795 || mode != VOIDmode)
9796 TYPE_CANONICAL (t)
9797 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9798
9799 layout_type (t);
9800
9801 hstate.add_wide_int (VECTOR_TYPE);
9802 hstate.add_wide_int (nunits);
9803 hstate.add_wide_int (mode);
9804 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9805 t = type_hash_canon (hstate.end (), t);
9806
9807 /* We have built a main variant, based on the main variant of the
9808 inner type. Use it to build the variant we return. */
9809 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9810 && TREE_TYPE (t) != innertype)
9811 return build_type_attribute_qual_variant (t,
9812 TYPE_ATTRIBUTES (innertype),
9813 TYPE_QUALS (innertype));
9814
9815 return t;
9816 }
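
/* For illustration (modes are target-dependent): a request for four SFmode
   floats, e.g. build_vector_type (float_type_node, 4) defined further below,
   reaches this function with VOIDmode; layout_type then selects a vector
   mode such as V4SFmode when the target supports one, and type_hash_canon
   ensures repeated requests return the same node.  */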
9817
9818 static tree
9819 make_or_reuse_type (unsigned size, int unsignedp)
9820 {
9821 int i;
9822
9823 if (size == INT_TYPE_SIZE)
9824 return unsignedp ? unsigned_type_node : integer_type_node;
9825 if (size == CHAR_TYPE_SIZE)
9826 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9827 if (size == SHORT_TYPE_SIZE)
9828 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9829 if (size == LONG_TYPE_SIZE)
9830 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9831 if (size == LONG_LONG_TYPE_SIZE)
9832 return (unsignedp ? long_long_unsigned_type_node
9833 : long_long_integer_type_node);
9834
9835 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9836 if (size == int_n_data[i].bitsize
9837 && int_n_enabled_p[i])
9838 return (unsignedp ? int_n_trees[i].unsigned_type
9839 : int_n_trees[i].signed_type);
9840
9841 if (unsignedp)
9842 return make_unsigned_type (size);
9843 else
9844 return make_signed_type (size);
9845 }
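
/* For illustration: on a typical ILP32 target (INT_TYPE_SIZE == 32),
   make_or_reuse_type (32, 1) simply returns unsigned_type_node, whereas
   make_or_reuse_type (128, 0) falls through to make_signed_type and creates
   a fresh 128-bit integer type node (assuming no enabled int_n entry of that
   bitsize exists).  */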
9846
9847 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9848
9849 static tree
9850 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9851 {
9852 if (satp)
9853 {
9854 if (size == SHORT_FRACT_TYPE_SIZE)
9855 return unsignedp ? sat_unsigned_short_fract_type_node
9856 : sat_short_fract_type_node;
9857 if (size == FRACT_TYPE_SIZE)
9858 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9859 if (size == LONG_FRACT_TYPE_SIZE)
9860 return unsignedp ? sat_unsigned_long_fract_type_node
9861 : sat_long_fract_type_node;
9862 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9863 return unsignedp ? sat_unsigned_long_long_fract_type_node
9864 : sat_long_long_fract_type_node;
9865 }
9866 else
9867 {
9868 if (size == SHORT_FRACT_TYPE_SIZE)
9869 return unsignedp ? unsigned_short_fract_type_node
9870 : short_fract_type_node;
9871 if (size == FRACT_TYPE_SIZE)
9872 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9873 if (size == LONG_FRACT_TYPE_SIZE)
9874 return unsignedp ? unsigned_long_fract_type_node
9875 : long_fract_type_node;
9876 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9877 return unsignedp ? unsigned_long_long_fract_type_node
9878 : long_long_fract_type_node;
9879 }
9880
9881 return make_fract_type (size, unsignedp, satp);
9882 }
9883
9884 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9885
9886 static tree
9887 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9888 {
9889 if (satp)
9890 {
9891 if (size == SHORT_ACCUM_TYPE_SIZE)
9892 return unsignedp ? sat_unsigned_short_accum_type_node
9893 : sat_short_accum_type_node;
9894 if (size == ACCUM_TYPE_SIZE)
9895 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9896 if (size == LONG_ACCUM_TYPE_SIZE)
9897 return unsignedp ? sat_unsigned_long_accum_type_node
9898 : sat_long_accum_type_node;
9899 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9900 return unsignedp ? sat_unsigned_long_long_accum_type_node
9901 : sat_long_long_accum_type_node;
9902 }
9903 else
9904 {
9905 if (size == SHORT_ACCUM_TYPE_SIZE)
9906 return unsignedp ? unsigned_short_accum_type_node
9907 : short_accum_type_node;
9908 if (size == ACCUM_TYPE_SIZE)
9909 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9910 if (size == LONG_ACCUM_TYPE_SIZE)
9911 return unsignedp ? unsigned_long_accum_type_node
9912 : long_accum_type_node;
9913 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9914 return unsignedp ? unsigned_long_long_accum_type_node
9915 : long_long_accum_type_node;
9916 }
9917
9918 return make_accum_type (size, unsignedp, satp);
9919 }
9920
9921
9922 /* Create an atomic variant node for TYPE. This routine is called
9923 during initialization of data types to create the 5 basic atomic
9924 types. The generic build_variant_type function requires these to
9925 already be set up in order to function properly, so cannot be
9926 called from there. If ALIGN is non-zero, then ensure alignment is
9927 overridden to this value. */
9928
9929 static tree
9930 build_atomic_base (tree type, unsigned int align)
9931 {
9932 tree t;
9933
9934 /* Make sure it's not already registered. */
9935 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9936 return t;
9937
9938 t = build_variant_type_copy (type);
9939 set_type_quals (t, TYPE_QUAL_ATOMIC);
9940
9941 if (align)
9942 TYPE_ALIGN (t) = align;
9943
9944 return t;
9945 }
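
/* For illustration: the calls below, such as

     build_atomic_base (unsigned_intSI_type_node,
                        targetm.atomic_align_for_mode (SImode));

   produce a variant of the 32-bit unsigned type with TYPE_QUAL_ATOMIC set
   and, when the target hook returns a non-zero value, with its alignment
   overridden to that value.  */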
9946
9947 /* Create nodes for all integer types (and error_mark_node) using the sizes
9948 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9949 SHORT_DOUBLE specifies whether double should be of the same precision
9950 as float. */
9951
9952 void
9953 build_common_tree_nodes (bool signed_char, bool short_double)
9954 {
9955 int i;
9956
9957 error_mark_node = make_node (ERROR_MARK);
9958 TREE_TYPE (error_mark_node) = error_mark_node;
9959
9960 initialize_sizetypes ();
9961
9962 /* Define both `signed char' and `unsigned char'. */
9963 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9964 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9965 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9966 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9967
9968 /* Define `char', which is like either `signed char' or `unsigned char'
9969 but not the same as either. */
9970 char_type_node
9971 = (signed_char
9972 ? make_signed_type (CHAR_TYPE_SIZE)
9973 : make_unsigned_type (CHAR_TYPE_SIZE));
9974 TYPE_STRING_FLAG (char_type_node) = 1;
9975
9976 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9977 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9978 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9979 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9980 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9981 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9982 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9983 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9984
9985 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9986 {
9987 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9988 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9989 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9990 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9991
9992 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9993 && int_n_enabled_p[i])
9994 {
9995 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9996 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9997 }
9998 }
9999
10000 /* Define a boolean type. This type only represents boolean values but
10001 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10002 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10003 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10004 TYPE_PRECISION (boolean_type_node) = 1;
10005 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10006
10007 /* Define what type to use for size_t. */
10008 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10009 size_type_node = unsigned_type_node;
10010 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10011 size_type_node = long_unsigned_type_node;
10012 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10013 size_type_node = long_long_unsigned_type_node;
10014 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10015 size_type_node = short_unsigned_type_node;
10016 else
10017 {
10018 int i;
10019
10020 size_type_node = NULL_TREE;
10021 for (i = 0; i < NUM_INT_N_ENTS; i++)
10022 if (int_n_enabled_p[i])
10023 {
10024 char name[50];
10025 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10026
10027 if (strcmp (name, SIZE_TYPE) == 0)
10028 {
10029 size_type_node = int_n_trees[i].unsigned_type;
10030 }
10031 }
10032 if (size_type_node == NULL_TREE)
10033 gcc_unreachable ();
10034 }
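
/* For illustration: with SIZE_TYPE "long unsigned int" (common on LP64
   targets) size_type_node becomes long_unsigned_type_node, while a target
   whose SIZE_TYPE is spelled "__int20 unsigned" (msp430 is an example of
   why the int_n loop above exists) is matched there instead.  */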
10035
10036 /* Fill in the rest of the sized types. Reuse existing type nodes
10037 when possible. */
10038 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10039 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10040 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10041 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10042 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10043
10044 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10045 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10046 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10047 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10048 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10049
10050 /* Don't call build_qualified_type for atomics. That routine does
10051 special processing for atomics, and until they are initialized
10052 it's better not to make that call.
10053
10054 Check to see if there is a target override for atomic types. */
10055
10056 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10057 targetm.atomic_align_for_mode (QImode));
10058 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10059 targetm.atomic_align_for_mode (HImode));
10060 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10061 targetm.atomic_align_for_mode (SImode));
10062 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10063 targetm.atomic_align_for_mode (DImode));
10064 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10065 targetm.atomic_align_for_mode (TImode));
10066
10067 access_public_node = get_identifier ("public");
10068 access_protected_node = get_identifier ("protected");
10069 access_private_node = get_identifier ("private");
10070
10071 /* Define these next since types below may use them. */
10072 integer_zero_node = build_int_cst (integer_type_node, 0);
10073 integer_one_node = build_int_cst (integer_type_node, 1);
10074 integer_three_node = build_int_cst (integer_type_node, 3);
10075 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10076
10077 size_zero_node = size_int (0);
10078 size_one_node = size_int (1);
10079 bitsize_zero_node = bitsize_int (0);
10080 bitsize_one_node = bitsize_int (1);
10081 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10082
10083 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10084 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10085
10086 void_type_node = make_node (VOID_TYPE);
10087 layout_type (void_type_node);
10088
10089 pointer_bounds_type_node = targetm.chkp_bound_type ();
10090
10091 /* We are not going to have real types in C with less than byte alignment,
10092 so we might as well not have any types that claim to have it. */
10093 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10094 TYPE_USER_ALIGN (void_type_node) = 0;
10095
10096 void_node = make_node (VOID_CST);
10097 TREE_TYPE (void_node) = void_type_node;
10098
10099 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10100 layout_type (TREE_TYPE (null_pointer_node));
10101
10102 ptr_type_node = build_pointer_type (void_type_node);
10103 const_ptr_type_node
10104 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10105 fileptr_type_node = ptr_type_node;
10106
10107 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10108
10109 float_type_node = make_node (REAL_TYPE);
10110 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10111 layout_type (float_type_node);
10112
10113 double_type_node = make_node (REAL_TYPE);
10114 if (short_double)
10115 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
10116 else
10117 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10118 layout_type (double_type_node);
10119
10120 long_double_type_node = make_node (REAL_TYPE);
10121 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10122 layout_type (long_double_type_node);
10123
10124 float_ptr_type_node = build_pointer_type (float_type_node);
10125 double_ptr_type_node = build_pointer_type (double_type_node);
10126 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10127 integer_ptr_type_node = build_pointer_type (integer_type_node);
10128
10129 /* Fixed size integer types. */
10130 uint16_type_node = make_or_reuse_type (16, 1);
10131 uint32_type_node = make_or_reuse_type (32, 1);
10132 uint64_type_node = make_or_reuse_type (64, 1);
10133
10134 /* Decimal float types. */
10135 dfloat32_type_node = make_node (REAL_TYPE);
10136 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10137 layout_type (dfloat32_type_node);
10138 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10139 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10140
10141 dfloat64_type_node = make_node (REAL_TYPE);
10142 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10143 layout_type (dfloat64_type_node);
10144 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10145 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10146
10147 dfloat128_type_node = make_node (REAL_TYPE);
10148 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10149 layout_type (dfloat128_type_node);
10150 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10151 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10152
10153 complex_integer_type_node = build_complex_type (integer_type_node);
10154 complex_float_type_node = build_complex_type (float_type_node);
10155 complex_double_type_node = build_complex_type (double_type_node);
10156 complex_long_double_type_node = build_complex_type (long_double_type_node);
10157
10158 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10159 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10160 sat_ ## KIND ## _type_node = \
10161 make_sat_signed_ ## KIND ## _type (SIZE); \
10162 sat_unsigned_ ## KIND ## _type_node = \
10163 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10164 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10165 unsigned_ ## KIND ## _type_node = \
10166 make_unsigned_ ## KIND ## _type (SIZE);
10167
10168 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10169 sat_ ## WIDTH ## KIND ## _type_node = \
10170 make_sat_signed_ ## KIND ## _type (SIZE); \
10171 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10172 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10173 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10174 unsigned_ ## WIDTH ## KIND ## _type_node = \
10175 make_unsigned_ ## KIND ## _type (SIZE);
10176
10177 /* Make fixed-point type nodes based on four different widths. */
10178 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10179 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10180 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10181 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10182 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10183
10184 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10185 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10186 NAME ## _type_node = \
10187 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10188 u ## NAME ## _type_node = \
10189 make_or_reuse_unsigned_ ## KIND ## _type \
10190 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10191 sat_ ## NAME ## _type_node = \
10192 make_or_reuse_sat_signed_ ## KIND ## _type \
10193 (GET_MODE_BITSIZE (MODE ## mode)); \
10194 sat_u ## NAME ## _type_node = \
10195 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10196 (GET_MODE_BITSIZE (U ## MODE ## mode));
10197
10198 /* Fixed-point type and mode nodes. */
10199 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10200 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10201 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10202 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10203 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10204 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10205 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10206 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10207 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10208 MAKE_FIXED_MODE_NODE (accum, da, DA)
10209 MAKE_FIXED_MODE_NODE (accum, ta, TA)
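
/* For illustration, MAKE_FIXED_MODE_NODE (fract, sq, SQ) above expands to:

     sq_type_node =
       make_or_reuse_signed_fract_type (GET_MODE_BITSIZE (SQmode));
     usq_type_node =
       make_or_reuse_unsigned_fract_type (GET_MODE_BITSIZE (USQmode));
     sat_sq_type_node =
       make_or_reuse_sat_signed_fract_type (GET_MODE_BITSIZE (SQmode));
     sat_usq_type_node =
       make_or_reuse_sat_unsigned_fract_type (GET_MODE_BITSIZE (USQmode));  */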
10210
10211 {
10212 tree t = targetm.build_builtin_va_list ();
10213
10214 /* Many back-ends define record types without setting TYPE_NAME.
10215 If we copied the record type here, we'd keep the original
10216 record type without a name. This breaks name mangling. So,
10217 don't copy record types and let c_common_nodes_and_builtins()
10218 declare the type to be __builtin_va_list. */
10219 if (TREE_CODE (t) != RECORD_TYPE)
10220 t = build_variant_type_copy (t);
10221
10222 va_list_type_node = t;
10223 }
10224 }
10225
10226 /* Modify DECL for given flags.
10227 TM_PURE attribute is set only on types, so the function will modify
10228 DECL's type when ECF_TM_PURE is used. */
10229
10230 void
10231 set_call_expr_flags (tree decl, int flags)
10232 {
10233 if (flags & ECF_NOTHROW)
10234 TREE_NOTHROW (decl) = 1;
10235 if (flags & ECF_CONST)
10236 TREE_READONLY (decl) = 1;
10237 if (flags & ECF_PURE)
10238 DECL_PURE_P (decl) = 1;
10239 if (flags & ECF_LOOPING_CONST_OR_PURE)
10240 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10241 if (flags & ECF_NOVOPS)
10242 DECL_IS_NOVOPS (decl) = 1;
10243 if (flags & ECF_NORETURN)
10244 TREE_THIS_VOLATILE (decl) = 1;
10245 if (flags & ECF_MALLOC)
10246 DECL_IS_MALLOC (decl) = 1;
10247 if (flags & ECF_RETURNS_TWICE)
10248 DECL_IS_RETURNS_TWICE (decl) = 1;
10249 if (flags & ECF_LEAF)
10250 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10251 NULL, DECL_ATTRIBUTES (decl));
10252 if ((flags & ECF_TM_PURE) && flag_tm)
10253 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10254 /* Looping const or pure is implied by noreturn.
10255 There is currently no way to declare looping const or looping pure alone. */
10256 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10257 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10258 }
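
/* For illustration: a call such as

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   marks DECL with TREE_READONLY and TREE_NOTHROW and chains a "leaf"
   attribute onto DECL_ATTRIBUTES; this is exactly how local_define_builtin
   below applies the ECF_* flags it is handed.  */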
10259
10260
10261 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10262
10263 static void
10264 local_define_builtin (const char *name, tree type, enum built_in_function code,
10265 const char *library_name, int ecf_flags)
10266 {
10267 tree decl;
10268
10269 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10270 library_name, NULL_TREE);
10271 set_call_expr_flags (decl, ecf_flags);
10272
10273 set_builtin_decl (code, decl, true);
10274 }
10275
10276 /* Call this function after instantiating all builtins that the language
10277 front end cares about. This will build the rest of the builtins
10278 and internal functions that are relied upon by the tree optimizers and
10279 the middle-end. */
10280
10281 void
10282 build_common_builtin_nodes (void)
10283 {
10284 tree tmp, ftype;
10285 int ecf_flags;
10286
10287 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10288 {
10289 ftype = build_function_type (void_type_node, void_list_node);
10290 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10291 "__builtin_unreachable",
10292 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10293 | ECF_CONST);
10294 }
10295
10296 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10297 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10298 {
10299 ftype = build_function_type_list (ptr_type_node,
10300 ptr_type_node, const_ptr_type_node,
10301 size_type_node, NULL_TREE);
10302
10303 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10304 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10305 "memcpy", ECF_NOTHROW | ECF_LEAF);
10306 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10307 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10308 "memmove", ECF_NOTHROW | ECF_LEAF);
10309 }
10310
10311 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10312 {
10313 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10314 const_ptr_type_node, size_type_node,
10315 NULL_TREE);
10316 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10317 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10318 }
10319
10320 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10321 {
10322 ftype = build_function_type_list (ptr_type_node,
10323 ptr_type_node, integer_type_node,
10324 size_type_node, NULL_TREE);
10325 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10326 "memset", ECF_NOTHROW | ECF_LEAF);
10327 }
10328
10329 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10330 {
10331 ftype = build_function_type_list (ptr_type_node,
10332 size_type_node, NULL_TREE);
10333 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10334 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10335 }
10336
10337 ftype = build_function_type_list (ptr_type_node, size_type_node,
10338 size_type_node, NULL_TREE);
10339 local_define_builtin ("__builtin_alloca_with_align", ftype,
10340 BUILT_IN_ALLOCA_WITH_ALIGN,
10341 "__builtin_alloca_with_align",
10342 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10343
10344 /* If we're checking the stack, `alloca' can throw. */
10345 if (flag_stack_check)
10346 {
10347 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10348 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10349 }
10350
10351 ftype = build_function_type_list (void_type_node,
10352 ptr_type_node, ptr_type_node,
10353 ptr_type_node, NULL_TREE);
10354 local_define_builtin ("__builtin_init_trampoline", ftype,
10355 BUILT_IN_INIT_TRAMPOLINE,
10356 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10357 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10358 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10359 "__builtin_init_heap_trampoline",
10360 ECF_NOTHROW | ECF_LEAF);
10361
10362 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10363 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10364 BUILT_IN_ADJUST_TRAMPOLINE,
10365 "__builtin_adjust_trampoline",
10366 ECF_CONST | ECF_NOTHROW);
10367
10368 ftype = build_function_type_list (void_type_node,
10369 ptr_type_node, ptr_type_node, NULL_TREE);
10370 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10371 BUILT_IN_NONLOCAL_GOTO,
10372 "__builtin_nonlocal_goto",
10373 ECF_NORETURN | ECF_NOTHROW);
10374
10375 ftype = build_function_type_list (void_type_node,
10376 ptr_type_node, ptr_type_node, NULL_TREE);
10377 local_define_builtin ("__builtin_setjmp_setup", ftype,
10378 BUILT_IN_SETJMP_SETUP,
10379 "__builtin_setjmp_setup", ECF_NOTHROW);
10380
10381 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10382 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10383 BUILT_IN_SETJMP_RECEIVER,
10384 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10385
10386 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10387 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10388 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10389
10390 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10391 local_define_builtin ("__builtin_stack_restore", ftype,
10392 BUILT_IN_STACK_RESTORE,
10393 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10394
10395 /* If there's a possibility that we might use the ARM EABI, build the
10396 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10397 if (targetm.arm_eabi_unwinder)
10398 {
10399 ftype = build_function_type_list (void_type_node, NULL_TREE);
10400 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10401 BUILT_IN_CXA_END_CLEANUP,
10402 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10403 }
10404
10405 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10406 local_define_builtin ("__builtin_unwind_resume", ftype,
10407 BUILT_IN_UNWIND_RESUME,
10408 ((targetm_common.except_unwind_info (&global_options)
10409 == UI_SJLJ)
10410 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10411 ECF_NORETURN);
10412
10413 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10414 {
10415 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10416 NULL_TREE);
10417 local_define_builtin ("__builtin_return_address", ftype,
10418 BUILT_IN_RETURN_ADDRESS,
10419 "__builtin_return_address",
10420 ECF_NOTHROW);
10421 }
10422
10423 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10424 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10425 {
10426 ftype = build_function_type_list (void_type_node, ptr_type_node,
10427 ptr_type_node, NULL_TREE);
10428 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10429 local_define_builtin ("__cyg_profile_func_enter", ftype,
10430 BUILT_IN_PROFILE_FUNC_ENTER,
10431 "__cyg_profile_func_enter", 0);
10432 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10433 local_define_builtin ("__cyg_profile_func_exit", ftype,
10434 BUILT_IN_PROFILE_FUNC_EXIT,
10435 "__cyg_profile_func_exit", 0);
10436 }
10437
10438 /* The exception object and filter values from the runtime. The argument
10439 must be zero before exception lowering, i.e. from the front end. After
10440 exception lowering, it will be the region number for the exception
10441 landing pad. These functions are PURE instead of CONST to prevent
10442 them from being hoisted past the exception edge that will initialize
10443 its value in the landing pad. */
10444 ftype = build_function_type_list (ptr_type_node,
10445 integer_type_node, NULL_TREE);
10446 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10447 /* Only use TM_PURE if we have TM language support. */
10448 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10449 ecf_flags |= ECF_TM_PURE;
10450 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10451 "__builtin_eh_pointer", ecf_flags);
10452
10453 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10454 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10455 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10456 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10457
10458 ftype = build_function_type_list (void_type_node,
10459 integer_type_node, integer_type_node,
10460 NULL_TREE);
10461 local_define_builtin ("__builtin_eh_copy_values", ftype,
10462 BUILT_IN_EH_COPY_VALUES,
10463 "__builtin_eh_copy_values", ECF_NOTHROW);
10464
10465 /* Complex multiplication and division. These are handled as builtins
10466 rather than optabs because emit_library_call_value doesn't support
10467 complex. Further, we can do slightly better with folding these
10468 beasties if the real and imaginary parts of the arguments are separate. */
10469 {
10470 int mode;
10471
10472 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10473 {
10474 char mode_name_buf[4], *q;
10475 const char *p;
10476 enum built_in_function mcode, dcode;
10477 tree type, inner_type;
10478 const char *prefix = "__";
10479
10480 if (targetm.libfunc_gnu_prefix)
10481 prefix = "__gnu_";
10482
10483 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10484 if (type == NULL)
10485 continue;
10486 inner_type = TREE_TYPE (type);
10487
10488 ftype = build_function_type_list (type, inner_type, inner_type,
10489 inner_type, inner_type, NULL_TREE);
10490
10491 mcode = ((enum built_in_function)
10492 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10493 dcode = ((enum built_in_function)
10494 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10495
10496 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10497 *q = TOLOWER (*p);
10498 *q = '\0';
10499
10500 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10501 NULL);
10502 local_define_builtin (built_in_names[mcode], ftype, mcode,
10503 built_in_names[mcode],
10504 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10505
10506 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10507 NULL);
10508 local_define_builtin (built_in_names[dcode], ftype, dcode,
10509 built_in_names[dcode],
10510 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10511 }
10512 }
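
  /* For illustration: for SCmode with the default "__" prefix, the loop
     above registers "__mulsc3" and "__divsc3"; when targetm.libfunc_gnu_prefix
     is set they become "__gnu_mulsc3" and "__gnu_divsc3".  Each takes the
     four scalar parts (a, b, c, d) and returns the complex product or
     quotient of (a + bi) and (c + di).  */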
10513
10514 init_internal_fns ();
10515 }
10516
10517 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10518 better way.
10519
10520 If we requested a pointer to a vector, build up the pointers that
10521 we stripped off while looking for the inner type. Similarly for
10522 return values from functions.
10523
10524 The argument TYPE is the top of the chain, and BOTTOM is the
10525 new type which we will point to. */
10526
10527 tree
10528 reconstruct_complex_type (tree type, tree bottom)
10529 {
10530 tree inner, outer;
10531
10532 if (TREE_CODE (type) == POINTER_TYPE)
10533 {
10534 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10535 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10536 TYPE_REF_CAN_ALIAS_ALL (type));
10537 }
10538 else if (TREE_CODE (type) == REFERENCE_TYPE)
10539 {
10540 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10541 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10542 TYPE_REF_CAN_ALIAS_ALL (type));
10543 }
10544 else if (TREE_CODE (type) == ARRAY_TYPE)
10545 {
10546 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10547 outer = build_array_type (inner, TYPE_DOMAIN (type));
10548 }
10549 else if (TREE_CODE (type) == FUNCTION_TYPE)
10550 {
10551 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10552 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10553 }
10554 else if (TREE_CODE (type) == METHOD_TYPE)
10555 {
10556 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10557 /* The build_method_type_directly() routine prepends 'this' to the
10558 argument list, so we must compensate by getting rid of it. */
10559 outer
10560 = build_method_type_directly
10561 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10562 inner,
10563 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10564 }
10565 else if (TREE_CODE (type) == OFFSET_TYPE)
10566 {
10567 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10568 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10569 }
10570 else
10571 return bottom;
10572
10573 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10574 TYPE_QUALS (type));
10575 }
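
/* For illustration: given TYPE == float * and BOTTOM == a V4SF vector type,
   reconstruct_complex_type rebuilds the stripped pointer layer around BOTTOM
   and returns a pointer-to-V4SF, preserving the original pointer mode as
   well as the qualifiers and attributes of TYPE.  */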
10576
10577 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10578 the inner type. */
10579 tree
10580 build_vector_type_for_mode (tree innertype, machine_mode mode)
10581 {
10582 int nunits;
10583
10584 switch (GET_MODE_CLASS (mode))
10585 {
10586 case MODE_VECTOR_INT:
10587 case MODE_VECTOR_FLOAT:
10588 case MODE_VECTOR_FRACT:
10589 case MODE_VECTOR_UFRACT:
10590 case MODE_VECTOR_ACCUM:
10591 case MODE_VECTOR_UACCUM:
10592 nunits = GET_MODE_NUNITS (mode);
10593 break;
10594
10595 case MODE_INT:
10596 /* Check that there are no leftover bits. */
10597 gcc_assert (GET_MODE_BITSIZE (mode)
10598 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10599
10600 nunits = GET_MODE_BITSIZE (mode)
10601 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10602 break;
10603
10604 default:
10605 gcc_unreachable ();
10606 }
10607
10608 return make_vector_type (innertype, nunits, mode);
10609 }
10610
10611 /* Similarly, but takes the inner type and number of units, which must be
10612 a power of two. */
10613
10614 tree
10615 build_vector_type (tree innertype, int nunits)
10616 {
10617 return make_vector_type (innertype, nunits, VOIDmode);
10618 }
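
/* For illustration (vector modes are target-dependent):

     build_vector_type (intSI_type_node, 4)

   yields a 4 x SImode vector, typically with mode V4SImode, while

     build_vector_type_for_mode (intQI_type_node, V16QImode)

   derives the element count (16) from the mode itself.  */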
10619
10620 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10621
10622 tree
10623 build_opaque_vector_type (tree innertype, int nunits)
10624 {
10625 tree t = make_vector_type (innertype, nunits, VOIDmode);
10626 tree cand;
10627 /* We always build the non-opaque variant before the opaque one,
10628 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10629 cand = TYPE_NEXT_VARIANT (t);
10630 if (cand
10631 && TYPE_VECTOR_OPAQUE (cand)
10632 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10633 return cand;
10634 /* Otherwise build a variant type and make sure to queue it after
10635 the non-opaque type. */
10636 cand = build_distinct_type_copy (t);
10637 TYPE_VECTOR_OPAQUE (cand) = true;
10638 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10639 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10640 TYPE_NEXT_VARIANT (t) = cand;
10641 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10642 return cand;
10643 }
10644
10645
10646 /* Given an initializer INIT, return TRUE if INIT is zero or some
10647 aggregate of zeros. Otherwise return FALSE. */
10648 bool
10649 initializer_zerop (const_tree init)
10650 {
10651 tree elt;
10652
10653 STRIP_NOPS (init);
10654
10655 switch (TREE_CODE (init))
10656 {
10657 case INTEGER_CST:
10658 return integer_zerop (init);
10659
10660 case REAL_CST:
10661 /* ??? Note that this is not correct for C4X float formats. There,
10662 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10663 negative exponent. */
10664 return real_zerop (init)
10665 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10666
10667 case FIXED_CST:
10668 return fixed_zerop (init);
10669
10670 case COMPLEX_CST:
10671 return integer_zerop (init)
10672 || (real_zerop (init)
10673 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10674 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10675
10676 case VECTOR_CST:
10677 {
10678 unsigned i;
10679 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10680 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10681 return false;
10682 return true;
10683 }
10684
10685 case CONSTRUCTOR:
10686 {
10687 unsigned HOST_WIDE_INT idx;
10688
10689 if (TREE_CLOBBER_P (init))
10690 return false;
10691 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10692 if (!initializer_zerop (elt))
10693 return false;
10694 return true;
10695 }
10696
10697 case STRING_CST:
10698 {
10699 int i;
10700
10701 /* We need to loop through all elements to handle cases like
10702 "\0" and "\0foobar". */
10703 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10704 if (TREE_STRING_POINTER (init)[i] != '\0')
10705 return false;
10706
10707 return true;
10708 }
10709
10710 default:
10711 return false;
10712 }
10713 }
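
/* For illustration: initializer_zerop is true for 0, (float) 0.0, the
   string "\0\0\0" and an aggregate CONSTRUCTOR whose elements are all
   themselves zero initializers; it is false for -0.0 (because of the
   REAL_VALUE_MINUS_ZERO checks above) and for clobbers.  */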
10714
10715 /* Check if vector VEC consists of all equal elements and that the
10716 number of elements corresponds to the type of VEC. The function
10717 returns the first element of the vector, or NULL_TREE if the
10718 vector is not uniform. */
10719 tree
10720 uniform_vector_p (const_tree vec)
10721 {
10722 tree first, t;
10723 unsigned i;
10724
10725 if (vec == NULL_TREE)
10726 return NULL_TREE;
10727
10728 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10729
10730 if (TREE_CODE (vec) == VECTOR_CST)
10731 {
10732 first = VECTOR_CST_ELT (vec, 0);
10733 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10734 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10735 return NULL_TREE;
10736
10737 return first;
10738 }
10739
10740 else if (TREE_CODE (vec) == CONSTRUCTOR)
10741 {
10742 first = error_mark_node;
10743
10744 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10745 {
10746 if (i == 0)
10747 {
10748 first = t;
10749 continue;
10750 }
10751 if (!operand_equal_p (first, t, 0))
10752 return NULL_TREE;
10753 }
10754 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10755 return NULL_TREE;
10756
10757 return first;
10758 }
10759
10760 return NULL_TREE;
10761 }
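
/* For illustration: for the VECTOR_CST { 7, 7, 7, 7 } uniform_vector_p
   returns the element 7; for { 1, 2, 3, 4 }, or for a CONSTRUCTOR that does
   not cover every subpart of the vector type, it returns NULL_TREE.  */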
10762
10763 /* Build an empty statement at location LOC. */
10764
10765 tree
10766 build_empty_stmt (location_t loc)
10767 {
10768 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10769 SET_EXPR_LOCATION (t, loc);
10770 return t;
10771 }
10772
10773
10774 /* Build an OpenMP clause with code CODE. LOC is the location of the
10775 clause. */
10776
10777 tree
10778 build_omp_clause (location_t loc, enum omp_clause_code code)
10779 {
10780 tree t;
10781 int size, length;
10782
10783 length = omp_clause_num_ops[code];
10784 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10785
10786 record_node_allocation_statistics (OMP_CLAUSE, size);
10787
10788 t = (tree) ggc_internal_alloc (size);
10789 memset (t, 0, size);
10790 TREE_SET_CODE (t, OMP_CLAUSE);
10791 OMP_CLAUSE_SET_CODE (t, code);
10792 OMP_CLAUSE_LOCATION (t) = loc;
10793
10794 return t;
10795 }
10796
10797 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10798 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10799 Except for the CODE and operand count field, other storage for the
10800 object is initialized to zeros. */
10801
10802 tree
10803 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10804 {
10805 tree t;
10806 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10807
10808 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10809 gcc_assert (len >= 1);
10810
10811 record_node_allocation_statistics (code, length);
10812
10813 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10814
10815 TREE_SET_CODE (t, code);
10816
10817 /* Can't use TREE_OPERAND to store the length because if checking is
10818 enabled, it will try to check the length before we store it. :-P */
10819 t->exp.operands[0] = build_int_cst (sizetype, len);
10820
10821 return t;
10822 }
10823
10824 /* Helper function for build_call_* functions; build a CALL_EXPR with
10825 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10826 the argument slots. */
10827
10828 static tree
10829 build_call_1 (tree return_type, tree fn, int nargs)
10830 {
10831 tree t;
10832
10833 t = build_vl_exp (CALL_EXPR, nargs + 3);
10834 TREE_TYPE (t) = return_type;
10835 CALL_EXPR_FN (t) = fn;
10836 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10837
10838 return t;
10839 }
10840
10841 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10842 FN and a null static chain slot. NARGS is the number of call arguments
10843 which are specified as "..." arguments. */
10844
10845 tree
10846 build_call_nary (tree return_type, tree fn, int nargs, ...)
10847 {
10848 tree ret;
10849 va_list args;
10850 va_start (args, nargs);
10851 ret = build_call_valist (return_type, fn, nargs, args);
10852 va_end (args);
10853 return ret;
10854 }
10855
10856 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10857 FN and a null static chain slot. NARGS is the number of call arguments
10858 which are specified as a va_list ARGS. */
10859
10860 tree
10861 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10862 {
10863 tree t;
10864 int i;
10865
10866 t = build_call_1 (return_type, fn, nargs);
10867 for (i = 0; i < nargs; i++)
10868 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10869 process_call_operands (t);
10870 return t;
10871 }
10872
10873 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10874 FN and a null static chain slot. NARGS is the number of call arguments
10875 which are specified as a tree array ARGS. */
10876
10877 tree
10878 build_call_array_loc (location_t loc, tree return_type, tree fn,
10879 int nargs, const tree *args)
10880 {
10881 tree t;
10882 int i;
10883
10884 t = build_call_1 (return_type, fn, nargs);
10885 for (i = 0; i < nargs; i++)
10886 CALL_EXPR_ARG (t, i) = args[i];
10887 process_call_operands (t);
10888 SET_EXPR_LOCATION (t, loc);
10889 return t;
10890 }
10891
10892 /* Like build_call_array, but takes a vec. */
10893
10894 tree
10895 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10896 {
10897 tree ret, t;
10898 unsigned int ix;
10899
10900 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10901 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10902 CALL_EXPR_ARG (ret, ix) = t;
10903 process_call_operands (ret);
10904 return ret;
10905 }
10906
10907 /* Conveniently construct a function call expression. FNDECL names the
10908 function to be called and N arguments are passed in the array
10909 ARGARRAY. */
10910
10911 tree
10912 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10913 {
10914 tree fntype = TREE_TYPE (fndecl);
10915 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10916
10917 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10918 }
10919
10920 /* Conveniently construct a function call expression. FNDECL names the
10921 function to be called and the arguments are passed in the vector
10922 VEC. */
10923
10924 tree
10925 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10926 {
10927 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10928 vec_safe_address (vec));
10929 }
10930
10931
10932 /* Conveniently construct a function call expression. FNDECL names the
10933 function to be called, N is the number of arguments, and the "..."
10934 parameters are the argument expressions. */
10935
10936 tree
10937 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10938 {
10939 va_list ap;
10940 tree *argarray = XALLOCAVEC (tree, n);
10941 int i;
10942
10943 va_start (ap, n);
10944 for (i = 0; i < n; i++)
10945 argarray[i] = va_arg (ap, tree);
10946 va_end (ap);
10947 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10948 }
10949
10950 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10951 varargs macros aren't supported by all bootstrap compilers. */
10952
10953 tree
10954 build_call_expr (tree fndecl, int n, ...)
10955 {
10956 va_list ap;
10957 tree *argarray = XALLOCAVEC (tree, n);
10958 int i;
10959
10960 va_start (ap, n);
10961 for (i = 0; i < n; i++)
10962 argarray[i] = va_arg (ap, tree);
10963 va_end (ap);
10964 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10965 }
10966
10967 /* Build an internal call expression. This is just like CALL_EXPR, except
10968 its CALL_EXPR_FN is NULL. It will get gimplified later into an
10969 ordinary internal function call. */
10970
10971 tree
10972 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10973 tree type, int n, ...)
10974 {
10975 va_list ap;
10976 int i;
10977
10978 tree fn = build_call_1 (type, NULL_TREE, n);
10979 va_start (ap, n);
10980 for (i = 0; i < n; i++)
10981 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10982 va_end (ap);
10983 SET_EXPR_LOCATION (fn, loc);
10984 CALL_EXPR_IFN (fn) = ifn;
10985 return fn;
10986 }
10987
10988 /* Create a new constant string literal and return a char* pointer to it.
10989 The STRING_CST value is the LEN characters at STR. */
10990 tree
10991 build_string_literal (int len, const char *str)
10992 {
10993 tree t, elem, index, type;
10994
10995 t = build_string (len, str);
10996 elem = build_type_variant (char_type_node, 1, 0);
10997 index = build_index_type (size_int (len - 1));
10998 type = build_array_type (elem, index);
10999 TREE_TYPE (t) = type;
11000 TREE_CONSTANT (t) = 1;
11001 TREE_READONLY (t) = 1;
11002 TREE_STATIC (t) = 1;
11003
11004 type = build_pointer_type (elem);
11005 t = build1 (ADDR_EXPR, type,
11006 build4 (ARRAY_REF, elem,
11007 t, integer_zero_node, NULL_TREE, NULL_TREE));
11008 return t;
11009 }
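
/* For illustration: build_string_literal (sizeof "hi", "hi") builds a
   STRING_CST of type const char[3] (LEN counts the trailing NUL in this
   example) and returns &str[0] as an ADDR_EXPR of type const char *, ready
   to be passed as an argument to a builtin such as printf.  */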
11010
11011
11012
11013 /* Return true if T (assumed to be a DECL) must be assigned a memory
11014 location. */
11015
11016 bool
11017 needs_to_live_in_memory (const_tree t)
11018 {
11019 return (TREE_ADDRESSABLE (t)
11020 || is_global_var (t)
11021 || (TREE_CODE (t) == RESULT_DECL
11022 && !DECL_BY_REFERENCE (t)
11023 && aggregate_value_p (t, current_function_decl)));
11024 }
11025
11026 /* Return the value of the integer constant X, sign-extended. */
11027
11028 HOST_WIDE_INT
11029 int_cst_value (const_tree x)
11030 {
11031 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11032 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11033
11034 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11035 gcc_assert (cst_and_fits_in_hwi (x));
11036
11037 if (bits < HOST_BITS_PER_WIDE_INT)
11038 {
11039 bool negative = ((val >> (bits - 1)) & 1) != 0;
11040 if (negative)
11041 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11042 else
11043 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11044 }
11045
11046 return val;
11047 }
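
/* For illustration: for a constant of a 16-bit type whose low bits are
   0xffff, the code above sees bit 15 set and widens the value to -1
   (an all-ones HOST_WIDE_INT); with low bits 0x7fff the sign bit is clear
   and the result is simply 0x7fff.  The extension is driven by
   TYPE_PRECISION alone, regardless of TYPE_UNSIGNED.  */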
11048
11049 /* If TYPE is an integral or pointer type, return an integer type with
11050 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11051 if TYPE is already an integer type of signedness UNSIGNEDP. */
11052
11053 tree
11054 signed_or_unsigned_type_for (int unsignedp, tree type)
11055 {
11056 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11057 return type;
11058
11059 if (TREE_CODE (type) == VECTOR_TYPE)
11060 {
11061 tree inner = TREE_TYPE (type);
11062 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11063 if (!inner2)
11064 return NULL_TREE;
11065 if (inner == inner2)
11066 return type;
11067 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11068 }
11069
11070 if (!INTEGRAL_TYPE_P (type)
11071 && !POINTER_TYPE_P (type)
11072 && TREE_CODE (type) != OFFSET_TYPE)
11073 return NULL_TREE;
11074
11075 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11076 }
11077
11078 /* If TYPE is an integral or pointer type, return an integer type with
11079 the same precision which is unsigned, or itself if TYPE is already an
11080 unsigned integer type. */
11081
11082 tree
11083 unsigned_type_for (tree type)
11084 {
11085 return signed_or_unsigned_type_for (1, type);
11086 }
11087
11088 /* If TYPE is an integral or pointer type, return an integer type with
11089 the same precision which is signed, or itself if TYPE is already a
11090 signed integer type. */
11091
11092 tree
11093 signed_type_for (tree type)
11094 {
11095 return signed_or_unsigned_type_for (0, type);
11096 }
11097
11098 /* If TYPE is a vector type, return a signed integer vector type with the
11099 same width and number of subparts. Otherwise return boolean_type_node. */
11100
11101 tree
11102 truth_type_for (tree type)
11103 {
11104 if (TREE_CODE (type) == VECTOR_TYPE)
11105 {
11106 tree elem = lang_hooks.types.type_for_size
11107 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
11108 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
11109 }
11110 else
11111 return boolean_type_node;
11112 }
11113
11114 /* Returns the largest value obtainable by casting something in INNER type to
11115 OUTER type. */
11116
11117 tree
11118 upper_bound_in_type (tree outer, tree inner)
11119 {
11120 unsigned int det = 0;
11121 unsigned oprec = TYPE_PRECISION (outer);
11122 unsigned iprec = TYPE_PRECISION (inner);
11123 unsigned prec;
11124
11125 /* Compute a unique number for every combination. */
11126 det |= (oprec > iprec) ? 4 : 0;
11127 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11128 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11129
11130 /* Determine the exponent to use. */
11131 switch (det)
11132 {
11133 case 0:
11134 case 1:
11135 /* oprec <= iprec, outer: signed, inner: don't care. */
11136 prec = oprec - 1;
11137 break;
11138 case 2:
11139 case 3:
11140 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11141 prec = oprec;
11142 break;
11143 case 4:
11144 /* oprec > iprec, outer: signed, inner: signed. */
11145 prec = iprec - 1;
11146 break;
11147 case 5:
11148 /* oprec > iprec, outer: signed, inner: unsigned. */
11149 prec = iprec;
11150 break;
11151 case 6:
11152 /* oprec > iprec, outer: unsigned, inner: signed. */
11153 prec = oprec;
11154 break;
11155 case 7:
11156 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11157 prec = iprec;
11158 break;
11159 default:
11160 gcc_unreachable ();
11161 }
11162
11163 return wide_int_to_tree (outer,
11164 wi::mask (prec, false, TYPE_PRECISION (outer)));
11165 }
11166
11167 /* Returns the smallest value obtainable by casting something in INNER type to
11168 OUTER type. */
11169
11170 tree
11171 lower_bound_in_type (tree outer, tree inner)
11172 {
11173 unsigned oprec = TYPE_PRECISION (outer);
11174 unsigned iprec = TYPE_PRECISION (inner);
11175
11176 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11177 and obtain 0. */
11178 if (TYPE_UNSIGNED (outer)
11179 /* If we are widening something of an unsigned type, OUTER type
11180 contains all values of INNER type. In particular, both INNER
11181 and OUTER types have zero in common. */
11182 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11183 return build_int_cst (outer, 0);
11184 else
11185 {
11186 /* If we are widening a signed type to another signed type, we
11187 want to obtain -2^(iprec-1). If we are keeping the
11188 precision or narrowing to a signed type, we want to obtain
11189 -2^(oprec-1). */
11190 unsigned prec = oprec > iprec ? iprec : oprec;
11191 return wide_int_to_tree (outer,
11192 wi::mask (prec - 1, true,
11193 TYPE_PRECISION (outer)));
11194 }
11195 }
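/* The same rule in stand-alone form, assuming two's complement and
   precisions below 64 bits; demo_lower_bound is a hypothetical name used
   only in this disabled illustration.  */
#if 0
#include <stdint.h>

static int64_t
demo_lower_bound (unsigned oprec, int outer_unsigned,
                  unsigned iprec, int inner_unsigned)
{
  /* An unsigned OUTER, or widening from an unsigned INNER, can always
     reach zero and nothing smaller.  */
  if (outer_unsigned || (oprec > iprec && inner_unsigned))
    return 0;
  /* Otherwise the minimum is -2^(prec-1) for the narrower precision.  */
  unsigned prec = oprec > iprec ? iprec : oprec;
  return -((int64_t) 1 << (prec - 1));
}

/* demo_lower_bound (32, 0, 8, 0) == -128: widening int8_t to int32_t.
   demo_lower_bound (8, 0, 32, 0) == -128: narrowing int32_t to int8_t.  */
#endif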
11196
11197 /* Return nonzero if two operands that are suitable for PHI nodes are
11198 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11199 SSA_NAME or invariant. Note that this is strictly an optimization.
11200 That is, callers of this function can directly call operand_equal_p
11201 and get the same result, only slower. */
11202
11203 int
11204 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11205 {
11206 if (arg0 == arg1)
11207 return 1;
11208 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11209 return 0;
11210 return operand_equal_p (arg0, arg1, 0);
11211 }
11212
11213 /* Returns the number of zeros at the end of the binary representation of X. */
11214
11215 tree
11216 num_ending_zeros (const_tree x)
11217 {
11218 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11219 }
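/* Rough stand-alone equivalent for a plain integer, assuming a nonzero
   argument (the builtin is undefined for zero); demo_num_ending_zeros is a
   hypothetical name in this disabled illustration.  */
#if 0
static int
demo_num_ending_zeros (unsigned long long x)
{
  return __builtin_ctzll (x);   /* e.g. 40 == 0b101000 -> 3 */
}
#endif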
11220
11221
11222 #define WALK_SUBTREE(NODE) \
11223 do \
11224 { \
11225 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11226 if (result) \
11227 return result; \
11228 } \
11229 while (0)
11230
11231 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11232 to be walked whenever a type is seen in the tree. The rest of the operands
11233 and the return value are as for walk_tree. */
11234
11235 static tree
11236 walk_type_fields (tree type, walk_tree_fn func, void *data,
11237 hash_set<tree> *pset, walk_tree_lh lh)
11238 {
11239 tree result = NULL_TREE;
11240
11241 switch (TREE_CODE (type))
11242 {
11243 case POINTER_TYPE:
11244 case REFERENCE_TYPE:
11245 case VECTOR_TYPE:
11246 /* We have to worry about mutually recursive pointers. These can't
11247 be written in C. They can in Ada. It's pathological, but
11248 there's an ACATS test (c38102a) that checks it. Deal with this
11249 by checking if we're pointing to another pointer, that one
11250 points to another pointer, that one does too, and we have no htab.
11251 If so, get a hash table. We check three levels deep to avoid
11252 the cost of the hash table if we don't need one. */
11253 if (POINTER_TYPE_P (TREE_TYPE (type))
11254 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11255 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11256 && !pset)
11257 {
11258 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11259 func, data);
11260 if (result)
11261 return result;
11262
11263 break;
11264 }
11265
11266 /* ... fall through ... */
11267
11268 case COMPLEX_TYPE:
11269 WALK_SUBTREE (TREE_TYPE (type));
11270 break;
11271
11272 case METHOD_TYPE:
11273 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11274
11275 /* Fall through. */
11276
11277 case FUNCTION_TYPE:
11278 WALK_SUBTREE (TREE_TYPE (type));
11279 {
11280 tree arg;
11281
11282 /* We never want to walk into default arguments. */
11283 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11284 WALK_SUBTREE (TREE_VALUE (arg));
11285 }
11286 break;
11287
11288 case ARRAY_TYPE:
11289 /* Don't follow this node's type if it is a pointer, for fear that
11290 we'll have infinite recursion. If we have a PSET, then we
11291 need not fear. */
11292 if (pset
11293 || (!POINTER_TYPE_P (TREE_TYPE (type))
11294 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11295 WALK_SUBTREE (TREE_TYPE (type));
11296 WALK_SUBTREE (TYPE_DOMAIN (type));
11297 break;
11298
11299 case OFFSET_TYPE:
11300 WALK_SUBTREE (TREE_TYPE (type));
11301 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11302 break;
11303
11304 default:
11305 break;
11306 }
11307
11308 return NULL_TREE;
11309 }
11310
11311 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11312 called with the DATA and the address of each sub-tree. If FUNC returns a
11313 non-NULL value, the traversal is stopped, and the value returned by FUNC
11314 is returned. If PSET is non-NULL it is used to record the nodes visited,
11315 and to avoid visiting a node more than once. */
11316
11317 tree
11318 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11319 hash_set<tree> *pset, walk_tree_lh lh)
11320 {
11321 enum tree_code code;
11322 int walk_subtrees;
11323 tree result;
11324
11325 #define WALK_SUBTREE_TAIL(NODE) \
11326 do \
11327 { \
11328 tp = & (NODE); \
11329 goto tail_recurse; \
11330 } \
11331 while (0)
11332
11333 tail_recurse:
11334 /* Skip empty subtrees. */
11335 if (!*tp)
11336 return NULL_TREE;
11337
11338 /* Don't walk the same tree twice, if the user has requested
11339 that we avoid doing so. */
11340 if (pset && pset->add (*tp))
11341 return NULL_TREE;
11342
11343 /* Call the function. */
11344 walk_subtrees = 1;
11345 result = (*func) (tp, &walk_subtrees, data);
11346
11347 /* If we found something, return it. */
11348 if (result)
11349 return result;
11350
11351 code = TREE_CODE (*tp);
11352
11353 /* Even if we didn't, FUNC may have decided that there was nothing
11354 interesting below this point in the tree. */
11355 if (!walk_subtrees)
11356 {
11357 /* But we still need to check our siblings. */
11358 if (code == TREE_LIST)
11359 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11360 else if (code == OMP_CLAUSE)
11361 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11362 else
11363 return NULL_TREE;
11364 }
11365
11366 if (lh)
11367 {
11368 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11369 if (result || !walk_subtrees)
11370 return result;
11371 }
11372
11373 switch (code)
11374 {
11375 case ERROR_MARK:
11376 case IDENTIFIER_NODE:
11377 case INTEGER_CST:
11378 case REAL_CST:
11379 case FIXED_CST:
11380 case VECTOR_CST:
11381 case STRING_CST:
11382 case BLOCK:
11383 case PLACEHOLDER_EXPR:
11384 case SSA_NAME:
11385 case FIELD_DECL:
11386 case RESULT_DECL:
11387 /* None of these have subtrees other than those already walked
11388 above. */
11389 break;
11390
11391 case TREE_LIST:
11392 WALK_SUBTREE (TREE_VALUE (*tp));
11393 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11394 break;
11395
11396 case TREE_VEC:
11397 {
11398 int len = TREE_VEC_LENGTH (*tp);
11399
11400 if (len == 0)
11401 break;
11402
11403 /* Walk all elements but the first. */
11404 while (--len)
11405 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11406
11407 /* Now walk the first one as a tail call. */
11408 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11409 }
11410
11411 case COMPLEX_CST:
11412 WALK_SUBTREE (TREE_REALPART (*tp));
11413 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11414
11415 case CONSTRUCTOR:
11416 {
11417 unsigned HOST_WIDE_INT idx;
11418 constructor_elt *ce;
11419
11420 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11421 idx++)
11422 WALK_SUBTREE (ce->value);
11423 }
11424 break;
11425
11426 case SAVE_EXPR:
11427 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11428
11429 case BIND_EXPR:
11430 {
11431 tree decl;
11432 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11433 {
11434 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11435 into declarations that are just mentioned, rather than
11436 declared; they don't really belong to this part of the tree.
11437 And, we can see cycles: the initializer for a declaration
11438 can refer to the declaration itself. */
11439 WALK_SUBTREE (DECL_INITIAL (decl));
11440 WALK_SUBTREE (DECL_SIZE (decl));
11441 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11442 }
11443 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11444 }
11445
11446 case STATEMENT_LIST:
11447 {
11448 tree_stmt_iterator i;
11449 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11450 WALK_SUBTREE (*tsi_stmt_ptr (i));
11451 }
11452 break;
11453
11454 case OMP_CLAUSE:
11455 switch (OMP_CLAUSE_CODE (*tp))
11456 {
11457 case OMP_CLAUSE_GANG:
11458 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11459 /* FALLTHRU */
11460
11461 case OMP_CLAUSE_DEVICE_RESIDENT:
11462 case OMP_CLAUSE_USE_DEVICE:
11463 case OMP_CLAUSE_ASYNC:
11464 case OMP_CLAUSE_WAIT:
11465 case OMP_CLAUSE_WORKER:
11466 case OMP_CLAUSE_VECTOR:
11467 case OMP_CLAUSE_NUM_GANGS:
11468 case OMP_CLAUSE_NUM_WORKERS:
11469 case OMP_CLAUSE_VECTOR_LENGTH:
11470 case OMP_CLAUSE_PRIVATE:
11471 case OMP_CLAUSE_SHARED:
11472 case OMP_CLAUSE_FIRSTPRIVATE:
11473 case OMP_CLAUSE_COPYIN:
11474 case OMP_CLAUSE_COPYPRIVATE:
11475 case OMP_CLAUSE_FINAL:
11476 case OMP_CLAUSE_IF:
11477 case OMP_CLAUSE_NUM_THREADS:
11478 case OMP_CLAUSE_SCHEDULE:
11479 case OMP_CLAUSE_UNIFORM:
11480 case OMP_CLAUSE_DEPEND:
11481 case OMP_CLAUSE_NUM_TEAMS:
11482 case OMP_CLAUSE_THREAD_LIMIT:
11483 case OMP_CLAUSE_DEVICE:
11484 case OMP_CLAUSE_DIST_SCHEDULE:
11485 case OMP_CLAUSE_SAFELEN:
11486 case OMP_CLAUSE_SIMDLEN:
11487 case OMP_CLAUSE_ORDERED:
11488 case OMP_CLAUSE_PRIORITY:
11489 case OMP_CLAUSE_GRAINSIZE:
11490 case OMP_CLAUSE_NUM_TASKS:
11491 case OMP_CLAUSE_HINT:
11492 case OMP_CLAUSE_TO_DECLARE:
11493 case OMP_CLAUSE_LINK:
11494 case OMP_CLAUSE_USE_DEVICE_PTR:
11495 case OMP_CLAUSE_IS_DEVICE_PTR:
11496 case OMP_CLAUSE__LOOPTEMP_:
11497 case OMP_CLAUSE__SIMDUID_:
11498 case OMP_CLAUSE__CILK_FOR_COUNT_:
11499 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11500 /* FALLTHRU */
11501
11502 case OMP_CLAUSE_INDEPENDENT:
11503 case OMP_CLAUSE_NOWAIT:
11504 case OMP_CLAUSE_DEFAULT:
11505 case OMP_CLAUSE_UNTIED:
11506 case OMP_CLAUSE_MERGEABLE:
11507 case OMP_CLAUSE_PROC_BIND:
11508 case OMP_CLAUSE_INBRANCH:
11509 case OMP_CLAUSE_NOTINBRANCH:
11510 case OMP_CLAUSE_FOR:
11511 case OMP_CLAUSE_PARALLEL:
11512 case OMP_CLAUSE_SECTIONS:
11513 case OMP_CLAUSE_TASKGROUP:
11514 case OMP_CLAUSE_NOGROUP:
11515 case OMP_CLAUSE_THREADS:
11516 case OMP_CLAUSE_SIMD:
11517 case OMP_CLAUSE_DEFAULTMAP:
11518 case OMP_CLAUSE_AUTO:
11519 case OMP_CLAUSE_SEQ:
11520 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11521
11522 case OMP_CLAUSE_LASTPRIVATE:
11523 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11524 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11525 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11526
11527 case OMP_CLAUSE_COLLAPSE:
11528 {
11529 int i;
11530 for (i = 0; i < 3; i++)
11531 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11532 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11533 }
11534
11535 case OMP_CLAUSE_LINEAR:
11536 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11537 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11538 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11539 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11540
11541 case OMP_CLAUSE_ALIGNED:
11542 case OMP_CLAUSE_FROM:
11543 case OMP_CLAUSE_TO:
11544 case OMP_CLAUSE_MAP:
11545 case OMP_CLAUSE__CACHE_:
11546 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11547 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11548 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11549
11550 case OMP_CLAUSE_REDUCTION:
11551 {
11552 int i;
11553 for (i = 0; i < 5; i++)
11554 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11555 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11556 }
11557
11558 default:
11559 gcc_unreachable ();
11560 }
11561 break;
11562
11563 case TARGET_EXPR:
11564 {
11565 int i, len;
11566
11567 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11568 But, we only want to walk once. */
11569 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11570 for (i = 0; i < len; ++i)
11571 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11572 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11573 }
11574
11575 case DECL_EXPR:
11576 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11577 defining. We only want to walk into these fields of a type in this
11578 case and not in the general case of a mere reference to the type.
11579
11580 The criterion is as follows: if the field can be an expression, it
11581 must be walked only here. This should be in keeping with the fields
11582 that are directly gimplified in gimplify_type_sizes in order for the
11583 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11584 variable-sized types.
11585
11586 Note that DECLs get walked as part of processing the BIND_EXPR. */
11587 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11588 {
11589 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11590 if (TREE_CODE (*type_p) == ERROR_MARK)
11591 return NULL_TREE;
11592
11593 /* Call the function for the type. See if it returns anything or
11594 doesn't want us to continue. If we are to continue, walk both
11595 the normal fields and those for the declaration case. */
11596 result = (*func) (type_p, &walk_subtrees, data);
11597 if (result || !walk_subtrees)
11598 return result;
11599
11600 /* But do not walk a pointed-to type since it may itself need to
11601 be walked in the declaration case if it isn't anonymous. */
11602 if (!POINTER_TYPE_P (*type_p))
11603 {
11604 result = walk_type_fields (*type_p, func, data, pset, lh);
11605 if (result)
11606 return result;
11607 }
11608
11609 /* If this is a record type, also walk the fields. */
11610 if (RECORD_OR_UNION_TYPE_P (*type_p))
11611 {
11612 tree field;
11613
11614 for (field = TYPE_FIELDS (*type_p); field;
11615 field = DECL_CHAIN (field))
11616 {
11617 /* We'd like to look at the type of the field, but we can
11618 easily get infinite recursion. So assume it's pointed
11619 to elsewhere in the tree. Also, ignore things that
11620 aren't fields. */
11621 if (TREE_CODE (field) != FIELD_DECL)
11622 continue;
11623
11624 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11625 WALK_SUBTREE (DECL_SIZE (field));
11626 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11627 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11628 WALK_SUBTREE (DECL_QUALIFIER (field));
11629 }
11630 }
11631
11632 /* Same for scalar types. */
11633 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11634 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11635 || TREE_CODE (*type_p) == INTEGER_TYPE
11636 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11637 || TREE_CODE (*type_p) == REAL_TYPE)
11638 {
11639 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11640 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11641 }
11642
11643 WALK_SUBTREE (TYPE_SIZE (*type_p));
11644 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11645 }
11646 /* FALLTHRU */
11647
11648 default:
11649 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11650 {
11651 int i, len;
11652
11653 /* Walk over all the sub-trees of this operand. */
11654 len = TREE_OPERAND_LENGTH (*tp);
11655
11656 /* Go through the subtrees. We need to do this in forward order so
11657 that the scope of a FOR_EXPR is handled properly. */
11658 if (len)
11659 {
11660 for (i = 0; i < len - 1; ++i)
11661 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11662 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11663 }
11664 }
11665 /* If this is a type, walk the needed fields in the type. */
11666 else if (TYPE_P (*tp))
11667 return walk_type_fields (*tp, func, data, pset, lh);
11668 break;
11669 }
11670
11671 /* We didn't find what we were looking for. */
11672 return NULL_TREE;
11673
11674 #undef WALK_SUBTREE_TAIL
11675 }
11676 #undef WALK_SUBTREE
11677
11678 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11679
11680 tree
11681 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11682 walk_tree_lh lh)
11683 {
11684 tree result;
11685
11686 hash_set<tree> pset;
11687 result = walk_tree_1 (tp, func, data, &pset, lh);
11688 return result;
11689 }
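/* A sketch of a typical walk_tree callback following the protocol described
   above: return non-NULL to stop the walk, clear *WALK_SUBTREES to skip the
   children of the current node.  The count_addr_* names are hypothetical and
   the block is disabled; it only illustrates the calling convention.  */
#if 0
static tree
count_addr_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == ADDR_EXPR)
    ++*(unsigned *) data;
  return NULL_TREE;             /* Keep walking.  */
}

static unsigned
count_addr_exprs (tree expr)
{
  unsigned count = 0;
  walk_tree_without_duplicates (&expr, count_addr_r, &count);
  return count;
}
#endif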
11690
11691
11692 tree
11693 tree_block (tree t)
11694 {
11695 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11696
11697 if (IS_EXPR_CODE_CLASS (c))
11698 return LOCATION_BLOCK (t->exp.locus);
11699 gcc_unreachable ();
11700 return NULL;
11701 }
11702
11703 void
11704 tree_set_block (tree t, tree b)
11705 {
11706 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11707
11708 if (IS_EXPR_CODE_CLASS (c))
11709 {
11710 if (b)
11711 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11712 else
11713 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11714 }
11715 else
11716 gcc_unreachable ();
11717 }
11718
11719 /* Create a nameless artificial label and put it in the current
11720 function context. The label has a location of LOC. Returns the
11721 newly created label. */
11722
11723 tree
11724 create_artificial_label (location_t loc)
11725 {
11726 tree lab = build_decl (loc,
11727 LABEL_DECL, NULL_TREE, void_type_node);
11728
11729 DECL_ARTIFICIAL (lab) = 1;
11730 DECL_IGNORED_P (lab) = 1;
11731 DECL_CONTEXT (lab) = current_function_decl;
11732 return lab;
11733 }
11734
11735 /* Given a tree, try to return a useful variable name that we can use
11736 to prefix a temporary that is being assigned the value of the tree.
11737 I.E. given <temp> = &A, return A. */
11738
11739 const char *
11740 get_name (tree t)
11741 {
11742 tree stripped_decl;
11743
11744 stripped_decl = t;
11745 STRIP_NOPS (stripped_decl);
11746 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11747 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11748 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11749 {
11750 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11751 if (!name)
11752 return NULL;
11753 return IDENTIFIER_POINTER (name);
11754 }
11755 else
11756 {
11757 switch (TREE_CODE (stripped_decl))
11758 {
11759 case ADDR_EXPR:
11760 return get_name (TREE_OPERAND (stripped_decl, 0));
11761 default:
11762 return NULL;
11763 }
11764 }
11765 }
11766
11767 /* Return true if FNTYPE has a variable argument list. */
11768
11769 bool
11770 stdarg_p (const_tree fntype)
11771 {
11772 function_args_iterator args_iter;
11773 tree n = NULL_TREE, t;
11774
11775 if (!fntype)
11776 return false;
11777
11778 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11779 {
11780 n = t;
11781 }
11782
11783 return n != NULL_TREE && n != void_type_node;
11784 }
11785
11786 /* Return true if FNTYPE has a prototype. */
11787
11788 bool
11789 prototype_p (const_tree fntype)
11790 {
11791 tree t;
11792
11793 gcc_assert (fntype != NULL_TREE);
11794
11795 t = TYPE_ARG_TYPES (fntype);
11796 return (t != NULL_TREE);
11797 }
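/* Hypothetical C declarations illustrating the situations the two predicates
   above distinguish (a sketch, kept disabled).  */
#if 0
int demo_variadic (int, ...);   /* stdarg_p: true,  prototype_p: true  */
int demo_fixed (int);           /* stdarg_p: false, prototype_p: true  */
int demo_unprototyped ();       /* TYPE_ARG_TYPES is NULL: prototype_p false */
#endif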
11798
11799 /* If BLOCK is inlined from an __attribute__((__artificial__))
11800 routine, return a pointer to the location from where it has been
11801 called. */
11802 location_t *
11803 block_nonartificial_location (tree block)
11804 {
11805 location_t *ret = NULL;
11806
11807 while (block && TREE_CODE (block) == BLOCK
11808 && BLOCK_ABSTRACT_ORIGIN (block))
11809 {
11810 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11811
11812 while (TREE_CODE (ao) == BLOCK
11813 && BLOCK_ABSTRACT_ORIGIN (ao)
11814 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11815 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11816
11817 if (TREE_CODE (ao) == FUNCTION_DECL)
11818 {
11819 /* If AO is an artificial inline, point RET to the
11820 call site locus at which it has been inlined and continue
11821 the loop, in case AO's caller is also an artificial
11822 inline. */
11823 if (DECL_DECLARED_INLINE_P (ao)
11824 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11825 ret = &BLOCK_SOURCE_LOCATION (block);
11826 else
11827 break;
11828 }
11829 else if (TREE_CODE (ao) != BLOCK)
11830 break;
11831
11832 block = BLOCK_SUPERCONTEXT (block);
11833 }
11834 return ret;
11835 }
11836
11837
11838 /* If EXP is inlined from an __attribute__((__artificial__))
11839 function, return the location of the original call expression. */
11840
11841 location_t
11842 tree_nonartificial_location (tree exp)
11843 {
11844 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11845
11846 if (loc)
11847 return *loc;
11848 else
11849 return EXPR_LOCATION (exp);
11850 }
11851
11852
11853 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11854 nodes. */
11855
11856 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11857
11858 hashval_t
11859 cl_option_hasher::hash (tree x)
11860 {
11861 const_tree const t = x;
11862 const char *p;
11863 size_t i;
11864 size_t len = 0;
11865 hashval_t hash = 0;
11866
11867 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11868 {
11869 p = (const char *)TREE_OPTIMIZATION (t);
11870 len = sizeof (struct cl_optimization);
11871 }
11872
11873 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11874 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11875
11876 else
11877 gcc_unreachable ();
11878
11879 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11880 something else. */
11881 for (i = 0; i < len; i++)
11882 if (p[i])
11883 hash = (hash << 4) ^ ((i << 2) | p[i]);
11884
11885 return hash;
11886 }
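/* The mixing loop above, isolated as a stand-alone sketch: it hashes only
   the non-zero bytes of the option blob, folding the byte position into the
   value.  demo_option_hash is a hypothetical name; the block is disabled.  */
#if 0
static unsigned
demo_option_hash (const char *p, size_t len)
{
  unsigned hash = 0;
  for (size_t i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);
  return hash;
}
#endif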
11887
11888 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11889 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11890 same kind of node. */
11891
11892 bool
11893 cl_option_hasher::equal (tree x, tree y)
11894 {
11895 const_tree const xt = x;
11896 const_tree const yt = y;
11897 const char *xp;
11898 const char *yp;
11899 size_t len;
11900
11901 if (TREE_CODE (xt) != TREE_CODE (yt))
11902 return 0;
11903
11904 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11905 {
11906 xp = (const char *)TREE_OPTIMIZATION (xt);
11907 yp = (const char *)TREE_OPTIMIZATION (yt);
11908 len = sizeof (struct cl_optimization);
11909 }
11910
11911 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11912 {
11913 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11914 TREE_TARGET_OPTION (yt));
11915 }
11916
11917 else
11918 gcc_unreachable ();
11919
11920 return (memcmp (xp, yp, len) == 0);
11921 }
11922
11923 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11924
11925 tree
11926 build_optimization_node (struct gcc_options *opts)
11927 {
11928 tree t;
11929
11930 /* Use the cache of optimization nodes. */
11931
11932 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11933 opts);
11934
11935 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11936 t = *slot;
11937 if (!t)
11938 {
11939 /* Insert this one into the hash table. */
11940 t = cl_optimization_node;
11941 *slot = t;
11942
11943 /* Make a new node for next time round. */
11944 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11945 }
11946
11947 return t;
11948 }
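/* What the cache buys us, as a usage sketch (hypothetical helper, disabled):
   structurally equal option sets come back as the same node, so later
   comparisons can use pointer equality.  */
#if 0
static void
demo_optimization_node_sharing (void)
{
  tree a = build_optimization_node (&global_options);
  tree b = build_optimization_node (&global_options);
  gcc_assert (a == b);  /* Same options, same shared node.  */
}
#endif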
11949
11950 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11951
11952 tree
11953 build_target_option_node (struct gcc_options *opts)
11954 {
11955 tree t;
11956
11957 /* Use the cache of target option nodes. */
11958
11959 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11960 opts);
11961
11962 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11963 t = *slot;
11964 if (!t)
11965 {
11966 /* Insert this one into the hash table. */
11967 t = cl_target_option_node;
11968 *slot = t;
11969
11970 /* Make a new node for next time round. */
11971 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11972 }
11973
11974 return t;
11975 }
11976
11977 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11978 so that they aren't saved during PCH writing. */
11979
11980 void
11981 prepare_target_option_nodes_for_pch (void)
11982 {
11983 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11984 for (; iter != cl_option_hash_table->end (); ++iter)
11985 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11986 TREE_TARGET_GLOBALS (*iter) = NULL;
11987 }
11988
11989 /* Determine the "ultimate origin" of a block. The block may be an inlined
11990 instance of an inlined instance of a block which is local to an inline
11991 function, so we have to trace all of the way back through the origin chain
11992 to find out what sort of node actually served as the original seed for the
11993 given block. */
11994
11995 tree
11996 block_ultimate_origin (const_tree block)
11997 {
11998 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11999
12000 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12001 we're trying to output the abstract instance of this function. */
12002 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12003 return NULL_TREE;
12004
12005 if (immediate_origin == NULL_TREE)
12006 return NULL_TREE;
12007 else
12008 {
12009 tree ret_val;
12010 tree lookahead = immediate_origin;
12011
12012 do
12013 {
12014 ret_val = lookahead;
12015 lookahead = (TREE_CODE (ret_val) == BLOCK
12016 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12017 }
12018 while (lookahead != NULL && lookahead != ret_val);
12019
12020 /* The block's abstract origin chain may not be the *ultimate* origin of
12021 the block. It could lead to a DECL that has an abstract origin set.
12022 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12023 will give us if it has one). Note that DECL's abstract origins are
12024 supposed to be the most distant ancestor (or so decl_ultimate_origin
12025 claims), so we don't need to loop following the DECL origins. */
12026 if (DECL_P (ret_val))
12027 return DECL_ORIGIN (ret_val);
12028
12029 return ret_val;
12030 }
12031 }
12032
12033 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12034 no instruction. */
12035
12036 bool
12037 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12038 {
12039 /* Use precision rather than machine mode when we can, which gives
12040 the correct answer even for submode (bit-field) types. */
12041 if ((INTEGRAL_TYPE_P (outer_type)
12042 || POINTER_TYPE_P (outer_type)
12043 || TREE_CODE (outer_type) == OFFSET_TYPE)
12044 && (INTEGRAL_TYPE_P (inner_type)
12045 || POINTER_TYPE_P (inner_type)
12046 || TREE_CODE (inner_type) == OFFSET_TYPE))
12047 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12048
12049 /* Otherwise fall back on comparing machine modes (e.g. for
12050 aggregate types, floats). */
12051 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12052 }
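/* Usage sketch (hypothetical, disabled): conversions between integer types
   of equal precision are nops regardless of signedness, widening ones are
   not.  */
#if 0
static void
demo_nop_conversion (void)
{
  /* int -> unsigned int keeps the precision, so no instruction is needed.  */
  gcc_assert (tree_nop_conversion_p (unsigned_type_node, integer_type_node));
  /* int -> long long changes the precision on common targets, so this is
     expected to be false there.  */
  gcc_assert (!tree_nop_conversion_p (long_long_integer_type_node,
                                      integer_type_node));
}
#endif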
12053
12054 /* Return true iff conversion in EXP generates no instruction. Mark
12055 it inline so that we fully inline into the stripping functions even
12056 though we have two uses of this function. */
12057
12058 static inline bool
12059 tree_nop_conversion (const_tree exp)
12060 {
12061 tree outer_type, inner_type;
12062
12063 if (!CONVERT_EXPR_P (exp)
12064 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12065 return false;
12066 if (TREE_OPERAND (exp, 0) == error_mark_node)
12067 return false;
12068
12069 outer_type = TREE_TYPE (exp);
12070 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12071
12072 if (!inner_type)
12073 return false;
12074
12075 return tree_nop_conversion_p (outer_type, inner_type);
12076 }
12077
12078 /* Return true iff conversion in EXP generates no instruction. Don't
12079 consider conversions changing the signedness. */
12080
12081 static bool
12082 tree_sign_nop_conversion (const_tree exp)
12083 {
12084 tree outer_type, inner_type;
12085
12086 if (!tree_nop_conversion (exp))
12087 return false;
12088
12089 outer_type = TREE_TYPE (exp);
12090 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12091
12092 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12093 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12094 }
12095
12096 /* Strip conversions from EXP according to tree_nop_conversion and
12097 return the resulting expression. */
12098
12099 tree
12100 tree_strip_nop_conversions (tree exp)
12101 {
12102 while (tree_nop_conversion (exp))
12103 exp = TREE_OPERAND (exp, 0);
12104 return exp;
12105 }
12106
12107 /* Strip conversions from EXP according to tree_sign_nop_conversion
12108 and return the resulting expression. */
12109
12110 tree
12111 tree_strip_sign_nop_conversions (tree exp)
12112 {
12113 while (tree_sign_nop_conversion (exp))
12114 exp = TREE_OPERAND (exp, 0);
12115 return exp;
12116 }
12117
12118 /* Avoid any floating point extensions from EXP. */
12119 tree
12120 strip_float_extensions (tree exp)
12121 {
12122 tree sub, expt, subt;
12123
12124 /* For floating point constant look up the narrowest type that can hold
12125 it properly and handle it like (type)(narrowest_type)constant.
12126 This way we can optimize for instance a=a*2.0 where "a" is float
12127 but 2.0 is double constant. */
12128 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12129 {
12130 REAL_VALUE_TYPE orig;
12131 tree type = NULL;
12132
12133 orig = TREE_REAL_CST (exp);
12134 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12135 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12136 type = float_type_node;
12137 else if (TYPE_PRECISION (TREE_TYPE (exp))
12138 > TYPE_PRECISION (double_type_node)
12139 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12140 type = double_type_node;
12141 if (type)
12142 return build_real_truncate (type, orig);
12143 }
12144
12145 if (!CONVERT_EXPR_P (exp))
12146 return exp;
12147
12148 sub = TREE_OPERAND (exp, 0);
12149 subt = TREE_TYPE (sub);
12150 expt = TREE_TYPE (exp);
12151
12152 if (!FLOAT_TYPE_P (subt))
12153 return exp;
12154
12155 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12156 return exp;
12157
12158 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12159 return exp;
12160
12161 return strip_float_extensions (sub);
12162 }
12163
12164 /* Strip out all handled components that produce invariant
12165 offsets. */
12166
12167 const_tree
12168 strip_invariant_refs (const_tree op)
12169 {
12170 while (handled_component_p (op))
12171 {
12172 switch (TREE_CODE (op))
12173 {
12174 case ARRAY_REF:
12175 case ARRAY_RANGE_REF:
12176 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12177 || TREE_OPERAND (op, 2) != NULL_TREE
12178 || TREE_OPERAND (op, 3) != NULL_TREE)
12179 return NULL;
12180 break;
12181
12182 case COMPONENT_REF:
12183 if (TREE_OPERAND (op, 2) != NULL_TREE)
12184 return NULL;
12185 break;
12186
12187 default:;
12188 }
12189 op = TREE_OPERAND (op, 0);
12190 }
12191
12192 return op;
12193 }
12194
12195 static GTY(()) tree gcc_eh_personality_decl;
12196
12197 /* Return the GCC personality function decl. */
12198
12199 tree
12200 lhd_gcc_personality (void)
12201 {
12202 if (!gcc_eh_personality_decl)
12203 gcc_eh_personality_decl = build_personality_function ("gcc");
12204 return gcc_eh_personality_decl;
12205 }
12206
12207 /* TARGET is a call target of GIMPLE call statement
12208 (obtained by gimple_call_fn). Return true if it is
12209 an OBJ_TYPE_REF representing a virtual call to a C++ method.
12210 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12211 through a cast where middle-end devirtualization machinery
12212 can't apply.) */
12213
12214 bool
12215 virtual_method_call_p (const_tree target)
12216 {
12217 if (TREE_CODE (target) != OBJ_TYPE_REF)
12218 return false;
12219 tree t = TREE_TYPE (target);
12220 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12221 t = TREE_TYPE (t);
12222 if (TREE_CODE (t) == FUNCTION_TYPE)
12223 return false;
12224 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12225 /* If we do not have BINFO associated, it means that type was built
12226 without devirtualization enabled. Do not consider this a virtual
12227 call. */
12228 if (!TYPE_BINFO (obj_type_ref_class (target)))
12229 return false;
12230 return true;
12231 }
12232
12233 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12234
12235 tree
12236 obj_type_ref_class (const_tree ref)
12237 {
12238 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12239 ref = TREE_TYPE (ref);
12240 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12241 ref = TREE_TYPE (ref);
12242 /* We look for the type THIS points to. ObjC also builds
12243 OBJ_TYPE_REF for non-method calls; their first parameter
12244 ID, however, also corresponds to the class type. */
12245 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12246 || TREE_CODE (ref) == FUNCTION_TYPE);
12247 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12248 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12249 return TREE_TYPE (ref);
12250 }
12251
12252 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12253
12254 static tree
12255 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12256 {
12257 unsigned int i;
12258 tree base_binfo, b;
12259
12260 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12261 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12262 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12263 return base_binfo;
12264 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12265 return b;
12266 return NULL;
12267 }
12268
12269 /* Try to find a base info of BINFO that would have its field decl at offset
12270 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12271 found, return it, otherwise return NULL_TREE. */
12272
12273 tree
12274 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12275 {
12276 tree type = BINFO_TYPE (binfo);
12277
12278 while (true)
12279 {
12280 HOST_WIDE_INT pos, size;
12281 tree fld;
12282 int i;
12283
12284 if (types_same_for_odr (type, expected_type))
12285 return binfo;
12286 if (offset < 0)
12287 return NULL_TREE;
12288
12289 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12290 {
12291 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12292 continue;
12293
12294 pos = int_bit_position (fld);
12295 size = tree_to_uhwi (DECL_SIZE (fld));
12296 if (pos <= offset && (pos + size) > offset)
12297 break;
12298 }
12299 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12300 return NULL_TREE;
12301
12302 /* Offset 0 indicates the primary base, whose vtable contents are
12303 represented in the binfo for the derived class. */
12304 else if (offset != 0)
12305 {
12306 tree found_binfo = NULL, base_binfo;
12307 /* Offsets in BINFO are in bytes relative to the whole structure
12308 while POS is in bits relative to the containing field. */
12309 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12310 / BITS_PER_UNIT);
12311
12312 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12313 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12314 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12315 {
12316 found_binfo = base_binfo;
12317 break;
12318 }
12319 if (found_binfo)
12320 binfo = found_binfo;
12321 else
12322 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12323 binfo_offset);
12324 }
12325
12326 type = TREE_TYPE (fld);
12327 offset -= pos;
12328 }
12329 }
12330
12331 /* Returns true if X is a typedef decl. */
12332
12333 bool
12334 is_typedef_decl (const_tree x)
12335 {
12336 return (x && TREE_CODE (x) == TYPE_DECL
12337 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12338 }
12339
12340 /* Returns true iff TYPE is a type variant created for a typedef. */
12341
12342 bool
12343 typedef_variant_p (const_tree type)
12344 {
12345 return is_typedef_decl (TYPE_NAME (type));
12346 }
12347
12348 /* Warn about a use of an identifier which was marked deprecated. */
12349 void
12350 warn_deprecated_use (tree node, tree attr)
12351 {
12352 const char *msg;
12353
12354 if (node == 0 || !warn_deprecated_decl)
12355 return;
12356
12357 if (!attr)
12358 {
12359 if (DECL_P (node))
12360 attr = DECL_ATTRIBUTES (node);
12361 else if (TYPE_P (node))
12362 {
12363 tree decl = TYPE_STUB_DECL (node);
12364 if (decl)
12365 attr = lookup_attribute ("deprecated",
12366 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12367 }
12368 }
12369
12370 if (attr)
12371 attr = lookup_attribute ("deprecated", attr);
12372
12373 if (attr)
12374 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12375 else
12376 msg = NULL;
12377
12378 bool w;
12379 if (DECL_P (node))
12380 {
12381 if (msg)
12382 w = warning (OPT_Wdeprecated_declarations,
12383 "%qD is deprecated: %s", node, msg);
12384 else
12385 w = warning (OPT_Wdeprecated_declarations,
12386 "%qD is deprecated", node);
12387 if (w)
12388 inform (DECL_SOURCE_LOCATION (node), "declared here");
12389 }
12390 else if (TYPE_P (node))
12391 {
12392 tree what = NULL_TREE;
12393 tree decl = TYPE_STUB_DECL (node);
12394
12395 if (TYPE_NAME (node))
12396 {
12397 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12398 what = TYPE_NAME (node);
12399 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12400 && DECL_NAME (TYPE_NAME (node)))
12401 what = DECL_NAME (TYPE_NAME (node));
12402 }
12403
12404 if (decl)
12405 {
12406 if (what)
12407 {
12408 if (msg)
12409 w = warning (OPT_Wdeprecated_declarations,
12410 "%qE is deprecated: %s", what, msg);
12411 else
12412 w = warning (OPT_Wdeprecated_declarations,
12413 "%qE is deprecated", what);
12414 }
12415 else
12416 {
12417 if (msg)
12418 w = warning (OPT_Wdeprecated_declarations,
12419 "type is deprecated: %s", msg);
12420 else
12421 w = warning (OPT_Wdeprecated_declarations,
12422 "type is deprecated");
12423 }
12424 if (w)
12425 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12426 }
12427 else
12428 {
12429 if (what)
12430 {
12431 if (msg)
12432 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12433 what, msg);
12434 else
12435 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12436 }
12437 else
12438 {
12439 if (msg)
12440 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12441 msg);
12442 else
12443 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12444 }
12445 }
12446 }
12447 }
12448
12449 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12450 somewhere in it. */
12451
12452 bool
12453 contains_bitfld_component_ref_p (const_tree ref)
12454 {
12455 while (handled_component_p (ref))
12456 {
12457 if (TREE_CODE (ref) == COMPONENT_REF
12458 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12459 return true;
12460 ref = TREE_OPERAND (ref, 0);
12461 }
12462
12463 return false;
12464 }
12465
12466 /* Try to determine whether a TRY_CATCH expression can fall through.
12467 This is a subroutine of block_may_fallthru. */
12468
12469 static bool
12470 try_catch_may_fallthru (const_tree stmt)
12471 {
12472 tree_stmt_iterator i;
12473
12474 /* If the TRY block can fall through, the whole TRY_CATCH can
12475 fall through. */
12476 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12477 return true;
12478
12479 i = tsi_start (TREE_OPERAND (stmt, 1));
12480 switch (TREE_CODE (tsi_stmt (i)))
12481 {
12482 case CATCH_EXPR:
12483 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12484 catch expression and a body. The whole TRY_CATCH may fall
12485 through iff any of the catch bodies falls through. */
12486 for (; !tsi_end_p (i); tsi_next (&i))
12487 {
12488 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12489 return true;
12490 }
12491 return false;
12492
12493 case EH_FILTER_EXPR:
12494 /* The exception filter expression only matters if there is an
12495 exception. If the exception does not match EH_FILTER_TYPES,
12496 we will execute EH_FILTER_FAILURE, and we will fall through
12497 if that falls through. If the exception does match
12498 EH_FILTER_TYPES, the stack unwinder will continue up the
12499 stack, so we will not fall through. We don't know whether we
12500 will throw an exception which matches EH_FILTER_TYPES or not,
12501 so we just ignore EH_FILTER_TYPES and assume that we might
12502 throw an exception which doesn't match. */
12503 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12504
12505 default:
12506 /* This case represents statements to be executed when an
12507 exception occurs. Those statements are implicitly followed
12508 by a RESX statement to resume execution after the exception.
12509 So in this case the TRY_CATCH never falls through. */
12510 return false;
12511 }
12512 }
12513
12514 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12515 need not be 100% accurate; simply be conservative and return true if we
12516 don't know. This is used only to avoid stupidly generating extra code.
12517 If we're wrong, we'll just delete the extra code later. */
12518
12519 bool
12520 block_may_fallthru (const_tree block)
12521 {
12522 /* This CONST_CAST is okay because expr_last returns its argument
12523 unmodified and we assign it to a const_tree. */
12524 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12525
12526 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12527 {
12528 case GOTO_EXPR:
12529 case RETURN_EXPR:
12530 /* Easy cases. If the last statement of the block implies
12531 control transfer, then we can't fall through. */
12532 return false;
12533
12534 case SWITCH_EXPR:
12535 /* If SWITCH_LABELS is set, this is lowered, and represents a
12536 branch to a selected label and hence can not fall through.
12537 Otherwise SWITCH_BODY is set, and the switch can fall
12538 through. */
12539 return SWITCH_LABELS (stmt) == NULL_TREE;
12540
12541 case COND_EXPR:
12542 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12543 return true;
12544 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12545
12546 case BIND_EXPR:
12547 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12548
12549 case TRY_CATCH_EXPR:
12550 return try_catch_may_fallthru (stmt);
12551
12552 case TRY_FINALLY_EXPR:
12553 /* The finally clause is always executed after the try clause,
12554 so if it does not fall through, then the try-finally will not
12555 fall through. Otherwise, if the try clause does not fall
12556 through, then when the finally clause falls through it will
12557 resume execution wherever the try clause was going. So the
12558 whole try-finally will only fall through if both the try
12559 clause and the finally clause fall through. */
12560 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12561 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12562
12563 case MODIFY_EXPR:
12564 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12565 stmt = TREE_OPERAND (stmt, 1);
12566 else
12567 return true;
12568 /* FALLTHRU */
12569
12570 case CALL_EXPR:
12571 /* Functions that do not return do not fall through. */
12572 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12573
12574 case CLEANUP_POINT_EXPR:
12575 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12576
12577 case TARGET_EXPR:
12578 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12579
12580 case ERROR_MARK:
12581 return true;
12582
12583 default:
12584 return lang_hooks.block_may_fallthru (stmt);
12585 }
12586 }
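/* A small sketch of the easy cases (hypothetical demo function, disabled):
   a block that ends in a return cannot fall through, while a block ending
   in an ordinary call to a function that may return can.  */
#if 0
static void
demo_block_may_fallthru (void)
{
  tree ret = build1 (RETURN_EXPR, void_type_node, NULL_TREE);
  gcc_assert (!block_may_fallthru (ret));
}
#endif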
12587
12588 /* True if we are using EH to handle cleanups. */
12589 static bool using_eh_for_cleanups_flag = false;
12590
12591 /* This routine is called from front ends to indicate eh should be used for
12592 cleanups. */
12593 void
12594 using_eh_for_cleanups (void)
12595 {
12596 using_eh_for_cleanups_flag = true;
12597 }
12598
12599 /* Query whether EH is used for cleanups. */
12600 bool
12601 using_eh_for_cleanups_p (void)
12602 {
12603 return using_eh_for_cleanups_flag;
12604 }
12605
12606 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12607 const char *
12608 get_tree_code_name (enum tree_code code)
12609 {
12610 const char *invalid = "<invalid tree code>";
12611
12612 if (code >= MAX_TREE_CODES)
12613 return invalid;
12614
12615 return tree_code_name[code];
12616 }
12617
12618 /* Drops the TREE_OVERFLOW flag from T. */
12619
12620 tree
12621 drop_tree_overflow (tree t)
12622 {
12623 gcc_checking_assert (TREE_OVERFLOW (t));
12624
12625 /* For tree codes with a sharing machinery re-build the result. */
12626 if (TREE_CODE (t) == INTEGER_CST)
12627 return wide_int_to_tree (TREE_TYPE (t), t);
12628
12629 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12630 and drop the flag. */
12631 t = copy_node (t);
12632 TREE_OVERFLOW (t) = 0;
12633 return t;
12634 }
12635
12636 /* Given a memory reference expression T, return its base address.
12637 The base address of a memory reference expression is the main
12638 object being referenced. For instance, the base address for
12639 'array[i].fld[j]' is 'array'. You can think of this as stripping
12640 away the offset part from a memory address.
12641
12642 This function calls handled_component_p to strip away all the inner
12643 parts of the memory reference until it reaches the base object. */
12644
12645 tree
12646 get_base_address (tree t)
12647 {
12648 while (handled_component_p (t))
12649 t = TREE_OPERAND (t, 0);
12650
12651 if ((TREE_CODE (t) == MEM_REF
12652 || TREE_CODE (t) == TARGET_MEM_REF)
12653 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12654 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12655
12656 /* ??? Either the alias oracle or all callers need to properly deal
12657 with WITH_SIZE_EXPRs before we can look through those. */
12658 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12659 return NULL_TREE;
12660
12661 return t;
12662 }
12663
12664 /* Return a tree of sizetype representing the size, in bytes, of the element
12665 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12666
12667 tree
12668 array_ref_element_size (tree exp)
12669 {
12670 tree aligned_size = TREE_OPERAND (exp, 3);
12671 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12672 location_t loc = EXPR_LOCATION (exp);
12673
12674 /* If a size was specified in the ARRAY_REF, it's the size measured
12675 in alignment units of the element type. So multiply by that value. */
12676 if (aligned_size)
12677 {
12678 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12679 sizetype from another type of the same width and signedness. */
12680 if (TREE_TYPE (aligned_size) != sizetype)
12681 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12682 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12683 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12684 }
12685
12686 /* Otherwise, take the size from that of the element type. Substitute
12687 any PLACEHOLDER_EXPR that we have. */
12688 else
12689 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12690 }
12691
12692 /* Return a tree representing the lower bound of the array mentioned in
12693 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12694
12695 tree
12696 array_ref_low_bound (tree exp)
12697 {
12698 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12699
12700 /* If a lower bound is specified in EXP, use it. */
12701 if (TREE_OPERAND (exp, 2))
12702 return TREE_OPERAND (exp, 2);
12703
12704 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12705 substituting for a PLACEHOLDER_EXPR as needed. */
12706 if (domain_type && TYPE_MIN_VALUE (domain_type))
12707 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12708
12709 /* Otherwise, return a zero of the appropriate type. */
12710 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12711 }
12712
12713 /* Return a tree representing the upper bound of the array mentioned in
12714 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12715
12716 tree
12717 array_ref_up_bound (tree exp)
12718 {
12719 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12720
12721 /* If there is a domain type and it has an upper bound, use it, substituting
12722 for a PLACEHOLDER_EXPR as needed. */
12723 if (domain_type && TYPE_MAX_VALUE (domain_type))
12724 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12725
12726 /* Otherwise fail. */
12727 return NULL_TREE;
12728 }
12729
12730 /* Returns true if REF is an array reference to an array at the end of
12731 a structure. If this is the case, the array may be allocated larger
12732 than its upper bound implies. */
12733
12734 bool
12735 array_at_struct_end_p (tree ref)
12736 {
12737 if (TREE_CODE (ref) != ARRAY_REF
12738 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12739 return false;
12740
12741 while (handled_component_p (ref))
12742 {
12743 /* If the reference chain contains a component reference to a
12744 non-union type and another field follows, then the reference
12745 is not at the end of a structure. */
12746 if (TREE_CODE (ref) == COMPONENT_REF
12747 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12748 {
12749 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12750 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12751 nextf = DECL_CHAIN (nextf);
12752 if (nextf)
12753 return false;
12754 }
12755
12756 ref = TREE_OPERAND (ref, 0);
12757 }
12758
12759 /* If the reference is based on a declared entity, the size of the array
12760 is constrained by its given domain. */
12761 if (DECL_P (ref))
12762 return false;
12763
12764 return true;
12765 }
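/* The source-level situation this predicate is about, as a disabled sketch
   (the struct and field names are hypothetical): for a reference like
   p->tail[i] the array is the last field of the record, so the object may
   have been allocated with more elements than the declared bound.  */
#if 0
struct demo_packet
{
  int len;
  unsigned char tail[1];        /* Trailing array, commonly over-allocated.  */
};
#endif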
12766
12767 /* Return a tree representing the offset, in bytes, of the field referenced
12768 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12769
12770 tree
12771 component_ref_field_offset (tree exp)
12772 {
12773 tree aligned_offset = TREE_OPERAND (exp, 2);
12774 tree field = TREE_OPERAND (exp, 1);
12775 location_t loc = EXPR_LOCATION (exp);
12776
12777 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12778 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12779 value. */
12780 if (aligned_offset)
12781 {
12782 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12783 sizetype from another type of the same width and signedness. */
12784 if (TREE_TYPE (aligned_offset) != sizetype)
12785 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12786 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12787 size_int (DECL_OFFSET_ALIGN (field)
12788 / BITS_PER_UNIT));
12789 }
12790
12791 /* Otherwise, take the offset from that of the field. Substitute
12792 any PLACEHOLDER_EXPR that we have. */
12793 else
12794 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12795 }
12796
12797 /* Return the machine mode of T. For vectors, returns the mode of the
12798 inner type. The main use case is to feed the result to HONOR_NANS,
12799 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12800
12801 machine_mode
12802 element_mode (const_tree t)
12803 {
12804 if (!TYPE_P (t))
12805 t = TREE_TYPE (t);
12806 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12807 t = TREE_TYPE (t);
12808 return TYPE_MODE (t);
12809 }
12810
12811
12812 /* Verify that basic properties of T match TV and thus T can be a variant of
12813 TV. TV should be the more specified variant (i.e. the main variant). */
12814
12815 static bool
12816 verify_type_variant (const_tree t, tree tv)
12817 {
12818 /* Type variant can differ by:
12819
12820 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12821 ENCODE_QUAL_ADDR_SPACE.
12822 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12823 in this case some values may not be set in the variant types
12824 (see TYPE_COMPLETE_P checks).
12825 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12826 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12827 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12828 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12829 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12830 this is necessary to make it possible to merge types from different TUs
12831 - arrays, pointers and references may have TREE_TYPE that is a variant
12832 of TREE_TYPE of their main variants.
12833 - aggregates may have a new TYPE_FIELDS list that lists variants of
12834 the main variant TYPE_FIELDS.
12835 - vector types may differ by TYPE_VECTOR_OPAQUE
12836 - TYPE_METHODS is always NULL for variant types and maintained for the
12837 main variant only.
12838 */
12839
12840 /* Convenience macro for matching individual fields. */
12841 #define verify_variant_match(flag) \
12842 do { \
12843 if (flag (tv) != flag (t)) \
12844 { \
12845 error ("type variant differs by " #flag "."); \
12846 debug_tree (tv); \
12847 return false; \
12848 } \
12849 } while (false)
12850
12851 /* tree_base checks. */
12852
12853 verify_variant_match (TREE_CODE);
12854 /* FIXME: Ada builds non-artificial variants of artificial types. */
12855 if (TYPE_ARTIFICIAL (tv) && 0)
12856 verify_variant_match (TYPE_ARTIFICIAL);
12857 if (POINTER_TYPE_P (tv))
12858 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12859 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12860 verify_variant_match (TYPE_UNSIGNED);
12861 verify_variant_match (TYPE_ALIGN_OK);
12862 verify_variant_match (TYPE_PACKED);
12863 if (TREE_CODE (t) == REFERENCE_TYPE)
12864 verify_variant_match (TYPE_REF_IS_RVALUE);
12865 verify_variant_match (TYPE_SATURATING);
12866 /* FIXME: This check triggers during the libstdc++ build. */
12867 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12868 verify_variant_match (TYPE_FINAL_P);
12869
12870 /* tree_type_common checks. */
12871
12872 if (COMPLETE_TYPE_P (t))
12873 {
12874 verify_variant_match (TYPE_SIZE);
12875 verify_variant_match (TYPE_MODE);
12876 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12877 /* FIXME: ideally we should compare for pointer equality, but the Java FE
12878 produces variants where the size is an INTEGER_CST of a different type
12879 (int wrt size_type) during the libjava build. */
12880 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12881 {
12882 error ("type variant has different TYPE_SIZE_UNIT");
12883 debug_tree (tv);
12884 error ("type variant's TYPE_SIZE_UNIT");
12885 debug_tree (TYPE_SIZE_UNIT (tv));
12886 error ("type's TYPE_SIZE_UNIT");
12887 debug_tree (TYPE_SIZE_UNIT (t));
12888 return false;
12889 }
12890 }
12891 verify_variant_match (TYPE_PRECISION);
12892 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12893 if (RECORD_OR_UNION_TYPE_P (t))
12894 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12895 else if (TREE_CODE (t) == ARRAY_TYPE)
12896 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12897 /* During LTO we merge variant lists from different translation units
12898 that may differ by TYPE_CONTEXT, which in turn may point
12899 to a TRANSLATION_UNIT_DECL.
12900 Ada also builds variants of types with different TYPE_CONTEXT. */
12901 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12902 verify_variant_match (TYPE_CONTEXT);
12903 verify_variant_match (TYPE_STRING_FLAG);
12904 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12905 verify_variant_match (TYPE_ALIAS_SET);
12906
12907 /* tree_type_non_common checks. */
12908
12909 /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12910 and dangles the pointer from time to time. */
12911 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12912 && (in_lto_p || !TYPE_VFIELD (tv)
12913 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12914 {
12915 error ("type variant has different TYPE_VFIELD");
12916 debug_tree (tv);
12917 return false;
12918 }
12919 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12920 || TREE_CODE (t) == INTEGER_TYPE
12921 || TREE_CODE (t) == BOOLEAN_TYPE
12922 || TREE_CODE (t) == REAL_TYPE
12923 || TREE_CODE (t) == FIXED_POINT_TYPE)
12924 {
12925 verify_variant_match (TYPE_MAX_VALUE);
12926 verify_variant_match (TYPE_MIN_VALUE);
12927 }
12928 if (TREE_CODE (t) == METHOD_TYPE)
12929 verify_variant_match (TYPE_METHOD_BASETYPE);
12930 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12931 {
12932 error ("type variant has TYPE_METHODS");
12933 debug_tree (tv);
12934 return false;
12935 }
12936 if (TREE_CODE (t) == OFFSET_TYPE)
12937 verify_variant_match (TYPE_OFFSET_BASETYPE);
12938 if (TREE_CODE (t) == ARRAY_TYPE)
12939 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12940 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12941 or even in the type's main variant. This is needed to make the bootstrap pass
12942 and the bug seems new in GCC 5.
12943 The C++ FE should be updated to make this consistent and we should check
12944 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise that there
12945 is a match with the main variant.
12946
12947 Also disable the check for Java for now because of a parser hack that first
12948 builds a dummy BINFO and then sometimes replaces it by the real BINFO in some
12949 of the copies. */
12950 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12951 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12952 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12953 Since there is no cheap way to tell a C++ type from a Java one w/o LTO, do the checking
12954 at LTO time only. */
12955 && (in_lto_p && odr_type_p (t)))
12956 {
12957 error ("type variant has different TYPE_BINFO");
12958 debug_tree (tv);
12959 error ("type variant's TYPE_BINFO");
12960 debug_tree (TYPE_BINFO (tv));
12961 error ("type's TYPE_BINFO");
12962 debug_tree (TYPE_BINFO (t));
12963 return false;
12964 }
12965
12966 /* Check various uses of TYPE_VALUES_RAW. */
12967 if (TREE_CODE (t) == ENUMERAL_TYPE)
12968 verify_variant_match (TYPE_VALUES);
12969 else if (TREE_CODE (t) == ARRAY_TYPE)
12970 verify_variant_match (TYPE_DOMAIN);
12971 /* Permit incomplete variants of complete type. While FEs may complete
12972 all variants, this does not happen for C++ templates in all cases. */
12973 else if (RECORD_OR_UNION_TYPE_P (t)
12974 && COMPLETE_TYPE_P (t)
12975 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12976 {
12977 tree f1, f2;
12978
12979 /* Fortran builds qualified variants as new records with items of
12980 qualified type. Verify that they look the same. */
12981 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12982 f1 && f2;
12983 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12984 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12985 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12986 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12987 /* FIXME: gfc_nonrestricted_type builds all types as variants
12988 with the exception of pointer types. It deeply copies the type,
12989 which means that we may end up with a variant type
12990 referring to a non-variant pointer. We may change it to
12991 produce types as variants, too, like
12992 objc_get_protocol_qualified_type does. */
12993 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12994 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12995 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12996 break;
12997 if (f1 || f2)
12998 {
12999 error ("type variant has different TYPE_FIELDS");
13000 debug_tree (tv);
13001 error ("first mismatch is field");
13002 debug_tree (f1);
13003 error ("and field");
13004 debug_tree (f2);
13005 return false;
13006 }
13007 }
13008 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13009 verify_variant_match (TYPE_ARG_TYPES);
13010 /* For C++ the qualified variant of an array type is really an array type
13011 of the qualified TREE_TYPE.
13012 ObjC builds variants of pointer types where the pointed-to type is a
13013 variant, too, in objc_get_protocol_qualified_type. */
13014 if (TREE_TYPE (t) != TREE_TYPE (tv)
13015 && ((TREE_CODE (t) != ARRAY_TYPE
13016 && !POINTER_TYPE_P (t))
13017 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13018 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13019 {
13020 error ("type variant has different TREE_TYPE");
13021 debug_tree (tv);
13022 error ("type variant's TREE_TYPE");
13023 debug_tree (TREE_TYPE (tv));
13024 error ("type's TREE_TYPE");
13025 debug_tree (TREE_TYPE (t));
13026 return false;
13027 }
13028 if (type_with_alias_set_p (t)
13029 && !gimple_canonical_types_compatible_p (t, tv, false))
13030 {
13031 error ("type is not compatible with its vairant");
13032 debug_tree (tv);
13033 error ("type variant's TREE_TYPE");
13034 debug_tree (TREE_TYPE (tv));
13035 error ("type's TREE_TYPE");
13036 debug_tree (TREE_TYPE (t));
13037 return false;
13038 }
13039 return true;
13040 #undef verify_variant_match
13041 }
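
/* A sketch of the kind of mismatch the checks above reject (hypothetical
   trees, not something this file builds): if a front end created "const S"
   as a variant of "struct S" but gave the variant its own TYPE_FIELDS list
   whose fields have incompatible types or offsets, the field-by-field walk
   above would report "type variant has different TYPE_FIELDS" and the
   function would return false.  */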
13042
13043
13044 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13045 the middle-end types_compatible_p function. It needs to avoid
13046 claiming types are different for types that should be treated
13047 the same with respect to TBAA. Canonical types are also used
13048 for IL consistency checks via the useless_type_conversion_p
13049 predicate which does not handle all type kinds itself but falls
13050 back to pointer-comparison of TYPE_CANONICAL for aggregates
13051 for example. */
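
/* A rough illustration of the effect (an assumed example, not code from
   this file): after LTO canonical type merging, two structurally identical
   RECORD_TYPEs read from different translation units, say

     struct S { int i; };   -- TU 1
     struct S { int i; };   -- TU 2

   end up sharing one TYPE_CANONICAL, so useless_type_conversion_p treats a
   conversion between the two trees as useless even though the tree nodes
   themselves differ.  */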
13052
13053 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13054 type calculation because we need to allow inter-operability between signed
13055 and unsigned variants. */
13056
13057 bool
13058 type_with_interoperable_signedness (const_tree type)
13059 {
13060 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13061 signed char and unsigned char. Similarly the Fortran FE builds
13062 C_SIZE_T as a signed type, while C defines size_t as unsigned. */
13063
13064 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13065 == INTEGER_TYPE
13066 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13067 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13068 }
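
/* A small sketch of what this predicate accepts, assuming a usual target
   where char is 8 bits and size_t is wider than char:

     type_with_interoperable_signedness (signed_char_type_node)   -> true
     type_with_interoperable_signedness (unsigned_char_type_node) -> true
     type_with_interoperable_signedness (size_type_node)          -> true
     type_with_interoperable_signedness (integer_type_node)       -> false
       (unless int happens to share its precision with size_t)

   i.e. only the char-precision and size_t-precision integer types have
   their signedness ignored during canonical type merging.  */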
13069
13070 /* Return true iff T1 and T2 are structurally identical as far as
13071 TBAA is concerned.
13072 This function is used both by lto.c canonical type merging and by the
13073 verifier. If TRUST_TYPE_CANONICAL is set we do not look into the structure
13074 of types that have TYPE_CANONICAL defined and assume they are equivalent. */
13075
13076 bool
13077 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13078 bool trust_type_canonical)
13079 {
13080 /* Type variants should be the same as the main variant. When not doing
13081 sanity checking to verify this fact, go to main variants and save some work. */
13082 if (trust_type_canonical)
13083 {
13084 t1 = TYPE_MAIN_VARIANT (t1);
13085 t2 = TYPE_MAIN_VARIANT (t2);
13086 }
13087
13088 /* Check first for the obvious case of pointer identity. */
13089 if (t1 == t2)
13090 return true;
13091
13092 /* Check that we have two types to compare. */
13093 if (t1 == NULL_TREE || t2 == NULL_TREE)
13094 return false;
13095
13096 /* We consider complete types always compatible with incomplete types.
13097 This does not make sense for canonical type calculation and thus we
13098 need to ensure that we are never called on them.
13099
13100 FIXME: For more correctness the function probably should have three modes
13101 1) mode assuming that types are complete matching their structure
13102 2) mode allowing incomplete types but producing equivalence classes
13103 and thus ignoring all info from complete types
13104 3) mode allowing incomplete types to match complete but checking
13105 compatibility between complete types.
13106
13107 1 and 2 can be used for canonical type calculation. 3 is the real
13108 definition of type compatibility that can be used e.g. for warnings during
13109 declaration merging. */
13110
13111 gcc_assert (!trust_type_canonical
13112 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13113 /* If the types have been previously registered and found equal
13114 they still are. */
13115 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13116 && trust_type_canonical)
13117 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13118
13119 /* Can't be the same type if the types don't have the same code. */
13120 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13121 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13122 return false;
13123
13124 /* Qualifiers do not matter for canonical type comparison purposes. */
13125
13126 /* Void types and nullptr types are always the same. */
13127 if (TREE_CODE (t1) == VOID_TYPE
13128 || TREE_CODE (t1) == NULLPTR_TYPE)
13129 return true;
13130
13131 /* Can't be the same type if they have different modes. */
13132 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13133 return false;
13134
13135 /* Non-aggregate types can be handled cheaply. */
13136 if (INTEGRAL_TYPE_P (t1)
13137 || SCALAR_FLOAT_TYPE_P (t1)
13138 || FIXED_POINT_TYPE_P (t1)
13139 || TREE_CODE (t1) == VECTOR_TYPE
13140 || TREE_CODE (t1) == COMPLEX_TYPE
13141 || TREE_CODE (t1) == OFFSET_TYPE
13142 || POINTER_TYPE_P (t1))
13143 {
13144 /* Can't be the same type if they have different precisions. */
13145 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13146 return false;
13147
13148 /* In some cases the signed and unsigned types are required to be
13149 inter-operable. */
13150 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13151 && !type_with_interoperable_signedness (t1))
13152 return false;
13153
13154 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13155 interoperable with "signed char". Unless all frontends are revisited
13156 to agree on these types, we must ignore the flag completely. */
13157
13158 /* The Fortran standard defines the C_PTR type to be compatible with every
13159 C pointer. For this reason we need to glob all pointers into one.
13160 Still, pointers in different address spaces are not compatible. */
13161 if (POINTER_TYPE_P (t1))
13162 {
13163 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13164 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13165 return false;
13166 }
13167
13168 /* Tail-recurse to components. */
13169 if (TREE_CODE (t1) == VECTOR_TYPE
13170 || TREE_CODE (t1) == COMPLEX_TYPE)
13171 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13172 TREE_TYPE (t2),
13173 trust_type_canonical);
13174
13175 return true;
13176 }
13177
13178 /* Do type-specific comparisons. */
13179 switch (TREE_CODE (t1))
13180 {
13181 case ARRAY_TYPE:
13182 /* Array types are the same if the element types are the same and
13183 the number of elements is the same. */
13184 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13185 trust_type_canonical)
13186 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13187 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13188 return false;
13189 else
13190 {
13191 tree i1 = TYPE_DOMAIN (t1);
13192 tree i2 = TYPE_DOMAIN (t2);
13193
13194 /* For an incomplete external array, the type domain can be
13195 NULL_TREE. Check this condition also. */
13196 if (i1 == NULL_TREE && i2 == NULL_TREE)
13197 return true;
13198 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13199 return false;
13200 else
13201 {
13202 tree min1 = TYPE_MIN_VALUE (i1);
13203 tree min2 = TYPE_MIN_VALUE (i2);
13204 tree max1 = TYPE_MAX_VALUE (i1);
13205 tree max2 = TYPE_MAX_VALUE (i2);
13206
13207 /* The minimum/maximum values have to be the same. */
13208 if ((min1 == min2
13209 || (min1 && min2
13210 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13211 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13212 || operand_equal_p (min1, min2, 0))))
13213 && (max1 == max2
13214 || (max1 && max2
13215 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13216 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13217 || operand_equal_p (max1, max2, 0)))))
13218 return true;
13219 else
13220 return false;
13221 }
13222 }
13223
13224 case METHOD_TYPE:
13225 case FUNCTION_TYPE:
13226 /* Function types are the same if the return type and argument types
13227 are the same. */
13228 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13229 trust_type_canonical))
13230 return false;
13231
13232 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13233 return true;
13234 else
13235 {
13236 tree parms1, parms2;
13237
13238 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13239 parms1 && parms2;
13240 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13241 {
13242 if (!gimple_canonical_types_compatible_p
13243 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13244 trust_type_canonical))
13245 return false;
13246 }
13247
13248 if (parms1 || parms2)
13249 return false;
13250
13251 return true;
13252 }
13253
13254 case RECORD_TYPE:
13255 case UNION_TYPE:
13256 case QUAL_UNION_TYPE:
13257 {
13258 tree f1, f2;
13259
13260 /* For aggregate types, all the fields must be the same. */
13261 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13262 f1 || f2;
13263 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13264 {
13265 /* Skip non-fields. */
13266 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13267 f1 = TREE_CHAIN (f1);
13268 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13269 f2 = TREE_CHAIN (f2);
13270 if (!f1 || !f2)
13271 break;
13272 /* The fields must have the same addressability, offset and type. */
13273 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13274 || !gimple_compare_field_offset (f1, f2)
13275 || !gimple_canonical_types_compatible_p
13276 (TREE_TYPE (f1), TREE_TYPE (f2),
13277 trust_type_canonical))
13278 return false;
13279 }
13280
13281 /* If one aggregate has more fields than the other, they
13282 are not the same. */
13283 if (f1 || f2)
13284 return false;
13285
13286 return true;
13287 }
13288
13289 default:
13290 /* Consider all types with language-specific trees in them mutually
13291 compatible. This is executed only from verify_type and false
13292 positives can be tolerated. */
13293 gcc_assert (!in_lto_p);
13294 return true;
13295 }
13296 }
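
/* Illustrative outcomes of the comparison above, phrased in source terms
   rather than as trees (a sketch, not an exhaustive specification):

     int[10]   vs  const int[10]  -> compatible; qualifiers are ignored and
                                     the domain bounds match.
     int[10]   vs  int[20]        -> not compatible; TYPE_DOMAIN max differs.
     int *     vs  char *         -> compatible; all pointers in the same
                                     address space are globbed together (see
                                     the C_PTR comment above).
     struct { int f; }  vs  struct { short f; }
                                  -> not compatible; the field types have
                                     different modes and precisions.  */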
13297
13298 /* Verify type T. */
13299
13300 void
13301 verify_type (const_tree t)
13302 {
13303 bool error_found = false;
13304 tree mv = TYPE_MAIN_VARIANT (t);
13305 if (!mv)
13306 {
13307 error ("Main variant is not defined");
13308 error_found = true;
13309 }
13310 else if (mv != TYPE_MAIN_VARIANT (mv))
13311 {
13312 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13313 debug_tree (mv);
13314 error_found = true;
13315 }
13316 else if (t != mv && !verify_type_variant (t, mv))
13317 error_found = true;
13318
13319 tree ct = TYPE_CANONICAL (t);
13320 if (!ct)
13321 ;
13322 else if (TYPE_CANONICAL (ct) != ct)
13323 {
13324 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13325 debug_tree (ct);
13326 error_found = true;
13327 }
13328 /* Method and function types cannot be used to address memory and thus
13329 TYPE_CANONICAL really matters only for determining useless conversions.
13330
13331 FIXME: The C++ FE produces declarations of builtin functions that are not
13332 compatible with main variants. */
13333 else if (TREE_CODE (t) == FUNCTION_TYPE)
13334 ;
13335 else if (t != ct
13336 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13337 with variably sized arrays because their sizes are possibly
13338 gimplified to different variables. */
13339 && !variably_modified_type_p (ct, NULL)
13340 && !gimple_canonical_types_compatible_p (t, ct, false))
13341 {
13342 error ("TYPE_CANONICAL is not compatible");
13343 debug_tree (ct);
13344 error_found = true;
13345 }
13346
13347
13348 /* Check various uses of TYPE_MINVAL. */
13349 if (RECORD_OR_UNION_TYPE_P (t))
13350 {
13351 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13352 and leaves the pointer dangling from time to time. */
13353 if (TYPE_VFIELD (t)
13354 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13355 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13356 {
13357 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13358 debug_tree (TYPE_VFIELD (t));
13359 error_found = true;
13360 }
13361 }
13362 else if (TREE_CODE (t) == POINTER_TYPE)
13363 {
13364 if (TYPE_NEXT_PTR_TO (t)
13365 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13366 {
13367 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13368 debug_tree (TYPE_NEXT_PTR_TO (t));
13369 error_found = true;
13370 }
13371 }
13372 else if (TREE_CODE (t) == REFERENCE_TYPE)
13373 {
13374 if (TYPE_NEXT_REF_TO (t)
13375 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13376 {
13377 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13378 debug_tree (TYPE_NEXT_REF_TO (t));
13379 error_found = true;
13380 }
13381 }
13382 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13383 || TREE_CODE (t) == FIXED_POINT_TYPE)
13384 {
13385 /* FIXME: The following check should pass:
13386 useless_type_conversion_p (const_cast <tree> (t),
13387 TREE_TYPE (TYPE_MIN_VALUE (t)))
13388 but does not for C sizetypes in LTO. */
13389 }
13390 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13391 else if (TYPE_MINVAL (t)
13392 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13393 || in_lto_p))
13394 {
13395 error ("TYPE_MINVAL non-NULL");
13396 debug_tree (TYPE_MINVAL (t));
13397 error_found = true;
13398 }
13399
13400 /* Check various uses of TYPE_MAXVAL. */
13401 if (RECORD_OR_UNION_TYPE_P (t))
13402 {
13403 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13404 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13405 && TYPE_METHODS (t) != error_mark_node)
13406 {
13407 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13408 debug_tree (TYPE_METHODS (t));
13409 error_found = true;
13410 }
13411 }
13412 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13413 {
13414 if (TYPE_METHOD_BASETYPE (t)
13415 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13416 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13417 {
13418 error ("TYPE_METHOD_BASETYPE is not record nor union");
13419 debug_tree (TYPE_METHOD_BASETYPE (t));
13420 error_found = true;
13421 }
13422 }
13423 else if (TREE_CODE (t) == OFFSET_TYPE)
13424 {
13425 if (TYPE_OFFSET_BASETYPE (t)
13426 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13427 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13428 {
13429 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13430 debug_tree (TYPE_OFFSET_BASETYPE (t));
13431 error_found = true;
13432 }
13433 }
13434 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13435 || TREE_CODE (t) == FIXED_POINT_TYPE)
13436 {
13437 /* FIXME: The following check should pass:
13438 useless_type_conversion_p (const_cast <tree> (t),
13439 TREE_TYPE (TYPE_MAX_VALUE (t)))
13440 but does not for C sizetypes in LTO. */
13441 }
13442 else if (TREE_CODE (t) == ARRAY_TYPE)
13443 {
13444 if (TYPE_ARRAY_MAX_SIZE (t)
13445 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13446 {
13447 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13448 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13449 error_found = true;
13450 }
13451 }
13452 else if (TYPE_MAXVAL (t))
13453 {
13454 error ("TYPE_MAXVAL non-NULL");
13455 debug_tree (TYPE_MAXVAL (t));
13456 error_found = true;
13457 }
13458
13459 /* Check various uses of TYPE_BINFO. */
13460 if (RECORD_OR_UNION_TYPE_P (t))
13461 {
13462 if (!TYPE_BINFO (t))
13463 ;
13464 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13465 {
13466 error ("TYPE_BINFO is not TREE_BINFO");
13467 debug_tree (TYPE_BINFO (t));
13468 error_found = true;
13469 }
13470 /* FIXME: Java builds invalid empty binfos that do not have
13471 TREE_TYPE set. */
13472 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13473 {
13474 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13475 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13476 error_found = true;
13477 }
13478 }
13479 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13480 {
13481 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13482 debug_tree (TYPE_LANG_SLOT_1 (t));
13483 error_found = true;
13484 }
13485
13486 /* Check various uses of TYPE_VALUES_RAW. */
13487 if (TREE_CODE (t) == ENUMERAL_TYPE)
13488 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13489 {
13490 tree value = TREE_VALUE (l);
13491 tree name = TREE_PURPOSE (l);
13492
13493 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
13494 CONST_DECLs of ENUMERAL_TYPE. */
13495 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13496 {
13497 error ("Enum value is not CONST_DECL or INTEGER_CST");
13498 debug_tree (value);
13499 debug_tree (name);
13500 error_found = true;
13501 }
13502 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13503 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13504 {
13505 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13506 debug_tree (value);
13507 debug_tree (name);
13508 error_found = true;
13509 }
13510 if (TREE_CODE (name) != IDENTIFIER_NODE)
13511 {
13512 error ("Enum value name is not IDENTIFIER_NODE");
13513 debug_tree (value);
13514 debug_tree (name);
13515 error_found = true;
13516 }
13517 }
13518 else if (TREE_CODE (t) == ARRAY_TYPE)
13519 {
13520 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13521 {
13522 error ("Array TYPE_DOMAIN is not integer type");
13523 debug_tree (TYPE_DOMAIN (t));
13524 error_found = true;
13525 }
13526 }
13527 else if (RECORD_OR_UNION_TYPE_P (t))
13528 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13529 {
13530 /* TODO: verify properties of decls. */
13531 if (TREE_CODE (fld) == FIELD_DECL)
13532 ;
13533 else if (TREE_CODE (fld) == TYPE_DECL)
13534 ;
13535 else if (TREE_CODE (fld) == CONST_DECL)
13536 ;
13537 else if (TREE_CODE (fld) == VAR_DECL)
13538 ;
13539 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13540 ;
13541 else if (TREE_CODE (fld) == USING_DECL)
13542 ;
13543 else
13544 {
13545 error ("Wrong tree in TYPE_FIELDS list");
13546 debug_tree (fld);
13547 error_found = true;
13548 }
13549 }
13550 else if (TREE_CODE (t) == INTEGER_TYPE
13551 || TREE_CODE (t) == BOOLEAN_TYPE
13552 || TREE_CODE (t) == OFFSET_TYPE
13553 || TREE_CODE (t) == REFERENCE_TYPE
13554 || TREE_CODE (t) == NULLPTR_TYPE
13555 || TREE_CODE (t) == POINTER_TYPE)
13556 {
13557 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13558 {
13559 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13560 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13561 error_found = true;
13562 }
13563 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13564 {
13565 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13566 debug_tree (TYPE_CACHED_VALUES (t));
13567 error_found = true;
13568 }
13569 /* Verify just enough of the cache to ensure that no one copied it to a new
13570 type. All copying should go through copy_node, which should clear it. */
13571 else if (TYPE_CACHED_VALUES_P (t))
13572 {
13573 int i;
13574 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13575 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13576 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13577 {
13578 error ("wrong TYPE_CACHED_VALUES entry");
13579 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13580 error_found = true;
13581 break;
13582 }
13583 }
13584 }
13585 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13586 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13587 {
13588 /* C++ FE uses TREE_PURPOSE to store initial values. */
13589 if (TREE_PURPOSE (l) && in_lto_p)
13590 {
13591 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13592 debug_tree (l);
13593 error_found = true;
13594 }
13595 if (!TYPE_P (TREE_VALUE (l)))
13596 {
13597 error ("Wrong entry in TYPE_ARG_TYPES list");
13598 debug_tree (l);
13599 error_found = true;
13600 }
13601 }
13602 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13603 {
13604 error ("TYPE_VALUES_RAW field is non-NULL");
13605 debug_tree (TYPE_VALUES_RAW (t));
13606 error_found = true;
13607 }
13608 if (TREE_CODE (t) != INTEGER_TYPE
13609 && TREE_CODE (t) != BOOLEAN_TYPE
13610 && TREE_CODE (t) != OFFSET_TYPE
13611 && TREE_CODE (t) != REFERENCE_TYPE
13612 && TREE_CODE (t) != NULLPTR_TYPE
13613 && TREE_CODE (t) != POINTER_TYPE
13614 && TYPE_CACHED_VALUES_P (t))
13615 {
13616 error ("TYPE_CACHED_VALUES_P is set while it should not");
13617 error_found = true;
13618 }
13619 if (TYPE_STRING_FLAG (t)
13620 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13621 {
13622 error ("TYPE_STRING_FLAG is set on wrong type code");
13623 error_found = true;
13624 }
13625 else if (TYPE_STRING_FLAG (t))
13626 {
13627 const_tree b = t;
13628 if (TREE_CODE (b) == ARRAY_TYPE)
13629 b = TREE_TYPE (t);
13630 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
13631 which is 32 bits. */
13632 if (TREE_CODE (b) != INTEGER_TYPE)
13633 {
13634 error ("TYPE_STRING_FLAG is set on type that does not look like "
13635 "char nor array of chars");
13636 error_found = true;
13637 }
13638 }
13639
13640 /* ipa-devirt makes the assumption that TYPE_METHOD_BASETYPE is always
13641 a TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13642 of a type. */
13643 if (TREE_CODE (t) == METHOD_TYPE
13644 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13645 {
13646 error ("TYPE_METHOD_BASETYPE is not main variant");
13647 error_found = true;
13648 }
13649
13650 if (error_found)
13651 {
13652 debug_tree (const_cast <tree> (t));
13653 internal_error ("verify_type failed");
13654 }
13655 }
13656
13657
13658 /* Return true if ARG is marked with the nonnull attribute in the
13659 current function signature. */
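
/* For example (an illustration, not code from this file): given

     void f (char *p, char *q) __attribute__ ((nonnull (2)));

   nonnull_arg_p returns true for the PARM_DECL of Q while compiling f and
   false for P; with a bare "nonnull" (no argument list) it returns true
   for every pointer parameter.  */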
13660
13661 bool
13662 nonnull_arg_p (const_tree arg)
13663 {
13664 tree t, attrs, fntype;
13665 unsigned HOST_WIDE_INT arg_num;
13666
13667 gcc_assert (TREE_CODE (arg) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (arg)));
13668
13669 /* The static chain decl is always non-NULL. */
13670 if (arg == cfun->static_chain_decl)
13671 return true;
13672
13673 /* The THIS argument of a method is always non-NULL. */
13674 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13675 && arg == DECL_ARGUMENTS (cfun->decl)
13676 && flag_delete_null_pointer_checks)
13677 return true;
13678
13679 /* Values passed by reference are always non-NULL. */
13680 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13681 && flag_delete_null_pointer_checks)
13682 return true;
13683
13684 fntype = TREE_TYPE (cfun->decl);
13685 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13686 {
13687 attrs = lookup_attribute ("nonnull", attrs);
13688
13689 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13690 if (attrs == NULL_TREE)
13691 return false;
13692
13693 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13694 if (TREE_VALUE (attrs) == NULL_TREE)
13695 return true;
13696
13697 /* Get the position number for ARG in the function signature. */
13698 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13699 t;
13700 t = DECL_CHAIN (t), arg_num++)
13701 {
13702 if (t == arg)
13703 break;
13704 }
13705
13706 gcc_assert (t == arg);
13707
13708 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13709 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13710 {
13711 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13712 return true;
13713 }
13714 }
13715
13716 return false;
13717 }
13718
13719
13720 #include "gt-tree.h"