re PR tree-optimization/68835 (ICE in set_value_range, at tree-vrp.c:387, with __int1...
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64
/* Tree code classes.  */

/* Expand every entry of all-tree.def to its TYPE (tcc_*) field, so the
   table is indexed directly by tree code.  END_OF_BASE_TREE_CODES must
   also produce an entry so per-frontend code numbering stays aligned.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
120
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  */

/* Allocation count per tree code (only updated when GATHER_STATISTICS).  */
static int tree_code_counts[MAX_TREE_CODES];
/* Allocation count and cumulative byte size per tree_node_kind.  */
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];

/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};

/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) int next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
157
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;	/* Cached hash value; types cannot be rehashed.  */
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

/* Hasher for the type hash table; cache entries are dropped at GC time
   unless the hashed type itself is still marked live.  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;

/* General tree->tree mapping structure for use in hash tables.  */

/* Maps a decl to its DECL_DEBUG_EXPR / DECL_VALUE_EXPR respectively.  */

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

/* Hasher for decl -> TREE_VEC maps, keyed on DECL_UID of the source decl;
   entries survive GC only while that decl is marked.  */
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;

static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
static void type_hash_list (const_tree, inchash::hash &);
static void attribute_hash_list (const_tree, inchash::hash &);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

unsigned char tree_contains_struct[MAX_TREE_CODES][64];
260
/* Number of operands for each OpenMP clause.
   Keep in sync with enum omp_clause_code in tree-core.h and with
   omp_clause_code_name below.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  2, /* OMP_CLAUSE__CACHE_  */
  1, /* OMP_CLAUSE_DEVICE_RESIDENT  */
  1, /* OMP_CLAUSE_USE_DEVICE  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  1, /* OMP_CLAUSE__CILK_FOR_COUNT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  1, /* OMP_CLAUSE_TILE  */
};
333
/* Printable name for each OpenMP clause, in enum omp_clause_code order.
   Keep in sync with omp_clause_num_ops above.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "uniform",
  "to",			/* OMP_CLAUSE_TO_DECLARE also prints as "to".  */
  "link",
  "from",
  "to",			/* OMP_CLAUSE_TO (data motion clause).  */
  "map",
  "use_device_ptr",
  "is_device_ptr",
  "_cache_",
  "device_resident",
  "use_device",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "_simduid_",
  "_Cilk_for_count_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile"
};
405
406
/* Return the tree node structure used by tree code CODE.

   Declarations dispatch on the specific decl code; all types share
   TS_TYPE_NON_COMMON; every expression class shares TS_EXP; constants
   and exceptional codes are enumerated individually below.  Aborts on
   a code with no known structure.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
	switch (code)
	  {
	  case FIELD_DECL:
	    return TS_FIELD_DECL;
	  case PARM_DECL:
	    return TS_PARM_DECL;
	  case VAR_DECL:
	    return TS_VAR_DECL;
	  case LABEL_DECL:
	    return TS_LABEL_DECL;
	  case RESULT_DECL:
	    return TS_RESULT_DECL;
	  case DEBUG_EXPR_DECL:
	    return TS_DECL_WRTL;
	  case CONST_DECL:
	    return TS_CONST_DECL;
	  case TYPE_DECL:
	    return TS_TYPE_DECL;
	  case FUNCTION_DECL:
	    return TS_FUNCTION_DECL;
	  case TRANSLATION_UNIT_DECL:
	    return TS_TRANSLATION_UNIT_DECL;
	  default:
	    /* Any other decl (including language-specific ones).  */
	    return TS_DECL_NON_COMMON;
	  }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case VOID_CST:		return TS_TYPED;
    case INTEGER_CST:		return TS_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case FIXED_CST:		return TS_FIXED_CST;
    case COMPLEX_CST:		return TS_COMPLEX;
    case VECTOR_CST:		return TS_VECTOR;
    case STRING_CST:		return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;
    case SSA_NAME:		return TS_SSA_NAME;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case TREE_BINFO:		return TS_BINFO;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;

    default:
      gcc_unreachable ();
    }
}
484
485
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.

   For every tree code this marks the TS structure it uses plus, via the
   MARK_TS_* macros, every structure that one is derived from, so that
   CODE_CONTAINS_STRUCT queries answer for the whole inheritance chain.
   Ends with consistency asserts for the combinations fold relies on.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  Each case
	 marks only the immediate base; the MARK_TS_* macros recurse
	 through the remaining bases.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
631
632
/* Init tree.c.  Creates the shared hash tables, the scratch nodes used
   when interning INTEGER_CSTs and option nodes, and fills in
   tree_contains_struct; finally lets the front end register its own
   tree structures via the init_ts hook.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  /* Scratch node used to probe the INTEGER_CST hash table.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  /* Scratch nodes used to probe the option hash table.  */
  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
661
662 \f
663 /* The name of the object as the assembler will see it (but before any
664 translations made by ASM_OUTPUT_LABELREF). Often this is the same
665 as DECL_NAME. It is an IDENTIFIER_NODE. */
666 tree
667 decl_assembler_name (tree decl)
668 {
669 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
670 lang_hooks.set_decl_assembler_name (decl);
671 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
672 }
673
674 /* When the target supports COMDAT groups, this indicates which group the
675 DECL is associated with. This can be either an IDENTIFIER_NODE or a
676 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
677 tree
678 decl_comdat_group (const_tree node)
679 {
680 struct symtab_node *snode = symtab_node::get (node);
681 if (!snode)
682 return NULL;
683 return snode->get_comdat_group ();
684 }
685
686 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
687 tree
688 decl_comdat_group_id (const_tree node)
689 {
690 struct symtab_node *snode = symtab_node::get (node);
691 if (!snode)
692 return NULL;
693 return snode->get_comdat_group_id ();
694 }
695
696 /* When the target supports named section, return its name as IDENTIFIER_NODE
697 or NULL if it is in no section. */
698 const char *
699 decl_section_name (const_tree node)
700 {
701 struct symtab_node *snode = symtab_node::get (node);
702 if (!snode)
703 return NULL;
704 return snode->get_section ();
705 }
706
707 /* Set section name of NODE to VALUE (that is expected to be
708 identifier node) */
709 void
710 set_decl_section_name (tree node, const char *value)
711 {
712 struct symtab_node *snode;
713
714 if (value == NULL)
715 {
716 snode = symtab_node::get (node);
717 if (!snode)
718 return;
719 }
720 else if (TREE_CODE (node) == VAR_DECL)
721 snode = varpool_node::get_create (node);
722 else
723 snode = cgraph_node::get_create (node);
724 snode->set_section (value);
725 }
726
727 /* Return TLS model of a variable NODE. */
728 enum tls_model
729 decl_tls_model (const_tree node)
730 {
731 struct varpool_node *snode = varpool_node::get (node);
732 if (!snode)
733 return TLS_MODEL_NONE;
734 return snode->tls_model;
735 }
736
737 /* Set TLS model of variable NODE to MODEL. */
738 void
739 set_decl_tls_model (tree node, enum tls_model model)
740 {
741 struct varpool_node *vnode;
742
743 if (model == TLS_MODEL_NONE)
744 {
745 vnode = varpool_node::get (node);
746 if (!vnode)
747 return;
748 }
749 else
750 vnode = varpool_node::get_create (node);
751 vnode->tls_model = model;
752 }
753
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR
   (those abort; see tree_size for the per-node computation).
   Unknown decl/constant/exceptional codes defer to the front end's
   tree_size langhook.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      {
	switch (code)
	  {
	  case FIELD_DECL:
	    return sizeof (struct tree_field_decl);
	  case PARM_DECL:
	    return sizeof (struct tree_parm_decl);
	  case VAR_DECL:
	    return sizeof (struct tree_var_decl);
	  case LABEL_DECL:
	    return sizeof (struct tree_label_decl);
	  case RESULT_DECL:
	    return sizeof (struct tree_result_decl);
	  case CONST_DECL:
	    return sizeof (struct tree_const_decl);
	  case TYPE_DECL:
	    return sizeof (struct tree_type_decl);
	  case FUNCTION_DECL:
	    return sizeof (struct tree_function_decl);
	  case DEBUG_EXPR_DECL:
	    return sizeof (struct tree_decl_with_rtl);
	  case TRANSLATION_UNIT_DECL:
	    return sizeof (struct tree_translation_unit_decl);
	  case NAMESPACE_DECL:
	  case IMPORTED_DECL:
	  case NAMELIST_DECL:
	    return sizeof (struct tree_decl_non_common);
	  default:
	    return lang_hooks.tree_size (code);
	  }
      }

    case tcc_type:  /* a type node */
      return sizeof (struct tree_type_non_common);

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp embeds one operand; add room for the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (struct tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case REAL_CST:		return sizeof (struct tree_real_cst);
	case FIXED_CST:		return sizeof (struct tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (struct tree_complex);
	case VECTOR_CST:	return sizeof (struct tree_vector);
	case STRING_CST:	gcc_unreachable ();
	default:
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (struct tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (struct tree_common);

	case TREE_VEC:
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (struct tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (struct tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (struct tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (struct tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);

	default:
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
849
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes
   (INTEGER_CST, TREE_BINFO, TREE_VEC, VECTOR_CST, STRING_CST,
   OMP_CLAUSE and tcc_vl_exp nodes), where the trailing element count
   is read from the node itself.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* One HOST_WIDE_INT element is embedded in tree_int_cst.  */
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      /* The base-binfo vector is allocated inline after the header.  */
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
	      + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));

    case STRING_CST:
      /* +1 for the NUL terminator after the string payload.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
		* sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
891
/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  No-op unless GATHER_STATISTICS; otherwise maps the code
   class (and, for tcc_exceptional, the specific code) to a
   tree_node_kind bucket and bumps the per-code count plus the
   per-kind count and cumulative size.  */

static void
record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
				   size_t length ATTRIBUTE_UNUSED)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);
  tree_node_kind kind;

  if (!GATHER_STATISTICS)
    return;

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      kind = d_kind;
      break;

    case tcc_type:  /* a type node */
      kind = t_kind;
      break;

    case tcc_statement:  /* an expression with side effects */
      kind = s_kind;
      break;

    case tcc_reference:  /* a reference */
      kind = r_kind;
      break;

    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      kind = e_kind;
      break;

    case tcc_constant:  /* a constant */
      kind = c_kind;
      break;

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  kind = id_kind;
	  break;

	case TREE_VEC:
	  kind = vec_kind;
	  break;

	case TREE_BINFO:
	  kind = binfo_kind;
	  break;

	case SSA_NAME:
	  kind = ssa_name_kind;
	  break;

	case BLOCK:
	  kind = b_kind;
	  break;

	case CONSTRUCTOR:
	  kind = constr_kind;
	  break;

	case OMP_CLAUSE:
	  kind = omp_clause_kind;
	  break;

	default:
	  kind = x_kind;
	  break;
	}
      break;

    case tcc_vl_exp:
      kind = e_kind;
      break;

    default:
      gcc_unreachable ();
    }

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}
983
984 /* Allocate and return a new UID from the DECL_UID namespace. */
985
986 int
987 allocate_decl_uid (void)
988 {
989 return next_decl_uid++;
990 }
991
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node_stat (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* GGC returns zeroed storage, so only non-zero defaults need
     explicit initialization below.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Statements always have side effects.  */
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      DECL_ALIGN (t) = FUNCTION_BOUNDARY;
	      DECL_MODE (t) = FUNCTION_MODE;
	    }
	  else
	    DECL_ALIGN (t) = 1;
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* Debug decls draw from the negative UID space to catch misuse.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      TYPE_ALIGN (t) = BITS_PER_UNIT;
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  /* Option nodes carry a separately-allocated payload.  */
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1106
/* Free tree node NODE back to the GC allocator, first releasing any
   separately-allocated vectors it owns (constructor elements, block
   nonlocalized vars, binfo base accesses) and reversing the allocation
   statistics.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      tree_code_counts[(int) TREE_CODE (node)]--;
      /* NOTE(review): count/size are always credited back to t_kind,
	 while record_node_allocation_statistics charges the node's
	 actual kind — confirm this asymmetry is intentional.  */
      tree_node_counts[(int) t_kind]--;
      tree_node_sizes[(int) t_kind] -= tree_code_size (TREE_CODE (node));
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  ggc_free (node);
}
1127 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node_stat (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* STATEMENT_LISTs have out-of-line structure that a bitwise copy
     cannot duplicate; callers must use the statement-list API.  */
  gcc_assert (code != STATEMENT_LIST);

  /* Allocate a node of the same size and start from a bitwise copy.  */
  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  /* The copy is a fresh node: detach it from any chain and clear
     per-node bookkeeping flags.  */
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Give the copy its own UID; debug decls draw from a separate,
	 descending counter.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      /* Value-exprs live in a side table keyed by the decl, so the
	 copy needs its own entry.  */
      if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (TREE_CODE (node) == VAR_DECL)
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      /* The copy does not share the original's function body or
	 symbol-table entry.  */
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the option payload so the copy owns its own blob.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1216
1217 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1218 For example, this can copy a list made of TREE_LIST nodes. */
1219
1220 tree
1221 copy_list (tree list)
1222 {
1223 tree head;
1224 tree prev, next;
1225
1226 if (list == 0)
1227 return 0;
1228
1229 head = prev = copy_node (list);
1230 next = TREE_CHAIN (list);
1231 while (next)
1232 {
1233 TREE_CHAIN (prev) = copy_node (next);
1234 prev = TREE_CHAIN (prev);
1235 next = TREE_CHAIN (next);
1236 }
1237 return head;
1238 }
1239
1240 \f
1241 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1242 INTEGER_CST with value CST and type TYPE. */
1243
1244 static unsigned int
1245 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1246 {
1247 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1248 /* We need extra HWIs if CST is an unsigned integer with its
1249 upper bit set. */
1250 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1251 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1252 return cst.get_len ();
1253 }
1254
/* Return a new INTEGER_CST with value CST and type TYPE.  The node is
   freshly allocated and never shared; callers that want sharing go
   through wide_int_to_tree.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Unsigned value with the top bit set: the extended elements
	 beyond the canonical length are all-ones, except the topmost
	 one which keeps only the bits inside the precision.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Partial top element of an unsigned value: store it
	 zero-extended from the precision boundary; the remaining
	 full elements are copied below.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the (remaining) canonical elements verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1286
1287 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1288
1289 tree
1290 build_int_cst (tree type, HOST_WIDE_INT low)
1291 {
1292 /* Support legacy code. */
1293 if (!type)
1294 type = integer_type_node;
1295
1296 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1297 }
1298
1299 tree
1300 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1301 {
1302 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1303 }
1304
1305 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1306
1307 tree
1308 build_int_cst_type (tree type, HOST_WIDE_INT low)
1309 {
1310 gcc_assert (type);
1311 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1312 }
1313
1314 /* Constructs tree in type TYPE from with value given by CST. Signedness
1315 of CST is assumed to be the same as the signedness of TYPE. */
1316
1317 tree
1318 double_int_to_tree (tree type, double_int cst)
1319 {
1320 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1321 }
1322
1323 /* We force the wide_int CST to the range of the type TYPE by sign or
1324 zero extending it. OVERFLOWABLE indicates if we are interested in
1325 overflow of the value, when >0 we are only interested in signed
1326 overflow, for <0 we are interested in any overflow. OVERFLOWED
1327 indicates whether overflow has already occurred. CONST_OVERFLOWED
1328 indicates whether constant overflow has already occurred. We force
1329 T's value to be within range of T's type (by setting to 0 or 1 all
1330 the bits outside the type's range). We set TREE_OVERFLOWED if,
1331 OVERFLOWED is nonzero,
1332 or OVERFLOWABLE is >0 and signed overflow occurs
1333 or OVERFLOWABLE is <0 and any overflow occurs
1334 We return a new tree node for the extended wide_int. The node
1335 is shared if no overflow flags are set. */
1336
1337
1338 tree
1339 force_fit_type (tree type, const wide_int_ref &cst,
1340 int overflowable, bool overflowed)
1341 {
1342 signop sign = TYPE_SIGN (type);
1343
1344 /* If we need to set overflow flags, return a new unshared node. */
1345 if (overflowed || !wi::fits_to_tree_p (cst, type))
1346 {
1347 if (overflowed
1348 || overflowable < 0
1349 || (overflowable > 0 && sign == SIGNED))
1350 {
1351 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1352 tree t = build_new_int_cst (type, tmp);
1353 TREE_OVERFLOW (t) = 1;
1354 return t;
1355 }
1356 }
1357
1358 /* Else build a shared node. */
1359 return wide_int_to_tree (type, cst);
1360 }
1361
1362 /* These are the hash table functions for the hash table of INTEGER_CST
1363 nodes of a sizetype. */
1364
1365 /* Return the hash code X, an INTEGER_CST. */
1366
1367 hashval_t
1368 int_cst_hasher::hash (tree x)
1369 {
1370 const_tree const t = x;
1371 hashval_t code = TYPE_UID (TREE_TYPE (t));
1372 int i;
1373
1374 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1375 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1376
1377 return code;
1378 }
1379
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST node.  */
1382
1383 bool
1384 int_cst_hasher::equal (tree x, tree y)
1385 {
1386 const_tree const xt = x;
1387 const_tree const yt = y;
1388
1389 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1390 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1391 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1392 return false;
1393
1394 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1395 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1396 return false;
1397
1398 return true;
1399 }
1400
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

tree
wide_int_to_tree (tree type, const wide_int_ref &pcst)
{
  tree t;
  /* IX >= 0 selects a slot in the per-type small-value cache; LIMIT
     is the size that cache must have for this type.  */
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  /* Extend the value to the type's precision and sign.  */
  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide, per type code, whether this value lands in the small
	 per-type cache and at which index.  */
      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case POINTER_BOUNDS_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = INTEGER_SHARE_LIMIT;
	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  Slot 0 holds -1, so indices are
		 shifted by one.  */
	      limit = INTEGER_SHARE_LIMIT + 1;
	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
    }

  return t;
}
1554
/* Enter the INTEGER_CST T into the shared-constant caches (the
   per-type small-value vector or the global hash table), mirroring the
   policy used by wide_int_to_tree.  T must not have TREE_OVERFLOW set,
   and the chosen small-cache slot must still be empty.  */

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  /* IX >= 0 selects a slot in the per-type small-value cache; LIMIT
     is the size that cache must have for this type.  */
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (t, 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = INTEGER_SHARE_LIMIT;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N; slot 0 holds -1, so indices are shifted by
	     one.  */
	  limit = INTEGER_SHARE_LIMIT + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (t))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      /* The slot must still be free: caching an already-cached value
	 would create two "shared" nodes for it.  */
      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::eq_p (tree (*slot), t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}
1658
1659
1660 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1661 and the rest are zeros. */
1662
1663 tree
1664 build_low_bits_mask (tree type, unsigned bits)
1665 {
1666 gcc_assert (bits <= TYPE_PRECISION (type));
1667
1668 return wide_int_to_tree (type, wi::mask (bits, false,
1669 TYPE_PRECISION (type)));
1670 }
1671
1672 /* Checks that X is integer constant that can be expressed in (unsigned)
1673 HOST_WIDE_INT without loss of precision. */
1674
1675 bool
1676 cst_and_fits_in_hwi (const_tree x)
1677 {
1678 if (TREE_CODE (x) != INTEGER_CST)
1679 return false;
1680
1681 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1682 return false;
1683
1684 return TREE_INT_CST_NUNITS (x) == 1;
1685 }
1686
1687 /* Build a newly constructed VECTOR_CST node of length LEN. */
1688
1689 tree
1690 make_vector_stat (unsigned len MEM_STAT_DECL)
1691 {
1692 tree t;
1693 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1694
1695 record_node_allocation_statistics (VECTOR_CST, length);
1696
1697 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1698
1699 TREE_SET_CODE (t, VECTOR_CST);
1700 TREE_CONSTANT (t) = 1;
1701
1702 return t;
1703 }
1704
1705 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1706 are in a list pointed to by VALS. */
1707
1708 tree
1709 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1710 {
1711 int over = 0;
1712 unsigned cnt = 0;
1713 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1714 TREE_TYPE (v) = type;
1715
1716 /* Iterate through elements and check for overflow. */
1717 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1718 {
1719 tree value = vals[cnt];
1720
1721 VECTOR_CST_ELT (v, cnt) = value;
1722
1723 /* Don't crash if we get an address constant. */
1724 if (!CONSTANT_CLASS_P (value))
1725 continue;
1726
1727 over |= TREE_OVERFLOW (value);
1728 }
1729
1730 TREE_OVERFLOW (v) = over;
1731 return v;
1732 }
1733
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  VECTOR_CST
   elements in V are flattened; missing trailing elements are filled
   with zero.  NOTE(review): assumes the total number of scalars in V
   does not exceed TYPE_VECTOR_SUBPARTS (type) -- the callers must
   guarantee this, as VEC would otherwise overflow.  */

tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
  unsigned HOST_WIDE_INT idx, pos = 0;
  tree value;

  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      /* A VECTOR_CST element contributes all of its scalars.  */
      if (TREE_CODE (value) == VECTOR_CST)
	for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
	  vec[pos++] = VECTOR_CST_ELT (value, i);
      else
	vec[pos++] = value;
    }
  /* IDX is left at the element count; pad the remainder with zeros.  */
  for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
    vec[pos++] = build_zero_cst (TREE_TYPE (type));

  return build_vector (type, vec);
}
1757
1758 /* Build a vector of type VECTYPE where all the elements are SCs. */
1759 tree
1760 build_vector_from_val (tree vectype, tree sc)
1761 {
1762 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1763
1764 if (sc == error_mark_node)
1765 return sc;
1766
1767 /* Verify that the vector type is suitable for SC. Note that there
1768 is some inconsistency in the type-system with respect to restrict
1769 qualifications of pointers. Vector types always have a main-variant
1770 element type and the qualification is applied to the vector-type.
1771 So TREE_TYPE (vector-type) does not return a properly qualified
1772 vector element-type. */
1773 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1774 TREE_TYPE (vectype)));
1775
1776 if (CONSTANT_CLASS_P (sc))
1777 {
1778 tree *v = XALLOCAVEC (tree, nunits);
1779 for (i = 0; i < nunits; ++i)
1780 v[i] = sc;
1781 return build_vector (vectype, v);
1782 }
1783 else
1784 {
1785 vec<constructor_elt, va_gc> *v;
1786 vec_alloc (v, nunits);
1787 for (i = 0; i < nunits; ++i)
1788 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1789 return build_constructor (vectype, v);
1790 }
1791 }
1792
1793 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1794 are in the vec pointed to by VALS. */
1795 tree
1796 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1797 {
1798 tree c = make_node (CONSTRUCTOR);
1799 unsigned int i;
1800 constructor_elt *elt;
1801 bool constant_p = true;
1802 bool side_effects_p = false;
1803
1804 TREE_TYPE (c) = type;
1805 CONSTRUCTOR_ELTS (c) = vals;
1806
1807 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1808 {
1809 /* Mostly ctors will have elts that don't have side-effects, so
1810 the usual case is to scan all the elements. Hence a single
1811 loop for both const and side effects, rather than one loop
1812 each (with early outs). */
1813 if (!TREE_CONSTANT (elt->value))
1814 constant_p = false;
1815 if (TREE_SIDE_EFFECTS (elt->value))
1816 side_effects_p = true;
1817 }
1818
1819 TREE_SIDE_EFFECTS (c) = side_effects_p;
1820 TREE_CONSTANT (c) = constant_p;
1821
1822 return c;
1823 }
1824
1825 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1826 INDEX and VALUE. */
1827 tree
1828 build_constructor_single (tree type, tree index, tree value)
1829 {
1830 vec<constructor_elt, va_gc> *v;
1831 constructor_elt elt = {index, value};
1832
1833 vec_alloc (v, 1);
1834 v->quick_push (elt);
1835
1836 return build_constructor (type, v);
1837 }
1838
1839
1840 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1841 are in a list pointed to by VALS. */
1842 tree
1843 build_constructor_from_list (tree type, tree vals)
1844 {
1845 tree t;
1846 vec<constructor_elt, va_gc> *v = NULL;
1847
1848 if (vals)
1849 {
1850 vec_alloc (v, list_length (vals));
1851 for (t = vals; t; t = TREE_CHAIN (t))
1852 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1853 }
1854
1855 return build_constructor (type, v);
1856 }
1857
1858 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1859 of elements, provided as index/value pairs. */
1860
1861 tree
1862 build_constructor_va (tree type, int nelts, ...)
1863 {
1864 vec<constructor_elt, va_gc> *v = NULL;
1865 va_list p;
1866
1867 va_start (p, nelts);
1868 vec_alloc (v, nelts);
1869 while (nelts--)
1870 {
1871 tree index = va_arg (p, tree);
1872 tree value = va_arg (p, tree);
1873 CONSTRUCTOR_APPEND_ELT (v, index, value);
1874 }
1875 va_end (p);
1876 return build_constructor (type, v);
1877 }
1878
1879 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1880
1881 tree
1882 build_fixed (tree type, FIXED_VALUE_TYPE f)
1883 {
1884 tree v;
1885 FIXED_VALUE_TYPE *fp;
1886
1887 v = make_node (FIXED_CST);
1888 fp = ggc_alloc<fixed_value> ();
1889 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1890
1891 TREE_TYPE (v) = type;
1892 TREE_FIXED_CST_PTR (v) = fp;
1893 return v;
1894 }
1895
1896 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1897
1898 tree
1899 build_real (tree type, REAL_VALUE_TYPE d)
1900 {
1901 tree v;
1902 REAL_VALUE_TYPE *dp;
1903 int overflow = 0;
1904
1905 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1906 Consider doing it via real_convert now. */
1907
1908 v = make_node (REAL_CST);
1909 dp = ggc_alloc<real_value> ();
1910 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1911
1912 TREE_TYPE (v) = type;
1913 TREE_REAL_CST_PTR (v) = dp;
1914 TREE_OVERFLOW (v) = overflow;
1915 return v;
1916 }
1917
1918 /* Like build_real, but first truncate D to the type. */
1919
1920 tree
1921 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1922 {
1923 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1924 }
1925
1926 /* Return a new REAL_CST node whose type is TYPE
1927 and whose value is the integer value of the INTEGER_CST node I. */
1928
1929 REAL_VALUE_TYPE
1930 real_value_from_int_cst (const_tree type, const_tree i)
1931 {
1932 REAL_VALUE_TYPE d;
1933
1934 /* Clear all bits of the real value type so that we can later do
1935 bitwise comparisons to see if two values are the same. */
1936 memset (&d, 0, sizeof d);
1937
1938 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1939 TYPE_SIGN (TREE_TYPE (i)));
1940 return d;
1941 }
1942
1943 /* Given a tree representing an integer constant I, return a tree
1944 representing the same value as a floating-point constant of type TYPE. */
1945
1946 tree
1947 build_real_from_int_cst (tree type, const_tree i)
1948 {
1949 tree v;
1950 int overflow = TREE_OVERFLOW (i);
1951
1952 v = build_real (type, real_value_from_int_cst (type, i));
1953
1954 TREE_OVERFLOW (v) |= overflow;
1955 return v;
1956 }
1957
1958 /* Return a newly constructed STRING_CST node whose value is
1959 the LEN characters at STR.
1960 Note that for a C string literal, LEN should include the trailing NUL.
1961 The TREE_TYPE is not initialized. */
1962
1963 tree
1964 build_string (int len, const char *str)
1965 {
1966 tree s;
1967 size_t length;
1968
1969 /* Do not waste bytes provided by padding of struct tree_string. */
1970 length = len + offsetof (struct tree_string, str) + 1;
1971
1972 record_node_allocation_statistics (STRING_CST, length);
1973
1974 s = (tree) ggc_internal_alloc (length);
1975
1976 memset (s, 0, sizeof (struct tree_typed));
1977 TREE_SET_CODE (s, STRING_CST);
1978 TREE_CONSTANT (s) = 1;
1979 TREE_STRING_LENGTH (s) = len;
1980 memcpy (s->string.str, str, len);
1981 s->string.str[len] = '\0';
1982
1983 return s;
1984 }
1985
1986 /* Return a newly constructed COMPLEX_CST node whose value is
1987 specified by the real and imaginary parts REAL and IMAG.
1988 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1989 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1990
1991 tree
1992 build_complex (tree type, tree real, tree imag)
1993 {
1994 tree t = make_node (COMPLEX_CST);
1995
1996 TREE_REALPART (t) = real;
1997 TREE_IMAGPART (t) = imag;
1998 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1999 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2000 return t;
2001 }
2002
2003 /* Build a complex (inf +- 0i), such as for the result of cproj.
2004 TYPE is the complex tree type of the result. If NEG is true, the
2005 imaginary zero is negative. */
2006
2007 tree
2008 build_complex_inf (tree type, bool neg)
2009 {
2010 REAL_VALUE_TYPE rinf, rzero = dconst0;
2011
2012 real_inf (&rinf);
2013 rzero.sign = neg;
2014 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2015 build_real (TREE_TYPE (type), rzero));
2016 }
2017
2018 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2019 element is set to 1. In particular, this is 1 + i for complex types. */
2020
2021 tree
2022 build_each_one_cst (tree type)
2023 {
2024 if (TREE_CODE (type) == COMPLEX_TYPE)
2025 {
2026 tree scalar = build_one_cst (TREE_TYPE (type));
2027 return build_complex (type, scalar, scalar);
2028 }
2029 else
2030 return build_one_cst (type);
2031 }
2032
2033 /* Return a constant of arithmetic type TYPE which is the
2034 multiplicative identity of the set TYPE. */
2035
2036 tree
2037 build_one_cst (tree type)
2038 {
2039 switch (TREE_CODE (type))
2040 {
2041 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2042 case POINTER_TYPE: case REFERENCE_TYPE:
2043 case OFFSET_TYPE:
2044 return build_int_cst (type, 1);
2045
2046 case REAL_TYPE:
2047 return build_real (type, dconst1);
2048
2049 case FIXED_POINT_TYPE:
2050 /* We can only generate 1 for accum types. */
2051 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2052 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2053
2054 case VECTOR_TYPE:
2055 {
2056 tree scalar = build_one_cst (TREE_TYPE (type));
2057
2058 return build_vector_from_val (type, scalar);
2059 }
2060
2061 case COMPLEX_TYPE:
2062 return build_complex (type,
2063 build_one_cst (TREE_TYPE (type)),
2064 build_zero_cst (TREE_TYPE (type)));
2065
2066 default:
2067 gcc_unreachable ();
2068 }
2069 }
2070
2071 /* Return an integer of type TYPE containing all 1's in as much precision as
2072 it contains, or a complex or vector whose subparts are such integers. */
2073
2074 tree
2075 build_all_ones_cst (tree type)
2076 {
2077 if (TREE_CODE (type) == COMPLEX_TYPE)
2078 {
2079 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2080 return build_complex (type, scalar, scalar);
2081 }
2082 else
2083 return build_minus_one_cst (type);
2084 }
2085
2086 /* Return a constant of arithmetic type TYPE which is the
2087 opposite of the multiplicative identity of the set TYPE. */
2088
2089 tree
2090 build_minus_one_cst (tree type)
2091 {
2092 switch (TREE_CODE (type))
2093 {
2094 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2095 case POINTER_TYPE: case REFERENCE_TYPE:
2096 case OFFSET_TYPE:
2097 return build_int_cst (type, -1);
2098
2099 case REAL_TYPE:
2100 return build_real (type, dconstm1);
2101
2102 case FIXED_POINT_TYPE:
2103 /* We can only generate 1 for accum types. */
2104 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2105 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2106 TYPE_MODE (type)));
2107
2108 case VECTOR_TYPE:
2109 {
2110 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2111
2112 return build_vector_from_val (type, scalar);
2113 }
2114
2115 case COMPLEX_TYPE:
2116 return build_complex (type,
2117 build_minus_one_cst (TREE_TYPE (type)),
2118 build_zero_cst (TREE_TYPE (type)));
2119
2120 default:
2121 gcc_unreachable ();
2122 }
2123 }
2124
2125 /* Build 0 constant of type TYPE. This is used by constructor folding
2126 and thus the constant should be represented in memory by
2127 zero(es). */
2128
2129 tree
2130 build_zero_cst (tree type)
2131 {
2132 switch (TREE_CODE (type))
2133 {
2134 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2135 case POINTER_TYPE: case REFERENCE_TYPE:
2136 case OFFSET_TYPE: case NULLPTR_TYPE:
2137 return build_int_cst (type, 0);
2138
2139 case REAL_TYPE:
2140 return build_real (type, dconst0);
2141
2142 case FIXED_POINT_TYPE:
2143 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2144
2145 case VECTOR_TYPE:
2146 {
2147 tree scalar = build_zero_cst (TREE_TYPE (type));
2148
2149 return build_vector_from_val (type, scalar);
2150 }
2151
2152 case COMPLEX_TYPE:
2153 {
2154 tree zero = build_zero_cst (TREE_TYPE (type));
2155
2156 return build_complex (type, zero, zero);
2157 }
2158
2159 default:
2160 if (!AGGREGATE_TYPE_P (type))
2161 return fold_convert (type, integer_zero_node);
2162 return build_constructor (type, NULL);
2163 }
2164 }
2165
2166
/* Build a BINFO with LEN language slots.  The BINFO's base-binfo
   vector is embedded at the tail of the node and sized for
   BASE_BINFOS elements.  */

tree
make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The embedded vec<tree, va_gc> starts at the base_binfos field;
     size the node for its header plus BASE_BINFOS elements.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the part before the embedded vector is cleared; the vector
     is initialized below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2188
2189 /* Create a CASE_LABEL_EXPR tree node and return it. */
2190
2191 tree
2192 build_case_label (tree low_value, tree high_value, tree label_decl)
2193 {
2194 tree t = make_node (CASE_LABEL_EXPR);
2195
2196 TREE_TYPE (t) = void_type_node;
2197 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2198
2199 CASE_LOW (t) = low_value;
2200 CASE_HIGH (t) = high_value;
2201 CASE_LABEL (t) = label_decl;
2202 CASE_CHAIN (t) = NULL_TREE;
2203
2204 return t;
2205 }
2206
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* tree_int_cst embeds one HOST_WIDE_INT; size the node for EXT_LEN
     of them in total.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2238
2239 /* Build a newly constructed TREE_VEC node of length LEN. */
2240
2241 tree
2242 make_tree_vec_stat (int len MEM_STAT_DECL)
2243 {
2244 tree t;
2245 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2246
2247 record_node_allocation_statistics (TREE_VEC, length);
2248
2249 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2250
2251 TREE_SET_CODE (t, TREE_VEC);
2252 TREE_VEC_LENGTH (t) = len;
2253
2254 return t;
2255 }
2256
/* Grow a TREE_VEC node to new length LEN.  Returns the (possibly
   reallocated, hence moved) vector; callers must use the return value
   rather than the argument afterwards.  Only growth is supported.  */

tree
grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  gcc_assert (len > oldlen);

  int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Only account for the delta; the original size was recorded when
     the vector was first allocated.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
2278 \f
2279 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2280 fixed, and scalar, complex or vector. */
2281
2282 int
2283 zerop (const_tree expr)
2284 {
2285 return (integer_zerop (expr)
2286 || real_zerop (expr)
2287 || fixed_zerop (expr));
2288 }
2289
2290 /* Return 1 if EXPR is the integer constant zero or a complex constant
2291 of zero. */
2292
2293 int
2294 integer_zerop (const_tree expr)
2295 {
2296 switch (TREE_CODE (expr))
2297 {
2298 case INTEGER_CST:
2299 return wi::eq_p (expr, 0);
2300 case COMPLEX_CST:
2301 return (integer_zerop (TREE_REALPART (expr))
2302 && integer_zerop (TREE_IMAGPART (expr)));
2303 case VECTOR_CST:
2304 {
2305 unsigned i;
2306 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2307 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2308 return false;
2309 return true;
2310 }
2311 default:
2312 return false;
2313 }
2314 }
2315
2316 /* Return 1 if EXPR is the integer constant one or the corresponding
2317 complex constant. */
2318
2319 int
2320 integer_onep (const_tree expr)
2321 {
2322 switch (TREE_CODE (expr))
2323 {
2324 case INTEGER_CST:
2325 return wi::eq_p (wi::to_widest (expr), 1);
2326 case COMPLEX_CST:
2327 return (integer_onep (TREE_REALPART (expr))
2328 && integer_zerop (TREE_IMAGPART (expr)));
2329 case VECTOR_CST:
2330 {
2331 unsigned i;
2332 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2333 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2334 return false;
2335 return true;
2336 }
2337 default:
2338 return false;
2339 }
2340 }
2341
2342 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2343 return 1 if every piece is the integer constant one. */
2344
2345 int
2346 integer_each_onep (const_tree expr)
2347 {
2348 if (TREE_CODE (expr) == COMPLEX_CST)
2349 return (integer_onep (TREE_REALPART (expr))
2350 && integer_onep (TREE_IMAGPART (expr)));
2351 else
2352 return integer_onep (expr);
2353 }
2354
/* Return 1 if EXPR is an integer containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers.  */

int
integer_all_onesp (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_all_onesp (TREE_REALPART (expr))
      && integer_all_onesp (TREE_IMAGPART (expr)))
    return 1;

  else if (TREE_CODE (expr) == VECTOR_CST)
    {
      unsigned i;
      for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
	  return 0;
      return 1;
    }

  else if (TREE_CODE (expr) != INTEGER_CST)
    return 0;

  /* All-ones in N bits is exactly the maximum unsigned value of an
     N-bit type, regardless of EXPR's own signedness.  */
  return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
}
2380
2381 /* Return 1 if EXPR is the integer constant minus one. */
2382
2383 int
2384 integer_minus_onep (const_tree expr)
2385 {
2386 if (TREE_CODE (expr) == COMPLEX_CST)
2387 return (integer_all_onesp (TREE_REALPART (expr))
2388 && integer_zerop (TREE_IMAGPART (expr)));
2389 else
2390 return integer_all_onesp (expr);
2391 }
2392
2393 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2394 one bit on). */
2395
2396 int
2397 integer_pow2p (const_tree expr)
2398 {
2399 if (TREE_CODE (expr) == COMPLEX_CST
2400 && integer_pow2p (TREE_REALPART (expr))
2401 && integer_zerop (TREE_IMAGPART (expr)))
2402 return 1;
2403
2404 if (TREE_CODE (expr) != INTEGER_CST)
2405 return 0;
2406
2407 return wi::popcount (expr) == 1;
2408 }
2409
2410 /* Return 1 if EXPR is an integer constant other than zero or a
2411 complex constant other than zero. */
2412
2413 int
2414 integer_nonzerop (const_tree expr)
2415 {
2416 return ((TREE_CODE (expr) == INTEGER_CST
2417 && !wi::eq_p (expr, 0))
2418 || (TREE_CODE (expr) == COMPLEX_CST
2419 && (integer_nonzerop (TREE_REALPART (expr))
2420 || integer_nonzerop (TREE_IMAGPART (expr)))));
2421 }
2422
2423 /* Return 1 if EXPR is the integer constant one. For vector,
2424 return 1 if every piece is the integer constant minus one
2425 (representing the value TRUE). */
2426
2427 int
2428 integer_truep (const_tree expr)
2429 {
2430 if (TREE_CODE (expr) == VECTOR_CST)
2431 return integer_all_onesp (expr);
2432 return integer_onep (expr);
2433 }
2434
2435 /* Return 1 if EXPR is the fixed-point constant zero. */
2436
2437 int
2438 fixed_zerop (const_tree expr)
2439 {
2440 return (TREE_CODE (expr) == FIXED_CST
2441 && TREE_FIXED_CST (expr).data.is_zero ());
2442 }
2443
/* Return the power of two represented by a tree node known to be a
   power of two.  Returns -1 (via wi::exact_log2) if EXPR is not in
   fact a power of two.  */

int
tree_log2 (const_tree expr)
{
  /* For a complex constant, look at the real part only.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::exact_log2 (expr);
}
2455
/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  /* NOTE(review): for COMPLEX_CST this delegates to tree_log2, which
     computes the *exact* log2 (-1 when the real part is not a power of
     two), not the floor.  The two agree only for powers of two --
     presumably the only case callers hit; confirm before relying on
     floor semantics for complex operands.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (expr);
}
2467
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  Conservatively
   returns 0 when nothing is known.  Recurses through arithmetic.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have meaningful trailing zeros.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (expr);
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use whatever nonzero-bits information earlier passes recorded.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve only the trailing zeros common to both operands.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only add trailing zeros, so take the larger count.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros of a product accumulate.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* Only a constant in-range shift count adds known zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      /* A right shift removes known trailing zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two behaves like a right
	 shift for this purpose.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If the whole source value is known zero, so is the result.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must guarantee the zeros.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* Value comes from the second operand.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the pointed-to object's alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
2578
2579 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2580 decimal float constants, so don't return 1 for them. */
2581
2582 int
2583 real_zerop (const_tree expr)
2584 {
2585 switch (TREE_CODE (expr))
2586 {
2587 case REAL_CST:
2588 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2589 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2590 case COMPLEX_CST:
2591 return real_zerop (TREE_REALPART (expr))
2592 && real_zerop (TREE_IMAGPART (expr));
2593 case VECTOR_CST:
2594 {
2595 unsigned i;
2596 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2597 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2598 return false;
2599 return true;
2600 }
2601 default:
2602 return false;
2603 }
2604 }
2605
2606 /* Return 1 if EXPR is the real constant one in real or complex form.
2607 Trailing zeroes matter for decimal float constants, so don't return
2608 1 for them. */
2609
2610 int
2611 real_onep (const_tree expr)
2612 {
2613 switch (TREE_CODE (expr))
2614 {
2615 case REAL_CST:
2616 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2617 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2618 case COMPLEX_CST:
2619 return real_onep (TREE_REALPART (expr))
2620 && real_zerop (TREE_IMAGPART (expr));
2621 case VECTOR_CST:
2622 {
2623 unsigned i;
2624 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2625 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2626 return false;
2627 return true;
2628 }
2629 default:
2630 return false;
2631 }
2632 }
2633
2634 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2635 matter for decimal float constants, so don't return 1 for them. */
2636
2637 int
2638 real_minus_onep (const_tree expr)
2639 {
2640 switch (TREE_CODE (expr))
2641 {
2642 case REAL_CST:
2643 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2644 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2645 case COMPLEX_CST:
2646 return real_minus_onep (TREE_REALPART (expr))
2647 && real_zerop (TREE_IMAGPART (expr));
2648 case VECTOR_CST:
2649 {
2650 unsigned i;
2651 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2652 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2653 return false;
2654 return true;
2655 }
2656 default:
2657 return false;
2658 }
2659 }
2660
2661 /* Nonzero if EXP is a constant or a cast of a constant. */
2662
2663 int
2664 really_constant_p (const_tree exp)
2665 {
2666 /* This is not quite the same as STRIP_NOPS. It does more. */
2667 while (CONVERT_EXPR_P (exp)
2668 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2669 exp = TREE_OPERAND (exp, 0);
2670 return TREE_CONSTANT (exp);
2671 }
2672 \f
2673 /* Return first list element whose TREE_VALUE is ELEM.
2674 Return 0 if ELEM is not in LIST. */
2675
2676 tree
2677 value_member (tree elem, tree list)
2678 {
2679 while (list)
2680 {
2681 if (elem == TREE_VALUE (list))
2682 return list;
2683 list = TREE_CHAIN (list);
2684 }
2685 return NULL_TREE;
2686 }
2687
2688 /* Return first list element whose TREE_PURPOSE is ELEM.
2689 Return 0 if ELEM is not in LIST. */
2690
2691 tree
2692 purpose_member (const_tree elem, tree list)
2693 {
2694 while (list)
2695 {
2696 if (elem == TREE_PURPOSE (list))
2697 return list;
2698 list = TREE_CHAIN (list);
2699 }
2700 return NULL_TREE;
2701 }
2702
/* Return true if ELEM is in V (compared by pointer identity); V may
   be NULL, in which case the answer is false.  */

bool
vec_member (const_tree elem, vec<tree, va_gc> *v)
{
  unsigned ix;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (v, ix, t)
    if (elem == t)
      return true;
  return false;
}
2715
2716 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2717 NULL_TREE. */
2718
2719 tree
2720 chain_index (int idx, tree chain)
2721 {
2722 for (; chain && idx > 0; --idx)
2723 chain = TREE_CHAIN (chain);
2724 return chain;
2725 }
2726
2727 /* Return nonzero if ELEM is part of the chain CHAIN. */
2728
2729 int
2730 chain_member (const_tree elem, const_tree chain)
2731 {
2732 while (chain)
2733 {
2734 if (elem == chain)
2735 return 1;
2736 chain = DECL_CHAIN (chain);
2737 }
2738
2739 return 0;
2740 }
2741
/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q is a "slow pointer" advancing at half the speed of P; if the
     chain is circular the two must eventually meet (Floyd's cycle
     detection), which the assert below catches.  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
2768
2769 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2770 UNION_TYPE TYPE, or NULL_TREE if none. */
2771
2772 tree
2773 first_field (const_tree type)
2774 {
2775 tree t = TYPE_FIELDS (type);
2776 while (t && TREE_CODE (t) != FIELD_DECL)
2777 t = TREE_CHAIN (t);
2778 return t;
2779 }
2780
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  Destructive on OP1; either
   argument may be NULL.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Find the last node of OP1 and splice OP2 onto it.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Verify the splice did not create a cycle (i.e. OP2 did not
       already contain the tail of OP1).  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
2809
2810 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2811
2812 tree
2813 tree_last (tree chain)
2814 {
2815 tree next;
2816 if (chain)
2817 while ((next = TREE_CHAIN (chain)))
2818 chain = next;
2819 return chain;
2820 }
2821
2822 /* Reverse the order of elements in the chain T,
2823 and return the new head of the chain (old last element). */
2824
2825 tree
2826 nreverse (tree t)
2827 {
2828 tree prev = 0, decl, next;
2829 for (decl = t; decl; decl = next)
2830 {
2831 /* We shouldn't be using this function to reverse BLOCK chains; we
2832 have blocks_nreverse for that. */
2833 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2834 next = TREE_CHAIN (decl);
2835 TREE_CHAIN (decl) = prev;
2836 prev = decl;
2837 }
2838 return prev;
2839 }
2840 \f
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PARM and VALUE.  The chain is left
   empty (make_node clears it).  */

tree
build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
{
  tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}
2852
/* Build a chain of TREE_LIST nodes from a vector, preserving order.
   Each element becomes the TREE_VALUE of a list node whose
   TREE_PURPOSE is NULL.  */

tree
build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  /* PP always points at the slot where the next node goes (tail
     pointer idiom), so the chain is built front-to-back.  */
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}
2869
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing; the list fields are all
     explicitly assigned below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
2890
/* Return the values of the elements of a CONSTRUCTOR as a vector of
   trees.  Indices/purposes are dropped; only the values survive.  */

vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  /* Pre-size the vector so quick_push below cannot overflow.  */
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}
2907 \f
2908 /* Return the size nominally occupied by an object of type TYPE
2909 when it resides in memory. The value is measured in units of bytes,
2910 and its data type is that normally used for type sizes
2911 (which is the first type created by make_signed_type or
2912 make_unsigned_type). */
2913
2914 tree
2915 size_in_bytes (const_tree type)
2916 {
2917 tree t;
2918
2919 if (type == error_mark_node)
2920 return integer_zero_node;
2921
2922 type = TYPE_MAIN_VARIANT (type);
2923 t = TYPE_SIZE_UNIT (type);
2924
2925 if (t == 0)
2926 {
2927 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2928 return size_zero_node;
2929 }
2930
2931 return t;
2932 }
2933
2934 /* Return the size of TYPE (in bytes) as a wide integer
2935 or return -1 if the size can vary or is larger than an integer. */
2936
2937 HOST_WIDE_INT
2938 int_size_in_bytes (const_tree type)
2939 {
2940 tree t;
2941
2942 if (type == error_mark_node)
2943 return 0;
2944
2945 type = TYPE_MAIN_VARIANT (type);
2946 t = TYPE_SIZE_UNIT (type);
2947
2948 if (t && tree_fits_uhwi_p (t))
2949 return TREE_INT_CST_LOW (t);
2950 else
2951 return -1;
2952 }
2953
2954 /* Return the maximum size of TYPE (in bytes) as a wide integer
2955 or return -1 if the size can vary or is larger than an integer. */
2956
2957 HOST_WIDE_INT
2958 max_int_size_in_bytes (const_tree type)
2959 {
2960 HOST_WIDE_INT size = -1;
2961 tree size_tree;
2962
2963 /* If this is an array type, check for a possible MAX_SIZE attached. */
2964
2965 if (TREE_CODE (type) == ARRAY_TYPE)
2966 {
2967 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2968
2969 if (size_tree && tree_fits_uhwi_p (size_tree))
2970 size = tree_to_uhwi (size_tree);
2971 }
2972
2973 /* If we still haven't been able to get a size, see if the language
2974 can compute a maximum size. */
2975
2976 if (size == -1)
2977 {
2978 size_tree = lang_hooks.types.max_size (type);
2979
2980 if (size_tree && tree_fits_uhwi_p (size_tree))
2981 size = tree_to_uhwi (size_tree);
2982 }
2983
2984 return size;
2985 }
2986 \f
2987 /* Return the bit position of FIELD, in bits from the start of the record.
2988 This is a tree of type bitsizetype. */
2989
2990 tree
2991 bit_position (const_tree field)
2992 {
2993 return bit_from_pos (DECL_FIELD_OFFSET (field),
2994 DECL_FIELD_BIT_OFFSET (field));
2995 }
2996 \f
2997 /* Return the byte position of FIELD, in bytes from the start of the record.
2998 This is a tree of type sizetype. */
2999
3000 tree
3001 byte_position (const_tree field)
3002 {
3003 return byte_from_pos (DECL_FIELD_OFFSET (field),
3004 DECL_FIELD_BIT_OFFSET (field));
3005 }
3006
3007 /* Likewise, but return as an integer. It must be representable in
3008 that way (since it could be a signed value, we don't have the
3009 option of returning -1 like int_size_in_byte can. */
3010
3011 HOST_WIDE_INT
3012 int_byte_position (const_tree field)
3013 {
3014 return tree_to_shwi (byte_position (field));
3015 }
3016 \f
/* Return the strictest alignment, in bits, that T is known to have.  */

unsigned int
expr_align (const_tree t)
{
  unsigned int align0, align1;

  switch (TREE_CODE (t))
    {
    CASE_CONVERT:  case NON_LVALUE_EXPR:
      /* If we have conversions, we know that the alignment of the
	 object must meet each of the alignments of the types.  */
      align0 = expr_align (TREE_OPERAND (t, 0));
      align1 = TYPE_ALIGN (TREE_TYPE (t));
      return MAX (align0, align1);

    case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
    case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      /* These don't change the alignment of an object.  */
      return expr_align (TREE_OPERAND (t, 0));

    case COND_EXPR:
      /* The best we can do is say that the alignment is the least aligned
	 of the two arms.  */
      align0 = expr_align (TREE_OPERAND (t, 1));
      align1 = expr_align (TREE_OPERAND (t, 2));
      return MIN (align0, align1);

      /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
	 meaningfully, it's always 1.  */
    case LABEL_DECL:     case CONST_DECL:
    case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
    case FUNCTION_DECL:
      gcc_assert (DECL_ALIGN (t) != 0);
      return DECL_ALIGN (t);

    default:
      break;
    }

  /* Otherwise take the alignment from that of the type.  */
  return TYPE_ALIGN (TREE_TYPE (t));
}
3061 \f
/* Return, as a tree node, the number of elements for TYPE (which is an
   ARRAY_TYPE) minus one.  This counts only elements of the top array.
   Returns error_mark_node when the domain or upper bound is missing.  */

tree
array_type_nelts (const_tree type)
{
  tree index_type, min, max;

  /* If they did it with unspecified bounds, then we should have already
     given an error about it before we got here.  */
  if (! TYPE_DOMAIN (type))
    return error_mark_node;

  index_type = TYPE_DOMAIN (type);
  min = TYPE_MIN_VALUE (index_type);
  max = TYPE_MAX_VALUE (index_type);

  /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
  if (!max)
    return error_mark_node;

  /* When the lower bound is zero, MAX itself is already nelts - 1;
     otherwise compute MAX - MIN.  */
  return (integer_zerop (min)
	  ? max
	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
}
3087 \f
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* Thread-locals and dllimported variables have no fixed compile-time
	 address even when static.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Static only when both the element size and the index are
	 compile-time constants.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3153
3154 \f
3155
3156
/* Return whether OP is a DECL whose address is function-invariant,
   i.e. constant for the duration of one execution of the current
   function (automatic locals qualify; their frame slot is fixed).  */

bool
decl_address_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
      return true;

    case VAR_DECL:
      /* Statics, thread-locals (fixed per-thread slot), and locals of
	 the current function all have a stable address here.  */
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || DECL_THREAD_LOCAL_P (op)
	  || DECL_CONTEXT (op) == current_function_decl
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    default:
      break;
    }

  return false;
}
3193
/* Return whether OP is a DECL whose address is interprocedural-invariant,
   i.e. the same across all functions in the program (so locals and
   dllimported variables do not qualify).  */

bool
decl_address_ip_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case LABEL_DECL:
    case FUNCTION_DECL:
    case STRING_CST:
      return true;

    case VAR_DECL:
      if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
	   && !DECL_DLLIMPORT_P (op))
	  || DECL_THREAD_LOCAL_P (op))
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
	return true;
      break;

    default:
      break;
    }

  return false;
}
3227
3228
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants, and read-only objects without side effects, are
     trivially invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      /* A SAVE_EXPR is evaluated once; subsequent uses reuse the value.  */
      return true;

    case ADDR_EXPR:
      /* &X is invariant when the whole access path down to the base
	 object is built from invariant pieces.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* The index must be invariant and the lower-bound/element-size
		 operands must be absent (i.e. taken from the type).  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      /* Likewise, a variable field offset makes it non-invariant.  */
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3279
3280 /* Return true if T is function-invariant. */
3281
3282 bool
3283 tree_invariant_p (tree t)
3284 {
3285 tree inner = skip_simple_arithmetic (t);
3286 return tree_invariant_p_1 (inner);
3287 }
3288
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree t = fold (expr);
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (t);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  if (tree_invariant_p_1 (inner))
    return t;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return t;

  /* Note: the SAVE_EXPR wraps the folded T, not INNER, so the simple
     arithmetic around the invariant core is preserved.  */
  t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
  SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (t) = 1;
  return t;
}
3350
3351 /* Look inside EXPR into any simple arithmetic operations. Return the
3352 outermost non-arithmetic or non-invariant node. */
3353
3354 tree
3355 skip_simple_arithmetic (tree expr)
3356 {
3357 /* We don't care about whether this can be used as an lvalue in this
3358 context. */
3359 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3360 expr = TREE_OPERAND (expr, 0);
3361
3362 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3363 a constant, it will be more efficient to not make another SAVE_EXPR since
3364 it will allow better simplification and GCSE will be able to merge the
3365 computations if they actually occur. */
3366 while (true)
3367 {
3368 if (UNARY_CLASS_P (expr))
3369 expr = TREE_OPERAND (expr, 0);
3370 else if (BINARY_CLASS_P (expr))
3371 {
3372 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3373 expr = TREE_OPERAND (expr, 0);
3374 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3375 expr = TREE_OPERAND (expr, 1);
3376 else
3377 break;
3378 }
3379 else
3380 break;
3381 }
3382
3383 return expr;
3384 }
3385
3386 /* Look inside EXPR into simple arithmetic operations involving constants.
3387 Return the outermost non-arithmetic or non-constant node. */
3388
3389 tree
3390 skip_simple_constant_arithmetic (tree expr)
3391 {
3392 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3393 expr = TREE_OPERAND (expr, 0);
3394
3395 while (true)
3396 {
3397 if (UNARY_CLASS_P (expr))
3398 expr = TREE_OPERAND (expr, 0);
3399 else if (BINARY_CLASS_P (expr))
3400 {
3401 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3402 expr = TREE_OPERAND (expr, 0);
3403 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3404 expr = TREE_OPERAND (expr, 1);
3405 else
3406 break;
3407 }
3408 else
3409 break;
3410 }
3411
3412 return expr;
3413 }
3414
3415 /* Return which tree structure is used by T. */
3416
3417 enum tree_node_structure_enum
3418 tree_node_structure (const_tree t)
3419 {
3420 const enum tree_code code = TREE_CODE (t);
3421 return tree_node_structure_for_code (code);
3422 }
3423
/* Set various status flags when building a CALL_EXPR object T:
   derive TREE_SIDE_EFFECTS and TREE_READONLY from the callee's flags
   and from the call's operands.  */

static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  /* Scan operands starting at 1 (operand 0 of a CALL_EXPR is not an
     argument; the callee and arguments follow).  Skip the scan entirely
     when it could not change either flag.  */
  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
3453 \f
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.  */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  /* A null tree trivially contains no placeholder.  */
  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which will assume
	 here will be valid.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      /* Of the exceptional codes, only TREE_LIST is searched (both the
	 value and the rest of the chain).  */
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR.  */
	  return 0;

	default:
	  break;
	}

      /* For codes without a special case above, recurse into the operands
	 according to the operand count of the code.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* Check every argument of the call.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
3540
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case POINTER_BOUNDS_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* These carry nothing beyond what was already checked above.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, its type, and (for qualified unions)
	   its qualifier expression.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
3609
3610 /* Wrapper around above function used to cache its result. */
3611
3612 bool
3613 type_contains_placeholder_p (tree type)
3614 {
3615 bool result;
3616
3617 /* If the contains_placeholder_bits field has been initialized,
3618 then we know the answer. */
3619 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3620 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3621
3622 /* Indicate that we've seen this type node, and the answer is false.
3623 This is what we want to return if we run into recursion via fields. */
3624 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3625
3626 /* Compute the real value. */
3627 result = type_contains_placeholder_1 (type);
3628
3629 /* Store the real value. */
3630 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3631
3632 return result;
3633 }
3634 \f
3635 /* Push tree EXP onto vector QUEUE if it is not already present. */
3636
3637 static void
3638 push_without_duplicates (tree exp, vec<tree> *queue)
3639 {
3640 unsigned int i;
3641 tree iter;
3642
3643 FOR_EACH_VEC_ELT (*queue, i, iter)
3644 if (simple_cst_equal (iter, exp) == 1)
3645 break;
3646
3647 if (!iter)
3648 queue->safe_push (exp);
3649 }
3650
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk down the chain of references to the innermost object.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* If the reference bottoms out at a PLACEHOLDER_EXPR, record the
	 whole COMPONENT_REF; otherwise keep searching inside it.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through...  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Operand 0 of a vl_exp is its length; start at operand 1.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
3724
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      /* If nothing changed, return the original to preserve sharing.  */
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through...  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Recurse into each operand, rebuilding (and folding) the node
	   only when some operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
	    {
	      tree t = maybe_inline_call_in_expr (exp);
	      if (t)
		return SUBSTITUTE_IN_EXPR (t, f, r);
	    }

	  /* Operand 0 of a vl_exp is its length; start at operand 1.
	     Copy the node lazily, only on the first changed operand.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve flags of the original expression on the rebuilt one.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
3906
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: look for an element of OBJ whose type matches
	 directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: look for a pointer to the needed type and build a
	 dereference of it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      /* If nothing changed, return the original to preserve sharing.  */
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Recurse into each operand, rebuilding (and folding) the node
	   only when some operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Operand 0 of a vl_exp is its length; start at operand 1.
	     Copy the node lazily, only on the first changed operand.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve flags of the original expression on the rebuilt one.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4077 \f
4078
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* Copy type and flags from the original expression onto the rebuilt
     node, since build_nt does not set them.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4159
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference underneath.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* The base is stabilized as a reference, the index as a subtree.  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* Copy type and flags from the original reference onto the rebuilt
     node, since build_nt does not set them.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
4242 \f
/* Low-level constructors for expressions.  */

/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Give the language a chance to map the innermost node to a decl.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4321
4322 /* Build an expression of code CODE, data type TYPE, and operands as
4323 specified. Expressions and reference nodes can be created this way.
4324 Constants, decls, types and misc nodes cannot be.
4325
4326 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4327 enough for all extant tree codes. */
4328
4329 tree
4330 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4331 {
4332 tree t;
4333
4334 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4335
4336 t = make_node_stat (code PASS_MEM_STAT);
4337 TREE_TYPE (t) = tt;
4338
4339 return t;
4340 }
4341
/* Build a one-operand expression node of code CODE, type TYPE and
   operand NODE, setting TREE_SIDE_EFFECTS, TREE_READONLY, TREE_CONSTANT
   and TREE_THIS_VOLATILE as appropriate for the code.  */

tree
build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common header needs clearing; the rest is set below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    TREE_SIDE_EFFECTS (t) = 1;
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4402
/* Helper for the buildN_stat functions below: store ARG##N as operand N
   of T and fold its flags into the local SIDE_EFFECTS, READ_ONLY and
   CONSTANT accumulators (types are skipped since they carry no such
   flags).  */
#define PROCESS_ARG(N)			\
  do {					\
    TREE_OPERAND (t, N) = arg##N;	\
    if (arg##N &&!TYPE_P (arg##N))	\
      {					\
	if (TREE_SIDE_EFFECTS (arg##N))	\
	  side_effects = 1;		\
	if (!TREE_READONLY (arg##N)	\
	    && !CONSTANT_CLASS_P (arg##N)) \
	  (void) (read_only = 0);	\
	if (!TREE_CONSTANT (arg##N))	\
	  (void) (constant = 0);	\
      }					\
  } while (0)
4417
/* Build a two-operand expression node of code CODE, type TT and
   operands ARG0 and ARG1, computing its flags from the arguments.  */

tree
build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* Sanity check: plain additive/multiplicative arithmetic on pointer
     types is only meaningful on constants; POINTER_PLUS_EXPR is the
     proper code otherwise.  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* For a MEM_REF of a decl address, inherit readonly/volatile from
	 the underlying decl rather than from the operands.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      TREE_CONSTANT (t) = constant;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
4478
/* Build a three-operand expression node of code CODE, type TT and
   operands ARG0, ARG1 and ARG2, computing its flags from the
   arguments.  */

tree
build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2 MEM_STAT_DECL)
{
  /* CONSTANT is only written by PROCESS_ARG here, never read; it is
     declared to satisfy the macro.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4520
/* Build a tree node of code CODE and type TT with exactly four
   operands ARG0..ARG3.  Flags are accumulated from the operands via
   the PROCESS_ARG macro defined earlier in this file.  */

tree
build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  /* A reference is volatile iff the object it refers to is.  */
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4547
/* Build a tree node of code CODE and type TT with exactly five
   operands ARG0..ARG4.  Flags are accumulated from the operands via
   the PROCESS_ARG macro defined earlier in this file.  */

tree
build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* For a TARGET_MEM_REF based on &DECL, the access inherits the
	 readonly/volatile qualifiers of the underlying object.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    /* Otherwise a reference is volatile iff its base operand is.  */
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4585
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR, at source location LOC.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  HOST_WIDE_INT offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* The second MEM_REF operand carries the byte offset; its type
     (PTYPE, the original pointer type) also records the alias set.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
4611
/* Return the constant byte offset of a MEM_REF or TARGET_MEM_REF tree T,
   read from its second operand and sign-extended into an offset_int.  */

offset_int
mem_ref_offset (const_tree t)
{
  return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
}
4619
/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   offsetted by OFFSET units.  */

tree
build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
{
  /* Express &BASE + OFFSET as the address of a MEM_REF, then let
     recompute_tree_invariant_for_addr_expr set the invariant/constant
     flags on the resulting ADDR_EXPR.  */
  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
			  build_fold_addr_expr (base),
			  build_int_cst (ptr_type_node, offset));
  tree addr = build1 (ADDR_EXPR, type, ref);
  recompute_tree_invariant_for_addr_expr (addr);
  return addr;
}
4633
4634 /* Similar except don't specify the TREE_TYPE
4635 and leave the TREE_SIDE_EFFECTS as 0.
4636 It is permissible for arguments to be null,
4637 or even garbage if their values do not matter. */
4638
4639 tree
4640 build_nt (enum tree_code code, ...)
4641 {
4642 tree t;
4643 int length;
4644 int i;
4645 va_list p;
4646
4647 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4648
4649 va_start (p, code);
4650
4651 t = make_node (code);
4652 length = TREE_CODE_LENGTH (code);
4653
4654 for (i = 0; i < length; i++)
4655 TREE_OPERAND (t, i) = va_arg (p, tree);
4656
4657 va_end (p);
4658 return t;
4659 }
4660
4661 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4662 tree vec. */
4663
4664 tree
4665 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4666 {
4667 tree ret, t;
4668 unsigned int ix;
4669
4670 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4671 CALL_EXPR_FN (ret) = fn;
4672 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4673 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4674 CALL_EXPR_ARG (ret, ix) = t;
4675 return ret;
4676 }
4677 \f
/* Create a DECL_... node of code CODE, name NAME and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

tree
build_decl_stat (location_t loc, enum tree_code code, tree name,
		 tree type MEM_STAT_DECL)
{
  tree t;

  t = make_node_stat (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

  /* if (type == error_mark_node)
     type = integer_type_node; */
  /* That is not done, deliberately, so that having error_mark_node
     as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  /* Only objects with storage get their layout computed here; other
     decl kinds (e.g. TYPE_DECL, FIELD_DECL) are laid out elsewhere.  */
  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    layout_decl (t, 0);

  return t;
}
4708
4709 /* Builds and returns function declaration with NAME and TYPE. */
4710
4711 tree
4712 build_fn_decl (const char *name, tree type)
4713 {
4714 tree id = get_identifier (name);
4715 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4716
4717 DECL_EXTERNAL (decl) = 1;
4718 TREE_PUBLIC (decl) = 1;
4719 DECL_ARTIFICIAL (decl) = 1;
4720 TREE_NOTHROW (decl) = 1;
4721
4722 return decl;
4723 }
4724
4725 vec<tree, va_gc> *all_translation_units;
4726
4727 /* Builds a new translation-unit decl with name NAME, queues it in the
4728 global list of translation-unit decls and returns it. */
4729
4730 tree
4731 build_translation_unit_decl (tree name)
4732 {
4733 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4734 name, NULL_TREE);
4735 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4736 vec_safe_push (all_translation_units, tu);
4737 return tu;
4738 }
4739
4740 \f
4741 /* BLOCK nodes are used to represent the structure of binding contours
4742 and declarations, once those contours have been exited and their contents
4743 compiled. This information is used for outputting debugging info. */
4744
4745 tree
4746 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4747 {
4748 tree block = make_node (BLOCK);
4749
4750 BLOCK_VARS (block) = vars;
4751 BLOCK_SUBBLOCKS (block) = subblocks;
4752 BLOCK_SUPERCONTEXT (block) = supercontext;
4753 BLOCK_CHAIN (block) = chain;
4754 return block;
4755 }
4756
4757 \f
4758 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4759
4760 LOC is the location to use in tree T. */
4761
4762 void
4763 protected_set_expr_location (tree t, location_t loc)
4764 {
4765 if (CAN_HAVE_LOCATION_P (t))
4766 SET_EXPR_LOCATION (t, loc);
4767 }
4768 \f
/* Return a declaration like DDECL except that its DECL_ATTRIBUTES
   is ATTRIBUTE.  Note that DDECL itself is modified in place and
   returned; no copy is made.  */

tree
build_decl_attribute_variant (tree ddecl, tree attribute)
{
  DECL_ATTRIBUTES (ddecl) = attribute;
  return ddecl;
}
4778
/* Return a type like TTYPE except that its TYPE_ATTRIBUTE
   is ATTRIBUTE and its qualifiers are QUALS.

   Record such modified types already made so we don't make duplicates.  */

tree
build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
{
  if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
    {
      inchash::hash hstate;
      tree ntype;
      int i;
      tree t;
      enum tree_code code = TREE_CODE (ttype);

      /* Building a distinct copy of a tagged type is inappropriate; it
	 causes breakage in code that expects there to be a one-to-one
	 relationship between a struct and its fields.
	 build_duplicate_type is another solution (as used in
	 handle_transparent_union_attribute), but that doesn't play well
	 with the stronger C++ type identity model.  */
      if (TREE_CODE (ttype) == RECORD_TYPE
	  || TREE_CODE (ttype) == UNION_TYPE
	  || TREE_CODE (ttype) == QUAL_UNION_TYPE
	  || TREE_CODE (ttype) == ENUMERAL_TYPE)
	{
	  warning (OPT_Wattributes,
		   "ignoring attributes applied to %qT after definition",
		   TYPE_MAIN_VARIANT (ttype));
	  return build_qualified_type (ttype, quals);
	}

      /* Build the attribute variant on the unqualified type, then
	 re-apply QUALS at the end.  */
      ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
      ntype = build_distinct_type_copy (ttype);

      TYPE_ATTRIBUTES (ntype) = attribute;

      /* Hash code, operand type and attribute list identify the
	 candidate for sharing via type_hash_canon below.  */
      hstate.add_int (code);
      if (TREE_TYPE (ntype))
	hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
      attribute_hash_list (attribute, hstate);

      /* Mix in the code-specific discriminating data.  */
      switch (TREE_CODE (ntype))
	{
	case FUNCTION_TYPE:
	  type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
	  break;
	case ARRAY_TYPE:
	  if (TYPE_DOMAIN (ntype))
	    hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
	  break;
	case INTEGER_TYPE:
	  t = TYPE_MAX_VALUE (ntype);
	  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	    hstate.add_object (TREE_INT_CST_ELT (t, i));
	  break;
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  {
	    unsigned int precision = TYPE_PRECISION (ntype);
	    hstate.add_object (precision);
	  }
	  break;
	default:
	  break;
	}

      ntype = type_hash_canon (hstate.end(), ntype);

      /* If the target-dependent attributes make NTYPE different from
	 its canonical type, we will need to use structural equality
	 checks for this type.  */
      if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
	  || !comp_type_attributes (ntype, ttype))
	SET_TYPE_STRUCTURAL_EQUALITY (ntype);
      else if (TYPE_CANONICAL (ntype) == ntype)
	TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);

      ttype = build_qualified_type (ntype, quals);
    }
  else if (TYPE_QUALS (ttype) != quals)
    ttype = build_qualified_type (ttype, quals);

  return ttype;
}
4865
4866 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4867 the same. */
4868
4869 static bool
4870 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4871 {
4872 tree cl1, cl2;
4873 for (cl1 = clauses1, cl2 = clauses2;
4874 cl1 && cl2;
4875 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4876 {
4877 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4878 return false;
4879 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4880 {
4881 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4882 OMP_CLAUSE_DECL (cl2)) != 1)
4883 return false;
4884 }
4885 switch (OMP_CLAUSE_CODE (cl1))
4886 {
4887 case OMP_CLAUSE_ALIGNED:
4888 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4889 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4890 return false;
4891 break;
4892 case OMP_CLAUSE_LINEAR:
4893 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4894 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4895 return false;
4896 break;
4897 case OMP_CLAUSE_SIMDLEN:
4898 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4899 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4900 return false;
4901 default:
4902 break;
4903 }
4904 }
4905 return true;
4906 }
4907
4908 /* Compare two constructor-element-type constants. Return 1 if the lists
4909 are known to be equal; otherwise return 0. */
4910
4911 static bool
4912 simple_cst_list_equal (const_tree l1, const_tree l2)
4913 {
4914 while (l1 != NULL_TREE && l2 != NULL_TREE)
4915 {
4916 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4917 return false;
4918
4919 l1 = TREE_CHAIN (l1);
4920 l2 = TREE_CHAIN (l2);
4921 }
4922
4923 return l1 == l2;
4924 }
4925
4926 /* Compare two identifier nodes representing attributes. Either one may
4927 be in wrapped __ATTR__ form. Return true if they are the same, false
4928 otherwise. */
4929
4930 static bool
4931 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4932 {
4933 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4934 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4935 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4936
4937 /* Identifiers can be compared directly for equality. */
4938 if (attr1 == attr2)
4939 return true;
4940
4941 /* If they are not equal, they may still be one in the form
4942 'text' while the other one is in the form '__text__'. TODO:
4943 If we were storing attributes in normalized 'text' form, then
4944 this could all go away and we could take full advantage of
4945 the fact that we're comparing identifiers. :-) */
4946 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4947 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4948
4949 if (attr2_len == attr1_len + 4)
4950 {
4951 const char *p = IDENTIFIER_POINTER (attr2);
4952 const char *q = IDENTIFIER_POINTER (attr1);
4953 if (p[0] == '_' && p[1] == '_'
4954 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4955 && strncmp (q, p + 2, attr1_len) == 0)
4956 return true;;
4957 }
4958 else if (attr2_len + 4 == attr1_len)
4959 {
4960 const char *p = IDENTIFIER_POINTER (attr2);
4961 const char *q = IDENTIFIER_POINTER (attr1);
4962 if (q[0] == '_' && q[1] == '_'
4963 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4964 && strncmp (q + 2, p, attr2_len) == 0)
4965 return true;
4966 }
4967
4968 return false;
4969 }
4970
/* Compare two attributes for their value identity.  Return true if the
   attribute values are known to be equal; otherwise return false.  */

bool
attribute_value_equal (const_tree attr1, const_tree attr2)
{
  if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
    return true;

  if (TREE_VALUE (attr1) != NULL_TREE
      && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
      && TREE_VALUE (attr2) != NULL_TREE
      && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
    {
      /* Handle attribute format.  */
      if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
	{
	  attr1 = TREE_VALUE (attr1);
	  attr2 = TREE_VALUE (attr2);
	  /* Compare the archetypes (printf/scanf/strftime/...).  */
	  if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
				       TREE_VALUE (attr2)))
	    return false;
	  /* Archetypes are the same.  Compare the rest.  */
	  return (simple_cst_list_equal (TREE_CHAIN (attr1),
					 TREE_CHAIN (attr2)) == 1);
	}
      return (simple_cst_list_equal (TREE_VALUE (attr1),
				     TREE_VALUE (attr2)) == 1);
    }

  /* "omp declare simd" attribute values are OMP_CLAUSE chains and need
     their own comparison; only relevant when OpenMP is enabled.  */
  if ((flag_openmp || flag_openmp_simd)
      && TREE_VALUE (attr1) && TREE_VALUE (attr2)
      && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
      && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
    return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
					   TREE_VALUE (attr2));

  return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
}
5011
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
int
comp_type_attributes (const_tree type1, const_tree type2)
{
  const_tree a1 = TYPE_ATTRIBUTES (type1);
  const_tree a2 = TYPE_ATTRIBUTES (type2);
  const_tree a;

  if (a1 == a2)
    return 1;
  /* First pass: every identity-affecting attribute of TYPE1 must have a
     value-equal counterpart on TYPE2.  A leaves the loop non-NULL at the
     first mismatch.  */
  for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
    {
      const struct attribute_spec *as;
      const_tree attr;

      as = lookup_attribute_spec (get_attribute_name (a));
      if (!as || as->affects_type_identity == false)
	continue;

      attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
      if (!attr || !attribute_value_equal (a, attr))
	break;
    }
  if (!a)
    {
      /* Second pass: TYPE2 must not carry identity-affecting attributes
	 absent from TYPE1.  */
      for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
	{
	  const struct attribute_spec *as;

	  as = lookup_attribute_spec (get_attribute_name (a));
	  if (!as || as->affects_type_identity == false)
	    continue;

	  if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
	    break;
	  /* We don't need to compare trees again, as we did this
	     already in first loop.  */
	}
      /* All types - affecting identity - are equal, so
	 there is no need to call target hook for comparison.  */
      if (!a)
	return 1;
    }
  /* A mismatch on transaction_safe is a hard incompatibility.  */
  if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
    return 0;
  /* As some type combinations - like default calling-convention - might
     be compatible, we have to call the target hook to get the final result.  */
  return targetm.comp_type_attributes (type1, type2);
}
5063
/* Return a type like TTYPE except that its TYPE_ATTRIBUTE
   is ATTRIBUTE.  The qualifiers of TTYPE are preserved.

   Record such modified types already made so we don't make duplicates.  */

tree
build_type_attribute_variant (tree ttype, tree attribute)
{
  return build_type_attribute_qual_variant (ttype, attribute,
					    TYPE_QUALS (ttype));
}
5075
5076
5077 /* Reset the expression *EXPR_P, a size or position.
5078
5079 ??? We could reset all non-constant sizes or positions. But it's cheap
5080 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5081
5082 We need to reset self-referential sizes or positions because they cannot
5083 be gimplified and thus can contain a CALL_EXPR after the gimplification
5084 is finished, which will run afoul of LTO streaming. And they need to be
5085 reset to something essentially dummy but not constant, so as to preserve
5086 the properties of the object they are attached to. */
5087
5088 static inline void
5089 free_lang_data_in_one_sizepos (tree *expr_p)
5090 {
5091 tree expr = *expr_p;
5092 if (CONTAINS_PLACEHOLDER_P (expr))
5093 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5094 }
5095
5096
/* Reset all the fields in a binfo node BINFO.  We only keep
   BINFO_VTABLE, which is used by gimple_fold_obj_type_ref.  */

static void
free_lang_data_in_binfo (tree binfo)
{
  unsigned i;
  tree t;

  gcc_assert (TREE_CODE (binfo) == TREE_BINFO);

  BINFO_VIRTUALS (binfo) = NULL_TREE;
  BINFO_BASE_ACCESSES (binfo) = NULL;
  BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
  BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;

  /* Recurse into all base binfos.  */
  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
    free_lang_data_in_binfo (t);
}
5116
5117
/* Reset all language specific information still present in TYPE.  */

static void
free_lang_data_in_type (tree type)
{
  gcc_assert (TYPE_P (type));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (type);

  TREE_LANG_FLAG_0 (type) = 0;
  TREE_LANG_FLAG_1 (type) = 0;
  TREE_LANG_FLAG_2 (type) = 0;
  TREE_LANG_FLAG_3 (type) = 0;
  TREE_LANG_FLAG_4 (type) = 0;
  TREE_LANG_FLAG_5 (type) = 0;
  TREE_LANG_FLAG_6 (type) = 0;

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      /* Remove the const and volatile qualifiers from arguments.  The
	 C++ front end removes them, but the C front end does not,
	 leading to false ODR violation errors when merging two
	 instances of the same function signature compiled by
	 different front ends.  */
      tree p;

      for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  tree arg_type = TREE_VALUE (p);

	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
	    {
	      int quals = TYPE_QUALS (arg_type)
			  & ~TYPE_QUAL_CONST
			  & ~TYPE_QUAL_VOLATILE;
	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
	      free_lang_data_in_type (TREE_VALUE (p));
	    }
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_PURPOSE (p) = NULL;
	}
      /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE.  */
      TYPE_MINVAL (type) = NULL;
    }
  if (TREE_CODE (type) == METHOD_TYPE)
    {
      tree p;

      for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_PURPOSE (p) = NULL;
	}
      /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE.  */
      TYPE_MINVAL (type) = NULL;
    }

  /* Remove members that are not actually FIELD_DECLs from the field
     list of an aggregate.  These occur in C++.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree prev, member;

      /* Note that TYPE_FIELDS can be shared across distinct
	 TREE_TYPEs.  Therefore, if the first field of TYPE_FIELDS is
	 to be removed, we cannot set its TREE_CHAIN to NULL.
	 Otherwise, we would not be able to find all the other fields
	 in the other instances of this TREE_TYPE.

	 This was causing an ICE in testsuite/g++.dg/lto/20080915.C.  */
      prev = NULL_TREE;
      member = TYPE_FIELDS (type);
      while (member)
	{
	  /* Keep FIELD_DECLs, and TYPE_DECLs that debug output still
	     needs (non-ignored, non-redundant typedefs at sufficient
	     debug levels); splice everything else out of the chain.  */
	  if (TREE_CODE (member) == FIELD_DECL
	      || (TREE_CODE (member) == TYPE_DECL
		  && !DECL_IGNORED_P (member)
		  && debug_info_level > DINFO_LEVEL_TERSE
		  && !is_redundant_typedef (member)))
	    {
	      if (prev)
		TREE_CHAIN (prev) = member;
	      else
		TYPE_FIELDS (type) = member;
	      prev = member;
	    }

	  member = TREE_CHAIN (member);
	}

      if (prev)
	TREE_CHAIN (prev) = NULL_TREE;
      else
	TYPE_FIELDS (type) = NULL_TREE;

      /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
	 and danagle the pointer from time to time.  */
      if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
	TYPE_VFIELD (type) = NULL_TREE;

      /* Remove TYPE_METHODS list.  While it would be nice to keep it
	 to enable ODR warnings about different method lists, doing so
	 seems to impractically increase size of LTO data streamed.
	 Keep the information if TYPE_METHODS was non-NULL. This is used
	 by function.c and pretty printers.  */
      if (TYPE_METHODS (type))
	TYPE_METHODS (type) = error_mark_node;
      if (TYPE_BINFO (type))
	{
	  free_lang_data_in_binfo (TYPE_BINFO (type));
	  /* We need to preserve link to bases and virtual table for all
	     polymorphic types to make devirtualization machinery working.
	     Debug output cares only about bases, but output also
	     virtual table pointers so merging of -fdevirtualize and
	     -fno-devirtualize units is easier.  */
	  if ((!BINFO_VTABLE (TYPE_BINFO (type))
	       || !flag_devirtualize)
	      && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
		   && !BINFO_VTABLE (TYPE_BINFO (type)))
		  || debug_info_level != DINFO_LEVEL_NONE))
	    TYPE_BINFO (type) = NULL;
	}
    }
  else
    {
      /* For non-aggregate types, clear out the language slot (which
	 overloads TYPE_BINFO).  */
      TYPE_LANG_SLOT_1 (type) = NULL_TREE;

      if (INTEGRAL_TYPE_P (type)
	  || SCALAR_FLOAT_TYPE_P (type)
	  || FIXED_POINT_TYPE_P (type))
	{
	  free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
	  free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
	}
    }

  free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
  free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));

  /* Replace a BLOCK context with the innermost enclosing non-BLOCK
     context, if any.  */
  if (TYPE_CONTEXT (type)
      && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
    {
      tree ctx = TYPE_CONTEXT (type);
      do
	{
	  ctx = BLOCK_SUPERCONTEXT (ctx);
	}
      while (ctx && TREE_CODE (ctx) == BLOCK);
      TYPE_CONTEXT (type) = ctx;
    }
}
5272
5273
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  /* NOTE: "mering" is how this flag is actually spelled.  */
  if (flag_lto_odr_type_mering
      && TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl)
      && decl == TYPE_NAME (TREE_TYPE (decl))
      && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
      && (type_with_linkage_p (TREE_TYPE (decl))
	  || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
      && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    return !DECL_ASSEMBLER_NAME_SET_P (decl);
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (TREE_CODE (decl) != FUNCTION_DECL
      && TREE_CODE (decl) != VAR_DECL)
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (DECL_BUILT_IN (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
5347
5348
/* Reset all language specific information still present in symbol
   DECL.  */

static void
free_lang_data_in_decl (tree decl)
{
  gcc_assert (DECL_P (decl));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (decl);

  TREE_LANG_FLAG_0 (decl) = 0;
  TREE_LANG_FLAG_1 (decl) = 0;
  TREE_LANG_FLAG_2 (decl) = 0;
  TREE_LANG_FLAG_3 (decl) = 0;
  TREE_LANG_FLAG_4 (decl) = 0;
  TREE_LANG_FLAG_5 (decl) = 0;
  TREE_LANG_FLAG_6 (decl) = 0;

  free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
  free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
      if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (decl) = NULL_TREE;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      struct cgraph_node *node;
      /* Discard the function body unless a callgraph node keeps it
	 alive (a definition or clones).  */
      if (!(node = cgraph_node::get (decl))
	  || (!node->definition && !node->clones))
	{
	  if (node)
	    node->release_body ();
	  else
	    {
	      release_function_body (decl);
	      DECL_ARGUMENTS (decl) = NULL;
	      DECL_RESULT (decl) = NULL;
	      DECL_INITIAL (decl) = error_mark_node;
	    }
	}
      if (gimple_has_body_p (decl))
	{
	  tree t;

	  /* If DECL has a gimple body, then the context for its
	     arguments must be DECL.  Otherwise, it doesn't really
	     matter, as we will not be emitting any code for DECL.  In
	     general, there may be other instances of DECL created by
	     the front end and since PARM_DECLs are generally shared,
	     their DECL_CONTEXT changes as the replicas of DECL are
	     created.  The only time where DECL_CONTEXT is important
	     is for the FUNCTION_DECLs that have a gimple body (since
	     the PARM_DECL will be used in the function's body).  */
	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
	    DECL_CONTEXT (t) = decl;
	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
	      = target_option_default_node;
	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
	      = optimization_default_node;
	}

      /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
	 At this point, it is not needed anymore.  */
      DECL_SAVED_TREE (decl) = NULL_TREE;

      /* Clear the abstract origin if it refers to a method.  Otherwise
	 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
	 origin will not be output correctly.  */
      if (DECL_ABSTRACT_ORIGIN (decl)
	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
	  && RECORD_OR_UNION_TYPE_P
	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;

      /* Sometimes the C++ frontend doesn't manage to transform a temporary
	 DECL_VINDEX referring to itself into a vtable slot number as it
	 should.  Happens with functions that are copied and then forgotten
	 about.  Just clear it, it won't matter anymore.  */
      if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
	DECL_VINDEX (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      /* Drop initializers that will never be needed: external
	 non-readonly variables and automatic (function-local,
	 non-static) variables.  */
      if ((DECL_EXTERNAL (decl)
	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
	DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL
	   || TREE_CODE (decl) == FIELD_DECL)
    DECL_INITIAL (decl) = NULL_TREE;
  else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
	   && DECL_INITIAL (decl)
	   && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
    {
      /* Strip builtins from the translation-unit BLOCK.  We still have targets
	 without builtin_decl_explicit support and also builtins are shared
	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
      tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
      while (*nextp)
	{
	  tree var = *nextp;
	  if (TREE_CODE (var) == FUNCTION_DECL
	      && DECL_BUILT_IN (var))
	    *nextp = TREE_CHAIN (var);
	  else
	    nextp = &TREE_CHAIN (var);
	}
    }
}
5465
5466
/* Data used when collecting DECLs and TYPEs for language data removal.
   Filled by find_decls_types_r and consumed by the free_lang_data pass.  */

struct free_lang_data_d
{
  /* Worklist to avoid excessive recursion.  */
  vec<tree> worklist;

  /* Set of traversed objects.  Used to avoid duplicate visits.  */
  hash_set<tree> *pset;

  /* Array of symbols to process with free_lang_data_in_decl.  */
  vec<tree> decls;

  /* Array of types to process with free_lang_data_in_type.  */
  vec<tree> types;
};
5483
5484
/* Save all language fields needed to generate proper debug information
   for DECL.  This saves most fields cleared out by free_lang_data_in_decl.
   Currently a stub: only the assertion remains.  */

static void
save_debug_info_for_decl (tree t)
{
  /*struct saved_debug_info_d *sdi;*/

  gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));

  /* FIXME.  Partial implementation for saving debug info removed.  */
}
5497
5498
/* Save all language fields needed to generate proper debug information
   for TYPE.  This saves most fields cleared out by free_lang_data_in_type.
   Currently a stub: only the assertion remains.  */

static void
save_debug_info_for_type (tree t)
{
  /*struct saved_debug_info_d *sdi;*/

  gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));

  /* FIXME.  Partial implementation for saving debug info removed.  */
}
5511
5512
5513 /* Add type or decl T to one of the list of tree nodes that need their
5514 language data removed. The lists are held inside FLD. */
5515
5516 static void
5517 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5518 {
5519 if (DECL_P (t))
5520 {
5521 fld->decls.safe_push (t);
5522 if (debug_info_level > DINFO_LEVEL_TERSE)
5523 save_debug_info_for_decl (t);
5524 }
5525 else if (TYPE_P (t))
5526 {
5527 fld->types.safe_push (t);
5528 if (debug_info_level > DINFO_LEVEL_TERSE)
5529 save_debug_info_for_type (t);
5530 }
5531 else
5532 gcc_unreachable ();
5533 }
5534
5535 /* Push tree node T into FLD->WORKLIST. */
5536
5537 static inline void
5538 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5539 {
5540 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5541 fld->worklist.safe_push ((t));
5542 }
5543
5544
/* Operand callback helper for free_lang_data_in_node.  *TP is the
   subtree operand being considered.  WS is walk_tree's "walk subtrees"
   flag; we clear it wherever we do our own, more exhaustive traversal
   instead of letting walk_tree recurse.  DATA is the free_lang_data_d
   collector.  Always returns NULL_TREE so the walk never terminates
   early.  */

static tree
find_decls_types_r (tree *tp, int *ws, void *data)
{
  tree t = *tp;
  struct free_lang_data_d *fld = (struct free_lang_data_d *) data;

  /* TREE_LISTs are handled by whoever walks their elements; nothing
     interesting lives on the list node itself.  */
  if (TREE_CODE (t) == TREE_LIST)
    return NULL_TREE;

  /* Language specific nodes will be removed, so there is no need
     to gather anything under them.  */
  if (is_lang_specific (t))
    {
      *ws = 0;
      return NULL_TREE;
    }

  if (DECL_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 decls, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      fld_worklist_push (DECL_NAME (t), fld);
      fld_worklist_push (DECL_CONTEXT (t), fld);
      fld_worklist_push (DECL_SIZE (t), fld);
      fld_worklist_push (DECL_SIZE_UNIT (t), fld);

      /* We are going to remove everything under DECL_INITIAL for
	 TYPE_DECLs.  No point walking them.  */
      if (TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (DECL_INITIAL (t), fld);

      fld_worklist_push (DECL_ATTRIBUTES (t), fld);
      fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);

      /* Decl-kind specific fields.  */
      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
	  fld_worklist_push (DECL_RESULT (t), fld);
	}
      else if (TREE_CODE (t) == TYPE_DECL)
	{
	  fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
	}
      else if (TREE_CODE (t) == FIELD_DECL)
	{
	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
	  fld_worklist_push (DECL_FCONTEXT (t), fld);
	}

      if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	fld_worklist_push (DECL_VALUE_EXPR (t), fld);

      /* FIELD_DECL and TYPE_DECL chains are walked via their
	 containers (TYPE_FIELDS below), not here.  */
      if (TREE_CODE (t) != FIELD_DECL
	  && TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (TREE_CHAIN (t), fld);
      *ws = 0;
    }
  else if (TYPE_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 types, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
      fld_worklist_push (TYPE_SIZE (t), fld);
      fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
      fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
      fld_worklist_push (TYPE_POINTER_TO (t), fld);
      fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
      fld_worklist_push (TYPE_NAME (t), fld);
      /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
	 them and thus do not and want not to reach unused pointer types
	 this way.  */
      if (!POINTER_TYPE_P (t))
	fld_worklist_push (TYPE_MINVAL (t), fld);
      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_MAXVAL (t), fld);
      fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
	 do not and want not to reach unused variants this way.  */
      if (TYPE_CONTEXT (t))
	{
	  tree ctx = TYPE_CONTEXT (t);
	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
	     So push that instead.  */
	  while (ctx && TREE_CODE (ctx) == BLOCK)
	    ctx = BLOCK_SUPERCONTEXT (ctx);
	  fld_worklist_push (ctx, fld);
	}
      /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do not
	 and want not to reach unused types this way.  */

      if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
	{
	  /* Reach base types and virtual method decls through the binfo.  */
	  unsigned i;
	  tree tem;
	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
	    fld_worklist_push (TREE_TYPE (tem), fld);
	  tem = BINFO_VIRTUALS (TYPE_BINFO (t));
	  if (tem
	      /* The Java FE overloads BINFO_VIRTUALS for its own purpose.  */
	      && TREE_CODE (tem) == TREE_LIST)
	    do
	      {
		fld_worklist_push (TREE_VALUE (tem), fld);
		tem = TREE_CHAIN (tem);
	      }
	    while (tem);
	}
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  tree tem;
	  /* Push all TYPE_FIELDS - there can be interleaving interesting
	     and non-interesting things.  */
	  tem = TYPE_FIELDS (t);
	  while (tem)
	    {
	      if (TREE_CODE (tem) == FIELD_DECL
		  || (TREE_CODE (tem) == TYPE_DECL
		      && !DECL_IGNORED_P (tem)
		      && debug_info_level > DINFO_LEVEL_TERSE
		      && !is_redundant_typedef (tem)))
		fld_worklist_push (tem, fld);
	      tem = TREE_CHAIN (tem);
	    }
	}

      fld_worklist_push (TYPE_STUB_DECL (t), fld);
      *ws = 0;
    }
  else if (TREE_CODE (t) == BLOCK)
    {
      /* Walk a BLOCK's vars, subblocks and abstract origin explicitly;
	 subtrees handle the rest.  */
      tree tem;
      for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
	fld_worklist_push (tem, fld);
      for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
	fld_worklist_push (tem, fld);
      fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
    }

  if (TREE_CODE (t) != IDENTIFIER_NODE
      && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
    fld_worklist_push (TREE_TYPE (t), fld);

  return NULL_TREE;
}
5700
5701
5702 /* Find decls and types in T. */
5703
5704 static void
5705 find_decls_types (tree t, struct free_lang_data_d *fld)
5706 {
5707 while (1)
5708 {
5709 if (!fld->pset->contains (t))
5710 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5711 if (fld->worklist.is_empty ())
5712 break;
5713 t = fld->worklist.pop ();
5714 }
5715 }
5716
5717 /* Translate all the types in LIST with the corresponding runtime
5718 types. */
5719
5720 static tree
5721 get_eh_types_for_runtime (tree list)
5722 {
5723 tree head, prev;
5724
5725 if (list == NULL_TREE)
5726 return NULL_TREE;
5727
5728 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5729 prev = head;
5730 list = TREE_CHAIN (list);
5731 while (list)
5732 {
5733 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5734 TREE_CHAIN (prev) = n;
5735 prev = TREE_CHAIN (prev);
5736 list = TREE_CHAIN (list);
5737 }
5738
5739 return head;
5740 }
5741
5742
/* Find decls and types referenced in EH region R and store them in
   FLD->DECLS and FLD->TYPES.  As a side effect, the FE type lists in
   try/allowed regions are replaced by their runtime equivalents.  */

static void
find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
{
  switch (r->type)
    {
    case ERT_CLEANUP:
      /* Cleanup regions carry no type lists.  */
      break;

    case ERT_TRY:
      {
	eh_catch c;

	/* The types referenced in each catch must first be changed to the
	   EH types used at runtime.  This removes references to FE types
	   in the region.  */
	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	  {
	    c->type_list = get_eh_types_for_runtime (c->type_list);
	    walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      /* Same runtime-type substitution for exception specifications.  */
      r->u.allowed.type_list
	= get_eh_types_for_runtime (r->u.allowed.type_list);
      walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
      break;

    case ERT_MUST_NOT_THROW:
      walk_tree (&r->u.must_not_throw.failure_decl,
		 find_decls_types_r, fld, fld->pset);
      break;
    }
}
5781
5782
/* Find decls and types referenced in cgraph node N and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from N,
   including those embedded inside types and decls (i.e,, TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
{
  basic_block bb;
  struct function *fn;
  unsigned ix;
  tree t;

  find_decls_types (n->decl, fld);

  /* Without a gimple body there are no locals, EH regions or
     statements to scan.  */
  if (!gimple_has_body_p (n->decl))
    return;

  /* This pass runs outside any particular function context.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  fn = DECL_STRUCT_FUNCTION (n->decl);

  /* Traverse locals. */
  FOR_EACH_LOCAL_DECL (fn, ix, t)
    find_decls_types (t, fld);

  /* Traverse EH regions in FN.  */
  {
    eh_region r;
    FOR_ALL_EH_REGION_FN (r, fn)
      find_decls_types_in_eh_region (r, fld);
  }

  /* Traverse every statement in FN.  */
  FOR_EACH_BB_FN (bb, fn)
    {
      gphi_iterator psi;
      gimple_stmt_iterator si;
      unsigned i;

      /* PHI nodes: scan every argument.  */
      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
	{
	  gphi *phi = psi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
	      find_decls_types (*arg_p, fld);
	    }
	}

      /* Regular statements: scan the call fntype (not reachable via
	 operands) plus every operand.  */
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);

	  if (is_gimple_call (stmt))
	    find_decls_types (gimple_call_fntype (stmt), fld);

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree arg = gimple_op (stmt, i);
	      find_decls_types (arg, fld);
	    }
	}
    }
}
5850
5851
/* Find decls and types referenced in varpool node V and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from V,
   including those embedded inside types and decls (i.e,, TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
{
  /* A variable has no body; its decl is the only root to walk.  */
  find_decls_types (v->decl, fld);
}
5863
5864 /* If T needs an assembler name, have one created for it. */
5865
5866 void
5867 assign_assembler_name_if_neeeded (tree t)
5868 {
5869 if (need_assembler_name_p (t))
5870 {
5871 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5872 diagnostics that use input_location to show locus
5873 information. The problem here is that, at this point,
5874 input_location is generally anchored to the end of the file
5875 (since the parser is long gone), so we don't have a good
5876 position to pin it to.
5877
5878 To alleviate this problem, this uses the location of T's
5879 declaration. Examples of this are
5880 testsuite/g++.dg/template/cond2.C and
5881 testsuite/g++.dg/template/pr35240.C. */
5882 location_t saved_location = input_location;
5883 input_location = DECL_SOURCE_LOCATION (t);
5884
5885 decl_assembler_name (t);
5886
5887 input_location = saved_location;
5888 }
5889 }
5890
5891
/* Free language specific information for every operand and expression
   in every node of the call graph.  This process operates in three stages:

   1- Every callgraph node and varpool node is traversed looking for
      decls and types embedded in them.  This is a more exhaustive
      search than that done by find_referenced_vars, because it will
      also collect individual fields, decls embedded in types, etc.

   2- All the decls found are sent to free_lang_data_in_decl.

   3- All the types found are sent to free_lang_data_in_type.

   The ordering between decls and types is important because
   free_lang_data_in_decl sets assembler names, which includes
   mangling.  So types cannot be freed up until assembler names have
   been set up.  */

static void
free_lang_data_in_cgraph (void)
{
  struct cgraph_node *n;
  varpool_node *v;
  struct free_lang_data_d fld;
  tree t;
  unsigned i;
  alias_pair *p;

  /* Initialize sets and arrays to store referenced decls and types.  */
  fld.pset = new hash_set<tree>;
  fld.worklist.create (0);
  fld.decls.create (100);
  fld.types.create (100);

  /* Find decls and types in the body of every function in the callgraph.  */
  FOR_EACH_FUNCTION (n)
    find_decls_types_in_node (n, &fld);

  /* Alias targets must be scanned too; they may not appear elsewhere.  */
  FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
    find_decls_types (p->decl, &fld);

  /* Find decls and types in every varpool symbol.  */
  FOR_EACH_VARIABLE (v)
    find_decls_types_in_var (v, &fld);

  /* Set the assembler name on every decl found.  We need to do this
     now because free_lang_data_in_decl will invalidate data needed
     for mangling.  This breaks mangling on interdependent decls.  */
  FOR_EACH_VEC_ELT (fld.decls, i, t)
    assign_assembler_name_if_neeeded (t);

  /* Traverse every decl found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld.decls, i, t)
    free_lang_data_in_decl (t);

  /* Traverse every type found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld.types, i, t)
    free_lang_data_in_type (t);
  /* With checking enabled, verify the scrubbed types are still sane.  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (fld.types, i, t)
	verify_type (t);
    }

  /* Release the collector's storage.  */
  delete fld.pset;
  fld.worklist.release ();
  fld.decls.release ();
  fld.types.release ();
}
5960
5961
/* Free resources that are used by FE but are not needed once they are done.
   Execute function for the *free_lang_data IPA pass; always returns 0
   (no TODO flags).  */

static unsigned
free_lang_data (void)
{
  unsigned i;

  /* If we are the LTO frontend we have freed lang-specific data already.  */
  if (in_lto_p
      || (!flag_generate_lto && !flag_generate_offload))
    return 0;

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph ();

  /* Create gimple variants for common types.  */
  ptrdiff_type_node = integer_type_node;
  fileptr_type_node = ptr_type_node;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  lang_hooks.gimplify_expr = lhd_gimplify_expr;

  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  tree_diagnostics_defaults (global_dc);

  return 0;
}
6006
6007
namespace {

/* Pass descriptor for the *free_lang_data simple IPA pass.  */
const pass_data pass_data_ipa_free_lang_data =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_lang_data", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_LANG_DATA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; all work happens in free_lang_data.  */
class pass_ipa_free_lang_data : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_lang_data (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return free_lang_data (); }

}; // class pass_ipa_free_lang_data

} // anon namespace
6036
/* Create an instance of the *free_lang_data pass in CTXT.  The pass
   manager takes ownership of the returned object.  */

simple_ipa_opt_pass *
make_pass_ipa_free_lang_data (gcc::context *ctxt)
{
  return new pass_ipa_free_lang_data (ctxt);
}
6042
6043 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6044 ATTR_NAME. Also used internally by remove_attribute(). */
6045 bool
6046 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6047 {
6048 size_t ident_len = IDENTIFIER_LENGTH (ident);
6049
6050 if (ident_len == attr_len)
6051 {
6052 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6053 return true;
6054 }
6055 else if (ident_len == attr_len + 4)
6056 {
6057 /* There is the possibility that ATTR is 'text' and IDENT is
6058 '__text__'. */
6059 const char *p = IDENTIFIER_POINTER (ident);
6060 if (p[0] == '_' && p[1] == '_'
6061 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6062 && strncmp (attr_name, p + 2, attr_len) == 0)
6063 return true;
6064 }
6065
6066 return false;
6067 }
6068
6069 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6070 of ATTR_NAME, and LIST is not NULL_TREE. */
6071 tree
6072 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6073 {
6074 while (list)
6075 {
6076 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6077
6078 if (ident_len == attr_len)
6079 {
6080 if (!strcmp (attr_name,
6081 IDENTIFIER_POINTER (get_attribute_name (list))))
6082 break;
6083 }
6084 /* TODO: If we made sure that attributes were stored in the
6085 canonical form without '__...__' (ie, as in 'text' as opposed
6086 to '__text__') then we could avoid the following case. */
6087 else if (ident_len == attr_len + 4)
6088 {
6089 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6090 if (p[0] == '_' && p[1] == '_'
6091 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6092 && strncmp (attr_name, p + 2, attr_len) == 0)
6093 break;
6094 }
6095 list = TREE_CHAIN (list);
6096 }
6097
6098 return list;
6099 }
6100
6101 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6102 return a pointer to the attribute's list first element if the attribute
6103 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6104 '__text__'). */
6105
6106 tree
6107 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6108 tree list)
6109 {
6110 while (list)
6111 {
6112 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6113
6114 if (attr_len > ident_len)
6115 {
6116 list = TREE_CHAIN (list);
6117 continue;
6118 }
6119
6120 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6121
6122 if (strncmp (attr_name, p, attr_len) == 0)
6123 break;
6124
6125 /* TODO: If we made sure that attributes were stored in the
6126 canonical form without '__...__' (ie, as in 'text' as opposed
6127 to '__text__') then we could avoid the following case. */
6128 if (p[0] == '_' && p[1] == '_' &&
6129 strncmp (attr_name, p + 2, attr_len) == 0)
6130 break;
6131
6132 list = TREE_CHAIN (list);
6133 }
6134
6135 return list;
6136 }
6137
6138
6139 /* A variant of lookup_attribute() that can be used with an identifier
6140 as the first argument, and where the identifier can be either
6141 'text' or '__text__'.
6142
6143 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6144 return a pointer to the attribute's list element if the attribute
6145 is part of the list, or NULL_TREE if not found. If the attribute
6146 appears more than once, this only returns the first occurrence; the
6147 TREE_CHAIN of the return value should be passed back in if further
6148 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6149 can be in the form 'text' or '__text__'. */
6150 static tree
6151 lookup_ident_attribute (tree attr_identifier, tree list)
6152 {
6153 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6154
6155 while (list)
6156 {
6157 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6158 == IDENTIFIER_NODE);
6159
6160 if (cmp_attrib_identifiers (attr_identifier,
6161 get_attribute_name (list)))
6162 /* Found it. */
6163 break;
6164 list = TREE_CHAIN (list);
6165 }
6166
6167 return list;
6168 }
6169
6170 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6171 modified list. */
6172
6173 tree
6174 remove_attribute (const char *attr_name, tree list)
6175 {
6176 tree *p;
6177 size_t attr_len = strlen (attr_name);
6178
6179 gcc_checking_assert (attr_name[0] != '_');
6180
6181 for (p = &list; *p; )
6182 {
6183 tree l = *p;
6184 /* TODO: If we were storing attributes in normalized form, here
6185 we could use a simple strcmp(). */
6186 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6187 *p = TREE_CHAIN (l);
6188 else
6189 p = &TREE_CHAIN (l);
6190 }
6191
6192 return list;
6193 }
6194
/* Return an attribute list that is the union of a1 and a2.  The result
   may share structure with A1 and A2; neither input list is modified.
   Duplicate attributes (same name and equal value) appear only once.  */

tree
merge_attributes (tree a1, tree a2)
{
  tree attributes;

  /* Either one unset?  Take the set one.  */

  if ((attributes = a1) == 0)
    attributes = a2;

  /* One that completely contains the other?  Take it.  */

  else if (a2 != 0 && ! attribute_list_contained (a1, a2))
    {
      if (attribute_list_contained (a2, a1))
	attributes = a2;
      else
	{
	  /* Pick the longest list, and hang on the other list.  */
	  /* Note the comma operator: swap so ATTRIBUTES is the longer
	     list and A2 the one we copy missing entries from.  */

	  if (list_length (a1) < list_length (a2))
	    attributes = a2, a2 = a1;

	  for (; a2 != 0; a2 = TREE_CHAIN (a2))
	    {
	      tree a;
	      /* Scan for an existing entry with the same name and an
		 equal value; only copy A2's entry if none is found.  */
	      for (a = lookup_ident_attribute (get_attribute_name (a2),
					       attributes);
		   a != NULL_TREE && !attribute_value_equal (a, a2);
		   a = lookup_ident_attribute (get_attribute_name (a2),
					       TREE_CHAIN (a)))
		;
	      if (a == NULL_TREE)
		{
		  /* Prepend a copy of A2's node onto the result.  */
		  a1 = copy_node (a2);
		  TREE_CHAIN (a1) = attributes;
		  attributes = a1;
		}
	    }
	}
    }
  return attributes;
}
6240
/* Given types T1 and T2, merge their attributes and return
   the result.  Thin wrapper over merge_attributes.  */

tree
merge_type_attributes (tree t1, tree t2)
{
  return merge_attributes (TYPE_ATTRIBUTES (t1),
			   TYPE_ATTRIBUTES (t2));
}
6250
/* Given decls OLDDECL and NEWDECL, merge their attributes and return
   the result.  Thin wrapper over merge_attributes.  */

tree
merge_decl_attributes (tree olddecl, tree newdecl)
{
  return merge_attributes (DECL_ATTRIBUTES (olddecl),
			   DECL_ATTRIBUTES (newdecl));
}
6260
6261 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6262
/* Specialization of merge_decl_attributes for various Windows targets.

   This handles the following situation:

     __declspec (dllimport) int foo;
     int foo;

   The second instance of `foo' nullifies the dllimport.

   Returns the merged attribute list, with "dllimport" removed when the
   redeclaration cancels it.  May update DECL_DLLIMPORT_P on NEW_TREE
   and emit -Wattributes warnings as side effects.  */

tree
merge_dllimport_decl_attributes (tree old, tree new_tree)
{
  tree a;
  int delete_dllimport_p = 1;

  /* What we need to do here is remove from `old' dllimport if it doesn't
     appear in `new'.  dllimport behaves like extern: if a declaration is
     marked dllimport and a definition appears later, then the object
     is not dllimport'd.  We also remove a `new' dllimport if the old list
     contains dllexport:  dllexport always overrides dllimport, regardless
     of the order of declaration.  */
  if (!VAR_OR_FUNCTION_DECL_P (new_tree))
    delete_dllimport_p = 0;
  else if (DECL_DLLIMPORT_P (new_tree)
	   && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
    {
      /* dllexport on the old decl wins; drop the new dllimport.  */
      DECL_DLLIMPORT_P (new_tree) = 0;
      warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
	      "dllimport ignored", new_tree);
    }
  else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
    {
      /* Warn about overriding a symbol that has already been used, e.g.:
           extern int __attribute__ ((dllimport)) foo;
	   int* bar () {return &foo;}
	   int foo;
      */
      if (TREE_USED (old))
	{
	  warning (0, "%q+D redeclared without dllimport attribute "
		   "after being referenced with dll linkage", new_tree);
	  /* If we have used a variable's address with dllimport linkage,
	      keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
	      decl may already have had TREE_CONSTANT computed.
	      We still remove the attribute so that assembler code refers
	      to '&foo rather than '_imp__foo'.  */
	  if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
	    DECL_DLLIMPORT_P (new_tree) = 1;
	}

      /* Let an inline definition silently override the external reference,
	 but otherwise warn about attribute inconsistency.  */
      else if (TREE_CODE (new_tree) == VAR_DECL
	       || !DECL_DECLARED_INLINE_P (new_tree))
	warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
		  "previous dllimport ignored", new_tree);
    }
  else
    delete_dllimport_p = 0;

  a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));

  if (delete_dllimport_p)
    a = remove_attribute ("dllimport", a);

  return a;
}
6330
/* Handle a "dllimport" or "dllexport" attribute; arguments as in
   struct attribute_spec.handler.  PNODE points at the decl or type the
   attribute was written on, NAME is the attribute identifier, ARGS its
   arguments and FLAGS the ATTR_FLAG_* context bits.  Sets *NO_ADD_ATTRS
   when the attribute must not be attached.  Returns a tree_cons to
   defer the attribute to a later declaration, or NULL_TREE.  */

tree
handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
		      bool *no_add_attrs)
{
  tree node = *pnode;
  bool is_dllimport;

  /* These attributes may apply to structure and union types being created,
     but otherwise should pass to the declaration involved.  */
  if (!DECL_P (node))
    {
      /* A later decl/function/array declarator will receive it; return
	 the attribute so the caller can re-apply it there.  */
      if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
		   | (int) ATTR_FLAG_ARRAY_NEXT))
	{
	  *no_add_attrs = true;
	  return tree_cons (name, args, NULL_TREE);
	}
      if (TREE_CODE (node) == RECORD_TYPE
	  || TREE_CODE (node) == UNION_TYPE)
	{
	  /* Redirect the attribute to the type's TYPE_DECL.  */
	  node = TYPE_NAME (node);
	  if (!node)
	    return NULL_TREE;
	}
      else
	{
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}
    }

  /* Only functions, variables and type decls can carry dll attributes.  */
  if (TREE_CODE (node) != FUNCTION_DECL
      && TREE_CODE (node) != VAR_DECL
      && TREE_CODE (node) != TYPE_DECL)
    {
      *no_add_attrs = true;
      warning (OPT_Wattributes, "%qE attribute ignored",
	       name);
      return NULL_TREE;
    }

  /* A TYPE_DECL is only acceptable for record/union types.  */
  if (TREE_CODE (node) == TYPE_DECL
      && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
      && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
    {
      *no_add_attrs = true;
      warning (OPT_Wattributes, "%qE attribute ignored",
	       name);
      return NULL_TREE;
    }

  is_dllimport = is_attribute_p ("dllimport", name);

  /* Report error on dllimport ambiguities seen now before they cause
     any damage.  */
  if (is_dllimport)
    {
      /* Honor any target-specific overrides.  */
      if (!targetm.valid_dllimport_attribute_p (node))
	*no_add_attrs = true;

     else if (TREE_CODE (node) == FUNCTION_DECL
	        && DECL_DECLARED_INLINE_P (node))
	{
	  warning (OPT_Wattributes, "inline function %q+D declared as "
		  " dllimport: attribute ignored", node);
	  *no_add_attrs = true;
	}
      /* Like MS, treat definition of dllimported variables and
	 non-inlined functions on declaration as syntax errors.  */
     else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
	{
	  error ("function %q+D definition is marked dllimport", node);
	  *no_add_attrs = true;
	}

     else if (TREE_CODE (node) == VAR_DECL)
	{
	  if (DECL_INITIAL (node))
	    {
	      error ("variable %q+D definition is marked dllimport",
		     node);
	      *no_add_attrs = true;
	    }

	  /* `extern' needn't be specified with dllimport.
	     Specify `extern' now and hope for the best.  Sigh.  */
	  DECL_EXTERNAL (node) = 1;
	  /* Also, implicitly give dllimport'd variables declared within
	     a function global scope, unless declared static.  */
	  if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
	    TREE_PUBLIC (node) = 1;
	}

      if (*no_add_attrs == false)
        DECL_DLLIMPORT_P (node) = 1;
    }
  else if (TREE_CODE (node) == FUNCTION_DECL
	   && DECL_DECLARED_INLINE_P (node)
	   && flag_keep_inline_dllexport)
    /* An exported function, even if inline, must be emitted.  */
    DECL_EXTERNAL (node) = 0;

  /* Report error if symbol is not accessible at global scope.  */
  if (!TREE_PUBLIC (node)
      && (TREE_CODE (node) == VAR_DECL
	  || TREE_CODE (node) == FUNCTION_DECL))
    {
      error ("external linkage required for symbol %q+D because of "
	     "%qE attribute", node, name);
      *no_add_attrs = true;
    }

  /* A dllexport'd entity must have default visibility so that other
     program units (shared libraries or the main executable) can see
     it.  A dllimport'd entity must have default visibility so that
     the linker knows that undefined references within this program
     unit can be resolved by the dynamic linker.  */
  if (!*no_add_attrs)
    {
      if (DECL_VISIBILITY_SPECIFIED (node)
	  && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
	error ("%qE implies default visibility, but %qD has already "
	       "been declared with a different visibility",
	       name, node);
      DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (node) = 1;
    }

  return NULL_TREE;
}
6467
6468 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6469 \f
6470 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6471 of the various TYPE_QUAL values. */
6472
6473 static void
6474 set_type_quals (tree type, int type_quals)
6475 {
6476 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6477 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6478 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6479 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6480 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6481 }
6482
6483 /* Returns true iff unqualified CAND and BASE are equivalent. */
6484
6485 bool
6486 check_base_type (const_tree cand, const_tree base)
6487 {
6488 return (TYPE_NAME (cand) == TYPE_NAME (base)
6489 /* Apparently this is needed for Objective-C. */
6490 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6491 /* Check alignment. */
6492 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6493 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6494 TYPE_ATTRIBUTES (base)));
6495 }
6496
6497 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6498
6499 bool
6500 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6501 {
6502 return (TYPE_QUALS (cand) == type_quals
6503 && check_base_type (cand, base));
6504 }
6505
6506 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6507
6508 static bool
6509 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6510 {
6511 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6512 && TYPE_NAME (cand) == TYPE_NAME (base)
6513 /* Apparently this is needed for Objective-C. */
6514 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6515 /* Check alignment. */
6516 && TYPE_ALIGN (cand) == align
6517 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6518 TYPE_ATTRIBUTES (base)));
6519 }
6520
6521 /* This function checks to see if TYPE matches the size one of the built-in
6522 atomic types, and returns that core atomic type. */
6523
6524 static tree
6525 find_atomic_core_type (tree type)
6526 {
6527 tree base_atomic_type;
6528
6529 /* Only handle complete types. */
6530 if (TYPE_SIZE (type) == NULL_TREE)
6531 return NULL_TREE;
6532
6533 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6534 switch (type_size)
6535 {
6536 case 8:
6537 base_atomic_type = atomicQI_type_node;
6538 break;
6539
6540 case 16:
6541 base_atomic_type = atomicHI_type_node;
6542 break;
6543
6544 case 32:
6545 base_atomic_type = atomicSI_type_node;
6546 break;
6547
6548 case 64:
6549 base_atomic_type = atomicDI_type_node;
6550 break;
6551
6552 case 128:
6553 base_atomic_type = atomicTI_type_node;
6554 break;
6555
6556 default:
6557 base_atomic_type = NULL_TREE;
6558 }
6559
6560 return base_atomic_type;
6561 }
6562
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  Never creates a new type.  */

tree
get_qualified_type (tree type, int type_quals)
{
  tree t;

  /* TYPE itself already carries exactly the requested qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    if (check_qualified_type (t, type, type_quals))
      return t;

  return NULL_TREE;
}
6584
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type);
      set_type_quals (t, type_quals);

      /* An atomic variant may require stricter alignment so that it
	 is compatible with the matching built-in atomic type.  */
      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE.  The recursion terminates because the canonical
	   chain eventually reaches a self-canonical type.  */
	{
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
6633
/* Create a variant of type T with alignment ALIGN (in bits).  Reuses
   an existing variant with that alignment if one is already on the
   variant chain.  Packed types are returned unchanged.  */

tree
build_aligned_type (tree type, unsigned int align)
{
  tree t;

  /* Nothing to do if TYPE is packed or already has the requested
     alignment.  */
  if (TYPE_PACKED (type)
      || TYPE_ALIGN (type) == align)
    return type;

  /* Look for an existing variant with this alignment first.  */
  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    if (check_aligned_type (t, type, align))
      return t;

  t = build_variant_type_copy (type);
  TYPE_ALIGN (t) = align;

  return t;
}
6654
/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT.  If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself.  */

tree
build_distinct_type_copy (tree type)
{
  tree t = copy_node (type);

  /* The copy must grow its own pointer/reference type caches.  */
  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* We do not record methods in type copies nor variants
     so we do not need to keep them up to date when new method
     is inserted.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    TYPE_METHODS (t) = NULL_TREE;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}
6691
/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks).  */

tree
build_variant_type_copy (tree type)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P.  */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE, splicing it in
     right after the main variant M.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
6718 \f
6719 /* Return true if the from tree in both tree maps are equal. */
6720
6721 int
6722 tree_map_base_eq (const void *va, const void *vb)
6723 {
6724 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6725 *const b = (const struct tree_map_base *) vb;
6726 return (a->from == b->from);
6727 }
6728
6729 /* Hash a from tree in a tree_base_map. */
6730
6731 unsigned int
6732 tree_map_base_hash (const void *item)
6733 {
6734 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6735 }
6736
6737 /* Return true if this tree map structure is marked for garbage collection
6738 purposes. We simply return true if the from tree is marked, so that this
6739 structure goes away when the from tree goes away. */
6740
6741 int
6742 tree_map_base_marked_p (const void *p)
6743 {
6744 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6745 }
6746
6747 /* Hash a from tree in a tree_map. */
6748
6749 unsigned int
6750 tree_map_hash (const void *item)
6751 {
6752 return (((const struct tree_map *) item)->hash);
6753 }
6754
6755 /* Hash a from tree in a tree_decl_map. */
6756
6757 unsigned int
6758 tree_decl_map_hash (const void *item)
6759 {
6760 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6761 }
6762
6763 /* Return the initialization priority for DECL. */
6764
6765 priority_type
6766 decl_init_priority_lookup (tree decl)
6767 {
6768 symtab_node *snode = symtab_node::get (decl);
6769
6770 if (!snode)
6771 return DEFAULT_INIT_PRIORITY;
6772 return
6773 snode->get_init_priority ();
6774 }
6775
6776 /* Return the finalization priority for DECL. */
6777
6778 priority_type
6779 decl_fini_priority_lookup (tree decl)
6780 {
6781 cgraph_node *node = cgraph_node::get (decl);
6782
6783 if (!node)
6784 return DEFAULT_INIT_PRIORITY;
6785 return
6786 node->get_fini_priority ();
6787 }
6788
/* Set the initialization priority for DECL to PRIORITY.  */

void
decl_init_priority_insert (tree decl, priority_type priority)
{
  struct symtab_node *snode;

  /* Storing the default priority is only worthwhile when a symtab
     node already exists (to reset a previously-set priority); don't
     create one just for that.  */
  if (priority == DEFAULT_INIT_PRIORITY)
    {
      snode = symtab_node::get (decl);
      if (!snode)
	return;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    snode = varpool_node::get_create (decl);
  else
    snode = cgraph_node::get_create (decl);
  snode->set_init_priority (priority);
}
6808
/* Set the finalization priority for DECL to PRIORITY.  */

void
decl_fini_priority_insert (tree decl, priority_type priority)
{
  struct cgraph_node *node;

  /* As in decl_init_priority_insert: only reset an existing node to
     the default; don't create a node just to store the default.  */
  if (priority == DEFAULT_INIT_PRIORITY)
    {
      node = cgraph_node::get (decl);
      if (!node)
	return;
    }
  else
    node = cgraph_node::get_create (decl);
  node->set_fini_priority (priority);
}
6826
/* Print out the statistics for the DECL_DEBUG_EXPR hash table
   (size, element count and collision rate) to stderr.  */

static void
print_debug_expr_statistics (void)
{
  fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) debug_expr_for_decl->size (),
	   (long) debug_expr_for_decl->elements (),
	   debug_expr_for_decl->collisions ());
}
6837
/* Print out the statistics for the DECL_VALUE_EXPR hash table
   (size, element count and collision rate) to stderr.  */

static void
print_value_expr_statistics (void)
{
  fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) value_expr_for_decl->size (),
	   (long) value_expr_for_decl->elements (),
	   value_expr_for_decl->collisions ());
}
6848
6849 /* Lookup a debug expression for FROM, and return it if we find one. */
6850
6851 tree
6852 decl_debug_expr_lookup (tree from)
6853 {
6854 struct tree_decl_map *h, in;
6855 in.base.from = from;
6856
6857 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6858 if (h)
6859 return h->to;
6860 return NULL_TREE;
6861 }
6862
6863 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6864
6865 void
6866 decl_debug_expr_insert (tree from, tree to)
6867 {
6868 struct tree_decl_map *h;
6869
6870 h = ggc_alloc<tree_decl_map> ();
6871 h->base.from = from;
6872 h->to = to;
6873 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6874 }
6875
6876 /* Lookup a value expression for FROM, and return it if we find one. */
6877
6878 tree
6879 decl_value_expr_lookup (tree from)
6880 {
6881 struct tree_decl_map *h, in;
6882 in.base.from = from;
6883
6884 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6885 if (h)
6886 return h->to;
6887 return NULL_TREE;
6888 }
6889
6890 /* Insert a mapping FROM->TO in the value expression hashtable. */
6891
6892 void
6893 decl_value_expr_insert (tree from, tree to)
6894 {
6895 struct tree_decl_map *h;
6896
6897 h = ggc_alloc<tree_decl_map> ();
6898 h->base.from = from;
6899 h->to = to;
6900 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6901 }
6902
/* Lookup a vector of debug arguments for FROM, and return it if we
   find one.  Returns NULL when FROM has no debug arguments.  */

vec<tree, va_gc> **
decl_debug_args_lookup (tree from)
{
  struct tree_vec_map *h, in;

  /* The flag on the decl is the cheap existence test; the hash table
     is only consulted when it is set.  */
  if (!DECL_HAS_DEBUG_ARGS_P (from))
    return NULL;
  gcc_checking_assert (debug_args_for_decl != NULL);
  in.base.from = from;
  h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return &h->to;
  return NULL;
}
6920
/* Insert a mapping FROM->empty vector of debug arguments in the value
   expression hashtable.  Returns the (possibly pre-existing) slot for
   FROM's debug-argument vector.  */

vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
  struct tree_vec_map *h;
  tree_vec_map **loc;

  /* Idempotent: if FROM already has debug args, return the existing
     slot instead of inserting a second entry.  */
  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  /* The table is created lazily on first insertion.  */
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  h->to = NULL;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  *loc = h;
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
  return &h->to;
}
6942
6943 /* Hashing of types so that we don't make duplicates.
6944 The entry point is `type_hash_canon'. */
6945
6946 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6947 with types in the TREE_VALUE slots), by adding the hash codes
6948 of the individual types. */
6949
6950 static void
6951 type_hash_list (const_tree list, inchash::hash &hstate)
6952 {
6953 const_tree tail;
6954
6955 for (tail = list; tail; tail = TREE_CHAIN (tail))
6956 if (TREE_VALUE (tail) != error_mark_node)
6957 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6958 }
6959
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  A and B are type_hash
   entries; equivalence means the two types could be interned as one
   node by type_hash_canon.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				 TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
          && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types
     are complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Per-code comparison of the distinguishing fields.  A `break' out
     of the switch means "equal so far, defer to the language hook
     below"; a `return' is a definitive answer.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      /* Fully determined by TREE_TYPE, checked above.  */
      return 1;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);

    case ENUMERAL_TYPE:
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds may be shared nodes or distinct-but-equal constants.  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* Let the frontend have the final word on method/function types.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
7072
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type already exists: return it and release the
	 freshly-built duplicate back to the node pool.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
      free_node (type);
      return t1;
    }
  else
    {
      /* TYPE is new; record it as the canonical representative.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
7119
/* Print out the statistics for the type hash table (size, element
   count and collision rate) to stderr.  */
static void
print_type_hash_statistics (void)
{
  fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
	   (long) type_hash_table->size (),
	   (long) type_hash_table->elements (),
	   type_hash_table->collisions ());
}
7128
/* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
   with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
   by adding the hash codes of the individual attributes.  Note that
   attribute arguments do not participate in the hash, only names.  */

static void
attribute_hash_list (const_tree list, inchash::hash &hstate)
{
  const_tree tail;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    /* ??? Do we want to add in TREE_VALUE too? */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
}
7142
7143 /* Given two lists of attributes, return true if list l2 is
7144 equivalent to l1. */
7145
7146 int
7147 attribute_list_equal (const_tree l1, const_tree l2)
7148 {
7149 if (l1 == l2)
7150 return 1;
7151
7152 return attribute_list_contained (l1, l2)
7153 && attribute_list_contained (l2, l1);
7154 }
7155
/* Given two lists of attributes, return true if list L2 is
   completely contained within L1.  */
/* ??? This would be faster if attribute names were stored in a canonicalized
   form.  Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
   must be used to show these elements are equivalent (which they are).  */
/* ??? It's not clear that attributes with arguments will always be handled
   correctly.  */

int
attribute_list_contained (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  /* First check the obvious, maybe the lists are identical.  */
  if (l1 == l2)
    return 1;

  /* Maybe the lists are similar.  Walk both in lockstep as long as
     corresponding entries match by identity.  */
  for (t1 = l1, t2 = l2;
       t1 != 0 && t2 != 0
        && get_attribute_name (t1) == get_attribute_name (t2)
        && TREE_VALUE (t1) == TREE_VALUE (t2);
       t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    ;

  /* Maybe the lists are equal.  */
  if (t1 == 0 && t2 == 0)
    return 1;

  /* Fall back to the general (quadratic) case: every remaining entry
     of L2 must have a value-equal counterpart somewhere in L1.  */
  for (; t2 != 0; t2 = TREE_CHAIN (t2))
    {
      const_tree attr;
      /* This CONST_CAST is okay because lookup_attribute does not
	 modify its argument and the return value is assigned to a
	 const_tree.  */
      for (attr = lookup_ident_attribute (get_attribute_name (t2),
					  CONST_CAST_TREE (l1));
	   attr != NULL_TREE && !attribute_value_equal (t2, attr);
	   attr = lookup_ident_attribute (get_attribute_name (t2),
					  TREE_CHAIN (attr)))
	;

      if (attr == NULL_TREE)
	return 0;
    }

  return 1;
}
7204
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

int
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	/* Purposes (e.g. default arguments) may be distinct nodes that
	   are still simple-constant-equal with matching types.  */
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
      return 0;

  /* Equal only if both lists ended together (both now NULL).  */
  return t1 == t2;
}
7225
7226 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7227 given by TYPE. If the argument list accepts variable arguments,
7228 then this function counts only the ordinary arguments. */
7229
7230 int
7231 type_num_arguments (const_tree type)
7232 {
7233 int i = 0;
7234 tree t;
7235
7236 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7237 /* If the function does not take a variable number of arguments,
7238 the last element in the list will have type `void'. */
7239 if (VOID_TYPE_P (TREE_VALUE (t)))
7240 break;
7241 else
7242 ++i;
7243
7244 return i;
7245 }
7246
7247 /* Nonzero if integer constants T1 and T2
7248 represent the same constant value. */
7249
7250 int
7251 tree_int_cst_equal (const_tree t1, const_tree t2)
7252 {
7253 if (t1 == t2)
7254 return 1;
7255
7256 if (t1 == 0 || t2 == 0)
7257 return 0;
7258
7259 if (TREE_CODE (t1) == INTEGER_CST
7260 && TREE_CODE (t2) == INTEGER_CST
7261 && wi::to_widest (t1) == wi::to_widest (t2))
7262 return 1;
7263
7264 return 0;
7265 }
7266
7267 /* Return true if T is an INTEGER_CST whose numerical value (extended
7268 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7269
7270 bool
7271 tree_fits_shwi_p (const_tree t)
7272 {
7273 return (t != NULL_TREE
7274 && TREE_CODE (t) == INTEGER_CST
7275 && wi::fits_shwi_p (wi::to_widest (t)));
7276 }
7277
7278 /* Return true if T is an INTEGER_CST whose numerical value (extended
7279 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7280
7281 bool
7282 tree_fits_uhwi_p (const_tree t)
7283 {
7284 return (t != NULL_TREE
7285 && TREE_CODE (t) == INTEGER_CST
7286 && wi::fits_uhwi_p (wi::to_widest (t)));
7287 }
7288
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  Asserts if the precondition does not hold; callers
   must check tree_fits_shwi_p first.  */

HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
}
7299
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  Asserts if the precondition does not hold; callers
   must check tree_fits_uhwi_p first.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
}
7310
/* Return the most significant (sign) bit of T, i.e. the bit at
   position TYPE_PRECISION - 1 of T's type.  */

int
tree_int_cst_sign_bit (const_tree t)
{
  unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;

  return wi::extract_uhwi (t, bitno, 1);
}
7320
7321 /* Return an indication of the sign of the integer constant T.
7322 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7323 Note that -1 will never be returned if T's type is unsigned. */
7324
7325 int
7326 tree_int_cst_sgn (const_tree t)
7327 {
7328 if (wi::eq_p (t, 0))
7329 return 0;
7330 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7331 return 1;
7332 else if (wi::neg_p (t))
7333 return -1;
7334 else
7335 return 1;
7336 }
7337
/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type, SGN says which.  */

unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
{
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 no matter
     whether unsignedp is true or false.  */

  if (integer_zerop (value))
    return 1;
  else
    return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
}
7362
7363 /* Return truthvalue of whether T1 is the same tree structure as T2.
7364 Return 1 if they are the same.
7365 Return 0 if they are understandably different.
7366 Return -1 if either contains tree structure not understood by
7367 this function. */
7368
7369 int
7370 simple_cst_equal (const_tree t1, const_tree t2)
7371 {
7372 enum tree_code code1, code2;
7373 int cmp;
7374 int i;
7375
7376 if (t1 == t2)
7377 return 1;
7378 if (t1 == 0 || t2 == 0)
7379 return 0;
7380
7381 code1 = TREE_CODE (t1);
7382 code2 = TREE_CODE (t2);
7383
7384 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7385 {
7386 if (CONVERT_EXPR_CODE_P (code2)
7387 || code2 == NON_LVALUE_EXPR)
7388 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7389 else
7390 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7391 }
7392
7393 else if (CONVERT_EXPR_CODE_P (code2)
7394 || code2 == NON_LVALUE_EXPR)
7395 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7396
7397 if (code1 != code2)
7398 return 0;
7399
7400 switch (code1)
7401 {
7402 case INTEGER_CST:
7403 return wi::to_widest (t1) == wi::to_widest (t2);
7404
7405 case REAL_CST:
7406 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7407
7408 case FIXED_CST:
7409 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7410
7411 case STRING_CST:
7412 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7413 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7414 TREE_STRING_LENGTH (t1)));
7415
7416 case CONSTRUCTOR:
7417 {
7418 unsigned HOST_WIDE_INT idx;
7419 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7420 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7421
7422 if (vec_safe_length (v1) != vec_safe_length (v2))
7423 return false;
7424
7425 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7426 /* ??? Should we handle also fields here? */
7427 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7428 return false;
7429 return true;
7430 }
7431
7432 case SAVE_EXPR:
7433 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7434
7435 case CALL_EXPR:
7436 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7437 if (cmp <= 0)
7438 return cmp;
7439 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7440 return 0;
7441 {
7442 const_tree arg1, arg2;
7443 const_call_expr_arg_iterator iter1, iter2;
7444 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7445 arg2 = first_const_call_expr_arg (t2, &iter2);
7446 arg1 && arg2;
7447 arg1 = next_const_call_expr_arg (&iter1),
7448 arg2 = next_const_call_expr_arg (&iter2))
7449 {
7450 cmp = simple_cst_equal (arg1, arg2);
7451 if (cmp <= 0)
7452 return cmp;
7453 }
7454 return arg1 == arg2;
7455 }
7456
7457 case TARGET_EXPR:
7458 /* Special case: if either target is an unallocated VAR_DECL,
7459 it means that it's going to be unified with whatever the
7460 TARGET_EXPR is really supposed to initialize, so treat it
7461 as being equivalent to anything. */
7462 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7463 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7464 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7465 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7466 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7467 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7468 cmp = 1;
7469 else
7470 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7471
7472 if (cmp <= 0)
7473 return cmp;
7474
7475 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7476
7477 case WITH_CLEANUP_EXPR:
7478 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7479 if (cmp <= 0)
7480 return cmp;
7481
7482 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7483
7484 case COMPONENT_REF:
7485 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7486 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7487
7488 return 0;
7489
7490 case VAR_DECL:
7491 case PARM_DECL:
7492 case CONST_DECL:
7493 case FUNCTION_DECL:
7494 return 0;
7495
7496 default:
7497 break;
7498 }
7499
7500 /* This general rule works for most tree codes. All exceptions should be
7501 handled above. If this is a language-specific tree code, we can't
7502 trust what might be in the operand, so say we don't know
7503 the situation. */
7504 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7505 return -1;
7506
7507 switch (TREE_CODE_CLASS (code1))
7508 {
7509 case tcc_unary:
7510 case tcc_binary:
7511 case tcc_comparison:
7512 case tcc_expression:
7513 case tcc_reference:
7514 case tcc_statement:
7515 cmp = 1;
7516 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7517 {
7518 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7519 if (cmp <= 0)
7520 return cmp;
7521 }
7522
7523 return cmp;
7524
7525 default:
7526 return -1;
7527 }
7528 }
7529
7530 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7531 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7532 than U, respectively. */
7533
7534 int
7535 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7536 {
7537 if (tree_int_cst_sgn (t) < 0)
7538 return -1;
7539 else if (!tree_fits_uhwi_p (t))
7540 return 1;
7541 else if (TREE_INT_CST_LOW (t) == u)
7542 return 0;
7543 else if (TREE_INT_CST_LOW (t) < u)
7544 return -1;
7545 else
7546 return 1;
7547 }
7548
7549 /* Return true if SIZE represents a constant size that is in bounds of
7550 what the middle-end and the backend accepts (covering not more than
7551 half of the address-space). */
7552
7553 bool
7554 valid_constant_size_p (const_tree size)
7555 {
7556 if (! tree_fits_uhwi_p (size)
7557 || TREE_OVERFLOW (size)
7558 || tree_int_cst_sign_bit (size) != 0)
7559 return false;
7560 return true;
7561 }
7562
/* Return the precision of the type, or for a complex or vector type the
   precision of the type of its elements.  TYPE may also be an
   expression, in which case its type is used.  */

unsigned int
element_precision (const_tree type)
{
  /* Accept an expression and look at its type.  */
  if (!TYPE_P (type))
    type = TREE_TYPE (type);
  enum tree_code code = TREE_CODE (type);
  /* For composite numeric types, drill down to the element type.  */
  if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
    type = TREE_TYPE (type);

  return TYPE_PRECISION (type);
}
7577
7578 /* Return true if CODE represents an associative tree code. Otherwise
7579 return false. */
7580 bool
7581 associative_tree_code (enum tree_code code)
7582 {
7583 switch (code)
7584 {
7585 case BIT_IOR_EXPR:
7586 case BIT_AND_EXPR:
7587 case BIT_XOR_EXPR:
7588 case PLUS_EXPR:
7589 case MULT_EXPR:
7590 case MIN_EXPR:
7591 case MAX_EXPR:
7592 return true;
7593
7594 default:
7595 break;
7596 }
7597 return false;
7598 }
7599
7600 /* Return true if CODE represents a commutative tree code. Otherwise
7601 return false. */
7602 bool
7603 commutative_tree_code (enum tree_code code)
7604 {
7605 switch (code)
7606 {
7607 case PLUS_EXPR:
7608 case MULT_EXPR:
7609 case MULT_HIGHPART_EXPR:
7610 case MIN_EXPR:
7611 case MAX_EXPR:
7612 case BIT_IOR_EXPR:
7613 case BIT_XOR_EXPR:
7614 case BIT_AND_EXPR:
7615 case NE_EXPR:
7616 case EQ_EXPR:
7617 case UNORDERED_EXPR:
7618 case ORDERED_EXPR:
7619 case UNEQ_EXPR:
7620 case LTGT_EXPR:
7621 case TRUTH_AND_EXPR:
7622 case TRUTH_XOR_EXPR:
7623 case TRUTH_OR_EXPR:
7624 case WIDEN_MULT_EXPR:
7625 case VEC_WIDEN_MULT_HI_EXPR:
7626 case VEC_WIDEN_MULT_LO_EXPR:
7627 case VEC_WIDEN_MULT_EVEN_EXPR:
7628 case VEC_WIDEN_MULT_ODD_EXPR:
7629 return true;
7630
7631 default:
7632 break;
7633 }
7634 return false;
7635 }
7636
7637 /* Return true if CODE represents a ternary tree code for which the
7638 first two operands are commutative. Otherwise return false. */
7639 bool
7640 commutative_ternary_tree_code (enum tree_code code)
7641 {
7642 switch (code)
7643 {
7644 case WIDEN_MULT_PLUS_EXPR:
7645 case WIDEN_MULT_MINUS_EXPR:
7646 case DOT_PROD_EXPR:
7647 case FMA_EXPR:
7648 return true;
7649
7650 default:
7651 break;
7652 }
7653 return false;
7654 }
7655
7656 /* Returns true if CODE can overflow. */
7657
7658 bool
7659 operation_can_overflow (enum tree_code code)
7660 {
7661 switch (code)
7662 {
7663 case PLUS_EXPR:
7664 case MINUS_EXPR:
7665 case MULT_EXPR:
7666 case LSHIFT_EXPR:
7667 /* Can overflow in various ways. */
7668 return true;
7669 case TRUNC_DIV_EXPR:
7670 case EXACT_DIV_EXPR:
7671 case FLOOR_DIV_EXPR:
7672 case CEIL_DIV_EXPR:
7673 /* For INT_MIN / -1. */
7674 return true;
7675 case NEGATE_EXPR:
7676 case ABS_EXPR:
7677 /* For -INT_MIN. */
7678 return true;
7679 default:
7680 /* These operators cannot overflow. */
7681 return false;
7682 }
7683 }
7684
7685 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7686 ftrapv doesn't generate trapping insns for CODE. */
7687
7688 bool
7689 operation_no_trapping_overflow (tree type, enum tree_code code)
7690 {
7691 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7692
7693 /* We don't generate instructions that trap on overflow for complex or vector
7694 types. */
7695 if (!INTEGRAL_TYPE_P (type))
7696 return true;
7697
7698 if (!TYPE_OVERFLOW_TRAPS (type))
7699 return true;
7700
7701 switch (code)
7702 {
7703 case PLUS_EXPR:
7704 case MINUS_EXPR:
7705 case MULT_EXPR:
7706 case NEGATE_EXPR:
7707 case ABS_EXPR:
7708 /* These operators can overflow, and -ftrapv generates trapping code for
7709 these. */
7710 return false;
7711 case TRUNC_DIV_EXPR:
7712 case EXACT_DIV_EXPR:
7713 case FLOOR_DIV_EXPR:
7714 case CEIL_DIV_EXPR:
7715 case LSHIFT_EXPR:
7716 /* These operators can overflow, but -ftrapv does not generate trapping
7717 code for these. */
7718 return true;
7719 default:
7720 /* These operators cannot overflow. */
7721 return true;
7722 }
7723 }
7724
namespace inchash
{

/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p.  */
void
add_expr (const_tree t, inchash::hash &hstate)
{
  int i;
  enum tree_code code;
  enum tree_code_class tclass;

  /* A null expression contributes a fixed 0 to the hash.  */
  if (t == NULL_TREE)
    {
      hstate.merge_hash (0);
      return;
    }

  code = TREE_CODE (t);

  switch (code)
    {
    /* Alas, constants aren't shared, so we can't rely on pointer
       identity.  */
    case VOID_CST:
      hstate.merge_hash (0);
      return;
    case INTEGER_CST:
      /* Hash every HOST_WIDE_INT element of the constant in order.  */
      for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
      return;
    case REAL_CST:
      {
	unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case FIXED_CST:
      {
	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case STRING_CST:
      /* Hash the raw bytes of the string, including embedded NULs.  */
      hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
      return;
    case COMPLEX_CST:
      inchash::add_expr (TREE_REALPART (t), hstate);
      inchash::add_expr (TREE_IMAGPART (t), hstate);
      return;
    case VECTOR_CST:
      {
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
	  inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
	return;
      }
    case SSA_NAME:
      /* We can just compare by pointer.  */
      hstate.add_wide_int (SSA_NAME_VERSION (t));
      return;
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter.  */
      return;
    case TREE_LIST:
      /* A list of expressions, for a CALL_EXPR or as the elements of a
	 VECTOR_CST.  */
      for (; t; t = TREE_CHAIN (t))
	inchash::add_expr (TREE_VALUE (t), hstate);
      return;
    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	/* Hash each (field, value) pair in element order.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  {
	    inchash::add_expr (field, hstate);
	    inchash::add_expr (value, hstate);
	  }
	return;
      }
    case FUNCTION_DECL:
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes.  However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
	 cases.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	{
	  /* Re-fetch the canonical builtin decl and fall through to the
	     generic declaration handling below.  */
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);
	}
      /* FALL THROUGH */
    default:
      tclass = TREE_CODE_CLASS (code);

      if (tclass == tcc_declaration)
	{
	  /* DECL's have a unique ID */
	  hstate.add_wide_int (DECL_UID (t));
	}
      else
	{
	  gcc_assert (IS_EXPR_CODE_CLASS (tclass));

	  hstate.add_object (code);

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  */
	  if (CONVERT_EXPR_CODE_P (code)
	      || code == NON_LVALUE_EXPR)
	    {
	      /* Make sure to include signness in the hash computation.  */
	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	      inchash::add_expr (TREE_OPERAND (t, 0), hstate);
	    }

	  else if (commutative_tree_code (code))
	    {
	      /* It's a commutative expression.  We want to hash it the same
		 however it appears.  We do this by first hashing both operands
		 and then rehashing based on the order of their independent
		 hashes.  */
	      inchash::hash one, two;
	      inchash::add_expr (TREE_OPERAND (t, 0), one);
	      inchash::add_expr (TREE_OPERAND (t, 1), two);
	      hstate.add_commutative (one, two);
	    }
	  else
	    /* Otherwise hash all operands, iterating from last to first.  */
	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	      inchash::add_expr (TREE_OPERAND (t, i), hstate);
	}
      return;
    }
}

}
7867
7868 /* Constructors for pointer, array and function types.
7869 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7870 constructed by language-dependent code, not here.) */
7871
/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
   reference all of memory.  If such a type has already been
   constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's value: the may_alias check below may force
     CAN_ALIAS_ALL on, but the canonical type is built with the
     original setting.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new pointer type onto TO_TYPE's chain of pointer types.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
7932
7933 /* By default build pointers in ptr_mode. */
7934
7935 tree
7936 build_pointer_type (tree to_type)
7937 {
7938 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7939 : TYPE_ADDR_SPACE (to_type);
7940 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7941 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7942 }
7943
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's value: the may_alias check below may force
     CAN_ALIAS_ALL on, but the canonical type is built with the
     original setting.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new reference type onto TO_TYPE's chain of reference types.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
7999
8000
8001 /* Build the node for the type of references-to-TO_TYPE by default
8002 in ptr_mode. */
8003
8004 tree
8005 build_reference_type (tree to_type)
8006 {
8007 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8008 : TYPE_ADDR_SPACE (to_type);
8009 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8010 return build_reference_type_for_mode (to_type, pointer_mode, false);
8011 }
8012
8013 #define MAX_INT_CACHED_PREC \
8014 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8015 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8016
/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* The cache keeps signed types in slots [0, MAX_INT_CACHED_PREC] and
     unsigned types in the slots above that; biasing UNSIGNEDP by
     MAX_INT_CACHED_PREC + 1 selects the unsigned half.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Set the min/max values, size and mode for the requested signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  ret = itype;
  /* Only hash-canonicalize when the maximum value fits in an unsigned
     HOST_WIDE_INT, so it can serve as the hash key.  */
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
    ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
8052
8053 #define MAX_BOOL_CACHED_PREC \
8054 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8055 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8056
8057 /* Builds a boolean type of precision PRECISION.
8058 Used for boolean vectors to choose proper vector element size. */
8059 tree
8060 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8061 {
8062 tree type;
8063
8064 if (precision <= MAX_BOOL_CACHED_PREC)
8065 {
8066 type = nonstandard_boolean_type_cache[precision];
8067 if (type)
8068 return type;
8069 }
8070
8071 type = make_node (BOOLEAN_TYPE);
8072 TYPE_PRECISION (type) = precision;
8073 fixup_signed_type (type);
8074
8075 if (precision <= MAX_INT_CACHED_PREC)
8076 nonstandard_boolean_type_cache[precision] = type;
8077
8078 return type;
8079 }
8080
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);
  inchash::hash hstate;

  TREE_TYPE (itype) = type;

  /* Convert the bounds to the base type; HIGHVAL may be null for a
     range with no upper bound.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type inherits its representation from the base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  TYPE_ALIGN (itype) = TYPE_ALIGN (type);
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);

  if (!shared)
    return itype;

  /* Non-constant bounds cannot be hashed for sharing.  */
  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Hash the bounds and the base type, then share via the type hash
     table.  */
  inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  hstate.merge_hash (TYPE_HASH (type));
  itype = type_hash_canon (hstate.end (), itype);

  return itype;
}
8124
/* Wrapper around build_range_type_1 with SHARED set to true, i.e. the
   resulting range type is shared via the type hash table when its
   bounds are constant.  */

tree
build_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, true);
}
8132
/* Wrapper around build_range_type_1 with SHARED set to false, i.e. the
   resulting range type is always a fresh node, never reused from the
   type hash table.  */

tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, false);
}
8140
/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front end).
   The limit exists because the result is a signed type and we don't handle
   sizes that use more than one HOST_WIDE_INT.  */

tree
build_index_type (tree maxval)
{
  /* Index domains are zero-based ranges of sizetype.  */
  return build_range_type (sizetype, size_zero_node, maxval);
}
8155
8156 /* Return true if the debug information for TYPE, a subtype, should be emitted
8157 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8158 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8159 debug info and doesn't reflect the source code. */
8160
8161 bool
8162 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8163 {
8164 tree base_type = TREE_TYPE (type), low, high;
8165
8166 /* Subrange types have a base type which is an integral type. */
8167 if (!INTEGRAL_TYPE_P (base_type))
8168 return false;
8169
8170 /* Get the real bounds of the subtype. */
8171 if (lang_hooks.types.get_subrange_bounds)
8172 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8173 else
8174 {
8175 low = TYPE_MIN_VALUE (type);
8176 high = TYPE_MAX_VALUE (type);
8177 }
8178
8179 /* If the type and its base type have the same representation and the same
8180 name, then the type is not a subrange but a copy of the base type. */
8181 if ((TREE_CODE (base_type) == INTEGER_TYPE
8182 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8183 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8184 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8185 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8186 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8187 return false;
8188
8189 if (lowval)
8190 *lowval = low;
8191 if (highval)
8192 *highval = high;
8193 return true;
8194 }
8195
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If SHARED is true, reuse such a type that has already been constructed.  */

static tree
build_array_type_1 (tree elt_type, tree index_type, bool shared)
{
  tree t;

  /* Arrays of functions are invalid; degrade gracefully to int[].  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  layout_type (t);

  /* If the element type is incomplete at this point we get marked for
     structural equality.  Do not record these types in the canonical
     type hashtable.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    return t;

  if (shared)
    {
      /* Share via the type hash table, keyed on element and index types.  */
      inchash::hash hstate;
      hstate.add_object (TYPE_HASH (elt_type));
      if (index_type)
	hstate.add_object (TYPE_HASH (index_type));
      t = type_hash_canon (hstate.end (), t);
    }

  /* Only compute a canonical type if T is itself the canonical node.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Build the canonical array type from the canonical element and
	   index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				shared);
    }

  return t;
}
8249
/* Wrapper around build_array_type_1 with SHARED set to true, i.e. the
   resulting array type is reused from the type hash table when
   possible.  */

tree
build_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, true);
}
8257
/* Wrapper around build_array_type_1 with SHARED set to false, i.e. the
   resulting array type is always a fresh node.  */

tree
build_nonshared_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, false);
}
8265
/* Return a representation of ELT_TYPE[NELTS], using indices of type
   sizetype.  */

tree
build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
{
  /* The index domain runs from 0 to NELTS - 1 inclusive.  */
  return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
}
8274
8275 /* Recursively examines the array elements of TYPE, until a non-array
8276 element type is found. */
8277
8278 tree
8279 strip_array_types (tree type)
8280 {
8281 while (TREE_CODE (type) == ARRAY_TYPE)
8282 type = TREE_TYPE (type);
8283
8284 return type;
8285 }
8286
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the arguments.  Stop early once a structural
     type is seen, since the result is then ARGTYPES regardless.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  /* void_list_node terminates the list; re-append it below
	     rather than cons a copy.  */
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
8358
/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types)
{
  tree t;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* A function cannot return a function; degrade gracefully to int.  */
  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* If we already have such a type, use the old one.  */
  hstate.add_object (TYPE_HASH (value_type));
  type_hash_list (arg_types, hstate);
  t = type_hash_canon (hstate.end (), t);

  /* Set up the canonical type.  */
  any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    /* Build the canonical variant from the canonical return and
       argument types.  */
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
					      canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}
8406
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types from ARGP into a TREE_LIST, in reverse
     order.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* Varargs: restore the original order, leaving the list
	 unterminated by void_list_node.  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      /* The caller must not pass void_list_node as the final entry of a
	 varargs list.  */
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* A fixed-arity function with no arguments: (void).  */
    args = void_list_node;
  else
    {
      /* Restore the original order; LAST (the former head) is now the
	 tail, so terminate it with void_list_node.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
8439
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If additional arguments are provided, they are
   additional argument types.  The list of argument types must always
   be terminated by NULL_TREE.  */

tree
build_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  /* Forward the variadic arguments to the worker with VAARGS false, so
     the argument list is terminated with void_list_node.  */
  va_start (p, return_type);
  args = build_function_type_list_1 (false, return_type, p);
  va_end (p);
  return args;
}
8456
/* Build a variable argument function type.  The RETURN_TYPE is the
   type returned by the function.  If additional arguments are provided,
   they are additional argument types.  The list of argument types must
   always be terminated by NULL_TREE.  */

tree
build_varargs_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  /* Forward the variadic arguments to the worker with VAARGS true, so
     no void_list_node terminator is appended.  */
  va_start (p, return_type);
  args = build_function_type_list_1 (true, return_type, p);
  va_end (p);

  return args;
}
8474
8475 /* Build a function type. RETURN_TYPE is the type returned by the
8476 function; VAARGS indicates whether the function takes varargs. The
8477 function takes N named arguments, the types of which are provided in
8478 ARG_TYPES. */
8479
8480 static tree
8481 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8482 tree *arg_types)
8483 {
8484 int i;
8485 tree t = vaargs ? NULL_TREE : void_list_node;
8486
8487 for (i = n - 1; i >= 0; i--)
8488 t = tree_cons (NULL_TREE, arg_types[i], t);
8489
8490 return build_function_type (return_type, t);
8491 }
8492
/* Build a function type.  RETURN_TYPE is the type returned by the
   function.  The function takes N named arguments, the types of which
   are provided in ARG_TYPES.  */

tree
build_function_type_array (tree return_type, int n, tree *arg_types)
{
  /* VAARGS is false: the resulting type is fixed-arity.  */
  return build_function_type_array_1 (false, return_type, n, arg_types);
}
8502
/* Build a variable argument function type.  RETURN_TYPE is the type
   returned by the function.  The function takes N named arguments, the
   types of which are provided in ARG_TYPES.  */

tree
build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
{
  /* VAARGS is true: the argument list is left open for varargs.  */
  return build_function_type_array_1 (true, return_type, n, arg_types);
}
8512
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hstate.add_object (TYPE_HASH (basetype));
  hstate.add_object (TYPE_HASH (rettype));
  type_hash_list (argtypes, hstate);
  t = type_hash_canon (hstate.end (), t);

  /* Set up the canonical type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Skip the hidden "this" argument (list head) when canonicalizing:
     the canonical method type rebuilds it from the canonical basetype.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
8569
/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments and values are described by TYPE.
   If that type exists already, reuse it.
   TYPE must be a FUNCTION_TYPE node.  */

tree
build_method_type (tree basetype, tree type)
{
  gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);

  /* Delegate to the worker, splitting TYPE into its return and
     argument types.  */
  return build_method_type_directly (basetype,
				     TREE_TYPE (type),
				     TYPE_ARG_TYPES (type));
}
8584
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;
  inchash::hash hstate;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hstate.add_object (TYPE_HASH (basetype));
  hstate.add_object (TYPE_HASH (type));
  t = type_hash_canon (hstate.end (), t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only compute a canonical type if T is itself the canonical node.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	/* Build the canonical offset type from the canonical main
	   variant of the base type and the canonical value type.  */
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
8623
/* Create a complex type whose components are COMPONENT_TYPE.
   The result carries the same qualifiers as COMPONENT_TYPE.  */

tree
build_complex_type (tree component_type)
{
  tree t;
  inchash::hash hstate;

  /* Only integral, floating-point and fixed-point components are
     supported.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  t = make_node (COMPLEX_TYPE);

  /* The component type recorded is the unqualified main variant;
     qualifiers are re-applied to the whole complex type at the end.  */
  TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hstate.add_object (TYPE_HASH (component_type));
  t = type_hash_canon (hstate.end (), t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only compute a canonical type if T is itself the canonical node.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (component_type) != component_type)
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (component_type));
    }

  /* We need to create a name, since complex is a fundamental type.  */
  if (! TYPE_NAME (t))
    {
      const char *name;
      if (component_type == char_type_node)
	name = "complex char";
      else if (component_type == signed_char_type_node)
	name = "complex signed char";
      else if (component_type == unsigned_char_type_node)
	name = "complex unsigned char";
      else if (component_type == short_integer_type_node)
	name = "complex short int";
      else if (component_type == short_unsigned_type_node)
	name = "complex short unsigned int";
      else if (component_type == integer_type_node)
	name = "complex int";
      else if (component_type == unsigned_type_node)
	name = "complex unsigned int";
      else if (component_type == long_integer_type_node)
	name = "complex long int";
      else if (component_type == long_unsigned_type_node)
	name = "complex long unsigned int";
      else if (component_type == long_long_integer_type_node)
	name = "complex long long int";
      else if (component_type == long_long_unsigned_type_node)
	name = "complex long long unsigned int";
      else
	/* Non-standard component types get no name.  */
	name = 0;

      if (name != 0)
	TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
				    get_identifier (name), t);
    }

  /* Re-apply the component type's qualifiers to the complex type.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
8693
8694 /* If TYPE is a real or complex floating-point type and the target
8695 does not directly support arithmetic on TYPE then return the wider
8696 type to be used for arithmetic on TYPE. Otherwise, return
8697 NULL_TREE. */
8698
8699 tree
8700 excess_precision_type (tree type)
8701 {
8702 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8703 {
8704 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8705 switch (TREE_CODE (type))
8706 {
8707 case REAL_TYPE:
8708 switch (flt_eval_method)
8709 {
8710 case 1:
8711 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8712 return double_type_node;
8713 break;
8714 case 2:
8715 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8716 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8717 return long_double_type_node;
8718 break;
8719 default:
8720 gcc_unreachable ();
8721 }
8722 break;
8723 case COMPLEX_TYPE:
8724 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8725 return NULL_TREE;
8726 switch (flt_eval_method)
8727 {
8728 case 1:
8729 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8730 return complex_double_type_node;
8731 break;
8732 case 2:
8733 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8734 || (TYPE_MODE (TREE_TYPE (type))
8735 == TYPE_MODE (double_type_node)))
8736 return complex_long_double_type_node;
8737 break;
8738 default:
8739 gcc_unreachable ();
8740 }
8741 break;
8742 default:
8743 break;
8744 }
8745 }
8746 return NULL_TREE;
8747 }
8748 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  /* WIN tracks the innermost expression it is safe to strip down to.  */
  tree win = op;

  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion is an extension,
	 negative means it is a truncation.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in for_type and
     in that case convert it.  */
  if (for_type
      && TREE_CODE (win) == INTEGER_CST
      && TREE_TYPE (win) != for_type
      && int_fits_type_p (win, for_type))
    win = fold_convert (for_type, win);

  return win;
}
8837 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  /* UNS records the kind (zero vs. sign) of the first extension
     stripped; FIRST is true until one has been stripped.  */
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  while (TREE_CODE (op) == NOP_EXPR)
    {
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
8931 \f
/* Returns true if integer constant C has a value that is permissible
   for type TYPE (an INTEGER_TYPE).  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types, */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::ne_p (wi::zext (c, prec - 1), c))
	    return false;
	}
      else if (wi::neg_p (c))
	return false;
    }

  /* If we haven't been able to decide at this point, there nothing more we
     can check ourselves here.  Look at the base type if we have one and it
     has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (c, type);
}
9023
9024 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9025 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9026 represented (assuming two's-complement arithmetic) within the bit
9027 precision of the type are returned instead. */
9028
9029 void
9030 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9031 {
9032 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9033 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9034 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9035 else
9036 {
9037 if (TYPE_UNSIGNED (type))
9038 mpz_set_ui (min, 0);
9039 else
9040 {
9041 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9042 wi::to_mpz (mn, min, SIGNED);
9043 }
9044 }
9045
9046 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9047 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9048 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9049 else
9050 {
9051 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9052 wi::to_mpz (mn, max, TYPE_SIGN (type));
9053 }
9054 }
9055
9056 /* Return true if VAR is an automatic variable defined in function FN. */
9057
9058 bool
9059 auto_var_in_fn_p (const_tree var, const_tree fn)
9060 {
9061 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9062 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9063 || TREE_CODE (var) == PARM_DECL)
9064 && ! TREE_STATIC (var))
9065 || TREE_CODE (var) == LABEL_DECL
9066 || TREE_CODE (var) == RESULT_DECL));
9067 }
9068
9069 /* Subprogram of following function. Called by walk_tree.
9070
9071 Return *TP if it is an automatic variable or parameter of the
9072 function passed in as DATA. */
9073
9074 static tree
9075 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9076 {
9077 tree fn = (tree) data;
9078
9079 if (TYPE_P (*tp))
9080 *walk_subtrees = 0;
9081
9082 else if (DECL_P (*tp)
9083 && auto_var_in_fn_p (*tp, fn))
9084 return *tp;
9085
9086 return NULL_TREE;
9087 }
9088
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

  /* Test if T is either variable (if FN is zero) or an expression containing
     a variable in FN.  If TYPE isn't gimplified, return true also if
     gimplify_one_sizepos would gimplify the expression into a local
     variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& TREE_CODE (_t) != INTEGER_CST				\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& !is_gimple_sizepos (_t))				\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* A pointed-to or element type that is variably modified makes
	 this type variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
9195
9196 /* Given a DECL or TYPE, return the scope in which it was declared, or
9197 NULL_TREE if there is no containing scope. */
9198
9199 tree
9200 get_containing_scope (const_tree t)
9201 {
9202 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9203 }
9204
9205 /* Return the innermost context enclosing DECL that is
9206 a FUNCTION_DECL, or zero if none. */
9207
9208 tree
9209 decl_function_context (const_tree decl)
9210 {
9211 tree context;
9212
9213 if (TREE_CODE (decl) == ERROR_MARK)
9214 return 0;
9215
9216 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9217 where we look up the function at runtime. Such functions always take
9218 a first argument of type 'pointer to real context'.
9219
9220 C++ should really be fixed to use DECL_CONTEXT for the real context,
9221 and use something else for the "virtual context". */
9222 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9223 context
9224 = TYPE_MAIN_VARIANT
9225 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9226 else
9227 context = DECL_CONTEXT (decl);
9228
9229 while (context && TREE_CODE (context) != FUNCTION_DECL)
9230 {
9231 if (TREE_CODE (context) == BLOCK)
9232 context = BLOCK_SUPERCONTEXT (context);
9233 else
9234 context = get_containing_scope (context);
9235 }
9236
9237 return context;
9238 }
9239
9240 /* Return the innermost context enclosing DECL that is
9241 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9242 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9243
9244 tree
9245 decl_type_context (const_tree decl)
9246 {
9247 tree context = DECL_CONTEXT (decl);
9248
9249 while (context)
9250 switch (TREE_CODE (context))
9251 {
9252 case NAMESPACE_DECL:
9253 case TRANSLATION_UNIT_DECL:
9254 return NULL_TREE;
9255
9256 case RECORD_TYPE:
9257 case UNION_TYPE:
9258 case QUAL_UNION_TYPE:
9259 return context;
9260
9261 case TYPE_DECL:
9262 case FUNCTION_DECL:
9263 context = DECL_CONTEXT (context);
9264 break;
9265
9266 case BLOCK:
9267 context = BLOCK_SUPERCONTEXT (context);
9268 break;
9269
9270 default:
9271 gcc_unreachable ();
9272 }
9273
9274 return NULL_TREE;
9275 }
9276
9277 /* CALL is a CALL_EXPR. Return the declaration for the function
9278 called, or NULL_TREE if the called function cannot be
9279 determined. */
9280
9281 tree
9282 get_callee_fndecl (const_tree call)
9283 {
9284 tree addr;
9285
9286 if (call == error_mark_node)
9287 return error_mark_node;
9288
9289 /* It's invalid to call this function with anything but a
9290 CALL_EXPR. */
9291 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9292
9293 /* The first operand to the CALL is the address of the function
9294 called. */
9295 addr = CALL_EXPR_FN (call);
9296
9297 /* If there is no function, return early. */
9298 if (addr == NULL_TREE)
9299 return NULL_TREE;
9300
9301 STRIP_NOPS (addr);
9302
9303 /* If this is a readonly function pointer, extract its initial value. */
9304 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9305 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9306 && DECL_INITIAL (addr))
9307 addr = DECL_INITIAL (addr);
9308
9309 /* If the address is just `&f' for some function `f', then we know
9310 that `f' is being called. */
9311 if (TREE_CODE (addr) == ADDR_EXPR
9312 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9313 return TREE_OPERAND (addr, 0);
9314
9315 /* We couldn't figure out what was being called. */
9316 return NULL_TREE;
9317 }
9318
9319 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9320 return the associated function code, otherwise return CFN_LAST. */
9321
9322 combined_fn
9323 get_call_combined_fn (const_tree call)
9324 {
9325 /* It's invalid to call this function with anything but a CALL_EXPR. */
9326 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9327
9328 if (!CALL_EXPR_FN (call))
9329 return as_combined_fn (CALL_EXPR_IFN (call));
9330
9331 tree fndecl = get_callee_fndecl (call);
9332 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9333 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9334
9335 return CFN_LAST;
9336 }
9337
9338 #define TREE_MEM_USAGE_SPACES 40
9339
/* Print debugging information about tree nodes generated during the compile,
   and any language-specific information.  */

void
dump_tree_statistics (void)
{
  if (GATHER_STATISTICS)
    {
      int i;
      int total_nodes, total_bytes;
      /* Per-kind node and byte counts.  */
      fprintf (stderr, "\nKind                   Nodes      Bytes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      total_nodes = total_bytes = 0;
      for (i = 0; i < (int) all_kinds; i++)
	{
	  fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
		   tree_node_counts[i], tree_node_sizes[i]);
	  total_nodes += tree_node_counts[i];
	  total_bytes += tree_node_sizes[i];
	}
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      /* Per-tree-code node counts.  */
      fprintf (stderr, "Code                   Nodes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      for (i = 0; i < (int) MAX_TREE_CODES; i++)
	fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
		 tree_code_counts[i]);
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      fprintf (stderr, "\n");
      ssanames_print_statistics ();
      fprintf (stderr, "\n");
      phinodes_print_statistics ();
      fprintf (stderr, "\n");
    }
  else
    fprintf (stderr, "(No per-node statistics)\n");

  /* These statistics are collected even without GATHER_STATISTICS.  */
  print_type_hash_statistics ();
  print_debug_expr_statistics ();
  print_value_expr_statistics ();
  lang_hooks.print_statistics ();
}
9383 \f
9384 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9385
/* Fold the BITS high-order bits of VALUE into the running CRC-32
   checksum CHKSUM, using the polynomial 0x04c11db7, and return the
   updated checksum.  */

static unsigned
crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
{
  while (bits-- > 0)
    {
      unsigned feedback
	= ((value ^ chksum) & 0x80000000) ? 0x04c11db7 : 0;

      chksum = (chksum << 1) ^ feedback;
      value <<= 1;
    }

  return chksum;
}

/* Generate a crc32 of a 32-bit unsigned VALUE, folded into CHKSUM.  */

unsigned
crc32_unsigned (unsigned chksum, unsigned value)
{
  return crc32_unsigned_bits (chksum, value, 32);
}

/* Generate a crc32 of the single byte BYTE, folded into CHKSUM.  */

unsigned
crc32_byte (unsigned chksum, char byte)
{
  return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
}

/* Generate a crc32 of STRING, including its terminating NUL,
   folded into CHKSUM.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  char c;

  do
    {
      c = *string++;
      chksum = crc32_byte (chksum, c);
    }
  while (c);

  return chksum;
}
9432
/* P is a string that will be used in a symbol.  Mask out any characters
   that are not valid in that context.  */

void
clean_symbol_name (char *p)
{
  for (; *p; p++)
    {
      int valid = ISALNUM (*p);
#ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
      valid = valid || *p == '$';
#endif
#ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but... */
      valid = valid || *p == '.';
#endif
      /* Anything else is replaced by an underscore.  */
      if (!valid)
	*p = '_';
    }
}
9450
9451 /* For anonymous aggregate types, we need some sort of name to
9452 hold on to. In practice, this should not appear, but it should
9453 not be harmful if it does. */
9454 bool
9455 anon_aggrname_p(const_tree id_node)
9456 {
9457 #ifndef NO_DOT_IN_LABEL
9458 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9459 && IDENTIFIER_POINTER (id_node)[1] == '_');
9460 #else /* NO_DOT_IN_LABEL */
9461 #ifndef NO_DOLLAR_IN_LABEL
9462 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9463 && IDENTIFIER_POINTER (id_node)[1] == '_');
9464 #else /* NO_DOLLAR_IN_LABEL */
9465 #define ANON_AGGRNAME_PREFIX "__anon_"
9466 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9467 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9468 #endif /* NO_DOLLAR_IN_LABEL */
9469 #endif /* NO_DOT_IN_LABEL */
9470 }
9471
/* Return a format for an anonymous aggregate name.  */
const char *
anon_aggrname_format ()
{
#if !defined (NO_DOT_IN_LABEL)
  return "._%d";
#elif !defined (NO_DOLLAR_IN_LABEL)
  return "$_%d";
#else
  return "__anon_%d";
#endif
}
9486
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (strncmp (type, "sub_", 4) == 0
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      /* Buffer holds FILE, a '_', 8 hex digits of crc, another '_',
	 up to 16 hex digits of the random seed, and the NUL.  */
      len = strlen (file);
      q = (char *) alloca (9 + 17 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace characters not valid in a symbol with '_'.  */
  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
9561 \f
9562 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9563
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: measure the space needed for the
     "expected A or B or ..." message.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      /* Second pass: build the message in a stack buffer.  */
      char *tmp;
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* No expected codes were supplied; give a vaguer message.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9607
/* Complain that the tree code of NODE does match the expected 0
   terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
   the caller.  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: measure the space needed for the
     "A or B or ..." list of disallowed codes.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build the list in a stack buffer.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9644
9645 /* Similar to tree_check_failed, except that we check for a class of tree
9646 code, given in CL. */
9647
9648 void
9649 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9650 const char *file, int line, const char *function)
9651 {
9652 internal_error
9653 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9654 TREE_CODE_CLASS_STRING (cl),
9655 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9656 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9657 }
9658
9659 /* Similar to tree_check_failed, except that instead of specifying a
9660 dozen codes, use the knowledge that they're all sequential. */
9661
9662 void
9663 tree_range_check_failed (const_tree node, const char *file, int line,
9664 const char *function, enum tree_code c1,
9665 enum tree_code c2)
9666 {
9667 char *buffer;
9668 unsigned length = 0;
9669 unsigned int c;
9670
9671 for (c = c1; c <= c2; ++c)
9672 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9673
9674 length += strlen ("expected ");
9675 buffer = (char *) alloca (length);
9676 length = 0;
9677
9678 for (c = c1; c <= c2; ++c)
9679 {
9680 const char *prefix = length ? " or " : "expected ";
9681
9682 strcpy (buffer + length, prefix);
9683 length += strlen (prefix);
9684 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9685 length += strlen (get_tree_code_name ((enum tree_code) c));
9686 }
9687
9688 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9689 buffer, get_tree_code_name (TREE_CODE (node)),
9690 function, trim_filename (file), line);
9691 }
9692
9693
9694 /* Similar to tree_check_failed, except that we check that a tree does
9695 not have the specified code, given in CL. */
9696
9697 void
9698 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9699 const char *file, int line, const char *function)
9700 {
9701 internal_error
9702 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9703 TREE_CODE_CLASS_STRING (cl),
9704 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9705 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9706 }
9707
9708
9709 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9710
9711 void
9712 omp_clause_check_failed (const_tree node, const char *file, int line,
9713 const char *function, enum omp_clause_code code)
9714 {
9715 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9716 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9717 function, trim_filename (file), line);
9718 }
9719
9720
9721 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9722
9723 void
9724 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9725 const char *function, enum omp_clause_code c1,
9726 enum omp_clause_code c2)
9727 {
9728 char *buffer;
9729 unsigned length = 0;
9730 unsigned int c;
9731
9732 for (c = c1; c <= c2; ++c)
9733 length += 4 + strlen (omp_clause_code_name[c]);
9734
9735 length += strlen ("expected ");
9736 buffer = (char *) alloca (length);
9737 length = 0;
9738
9739 for (c = c1; c <= c2; ++c)
9740 {
9741 const char *prefix = length ? " or " : "expected ";
9742
9743 strcpy (buffer + length, prefix);
9744 length += strlen (prefix);
9745 strcpy (buffer + length, omp_clause_code_name[c]);
9746 length += strlen (omp_clause_code_name[c]);
9747 }
9748
9749 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9750 buffer, omp_clause_code_name[TREE_CODE (node)],
9751 function, trim_filename (file), line);
9752 }
9753
9754
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

/* Printable names for the tree_node_structure_enum values, in the order
   treestruct.def defines them; used by the check-failure diagnostics
   below.  */
static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value EN to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9764
9765 /* Similar to tree_class_check_failed, except that we check for
9766 whether CODE contains the tree structure identified by EN. */
9767
9768 void
9769 tree_contains_struct_check_failed (const_tree node,
9770 const enum tree_node_structure_enum en,
9771 const char *file, int line,
9772 const char *function)
9773 {
9774 internal_error
9775 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9776 TS_ENUM_NAME (en),
9777 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9778 }
9779
9780
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) element array.  */
9783
9784 void
9785 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9786 const char *function)
9787 {
9788 internal_error
9789 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9790 idx + 1, len, function, trim_filename (file), line);
9791 }
9792
9793 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9794 (dynamically sized) vector. */
9795
9796 void
9797 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9798 const char *function)
9799 {
9800 internal_error
9801 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9802 idx + 1, len, function, trim_filename (file), line);
9803 }
9804
9805 /* Similar to above, except that the check is for the bounds of the operand
9806 vector of an expression node EXP. */
9807
9808 void
9809 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9810 int line, const char *function)
9811 {
9812 enum tree_code code = TREE_CODE (exp);
9813 internal_error
9814 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9815 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9816 function, trim_filename (file), line);
9817 }
9818
9819 /* Similar to above, except that the check is for the number of
9820 operands of an OMP_CLAUSE node. */
9821
9822 void
9823 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9824 int line, const char *function)
9825 {
9826 internal_error
9827 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9828 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9829 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9830 trim_filename (file), line);
9831 }
9832 #endif /* ENABLE_TREE_CHECKING */
9833 \f
9834 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9835 and mapped to the machine mode MODE. Initialize its fields and build
9836 the information necessary for debugging output. */
9837
static tree
make_vector_type (tree innertype, int nunits, machine_mode mode)
{
  tree t;
  inchash::hash hstate;
  /* Vector types are always built over the main variant of the element
     type; qualifiers/attributes are re-applied at the end.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  /* Compute TYPE_CANONICAL: inherit structural equality from the
     element type (or force it in LTO); otherwise, when this type is not
     already canonical, recurse with VOIDmode to build the canonical
     vector over the canonical element type.
     NOTE(review): the comparison is TYPE_CANONICAL (mv_innertype)
     != innertype (not != mv_innertype) — presumably intentional so a
     qualified INNERTYPE also triggers the recursion; confirm before
     changing.  Boolean vectors are excluded from the recursion.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Hash on code, unit count, mode and element-type hash, then share an
     existing identical type via the type hash table if one exists.  The
     field order here must stay in sync with other users of the type
     hash.  */
  hstate.add_wide_int (VECTOR_TYPE);
  hstate.add_wide_int (nunits);
  hstate.add_wide_int (mode);
  hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
  t = type_hash_canon (hstate.end (), t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return, re-applying
     INNERTYPE's attributes and qualifiers.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
9876
/* Return an integer type node of SIZE bits, unsigned iff UNSIGNEDP,
   reusing one of the standard C type nodes (or an enabled __intN node)
   when SIZE matches its precision, and creating a fresh type otherwise.
   The checks are ordered: on targets where several *_TYPE_SIZE values
   coincide, the earliest match wins, so do not reorder them.  */
static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
            : long_long_integer_type_node);

  /* Target-specific __intN types, only when enabled for this target.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (size == int_n_data[i].bitsize
	&& int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
	      : int_n_trees[i].signed_type);

  /* No standard node of this precision — build a new type.  */
  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}
9905
9906 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9907
static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  /* Reuse one of the 16 prebuilt fract nodes (sat/non-sat x
     signed/unsigned x four widths) when SIZE matches; the checks are
     ordered so the narrowest matching width wins if sizes coincide.  */
  if (satp)
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  else
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  /* No prebuilt node of this size — create one.  */
  return make_fract_type (size, unsignedp, satp);
}
9942
9943 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9944
static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  /* Mirror of make_or_reuse_fract_type for the accum family: reuse one
     of the 16 prebuilt accum nodes when SIZE matches, narrowest width
     first.  */
  if (satp)
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  else
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  /* No prebuilt node of this size — create one.  */
  return make_accum_type (size, unsignedp, satp);
}
9979
9980
9981 /* Create an atomic variant node for TYPE. This routine is called
9982 during initialization of data types to create the 5 basic atomic
9983 types. The generic build_variant_type function requires these to
9984 already be set up in order to function properly, so cannot be
9985 called from there. If ALIGN is non-zero, then ensure alignment is
9986 overridden to this value. */
9987
9988 static tree
9989 build_atomic_base (tree type, unsigned int align)
9990 {
9991 tree t;
9992
9993 /* Make sure its not already registered. */
9994 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9995 return t;
9996
9997 t = build_variant_type_copy (type);
9998 set_type_quals (t, TYPE_QUAL_ATOMIC);
9999
10000 if (align)
10001 TYPE_ALIGN (t) = align;
10002
10003 return t;
10004 }
10005
10006 /* Create nodes for all integer types (and error_mark_node) using the sizes
10007 of C datatypes. SIGNED_CHAR specifies whether char is signed,
10008 SHORT_DOUBLE specifies whether double should be of the same precision
10009 as float. */
10010
void
build_common_tree_nodes (bool signed_char, bool short_double)
{
  int i;

  /* error_mark_node is self-typed so propagating TREE_TYPE through
     erroneous trees stays on error_mark_node.  */
  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Build signed/unsigned pairs for each target __intN type; only those
     wider than long long and enabled for the target are entered into
     the integer_types ranking table.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
      TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
      TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);

      if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
	  && int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  SIZE_TYPE is a target-provided
     string; match it against the standard names first, then against the
     enabled __intN types.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      int i;

      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      /* A target whose SIZE_TYPE matches nothing above is misconfigured.  */
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* Pointer-bounds type for Pointer Bounds Checker (MPX).  */
  pointer_bounds_type_node = targetm.chkp_bound_type ();

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  /* FILE * is just void * until a front end says otherwise.  */
  fileptr_type_node = ptr_type_node;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  /* SHORT_DOUBLE makes double share float's precision.  */
  double_type_node = make_node (REAL_TYPE);
  if (short_double)
    TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
  else
    TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);

  /* Decimal float types.  */
  dfloat32_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
  layout_type (dfloat32_type_node);
  SET_TYPE_MODE (dfloat32_type_node, SDmode);
  dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);

  dfloat64_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
  layout_type (dfloat64_type_node);
  SET_TYPE_MODE (dfloat64_type_node, DDmode);
  dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);

  dfloat128_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
  layout_type (dfloat128_type_node);
  SET_TYPE_MODE (dfloat128_type_node, TDmode);
  dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);

  complex_integer_type_node = build_complex_type (integer_type_node);
  complex_float_type_node = build_complex_type (float_type_node);
  complex_double_type_node = build_complex_type (double_type_node);
  complex_long_double_type_node = build_complex_type (long_double_type_node);

  /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

  /* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

  /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
		(GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }
}
10284
10285 /* Modify DECL for given flags.
10286 TM_PURE attribute is set only on types, so the function will modify
10287 DECL's type when ECF_TM_PURE is used. */
10288
void
set_call_expr_flags (tree decl, int flags)
{
  /* Translate ECF_* call flags into the corresponding bits on DECL.
     Each flag is handled independently; a caller may combine any of
     them in FLAGS.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* NORETURN is encoded as TREE_THIS_VOLATILE on FUNCTION_DECLs.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* LEAF has no DECL bit; it is represented as a "leaf" attribute.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  /* TM_PURE lives on the type, not the decl, and only matters when
     transactional memory is enabled.  */
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
10318
10319
10320 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10321
10322 static void
10323 local_define_builtin (const char *name, tree type, enum built_in_function code,
10324 const char *library_name, int ecf_flags)
10325 {
10326 tree decl;
10327
10328 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10329 library_name, NULL_TREE);
10330 set_call_expr_flags (decl, ecf_flags);
10331
10332 set_builtin_decl (code, decl, true);
10333 }
10334
10335 /* Call this function after instantiating all builtins that the language
10336 front end cares about. This will build the rest of the builtins
10337 and internal functions that are relied upon by the tree optimizers and
10338 the middle-end. */
10339
10340 void
10341 build_common_builtin_nodes (void)
10342 {
10343 tree tmp, ftype;
10344 int ecf_flags;
10345
10346 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10347 {
10348 ftype = build_function_type (void_type_node, void_list_node);
10349 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10350 "__builtin_unreachable",
10351 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10352 | ECF_CONST);
10353 }
10354
10355 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10356 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10357 {
10358 ftype = build_function_type_list (ptr_type_node,
10359 ptr_type_node, const_ptr_type_node,
10360 size_type_node, NULL_TREE);
10361
10362 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10363 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10364 "memcpy", ECF_NOTHROW | ECF_LEAF);
10365 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10366 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10367 "memmove", ECF_NOTHROW | ECF_LEAF);
10368 }
10369
10370 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10371 {
10372 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10373 const_ptr_type_node, size_type_node,
10374 NULL_TREE);
10375 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10376 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10377 }
10378
10379 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10380 {
10381 ftype = build_function_type_list (ptr_type_node,
10382 ptr_type_node, integer_type_node,
10383 size_type_node, NULL_TREE);
10384 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10385 "memset", ECF_NOTHROW | ECF_LEAF);
10386 }
10387
10388 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10389 {
10390 ftype = build_function_type_list (ptr_type_node,
10391 size_type_node, NULL_TREE);
10392 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10393 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10394 }
10395
10396 ftype = build_function_type_list (ptr_type_node, size_type_node,
10397 size_type_node, NULL_TREE);
10398 local_define_builtin ("__builtin_alloca_with_align", ftype,
10399 BUILT_IN_ALLOCA_WITH_ALIGN,
10400 "__builtin_alloca_with_align",
10401 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10402
10403 /* If we're checking the stack, `alloca' can throw. */
10404 if (flag_stack_check)
10405 {
10406 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10407 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10408 }
10409
10410 ftype = build_function_type_list (void_type_node,
10411 ptr_type_node, ptr_type_node,
10412 ptr_type_node, NULL_TREE);
10413 local_define_builtin ("__builtin_init_trampoline", ftype,
10414 BUILT_IN_INIT_TRAMPOLINE,
10415 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10416 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10417 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10418 "__builtin_init_heap_trampoline",
10419 ECF_NOTHROW | ECF_LEAF);
10420
10421 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10422 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10423 BUILT_IN_ADJUST_TRAMPOLINE,
10424 "__builtin_adjust_trampoline",
10425 ECF_CONST | ECF_NOTHROW);
10426
10427 ftype = build_function_type_list (void_type_node,
10428 ptr_type_node, ptr_type_node, NULL_TREE);
10429 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10430 BUILT_IN_NONLOCAL_GOTO,
10431 "__builtin_nonlocal_goto",
10432 ECF_NORETURN | ECF_NOTHROW);
10433
10434 ftype = build_function_type_list (void_type_node,
10435 ptr_type_node, ptr_type_node, NULL_TREE);
10436 local_define_builtin ("__builtin_setjmp_setup", ftype,
10437 BUILT_IN_SETJMP_SETUP,
10438 "__builtin_setjmp_setup", ECF_NOTHROW);
10439
10440 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10441 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10442 BUILT_IN_SETJMP_RECEIVER,
10443 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10444
10445 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10446 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10447 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10448
10449 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10450 local_define_builtin ("__builtin_stack_restore", ftype,
10451 BUILT_IN_STACK_RESTORE,
10452 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10453
10454 /* If there's a possibility that we might use the ARM EABI, build the
10455 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10456 if (targetm.arm_eabi_unwinder)
10457 {
10458 ftype = build_function_type_list (void_type_node, NULL_TREE);
10459 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10460 BUILT_IN_CXA_END_CLEANUP,
10461 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10462 }
10463
10464 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10465 local_define_builtin ("__builtin_unwind_resume", ftype,
10466 BUILT_IN_UNWIND_RESUME,
10467 ((targetm_common.except_unwind_info (&global_options)
10468 == UI_SJLJ)
10469 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10470 ECF_NORETURN);
10471
10472 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10473 {
10474 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10475 NULL_TREE);
10476 local_define_builtin ("__builtin_return_address", ftype,
10477 BUILT_IN_RETURN_ADDRESS,
10478 "__builtin_return_address",
10479 ECF_NOTHROW);
10480 }
10481
10482 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10483 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10484 {
10485 ftype = build_function_type_list (void_type_node, ptr_type_node,
10486 ptr_type_node, NULL_TREE);
10487 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10488 local_define_builtin ("__cyg_profile_func_enter", ftype,
10489 BUILT_IN_PROFILE_FUNC_ENTER,
10490 "__cyg_profile_func_enter", 0);
10491 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10492 local_define_builtin ("__cyg_profile_func_exit", ftype,
10493 BUILT_IN_PROFILE_FUNC_EXIT,
10494 "__cyg_profile_func_exit", 0);
10495 }
10496
10497 /* The exception object and filter values from the runtime. The argument
10498 must be zero before exception lowering, i.e. from the front end. After
10499 exception lowering, it will be the region number for the exception
10500 landing pad. These functions are PURE instead of CONST to prevent
10501 them from being hoisted past the exception edge that will initialize
10502 its value in the landing pad. */
10503 ftype = build_function_type_list (ptr_type_node,
10504 integer_type_node, NULL_TREE);
10505 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10506 /* Only use TM_PURE if we have TM language support. */
10507 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10508 ecf_flags |= ECF_TM_PURE;
10509 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10510 "__builtin_eh_pointer", ecf_flags);
10511
10512 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10513 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10514 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10515 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10516
10517 ftype = build_function_type_list (void_type_node,
10518 integer_type_node, integer_type_node,
10519 NULL_TREE);
10520 local_define_builtin ("__builtin_eh_copy_values", ftype,
10521 BUILT_IN_EH_COPY_VALUES,
10522 "__builtin_eh_copy_values", ECF_NOTHROW);
10523
10524 /* Complex multiplication and division. These are handled as builtins
10525 rather than optabs because emit_library_call_value doesn't support
10526 complex. Further, we can do slightly better with folding these
10527 beasties if the real and complex parts of the arguments are separate. */
10528 {
10529 int mode;
10530
10531 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10532 {
10533 char mode_name_buf[4], *q;
10534 const char *p;
10535 enum built_in_function mcode, dcode;
10536 tree type, inner_type;
10537 const char *prefix = "__";
10538
10539 if (targetm.libfunc_gnu_prefix)
10540 prefix = "__gnu_";
10541
10542 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10543 if (type == NULL)
10544 continue;
10545 inner_type = TREE_TYPE (type);
10546
10547 ftype = build_function_type_list (type, inner_type, inner_type,
10548 inner_type, inner_type, NULL_TREE);
10549
10550 mcode = ((enum built_in_function)
10551 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10552 dcode = ((enum built_in_function)
10553 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10554
10555 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10556 *q = TOLOWER (*p);
10557 *q = '\0';
10558
10559 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10560 NULL);
10561 local_define_builtin (built_in_names[mcode], ftype, mcode,
10562 built_in_names[mcode],
10563 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10564
10565 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10566 NULL);
10567 local_define_builtin (built_in_names[dcode], ftype, dcode,
10568 built_in_names[dcode],
10569 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10570 }
10571 }
10572
10573 init_internal_fns ();
10574 }
10575
10576 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10577 better way.
10578
10579 If we requested a pointer to a vector, build up the pointers that
10580 we stripped off while looking for the inner type. Similarly for
10581 return values from functions.
10582
10583 The argument TYPE is the top of the chain, and BOTTOM is the
10584 new type which we will point to. */
10585
10586 tree
10587 reconstruct_complex_type (tree type, tree bottom)
10588 {
10589 tree inner, outer;
10590
10591 if (TREE_CODE (type) == POINTER_TYPE)
10592 {
10593 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10594 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10595 TYPE_REF_CAN_ALIAS_ALL (type));
10596 }
10597 else if (TREE_CODE (type) == REFERENCE_TYPE)
10598 {
10599 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10600 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10601 TYPE_REF_CAN_ALIAS_ALL (type));
10602 }
10603 else if (TREE_CODE (type) == ARRAY_TYPE)
10604 {
10605 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10606 outer = build_array_type (inner, TYPE_DOMAIN (type));
10607 }
10608 else if (TREE_CODE (type) == FUNCTION_TYPE)
10609 {
10610 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10611 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10612 }
10613 else if (TREE_CODE (type) == METHOD_TYPE)
10614 {
10615 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10616 /* The build_method_type_directly() routine prepends 'this' to argument list,
10617 so we must compensate by getting rid of it. */
10618 outer
10619 = build_method_type_directly
10620 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10621 inner,
10622 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10623 }
10624 else if (TREE_CODE (type) == OFFSET_TYPE)
10625 {
10626 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10627 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10628 }
10629 else
10630 return bottom;
10631
10632 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10633 TYPE_QUALS (type));
10634 }
10635
10636 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10637 the inner type. */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  int nunits;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      /* A real vector mode knows its own element count.  */
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* Check that there are no leftover bits.  */
      gcc_assert (GET_MODE_BITSIZE (mode)
		  % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);

      /* An integer mode can back a vector whose total bit size it
	 matches; derive the element count from the two sizes.  */
      nunits = GET_MODE_BITSIZE (mode)
	       / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      /* BLKmode and anything else is not a valid vector mode here.  */
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}
10669
10670 /* Similarly, but takes the inner type and number of units, which must be
10671 a power of two. */
10672
tree
build_vector_type (tree innertype, int nunits)
{
  /* Passing VOIDmode lets make_vector_type choose the machine mode
     itself (presumably from INNERTYPE and NUNITS — see make_vector_type).  */
  return make_vector_type (innertype, nunits, VOIDmode);
}
10678
10679 /* Build truth vector with specified length and number of units. */
10680
tree
build_truth_vector_type (unsigned nunits, unsigned vector_size)
{
  /* Ask the target which machine mode it uses for a mask of this shape.  */
  machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
							    vector_size);

  gcc_assert (mask_mode != VOIDmode);

  unsigned HOST_WIDE_INT vsize;
  if (mask_mode == BLKmode)
    /* No suitable integer/vector mode: use the full size in bits.  */
    vsize = vector_size * BITS_PER_UNIT;
  else
    vsize = GET_MODE_BITSIZE (mask_mode);

  /* Each boolean element gets an equal share of the mask's bits;
     the division must be exact.  */
  unsigned HOST_WIDE_INT esize = vsize / nunits;
  gcc_assert (esize * nunits == vsize);

  tree bool_type = build_nonstandard_boolean_type (esize);

  return make_vector_type (bool_type, nunits, mask_mode);
}
10702
10703 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10704
10705 tree
10706 build_same_sized_truth_vector_type (tree vectype)
10707 {
10708 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10709 return vectype;
10710
10711 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10712
10713 if (!size)
10714 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10715
10716 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10717 }
10718
10719 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10720
tree
build_opaque_vector_type (tree innertype, int nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  /* Link the new variant into T's variant chain, sharing T's canonical
     and main-variant types so the two compare as variants of each other.  */
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}
10743
10744
10745 /* Given an initializer INIT, return TRUE if INIT is zero or some
10746 aggregate of zeros. Otherwise return FALSE. */
bool
initializer_zerop (const_tree init)
{
  tree elt;

  STRIP_NOPS (init);

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      return integer_zerop (init);

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      return real_zerop (init)
	&& ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));

    case FIXED_CST:
      return fixed_zerop (init);

    case COMPLEX_CST:
      /* For a real-valued complex, both parts must be zero and neither
	 may be a negative zero.  */
      return integer_zerop (init)
	|| (real_zerop (init)
	    && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	    && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));

    case VECTOR_CST:
      {
	/* A vector constant is zero iff every element is.  */
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
	  if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
	    return false;
	return true;
      }

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;

	/* A clobber marks the storage as undefined, not as zero.  */
	if (TREE_CLOBBER_P (init))
	  return false;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt))
	    return false;
	return true;
      }

    case STRING_CST:
      {
	int i;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    return false;

	return true;
      }

    default:
      return false;
    }
}
10813
10814 /* Check if vector VEC consists of all the equal elements and
10815 that the number of elements corresponds to the type of VEC.
10816 The function returns first element of the vector
10817 or NULL_TREE if the vector is not uniform. */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned i;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* Every element must compare equal to the first one.  */
      first = VECTOR_CST_ELT (vec, 0);
      for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
	if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
	  return NULL_TREE;

      return first;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR)
    {
      first = error_mark_node;

      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
        {
          if (i == 0)
            {
              first = t;
              continue;
            }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
        }
      /* Require the element count to match the vector type exactly
	 (presumably because a short CONSTRUCTOR leaves trailing
	 elements implicit, so they need not equal FIRST).  */
      if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
10861
10862 /* Build an empty statement at location LOC. */
10863
tree
build_empty_stmt (location_t loc)
{
  /* An empty statement is represented as a void NOP_EXPR wrapping
     size_zero_node.  */
  tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
  SET_EXPR_LOCATION (t, loc);
  return t;
}
10871
10872
10873 /* Build an OpenMP clause with code CODE. LOC is the location of the
10874 clause. */
10875
tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  /* Clauses are variable-sized: LENGTH operands follow the fixed
     header; one operand slot is already part of tree_omp_clause.  */
  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  /* GC-allocate and zero the node, then fill in the header fields.  */
  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}
10895
10896 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10897 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10898 Except for the CODE and operand count field, other storage for the
10899 object is initialized to zeros. */
10900
tree
build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_exp already contains one operand slot; add LEN-1 more.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
10922
10923 /* Helper function for build_call_* functions; build a CALL_EXPR with
10924 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10925 the argument slots. */
10926
10927 static tree
10928 build_call_1 (tree return_type, tree fn, int nargs)
10929 {
10930 tree t;
10931
10932 t = build_vl_exp (CALL_EXPR, nargs + 3);
10933 TREE_TYPE (t) = return_type;
10934 CALL_EXPR_FN (t) = fn;
10935 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10936
10937 return t;
10938 }
10939
10940 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10941 FN and a null static chain slot. NARGS is the number of call arguments
10942 which are specified as "..." arguments. */
10943
10944 tree
10945 build_call_nary (tree return_type, tree fn, int nargs, ...)
10946 {
10947 tree ret;
10948 va_list args;
10949 va_start (args, nargs);
10950 ret = build_call_valist (return_type, fn, nargs, args);
10951 va_end (args);
10952 return ret;
10953 }
10954
10955 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10956 FN and a null static chain slot. NARGS is the number of call arguments
10957 which are specified as a va_list ARGS. */
10958
10959 tree
10960 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10961 {
10962 tree t;
10963 int i;
10964
10965 t = build_call_1 (return_type, fn, nargs);
10966 for (i = 0; i < nargs; i++)
10967 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10968 process_call_operands (t);
10969 return t;
10970 }
10971
10972 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10973 FN and a null static chain slot. NARGS is the number of call arguments
10974 which are specified as a tree array ARGS. */
10975
10976 tree
10977 build_call_array_loc (location_t loc, tree return_type, tree fn,
10978 int nargs, const tree *args)
10979 {
10980 tree t;
10981 int i;
10982
10983 t = build_call_1 (return_type, fn, nargs);
10984 for (i = 0; i < nargs; i++)
10985 CALL_EXPR_ARG (t, i) = args[i];
10986 process_call_operands (t);
10987 SET_EXPR_LOCATION (t, loc);
10988 return t;
10989 }
10990
10991 /* Like build_call_array, but takes a vec. */
10992
10993 tree
10994 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10995 {
10996 tree ret, t;
10997 unsigned int ix;
10998
10999 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11000 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11001 CALL_EXPR_ARG (ret, ix) = t;
11002 process_call_operands (ret);
11003 return ret;
11004 }
11005
11006 /* Conveniently construct a function call expression. FNDECL names the
11007 function to be called and N arguments are passed in the array
11008 ARGARRAY. */
11009
11010 tree
11011 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11012 {
11013 tree fntype = TREE_TYPE (fndecl);
11014 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11015
11016 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11017 }
11018
11019 /* Conveniently construct a function call expression. FNDECL names the
11020 function to be called and the arguments are passed in the vector
11021 VEC. */
11022
11023 tree
11024 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11025 {
11026 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11027 vec_safe_address (vec));
11028 }
11029
11030
11031 /* Conveniently construct a function call expression. FNDECL names the
11032 function to be called, N is the number of arguments, and the "..."
11033 parameters are the argument expressions. */
11034
11035 tree
11036 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11037 {
11038 va_list ap;
11039 tree *argarray = XALLOCAVEC (tree, n);
11040 int i;
11041
11042 va_start (ap, n);
11043 for (i = 0; i < n; i++)
11044 argarray[i] = va_arg (ap, tree);
11045 va_end (ap);
11046 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11047 }
11048
11049 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11050 varargs macros aren't supported by all bootstrap compilers. */
11051
11052 tree
11053 build_call_expr (tree fndecl, int n, ...)
11054 {
11055 va_list ap;
11056 tree *argarray = XALLOCAVEC (tree, n);
11057 int i;
11058
11059 va_start (ap, n);
11060 for (i = 0; i < n; i++)
11061 argarray[i] = va_arg (ap, tree);
11062 va_end (ap);
11063 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11064 }
11065
11066 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11067 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11068 It will get gimplified later into an ordinary internal function. */
11069
11070 tree
11071 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11072 tree type, int n, const tree *args)
11073 {
11074 tree t = build_call_1 (type, NULL_TREE, n);
11075 for (int i = 0; i < n; ++i)
11076 CALL_EXPR_ARG (t, i) = args[i];
11077 SET_EXPR_LOCATION (t, loc);
11078 CALL_EXPR_IFN (t) = ifn;
11079 return t;
11080 }
11081
11082 /* Build internal call expression. This is just like CALL_EXPR, except
11083 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11084 internal function. */
11085
11086 tree
11087 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11088 tree type, int n, ...)
11089 {
11090 va_list ap;
11091 tree *argarray = XALLOCAVEC (tree, n);
11092 int i;
11093
11094 va_start (ap, n);
11095 for (i = 0; i < n; i++)
11096 argarray[i] = va_arg (ap, tree);
11097 va_end (ap);
11098 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11099 }
11100
11101 /* Return a function call to FN, if the target is guaranteed to support it,
11102 or null otherwise.
11103
11104 N is the number of arguments, passed in the "...", and TYPE is the
11105 type of the return value. */
11106
11107 tree
11108 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11109 int n, ...)
11110 {
11111 va_list ap;
11112 tree *argarray = XALLOCAVEC (tree, n);
11113 int i;
11114
11115 va_start (ap, n);
11116 for (i = 0; i < n; i++)
11117 argarray[i] = va_arg (ap, tree);
11118 va_end (ap);
11119 if (internal_fn_p (fn))
11120 {
11121 internal_fn ifn = as_internal_fn (fn);
11122 if (direct_internal_fn_p (ifn))
11123 {
11124 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11125 if (!direct_internal_fn_supported_p (ifn, types,
11126 OPTIMIZE_FOR_BOTH))
11127 return NULL_TREE;
11128 }
11129 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11130 }
11131 else
11132 {
11133 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11134 if (!fndecl)
11135 return NULL_TREE;
11136 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11137 }
11138 }
11139
11140 /* Create a new constant string literal and return a char* pointer to it.
11141 The STRING_CST value is the LEN characters at STR. */
tree
build_string_literal (int len, const char *str)
{
  tree t, elem, index, type;

  t = build_string (len, str);
  /* The literal's type is a LEN-element array of read-only char
     (build_type_variant with the first flag set — presumably constp;
     confirm against build_type_variant's signature).  */
  elem = build_type_variant (char_type_node, 1, 0);
  index = build_index_type (size_int (len - 1));
  type = build_array_type (elem, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &str[0], i.e. a pointer to the first element.  */
  type = build_pointer_type (elem);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, elem,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
11162
11163
11164
11165 /* Return true if T (assumed to be a DECL) must be assigned a memory
11166 location. */
11167
11168 bool
11169 needs_to_live_in_memory (const_tree t)
11170 {
11171 return (TREE_ADDRESSABLE (t)
11172 || is_global_var (t)
11173 || (TREE_CODE (t) == RESULT_DECL
11174 && !DECL_BY_REFERENCE (t)
11175 && aggregate_value_p (t, current_function_decl)));
11176 }
11177
11178 /* Return value of a constant X and sign-extend it. */
11179
HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Sign-extend from bit BITS-1: fill all bits above the sign bit
	 when it is set, clear them when it is not.  The shift is split
	 in two steps so the count stays below the word width.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
      else
	val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
    }

  return val;
}
11200
11201 /* If TYPE is an integral or pointer type, return an integer type with
11202 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11203 if TYPE is already an integer type of signedness UNSIGNEDP. */
11204
tree
signed_or_unsigned_type_for (int unsignedp, tree type)
{
  /* Fast path: TYPE is already an integer type of the right signedness.  */
  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* For vectors, convert the element type and rebuild the vector.  */
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
	return NULL_TREE;
      if (inner == inner2)
	return type;
      return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
    }

  /* Only integral, pointer and offset types have an integer
     counterpart; everything else yields NULL_TREE.  */
  if (!INTEGRAL_TYPE_P (type)
      && !POINTER_TYPE_P (type)
      && TREE_CODE (type) != OFFSET_TYPE)
    return NULL_TREE;

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
11229
11230 /* If TYPE is an integral or pointer type, return an integer type with
11231 the same precision which is unsigned, or itself if TYPE is already an
11232 unsigned integer type. */
11233
tree
unsigned_type_for (tree type)
{
  /* Convenience wrapper: UNSIGNEDP == 1.  */
  return signed_or_unsigned_type_for (1, type);
}
11239
11240 /* If TYPE is an integral or pointer type, return an integer type with
11241 the same precision which is signed, or itself if TYPE is already a
11242 signed integer type. */
11243
tree
signed_type_for (tree type)
{
  /* Convenience wrapper: UNSIGNEDP == 0.  */
  return signed_or_unsigned_type_for (0, type);
}
11249
11250 /* If TYPE is a vector type, return a signed integer vector type with the
11251 same width and number of subparts. Otherwise return boolean_type_node. */
11252
11253 tree
11254 truth_type_for (tree type)
11255 {
11256 if (TREE_CODE (type) == VECTOR_TYPE)
11257 {
11258 if (VECTOR_BOOLEAN_TYPE_P (type))
11259 return type;
11260 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11261 GET_MODE_SIZE (TYPE_MODE (type)));
11262 }
11263 else
11264 return boolean_type_node;
11265 }
11266
11267 /* Returns the largest value obtainable by casting something in INNER type to
11268 OUTER type. */
11269
tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination: bit 2 is set when
     OUTER is wider than INNER, bit 1 when OUTER is unsigned, bit 0
     when INNER is unsigned.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use: the bound is 2^PREC - 1.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* Build the all-ones mask of PREC low bits in OUTER's precision.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
11319
11320 /* Returns the smallest value obtainable by casting something in INNER type to
11321 OUTER type. */
11322
tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
	 contains all values of INNER type.  In particular, both INNER
	 and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
	 want to obtain -2^^(iprec-1).  If we are keeping the
	 precision or narrowing to a signed type, we want to obtain
	 -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      /* wi::mask with negate_p builds the sign-extended -2^(PREC-1)
	 pattern in OUTER's precision.  */
      return wide_int_to_tree (outer,
			       wi::mask (prec - 1, true,
					 TYPE_PRECISION (outer)));
    }
}
11349
11350 /* Return nonzero if two operands that are suitable for PHI nodes are
11351 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11352 SSA_NAME or invariant. Note that this is strictly an optimization.
11353 That is, callers of this function can directly call operand_equal_p
11354 and get the same result, only slower. */
11355
int
operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
  /* Identical nodes are trivially equal.  */
  if (arg0 == arg1)
    return 1;
  /* Distinct SSA_NAMEs (already handled above if identical) cannot be
     proved equal here, so report inequality without deeper comparison.  */
  if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
    return 0;
  return operand_equal_p (arg0, arg1, 0);
}
11365
11366 /* Returns number of zeros at the end of binary representation of X. */
11367
tree
num_ending_zeros (const_tree x)
{
  /* wi::ctz counts trailing zero bits; wrap the count in X's type.  */
  return build_int_cst (TREE_TYPE (x), wi::ctz (x));
}
11373
11374
/* Walk NODE as part of the enclosing walk, storing into the local
   RESULT and returning from the enclosing function as soon as a
   non-NULL result is produced.  Relies on FUNC, DATA, PSET, LH and
   RESULT being in scope at the expansion site.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
11383
11384 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11385 be walked whenever a type is seen in the tree. Rest of operands and return
11386 value are as for walk_tree. */
11387
static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* ... fall through ... */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer, for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      /* Walk both the pointed-to type and the base type of the offset.  */
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
11463
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

  /* WALK_SUBTREE (defined earlier in this file; note the matching #undef
     after this function) recurses into one child and returns from this
     function if the callback produced a result.  WALK_SUBTREE_TAIL instead
     re-enters this function at TAIL_RECURSE, so the last child of each node
     is walked iteratively, bounding stack growth on long chains.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the language-specific hook, if any, a chance to handle this
     node before the generic walk below.  */
  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case VECTOR_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }
      /* WALK_SUBTREE_TAIL jumps away, so control never falls through.  */

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      /* Clauses are grouped by operand count; each group walks its
	 operands and then tail-walks the clause chain.  */
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	case OMP_CLAUSE_DEVICE_RESIDENT:
	case OMP_CLAUSE_USE_DEVICE:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 5; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
11830 #undef WALK_SUBTREE
11831
11832 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11833
11834 tree
11835 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11836 walk_tree_lh lh)
11837 {
11838 tree result;
11839
11840 hash_set<tree> pset;
11841 result = walk_tree_1 (tp, func, data, &pset, lh);
11842 return result;
11843 }
11844
11845
11846 tree
11847 tree_block (tree t)
11848 {
11849 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11850
11851 if (IS_EXPR_CODE_CLASS (c))
11852 return LOCATION_BLOCK (t->exp.locus);
11853 gcc_unreachable ();
11854 return NULL;
11855 }
11856
11857 void
11858 tree_set_block (tree t, tree b)
11859 {
11860 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11861
11862 if (IS_EXPR_CODE_CLASS (c))
11863 {
11864 t->exp.locus = set_block (t->exp.locus, b);
11865 }
11866 else
11867 gcc_unreachable ();
11868 }
11869
11870 /* Create a nameless artificial label and put it in the current
11871 function context. The label has a location of LOC. Returns the
11872 newly created label. */
11873
11874 tree
11875 create_artificial_label (location_t loc)
11876 {
11877 tree lab = build_decl (loc,
11878 LABEL_DECL, NULL_TREE, void_type_node);
11879
11880 DECL_ARTIFICIAL (lab) = 1;
11881 DECL_IGNORED_P (lab) = 1;
11882 DECL_CONTEXT (lab) = current_function_decl;
11883 return lab;
11884 }
11885
11886 /* Given a tree, try to return a useful variable name that we can use
11887 to prefix a temporary that is being assigned the value of the tree.
11888 I.E. given <temp> = &A, return A. */
11889
11890 const char *
11891 get_name (tree t)
11892 {
11893 tree stripped_decl;
11894
11895 stripped_decl = t;
11896 STRIP_NOPS (stripped_decl);
11897 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11898 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11899 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11900 {
11901 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11902 if (!name)
11903 return NULL;
11904 return IDENTIFIER_POINTER (name);
11905 }
11906 else
11907 {
11908 switch (TREE_CODE (stripped_decl))
11909 {
11910 case ADDR_EXPR:
11911 return get_name (TREE_OPERAND (stripped_decl, 0));
11912 default:
11913 return NULL;
11914 }
11915 }
11916 }
11917
11918 /* Return true if TYPE has a variable argument list. */
11919
11920 bool
11921 stdarg_p (const_tree fntype)
11922 {
11923 function_args_iterator args_iter;
11924 tree n = NULL_TREE, t;
11925
11926 if (!fntype)
11927 return false;
11928
11929 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11930 {
11931 n = t;
11932 }
11933
11934 return n != NULL_TREE && n != void_type_node;
11935 }
11936
11937 /* Return true if TYPE has a prototype. */
11938
11939 bool
11940 prototype_p (const_tree fntype)
11941 {
11942 tree t;
11943
11944 gcc_assert (fntype != NULL_TREE);
11945
11946 t = TYPE_ARG_TYPES (fntype);
11947 return (t != NULL_TREE);
11948 }
11949
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called.  */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);

      /* Follow the abstract-origin chain to its root; origins can point
	 at themselves, so stop on a self-reference to avoid looping.  */
      while (TREE_CODE (ao) == BLOCK
	     && BLOCK_ABSTRACT_ORIGIN (ao)
	     && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	ao = BLOCK_ABSTRACT_ORIGIN (ao);

      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      /* Move outward to the enclosing scope and repeat.  */
      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
11987
11988
11989 /* If EXP is inlined from an __attribute__((__artificial__))
11990 function, return the location of the original call expression. */
11991
11992 location_t
11993 tree_nonartificial_location (tree exp)
11994 {
11995 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11996
11997 if (loc)
11998 return *loc;
11999 else
12000 return EXPR_LOCATION (exp);
12001 }
12002
12003
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */
12006
12007 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
12008
12009 hashval_t
12010 cl_option_hasher::hash (tree x)
12011 {
12012 const_tree const t = x;
12013 const char *p;
12014 size_t i;
12015 size_t len = 0;
12016 hashval_t hash = 0;
12017
12018 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12019 {
12020 p = (const char *)TREE_OPTIMIZATION (t);
12021 len = sizeof (struct cl_optimization);
12022 }
12023
12024 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12025 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12026
12027 else
12028 gcc_unreachable ();
12029
12030 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12031 something else. */
12032 for (i = 0; i < len; i++)
12033 if (p[i])
12034 hash = (hash << 4) ^ ((i << 2) | p[i]);
12035
12036 return hash;
12037 }
12038
12039 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12040 TARGET_OPTION tree node) is the same as that given by *Y, which is the
12041 same. */
12042
12043 bool
12044 cl_option_hasher::equal (tree x, tree y)
12045 {
12046 const_tree const xt = x;
12047 const_tree const yt = y;
12048 const char *xp;
12049 const char *yp;
12050 size_t len;
12051
12052 if (TREE_CODE (xt) != TREE_CODE (yt))
12053 return 0;
12054
12055 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12056 {
12057 xp = (const char *)TREE_OPTIMIZATION (xt);
12058 yp = (const char *)TREE_OPTIMIZATION (yt);
12059 len = sizeof (struct cl_optimization);
12060 }
12061
12062 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12063 {
12064 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12065 TREE_TARGET_OPTION (yt));
12066 }
12067
12068 else
12069 gcc_unreachable ();
12070
12071 return (memcmp (xp, yp, len) == 0);
12072 }
12073
12074 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12075
12076 tree
12077 build_optimization_node (struct gcc_options *opts)
12078 {
12079 tree t;
12080
12081 /* Use the cache of optimization nodes. */
12082
12083 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12084 opts);
12085
12086 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12087 t = *slot;
12088 if (!t)
12089 {
12090 /* Insert this one into the hash table. */
12091 t = cl_optimization_node;
12092 *slot = t;
12093
12094 /* Make a new node for next time round. */
12095 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12096 }
12097
12098 return t;
12099 }
12100
12101 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12102
12103 tree
12104 build_target_option_node (struct gcc_options *opts)
12105 {
12106 tree t;
12107
12108 /* Use the cache of optimization nodes. */
12109
12110 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12111 opts);
12112
12113 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12114 t = *slot;
12115 if (!t)
12116 {
12117 /* Insert this one into the hash table. */
12118 t = cl_target_option_node;
12119 *slot = t;
12120
12121 /* Make a new node for next time round. */
12122 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12123 }
12124
12125 return t;
12126 }
12127
12128 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12129 so that they aren't saved during PCH writing. */
12130
12131 void
12132 prepare_target_option_nodes_for_pch (void)
12133 {
12134 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12135 for (; iter != cl_option_hash_table->end (); ++iter)
12136 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12137 TREE_TARGET_GLOBALS (*iter) = NULL;
12138 }
12139
/* Determine the "ultimate origin" of a block.  The block may be an inlined
   instance of an inlined instance of a block which is local to an inline
   function, so we have to trace all of the way back through the origin chain
   to find out what sort of node actually served as the original seed for the
   given block.  */

tree
block_ultimate_origin (const_tree block)
{
  tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);

  /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (BLOCK_ABSTRACT (block) && immediate_origin == block)
    return NULL_TREE;

  if (immediate_origin == NULL_TREE)
    return NULL_TREE;
  else
    {
      tree ret_val;
      tree lookahead = immediate_origin;

      /* Walk the origin chain until it ends (a non-BLOCK node or a null
	 origin) or cycles back on itself; RET_VAL holds the last node
	 reached.  */
      do
	{
	  ret_val = lookahead;
	  lookahead = (TREE_CODE (ret_val) == BLOCK
		       ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
	}
      while (lookahead != NULL && lookahead != ret_val);

      /* The block's abstract origin chain may not be the *ultimate* origin of
	 the block.  It could lead to a DECL that has an abstract origin set.
	 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
	 will give us if it has one).  Note that DECL's abstract origins are
	 supposed to be the most distant ancestor (or so decl_ultimate_origin
	 claims), so we don't need to loop following the DECL origins.  */
      if (DECL_P (ret_val))
	return DECL_ORIGIN (ret_val);

      return ret_val;
    }
}
12183
12184 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12185 no instruction. */
12186
12187 bool
12188 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12189 {
12190 /* Use precision rather then machine mode when we can, which gives
12191 the correct answer even for submode (bit-field) types. */
12192 if ((INTEGRAL_TYPE_P (outer_type)
12193 || POINTER_TYPE_P (outer_type)
12194 || TREE_CODE (outer_type) == OFFSET_TYPE)
12195 && (INTEGRAL_TYPE_P (inner_type)
12196 || POINTER_TYPE_P (inner_type)
12197 || TREE_CODE (inner_type) == OFFSET_TYPE))
12198 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12199
12200 /* Otherwise fall back on comparing machine modes (e.g. for
12201 aggregate types, floats). */
12202 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12203 }
12204
12205 /* Return true iff conversion in EXP generates no instruction. Mark
12206 it inline so that we fully inline into the stripping functions even
12207 though we have two uses of this function. */
12208
12209 static inline bool
12210 tree_nop_conversion (const_tree exp)
12211 {
12212 tree outer_type, inner_type;
12213
12214 if (!CONVERT_EXPR_P (exp)
12215 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12216 return false;
12217 if (TREE_OPERAND (exp, 0) == error_mark_node)
12218 return false;
12219
12220 outer_type = TREE_TYPE (exp);
12221 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12222
12223 if (!inner_type)
12224 return false;
12225
12226 return tree_nop_conversion_p (outer_type, inner_type);
12227 }
12228
12229 /* Return true iff conversion in EXP generates no instruction. Don't
12230 consider conversions changing the signedness. */
12231
12232 static bool
12233 tree_sign_nop_conversion (const_tree exp)
12234 {
12235 tree outer_type, inner_type;
12236
12237 if (!tree_nop_conversion (exp))
12238 return false;
12239
12240 outer_type = TREE_TYPE (exp);
12241 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12242
12243 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12244 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12245 }
12246
12247 /* Strip conversions from EXP according to tree_nop_conversion and
12248 return the resulting expression. */
12249
12250 tree
12251 tree_strip_nop_conversions (tree exp)
12252 {
12253 while (tree_nop_conversion (exp))
12254 exp = TREE_OPERAND (exp, 0);
12255 return exp;
12256 }
12257
12258 /* Strip conversions from EXP according to tree_sign_nop_conversion
12259 and return the resulting expression. */
12260
12261 tree
12262 tree_strip_sign_nop_conversions (tree exp)
12263 {
12264 while (tree_sign_nop_conversion (exp))
12265 exp = TREE_OPERAND (exp, 0);
12266 return exp;
12267 }
12268
12269 /* Avoid any floating point extensions from EXP. */
12270 tree
12271 strip_float_extensions (tree exp)
12272 {
12273 tree sub, expt, subt;
12274
12275 /* For floating point constant look up the narrowest type that can hold
12276 it properly and handle it like (type)(narrowest_type)constant.
12277 This way we can optimize for instance a=a*2.0 where "a" is float
12278 but 2.0 is double constant. */
12279 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12280 {
12281 REAL_VALUE_TYPE orig;
12282 tree type = NULL;
12283
12284 orig = TREE_REAL_CST (exp);
12285 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12286 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12287 type = float_type_node;
12288 else if (TYPE_PRECISION (TREE_TYPE (exp))
12289 > TYPE_PRECISION (double_type_node)
12290 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12291 type = double_type_node;
12292 if (type)
12293 return build_real_truncate (type, orig);
12294 }
12295
12296 if (!CONVERT_EXPR_P (exp))
12297 return exp;
12298
12299 sub = TREE_OPERAND (exp, 0);
12300 subt = TREE_TYPE (sub);
12301 expt = TREE_TYPE (exp);
12302
12303 if (!FLOAT_TYPE_P (subt))
12304 return exp;
12305
12306 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12307 return exp;
12308
12309 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12310 return exp;
12311
12312 return strip_float_extensions (sub);
12313 }
12314
12315 /* Strip out all handled components that produce invariant
12316 offsets. */
12317
12318 const_tree
12319 strip_invariant_refs (const_tree op)
12320 {
12321 while (handled_component_p (op))
12322 {
12323 switch (TREE_CODE (op))
12324 {
12325 case ARRAY_REF:
12326 case ARRAY_RANGE_REF:
12327 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12328 || TREE_OPERAND (op, 2) != NULL_TREE
12329 || TREE_OPERAND (op, 3) != NULL_TREE)
12330 return NULL;
12331 break;
12332
12333 case COMPONENT_REF:
12334 if (TREE_OPERAND (op, 2) != NULL_TREE)
12335 return NULL;
12336 break;
12337
12338 default:;
12339 }
12340 op = TREE_OPERAND (op, 0);
12341 }
12342
12343 return op;
12344 }
12345
12346 static GTY(()) tree gcc_eh_personality_decl;
12347
12348 /* Return the GCC personality function decl. */
12349
12350 tree
12351 lhd_gcc_personality (void)
12352 {
12353 if (!gcc_eh_personality_decl)
12354 gcc_eh_personality_decl = build_personality_function ("gcc");
12355 return gcc_eh_personality_decl;
12356 }
12357
12358 /* TARGET is a call target of GIMPLE call statement
12359 (obtained by gimple_call_fn). Return true if it is
12360 OBJ_TYPE_REF representing an virtual call of C++ method.
12361 (As opposed to OBJ_TYPE_REF representing objc calls
12362 through a cast where middle-end devirtualization machinery
12363 can't apply.) */
12364
12365 bool
12366 virtual_method_call_p (const_tree target)
12367 {
12368 if (TREE_CODE (target) != OBJ_TYPE_REF)
12369 return false;
12370 tree t = TREE_TYPE (target);
12371 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12372 t = TREE_TYPE (t);
12373 if (TREE_CODE (t) == FUNCTION_TYPE)
12374 return false;
12375 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12376 /* If we do not have BINFO associated, it means that type was built
12377 without devirtualization enabled. Do not consider this a virtual
12378 call. */
12379 if (!TYPE_BINFO (obj_type_ref_class (target)))
12380 return false;
12381 return true;
12382 }
12383
12384 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12385
12386 tree
12387 obj_type_ref_class (const_tree ref)
12388 {
12389 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12390 ref = TREE_TYPE (ref);
12391 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12392 ref = TREE_TYPE (ref);
12393 /* We look for type THIS points to. ObjC also builds
12394 OBJ_TYPE_REF with non-method calls, Their first parameter
12395 ID however also corresponds to class type. */
12396 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12397 || TREE_CODE (ref) == FUNCTION_TYPE);
12398 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12399 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12400 return TREE_TYPE (ref);
12401 }
12402
12403 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12404
12405 static tree
12406 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12407 {
12408 unsigned int i;
12409 tree base_binfo, b;
12410
12411 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12412 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12413 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12414 return base_binfo;
12415 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12416 return b;
12417 return NULL;
12418 }
12419
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return, otherwise return NULL_TREE.  */

tree
get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  /* Each iteration descends one artificial (base-class) field deeper,
     adjusting OFFSET to be relative to that field.  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (offset < 0)
	return NULL_TREE;

      /* Find the artificial field (i.e. a base subobject) whose bit
	 range contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (offset != 0)
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field, hence
	     the division by BITS_PER_UNIT before adding.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the field and continue with the remaining offset.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
12481
12482 /* Returns true if X is a typedef decl. */
12483
12484 bool
12485 is_typedef_decl (const_tree x)
12486 {
12487 return (x && TREE_CODE (x) == TYPE_DECL
12488 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12489 }
12490
12491 /* Returns true iff TYPE is a type variant created for a typedef. */
12492
12493 bool
12494 typedef_variant_p (const_tree type)
12495 {
12496 return is_typedef_decl (TYPE_NAME (type));
12497 }
12498
/* Warn about a use of an identifier which was marked deprecated.
   NODE is the deprecated DECL or TYPE; ATTR, if non-null, is its
   "deprecated" attribute list (looked up from NODE otherwise).  */
void
warn_deprecated_use (tree node, tree attr)
{
  const char *msg;

  if (node == 0 || !warn_deprecated_decl)
    return;

  /* If the caller did not supply the attribute list, fetch it from the
     declaration, or for a type from its stub TYPE_DECL.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* Extract the optional user-supplied message from the attribute's
     argument, if one was given.  */
  if (attr)
    msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
  else
    msg = NULL;

  bool w;
  if (DECL_P (node))
    {
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      /* Only point at the declaration if the warning was emitted.  */
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* WHAT is a printable name for the type, when one exists.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      if (decl)
	{
	  if (what)
	    {
	      if (msg)
		w = warning (OPT_Wdeprecated_declarations,
			     "%qE is deprecated: %s", what, msg);
	      else
		w = warning (OPT_Wdeprecated_declarations,
			     "%qE is deprecated", what);
	    }
	  else
	    {
	      if (msg)
		w = warning (OPT_Wdeprecated_declarations,
			     "type is deprecated: %s", msg);
	      else
		w = warning (OPT_Wdeprecated_declarations,
			     "type is deprecated");
	    }
	  if (w)
	    inform (DECL_SOURCE_LOCATION (decl), "declared here");
	}
      else
	{
	  /* No stub decl: emit the warning without a "declared here"
	     note.  */
	  if (what)
	    {
	      if (msg)
		warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
			 what, msg);
	      else
		warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
	    }
	  else
	    {
	      if (msg)
		warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
			 msg);
	      else
		warning (OPT_Wdeprecated_declarations, "type is deprecated");
	    }
	}
    }
}
12599
12600 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12601 somewhere in it. */
12602
12603 bool
12604 contains_bitfld_component_ref_p (const_tree ref)
12605 {
12606 while (handled_component_p (ref))
12607 {
12608 if (TREE_CODE (ref) == COMPONENT_REF
12609 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12610 return true;
12611 ref = TREE_OPERAND (ref, 0);
12612 }
12613
12614 return false;
12615 }
12616
12617 /* Try to determine whether a TRY_CATCH expression can fall through.
12618 This is a subroutine of block_may_fallthru. */
12619
12620 static bool
12621 try_catch_may_fallthru (const_tree stmt)
12622 {
12623 tree_stmt_iterator i;
12624
12625 /* If the TRY block can fall through, the whole TRY_CATCH can
12626 fall through. */
12627 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12628 return true;
12629
12630 i = tsi_start (TREE_OPERAND (stmt, 1));
12631 switch (TREE_CODE (tsi_stmt (i)))
12632 {
12633 case CATCH_EXPR:
12634 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12635 catch expression and a body. The whole TRY_CATCH may fall
12636 through iff any of the catch bodies falls through. */
12637 for (; !tsi_end_p (i); tsi_next (&i))
12638 {
12639 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12640 return true;
12641 }
12642 return false;
12643
12644 case EH_FILTER_EXPR:
12645 /* The exception filter expression only matters if there is an
12646 exception. If the exception does not match EH_FILTER_TYPES,
12647 we will execute EH_FILTER_FAILURE, and we will fall through
12648 if that falls through. If the exception does match
12649 EH_FILTER_TYPES, the stack unwinder will continue up the
12650 stack, so we will not fall through. We don't know whether we
12651 will throw an exception which matches EH_FILTER_TYPES or not,
12652 so we just ignore EH_FILTER_TYPES and assume that we might
12653 throw an exception which doesn't match. */
12654 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12655
12656 default:
12657 /* This case represents statements to be executed when an
12658 exception occurs. Those statements are implicitly followed
12659 by a RESX statement to resume execution after the exception.
12660 So in this case the TRY_CATCH never falls through. */
12661 return false;
12662 }
12663 }
12664
12665 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12666 need not be 100% accurate; simply be conservative and return true if we
12667 don't know. This is used only to avoid stupidly generating extra code.
12668 If we're wrong, we'll just delete the extra code later. */
12669
12670 bool
12671 block_may_fallthru (const_tree block)
12672 {
12673 /* This CONST_CAST is okay because expr_last returns its argument
12674 unmodified and we assign it to a const_tree. */
12675 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12676
12677 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12678 {
12679 case GOTO_EXPR:
12680 case RETURN_EXPR:
12681 /* Easy cases. If the last statement of the block implies
12682 control transfer, then we can't fall through. */
12683 return false;
12684
12685 case SWITCH_EXPR:
12686 /* If SWITCH_LABELS is set, this is lowered, and represents a
12687 branch to a selected label and hence can not fall through.
12688 Otherwise SWITCH_BODY is set, and the switch can fall
12689 through. */
12690 return SWITCH_LABELS (stmt) == NULL_TREE;
12691
12692 case COND_EXPR:
12693 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12694 return true;
12695 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12696
12697 case BIND_EXPR:
12698 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12699
12700 case TRY_CATCH_EXPR:
12701 return try_catch_may_fallthru (stmt);
12702
12703 case TRY_FINALLY_EXPR:
12704 /* The finally clause is always executed after the try clause,
12705 so if it does not fall through, then the try-finally will not
12706 fall through. Otherwise, if the try clause does not fall
12707 through, then when the finally clause falls through it will
12708 resume execution wherever the try clause was going. So the
12709 whole try-finally will only fall through if both the try
12710 clause and the finally clause fall through. */
12711 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12712 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12713
12714 case MODIFY_EXPR:
12715 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12716 stmt = TREE_OPERAND (stmt, 1);
12717 else
12718 return true;
12719 /* FALLTHRU */
12720
12721 case CALL_EXPR:
12722 /* Functions that do not return do not fall through. */
12723 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12724
12725 case CLEANUP_POINT_EXPR:
12726 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12727
12728 case TARGET_EXPR:
12729 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12730
12731 case ERROR_MARK:
12732 return true;
12733
12734 default:
12735 return lang_hooks.block_may_fallthru (stmt);
12736 }
12737 }
12738
12739 /* True if we are using EH to handle cleanups. */
12740 static bool using_eh_for_cleanups_flag = false;
12741
/* Called by front ends to indicate that EH should be used for cleanups
   (e.g. running destructors during stack unwinding).  Once set, the
   flag is never cleared.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}
12749
/* Return true if a front end has requested EH-based cleanups via
   using_eh_for_cleanups.  */
bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
12756
12757 /* Wrapper for tree_code_name to ensure that tree code is valid */
12758 const char *
12759 get_tree_code_name (enum tree_code code)
12760 {
12761 const char *invalid = "<invalid tree code>";
12762
12763 if (code >= MAX_TREE_CODES)
12764 return invalid;
12765
12766 return tree_code_name[code];
12767 }
12768
12769 /* Drops the TREE_OVERFLOW flag from T. */
12770
12771 tree
12772 drop_tree_overflow (tree t)
12773 {
12774 gcc_checking_assert (TREE_OVERFLOW (t));
12775
12776 /* For tree codes with a sharing machinery re-build the result. */
12777 if (TREE_CODE (t) == INTEGER_CST)
12778 return wide_int_to_tree (TREE_TYPE (t), t);
12779
12780 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12781 and drop the flag. */
12782 t = copy_node (t);
12783 TREE_OVERFLOW (t) = 0;
12784 return t;
12785 }
12786
12787 /* Given a memory reference expression T, return its base address.
12788 The base address of a memory reference expression is the main
12789 object being referenced. For instance, the base address for
12790 'array[i].fld[j]' is 'array'. You can think of this as stripping
12791 away the offset part from a memory address.
12792
12793 This function calls handled_component_p to strip away all the inner
12794 parts of the memory reference until it reaches the base object. */
12795
12796 tree
12797 get_base_address (tree t)
12798 {
12799 while (handled_component_p (t))
12800 t = TREE_OPERAND (t, 0);
12801
12802 if ((TREE_CODE (t) == MEM_REF
12803 || TREE_CODE (t) == TARGET_MEM_REF)
12804 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12805 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12806
12807 /* ??? Either the alias oracle or all callers need to properly deal
12808 with WITH_SIZE_EXPRs before we can look through those. */
12809 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12810 return NULL_TREE;
12811
12812 return t;
12813 }
12814
12815 /* Return a tree of sizetype representing the size, in bytes, of the element
12816 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12817
12818 tree
12819 array_ref_element_size (tree exp)
12820 {
12821 tree aligned_size = TREE_OPERAND (exp, 3);
12822 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12823 location_t loc = EXPR_LOCATION (exp);
12824
12825 /* If a size was specified in the ARRAY_REF, it's the size measured
12826 in alignment units of the element type. So multiply by that value. */
12827 if (aligned_size)
12828 {
12829 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12830 sizetype from another type of the same width and signedness. */
12831 if (TREE_TYPE (aligned_size) != sizetype)
12832 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12833 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12834 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12835 }
12836
12837 /* Otherwise, take the size from that of the element type. Substitute
12838 any PLACEHOLDER_EXPR that we have. */
12839 else
12840 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12841 }
12842
12843 /* Return a tree representing the lower bound of the array mentioned in
12844 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12845
12846 tree
12847 array_ref_low_bound (tree exp)
12848 {
12849 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12850
12851 /* If a lower bound is specified in EXP, use it. */
12852 if (TREE_OPERAND (exp, 2))
12853 return TREE_OPERAND (exp, 2);
12854
12855 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12856 substituting for a PLACEHOLDER_EXPR as needed. */
12857 if (domain_type && TYPE_MIN_VALUE (domain_type))
12858 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12859
12860 /* Otherwise, return a zero of the appropriate type. */
12861 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12862 }
12863
12864 /* Return a tree representing the upper bound of the array mentioned in
12865 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12866
12867 tree
12868 array_ref_up_bound (tree exp)
12869 {
12870 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12871
12872 /* If there is a domain type and it has an upper bound, use it, substituting
12873 for a PLACEHOLDER_EXPR as needed. */
12874 if (domain_type && TYPE_MAX_VALUE (domain_type))
12875 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12876
12877 /* Otherwise fail. */
12878 return NULL_TREE;
12879 }
12880
12881 /* Returns true if REF is an array reference to an array at the end of
12882 a structure. If this is the case, the array may be allocated larger
12883 than its upper bound implies. */
12884
12885 bool
12886 array_at_struct_end_p (tree ref)
12887 {
12888 if (TREE_CODE (ref) != ARRAY_REF
12889 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12890 return false;
12891
12892 while (handled_component_p (ref))
12893 {
12894 /* If the reference chain contains a component reference to a
12895 non-union type and there follows another field the reference
12896 is not at the end of a structure. */
12897 if (TREE_CODE (ref) == COMPONENT_REF
12898 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12899 {
12900 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12901 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12902 nextf = DECL_CHAIN (nextf);
12903 if (nextf)
12904 return false;
12905 }
12906
12907 ref = TREE_OPERAND (ref, 0);
12908 }
12909
12910 /* If the reference is based on a declared entity, the size of the array
12911 is constrained by its given domain. */
12912 if (DECL_P (ref))
12913 return false;
12914
12915 return true;
12916 }
12917
12918 /* Return a tree representing the offset, in bytes, of the field referenced
12919 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12920
12921 tree
12922 component_ref_field_offset (tree exp)
12923 {
12924 tree aligned_offset = TREE_OPERAND (exp, 2);
12925 tree field = TREE_OPERAND (exp, 1);
12926 location_t loc = EXPR_LOCATION (exp);
12927
12928 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12929 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12930 value. */
12931 if (aligned_offset)
12932 {
12933 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12934 sizetype from another type of the same width and signedness. */
12935 if (TREE_TYPE (aligned_offset) != sizetype)
12936 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12937 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12938 size_int (DECL_OFFSET_ALIGN (field)
12939 / BITS_PER_UNIT));
12940 }
12941
12942 /* Otherwise, take the offset from that of the field. Substitute
12943 any PLACEHOLDER_EXPR that we have. */
12944 else
12945 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12946 }
12947
12948 /* Return the machine mode of T. For vectors, returns the mode of the
12949 inner type. The main use case is to feed the result to HONOR_NANS,
12950 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12951
12952 machine_mode
12953 element_mode (const_tree t)
12954 {
12955 if (!TYPE_P (t))
12956 t = TREE_TYPE (t);
12957 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12958 t = TREE_TYPE (t);
12959 return TYPE_MODE (t);
12960 }
12961
12962
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).  */
12965
12966 static bool
12967 verify_type_variant (const_tree t, tree tv)
12968 {
12969 /* Type variant can differ by:
12970
12971 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12972 ENCODE_QUAL_ADDR_SPACE.
12973 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12974 in this case some values may not be set in the variant types
12975 (see TYPE_COMPLETE_P checks).
12976 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
12977 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
12978 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12979 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12980 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12981 this is necessary to make it possible to merge types form different TUs
12982 - arrays, pointers and references may have TREE_TYPE that is a variant
12983 of TREE_TYPE of their main variants.
12984 - aggregates may have new TYPE_FIELDS list that list variants of
12985 the main variant TYPE_FIELDS.
12986 - vector types may differ by TYPE_VECTOR_OPAQUE
12987 - TYPE_METHODS is always NULL for vairant types and maintained for
12988 main variant only.
12989 */
12990
12991 /* Convenience macro for matching individual fields. */
12992 #define verify_variant_match(flag) \
12993 do { \
12994 if (flag (tv) != flag (t)) \
12995 { \
12996 error ("type variant differs by " #flag "."); \
12997 debug_tree (tv); \
12998 return false; \
12999 } \
13000 } while (false)
13001
13002 /* tree_base checks. */
13003
13004 verify_variant_match (TREE_CODE);
13005 /* FIXME: Ada builds non-artificial variants of artificial types. */
13006 if (TYPE_ARTIFICIAL (tv) && 0)
13007 verify_variant_match (TYPE_ARTIFICIAL);
13008 if (POINTER_TYPE_P (tv))
13009 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13010 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13011 verify_variant_match (TYPE_UNSIGNED);
13012 verify_variant_match (TYPE_ALIGN_OK);
13013 verify_variant_match (TYPE_PACKED);
13014 if (TREE_CODE (t) == REFERENCE_TYPE)
13015 verify_variant_match (TYPE_REF_IS_RVALUE);
13016 if (AGGREGATE_TYPE_P (t))
13017 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13018 else
13019 verify_variant_match (TYPE_SATURATING);
13020 /* FIXME: This check trigger during libstdc++ build. */
13021 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13022 verify_variant_match (TYPE_FINAL_P);
13023
13024 /* tree_type_common checks. */
13025
13026 if (COMPLETE_TYPE_P (t))
13027 {
13028 verify_variant_match (TYPE_SIZE);
13029 verify_variant_match (TYPE_MODE);
13030 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
13031 /* FIXME: ideally we should compare pointer equality, but java FE
13032 produce variants where size is INTEGER_CST of different type (int
13033 wrt size_type) during libjava biuld. */
13034 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
13035 {
13036 error ("type variant has different TYPE_SIZE_UNIT");
13037 debug_tree (tv);
13038 error ("type variant's TYPE_SIZE_UNIT");
13039 debug_tree (TYPE_SIZE_UNIT (tv));
13040 error ("type's TYPE_SIZE_UNIT");
13041 debug_tree (TYPE_SIZE_UNIT (t));
13042 return false;
13043 }
13044 }
13045 verify_variant_match (TYPE_PRECISION);
13046 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13047 if (RECORD_OR_UNION_TYPE_P (t))
13048 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13049 else if (TREE_CODE (t) == ARRAY_TYPE)
13050 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13051 /* During LTO we merge variant lists from diferent translation units
13052 that may differ BY TYPE_CONTEXT that in turn may point
13053 to TRANSLATION_UNIT_DECL.
13054 Ada also builds variants of types with different TYPE_CONTEXT. */
13055 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13056 verify_variant_match (TYPE_CONTEXT);
13057 verify_variant_match (TYPE_STRING_FLAG);
13058 if (TYPE_ALIAS_SET_KNOWN_P (t))
13059 {
13060 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13061 debug_tree (tv);
13062 return false;
13063 }
13064
13065 /* tree_type_non_common checks. */
13066
13067 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13068 and dangle the pointer from time to time. */
13069 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13070 && (in_lto_p || !TYPE_VFIELD (tv)
13071 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13072 {
13073 error ("type variant has different TYPE_VFIELD");
13074 debug_tree (tv);
13075 return false;
13076 }
13077 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13078 || TREE_CODE (t) == INTEGER_TYPE
13079 || TREE_CODE (t) == BOOLEAN_TYPE
13080 || TREE_CODE (t) == REAL_TYPE
13081 || TREE_CODE (t) == FIXED_POINT_TYPE)
13082 {
13083 verify_variant_match (TYPE_MAX_VALUE);
13084 verify_variant_match (TYPE_MIN_VALUE);
13085 }
13086 if (TREE_CODE (t) == METHOD_TYPE)
13087 verify_variant_match (TYPE_METHOD_BASETYPE);
13088 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13089 {
13090 error ("type variant has TYPE_METHODS");
13091 debug_tree (tv);
13092 return false;
13093 }
13094 if (TREE_CODE (t) == OFFSET_TYPE)
13095 verify_variant_match (TYPE_OFFSET_BASETYPE);
13096 if (TREE_CODE (t) == ARRAY_TYPE)
13097 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13098 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13099 or even type's main variant. This is needed to make bootstrap pass
13100 and the bug seems new in GCC 5.
13101 C++ FE should be updated to make this consistent and we should check
13102 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13103 is a match with main variant.
13104
13105 Also disable the check for Java for now because of parser hack that builds
13106 first an dummy BINFO and then sometimes replace it by real BINFO in some
13107 of the copies. */
13108 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13109 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13110 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13111 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13112 at LTO time only. */
13113 && (in_lto_p && odr_type_p (t)))
13114 {
13115 error ("type variant has different TYPE_BINFO");
13116 debug_tree (tv);
13117 error ("type variant's TYPE_BINFO");
13118 debug_tree (TYPE_BINFO (tv));
13119 error ("type's TYPE_BINFO");
13120 debug_tree (TYPE_BINFO (t));
13121 return false;
13122 }
13123
13124 /* Check various uses of TYPE_VALUES_RAW. */
13125 if (TREE_CODE (t) == ENUMERAL_TYPE)
13126 verify_variant_match (TYPE_VALUES);
13127 else if (TREE_CODE (t) == ARRAY_TYPE)
13128 verify_variant_match (TYPE_DOMAIN);
13129 /* Permit incomplete variants of complete type. While FEs may complete
13130 all variants, this does not happen for C++ templates in all cases. */
13131 else if (RECORD_OR_UNION_TYPE_P (t)
13132 && COMPLETE_TYPE_P (t)
13133 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13134 {
13135 tree f1, f2;
13136
13137 /* Fortran builds qualified variants as new records with items of
13138 qualified type. Verify that they looks same. */
13139 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13140 f1 && f2;
13141 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13142 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13143 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13144 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13145 /* FIXME: gfc_nonrestricted_type builds all types as variants
13146 with exception of pointer types. It deeply copies the type
13147 which means that we may end up with a variant type
13148 referring non-variant pointer. We may change it to
13149 produce types as variants, too, like
13150 objc_get_protocol_qualified_type does. */
13151 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13152 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13153 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13154 break;
13155 if (f1 || f2)
13156 {
13157 error ("type variant has different TYPE_FIELDS");
13158 debug_tree (tv);
13159 error ("first mismatch is field");
13160 debug_tree (f1);
13161 error ("and field");
13162 debug_tree (f2);
13163 return false;
13164 }
13165 }
13166 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13167 verify_variant_match (TYPE_ARG_TYPES);
13168 /* For C++ the qualified variant of array type is really an array type
13169 of qualified TREE_TYPE.
13170 objc builds variants of pointer where pointer to type is a variant, too
13171 in objc_get_protocol_qualified_type. */
13172 if (TREE_TYPE (t) != TREE_TYPE (tv)
13173 && ((TREE_CODE (t) != ARRAY_TYPE
13174 && !POINTER_TYPE_P (t))
13175 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13176 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13177 {
13178 error ("type variant has different TREE_TYPE");
13179 debug_tree (tv);
13180 error ("type variant's TREE_TYPE");
13181 debug_tree (TREE_TYPE (tv));
13182 error ("type's TREE_TYPE");
13183 debug_tree (TREE_TYPE (t));
13184 return false;
13185 }
13186 if (type_with_alias_set_p (t)
13187 && !gimple_canonical_types_compatible_p (t, tv, false))
13188 {
13189 error ("type is not compatible with its vairant");
13190 debug_tree (tv);
13191 error ("type variant's TREE_TYPE");
13192 debug_tree (TREE_TYPE (tv));
13193 error ("type's TREE_TYPE");
13194 debug_tree (TREE_TYPE (t));
13195 return false;
13196 }
13197 return true;
13198 #undef verify_variant_match
13199 }
13200
13201
13202 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13203 the middle-end types_compatible_p function. It needs to avoid
13204 claiming types are different for types that should be treated
13205 the same with respect to TBAA. Canonical types are also used
13206 for IL consistency checks via the useless_type_conversion_p
13207 predicate which does not handle all type kinds itself but falls
13208 back to pointer-comparison of TYPE_CANONICAL for aggregates
13209 for example. */
13210
13211 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13212 type calculation because we need to allow inter-operability between signed
13213 and unsigned variants. */
13214
13215 bool
13216 type_with_interoperable_signedness (const_tree type)
13217 {
13218 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13219 signed char and unsigned char. Similarly fortran FE builds
13220 C_SIZE_T as signed type, while C defines it unsigned. */
13221
13222 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13223 == INTEGER_TYPE
13224 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13225 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13226 }
13227
/* Return true iff T1 and T2 are structurally identical as far as TBAA
   is concerned.
   This function is used both by lto.c canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
   that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
   only for LTO because only in these cases TYPE_CANONICAL equivalence
   corresponds to the one defined by gimple_canonical_types_compatible_p.  */
13235
bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
				     bool trust_type_canonical)
{
  /* Type variants should be same as the main variant.  When not doing sanity
     checking to verify this fact, go to main variants and save some work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete type.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
	1) mode assuming that types are complete matching their structure
	2) mode allowing incomplete types but producing equivalence classes
	   and thus ignoring all info from complete types
	3) mode allowing incomplete types to match complete but checking
	   compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used i.e. for warnings during
     declaration merging.  */

  gcc_assert (!trust_type_canonical
	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
  /* If the types have been previously registered and found equal
     they still are.  */

  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure to not return
	 false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
			   && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different mode.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	return false;

      /* In some cases the signed and unsigned types are required to be
	 inter-operable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
	  && !type_with_interoperable_signedness (t1))
	return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
	 interoperable with "signed char".  Unless all frontends are revisited
	 to agree on these types, we must ignore the flag completely.  */

      /* The Fortran standard defines a C_PTR type that is compatible with
	 every C pointer.  For this reason we need to glob all pointers into
	 one.  Still pointers in different address spaces are not
	 compatible.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2),
						    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      trust_type_canonical))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* Don't try to compare variants of an incomplete type, before
	   TYPE_FIELDS has been copied around.  */
	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
	  return true;


	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
	  return false;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2),
		       trust_type_canonical))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
	 compatible.  This is executed only from verify_type and false
	 positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}
13478
13479 /* Verify type T. */
13480
13481 void
13482 verify_type (const_tree t)
13483 {
13484 bool error_found = false;
13485 tree mv = TYPE_MAIN_VARIANT (t);
13486 if (!mv)
13487 {
13488 error ("Main variant is not defined");
13489 error_found = true;
13490 }
13491 else if (mv != TYPE_MAIN_VARIANT (mv))
13492 {
13493 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13494 debug_tree (mv);
13495 error_found = true;
13496 }
13497 else if (t != mv && !verify_type_variant (t, mv))
13498 error_found = true;
13499
13500 tree ct = TYPE_CANONICAL (t);
13501 if (!ct)
13502 ;
13503 else if (TYPE_CANONICAL (t) != ct)
13504 {
13505 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13506 debug_tree (ct);
13507 error_found = true;
13508 }
13509 /* Method and function types can not be used to address memory and thus
13510 TYPE_CANONICAL really matters only for determining useless conversions.
13511
13512 FIXME: C++ FE produce declarations of builtin functions that are not
13513 compatible with main variants. */
13514 else if (TREE_CODE (t) == FUNCTION_TYPE)
13515 ;
13516 else if (t != ct
13517 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13518 with variably sized arrays because their sizes possibly
13519 gimplified to different variables. */
13520 && !variably_modified_type_p (ct, NULL)
13521 && !gimple_canonical_types_compatible_p (t, ct, false))
13522 {
13523 error ("TYPE_CANONICAL is not compatible");
13524 debug_tree (ct);
13525 error_found = true;
13526 }
13527
13528 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13529 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13530 {
13531 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13532 debug_tree (ct);
13533 error_found = true;
13534 }
13535 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13536 {
13537 error ("TYPE_CANONICAL of main variant is not main variant");
13538 debug_tree (ct);
13539 debug_tree (TYPE_MAIN_VARIANT (ct));
13540 error_found = true;
13541 }
13542
13543
13544 /* Check various uses of TYPE_MINVAL. */
13545 if (RECORD_OR_UNION_TYPE_P (t))
13546 {
13547 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13548 and danagle the pointer from time to time. */
13549 if (TYPE_VFIELD (t)
13550 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13551 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13552 {
13553 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13554 debug_tree (TYPE_VFIELD (t));
13555 error_found = true;
13556 }
13557 }
13558 else if (TREE_CODE (t) == POINTER_TYPE)
13559 {
13560 if (TYPE_NEXT_PTR_TO (t)
13561 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13562 {
13563 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13564 debug_tree (TYPE_NEXT_PTR_TO (t));
13565 error_found = true;
13566 }
13567 }
13568 else if (TREE_CODE (t) == REFERENCE_TYPE)
13569 {
13570 if (TYPE_NEXT_REF_TO (t)
13571 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13572 {
13573 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13574 debug_tree (TYPE_NEXT_REF_TO (t));
13575 error_found = true;
13576 }
13577 }
13578 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13579 || TREE_CODE (t) == FIXED_POINT_TYPE)
13580 {
13581 /* FIXME: The following check should pass:
13582 useless_type_conversion_p (const_cast <tree> (t),
13583 TREE_TYPE (TYPE_MIN_VALUE (t))
13584 but does not for C sizetypes in LTO. */
13585 }
13586 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13587 else if (TYPE_MINVAL (t)
13588 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13589 || in_lto_p))
13590 {
13591 error ("TYPE_MINVAL non-NULL");
13592 debug_tree (TYPE_MINVAL (t));
13593 error_found = true;
13594 }
13595
13596 /* Check various uses of TYPE_MAXVAL. */
13597 if (RECORD_OR_UNION_TYPE_P (t))
13598 {
13599 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13600 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13601 && TYPE_METHODS (t) != error_mark_node)
13602 {
13603 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13604 debug_tree (TYPE_METHODS (t));
13605 error_found = true;
13606 }
13607 }
13608 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13609 {
13610 if (TYPE_METHOD_BASETYPE (t)
13611 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13612 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13613 {
13614 error ("TYPE_METHOD_BASETYPE is not record nor union");
13615 debug_tree (TYPE_METHOD_BASETYPE (t));
13616 error_found = true;
13617 }
13618 }
13619 else if (TREE_CODE (t) == OFFSET_TYPE)
13620 {
13621 if (TYPE_OFFSET_BASETYPE (t)
13622 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13623 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13624 {
13625 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13626 debug_tree (TYPE_OFFSET_BASETYPE (t));
13627 error_found = true;
13628 }
13629 }
13630 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13631 || TREE_CODE (t) == FIXED_POINT_TYPE)
13632 {
13633 /* FIXME: The following check should pass:
13634 useless_type_conversion_p (const_cast <tree> (t),
13635 TREE_TYPE (TYPE_MAX_VALUE (t))
13636 but does not for C sizetypes in LTO. */
13637 }
13638 else if (TREE_CODE (t) == ARRAY_TYPE)
13639 {
13640 if (TYPE_ARRAY_MAX_SIZE (t)
13641 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13642 {
13643 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13644 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13645 error_found = true;
13646 }
13647 }
13648 else if (TYPE_MAXVAL (t))
13649 {
13650 error ("TYPE_MAXVAL non-NULL");
13651 debug_tree (TYPE_MAXVAL (t));
13652 error_found = true;
13653 }
13654
13655 /* Check various uses of TYPE_BINFO. */
13656 if (RECORD_OR_UNION_TYPE_P (t))
13657 {
13658 if (!TYPE_BINFO (t))
13659 ;
13660 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13661 {
13662 error ("TYPE_BINFO is not TREE_BINFO");
13663 debug_tree (TYPE_BINFO (t));
13664 error_found = true;
13665 }
13666 /* FIXME: Java builds invalid empty binfos that do not have
13667 TREE_TYPE set. */
13668 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13669 {
13670 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13671 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13672 error_found = true;
13673 }
13674 }
13675 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13676 {
13677 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13678 debug_tree (TYPE_LANG_SLOT_1 (t));
13679 error_found = true;
13680 }
13681
13682 /* Check various uses of TYPE_VALUES_RAW. */
13683 if (TREE_CODE (t) == ENUMERAL_TYPE)
13684 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13685 {
13686 tree value = TREE_VALUE (l);
13687 tree name = TREE_PURPOSE (l);
13688
13689 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13690 CONST_DECL of ENUMERAL TYPE. */
13691 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13692 {
13693 error ("Enum value is not CONST_DECL or INTEGER_CST");
13694 debug_tree (value);
13695 debug_tree (name);
13696 error_found = true;
13697 }
13698 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13699 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13700 {
13701 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13702 debug_tree (value);
13703 debug_tree (name);
13704 error_found = true;
13705 }
13706 if (TREE_CODE (name) != IDENTIFIER_NODE)
13707 {
13708 error ("Enum value name is not IDENTIFIER_NODE");
13709 debug_tree (value);
13710 debug_tree (name);
13711 error_found = true;
13712 }
13713 }
13714 else if (TREE_CODE (t) == ARRAY_TYPE)
13715 {
13716 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13717 {
13718 error ("Array TYPE_DOMAIN is not integer type");
13719 debug_tree (TYPE_DOMAIN (t));
13720 error_found = true;
13721 }
13722 }
13723 else if (RECORD_OR_UNION_TYPE_P (t))
13724 {
13725 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13726 {
13727 error ("TYPE_FIELDS defined in incomplete type");
13728 error_found = true;
13729 }
13730 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13731 {
13732 /* TODO: verify properties of decls. */
13733 if (TREE_CODE (fld) == FIELD_DECL)
13734 ;
13735 else if (TREE_CODE (fld) == TYPE_DECL)
13736 ;
13737 else if (TREE_CODE (fld) == CONST_DECL)
13738 ;
13739 else if (TREE_CODE (fld) == VAR_DECL)
13740 ;
13741 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13742 ;
13743 else if (TREE_CODE (fld) == USING_DECL)
13744 ;
13745 else
13746 {
13747 error ("Wrong tree in TYPE_FIELDS list");
13748 debug_tree (fld);
13749 error_found = true;
13750 }
13751 }
13752 }
13753 else if (TREE_CODE (t) == INTEGER_TYPE
13754 || TREE_CODE (t) == BOOLEAN_TYPE
13755 || TREE_CODE (t) == OFFSET_TYPE
13756 || TREE_CODE (t) == REFERENCE_TYPE
13757 || TREE_CODE (t) == NULLPTR_TYPE
13758 || TREE_CODE (t) == POINTER_TYPE)
13759 {
13760 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13761 {
13762 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13763 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13764 error_found = true;
13765 }
13766 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13767 {
13768 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13769 debug_tree (TYPE_CACHED_VALUES (t));
13770 error_found = true;
13771 }
13772 /* Verify just enough of cache to ensure that no one copied it to new type.
13773 All copying should go by copy_node that should clear it. */
13774 else if (TYPE_CACHED_VALUES_P (t))
13775 {
13776 int i;
13777 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13778 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13779 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13780 {
13781 error ("wrong TYPE_CACHED_VALUES entry");
13782 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13783 error_found = true;
13784 break;
13785 }
13786 }
13787 }
13788 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13789 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13790 {
13791 /* C++ FE uses TREE_PURPOSE to store initial values. */
13792 if (TREE_PURPOSE (l) && in_lto_p)
13793 {
13794 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13795 debug_tree (l);
13796 error_found = true;
13797 }
13798 if (!TYPE_P (TREE_VALUE (l)))
13799 {
13800 error ("Wrong entry in TYPE_ARG_TYPES list");
13801 debug_tree (l);
13802 error_found = true;
13803 }
13804 }
13805 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13806 {
13807 error ("TYPE_VALUES_RAW field is non-NULL");
13808 debug_tree (TYPE_VALUES_RAW (t));
13809 error_found = true;
13810 }
13811 if (TREE_CODE (t) != INTEGER_TYPE
13812 && TREE_CODE (t) != BOOLEAN_TYPE
13813 && TREE_CODE (t) != OFFSET_TYPE
13814 && TREE_CODE (t) != REFERENCE_TYPE
13815 && TREE_CODE (t) != NULLPTR_TYPE
13816 && TREE_CODE (t) != POINTER_TYPE
13817 && TYPE_CACHED_VALUES_P (t))
13818 {
13819 error ("TYPE_CACHED_VALUES_P is set while it should not");
13820 error_found = true;
13821 }
13822 if (TYPE_STRING_FLAG (t)
13823 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13824 {
13825 error ("TYPE_STRING_FLAG is set on wrong type code");
13826 error_found = true;
13827 }
13828 else if (TYPE_STRING_FLAG (t))
13829 {
13830 const_tree b = t;
13831 if (TREE_CODE (b) == ARRAY_TYPE)
13832 b = TREE_TYPE (t);
13833 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13834 that is 32bits. */
13835 if (TREE_CODE (b) != INTEGER_TYPE)
13836 {
13837 error ("TYPE_STRING_FLAG is set on type that does not look like "
13838 "char nor array of chars");
13839 error_found = true;
13840 }
13841 }
13842
13843 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13844 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13845 of a type. */
13846 if (TREE_CODE (t) == METHOD_TYPE
13847 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13848 {
13849 error ("TYPE_METHOD_BASETYPE is not main variant");
13850 error_found = true;
13851 }
13852
13853 if (error_found)
13854 {
13855 debug_tree (const_cast <tree> (t));
13856 internal_error ("verify_type failed");
13857 }
13858 }
13859
13860
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.

   ARG must be a PARM_DECL of pointer (or pointer-to-member/OFFSET)
   type.  Relies on cfun, so only valid while a function body is being
   compiled.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  /* Only pointer-like parameters can meaningfully be non-null.  */
  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  /* Walk the attribute list of the current function's type; each loop
     iteration advances to the next "nonnull" attribute found by
     lookup_attribute (attrs is re-assigned inside the body).  */
  fntype = TREE_TYPE (cfun->decl);
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      /* ARG must be one of the current function's parameters.  */
      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
13923
/* Given location LOC, strip away any packed range information
   or ad-hoc information, returning the bare "caret" location.  */

location_t
get_pure_location (location_t loc)
{
  /* Ad-hoc locations wrap a real locus plus side data (block/range);
     unwrap to the underlying locus first.  */
  if (IS_ADHOC_LOC (loc))
    loc
      = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;

  /* Locations in the macro-expansion region carry no packed range
     bits; return them unchanged.  */
  if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
    return loc;

  /* Reserved locations (e.g. UNKNOWN_LOCATION, BUILTINS_LOCATION)
     are likewise returned as-is.  */
  if (loc < RESERVED_LOCATION_COUNT)
    return loc;

  const line_map *map = linemap_lookup (line_table, loc);
  const line_map_ordinary *ordmap = linemap_check_ordinary (map);

  /* Ordinary locations pack range data in the low m_range_bits;
     mask them off to get the pure location.  */
  return loc & ~((1 << ordmap->m_range_bits) - 1);
}
13945
13946 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13947 information. */
13948
13949 location_t
13950 set_block (location_t loc, tree block)
13951 {
13952 location_t pure_loc = get_pure_location (loc);
13953 source_range src_range = get_range_from_loc (line_table, loc);
13954 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13955 }
13956
13957 location_t
13958 set_source_range (tree expr, location_t start, location_t finish)
13959 {
13960 source_range src_range;
13961 src_range.m_start = start;
13962 src_range.m_finish = finish;
13963 return set_source_range (expr, src_range);
13964 }
13965
13966 location_t
13967 set_source_range (tree expr, source_range src_range)
13968 {
13969 if (!EXPR_P (expr))
13970 return UNKNOWN_LOCATION;
13971
13972 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13973 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13974 pure_loc,
13975 src_range,
13976 NULL);
13977 SET_EXPR_LOCATION (expr, adhoc);
13978 return adhoc;
13979 }
13980
13981 location_t
13982 make_location (location_t caret, location_t start, location_t finish)
13983 {
13984 location_t pure_loc = get_pure_location (caret);
13985 source_range src_range;
13986 src_range.m_start = start;
13987 src_range.m_finish = finish;
13988 location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
13989 pure_loc,
13990 src_range,
13991 NULL);
13992 return combined_loc;
13993 }
13994
13995 /* Return the name of combined function FN, for debugging purposes. */
13996
13997 const char *
13998 combined_fn_name (combined_fn fn)
13999 {
14000 if (builtin_fn_p (fn))
14001 {
14002 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14003 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14004 }
14005 else
14006 return internal_fn_name (as_internal_fn (fn));
14007 }
14008
14009 #include "gt-tree.h"