1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64
65 /* Tree code classes. */
66
67 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
68 #define END_OF_BASE_TREE_CODES tcc_exceptional,
69
70 const enum tree_code_class tree_code_type[] = {
71 #include "all-tree.def"
72 };
73
74 #undef DEFTREECODE
75 #undef END_OF_BASE_TREE_CODES
76
77 /* Table indexed by tree code giving number of expression
78 operands beyond the fixed part of the node structure.
79 Not used for types or decls. */
80
81 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
82 #define END_OF_BASE_TREE_CODES 0,
83
84 const unsigned char tree_code_length[] = {
85 #include "all-tree.def"
86 };
87
88 #undef DEFTREECODE
89 #undef END_OF_BASE_TREE_CODES
90
91 /* Names of tree components.
92 Used for printing out the tree and error messages. */
93 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
94 #define END_OF_BASE_TREE_CODES "@dummy",
95
96 static const char *const tree_code_name[] = {
97 #include "all-tree.def"
98 };
99
100 #undef DEFTREECODE
101 #undef END_OF_BASE_TREE_CODES
102
103 /* Each tree code class has an associated string representation.
104 These must correspond to the tree_code_class entries. */
105
106 const char *const tree_code_class_strings[] =
107 {
108 "exceptional",
109 "constant",
110 "type",
111 "declaration",
112 "reference",
113 "comparison",
114 "unary",
115 "binary",
116 "statement",
117 "vl_exp",
118 "expression"
119 };
120
121 /* obstack.[ch] explicitly declined to prototype this. */
122 extern int _obstack_allocated_p (struct obstack *h, void *obj);
123
124 /* Statistics-gathering stuff. */
125
126 static int tree_code_counts[MAX_TREE_CODES];
127 int tree_node_counts[(int) all_kinds];
128 int tree_node_sizes[(int) all_kinds];
129
130 /* Keep in sync with tree.h:enum tree_node_kind. */
131 static const char * const tree_node_kind_names[] = {
132 "decls",
133 "types",
134 "blocks",
135 "stmts",
136 "refs",
137 "exprs",
138 "constants",
139 "identifiers",
140 "vecs",
141 "binfos",
142 "ssa names",
143 "constructors",
144 "random kinds",
145 "lang_decl kinds",
146 "lang_type kinds",
147 "omp clauses",
148 };
149
150 /* Unique id for next decl created. */
151 static GTY(()) int next_decl_uid;
152 /* Unique id for next type created. */
153 static GTY(()) int next_type_uid = 1;
154 /* Unique id for next debug decl created. Use negative numbers,
155 to catch erroneous uses. */
156 static GTY(()) int next_debug_decl_uid;
157
158 /* Since we cannot rehash a type after it is in the table, we have to
159 keep the hash code. */
160
161 struct GTY((for_user)) type_hash {
162 unsigned long hash;
163 tree type;
164 };
165
166 /* Initial size of the hash table (rounded to next prime). */
167 #define TYPE_HASH_INITIAL_SIZE 1000
168
169 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
170 {
171 static hashval_t hash (type_hash *t) { return t->hash; }
172 static bool equal (type_hash *a, type_hash *b);
173
174 static int
175 keep_cache_entry (type_hash *&t)
176 {
177 return ggc_marked_p (t->type);
178 }
179 };
180
181 /* Now here is the hash table. When recording a type, it is added to
182 the slot whose index is the hash code. Note that the hash table is
183 used for several kinds of types (function types, array types and
184 array index range types, for now). While all these live in the
185 same table, they are completely independent, and the hash code is
186 computed differently for each of these. */
187
188 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
189
190 /* Hash table and temporary node for larger integer const values. */
191 static GTY (()) tree int_cst_node;
192
193 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
194 {
195 static hashval_t hash (tree t);
196 static bool equal (tree x, tree y);
197 };
198
199 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
200
201 /* Hash table for optimization flags and target option flags. Use the same
202 hash table for both sets of options. Nodes for building the current
203 optimization and target option nodes. The assumption is most of the time
204 the options created will already be in the hash table, so we avoid
205 allocating and freeing up a node repeatedly. */
206 static GTY (()) tree cl_optimization_node;
207 static GTY (()) tree cl_target_option_node;
208
209 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
210 {
211 static hashval_t hash (tree t);
212 static bool equal (tree x, tree y);
213 };
214
215 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
216
217 /* General tree->tree mapping structure for use in hash tables. */
218
219
220 static GTY ((cache))
221 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
222
223 static GTY ((cache))
224 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
225
226 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
227 {
228 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
229
230 static bool
231 equal (tree_vec_map *a, tree_vec_map *b)
232 {
233 return a->base.from == b->base.from;
234 }
235
236 static int
237 keep_cache_entry (tree_vec_map *&m)
238 {
239 return ggc_marked_p (m->base.from);
240 }
241 };
242
243 static GTY ((cache))
244 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
245
246 static void set_type_quals (tree, int);
247 static void print_type_hash_statistics (void);
248 static void print_debug_expr_statistics (void);
249 static void print_value_expr_statistics (void);
250 static void type_hash_list (const_tree, inchash::hash &);
251 static void attribute_hash_list (const_tree, inchash::hash &);
252
253 tree global_trees[TI_MAX];
254 tree integer_types[itk_none];
255
256 bool int_n_enabled_p[NUM_INT_N_ENTS];
257 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
258
259 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
260
261 /* Number of operands for each OpenMP clause. */
262 unsigned const char omp_clause_num_ops[] =
263 {
264 0, /* OMP_CLAUSE_ERROR */
265 1, /* OMP_CLAUSE_PRIVATE */
266 1, /* OMP_CLAUSE_SHARED */
267 1, /* OMP_CLAUSE_FIRSTPRIVATE */
268 2, /* OMP_CLAUSE_LASTPRIVATE */
269 5, /* OMP_CLAUSE_REDUCTION */
270 1, /* OMP_CLAUSE_COPYIN */
271 1, /* OMP_CLAUSE_COPYPRIVATE */
272 3, /* OMP_CLAUSE_LINEAR */
273 2, /* OMP_CLAUSE_ALIGNED */
274 1, /* OMP_CLAUSE_DEPEND */
275 1, /* OMP_CLAUSE_UNIFORM */
276 1, /* OMP_CLAUSE_TO_DECLARE */
277 1, /* OMP_CLAUSE_LINK */
278 2, /* OMP_CLAUSE_FROM */
279 2, /* OMP_CLAUSE_TO */
280 2, /* OMP_CLAUSE_MAP */
281 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
282 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
283 2, /* OMP_CLAUSE__CACHE_ */
284 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
285 1, /* OMP_CLAUSE_USE_DEVICE */
286 2, /* OMP_CLAUSE_GANG */
287 1, /* OMP_CLAUSE_ASYNC */
288 1, /* OMP_CLAUSE_WAIT */
289 0, /* OMP_CLAUSE_AUTO */
290 0, /* OMP_CLAUSE_SEQ */
291 1, /* OMP_CLAUSE__LOOPTEMP_ */
292 1, /* OMP_CLAUSE_IF */
293 1, /* OMP_CLAUSE_NUM_THREADS */
294 1, /* OMP_CLAUSE_SCHEDULE */
295 0, /* OMP_CLAUSE_NOWAIT */
296 1, /* OMP_CLAUSE_ORDERED */
297 0, /* OMP_CLAUSE_DEFAULT */
298 3, /* OMP_CLAUSE_COLLAPSE */
299 0, /* OMP_CLAUSE_UNTIED */
300 1, /* OMP_CLAUSE_FINAL */
301 0, /* OMP_CLAUSE_MERGEABLE */
302 1, /* OMP_CLAUSE_DEVICE */
303 1, /* OMP_CLAUSE_DIST_SCHEDULE */
304 0, /* OMP_CLAUSE_INBRANCH */
305 0, /* OMP_CLAUSE_NOTINBRANCH */
306 1, /* OMP_CLAUSE_NUM_TEAMS */
307 1, /* OMP_CLAUSE_THREAD_LIMIT */
308 0, /* OMP_CLAUSE_PROC_BIND */
309 1, /* OMP_CLAUSE_SAFELEN */
310 1, /* OMP_CLAUSE_SIMDLEN */
311 0, /* OMP_CLAUSE_FOR */
312 0, /* OMP_CLAUSE_PARALLEL */
313 0, /* OMP_CLAUSE_SECTIONS */
314 0, /* OMP_CLAUSE_TASKGROUP */
315 1, /* OMP_CLAUSE_PRIORITY */
316 1, /* OMP_CLAUSE_GRAINSIZE */
317 1, /* OMP_CLAUSE_NUM_TASKS */
318 0, /* OMP_CLAUSE_NOGROUP */
319 0, /* OMP_CLAUSE_THREADS */
320 0, /* OMP_CLAUSE_SIMD */
321 1, /* OMP_CLAUSE_HINT */
322 0, /* OMP_CLAUSE_DEFAULTMAP */
323 1, /* OMP_CLAUSE__SIMDUID_ */
324 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
325 0, /* OMP_CLAUSE_INDEPENDENT */
326 1, /* OMP_CLAUSE_WORKER */
327 1, /* OMP_CLAUSE_VECTOR */
328 1, /* OMP_CLAUSE_NUM_GANGS */
329 1, /* OMP_CLAUSE_NUM_WORKERS */
330 1, /* OMP_CLAUSE_VECTOR_LENGTH */
331 1, /* OMP_CLAUSE_TILE */
332 };
333
334 const char * const omp_clause_code_name[] =
335 {
336 "error_clause",
337 "private",
338 "shared",
339 "firstprivate",
340 "lastprivate",
341 "reduction",
342 "copyin",
343 "copyprivate",
344 "linear",
345 "aligned",
346 "depend",
347 "uniform",
348 "to",
349 "link",
350 "from",
351 "to",
352 "map",
353 "use_device_ptr",
354 "is_device_ptr",
355 "_cache_",
356 "device_resident",
357 "use_device",
358 "gang",
359 "async",
360 "wait",
361 "auto",
362 "seq",
363 "_looptemp_",
364 "if",
365 "num_threads",
366 "schedule",
367 "nowait",
368 "ordered",
369 "default",
370 "collapse",
371 "untied",
372 "final",
373 "mergeable",
374 "device",
375 "dist_schedule",
376 "inbranch",
377 "notinbranch",
378 "num_teams",
379 "thread_limit",
380 "proc_bind",
381 "safelen",
382 "simdlen",
383 "for",
384 "parallel",
385 "sections",
386 "taskgroup",
387 "priority",
388 "grainsize",
389 "num_tasks",
390 "nogroup",
391 "threads",
392 "simd",
393 "hint",
394 "defaultmap",
395 "_simduid_",
396 "_Cilk_for_count_",
397 "independent",
398 "worker",
399 "vector",
400 "num_gangs",
401 "num_workers",
402 "vector_length",
403 "tile"
404 };
405
406
407 /* Return the tree node structure used by tree code CODE. */
408
409 static inline enum tree_node_structure_enum
410 tree_node_structure_for_code (enum tree_code code)
411 {
412 switch (TREE_CODE_CLASS (code))
413 {
414 case tcc_declaration:
415 {
416 switch (code)
417 {
418 case FIELD_DECL:
419 return TS_FIELD_DECL;
420 case PARM_DECL:
421 return TS_PARM_DECL;
422 case VAR_DECL:
423 return TS_VAR_DECL;
424 case LABEL_DECL:
425 return TS_LABEL_DECL;
426 case RESULT_DECL:
427 return TS_RESULT_DECL;
428 case DEBUG_EXPR_DECL:
429 return TS_DECL_WRTL;
430 case CONST_DECL:
431 return TS_CONST_DECL;
432 case TYPE_DECL:
433 return TS_TYPE_DECL;
434 case FUNCTION_DECL:
435 return TS_FUNCTION_DECL;
436 case TRANSLATION_UNIT_DECL:
437 return TS_TRANSLATION_UNIT_DECL;
438 default:
439 return TS_DECL_NON_COMMON;
440 }
441 }
442 case tcc_type:
443 return TS_TYPE_NON_COMMON;
444 case tcc_reference:
445 case tcc_comparison:
446 case tcc_unary:
447 case tcc_binary:
448 case tcc_expression:
449 case tcc_statement:
450 case tcc_vl_exp:
451 return TS_EXP;
452 default: /* tcc_constant and tcc_exceptional */
453 break;
454 }
455 switch (code)
456 {
457 /* tcc_constant cases. */
458 case VOID_CST: return TS_TYPED;
459 case INTEGER_CST: return TS_INT_CST;
460 case REAL_CST: return TS_REAL_CST;
461 case FIXED_CST: return TS_FIXED_CST;
462 case COMPLEX_CST: return TS_COMPLEX;
463 case VECTOR_CST: return TS_VECTOR;
464 case STRING_CST: return TS_STRING;
465 /* tcc_exceptional cases. */
466 case ERROR_MARK: return TS_COMMON;
467 case IDENTIFIER_NODE: return TS_IDENTIFIER;
468 case TREE_LIST: return TS_LIST;
469 case TREE_VEC: return TS_VEC;
470 case SSA_NAME: return TS_SSA_NAME;
471 case PLACEHOLDER_EXPR: return TS_COMMON;
472 case STATEMENT_LIST: return TS_STATEMENT_LIST;
473 case BLOCK: return TS_BLOCK;
474 case CONSTRUCTOR: return TS_CONSTRUCTOR;
475 case TREE_BINFO: return TS_BINFO;
476 case OMP_CLAUSE: return TS_OMP_CLAUSE;
477 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
478 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
479
480 default:
481 gcc_unreachable ();
482 }
483 }
484
485
486 /* Initialize tree_contains_struct to describe the hierarchy of tree
487 nodes. */
488
489 static void
490 initialize_tree_contains_struct (void)
491 {
492 unsigned i;
493
494 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
495 {
496 enum tree_code code;
497 enum tree_node_structure_enum ts_code;
498
499 code = (enum tree_code) i;
500 ts_code = tree_node_structure_for_code (code);
501
502 /* Mark the TS structure itself. */
503 tree_contains_struct[code][ts_code] = 1;
504
505 /* Mark all the structures that TS is derived from. */
506 switch (ts_code)
507 {
508 case TS_TYPED:
509 case TS_BLOCK:
510 MARK_TS_BASE (code);
511 break;
512
513 case TS_COMMON:
514 case TS_INT_CST:
515 case TS_REAL_CST:
516 case TS_FIXED_CST:
517 case TS_VECTOR:
518 case TS_STRING:
519 case TS_COMPLEX:
520 case TS_SSA_NAME:
521 case TS_CONSTRUCTOR:
522 case TS_EXP:
523 case TS_STATEMENT_LIST:
524 MARK_TS_TYPED (code);
525 break;
526
527 case TS_IDENTIFIER:
528 case TS_DECL_MINIMAL:
529 case TS_TYPE_COMMON:
530 case TS_LIST:
531 case TS_VEC:
532 case TS_BINFO:
533 case TS_OMP_CLAUSE:
534 case TS_OPTIMIZATION:
535 case TS_TARGET_OPTION:
536 MARK_TS_COMMON (code);
537 break;
538
539 case TS_TYPE_WITH_LANG_SPECIFIC:
540 MARK_TS_TYPE_COMMON (code);
541 break;
542
543 case TS_TYPE_NON_COMMON:
544 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
545 break;
546
547 case TS_DECL_COMMON:
548 MARK_TS_DECL_MINIMAL (code);
549 break;
550
551 case TS_DECL_WRTL:
552 case TS_CONST_DECL:
553 MARK_TS_DECL_COMMON (code);
554 break;
555
556 case TS_DECL_NON_COMMON:
557 MARK_TS_DECL_WITH_VIS (code);
558 break;
559
560 case TS_DECL_WITH_VIS:
561 case TS_PARM_DECL:
562 case TS_LABEL_DECL:
563 case TS_RESULT_DECL:
564 MARK_TS_DECL_WRTL (code);
565 break;
566
567 case TS_FIELD_DECL:
568 MARK_TS_DECL_COMMON (code);
569 break;
570
571 case TS_VAR_DECL:
572 MARK_TS_DECL_WITH_VIS (code);
573 break;
574
575 case TS_TYPE_DECL:
576 case TS_FUNCTION_DECL:
577 MARK_TS_DECL_NON_COMMON (code);
578 break;
579
580 case TS_TRANSLATION_UNIT_DECL:
581 MARK_TS_DECL_COMMON (code);
582 break;
583
584 default:
585 gcc_unreachable ();
586 }
587 }
588
589 /* Basic consistency checks for attributes used in fold. */
590 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
591 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
592 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
593 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
594 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
601 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
602 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
603 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
606 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
607 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
608 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
615 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
616 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
617 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
618 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
619 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
620 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
621 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
622 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
623 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
624 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
625 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
626 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
627 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
629 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
630 }
631
632
633 /* Init tree.c. */
634
635 void
636 init_ttree (void)
637 {
638 /* Initialize the hash table of types. */
639 type_hash_table
640 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
641
642 debug_expr_for_decl
643 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
644
645 value_expr_for_decl
646 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
647
648 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
649
650 int_cst_node = make_int_cst (1, 1);
651
652 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
653
654 cl_optimization_node = make_node (OPTIMIZATION_NODE);
655 cl_target_option_node = make_node (TARGET_OPTION_NODE);
656
657 /* Initialize the tree_contains_struct array. */
658 initialize_tree_contains_struct ();
659 lang_hooks.init_ts ();
660 }
661
662 \f
663 /* The name of the object as the assembler will see it (but before any
664 translations made by ASM_OUTPUT_LABELREF). Often this is the same
665 as DECL_NAME. It is an IDENTIFIER_NODE. */
666 tree
667 decl_assembler_name (tree decl)
668 {
669 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
670 lang_hooks.set_decl_assembler_name (decl);
671 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
672 }
673
674 /* When the target supports COMDAT groups, this indicates which group the
675 DECL is associated with. This can be either an IDENTIFIER_NODE or a
676 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
677 tree
678 decl_comdat_group (const_tree node)
679 {
680 struct symtab_node *snode = symtab_node::get (node);
681 if (!snode)
682 return NULL;
683 return snode->get_comdat_group ();
684 }
685
686 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
687 tree
688 decl_comdat_group_id (const_tree node)
689 {
690 struct symtab_node *snode = symtab_node::get (node);
691 if (!snode)
692 return NULL;
693 return snode->get_comdat_group_id ();
694 }
695
696 /* When the target supports named sections, return the name of the section
697 NODE is placed in, or NULL if it is in no section. */
698 const char *
699 decl_section_name (const_tree node)
700 {
701 struct symtab_node *snode = symtab_node::get (node);
702 if (!snode)
703 return NULL;
704 return snode->get_section ();
705 }
706
707 /* Set the section name of NODE to the string VALUE, or clear the
708 section when VALUE is NULL. */
709 void
710 set_decl_section_name (tree node, const char *value)
711 {
712 struct symtab_node *snode;
713
714 if (value == NULL)
715 {
716 snode = symtab_node::get (node);
717 if (!snode)
718 return;
719 }
720 else if (TREE_CODE (node) == VAR_DECL)
721 snode = varpool_node::get_create (node);
722 else
723 snode = cgraph_node::get_create (node);
724 snode->set_section (value);
725 }
726
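/* Illustrative sketch, added for exposition and not part of the original
   source.  Given a hypothetical VAR_DECL "decl", a caller could place it
   in a named section and later clear the section again:

     set_decl_section_name (decl, ".my_data");
     set_decl_section_name (decl, NULL);

   The first call creates the varpool node if it does not exist yet; the
   second only clears an existing section, since the NULL case above
   deliberately avoids creating a symtab node.  */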
727 /* Return TLS model of a variable NODE. */
728 enum tls_model
729 decl_tls_model (const_tree node)
730 {
731 struct varpool_node *snode = varpool_node::get (node);
732 if (!snode)
733 return TLS_MODEL_NONE;
734 return snode->tls_model;
735 }
736
737 /* Set TLS model of variable NODE to MODEL. */
738 void
739 set_decl_tls_model (tree node, enum tls_model model)
740 {
741 struct varpool_node *vnode;
742
743 if (model == TLS_MODEL_NONE)
744 {
745 vnode = varpool_node::get (node);
746 if (!vnode)
747 return;
748 }
749 else
750 vnode = varpool_node::get_create (node);
751 vnode->tls_model = model;
752 }
753
754 /* Compute the number of bytes occupied by a tree with code CODE.
755 This function cannot be used for nodes that have variable sizes,
756 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
757 size_t
758 tree_code_size (enum tree_code code)
759 {
760 switch (TREE_CODE_CLASS (code))
761 {
762 case tcc_declaration: /* A decl node */
763 {
764 switch (code)
765 {
766 case FIELD_DECL:
767 return sizeof (struct tree_field_decl);
768 case PARM_DECL:
769 return sizeof (struct tree_parm_decl);
770 case VAR_DECL:
771 return sizeof (struct tree_var_decl);
772 case LABEL_DECL:
773 return sizeof (struct tree_label_decl);
774 case RESULT_DECL:
775 return sizeof (struct tree_result_decl);
776 case CONST_DECL:
777 return sizeof (struct tree_const_decl);
778 case TYPE_DECL:
779 return sizeof (struct tree_type_decl);
780 case FUNCTION_DECL:
781 return sizeof (struct tree_function_decl);
782 case DEBUG_EXPR_DECL:
783 return sizeof (struct tree_decl_with_rtl);
784 case TRANSLATION_UNIT_DECL:
785 return sizeof (struct tree_translation_unit_decl);
786 case NAMESPACE_DECL:
787 case IMPORTED_DECL:
788 case NAMELIST_DECL:
789 return sizeof (struct tree_decl_non_common);
790 default:
791 return lang_hooks.tree_size (code);
792 }
793 }
794
795 case tcc_type: /* a type node */
796 return sizeof (struct tree_type_non_common);
797
798 case tcc_reference: /* a reference */
799 case tcc_expression: /* an expression */
800 case tcc_statement: /* an expression with side effects */
801 case tcc_comparison: /* a comparison expression */
802 case tcc_unary: /* a unary arithmetic expression */
803 case tcc_binary: /* a binary arithmetic expression */
804 return (sizeof (struct tree_exp)
805 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
806
807 case tcc_constant: /* a constant */
808 switch (code)
809 {
810 case VOID_CST: return sizeof (struct tree_typed);
811 case INTEGER_CST: gcc_unreachable ();
812 case REAL_CST: return sizeof (struct tree_real_cst);
813 case FIXED_CST: return sizeof (struct tree_fixed_cst);
814 case COMPLEX_CST: return sizeof (struct tree_complex);
815 case VECTOR_CST: return sizeof (struct tree_vector);
816 case STRING_CST: gcc_unreachable ();
817 default:
818 return lang_hooks.tree_size (code);
819 }
820
821 case tcc_exceptional: /* something random, like an identifier. */
822 switch (code)
823 {
824 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
825 case TREE_LIST: return sizeof (struct tree_list);
826
827 case ERROR_MARK:
828 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
829
830 case TREE_VEC:
831 case OMP_CLAUSE: gcc_unreachable ();
832
833 case SSA_NAME: return sizeof (struct tree_ssa_name);
834
835 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
836 case BLOCK: return sizeof (struct tree_block);
837 case CONSTRUCTOR: return sizeof (struct tree_constructor);
838 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
839 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
840
841 default:
842 return lang_hooks.tree_size (code);
843 }
844
845 default:
846 gcc_unreachable ();
847 }
848 }
849
850 /* Compute the number of bytes occupied by NODE. This routine only
851 looks at TREE_CODE, except for those nodes that have variable sizes. */
852 size_t
853 tree_size (const_tree node)
854 {
855 const enum tree_code code = TREE_CODE (node);
856 switch (code)
857 {
858 case INTEGER_CST:
859 return (sizeof (struct tree_int_cst)
860 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
861
862 case TREE_BINFO:
863 return (offsetof (struct tree_binfo, base_binfos)
864 + vec<tree, va_gc>
865 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
866
867 case TREE_VEC:
868 return (sizeof (struct tree_vec)
869 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
870
871 case VECTOR_CST:
872 return (sizeof (struct tree_vector)
873 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
874
875 case STRING_CST:
876 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
877
878 case OMP_CLAUSE:
879 return (sizeof (struct tree_omp_clause)
880 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
881 * sizeof (tree));
882
883 default:
884 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
885 return (sizeof (struct tree_exp)
886 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
887 else
888 return tree_code_size (code);
889 }
890 }
891
892 /* Record interesting allocation statistics for a tree node with CODE
893 and LENGTH. */
894
895 static void
896 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
897 size_t length ATTRIBUTE_UNUSED)
898 {
899 enum tree_code_class type = TREE_CODE_CLASS (code);
900 tree_node_kind kind;
901
902 if (!GATHER_STATISTICS)
903 return;
904
905 switch (type)
906 {
907 case tcc_declaration: /* A decl node */
908 kind = d_kind;
909 break;
910
911 case tcc_type: /* a type node */
912 kind = t_kind;
913 break;
914
915 case tcc_statement: /* an expression with side effects */
916 kind = s_kind;
917 break;
918
919 case tcc_reference: /* a reference */
920 kind = r_kind;
921 break;
922
923 case tcc_expression: /* an expression */
924 case tcc_comparison: /* a comparison expression */
925 case tcc_unary: /* a unary arithmetic expression */
926 case tcc_binary: /* a binary arithmetic expression */
927 kind = e_kind;
928 break;
929
930 case tcc_constant: /* a constant */
931 kind = c_kind;
932 break;
933
934 case tcc_exceptional: /* something random, like an identifier. */
935 switch (code)
936 {
937 case IDENTIFIER_NODE:
938 kind = id_kind;
939 break;
940
941 case TREE_VEC:
942 kind = vec_kind;
943 break;
944
945 case TREE_BINFO:
946 kind = binfo_kind;
947 break;
948
949 case SSA_NAME:
950 kind = ssa_name_kind;
951 break;
952
953 case BLOCK:
954 kind = b_kind;
955 break;
956
957 case CONSTRUCTOR:
958 kind = constr_kind;
959 break;
960
961 case OMP_CLAUSE:
962 kind = omp_clause_kind;
963 break;
964
965 default:
966 kind = x_kind;
967 break;
968 }
969 break;
970
971 case tcc_vl_exp:
972 kind = e_kind;
973 break;
974
975 default:
976 gcc_unreachable ();
977 }
978
979 tree_code_counts[(int) code]++;
980 tree_node_counts[(int) kind]++;
981 tree_node_sizes[(int) kind] += length;
982 }
983
984 /* Allocate and return a new UID from the DECL_UID namespace. */
985
986 int
987 allocate_decl_uid (void)
988 {
989 return next_decl_uid++;
990 }
991
992 /* Return a newly allocated node of code CODE. For decl and type
993 nodes, some other fields are initialized. The rest of the node is
994 initialized to zero. This function cannot be used for TREE_VEC,
995 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
996 tree_code_size.
997
998 Achoo! I got a code in the node. */
999
1000 tree
1001 make_node_stat (enum tree_code code MEM_STAT_DECL)
1002 {
1003 tree t;
1004 enum tree_code_class type = TREE_CODE_CLASS (code);
1005 size_t length = tree_code_size (code);
1006
1007 record_node_allocation_statistics (code, length);
1008
1009 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1010 TREE_SET_CODE (t, code);
1011
1012 switch (type)
1013 {
1014 case tcc_statement:
1015 TREE_SIDE_EFFECTS (t) = 1;
1016 break;
1017
1018 case tcc_declaration:
1019 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1020 {
1021 if (code == FUNCTION_DECL)
1022 {
1023 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1024 DECL_MODE (t) = FUNCTION_MODE;
1025 }
1026 else
1027 DECL_ALIGN (t) = 1;
1028 }
1029 DECL_SOURCE_LOCATION (t) = input_location;
1030 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1031 DECL_UID (t) = --next_debug_decl_uid;
1032 else
1033 {
1034 DECL_UID (t) = allocate_decl_uid ();
1035 SET_DECL_PT_UID (t, -1);
1036 }
1037 if (TREE_CODE (t) == LABEL_DECL)
1038 LABEL_DECL_UID (t) = -1;
1039
1040 break;
1041
1042 case tcc_type:
1043 TYPE_UID (t) = next_type_uid++;
1044 TYPE_ALIGN (t) = BITS_PER_UNIT;
1045 TYPE_USER_ALIGN (t) = 0;
1046 TYPE_MAIN_VARIANT (t) = t;
1047 TYPE_CANONICAL (t) = t;
1048
1049 /* Default to no attributes for type, but let target change that. */
1050 TYPE_ATTRIBUTES (t) = NULL_TREE;
1051 targetm.set_default_type_attributes (t);
1052
1053 /* We have not yet computed the alias set for this type. */
1054 TYPE_ALIAS_SET (t) = -1;
1055 break;
1056
1057 case tcc_constant:
1058 TREE_CONSTANT (t) = 1;
1059 break;
1060
1061 case tcc_expression:
1062 switch (code)
1063 {
1064 case INIT_EXPR:
1065 case MODIFY_EXPR:
1066 case VA_ARG_EXPR:
1067 case PREDECREMENT_EXPR:
1068 case PREINCREMENT_EXPR:
1069 case POSTDECREMENT_EXPR:
1070 case POSTINCREMENT_EXPR:
1071 /* All of these have side-effects, no matter what their
1072 operands are. */
1073 TREE_SIDE_EFFECTS (t) = 1;
1074 break;
1075
1076 default:
1077 break;
1078 }
1079 break;
1080
1081 case tcc_exceptional:
1082 switch (code)
1083 {
1084 case TARGET_OPTION_NODE:
1085 TREE_TARGET_OPTION(t)
1086 = ggc_cleared_alloc<struct cl_target_option> ();
1087 break;
1088
1089 case OPTIMIZATION_NODE:
1090 TREE_OPTIMIZATION (t)
1091 = ggc_cleared_alloc<struct cl_optimization> ();
1092 break;
1093
1094 default:
1095 break;
1096 }
1097 break;
1098
1099 default:
1100 /* Other classes need no special treatment. */
1101 break;
1102 }
1103
1104 return t;
1105 }
1106
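/* Illustrative sketch, added for exposition and not part of the original
   source.  make_node (the public wrapper around make_node_stat) returns a
   zeroed node with only the class-specific defaults above filled in, e.g.

     tree t = make_node (RECORD_TYPE);

   gives a type with a fresh TYPE_UID, TYPE_ALIGN of BITS_PER_UNIT,
   TYPE_MAIN_VARIANT and TYPE_CANONICAL pointing back at the node itself,
   and an uncomputed alias set (-1); the caller is expected to fill in the
   remaining fields and lay the type out.  */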
1107 /* Free tree node. */
1108
1109 void
1110 free_node (tree node)
1111 {
1112 enum tree_code code = TREE_CODE (node);
1113 if (GATHER_STATISTICS)
1114 {
1115 tree_code_counts[(int) TREE_CODE (node)]--;
1116 tree_node_counts[(int) t_kind]--;
1117 tree_node_sizes[(int) t_kind] -= tree_code_size (TREE_CODE (node));
1118 }
1119 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1120 vec_free (CONSTRUCTOR_ELTS (node));
1121 else if (code == BLOCK)
1122 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1123 else if (code == TREE_BINFO)
1124 vec_free (BINFO_BASE_ACCESSES (node));
1125 ggc_free (node);
1126 }
1127 \f
1128 /* Return a new node with the same contents as NODE except that its
1129 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1130
1131 tree
1132 copy_node_stat (tree node MEM_STAT_DECL)
1133 {
1134 tree t;
1135 enum tree_code code = TREE_CODE (node);
1136 size_t length;
1137
1138 gcc_assert (code != STATEMENT_LIST);
1139
1140 length = tree_size (node);
1141 record_node_allocation_statistics (code, length);
1142 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1143 memcpy (t, node, length);
1144
1145 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1146 TREE_CHAIN (t) = 0;
1147 TREE_ASM_WRITTEN (t) = 0;
1148 TREE_VISITED (t) = 0;
1149
1150 if (TREE_CODE_CLASS (code) == tcc_declaration)
1151 {
1152 if (code == DEBUG_EXPR_DECL)
1153 DECL_UID (t) = --next_debug_decl_uid;
1154 else
1155 {
1156 DECL_UID (t) = allocate_decl_uid ();
1157 if (DECL_PT_UID_SET_P (node))
1158 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1159 }
1160 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1161 && DECL_HAS_VALUE_EXPR_P (node))
1162 {
1163 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1164 DECL_HAS_VALUE_EXPR_P (t) = 1;
1165 }
1166 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1167 if (TREE_CODE (node) == VAR_DECL)
1168 {
1169 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1170 t->decl_with_vis.symtab_node = NULL;
1171 }
1172 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1173 {
1174 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1175 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1176 }
1177 if (TREE_CODE (node) == FUNCTION_DECL)
1178 {
1179 DECL_STRUCT_FUNCTION (t) = NULL;
1180 t->decl_with_vis.symtab_node = NULL;
1181 }
1182 }
1183 else if (TREE_CODE_CLASS (code) == tcc_type)
1184 {
1185 TYPE_UID (t) = next_type_uid++;
1186 /* The following is so that the debug code for
1187 the copy is different from the original type.
1188 The two statements usually duplicate each other
1189 (because they clear fields of the same union),
1190 but the optimizer should catch that. */
1191 TYPE_SYMTAB_POINTER (t) = 0;
1192 TYPE_SYMTAB_ADDRESS (t) = 0;
1193
1194 /* Do not copy the values cache. */
1195 if (TYPE_CACHED_VALUES_P (t))
1196 {
1197 TYPE_CACHED_VALUES_P (t) = 0;
1198 TYPE_CACHED_VALUES (t) = NULL_TREE;
1199 }
1200 }
1201 else if (code == TARGET_OPTION_NODE)
1202 {
1203 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1204 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1205 sizeof (struct cl_target_option));
1206 }
1207 else if (code == OPTIMIZATION_NODE)
1208 {
1209 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1210 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1211 sizeof (struct cl_optimization));
1212 }
1213
1214 return t;
1215 }
1216
1217 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1218 For example, this can copy a list made of TREE_LIST nodes. */
1219
1220 tree
1221 copy_list (tree list)
1222 {
1223 tree head;
1224 tree prev, next;
1225
1226 if (list == 0)
1227 return 0;
1228
1229 head = prev = copy_node (list);
1230 next = TREE_CHAIN (list);
1231 while (next)
1232 {
1233 TREE_CHAIN (prev) = copy_node (next);
1234 prev = TREE_CHAIN (prev);
1235 next = TREE_CHAIN (next);
1236 }
1237 return head;
1238 }
1239
1240 \f
1241 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1242 INTEGER_CST with value CST and type TYPE. */
1243
1244 static unsigned int
1245 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1246 {
1247 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1248 /* We need an extra zero HWI if CST is an unsigned integer with its
1249 upper bit set, and if CST occupies a whole number of HWIs. */
1250 if (TYPE_UNSIGNED (type)
1251 && wi::neg_p (cst)
1252 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1253 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1254 return cst.get_len ();
1255 }
1256
1257 /* Return a new INTEGER_CST with value CST and type TYPE. */
1258
1259 static tree
1260 build_new_int_cst (tree type, const wide_int &cst)
1261 {
1262 unsigned int len = cst.get_len ();
1263 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1264 tree nt = make_int_cst (len, ext_len);
1265
1266 if (len < ext_len)
1267 {
1268 --ext_len;
1269 TREE_INT_CST_ELT (nt, ext_len) = 0;
1270 for (unsigned int i = len; i < ext_len; ++i)
1271 TREE_INT_CST_ELT (nt, i) = -1;
1272 }
1273 else if (TYPE_UNSIGNED (type)
1274 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1275 {
1276 len--;
1277 TREE_INT_CST_ELT (nt, len)
1278 = zext_hwi (cst.elt (len),
1279 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1280 }
1281
1282 for (unsigned int i = 0; i < len; i++)
1283 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1284 TREE_TYPE (nt) = type;
1285 return nt;
1286 }
1287
1288 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1289
1290 tree
1291 build_int_cst (tree type, HOST_WIDE_INT low)
1292 {
1293 /* Support legacy code. */
1294 if (!type)
1295 type = integer_type_node;
1296
1297 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1298 }
1299
1300 tree
1301 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1302 {
1303 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1304 }
1305
1306 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1307
1308 tree
1309 build_int_cst_type (tree type, HOST_WIDE_INT low)
1310 {
1311 gcc_assert (type);
1312 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1313 }
1314
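/* Illustrative sketch, added for exposition and not part of the original
   source.  The signed and unsigned entry points differ only in how the
   HOST_WIDE_INT argument is extended to the type's precision:

     build_int_cst  (unsigned_type_node, -1);   sign-extends: all-ones value
     build_int_cstu (unsigned_type_node, 1);    zero-extends: the value 1

   Both funnel into wide_int_to_tree below, so equal values of the same
   type normally end up as the same shared INTEGER_CST node.  */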
1315 /* Constructs a tree of type TYPE with the value given by CST. The signedness
1316 of CST is assumed to be the same as the signedness of TYPE. */
1317
1318 tree
1319 double_int_to_tree (tree type, double_int cst)
1320 {
1321 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1322 }
1323
1324 /* We force the wide_int CST to the range of the type TYPE by sign or
1325 zero extending it. OVERFLOWABLE indicates if we are interested in
1326 overflow of the value: when >0 we are only interested in signed
1327 overflow, for <0 we are interested in any overflow. OVERFLOWED
1328 indicates whether overflow has already occurred. We force the
1329 constant's value to be within the range of TYPE (by setting to 0 or 1
1330 all the bits outside the type's range). We set TREE_OVERFLOW on the
1331 result if
1332 OVERFLOWED is nonzero,
1333 or OVERFLOWABLE is >0 and signed overflow occurs,
1334 or OVERFLOWABLE is <0 and any overflow occurs.
1335 We return a new tree node for the extended wide_int. The node
1336 is shared if no overflow flags are set. */
1337
1338
1339 tree
1340 force_fit_type (tree type, const wide_int_ref &cst,
1341 int overflowable, bool overflowed)
1342 {
1343 signop sign = TYPE_SIGN (type);
1344
1345 /* If we need to set overflow flags, return a new unshared node. */
1346 if (overflowed || !wi::fits_to_tree_p (cst, type))
1347 {
1348 if (overflowed
1349 || overflowable < 0
1350 || (overflowable > 0 && sign == SIGNED))
1351 {
1352 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1353 tree t = build_new_int_cst (type, tmp);
1354 TREE_OVERFLOW (t) = 1;
1355 return t;
1356 }
1357 }
1358
1359 /* Else build a shared node. */
1360 return wide_int_to_tree (type, cst);
1361 }
1362
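/* Illustrative sketch, added for exposition and not part of the original
   source.  Forcing a value that does not fit the type produces an
   unshared node with TREE_OVERFLOW set; something like

     wide_int w = wi::shwi (300, TYPE_PRECISION (integer_type_node));
     tree t = force_fit_type (signed_char_type_node, w, 1, false);

   yields an INTEGER_CST with value 44 (300 truncated to 8 bits) and
   TREE_OVERFLOW (t) set, because signed overflow occurred and
   OVERFLOWABLE > 0 asks for signed overflow to be reported.  */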
1363 /* These are the hash table functions for the hash table of INTEGER_CST
1364 nodes of a sizetype. */
1365
1366 /* Return the hash code X, an INTEGER_CST. */
1367
1368 hashval_t
1369 int_cst_hasher::hash (tree x)
1370 {
1371 const_tree const t = x;
1372 hashval_t code = TYPE_UID (TREE_TYPE (t));
1373 int i;
1374
1375 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1376 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1377
1378 return code;
1379 }
1380
1381 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1382 is the same as the value represented by Y (another INTEGER_CST node). */
1383
1384 bool
1385 int_cst_hasher::equal (tree x, tree y)
1386 {
1387 const_tree const xt = x;
1388 const_tree const yt = y;
1389
1390 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1391 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1392 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1393 return false;
1394
1395 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1396 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1397 return false;
1398
1399 return true;
1400 }
1401
1402 /* Create an INT_CST node of TYPE and value CST.
1403 The returned node is always shared. For small integers we use a
1404 per-type vector cache, for larger ones we use a single hash table.
1405 The value is extended from its precision according to the sign of
1406 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1407 the upper bits and ensures that hashing and value equality based
1408 upon the underlying HOST_WIDE_INTs works without masking. */
1409
1410 tree
1411 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1412 {
1413 tree t;
1414 int ix = -1;
1415 int limit = 0;
1416
1417 gcc_assert (type);
1418 unsigned int prec = TYPE_PRECISION (type);
1419 signop sgn = TYPE_SIGN (type);
1420
1421 /* Verify that everything is canonical. */
1422 int l = pcst.get_len ();
1423 if (l > 1)
1424 {
1425 if (pcst.elt (l - 1) == 0)
1426 gcc_checking_assert (pcst.elt (l - 2) < 0);
1427 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1428 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1429 }
1430
1431 wide_int cst = wide_int::from (pcst, prec, sgn);
1432 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1433
1434 if (ext_len == 1)
1435 {
1436 /* We just need to store a single HOST_WIDE_INT. */
1437 HOST_WIDE_INT hwi;
1438 if (TYPE_UNSIGNED (type))
1439 hwi = cst.to_uhwi ();
1440 else
1441 hwi = cst.to_shwi ();
1442
1443 switch (TREE_CODE (type))
1444 {
1445 case NULLPTR_TYPE:
1446 gcc_assert (hwi == 0);
1447 /* Fallthru. */
1448
1449 case POINTER_TYPE:
1450 case REFERENCE_TYPE:
1451 case POINTER_BOUNDS_TYPE:
1452 /* Cache NULL pointer and zero bounds. */
1453 if (hwi == 0)
1454 {
1455 limit = 1;
1456 ix = 0;
1457 }
1458 break;
1459
1460 case BOOLEAN_TYPE:
1461 /* Cache false or true. */
1462 limit = 2;
1463 if (IN_RANGE (hwi, 0, 1))
1464 ix = hwi;
1465 break;
1466
1467 case INTEGER_TYPE:
1468 case OFFSET_TYPE:
1469 if (TYPE_SIGN (type) == UNSIGNED)
1470 {
1471 /* Cache [0, N). */
1472 limit = INTEGER_SHARE_LIMIT;
1473 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1474 ix = hwi;
1475 }
1476 else
1477 {
1478 /* Cache [-1, N). */
1479 limit = INTEGER_SHARE_LIMIT + 1;
1480 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1481 ix = hwi + 1;
1482 }
1483 break;
1484
1485 case ENUMERAL_TYPE:
1486 break;
1487
1488 default:
1489 gcc_unreachable ();
1490 }
1491
1492 if (ix >= 0)
1493 {
1494 /* Look for it in the type's vector of small shared ints. */
1495 if (!TYPE_CACHED_VALUES_P (type))
1496 {
1497 TYPE_CACHED_VALUES_P (type) = 1;
1498 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1499 }
1500
1501 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1502 if (t)
1503 /* Make sure no one is clobbering the shared constant. */
1504 gcc_checking_assert (TREE_TYPE (t) == type
1505 && TREE_INT_CST_NUNITS (t) == 1
1506 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1507 && TREE_INT_CST_EXT_NUNITS (t) == 1
1508 && TREE_INT_CST_ELT (t, 0) == hwi);
1509 else
1510 {
1511 /* Create a new shared int. */
1512 t = build_new_int_cst (type, cst);
1513 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1514 }
1515 }
1516 else
1517 {
1518 /* Use the cache of larger shared ints, using int_cst_node as
1519 a temporary. */
1520
1521 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1522 TREE_TYPE (int_cst_node) = type;
1523
1524 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1525 t = *slot;
1526 if (!t)
1527 {
1528 /* Insert this one into the hash table. */
1529 t = int_cst_node;
1530 *slot = t;
1531 /* Make a new node for next time round. */
1532 int_cst_node = make_int_cst (1, 1);
1533 }
1534 }
1535 }
1536 else
1537 {
1538 /* The value either hashes properly or we drop it on the floor
1539 for the gc to take care of. There will not be enough of them
1540 to worry about. */
1541
1542 tree nt = build_new_int_cst (type, cst);
1543 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1544 t = *slot;
1545 if (!t)
1546 {
1547 /* Insert this one into the hash table. */
1548 t = nt;
1549 *slot = t;
1550 }
1551 }
1552
1553 return t;
1554 }
1555
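/* Illustrative sketch, added for exposition and not part of the original
   source.  The sharing implemented above means that repeated requests for
   the same constant normally return the identical node:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);

   Here a == b as pointers: small values of integer types live in the
   per-type TYPE_CACHED_VALUES vector, while larger values are shared
   through int_cst_hash_table.  */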
1556 void
1557 cache_integer_cst (tree t)
1558 {
1559 tree type = TREE_TYPE (t);
1560 int ix = -1;
1561 int limit = 0;
1562 int prec = TYPE_PRECISION (type);
1563
1564 gcc_assert (!TREE_OVERFLOW (t));
1565
1566 switch (TREE_CODE (type))
1567 {
1568 case NULLPTR_TYPE:
1569 gcc_assert (integer_zerop (t));
1570 /* Fallthru. */
1571
1572 case POINTER_TYPE:
1573 case REFERENCE_TYPE:
1574 /* Cache NULL pointer. */
1575 if (integer_zerop (t))
1576 {
1577 limit = 1;
1578 ix = 0;
1579 }
1580 break;
1581
1582 case BOOLEAN_TYPE:
1583 /* Cache false or true. */
1584 limit = 2;
1585 if (wi::ltu_p (t, 2))
1586 ix = TREE_INT_CST_ELT (t, 0);
1587 break;
1588
1589 case INTEGER_TYPE:
1590 case OFFSET_TYPE:
1591 if (TYPE_UNSIGNED (type))
1592 {
1593 /* Cache 0..N */
1594 limit = INTEGER_SHARE_LIMIT;
1595
1596 /* This is a little hokey, but if the prec is smaller than
1597 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1598 obvious test will not get the correct answer. */
1599 if (prec < HOST_BITS_PER_WIDE_INT)
1600 {
1601 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1602 ix = tree_to_uhwi (t);
1603 }
1604 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1605 ix = tree_to_uhwi (t);
1606 }
1607 else
1608 {
1609 /* Cache -1..N */
1610 limit = INTEGER_SHARE_LIMIT + 1;
1611
1612 if (integer_minus_onep (t))
1613 ix = 0;
1614 else if (!wi::neg_p (t))
1615 {
1616 if (prec < HOST_BITS_PER_WIDE_INT)
1617 {
1618 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1619 ix = tree_to_shwi (t) + 1;
1620 }
1621 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1622 ix = tree_to_shwi (t) + 1;
1623 }
1624 }
1625 break;
1626
1627 case ENUMERAL_TYPE:
1628 break;
1629
1630 default:
1631 gcc_unreachable ();
1632 }
1633
1634 if (ix >= 0)
1635 {
1636 /* Look for it in the type's vector of small shared ints. */
1637 if (!TYPE_CACHED_VALUES_P (type))
1638 {
1639 TYPE_CACHED_VALUES_P (type) = 1;
1640 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1641 }
1642
1643 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1644 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1645 }
1646 else
1647 {
1648 /* Use the cache of larger shared ints. */
1649 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1650 /* If there is already an entry for the number verify it's the
1651 same. */
1652 if (*slot)
1653 gcc_assert (wi::eq_p (tree (*slot), t));
1654 else
1655 /* Otherwise insert this one into the hash table. */
1656 *slot = t;
1657 }
1658 }
1659
1660
1661 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1662 and the rest are zeros. */
1663
1664 tree
1665 build_low_bits_mask (tree type, unsigned bits)
1666 {
1667 gcc_assert (bits <= TYPE_PRECISION (type));
1668
1669 return wide_int_to_tree (type, wi::mask (bits, false,
1670 TYPE_PRECISION (type)));
1671 }
1672
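/* Illustrative sketch, added for exposition and not part of the original
   source:

     build_low_bits_mask (unsigned_type_node, 8)

   returns the INTEGER_CST 0xff of type unsigned int, and passing
   TYPE_PRECISION (type) itself yields the all-ones value of the type.  */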
1673 /* Checks that X is an integer constant that can be expressed in an (unsigned)
1674 HOST_WIDE_INT without loss of precision. */
1675
1676 bool
1677 cst_and_fits_in_hwi (const_tree x)
1678 {
1679 if (TREE_CODE (x) != INTEGER_CST)
1680 return false;
1681
1682 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1683 return false;
1684
1685 return TREE_INT_CST_NUNITS (x) == 1;
1686 }
1687
1688 /* Build a newly constructed VECTOR_CST node of length LEN. */
1689
1690 tree
1691 make_vector_stat (unsigned len MEM_STAT_DECL)
1692 {
1693 tree t;
1694 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1695
1696 record_node_allocation_statistics (VECTOR_CST, length);
1697
1698 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1699
1700 TREE_SET_CODE (t, VECTOR_CST);
1701 TREE_CONSTANT (t) = 1;
1702
1703 return t;
1704 }
1705
1706 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1707 are in a list pointed to by VALS. */
1708
1709 tree
1710 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1711 {
1712 int over = 0;
1713 unsigned cnt = 0;
1714 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1715 TREE_TYPE (v) = type;
1716
1717 /* Iterate through elements and check for overflow. */
1718 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1719 {
1720 tree value = vals[cnt];
1721
1722 VECTOR_CST_ELT (v, cnt) = value;
1723
1724 /* Don't crash if we get an address constant. */
1725 if (!CONSTANT_CLASS_P (value))
1726 continue;
1727
1728 over |= TREE_OVERFLOW (value);
1729 }
1730
1731 TREE_OVERFLOW (v) = over;
1732 return v;
1733 }
1734
1735 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1736 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1737
1738 tree
1739 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1740 {
1741 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1742 unsigned HOST_WIDE_INT idx, pos = 0;
1743 tree value;
1744
1745 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1746 {
1747 if (TREE_CODE (value) == VECTOR_CST)
1748 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1749 vec[pos++] = VECTOR_CST_ELT (value, i);
1750 else
1751 vec[pos++] = value;
1752 }
1753 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1754 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1755
1756 return build_vector (type, vec);
1757 }
1758
1759 /* Build a vector of type VECTYPE where all the elements are copies of SC. */
1760 tree
1761 build_vector_from_val (tree vectype, tree sc)
1762 {
1763 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1764
1765 if (sc == error_mark_node)
1766 return sc;
1767
1768 /* Verify that the vector type is suitable for SC. Note that there
1769 is some inconsistency in the type-system with respect to restrict
1770 qualifications of pointers. Vector types always have a main-variant
1771 element type and the qualification is applied to the vector-type.
1772 So TREE_TYPE (vector-type) does not return a properly qualified
1773 vector element-type. */
1774 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1775 TREE_TYPE (vectype)));
1776
1777 if (CONSTANT_CLASS_P (sc))
1778 {
1779 tree *v = XALLOCAVEC (tree, nunits);
1780 for (i = 0; i < nunits; ++i)
1781 v[i] = sc;
1782 return build_vector (vectype, v);
1783 }
1784 else
1785 {
1786 vec<constructor_elt, va_gc> *v;
1787 vec_alloc (v, nunits);
1788 for (i = 0; i < nunits; ++i)
1789 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1790 return build_constructor (vectype, v);
1791 }
1792 }
1793
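/* Illustrative sketch, added for exposition and not part of the original
   source.  Splatting a constant element gives a VECTOR_CST, while
   splatting anything else falls back to a CONSTRUCTOR:

     build_vector_from_val (v4si_type, integer_zero_node);    VECTOR_CST
     build_vector_from_val (v4si_type, some_nonconst_elt);    CONSTRUCTOR

   where v4si_type stands for a hypothetical vector-of-four-int type and
   some_nonconst_elt for a non-constant element; both names are
   placeholders used only in this sketch.  */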
1794 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1795 are in the vec pointed to by VALS. */
1796 tree
1797 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1798 {
1799 tree c = make_node (CONSTRUCTOR);
1800 unsigned int i;
1801 constructor_elt *elt;
1802 bool constant_p = true;
1803 bool side_effects_p = false;
1804
1805 TREE_TYPE (c) = type;
1806 CONSTRUCTOR_ELTS (c) = vals;
1807
1808 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1809 {
1810 /* Mostly ctors will have elts that don't have side-effects, so
1811 the usual case is to scan all the elements. Hence a single
1812 loop for both const and side effects, rather than one loop
1813 each (with early outs). */
1814 if (!TREE_CONSTANT (elt->value))
1815 constant_p = false;
1816 if (TREE_SIDE_EFFECTS (elt->value))
1817 side_effects_p = true;
1818 }
1819
1820 TREE_SIDE_EFFECTS (c) = side_effects_p;
1821 TREE_CONSTANT (c) = constant_p;
1822
1823 return c;
1824 }
1825
1826 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1827 INDEX and VALUE. */
1828 tree
1829 build_constructor_single (tree type, tree index, tree value)
1830 {
1831 vec<constructor_elt, va_gc> *v;
1832 constructor_elt elt = {index, value};
1833
1834 vec_alloc (v, 1);
1835 v->quick_push (elt);
1836
1837 return build_constructor (type, v);
1838 }
1839
1840
1841 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1842 are in a list pointed to by VALS. */
1843 tree
1844 build_constructor_from_list (tree type, tree vals)
1845 {
1846 tree t;
1847 vec<constructor_elt, va_gc> *v = NULL;
1848
1849 if (vals)
1850 {
1851 vec_alloc (v, list_length (vals));
1852 for (t = vals; t; t = TREE_CHAIN (t))
1853 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1854 }
1855
1856 return build_constructor (type, v);
1857 }
1858
1859 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1860 of elements, provided as index/value pairs. */
1861
1862 tree
1863 build_constructor_va (tree type, int nelts, ...)
1864 {
1865 vec<constructor_elt, va_gc> *v = NULL;
1866 va_list p;
1867
1868 va_start (p, nelts);
1869 vec_alloc (v, nelts);
1870 while (nelts--)
1871 {
1872 tree index = va_arg (p, tree);
1873 tree value = va_arg (p, tree);
1874 CONSTRUCTOR_APPEND_ELT (v, index, value);
1875 }
1876 va_end (p);
1877 return build_constructor (type, v);
1878 }
1879
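/* Illustrative sketch, added for exposition and not part of the original
   source.  The varargs come in INDEX, VALUE pairs, so a two-element
   initializer for a hypothetical array type "atype" could be built as

     build_constructor_va (atype, 2,
                           size_int (0), build_int_cst (integer_type_node, 10),
                           size_int (1), build_int_cst (integer_type_node, 20));

   "atype" is a placeholder; size_int produces the sizetype indices
   conventionally used for array constructor indexes.  */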
1880 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1881
1882 tree
1883 build_fixed (tree type, FIXED_VALUE_TYPE f)
1884 {
1885 tree v;
1886 FIXED_VALUE_TYPE *fp;
1887
1888 v = make_node (FIXED_CST);
1889 fp = ggc_alloc<fixed_value> ();
1890 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1891
1892 TREE_TYPE (v) = type;
1893 TREE_FIXED_CST_PTR (v) = fp;
1894 return v;
1895 }
1896
1897 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1898
1899 tree
1900 build_real (tree type, REAL_VALUE_TYPE d)
1901 {
1902 tree v;
1903 REAL_VALUE_TYPE *dp;
1904 int overflow = 0;
1905
1906 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1907 Consider doing it via real_convert now. */
1908
1909 v = make_node (REAL_CST);
1910 dp = ggc_alloc<real_value> ();
1911 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1912
1913 TREE_TYPE (v) = type;
1914 TREE_REAL_CST_PTR (v) = dp;
1915 TREE_OVERFLOW (v) = overflow;
1916 return v;
1917 }
1918
1919 /* Like build_real, but first truncate D to the type. */
1920
1921 tree
1922 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1923 {
1924 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1925 }
1926
1927 /* Return a REAL_VALUE_TYPE corresponding to the integer value of the
1928 INTEGER_CST node I, converted for the floating-point type TYPE. */
1929
1930 REAL_VALUE_TYPE
1931 real_value_from_int_cst (const_tree type, const_tree i)
1932 {
1933 REAL_VALUE_TYPE d;
1934
1935 /* Clear all bits of the real value type so that we can later do
1936 bitwise comparisons to see if two values are the same. */
1937 memset (&d, 0, sizeof d);
1938
1939 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1940 TYPE_SIGN (TREE_TYPE (i)));
1941 return d;
1942 }
1943
1944 /* Given a tree representing an integer constant I, return a tree
1945 representing the same value as a floating-point constant of type TYPE. */
1946
1947 tree
1948 build_real_from_int_cst (tree type, const_tree i)
1949 {
1950 tree v;
1951 int overflow = TREE_OVERFLOW (i);
1952
1953 v = build_real (type, real_value_from_int_cst (type, i));
1954
1955 TREE_OVERFLOW (v) |= overflow;
1956 return v;
1957 }
1958
1959 /* Return a newly constructed STRING_CST node whose value is
1960 the LEN characters at STR.
1961 Note that for a C string literal, LEN should include the trailing NUL.
1962 The TREE_TYPE is not initialized. */
1963
1964 tree
1965 build_string (int len, const char *str)
1966 {
1967 tree s;
1968 size_t length;
1969
1970 /* Do not waste bytes provided by padding of struct tree_string. */
1971 length = len + offsetof (struct tree_string, str) + 1;
1972
1973 record_node_allocation_statistics (STRING_CST, length);
1974
1975 s = (tree) ggc_internal_alloc (length);
1976
1977 memset (s, 0, sizeof (struct tree_typed));
1978 TREE_SET_CODE (s, STRING_CST);
1979 TREE_CONSTANT (s) = 1;
1980 TREE_STRING_LENGTH (s) = len;
1981 memcpy (s->string.str, str, len);
1982 s->string.str[len] = '\0';
1983
1984 return s;
1985 }
1986
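/* Illustrative sketch, added for exposition and not part of the original
   source.  For a C string literal the length passed in includes the
   terminating NUL, e.g.

     tree s = build_string (4, "abc");

   creates a STRING_CST with TREE_STRING_LENGTH of 4; the caller must
   still set TREE_TYPE (s) to a suitable array type, since build_string
   leaves the type uninitialized.  */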
1987 /* Return a newly constructed COMPLEX_CST node whose value is
1988 specified by the real and imaginary parts REAL and IMAG.
1989 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1990 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1991
1992 tree
1993 build_complex (tree type, tree real, tree imag)
1994 {
1995 tree t = make_node (COMPLEX_CST);
1996
1997 TREE_REALPART (t) = real;
1998 TREE_IMAGPART (t) = imag;
1999 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2000 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2001 return t;
2002 }
2003
2004 /* Build a complex (inf +- 0i), such as for the result of cproj.
2005 TYPE is the complex tree type of the result. If NEG is true, the
2006 imaginary zero is negative. */
2007
2008 tree
2009 build_complex_inf (tree type, bool neg)
2010 {
2011 REAL_VALUE_TYPE rinf, rzero = dconst0;
2012
2013 real_inf (&rinf);
2014 rzero.sign = neg;
2015 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2016 build_real (TREE_TYPE (type), rzero));
2017 }
2018
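/* An illustrative sketch (not part of the original sources): building
   complex constants with the two entry points above.  The helper name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_build_complex_cst (void)
{
  /* 1.0 - 1.0i as a COMPLEX_CST of the standard complex double type.  */
  tree z = build_complex (complex_double_type_node,
			  build_real (double_type_node, dconst1),
			  build_real (double_type_node, dconstm1));

  /* build_complex_inf gives the canonical cproj value; with NEG set the
     imaginary part is a negative zero, i.e. inf - 0i.  */
  tree zinf = build_complex_inf (complex_double_type_node, true);

  (void) zinf;
  return z;
}
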
2019 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2020 element is set to 1. In particular, this is 1 + i for complex types. */
2021
2022 tree
2023 build_each_one_cst (tree type)
2024 {
2025 if (TREE_CODE (type) == COMPLEX_TYPE)
2026 {
2027 tree scalar = build_one_cst (TREE_TYPE (type));
2028 return build_complex (type, scalar, scalar);
2029 }
2030 else
2031 return build_one_cst (type);
2032 }
2033
2034 /* Return a constant of arithmetic type TYPE which is the
2035 multiplicative identity of the set TYPE. */
2036
2037 tree
2038 build_one_cst (tree type)
2039 {
2040 switch (TREE_CODE (type))
2041 {
2042 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2043 case POINTER_TYPE: case REFERENCE_TYPE:
2044 case OFFSET_TYPE:
2045 return build_int_cst (type, 1);
2046
2047 case REAL_TYPE:
2048 return build_real (type, dconst1);
2049
2050 case FIXED_POINT_TYPE:
2051 /* We can only generate 1 for accum types. */
2052 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2053 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2054
2055 case VECTOR_TYPE:
2056 {
2057 tree scalar = build_one_cst (TREE_TYPE (type));
2058
2059 return build_vector_from_val (type, scalar);
2060 }
2061
2062 case COMPLEX_TYPE:
2063 return build_complex (type,
2064 build_one_cst (TREE_TYPE (type)),
2065 build_zero_cst (TREE_TYPE (type)));
2066
2067 default:
2068 gcc_unreachable ();
2069 }
2070 }
2071
2072 /* Return an integer of type TYPE containing all 1's in as much precision as
2073 it contains, or a complex or vector whose subparts are such integers. */
2074
2075 tree
2076 build_all_ones_cst (tree type)
2077 {
2078 if (TREE_CODE (type) == COMPLEX_TYPE)
2079 {
2080 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2081 return build_complex (type, scalar, scalar);
2082 }
2083 else
2084 return build_minus_one_cst (type);
2085 }
2086
2087 /* Return a constant of arithmetic type TYPE which is the
2088 opposite of the multiplicative identity of the set TYPE. */
2089
2090 tree
2091 build_minus_one_cst (tree type)
2092 {
2093 switch (TREE_CODE (type))
2094 {
2095 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2096 case POINTER_TYPE: case REFERENCE_TYPE:
2097 case OFFSET_TYPE:
2098 return build_int_cst (type, -1);
2099
2100 case REAL_TYPE:
2101 return build_real (type, dconstm1);
2102
2103 case FIXED_POINT_TYPE:
2104 /* We can only generate -1 for accum types. */
2105 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2106 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2107 TYPE_MODE (type)));
2108
2109 case VECTOR_TYPE:
2110 {
2111 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2112
2113 return build_vector_from_val (type, scalar);
2114 }
2115
2116 case COMPLEX_TYPE:
2117 return build_complex (type,
2118 build_minus_one_cst (TREE_TYPE (type)),
2119 build_zero_cst (TREE_TYPE (type)));
2120
2121 default:
2122 gcc_unreachable ();
2123 }
2124 }
2125
2126 /* Build 0 constant of type TYPE. This is used by constructor folding
2127 and thus the constant should be represented in memory by
2128 zero(es). */
2129
2130 tree
2131 build_zero_cst (tree type)
2132 {
2133 switch (TREE_CODE (type))
2134 {
2135 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2136 case POINTER_TYPE: case REFERENCE_TYPE:
2137 case OFFSET_TYPE: case NULLPTR_TYPE:
2138 return build_int_cst (type, 0);
2139
2140 case REAL_TYPE:
2141 return build_real (type, dconst0);
2142
2143 case FIXED_POINT_TYPE:
2144 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2145
2146 case VECTOR_TYPE:
2147 {
2148 tree scalar = build_zero_cst (TREE_TYPE (type));
2149
2150 return build_vector_from_val (type, scalar);
2151 }
2152
2153 case COMPLEX_TYPE:
2154 {
2155 tree zero = build_zero_cst (TREE_TYPE (type));
2156
2157 return build_complex (type, zero, zero);
2158 }
2159
2160 default:
2161 if (!AGGREGATE_TYPE_P (type))
2162 return fold_convert (type, integer_zero_node);
2163 return build_constructor (type, NULL);
2164 }
2165 }
2166
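/* An illustrative sketch (not part of the original sources): the
   build_*_cst entry points above dispatch on the type, so the same call
   works for integer, real, complex and vector types.  The helper name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_identity_constants (void)
{
  tree int_one = build_one_cst (integer_type_node);		  /* 1 */
  tree dbl_zero = build_zero_cst (double_type_node);		  /* 0.0 */
  tree cplx_m1 = build_minus_one_cst (complex_double_type_node); /* -1 + 0i */

  (void) dbl_zero;
  (void) cplx_m1;
  return int_one;
}
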
2167
2168 /* Build a BINFO with LEN language slots. */
2169
2170 tree
2171 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2172 {
2173 tree t;
2174 size_t length = (offsetof (struct tree_binfo, base_binfos)
2175 + vec<tree, va_gc>::embedded_size (base_binfos));
2176
2177 record_node_allocation_statistics (TREE_BINFO, length);
2178
2179 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2180
2181 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2182
2183 TREE_SET_CODE (t, TREE_BINFO);
2184
2185 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2186
2187 return t;
2188 }
2189
2190 /* Create a CASE_LABEL_EXPR tree node and return it. */
2191
2192 tree
2193 build_case_label (tree low_value, tree high_value, tree label_decl)
2194 {
2195 tree t = make_node (CASE_LABEL_EXPR);
2196
2197 TREE_TYPE (t) = void_type_node;
2198 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2199
2200 CASE_LOW (t) = low_value;
2201 CASE_HIGH (t) = high_value;
2202 CASE_LABEL (t) = label_decl;
2203 CASE_CHAIN (t) = NULL_TREE;
2204
2205 return t;
2206 }
2207
2208 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2209 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2210 The latter determines the length of the HOST_WIDE_INT vector. */
2211
2212 tree
2213 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2214 {
2215 tree t;
2216 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2217 + sizeof (struct tree_int_cst));
2218
2219 gcc_assert (len);
2220 record_node_allocation_statistics (INTEGER_CST, length);
2221
2222 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2223
2224 TREE_SET_CODE (t, INTEGER_CST);
2225 TREE_INT_CST_NUNITS (t) = len;
2226 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2227 /* to_offset can only be applied to trees that are offset_int-sized
2228 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2229 must be exactly the precision of offset_int and so LEN is correct. */
2230 if (ext_len <= OFFSET_INT_ELTS)
2231 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2232 else
2233 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2234
2235 TREE_CONSTANT (t) = 1;
2236
2237 return t;
2238 }
2239
2240 /* Build a newly constructed TREE_VEC node of length LEN. */
2241
2242 tree
2243 make_tree_vec_stat (int len MEM_STAT_DECL)
2244 {
2245 tree t;
2246 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2247
2248 record_node_allocation_statistics (TREE_VEC, length);
2249
2250 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2251
2252 TREE_SET_CODE (t, TREE_VEC);
2253 TREE_VEC_LENGTH (t) = len;
2254
2255 return t;
2256 }
2257
2258 /* Grow a TREE_VEC node to new length LEN. */
2259
2260 tree
2261 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2262 {
2263 gcc_assert (TREE_CODE (v) == TREE_VEC);
2264
2265 int oldlen = TREE_VEC_LENGTH (v);
2266 gcc_assert (len > oldlen);
2267
2268 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2269 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2270
2271 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2272
2273 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2274
2275 TREE_VEC_LENGTH (v) = len;
2276
2277 return v;
2278 }
2279 \f
2280 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2281 fixed, and scalar, complex or vector. */
2282
2283 int
2284 zerop (const_tree expr)
2285 {
2286 return (integer_zerop (expr)
2287 || real_zerop (expr)
2288 || fixed_zerop (expr));
2289 }
2290
2291 /* Return 1 if EXPR is the integer constant zero or a complex constant
2292 of zero. */
2293
2294 int
2295 integer_zerop (const_tree expr)
2296 {
2297 switch (TREE_CODE (expr))
2298 {
2299 case INTEGER_CST:
2300 return wi::eq_p (expr, 0);
2301 case COMPLEX_CST:
2302 return (integer_zerop (TREE_REALPART (expr))
2303 && integer_zerop (TREE_IMAGPART (expr)));
2304 case VECTOR_CST:
2305 {
2306 unsigned i;
2307 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2308 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2309 return false;
2310 return true;
2311 }
2312 default:
2313 return false;
2314 }
2315 }
2316
2317 /* Return 1 if EXPR is the integer constant one or the corresponding
2318 complex constant. */
2319
2320 int
2321 integer_onep (const_tree expr)
2322 {
2323 switch (TREE_CODE (expr))
2324 {
2325 case INTEGER_CST:
2326 return wi::eq_p (wi::to_widest (expr), 1);
2327 case COMPLEX_CST:
2328 return (integer_onep (TREE_REALPART (expr))
2329 && integer_zerop (TREE_IMAGPART (expr)));
2330 case VECTOR_CST:
2331 {
2332 unsigned i;
2333 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2334 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2335 return false;
2336 return true;
2337 }
2338 default:
2339 return false;
2340 }
2341 }
2342
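/* An illustrative sketch (not part of the original sources): the
   predicates above accept scalar, complex and vector constants alike.
   The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_constant_predicates (void)
{
  tree zero = build_zero_cst (integer_type_node);
  tree cone = build_one_cst (complex_integer_type_node);

  /* For the complex constant 1 + 0i, integer_onep checks that the real
     part is one and the imaginary part is zero.  */
  return integer_zerop (zero) && integer_onep (cone);
}
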
2343 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2344 return 1 if every piece is the integer constant one. */
2345
2346 int
2347 integer_each_onep (const_tree expr)
2348 {
2349 if (TREE_CODE (expr) == COMPLEX_CST)
2350 return (integer_onep (TREE_REALPART (expr))
2351 && integer_onep (TREE_IMAGPART (expr)));
2352 else
2353 return integer_onep (expr);
2354 }
2355
2356 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2357 it contains, or a complex or vector whose subparts are such integers. */
2358
2359 int
2360 integer_all_onesp (const_tree expr)
2361 {
2362 if (TREE_CODE (expr) == COMPLEX_CST
2363 && integer_all_onesp (TREE_REALPART (expr))
2364 && integer_all_onesp (TREE_IMAGPART (expr)))
2365 return 1;
2366
2367 else if (TREE_CODE (expr) == VECTOR_CST)
2368 {
2369 unsigned i;
2370 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2371 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2372 return 0;
2373 return 1;
2374 }
2375
2376 else if (TREE_CODE (expr) != INTEGER_CST)
2377 return 0;
2378
2379 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2380 }
2381
2382 /* Return 1 if EXPR is the integer constant minus one. */
2383
2384 int
2385 integer_minus_onep (const_tree expr)
2386 {
2387 if (TREE_CODE (expr) == COMPLEX_CST)
2388 return (integer_all_onesp (TREE_REALPART (expr))
2389 && integer_zerop (TREE_IMAGPART (expr)));
2390 else
2391 return integer_all_onesp (expr);
2392 }
2393
2394 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2395 one bit on). */
2396
2397 int
2398 integer_pow2p (const_tree expr)
2399 {
2400 if (TREE_CODE (expr) == COMPLEX_CST
2401 && integer_pow2p (TREE_REALPART (expr))
2402 && integer_zerop (TREE_IMAGPART (expr)))
2403 return 1;
2404
2405 if (TREE_CODE (expr) != INTEGER_CST)
2406 return 0;
2407
2408 return wi::popcount (expr) == 1;
2409 }
2410
2411 /* Return 1 if EXPR is an integer constant other than zero or a
2412 complex constant other than zero. */
2413
2414 int
2415 integer_nonzerop (const_tree expr)
2416 {
2417 return ((TREE_CODE (expr) == INTEGER_CST
2418 && !wi::eq_p (expr, 0))
2419 || (TREE_CODE (expr) == COMPLEX_CST
2420 && (integer_nonzerop (TREE_REALPART (expr))
2421 || integer_nonzerop (TREE_IMAGPART (expr)))));
2422 }
2423
2424 /* Return 1 if EXPR is the integer constant one. For vector,
2425 return 1 if every piece is the integer constant minus one
2426 (representing the value TRUE). */
2427
2428 int
2429 integer_truep (const_tree expr)
2430 {
2431 if (TREE_CODE (expr) == VECTOR_CST)
2432 return integer_all_onesp (expr);
2433 return integer_onep (expr);
2434 }
2435
2436 /* Return 1 if EXPR is the fixed-point constant zero. */
2437
2438 int
2439 fixed_zerop (const_tree expr)
2440 {
2441 return (TREE_CODE (expr) == FIXED_CST
2442 && TREE_FIXED_CST (expr).data.is_zero ());
2443 }
2444
2445 /* Return the power of two represented by a tree node known to be a
2446 power of two. */
2447
2448 int
2449 tree_log2 (const_tree expr)
2450 {
2451 if (TREE_CODE (expr) == COMPLEX_CST)
2452 return tree_log2 (TREE_REALPART (expr));
2453
2454 return wi::exact_log2 (expr);
2455 }
2456
2457 /* Similar, but return the largest integer Y such that 2 ** Y is less
2458 than or equal to EXPR. */
2459
2460 int
2461 tree_floor_log2 (const_tree expr)
2462 {
2463 if (TREE_CODE (expr) == COMPLEX_CST)
2464 return tree_log2 (TREE_REALPART (expr));
2465
2466 return wi::floor_log2 (expr);
2467 }
2468
2469 /* Return number of known trailing zero bits in EXPR, or, if the value of
2470 EXPR is known to be zero, the precision of its type. */
2471
2472 unsigned int
2473 tree_ctz (const_tree expr)
2474 {
2475 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2476 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2477 return 0;
2478
2479 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2480 switch (TREE_CODE (expr))
2481 {
2482 case INTEGER_CST:
2483 ret1 = wi::ctz (expr);
2484 return MIN (ret1, prec);
2485 case SSA_NAME:
2486 ret1 = wi::ctz (get_nonzero_bits (expr));
2487 return MIN (ret1, prec);
2488 case PLUS_EXPR:
2489 case MINUS_EXPR:
2490 case BIT_IOR_EXPR:
2491 case BIT_XOR_EXPR:
2492 case MIN_EXPR:
2493 case MAX_EXPR:
2494 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2495 if (ret1 == 0)
2496 return ret1;
2497 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2498 return MIN (ret1, ret2);
2499 case POINTER_PLUS_EXPR:
2500 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2501 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2502 /* Second operand is sizetype, which could in theory be
2503 wider than the pointer's precision. Make sure we never
2504 return more than prec. */
2505 ret2 = MIN (ret2, prec);
2506 return MIN (ret1, ret2);
2507 case BIT_AND_EXPR:
2508 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2509 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2510 return MAX (ret1, ret2);
2511 case MULT_EXPR:
2512 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2513 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2514 return MIN (ret1 + ret2, prec);
2515 case LSHIFT_EXPR:
2516 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2517 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2518 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2519 {
2520 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2521 return MIN (ret1 + ret2, prec);
2522 }
2523 return ret1;
2524 case RSHIFT_EXPR:
2525 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2526 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2527 {
2528 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2529 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2530 if (ret1 > ret2)
2531 return ret1 - ret2;
2532 }
2533 return 0;
2534 case TRUNC_DIV_EXPR:
2535 case CEIL_DIV_EXPR:
2536 case FLOOR_DIV_EXPR:
2537 case ROUND_DIV_EXPR:
2538 case EXACT_DIV_EXPR:
2539 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2540 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2541 {
2542 int l = tree_log2 (TREE_OPERAND (expr, 1));
2543 if (l >= 0)
2544 {
2545 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2546 ret2 = l;
2547 if (ret1 > ret2)
2548 return ret1 - ret2;
2549 }
2550 }
2551 return 0;
2552 CASE_CONVERT:
2553 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2554 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2555 ret1 = prec;
2556 return MIN (ret1, prec);
2557 case SAVE_EXPR:
2558 return tree_ctz (TREE_OPERAND (expr, 0));
2559 case COND_EXPR:
2560 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2561 if (ret1 == 0)
2562 return 0;
2563 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2564 return MIN (ret1, ret2);
2565 case COMPOUND_EXPR:
2566 return tree_ctz (TREE_OPERAND (expr, 1));
2567 case ADDR_EXPR:
2568 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2569 if (ret1 > BITS_PER_UNIT)
2570 {
2571 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2572 return MIN (ret1, prec);
2573 }
2574 return 0;
2575 default:
2576 return 0;
2577 }
2578 }
2579
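/* An illustrative sketch (not part of the original sources): tree_ctz on
   a constant and on a simple arithmetic expression.  The helper name is
   hypothetical.  */

static unsigned int ATTRIBUTE_UNUSED
example_tree_ctz (void)
{
  /* 40 has three trailing zero bits...  */
  tree forty = build_int_cst (integer_type_node, 40);

  /* ...and shifting it left by two adds two more, so tree_ctz returns
     3 for FORTY and 5 for SHIFTED.  */
  tree shifted = build2 (LSHIFT_EXPR, integer_type_node, forty,
			 build_int_cst (integer_type_node, 2));

  return tree_ctz (forty) + tree_ctz (shifted);
}
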
2580 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2581 decimal float constants, so don't return 1 for them. */
2582
2583 int
2584 real_zerop (const_tree expr)
2585 {
2586 switch (TREE_CODE (expr))
2587 {
2588 case REAL_CST:
2589 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2590 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2591 case COMPLEX_CST:
2592 return real_zerop (TREE_REALPART (expr))
2593 && real_zerop (TREE_IMAGPART (expr));
2594 case VECTOR_CST:
2595 {
2596 unsigned i;
2597 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2598 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2599 return false;
2600 return true;
2601 }
2602 default:
2603 return false;
2604 }
2605 }
2606
2607 /* Return 1 if EXPR is the real constant one in real or complex form.
2608 Trailing zeroes matter for decimal float constants, so don't return
2609 1 for them. */
2610
2611 int
2612 real_onep (const_tree expr)
2613 {
2614 switch (TREE_CODE (expr))
2615 {
2616 case REAL_CST:
2617 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2618 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2619 case COMPLEX_CST:
2620 return real_onep (TREE_REALPART (expr))
2621 && real_zerop (TREE_IMAGPART (expr));
2622 case VECTOR_CST:
2623 {
2624 unsigned i;
2625 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2626 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2627 return false;
2628 return true;
2629 }
2630 default:
2631 return false;
2632 }
2633 }
2634
2635 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2636 matter for decimal float constants, so don't return 1 for them. */
2637
2638 int
2639 real_minus_onep (const_tree expr)
2640 {
2641 switch (TREE_CODE (expr))
2642 {
2643 case REAL_CST:
2644 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2645 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2646 case COMPLEX_CST:
2647 return real_minus_onep (TREE_REALPART (expr))
2648 && real_zerop (TREE_IMAGPART (expr));
2649 case VECTOR_CST:
2650 {
2651 unsigned i;
2652 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2653 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2654 return false;
2655 return true;
2656 }
2657 default:
2658 return false;
2659 }
2660 }
2661
2662 /* Nonzero if EXP is a constant or a cast of a constant. */
2663
2664 int
2665 really_constant_p (const_tree exp)
2666 {
2667 /* This is not quite the same as STRIP_NOPS. It does more. */
2668 while (CONVERT_EXPR_P (exp)
2669 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2670 exp = TREE_OPERAND (exp, 0);
2671 return TREE_CONSTANT (exp);
2672 }
2673 \f
2674 /* Return first list element whose TREE_VALUE is ELEM.
2675 Return 0 if ELEM is not in LIST. */
2676
2677 tree
2678 value_member (tree elem, tree list)
2679 {
2680 while (list)
2681 {
2682 if (elem == TREE_VALUE (list))
2683 return list;
2684 list = TREE_CHAIN (list);
2685 }
2686 return NULL_TREE;
2687 }
2688
2689 /* Return first list element whose TREE_PURPOSE is ELEM.
2690 Return 0 if ELEM is not in LIST. */
2691
2692 tree
2693 purpose_member (const_tree elem, tree list)
2694 {
2695 while (list)
2696 {
2697 if (elem == TREE_PURPOSE (list))
2698 return list;
2699 list = TREE_CHAIN (list);
2700 }
2701 return NULL_TREE;
2702 }
2703
2704 /* Return true if ELEM is in V. */
2705
2706 bool
2707 vec_member (const_tree elem, vec<tree, va_gc> *v)
2708 {
2709 unsigned ix;
2710 tree t;
2711 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2712 if (elem == t)
2713 return true;
2714 return false;
2715 }
2716
2717 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2718 NULL_TREE. */
2719
2720 tree
2721 chain_index (int idx, tree chain)
2722 {
2723 for (; chain && idx > 0; --idx)
2724 chain = TREE_CHAIN (chain);
2725 return chain;
2726 }
2727
2728 /* Return nonzero if ELEM is part of the chain CHAIN. */
2729
2730 int
2731 chain_member (const_tree elem, const_tree chain)
2732 {
2733 while (chain)
2734 {
2735 if (elem == chain)
2736 return 1;
2737 chain = DECL_CHAIN (chain);
2738 }
2739
2740 return 0;
2741 }
2742
2743 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2744 We expect a null pointer to mark the end of the chain.
2745 This is the Lisp primitive `length'. */
2746
2747 int
2748 list_length (const_tree t)
2749 {
2750 const_tree p = t;
2751 #ifdef ENABLE_TREE_CHECKING
2752 const_tree q = t;
2753 #endif
2754 int len = 0;
2755
2756 while (p)
2757 {
2758 p = TREE_CHAIN (p);
2759 #ifdef ENABLE_TREE_CHECKING
2760 if (len % 2)
2761 q = TREE_CHAIN (q);
2762 gcc_assert (p != q);
2763 #endif
2764 len++;
2765 }
2766
2767 return len;
2768 }
2769
2770 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2771 UNION_TYPE TYPE, or NULL_TREE if none. */
2772
2773 tree
2774 first_field (const_tree type)
2775 {
2776 tree t = TYPE_FIELDS (type);
2777 while (t && TREE_CODE (t) != FIELD_DECL)
2778 t = TREE_CHAIN (t);
2779 return t;
2780 }
2781
2782 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2783 by modifying the last node in chain 1 to point to chain 2.
2784 This is the Lisp primitive `nconc'. */
2785
2786 tree
2787 chainon (tree op1, tree op2)
2788 {
2789 tree t1;
2790
2791 if (!op1)
2792 return op2;
2793 if (!op2)
2794 return op1;
2795
2796 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2797 continue;
2798 TREE_CHAIN (t1) = op2;
2799
2800 #ifdef ENABLE_TREE_CHECKING
2801 {
2802 tree t2;
2803 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2804 gcc_assert (t2 != t1);
2805 }
2806 #endif
2807
2808 return op1;
2809 }
2810
2811 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2812
2813 tree
2814 tree_last (tree chain)
2815 {
2816 tree next;
2817 if (chain)
2818 while ((next = TREE_CHAIN (chain)))
2819 chain = next;
2820 return chain;
2821 }
2822
2823 /* Reverse the order of elements in the chain T,
2824 and return the new head of the chain (old last element). */
2825
2826 tree
2827 nreverse (tree t)
2828 {
2829 tree prev = 0, decl, next;
2830 for (decl = t; decl; decl = next)
2831 {
2832 /* We shouldn't be using this function to reverse BLOCK chains; we
2833 have blocks_nreverse for that. */
2834 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2835 next = TREE_CHAIN (decl);
2836 TREE_CHAIN (decl) = prev;
2837 prev = decl;
2838 }
2839 return prev;
2840 }
2841 \f
2842 /* Return a newly created TREE_LIST node whose
2843 purpose and value fields are PARM and VALUE. */
2844
2845 tree
2846 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2847 {
2848 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2849 TREE_PURPOSE (t) = parm;
2850 TREE_VALUE (t) = value;
2851 return t;
2852 }
2853
2854 /* Build a chain of TREE_LIST nodes from a vector. */
2855
2856 tree
2857 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2858 {
2859 tree ret = NULL_TREE;
2860 tree *pp = &ret;
2861 unsigned int i;
2862 tree t;
2863 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2864 {
2865 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2866 pp = &TREE_CHAIN (*pp);
2867 }
2868 return ret;
2869 }
2870
2871 /* Return a newly created TREE_LIST node whose
2872 purpose and value fields are PURPOSE and VALUE
2873 and whose TREE_CHAIN is CHAIN. */
2874
2875 tree
2876 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2877 {
2878 tree node;
2879
2880 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2881 memset (node, 0, sizeof (struct tree_common));
2882
2883 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2884
2885 TREE_SET_CODE (node, TREE_LIST);
2886 TREE_CHAIN (node) = chain;
2887 TREE_PURPOSE (node) = purpose;
2888 TREE_VALUE (node) = value;
2889 return node;
2890 }
2891
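/* An illustrative sketch (not part of the original sources): typical use
   of the TREE_LIST primitives above together with chainon, nreverse and
   list_length.  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_tree_list_usage (void)
{
  /* Cons onto the front, which builds the list in reverse order, then
     flip it in place with nreverse.  */
  tree list = NULL_TREE;
  list = tree_cons (NULL_TREE, get_identifier ("a"), list);
  list = tree_cons (NULL_TREE, get_identifier ("b"), list);
  list = nreverse (list);

  /* chainon appends destructively; list_length walks TREE_CHAIN.  */
  list = chainon (list, build_tree_list (NULL_TREE, get_identifier ("c")));
  gcc_assert (list_length (list) == 3);

  return list;
}
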
2892 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2893 trees. */
2894
2895 vec<tree, va_gc> *
2896 ctor_to_vec (tree ctor)
2897 {
2898 vec<tree, va_gc> *vec;
2899 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2900 unsigned int ix;
2901 tree val;
2902
2903 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2904 vec->quick_push (val);
2905
2906 return vec;
2907 }
2908 \f
2909 /* Return the size nominally occupied by an object of type TYPE
2910 when it resides in memory. The value is measured in units of bytes,
2911 and its data type is that normally used for type sizes
2912 (which is the first type created by make_signed_type or
2913 make_unsigned_type). */
2914
2915 tree
2916 size_in_bytes (const_tree type)
2917 {
2918 tree t;
2919
2920 if (type == error_mark_node)
2921 return integer_zero_node;
2922
2923 type = TYPE_MAIN_VARIANT (type);
2924 t = TYPE_SIZE_UNIT (type);
2925
2926 if (t == 0)
2927 {
2928 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2929 return size_zero_node;
2930 }
2931
2932 return t;
2933 }
2934
2935 /* Return the size of TYPE (in bytes) as a wide integer
2936 or return -1 if the size can vary or is larger than an integer. */
2937
2938 HOST_WIDE_INT
2939 int_size_in_bytes (const_tree type)
2940 {
2941 tree t;
2942
2943 if (type == error_mark_node)
2944 return 0;
2945
2946 type = TYPE_MAIN_VARIANT (type);
2947 t = TYPE_SIZE_UNIT (type);
2948
2949 if (t && tree_fits_uhwi_p (t))
2950 return TREE_INT_CST_LOW (t);
2951 else
2952 return -1;
2953 }
2954
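/* An illustrative sketch (not part of the original sources): relating the
   tree-valued and integer-valued size queries above.  The helper name is
   hypothetical.  */

static HOST_WIDE_INT ATTRIBUTE_UNUSED
example_type_sizes (tree type)
{
  /* size_in_bytes returns a tree (normally an INTEGER_CST), while
     int_size_in_bytes collapses variable-sized or oversized types
     to -1.  */
  tree size_tree = size_in_bytes (type);
  HOST_WIDE_INT size = int_size_in_bytes (type);

  gcc_assert (size == -1 || tree_to_shwi (size_tree) == size);
  return size;
}
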
2955 /* Return the maximum size of TYPE (in bytes) as a wide integer
2956 or return -1 if the size can vary or is larger than an integer. */
2957
2958 HOST_WIDE_INT
2959 max_int_size_in_bytes (const_tree type)
2960 {
2961 HOST_WIDE_INT size = -1;
2962 tree size_tree;
2963
2964 /* If this is an array type, check for a possible MAX_SIZE attached. */
2965
2966 if (TREE_CODE (type) == ARRAY_TYPE)
2967 {
2968 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2969
2970 if (size_tree && tree_fits_uhwi_p (size_tree))
2971 size = tree_to_uhwi (size_tree);
2972 }
2973
2974 /* If we still haven't been able to get a size, see if the language
2975 can compute a maximum size. */
2976
2977 if (size == -1)
2978 {
2979 size_tree = lang_hooks.types.max_size (type);
2980
2981 if (size_tree && tree_fits_uhwi_p (size_tree))
2982 size = tree_to_uhwi (size_tree);
2983 }
2984
2985 return size;
2986 }
2987 \f
2988 /* Return the bit position of FIELD, in bits from the start of the record.
2989 This is a tree of type bitsizetype. */
2990
2991 tree
2992 bit_position (const_tree field)
2993 {
2994 return bit_from_pos (DECL_FIELD_OFFSET (field),
2995 DECL_FIELD_BIT_OFFSET (field));
2996 }
2997 \f
2998 /* Return the byte position of FIELD, in bytes from the start of the record.
2999 This is a tree of type sizetype. */
3000
3001 tree
3002 byte_position (const_tree field)
3003 {
3004 return byte_from_pos (DECL_FIELD_OFFSET (field),
3005 DECL_FIELD_BIT_OFFSET (field));
3006 }
3007
3008 /* Likewise, but return as an integer. It must be representable in
3009 that way (since it could be a signed value, we don't have the
3010 option of returning -1 like int_size_in_bytes can). */
3011
3012 HOST_WIDE_INT
3013 int_byte_position (const_tree field)
3014 {
3015 return tree_to_shwi (byte_position (field));
3016 }
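
/* An illustrative sketch (not part of the original sources): walking the
   fields of an already laid out RECORD_TYPE and relating the two views
   of a field's position.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_field_positions (tree record_type)
{
  for (tree field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
	&& integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
      /* For a field starting on its byte boundary, bit_position is just
	 byte_position scaled by BITS_PER_UNIT.  */
      gcc_assert (tree_to_shwi (bit_position (field))
		  == int_byte_position (field) * BITS_PER_UNIT);
}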
3017 \f
3018 /* Return the strictest alignment, in bits, that T is known to have. */
3019
3020 unsigned int
3021 expr_align (const_tree t)
3022 {
3023 unsigned int align0, align1;
3024
3025 switch (TREE_CODE (t))
3026 {
3027 CASE_CONVERT: case NON_LVALUE_EXPR:
3028 /* If we have conversions, we know that the alignment of the
3029 object must meet each of the alignments of the types. */
3030 align0 = expr_align (TREE_OPERAND (t, 0));
3031 align1 = TYPE_ALIGN (TREE_TYPE (t));
3032 return MAX (align0, align1);
3033
3034 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3035 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3036 case CLEANUP_POINT_EXPR:
3037 /* These don't change the alignment of an object. */
3038 return expr_align (TREE_OPERAND (t, 0));
3039
3040 case COND_EXPR:
3041 /* The best we can do is say that the alignment is the least aligned
3042 of the two arms. */
3043 align0 = expr_align (TREE_OPERAND (t, 1));
3044 align1 = expr_align (TREE_OPERAND (t, 2));
3045 return MIN (align0, align1);
3046
3047 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3048 meaningfully; it's always 1. */
3049 case LABEL_DECL: case CONST_DECL:
3050 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3051 case FUNCTION_DECL:
3052 gcc_assert (DECL_ALIGN (t) != 0);
3053 return DECL_ALIGN (t);
3054
3055 default:
3056 break;
3057 }
3058
3059 /* Otherwise take the alignment from that of the type. */
3060 return TYPE_ALIGN (TREE_TYPE (t));
3061 }
3062 \f
3063 /* Return, as a tree node, the number of elements for TYPE (which is an
3064 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3065
3066 tree
3067 array_type_nelts (const_tree type)
3068 {
3069 tree index_type, min, max;
3070
3071 /* If they did it with unspecified bounds, then we should have already
3072 given an error about it before we got here. */
3073 if (! TYPE_DOMAIN (type))
3074 return error_mark_node;
3075
3076 index_type = TYPE_DOMAIN (type);
3077 min = TYPE_MIN_VALUE (index_type);
3078 max = TYPE_MAX_VALUE (index_type);
3079
3080 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3081 if (!max)
3082 return error_mark_node;
3083
3084 return (integer_zerop (min)
3085 ? max
3086 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3087 }
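
/* An illustrative sketch (not part of the original sources): for int[10]
   the domain is [0, 9], so array_type_nelts returns the upper bound 9 and
   callers add one to get the element count.  The helper name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_array_type_nelts (void)
{
  tree domain = build_index_type (size_int (9));
  tree arr = build_array_type (integer_type_node, domain);
  return array_type_nelts (arr);
}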
3088 \f
3089 /* If arg is static -- a reference to an object in static storage -- then
3090 return the object. This is not the same as the C meaning of `static'.
3091 If arg isn't static, return NULL. */
3092
3093 tree
3094 staticp (tree arg)
3095 {
3096 switch (TREE_CODE (arg))
3097 {
3098 case FUNCTION_DECL:
3099 /* Nested functions are static, even though taking their address will
3100 involve a trampoline as we unnest the nested function and create
3101 the trampoline on the tree level. */
3102 return arg;
3103
3104 case VAR_DECL:
3105 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3106 && ! DECL_THREAD_LOCAL_P (arg)
3107 && ! DECL_DLLIMPORT_P (arg)
3108 ? arg : NULL);
3109
3110 case CONST_DECL:
3111 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3112 ? arg : NULL);
3113
3114 case CONSTRUCTOR:
3115 return TREE_STATIC (arg) ? arg : NULL;
3116
3117 case LABEL_DECL:
3118 case STRING_CST:
3119 return arg;
3120
3121 case COMPONENT_REF:
3122 /* If the thing being referenced is not a field, then it is
3123 something language specific. */
3124 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3125
3126 /* If we are referencing a bitfield, we can't evaluate an
3127 ADDR_EXPR at compile time and so it isn't a constant. */
3128 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3129 return NULL;
3130
3131 return staticp (TREE_OPERAND (arg, 0));
3132
3133 case BIT_FIELD_REF:
3134 return NULL;
3135
3136 case INDIRECT_REF:
3137 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3138
3139 case ARRAY_REF:
3140 case ARRAY_RANGE_REF:
3141 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3142 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3143 return staticp (TREE_OPERAND (arg, 0));
3144 else
3145 return NULL;
3146
3147 case COMPOUND_LITERAL_EXPR:
3148 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3149
3150 default:
3151 return NULL;
3152 }
3153 }
3154
3155 \f
3156
3157
3158 /* Return whether OP is a DECL whose address is function-invariant. */
3159
3160 bool
3161 decl_address_invariant_p (const_tree op)
3162 {
3163 /* The conditions below are slightly less strict than the one in
3164 staticp. */
3165
3166 switch (TREE_CODE (op))
3167 {
3168 case PARM_DECL:
3169 case RESULT_DECL:
3170 case LABEL_DECL:
3171 case FUNCTION_DECL:
3172 return true;
3173
3174 case VAR_DECL:
3175 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3176 || DECL_THREAD_LOCAL_P (op)
3177 || DECL_CONTEXT (op) == current_function_decl
3178 || decl_function_context (op) == current_function_decl)
3179 return true;
3180 break;
3181
3182 case CONST_DECL:
3183 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3184 || decl_function_context (op) == current_function_decl)
3185 return true;
3186 break;
3187
3188 default:
3189 break;
3190 }
3191
3192 return false;
3193 }
3194
3195 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3196
3197 bool
3198 decl_address_ip_invariant_p (const_tree op)
3199 {
3200 /* The conditions below are slightly less strict than the one in
3201 staticp. */
3202
3203 switch (TREE_CODE (op))
3204 {
3205 case LABEL_DECL:
3206 case FUNCTION_DECL:
3207 case STRING_CST:
3208 return true;
3209
3210 case VAR_DECL:
3211 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3212 && !DECL_DLLIMPORT_P (op))
3213 || DECL_THREAD_LOCAL_P (op))
3214 return true;
3215 break;
3216
3217 case CONST_DECL:
3218 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3219 return true;
3220 break;
3221
3222 default:
3223 break;
3224 }
3225
3226 return false;
3227 }
3228
3229
3230 /* Return true if T is function-invariant (internal function, does
3231 not handle arithmetic; that's handled in skip_simple_arithmetic and
3232 tree_invariant_p). */
3233
3234 static bool
3235 tree_invariant_p_1 (tree t)
3236 {
3237 tree op;
3238
3239 if (TREE_CONSTANT (t)
3240 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3241 return true;
3242
3243 switch (TREE_CODE (t))
3244 {
3245 case SAVE_EXPR:
3246 return true;
3247
3248 case ADDR_EXPR:
3249 op = TREE_OPERAND (t, 0);
3250 while (handled_component_p (op))
3251 {
3252 switch (TREE_CODE (op))
3253 {
3254 case ARRAY_REF:
3255 case ARRAY_RANGE_REF:
3256 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3257 || TREE_OPERAND (op, 2) != NULL_TREE
3258 || TREE_OPERAND (op, 3) != NULL_TREE)
3259 return false;
3260 break;
3261
3262 case COMPONENT_REF:
3263 if (TREE_OPERAND (op, 2) != NULL_TREE)
3264 return false;
3265 break;
3266
3267 default:;
3268 }
3269 op = TREE_OPERAND (op, 0);
3270 }
3271
3272 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3273
3274 default:
3275 break;
3276 }
3277
3278 return false;
3279 }
3280
3281 /* Return true if T is function-invariant. */
3282
3283 bool
3284 tree_invariant_p (tree t)
3285 {
3286 tree inner = skip_simple_arithmetic (t);
3287 return tree_invariant_p_1 (inner);
3288 }
3289
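/* An illustrative sketch (not part of the original sources): a constant
   is trivially invariant, and so is a constant reached through simple
   arithmetic, because tree_invariant_p looks through such operations via
   skip_simple_arithmetic.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_tree_invariant_p (void)
{
  tree cst = build_int_cst (integer_type_node, 7);
  tree sum = build2 (PLUS_EXPR, integer_type_node, cst, cst);
  return tree_invariant_p (cst) && tree_invariant_p (sum);
}
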
3290 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3291 Do this to any expression which may be used in more than one place,
3292 but must be evaluated only once.
3293
3294 Normally, expand_expr would reevaluate the expression each time.
3295 Calling save_expr produces something that is evaluated and recorded
3296 the first time expand_expr is called on it. Subsequent calls to
3297 expand_expr just reuse the recorded value.
3298
3299 The call to expand_expr that generates code that actually computes
3300 the value is the first call *at compile time*. Subsequent calls
3301 *at compile time* generate code to use the saved value.
3302 This produces correct results provided that *at run time* control
3303 always flows through the insns made by the first expand_expr
3304 before reaching the other places where the save_expr was evaluated.
3305 You, the caller of save_expr, must make sure this is so.
3306
3307 Constants, and certain read-only nodes, are returned with no
3308 SAVE_EXPR because that is safe. Expressions containing placeholders
3309 are not touched; see tree.def for an explanation of what these
3310 are used for. */
3311
3312 tree
3313 save_expr (tree expr)
3314 {
3315 tree t = fold (expr);
3316 tree inner;
3317
3318 /* If the tree evaluates to a constant, then we don't want to hide that
3319 fact (i.e. this allows further folding, and direct checks for constants).
3320 However, a read-only object that has side effects cannot be bypassed.
3321 Since it is no problem to reevaluate literals, we just return the
3322 literal node. */
3323 inner = skip_simple_arithmetic (t);
3324 if (TREE_CODE (inner) == ERROR_MARK)
3325 return inner;
3326
3327 if (tree_invariant_p_1 (inner))
3328 return t;
3329
3330 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3331 it means that the size or offset of some field of an object depends on
3332 the value within another field.
3333
3334 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3335 and some variable since it would then need to be both evaluated once and
3336 evaluated more than once. Front-ends must assure this case cannot
3337 happen by surrounding any such subexpressions in their own SAVE_EXPR
3338 and forcing evaluation at the proper time. */
3339 if (contains_placeholder_p (inner))
3340 return t;
3341
3342 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3343 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3344
3345 /* This expression might be placed ahead of a jump to ensure that the
3346 value was computed on both sides of the jump. So make sure it isn't
3347 eliminated as dead. */
3348 TREE_SIDE_EFFECTS (t) = 1;
3349 return t;
3350 }
3351
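/* An illustrative sketch (not part of the original sources): using
   save_expr to reference a possibly side-effecting expression twice while
   evaluating it only once.  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_square_once (tree expr)
{
  /* Constants and other invariants come back unchanged, so this is
     cheap when no temporary is really needed.  */
  tree saved = save_expr (expr);
  return fold_build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}
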
3352 /* Look inside EXPR into any simple arithmetic operations. Return the
3353 outermost non-arithmetic or non-invariant node. */
3354
3355 tree
3356 skip_simple_arithmetic (tree expr)
3357 {
3358 /* We don't care about whether this can be used as an lvalue in this
3359 context. */
3360 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3361 expr = TREE_OPERAND (expr, 0);
3362
3363 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3364 a constant, it will be more efficient to not make another SAVE_EXPR since
3365 it will allow better simplification and GCSE will be able to merge the
3366 computations if they actually occur. */
3367 while (true)
3368 {
3369 if (UNARY_CLASS_P (expr))
3370 expr = TREE_OPERAND (expr, 0);
3371 else if (BINARY_CLASS_P (expr))
3372 {
3373 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3374 expr = TREE_OPERAND (expr, 0);
3375 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3376 expr = TREE_OPERAND (expr, 1);
3377 else
3378 break;
3379 }
3380 else
3381 break;
3382 }
3383
3384 return expr;
3385 }
3386
3387 /* Look inside EXPR into simple arithmetic operations involving constants.
3388 Return the outermost non-arithmetic or non-constant node. */
3389
3390 tree
3391 skip_simple_constant_arithmetic (tree expr)
3392 {
3393 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3394 expr = TREE_OPERAND (expr, 0);
3395
3396 while (true)
3397 {
3398 if (UNARY_CLASS_P (expr))
3399 expr = TREE_OPERAND (expr, 0);
3400 else if (BINARY_CLASS_P (expr))
3401 {
3402 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3403 expr = TREE_OPERAND (expr, 0);
3404 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3405 expr = TREE_OPERAND (expr, 1);
3406 else
3407 break;
3408 }
3409 else
3410 break;
3411 }
3412
3413 return expr;
3414 }
3415
3416 /* Return which tree structure is used by T. */
3417
3418 enum tree_node_structure_enum
3419 tree_node_structure (const_tree t)
3420 {
3421 const enum tree_code code = TREE_CODE (t);
3422 return tree_node_structure_for_code (code);
3423 }
3424
3425 /* Set various status flags when building a CALL_EXPR object T. */
3426
3427 static void
3428 process_call_operands (tree t)
3429 {
3430 bool side_effects = TREE_SIDE_EFFECTS (t);
3431 bool read_only = false;
3432 int i = call_expr_flags (t);
3433
3434 /* Calls have side-effects, except those to const or pure functions. */
3435 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3436 side_effects = true;
3437 /* Propagate TREE_READONLY of arguments for const functions. */
3438 if (i & ECF_CONST)
3439 read_only = true;
3440
3441 if (!side_effects || read_only)
3442 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3443 {
3444 tree op = TREE_OPERAND (t, i);
3445 if (op && TREE_SIDE_EFFECTS (op))
3446 side_effects = true;
3447 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3448 read_only = false;
3449 }
3450
3451 TREE_SIDE_EFFECTS (t) = side_effects;
3452 TREE_READONLY (t) = read_only;
3453 }
3454 \f
3455 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3456 size or offset that depends on a field within a record. */
3457
3458 bool
3459 contains_placeholder_p (const_tree exp)
3460 {
3461 enum tree_code code;
3462
3463 if (!exp)
3464 return 0;
3465
3466 code = TREE_CODE (exp);
3467 if (code == PLACEHOLDER_EXPR)
3468 return 1;
3469
3470 switch (TREE_CODE_CLASS (code))
3471 {
3472 case tcc_reference:
3473 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3474 position computations since they will be converted into a
3475 WITH_RECORD_EXPR involving the reference, which will assume
3476 here will be valid. */
3477 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3478
3479 case tcc_exceptional:
3480 if (code == TREE_LIST)
3481 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3482 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3483 break;
3484
3485 case tcc_unary:
3486 case tcc_binary:
3487 case tcc_comparison:
3488 case tcc_expression:
3489 switch (code)
3490 {
3491 case COMPOUND_EXPR:
3492 /* Ignoring the first operand isn't quite right, but works best. */
3493 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3494
3495 case COND_EXPR:
3496 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3497 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3498 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3499
3500 case SAVE_EXPR:
3501 /* The save_expr function never wraps anything containing
3502 a PLACEHOLDER_EXPR. */
3503 return 0;
3504
3505 default:
3506 break;
3507 }
3508
3509 switch (TREE_CODE_LENGTH (code))
3510 {
3511 case 1:
3512 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3513 case 2:
3514 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3515 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3516 default:
3517 return 0;
3518 }
3519
3520 case tcc_vl_exp:
3521 switch (code)
3522 {
3523 case CALL_EXPR:
3524 {
3525 const_tree arg;
3526 const_call_expr_arg_iterator iter;
3527 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3528 if (CONTAINS_PLACEHOLDER_P (arg))
3529 return 1;
3530 return 0;
3531 }
3532 default:
3533 return 0;
3534 }
3535
3536 default:
3537 return 0;
3538 }
3539 return 0;
3540 }
3541
3542 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3543 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3544 field positions. */
3545
3546 static bool
3547 type_contains_placeholder_1 (const_tree type)
3548 {
3549 /* If the size contains a placeholder or the parent type (component type in
3550 the case of arrays) type involves a placeholder, this type does. */
3551 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3552 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3553 || (!POINTER_TYPE_P (type)
3554 && TREE_TYPE (type)
3555 && type_contains_placeholder_p (TREE_TYPE (type))))
3556 return true;
3557
3558 /* Now do type-specific checks. Note that the last part of the check above
3559 greatly limits what we have to do below. */
3560 switch (TREE_CODE (type))
3561 {
3562 case VOID_TYPE:
3563 case POINTER_BOUNDS_TYPE:
3564 case COMPLEX_TYPE:
3565 case ENUMERAL_TYPE:
3566 case BOOLEAN_TYPE:
3567 case POINTER_TYPE:
3568 case OFFSET_TYPE:
3569 case REFERENCE_TYPE:
3570 case METHOD_TYPE:
3571 case FUNCTION_TYPE:
3572 case VECTOR_TYPE:
3573 case NULLPTR_TYPE:
3574 return false;
3575
3576 case INTEGER_TYPE:
3577 case REAL_TYPE:
3578 case FIXED_POINT_TYPE:
3579 /* Here we just check the bounds. */
3580 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3581 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3582
3583 case ARRAY_TYPE:
3584 /* We have already checked the component type above, so just check the
3585 domain type. */
3586 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3587
3588 case RECORD_TYPE:
3589 case UNION_TYPE:
3590 case QUAL_UNION_TYPE:
3591 {
3592 tree field;
3593
3594 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3595 if (TREE_CODE (field) == FIELD_DECL
3596 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3597 || (TREE_CODE (type) == QUAL_UNION_TYPE
3598 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3599 || type_contains_placeholder_p (TREE_TYPE (field))))
3600 return true;
3601
3602 return false;
3603 }
3604
3605 default:
3606 gcc_unreachable ();
3607 }
3608 }
3609
3610 /* Wrapper around above function used to cache its result. */
3611
3612 bool
3613 type_contains_placeholder_p (tree type)
3614 {
3615 bool result;
3616
3617 /* If the contains_placeholder_bits field has been initialized,
3618 then we know the answer. */
3619 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3620 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3621
3622 /* Indicate that we've seen this type node, and the answer is false.
3623 This is what we want to return if we run into recursion via fields. */
3624 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3625
3626 /* Compute the real value. */
3627 result = type_contains_placeholder_1 (type);
3628
3629 /* Store the real value. */
3630 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3631
3632 return result;
3633 }
3634 \f
3635 /* Push tree EXP onto vector QUEUE if it is not already present. */
3636
3637 static void
3638 push_without_duplicates (tree exp, vec<tree> *queue)
3639 {
3640 unsigned int i;
3641 tree iter;
3642
3643 FOR_EACH_VEC_ELT (*queue, i, iter)
3644 if (simple_cst_equal (iter, exp) == 1)
3645 break;
3646
3647 if (!iter)
3648 queue->safe_push (exp);
3649 }
3650
3651 /* Given a tree EXP, find all occurrences of references to fields
3652 in a PLACEHOLDER_EXPR and place them in vector REFS without
3653 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3654 we assume here that EXP contains only arithmetic expressions
3655 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3656 argument list. */
3657
3658 void
3659 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3660 {
3661 enum tree_code code = TREE_CODE (exp);
3662 tree inner;
3663 int i;
3664
3665 /* We handle TREE_LIST and COMPONENT_REF separately. */
3666 if (code == TREE_LIST)
3667 {
3668 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3669 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3670 }
3671 else if (code == COMPONENT_REF)
3672 {
3673 for (inner = TREE_OPERAND (exp, 0);
3674 REFERENCE_CLASS_P (inner);
3675 inner = TREE_OPERAND (inner, 0))
3676 ;
3677
3678 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3679 push_without_duplicates (exp, refs);
3680 else
3681 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3682 }
3683 else
3684 switch (TREE_CODE_CLASS (code))
3685 {
3686 case tcc_constant:
3687 break;
3688
3689 case tcc_declaration:
3690 /* Variables allocated to static storage can stay. */
3691 if (!TREE_STATIC (exp))
3692 push_without_duplicates (exp, refs);
3693 break;
3694
3695 case tcc_expression:
3696 /* This is the pattern built in ada/make_aligning_type. */
3697 if (code == ADDR_EXPR
3698 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3699 {
3700 push_without_duplicates (exp, refs);
3701 break;
3702 }
3703
3704 /* Fall through... */
3705
3706 case tcc_exceptional:
3707 case tcc_unary:
3708 case tcc_binary:
3709 case tcc_comparison:
3710 case tcc_reference:
3711 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3712 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3713 break;
3714
3715 case tcc_vl_exp:
3716 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3717 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3718 break;
3719
3720 default:
3721 gcc_unreachable ();
3722 }
3723 }
3724
3725 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3726 return a tree with all occurrences of references to F in a
3727 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3728 CONST_DECLs. Note that we assume here that EXP contains only
3729 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3730 occurring only in their argument list. */
3731
3732 tree
3733 substitute_in_expr (tree exp, tree f, tree r)
3734 {
3735 enum tree_code code = TREE_CODE (exp);
3736 tree op0, op1, op2, op3;
3737 tree new_tree;
3738
3739 /* We handle TREE_LIST and COMPONENT_REF separately. */
3740 if (code == TREE_LIST)
3741 {
3742 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3743 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3744 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3745 return exp;
3746
3747 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3748 }
3749 else if (code == COMPONENT_REF)
3750 {
3751 tree inner;
3752
3753 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3754 and it is the right field, replace it with R. */
3755 for (inner = TREE_OPERAND (exp, 0);
3756 REFERENCE_CLASS_P (inner);
3757 inner = TREE_OPERAND (inner, 0))
3758 ;
3759
3760 /* The field. */
3761 op1 = TREE_OPERAND (exp, 1);
3762
3763 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3764 return r;
3765
3766 /* If this expression hasn't been completed yet, leave it alone. */
3767 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3768 return exp;
3769
3770 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3771 if (op0 == TREE_OPERAND (exp, 0))
3772 return exp;
3773
3774 new_tree
3775 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3776 }
3777 else
3778 switch (TREE_CODE_CLASS (code))
3779 {
3780 case tcc_constant:
3781 return exp;
3782
3783 case tcc_declaration:
3784 if (exp == f)
3785 return r;
3786 else
3787 return exp;
3788
3789 case tcc_expression:
3790 if (exp == f)
3791 return r;
3792
3793 /* Fall through... */
3794
3795 case tcc_exceptional:
3796 case tcc_unary:
3797 case tcc_binary:
3798 case tcc_comparison:
3799 case tcc_reference:
3800 switch (TREE_CODE_LENGTH (code))
3801 {
3802 case 0:
3803 return exp;
3804
3805 case 1:
3806 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3807 if (op0 == TREE_OPERAND (exp, 0))
3808 return exp;
3809
3810 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3811 break;
3812
3813 case 2:
3814 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3815 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3816
3817 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3818 return exp;
3819
3820 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3821 break;
3822
3823 case 3:
3824 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3825 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3826 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3827
3828 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3829 && op2 == TREE_OPERAND (exp, 2))
3830 return exp;
3831
3832 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3833 break;
3834
3835 case 4:
3836 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3837 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3838 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3839 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3840
3841 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3842 && op2 == TREE_OPERAND (exp, 2)
3843 && op3 == TREE_OPERAND (exp, 3))
3844 return exp;
3845
3846 new_tree
3847 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3848 break;
3849
3850 default:
3851 gcc_unreachable ();
3852 }
3853 break;
3854
3855 case tcc_vl_exp:
3856 {
3857 int i;
3858
3859 new_tree = NULL_TREE;
3860
3861 /* If we are trying to replace F with a constant, inline back
3862 functions which do nothing else than computing a value from
3863 the arguments they are passed. This makes it possible to
3864 partially or entirely fold the replacement expression. */
3865 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3866 {
3867 tree t = maybe_inline_call_in_expr (exp);
3868 if (t)
3869 return SUBSTITUTE_IN_EXPR (t, f, r);
3870 }
3871
3872 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3873 {
3874 tree op = TREE_OPERAND (exp, i);
3875 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3876 if (new_op != op)
3877 {
3878 if (!new_tree)
3879 new_tree = copy_node (exp);
3880 TREE_OPERAND (new_tree, i) = new_op;
3881 }
3882 }
3883
3884 if (new_tree)
3885 {
3886 new_tree = fold (new_tree);
3887 if (TREE_CODE (new_tree) == CALL_EXPR)
3888 process_call_operands (new_tree);
3889 }
3890 else
3891 return exp;
3892 }
3893 break;
3894
3895 default:
3896 gcc_unreachable ();
3897 }
3898
3899 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3900
3901 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3902 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3903
3904 return new_tree;
3905 }
3906
3907 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3908 for it within OBJ, a tree that is an object or a chain of references. */
3909
3910 tree
3911 substitute_placeholder_in_expr (tree exp, tree obj)
3912 {
3913 enum tree_code code = TREE_CODE (exp);
3914 tree op0, op1, op2, op3;
3915 tree new_tree;
3916
3917 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3918 in the chain of OBJ. */
3919 if (code == PLACEHOLDER_EXPR)
3920 {
3921 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3922 tree elt;
3923
3924 for (elt = obj; elt != 0;
3925 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3926 || TREE_CODE (elt) == COND_EXPR)
3927 ? TREE_OPERAND (elt, 1)
3928 : (REFERENCE_CLASS_P (elt)
3929 || UNARY_CLASS_P (elt)
3930 || BINARY_CLASS_P (elt)
3931 || VL_EXP_CLASS_P (elt)
3932 || EXPRESSION_CLASS_P (elt))
3933 ? TREE_OPERAND (elt, 0) : 0))
3934 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3935 return elt;
3936
3937 for (elt = obj; elt != 0;
3938 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3939 || TREE_CODE (elt) == COND_EXPR)
3940 ? TREE_OPERAND (elt, 1)
3941 : (REFERENCE_CLASS_P (elt)
3942 || UNARY_CLASS_P (elt)
3943 || BINARY_CLASS_P (elt)
3944 || VL_EXP_CLASS_P (elt)
3945 || EXPRESSION_CLASS_P (elt))
3946 ? TREE_OPERAND (elt, 0) : 0))
3947 if (POINTER_TYPE_P (TREE_TYPE (elt))
3948 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3949 == need_type))
3950 return fold_build1 (INDIRECT_REF, need_type, elt);
3951
3952 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3953 survives until RTL generation, there will be an error. */
3954 return exp;
3955 }
3956
3957 /* TREE_LIST is special because we need to look at TREE_VALUE
3958 and TREE_CHAIN, not TREE_OPERANDS. */
3959 else if (code == TREE_LIST)
3960 {
3961 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3962 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3963 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3964 return exp;
3965
3966 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3967 }
3968 else
3969 switch (TREE_CODE_CLASS (code))
3970 {
3971 case tcc_constant:
3972 case tcc_declaration:
3973 return exp;
3974
3975 case tcc_exceptional:
3976 case tcc_unary:
3977 case tcc_binary:
3978 case tcc_comparison:
3979 case tcc_expression:
3980 case tcc_reference:
3981 case tcc_statement:
3982 switch (TREE_CODE_LENGTH (code))
3983 {
3984 case 0:
3985 return exp;
3986
3987 case 1:
3988 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3989 if (op0 == TREE_OPERAND (exp, 0))
3990 return exp;
3991
3992 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3993 break;
3994
3995 case 2:
3996 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3997 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3998
3999 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4000 return exp;
4001
4002 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4003 break;
4004
4005 case 3:
4006 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4007 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4008 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4009
4010 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4011 && op2 == TREE_OPERAND (exp, 2))
4012 return exp;
4013
4014 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4015 break;
4016
4017 case 4:
4018 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4019 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4020 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4021 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4022
4023 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4024 && op2 == TREE_OPERAND (exp, 2)
4025 && op3 == TREE_OPERAND (exp, 3))
4026 return exp;
4027
4028 new_tree
4029 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4030 break;
4031
4032 default:
4033 gcc_unreachable ();
4034 }
4035 break;
4036
4037 case tcc_vl_exp:
4038 {
4039 int i;
4040
4041 new_tree = NULL_TREE;
4042
4043 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4044 {
4045 tree op = TREE_OPERAND (exp, i);
4046 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4047 if (new_op != op)
4048 {
4049 if (!new_tree)
4050 new_tree = copy_node (exp);
4051 TREE_OPERAND (new_tree, i) = new_op;
4052 }
4053 }
4054
4055 if (new_tree)
4056 {
4057 new_tree = fold (new_tree);
4058 if (TREE_CODE (new_tree) == CALL_EXPR)
4059 process_call_operands (new_tree);
4060 }
4061 else
4062 return exp;
4063 }
4064 break;
4065
4066 default:
4067 gcc_unreachable ();
4068 }
4069
4070 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4071
4072 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4073 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4074
4075 return new_tree;
4076 }
4077 \f
4078
4079 /* Subroutine of stabilize_reference; this is called for subtrees of
4080 references. Any expression with side-effects must be put in a SAVE_EXPR
4081 to ensure that it is only evaluated once.
4082
4083 We don't put SAVE_EXPR nodes around everything, because assigning very
4084 simple expressions to temporaries causes us to miss good opportunities
4085 for optimizations. Among other things, the opportunity to fold in the
4086 addition of a constant into an addressing mode often gets lost, e.g.
4087 "y[i+1] += x;". In general, we take the approach that we should not make
4088 an assignment unless we are forced into it - i.e., that any non-side effect
4089 operator should be allowed, and that cse should take care of coalescing
4090 multiple utterances of the same expression should that prove fruitful. */
4091
4092 static tree
4093 stabilize_reference_1 (tree e)
4094 {
4095 tree result;
4096 enum tree_code code = TREE_CODE (e);
4097
4098 /* We cannot ignore const expressions because one might be a reference
4099 to a const array whose index contains side-effects. But we can
4100 ignore things that are actual constants or that have already been
4101 handled by this function. */
4102
4103 if (tree_invariant_p (e))
4104 return e;
4105
4106 switch (TREE_CODE_CLASS (code))
4107 {
4108 case tcc_exceptional:
4109 case tcc_type:
4110 case tcc_declaration:
4111 case tcc_comparison:
4112 case tcc_statement:
4113 case tcc_expression:
4114 case tcc_reference:
4115 case tcc_vl_exp:
4116 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4117 so that it will only be evaluated once. */
4118 /* The reference (r) and comparison (<) classes could be handled as
4119 below, but it is generally faster to only evaluate them once. */
4120 if (TREE_SIDE_EFFECTS (e))
4121 return save_expr (e);
4122 return e;
4123
4124 case tcc_constant:
4125 /* Constants need no processing. In fact, we should never reach
4126 here. */
4127 return e;
4128
4129 case tcc_binary:
4130 /* Division is slow and tends to be compiled with jumps,
4131 especially the division by powers of 2 that is often
4132 found inside of an array reference. So do it just once. */
4133 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4134 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4135 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4136 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4137 return save_expr (e);
4138 /* Recursively stabilize each operand. */
4139 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4140 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4141 break;
4142
4143 case tcc_unary:
4144 /* Recursively stabilize each operand. */
4145 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4146 break;
4147
4148 default:
4149 gcc_unreachable ();
4150 }
4151
4152 TREE_TYPE (result) = TREE_TYPE (e);
4153 TREE_READONLY (result) = TREE_READONLY (e);
4154 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4155 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4156
4157 return result;
4158 }
4159
4160 /* Stabilize a reference so that we can use it any number of times
4161 without causing its operands to be evaluated more than once.
4162 Returns the stabilized reference. This works by means of save_expr,
4163 so see the caveats in the comments about save_expr.
4164
4165 Also allows conversion expressions whose operands are references.
4166 Any other kind of expression is returned unchanged. */
4167
4168 tree
4169 stabilize_reference (tree ref)
4170 {
4171 tree result;
4172 enum tree_code code = TREE_CODE (ref);
4173
4174 switch (code)
4175 {
4176 case VAR_DECL:
4177 case PARM_DECL:
4178 case RESULT_DECL:
4179 /* No action is needed in this case. */
4180 return ref;
4181
4182 CASE_CONVERT:
4183 case FLOAT_EXPR:
4184 case FIX_TRUNC_EXPR:
4185 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4186 break;
4187
4188 case INDIRECT_REF:
4189 result = build_nt (INDIRECT_REF,
4190 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4191 break;
4192
4193 case COMPONENT_REF:
4194 result = build_nt (COMPONENT_REF,
4195 stabilize_reference (TREE_OPERAND (ref, 0)),
4196 TREE_OPERAND (ref, 1), NULL_TREE);
4197 break;
4198
4199 case BIT_FIELD_REF:
4200 result = build_nt (BIT_FIELD_REF,
4201 stabilize_reference (TREE_OPERAND (ref, 0)),
4202 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4203 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4204 break;
4205
4206 case ARRAY_REF:
4207 result = build_nt (ARRAY_REF,
4208 stabilize_reference (TREE_OPERAND (ref, 0)),
4209 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4210 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4211 break;
4212
4213 case ARRAY_RANGE_REF:
4214 result = build_nt (ARRAY_RANGE_REF,
4215 stabilize_reference (TREE_OPERAND (ref, 0)),
4216 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4217 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4218 break;
4219
4220 case COMPOUND_EXPR:
4221 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4222 it wouldn't be ignored. This matters when dealing with
4223 volatiles. */
4224 return stabilize_reference_1 (ref);
4225
4226 /* If arg isn't a kind of lvalue we recognize, make no change.
4227 Caller should recognize the error for an invalid lvalue. */
4228 default:
4229 return ref;
4230
4231 case ERROR_MARK:
4232 return error_mark_node;
4233 }
4234
4235 TREE_TYPE (result) = TREE_TYPE (ref);
4236 TREE_READONLY (result) = TREE_READONLY (ref);
4237 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4238 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4239
4240 return result;
4241 }
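/* An illustrative sketch (ARRAY, IDX and ELT_TYPE are hypothetical trees,
   with IDX containing side effects such as a post-increment):

     tree ref = build4 (ARRAY_REF, elt_type, array, idx,
			NULL_TREE, NULL_TREE);
     tree stable = stabilize_reference (ref);

   STABLE wraps IDX in a SAVE_EXPR via stabilize_reference_1, so a caller
   that uses STABLE both as an lvalue and as an rvalue (e.g. to expand
   "a[i++] += x") evaluates the index only once.  */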
4242 \f
4243 /* Low-level constructors for expressions. */
4244
4245 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4246 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4247
4248 void
4249 recompute_tree_invariant_for_addr_expr (tree t)
4250 {
4251 tree node;
4252 bool tc = true, se = false;
4253
4254 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4255
4256 /* We started out assuming this address is both invariant and constant and
4257 that it does not have side effects. Now go down any handled components and see if
4258 any of them involve offsets that are either non-constant or non-invariant.
4259 Also check for side-effects.
4260
4261 ??? Note that this code makes no attempt to deal with the case where
4262 taking the address of something causes a copy due to misalignment. */
4263
4264 #define UPDATE_FLAGS(NODE) \
4265 do { tree _node = (NODE); \
4266 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4267 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4268
4269 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4270 node = TREE_OPERAND (node, 0))
4271 {
4272 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4273 array reference (probably made temporarily by the G++ front end),
4274 so ignore all the operands. */
4275 if ((TREE_CODE (node) == ARRAY_REF
4276 || TREE_CODE (node) == ARRAY_RANGE_REF)
4277 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4278 {
4279 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4280 if (TREE_OPERAND (node, 2))
4281 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4282 if (TREE_OPERAND (node, 3))
4283 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4284 }
4285 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4286 FIELD_DECL, apparently. The G++ front end can put something else
4287 there, at least temporarily. */
4288 else if (TREE_CODE (node) == COMPONENT_REF
4289 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4290 {
4291 if (TREE_OPERAND (node, 2))
4292 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4293 }
4294 }
4295
4296 node = lang_hooks.expr_to_decl (node, &tc, &se);
4297
4298 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4299 the address, since &(*a)->b is a form of addition. If it's a constant, the
4300 address is constant too. If it's a decl, its address is constant if the
4301 decl is static. Everything else is not constant and, furthermore,
4302 taking the address of a volatile variable is not volatile. */
4303 if (TREE_CODE (node) == INDIRECT_REF
4304 || TREE_CODE (node) == MEM_REF)
4305 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4306 else if (CONSTANT_CLASS_P (node))
4307 ;
4308 else if (DECL_P (node))
4309 tc &= (staticp (node) != NULL_TREE);
4310 else
4311 {
4312 tc = false;
4313 se |= TREE_SIDE_EFFECTS (node);
4314 }
4315
4316
4317 TREE_CONSTANT (t) = tc;
4318 TREE_SIDE_EFFECTS (t) = se;
4319 #undef UPDATE_FLAGS
4320 }
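/* Illustrative use (T and NEW_BASE are hypothetical: T a pre-existing
   ADDR_EXPR whose operand has just been rewritten in place):

     TREE_OPERAND (t, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (t);

   Afterwards TREE_CONSTANT (T) is set only if the address is known to be
   constant (e.g. the base is a static decl and all offsets are constant),
   and TREE_SIDE_EFFECTS (T) reflects side effects in the component refs.  */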
4321
4322 /* Build an expression of code CODE, data type TYPE, and operands as
4323 specified. Expressions and reference nodes can be created this way.
4324 Constants, decls, types and misc nodes cannot be.
4325
4326 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4327 enough for all extant tree codes. */
4328
4329 tree
4330 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4331 {
4332 tree t;
4333
4334 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4335
4336 t = make_node_stat (code PASS_MEM_STAT);
4337 TREE_TYPE (t) = tt;
4338
4339 return t;
4340 }
4341
4342 tree
4343 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4344 {
4345 int length = sizeof (struct tree_exp);
4346 tree t;
4347
4348 record_node_allocation_statistics (code, length);
4349
4350 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4351
4352 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4353
4354 memset (t, 0, sizeof (struct tree_common));
4355
4356 TREE_SET_CODE (t, code);
4357
4358 TREE_TYPE (t) = type;
4359 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4360 TREE_OPERAND (t, 0) = node;
4361 if (node && !TYPE_P (node))
4362 {
4363 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4364 TREE_READONLY (t) = TREE_READONLY (node);
4365 }
4366
4367 if (TREE_CODE_CLASS (code) == tcc_statement)
4368 TREE_SIDE_EFFECTS (t) = 1;
4369 else switch (code)
4370 {
4371 case VA_ARG_EXPR:
4372 /* All of these have side-effects, no matter what their
4373 operands are. */
4374 TREE_SIDE_EFFECTS (t) = 1;
4375 TREE_READONLY (t) = 0;
4376 break;
4377
4378 case INDIRECT_REF:
4379 /* Whether a dereference is readonly has nothing to do with whether
4380 its operand is readonly. */
4381 TREE_READONLY (t) = 0;
4382 break;
4383
4384 case ADDR_EXPR:
4385 if (node)
4386 recompute_tree_invariant_for_addr_expr (t);
4387 break;
4388
4389 default:
4390 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4391 && node && !TYPE_P (node)
4392 && TREE_CONSTANT (node))
4393 TREE_CONSTANT (t) = 1;
4394 if (TREE_CODE_CLASS (code) == tcc_reference
4395 && node && TREE_THIS_VOLATILE (node))
4396 TREE_THIS_VOLATILE (t) = 1;
4397 break;
4398 }
4399
4400 return t;
4401 }
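/* For example, a unary expression over a constant operand is itself
   marked constant by the default case above:

     tree neg = build1 (NEGATE_EXPR, integer_type_node, integer_one_node);

   Here TREE_CONSTANT (NEG) is 1, while building an INDIRECT_REF or a
   VA_ARG_EXPR would instead have cleared TREE_READONLY as handled above.  */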
4402
4403 #define PROCESS_ARG(N) \
4404 do { \
4405 TREE_OPERAND (t, N) = arg##N; \
4406 if (arg##N &&!TYPE_P (arg##N)) \
4407 { \
4408 if (TREE_SIDE_EFFECTS (arg##N)) \
4409 side_effects = 1; \
4410 if (!TREE_READONLY (arg##N) \
4411 && !CONSTANT_CLASS_P (arg##N)) \
4412 (void) (read_only = 0); \
4413 if (!TREE_CONSTANT (arg##N)) \
4414 (void) (constant = 0); \
4415 } \
4416 } while (0)
4417
4418 tree
4419 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4420 {
4421 bool constant, read_only, side_effects;
4422 tree t;
4423
4424 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4425
4426 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4427 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4428 /* When sizetype precision doesn't match that of pointers
4429 we need to be able to build explicit extensions or truncations
4430 of the offset argument. */
4431 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4432 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4433 && TREE_CODE (arg1) == INTEGER_CST);
4434
4435 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4436 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4437 && ptrofftype_p (TREE_TYPE (arg1)));
4438
4439 t = make_node_stat (code PASS_MEM_STAT);
4440 TREE_TYPE (t) = tt;
4441
4442 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4443 result based on those same flags for the arguments. But if the
4444 arguments aren't really even `tree' expressions, we shouldn't be trying
4445 to do this. */
4446
4447 /* Expressions without side effects may be constant if their
4448 arguments are as well. */
4449 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4450 || TREE_CODE_CLASS (code) == tcc_binary);
4451 read_only = 1;
4452 side_effects = TREE_SIDE_EFFECTS (t);
4453
4454 PROCESS_ARG (0);
4455 PROCESS_ARG (1);
4456
4457 TREE_SIDE_EFFECTS (t) = side_effects;
4458 if (code == MEM_REF)
4459 {
4460 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4461 {
4462 tree o = TREE_OPERAND (arg0, 0);
4463 TREE_READONLY (t) = TREE_READONLY (o);
4464 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4465 }
4466 }
4467 else
4468 {
4469 TREE_READONLY (t) = read_only;
4470 TREE_CONSTANT (t) = constant;
4471 TREE_THIS_VOLATILE (t)
4472 = (TREE_CODE_CLASS (code) == tcc_reference
4473 && arg0 && TREE_THIS_VOLATILE (arg0));
4474 }
4475
4476 return t;
4477 }
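/* For instance, the POINTER_PLUS_EXPR assertion above requires a pointer
   result type, a pointer first operand and a ptrofftype offset (PTR is a
   hypothetical tree of pointer type):

     tree off = build_int_cst (sizetype, 4);
     tree sum = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   SUM is TREE_CONSTANT only if both operands are, per PROCESS_ARG.  */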
4478
4479
4480 tree
4481 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4482 tree arg2 MEM_STAT_DECL)
4483 {
4484 bool constant, read_only, side_effects;
4485 tree t;
4486
4487 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4488 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4489
4490 t = make_node_stat (code PASS_MEM_STAT);
4491 TREE_TYPE (t) = tt;
4492
4493 read_only = 1;
4494
4495 /* As a special exception, if COND_EXPR has NULL branches, we
4496 assume that it is a gimple statement and always consider
4497 it to have side effects. */
4498 if (code == COND_EXPR
4499 && tt == void_type_node
4500 && arg1 == NULL_TREE
4501 && arg2 == NULL_TREE)
4502 side_effects = true;
4503 else
4504 side_effects = TREE_SIDE_EFFECTS (t);
4505
4506 PROCESS_ARG (0);
4507 PROCESS_ARG (1);
4508 PROCESS_ARG (2);
4509
4510 if (code == COND_EXPR)
4511 TREE_READONLY (t) = read_only;
4512
4513 TREE_SIDE_EFFECTS (t) = side_effects;
4514 TREE_THIS_VOLATILE (t)
4515 = (TREE_CODE_CLASS (code) == tcc_reference
4516 && arg0 && TREE_THIS_VOLATILE (arg0));
4517
4518 return t;
4519 }
4520
4521 tree
4522 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4523 tree arg2, tree arg3 MEM_STAT_DECL)
4524 {
4525 bool constant, read_only, side_effects;
4526 tree t;
4527
4528 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4529
4530 t = make_node_stat (code PASS_MEM_STAT);
4531 TREE_TYPE (t) = tt;
4532
4533 side_effects = TREE_SIDE_EFFECTS (t);
4534
4535 PROCESS_ARG (0);
4536 PROCESS_ARG (1);
4537 PROCESS_ARG (2);
4538 PROCESS_ARG (3);
4539
4540 TREE_SIDE_EFFECTS (t) = side_effects;
4541 TREE_THIS_VOLATILE (t)
4542 = (TREE_CODE_CLASS (code) == tcc_reference
4543 && arg0 && TREE_THIS_VOLATILE (arg0));
4544
4545 return t;
4546 }
4547
4548 tree
4549 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4550 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4551 {
4552 bool constant, read_only, side_effects;
4553 tree t;
4554
4555 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4556
4557 t = make_node_stat (code PASS_MEM_STAT);
4558 TREE_TYPE (t) = tt;
4559
4560 side_effects = TREE_SIDE_EFFECTS (t);
4561
4562 PROCESS_ARG (0);
4563 PROCESS_ARG (1);
4564 PROCESS_ARG (2);
4565 PROCESS_ARG (3);
4566 PROCESS_ARG (4);
4567
4568 TREE_SIDE_EFFECTS (t) = side_effects;
4569 if (code == TARGET_MEM_REF)
4570 {
4571 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4572 {
4573 tree o = TREE_OPERAND (arg0, 0);
4574 TREE_READONLY (t) = TREE_READONLY (o);
4575 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4576 }
4577 }
4578 else
4579 TREE_THIS_VOLATILE (t)
4580 = (TREE_CODE_CLASS (code) == tcc_reference
4581 && arg0 && TREE_THIS_VOLATILE (arg0));
4582
4583 return t;
4584 }
4585
4586 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4587 on the pointer PTR. */
4588
4589 tree
4590 build_simple_mem_ref_loc (location_t loc, tree ptr)
4591 {
4592 HOST_WIDE_INT offset = 0;
4593 tree ptype = TREE_TYPE (ptr);
4594 tree tem;
4595 /* For convenience allow addresses that collapse to a simple base
4596 and offset. */
4597 if (TREE_CODE (ptr) == ADDR_EXPR
4598 && (handled_component_p (TREE_OPERAND (ptr, 0))
4599 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4600 {
4601 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4602 gcc_assert (ptr);
4603 ptr = build_fold_addr_expr (ptr);
4604 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4605 }
4606 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4607 ptr, build_int_cst (ptype, offset));
4608 SET_EXPR_LOCATION (tem, loc);
4609 return tem;
4610 }
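/* For a plain pointer SSA name or decl P (hypothetical), this is
   equivalent to

     build2 (MEM_REF, TREE_TYPE (TREE_TYPE (p)), p,
	     build_int_cst (TREE_TYPE (p), 0));

   i.e. a dereference of P at offset zero, plus setting the location; the
   ADDR_EXPR case above merely collapses a handled-component address to a
   base and constant offset first.  */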
4611
4612 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4613
4614 offset_int
4615 mem_ref_offset (const_tree t)
4616 {
4617 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4618 }
4619
4620 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4621 offsetted by OFFSET units. */
4622
4623 tree
4624 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4625 {
4626 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4627 build_fold_addr_expr (base),
4628 build_int_cst (ptr_type_node, offset));
4629 tree addr = build1 (ADDR_EXPR, type, ref);
4630 recompute_tree_invariant_for_addr_expr (addr);
4631 return addr;
4632 }
4633
4634 /* Similar except don't specify the TREE_TYPE
4635 and leave the TREE_SIDE_EFFECTS as 0.
4636 It is permissible for arguments to be null,
4637 or even garbage if their values do not matter. */
4638
4639 tree
4640 build_nt (enum tree_code code, ...)
4641 {
4642 tree t;
4643 int length;
4644 int i;
4645 va_list p;
4646
4647 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4648
4649 va_start (p, code);
4650
4651 t = make_node (code);
4652 length = TREE_CODE_LENGTH (code);
4653
4654 for (i = 0; i < length; i++)
4655 TREE_OPERAND (t, i) = va_arg (p, tree);
4656
4657 va_end (p);
4658 return t;
4659 }
4660
4661 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4662 tree vec. */
4663
4664 tree
4665 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4666 {
4667 tree ret, t;
4668 unsigned int ix;
4669
4670 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4671 CALL_EXPR_FN (ret) = fn;
4672 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4673 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4674 CALL_EXPR_ARG (ret, ix) = t;
4675 return ret;
4676 }
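/* Illustrative use (FNDECL is a hypothetical FUNCTION_DECL):

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, integer_zero_node);
     tree call = build_nt_call_vec (build_fold_addr_expr (fndecl), args);

   As with build_nt, no TREE_SIDE_EFFECTS or location information is set
   on the resulting CALL_EXPR.  */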
4677 \f
4678 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4679 We do NOT enter this node in any sort of symbol table.
4680
4681 LOC is the location of the decl.
4682
4683 layout_decl is used to set up the decl's storage layout.
4684 Other slots are initialized to 0 or null pointers. */
4685
4686 tree
4687 build_decl_stat (location_t loc, enum tree_code code, tree name,
4688 tree type MEM_STAT_DECL)
4689 {
4690 tree t;
4691
4692 t = make_node_stat (code PASS_MEM_STAT);
4693 DECL_SOURCE_LOCATION (t) = loc;
4694
4695 /* if (type == error_mark_node)
4696 type = integer_type_node; */
4697 /* That is not done, deliberately, so that having error_mark_node
4698 as the type can suppress useless errors in the use of this variable. */
4699
4700 DECL_NAME (t) = name;
4701 TREE_TYPE (t) = type;
4702
4703 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4704 layout_decl (t, 0);
4705
4706 return t;
4707 }
4708
4709 /* Builds and returns function declaration with NAME and TYPE. */
4710
4711 tree
4712 build_fn_decl (const char *name, tree type)
4713 {
4714 tree id = get_identifier (name);
4715 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4716
4717 DECL_EXTERNAL (decl) = 1;
4718 TREE_PUBLIC (decl) = 1;
4719 DECL_ARTIFICIAL (decl) = 1;
4720 TREE_NOTHROW (decl) = 1;
4721
4722 return decl;
4723 }
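/* A minimal sketch of a caller (the helper name is made up for the
   example):

     tree fntype = build_function_type_list (void_type_node,
					     ptr_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__example_helper", fntype);

   The result is an external, public, artificial, nothrow FUNCTION_DECL
   that the caller may further adjust (e.g. set DECL_ATTRIBUTES).  */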
4724
4725 vec<tree, va_gc> *all_translation_units;
4726
4727 /* Builds a new translation-unit decl with name NAME, queues it in the
4728 global list of translation-unit decls and returns it. */
4729
4730 tree
4731 build_translation_unit_decl (tree name)
4732 {
4733 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4734 name, NULL_TREE);
4735 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4736 vec_safe_push (all_translation_units, tu);
4737 return tu;
4738 }
4739
4740 \f
4741 /* BLOCK nodes are used to represent the structure of binding contours
4742 and declarations, once those contours have been exited and their contents
4743 compiled. This information is used for outputting debugging info. */
4744
4745 tree
4746 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4747 {
4748 tree block = make_node (BLOCK);
4749
4750 BLOCK_VARS (block) = vars;
4751 BLOCK_SUBBLOCKS (block) = subblocks;
4752 BLOCK_SUPERCONTEXT (block) = supercontext;
4753 BLOCK_CHAIN (block) = chain;
4754 return block;
4755 }
4756
4757 \f
4758 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4759
4760 LOC is the location to use in tree T. */
4761
4762 void
4763 protected_set_expr_location (tree t, location_t loc)
4764 {
4765 if (CAN_HAVE_LOCATION_P (t))
4766 SET_EXPR_LOCATION (t, loc);
4767 }
4768 \f
4769 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4770 is ATTRIBUTE. */
4771
4772 tree
4773 build_decl_attribute_variant (tree ddecl, tree attribute)
4774 {
4775 DECL_ATTRIBUTES (ddecl) = attribute;
4776 return ddecl;
4777 }
4778
4779 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4780 list is ATTRIBUTE and its qualifiers are QUALS.
4781
4782 Record such modified types already made so we don't make duplicates. */
4783
4784 tree
4785 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4786 {
4787 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4788 {
4789 inchash::hash hstate;
4790 tree ntype;
4791 int i;
4792 tree t;
4793 enum tree_code code = TREE_CODE (ttype);
4794
4795 /* Building a distinct copy of a tagged type is inappropriate; it
4796 causes breakage in code that expects there to be a one-to-one
4797 relationship between a struct and its fields.
4798 build_duplicate_type is another solution (as used in
4799 handle_transparent_union_attribute), but that doesn't play well
4800 with the stronger C++ type identity model. */
4801 if (TREE_CODE (ttype) == RECORD_TYPE
4802 || TREE_CODE (ttype) == UNION_TYPE
4803 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4804 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4805 {
4806 warning (OPT_Wattributes,
4807 "ignoring attributes applied to %qT after definition",
4808 TYPE_MAIN_VARIANT (ttype));
4809 return build_qualified_type (ttype, quals);
4810 }
4811
4812 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4813 ntype = build_distinct_type_copy (ttype);
4814
4815 TYPE_ATTRIBUTES (ntype) = attribute;
4816
4817 hstate.add_int (code);
4818 if (TREE_TYPE (ntype))
4819 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4820 attribute_hash_list (attribute, hstate);
4821
4822 switch (TREE_CODE (ntype))
4823 {
4824 case FUNCTION_TYPE:
4825 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4826 break;
4827 case ARRAY_TYPE:
4828 if (TYPE_DOMAIN (ntype))
4829 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4830 break;
4831 case INTEGER_TYPE:
4832 t = TYPE_MAX_VALUE (ntype);
4833 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4834 hstate.add_object (TREE_INT_CST_ELT (t, i));
4835 break;
4836 case REAL_TYPE:
4837 case FIXED_POINT_TYPE:
4838 {
4839 unsigned int precision = TYPE_PRECISION (ntype);
4840 hstate.add_object (precision);
4841 }
4842 break;
4843 default:
4844 break;
4845 }
4846
4847 ntype = type_hash_canon (hstate.end(), ntype);
4848
4849 /* If the target-dependent attributes make NTYPE different from
4850 its canonical type, we will need to use structural equality
4851 checks for this type. */
4852 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4853 || !comp_type_attributes (ntype, ttype))
4854 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4855 else if (TYPE_CANONICAL (ntype) == ntype)
4856 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4857
4858 ttype = build_qualified_type (ntype, quals);
4859 }
4860 else if (TYPE_QUALS (ttype) != quals)
4861 ttype = build_qualified_type (ttype, quals);
4862
4863 return ttype;
4864 }
4865
4866 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4867 the same. */
4868
4869 static bool
4870 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4871 {
4872 tree cl1, cl2;
4873 for (cl1 = clauses1, cl2 = clauses2;
4874 cl1 && cl2;
4875 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4876 {
4877 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4878 return false;
4879 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4880 {
4881 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4882 OMP_CLAUSE_DECL (cl2)) != 1)
4883 return false;
4884 }
4885 switch (OMP_CLAUSE_CODE (cl1))
4886 {
4887 case OMP_CLAUSE_ALIGNED:
4888 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4889 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4890 return false;
4891 break;
4892 case OMP_CLAUSE_LINEAR:
4893 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4894 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4895 return false;
4896 break;
4897 case OMP_CLAUSE_SIMDLEN:
4898 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4899 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4900 return false;
4901 default:
4902 break;
4903 }
4904 }
4905 return true;
4906 }
4907
4908 /* Compare two constructor-element-type constants. Return true if the
4909 lists are known to be equal; otherwise return false. */
4910
4911 static bool
4912 simple_cst_list_equal (const_tree l1, const_tree l2)
4913 {
4914 while (l1 != NULL_TREE && l2 != NULL_TREE)
4915 {
4916 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4917 return false;
4918
4919 l1 = TREE_CHAIN (l1);
4920 l2 = TREE_CHAIN (l2);
4921 }
4922
4923 return l1 == l2;
4924 }
4925
4926 /* Compare two identifier nodes representing attributes. Either one may
4927 be in wrapped __ATTR__ form. Return true if they are the same, false
4928 otherwise. */
4929
4930 static bool
4931 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4932 {
4933 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4934 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4935 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4936
4937 /* Identifiers can be compared directly for equality. */
4938 if (attr1 == attr2)
4939 return true;
4940
4941 /* If they are not equal, one may still be in the form
4942 'text' while the other is in the form '__text__'. TODO:
4943 If we were storing attributes in normalized 'text' form, then
4944 this could all go away and we could take full advantage of
4945 the fact that we're comparing identifiers. :-) */
4946 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4947 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4948
4949 if (attr2_len == attr1_len + 4)
4950 {
4951 const char *p = IDENTIFIER_POINTER (attr2);
4952 const char *q = IDENTIFIER_POINTER (attr1);
4953 if (p[0] == '_' && p[1] == '_'
4954 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4955 && strncmp (q, p + 2, attr1_len) == 0)
4956 return true;
4957 }
4958 else if (attr2_len + 4 == attr1_len)
4959 {
4960 const char *p = IDENTIFIER_POINTER (attr2);
4961 const char *q = IDENTIFIER_POINTER (attr1);
4962 if (q[0] == '_' && q[1] == '_'
4963 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4964 && strncmp (q + 2, p, attr2_len) == 0)
4965 return true;
4966 }
4967
4968 return false;
4969 }
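/* Thus, for example,

     cmp_attrib_identifiers (get_identifier ("aligned"),
			     get_identifier ("__aligned__"))

   returns true, while comparing "aligned" against "__packed__"
   returns false.  */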
4970
4971 /* Compare two attributes for their value identity. Return true if the
4972 attribute values are known to be equal; otherwise return false. */
4973
4974 bool
4975 attribute_value_equal (const_tree attr1, const_tree attr2)
4976 {
4977 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4978 return true;
4979
4980 if (TREE_VALUE (attr1) != NULL_TREE
4981 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4982 && TREE_VALUE (attr2) != NULL_TREE
4983 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4984 {
4985 /* Handle attribute format. */
4986 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
4987 {
4988 attr1 = TREE_VALUE (attr1);
4989 attr2 = TREE_VALUE (attr2);
4990 /* Compare the archetypes (printf/scanf/strftime/...). */
4991 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4992 TREE_VALUE (attr2)))
4993 return false;
4994 /* Archetypes are the same. Compare the rest. */
4995 return (simple_cst_list_equal (TREE_CHAIN (attr1),
4996 TREE_CHAIN (attr2)) == 1);
4997 }
4998 return (simple_cst_list_equal (TREE_VALUE (attr1),
4999 TREE_VALUE (attr2)) == 1);
5000 }
5001
5002 if ((flag_openmp || flag_openmp_simd)
5003 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5004 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5005 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5006 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5007 TREE_VALUE (attr2));
5008
5009 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5010 }
5011
5012 /* Return 0 if the attributes for two types are incompatible, 1 if they
5013 are compatible, and 2 if they are nearly compatible (which causes a
5014 warning to be generated). */
5015 int
5016 comp_type_attributes (const_tree type1, const_tree type2)
5017 {
5018 const_tree a1 = TYPE_ATTRIBUTES (type1);
5019 const_tree a2 = TYPE_ATTRIBUTES (type2);
5020 const_tree a;
5021
5022 if (a1 == a2)
5023 return 1;
5024 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5025 {
5026 const struct attribute_spec *as;
5027 const_tree attr;
5028
5029 as = lookup_attribute_spec (get_attribute_name (a));
5030 if (!as || as->affects_type_identity == false)
5031 continue;
5032
5033 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5034 if (!attr || !attribute_value_equal (a, attr))
5035 break;
5036 }
5037 if (!a)
5038 {
5039 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5040 {
5041 const struct attribute_spec *as;
5042
5043 as = lookup_attribute_spec (get_attribute_name (a));
5044 if (!as || as->affects_type_identity == false)
5045 continue;
5046
5047 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5048 break;
5049 /* We don't need to compare trees again, as we did this
5050 already in the first loop. */
5051 }
5052 /* All identity-affecting attributes are equal, so there is
5053 no need to call the target hook for comparison. */
5054 if (!a)
5055 return 1;
5056 }
5057 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5058 return 0;
5059 /* As some type combinations - like default calling-convention - might
5060 be compatible, we have to call the target hook to get the final result. */
5061 return targetm.comp_type_attributes (type1, type2);
5062 }
5063
5064 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
5065 list is ATTRIBUTE.
5066
5067 Record such modified types already made so we don't make duplicates. */
5068
5069 tree
5070 build_type_attribute_variant (tree ttype, tree attribute)
5071 {
5072 return build_type_attribute_qual_variant (ttype, attribute,
5073 TYPE_QUALS (ttype));
5074 }
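/* An illustrative use (TYPE is a hypothetical type node):

     tree attrs = tree_cons (get_identifier ("may_alias"), NULL_TREE,
			     TYPE_ATTRIBUTES (type));
     tree newtype = build_type_attribute_variant (type, attrs);

   Attribute lists are TREE_LISTs whose TREE_PURPOSE is the attribute name
   and whose TREE_VALUE is its argument list (NULL_TREE here).  */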
5075
5076
5077 /* Reset the expression *EXPR_P, a size or position.
5078
5079 ??? We could reset all non-constant sizes or positions. But it's cheap
5080 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5081
5082 We need to reset self-referential sizes or positions because they cannot
5083 be gimplified and thus can contain a CALL_EXPR after the gimplification
5084 is finished, which will run afoul of LTO streaming. And they need to be
5085 reset to something essentially dummy but not constant, so as to preserve
5086 the properties of the object they are attached to. */
5087
5088 static inline void
5089 free_lang_data_in_one_sizepos (tree *expr_p)
5090 {
5091 tree expr = *expr_p;
5092 if (CONTAINS_PLACEHOLDER_P (expr))
5093 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5094 }
5095
5096
5097 /* Reset all the fields in a binfo node BINFO. We only keep
5098 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5099
5100 static void
5101 free_lang_data_in_binfo (tree binfo)
5102 {
5103 unsigned i;
5104 tree t;
5105
5106 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5107
5108 BINFO_VIRTUALS (binfo) = NULL_TREE;
5109 BINFO_BASE_ACCESSES (binfo) = NULL;
5110 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5111 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5112
5113 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5114 free_lang_data_in_binfo (t);
5115 }
5116
5117
5118 /* Reset all language specific information still present in TYPE. */
5119
5120 static void
5121 free_lang_data_in_type (tree type)
5122 {
5123 gcc_assert (TYPE_P (type));
5124
5125 /* Give the FE a chance to remove its own data first. */
5126 lang_hooks.free_lang_data (type);
5127
5128 TREE_LANG_FLAG_0 (type) = 0;
5129 TREE_LANG_FLAG_1 (type) = 0;
5130 TREE_LANG_FLAG_2 (type) = 0;
5131 TREE_LANG_FLAG_3 (type) = 0;
5132 TREE_LANG_FLAG_4 (type) = 0;
5133 TREE_LANG_FLAG_5 (type) = 0;
5134 TREE_LANG_FLAG_6 (type) = 0;
5135
5136 if (TREE_CODE (type) == FUNCTION_TYPE)
5137 {
5138 /* Remove the const and volatile qualifiers from arguments. The
5139 C++ front end removes them, but the C front end does not,
5140 leading to false ODR violation errors when merging two
5141 instances of the same function signature compiled by
5142 different front ends. */
5143 tree p;
5144
5145 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5146 {
5147 tree arg_type = TREE_VALUE (p);
5148
5149 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5150 {
5151 int quals = TYPE_QUALS (arg_type)
5152 & ~TYPE_QUAL_CONST
5153 & ~TYPE_QUAL_VOLATILE;
5154 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5155 free_lang_data_in_type (TREE_VALUE (p));
5156 }
5157 /* C++ FE uses TREE_PURPOSE to store initial values. */
5158 TREE_PURPOSE (p) = NULL;
5159 }
5160 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5161 TYPE_MINVAL (type) = NULL;
5162 }
5163 if (TREE_CODE (type) == METHOD_TYPE)
5164 {
5165 tree p;
5166
5167 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5168 {
5169 /* C++ FE uses TREE_PURPOSE to store initial values. */
5170 TREE_PURPOSE (p) = NULL;
5171 }
5172 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5173 TYPE_MINVAL (type) = NULL;
5174 }
5175
5176 /* Remove members that are not actually FIELD_DECLs from the field
5177 list of an aggregate. These occur in C++. */
5178 if (RECORD_OR_UNION_TYPE_P (type))
5179 {
5180 tree prev, member;
5181
5182 /* Note that TYPE_FIELDS can be shared across distinct
5183 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5184 to be removed, we cannot set its TREE_CHAIN to NULL.
5185 Otherwise, we would not be able to find all the other fields
5186 in the other instances of this TREE_TYPE.
5187
5188 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5189 prev = NULL_TREE;
5190 member = TYPE_FIELDS (type);
5191 while (member)
5192 {
5193 if (TREE_CODE (member) == FIELD_DECL
5194 || TREE_CODE (member) == TYPE_DECL)
5195 {
5196 if (prev)
5197 TREE_CHAIN (prev) = member;
5198 else
5199 TYPE_FIELDS (type) = member;
5200 prev = member;
5201 }
5202
5203 member = TREE_CHAIN (member);
5204 }
5205
5206 if (prev)
5207 TREE_CHAIN (prev) = NULL_TREE;
5208 else
5209 TYPE_FIELDS (type) = NULL_TREE;
5210
5211 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5212 and dangles the pointer from time to time. */
5213 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5214 TYPE_VFIELD (type) = NULL_TREE;
5215
5216 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5217 to enable ODR warnings about different method lists, doing so
5218 seems to increase the size of the streamed LTO data impractically.
5219 Keep the information about whether TYPE_METHODS was non-NULL; this is
5220 used by function.c and the pretty printers. */
5221 if (TYPE_METHODS (type))
5222 TYPE_METHODS (type) = error_mark_node;
5223 if (TYPE_BINFO (type))
5224 {
5225 free_lang_data_in_binfo (TYPE_BINFO (type));
5226 /* We need to preserve the link to bases and the virtual table for all
5227 polymorphic types to keep the devirtualization machinery working.
5228 Debug output cares only about bases, but we also output the
5229 virtual table pointers so that merging -fdevirtualize and
5230 -fno-devirtualize units is easier. */
5231 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5232 || !flag_devirtualize)
5233 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5234 && !BINFO_VTABLE (TYPE_BINFO (type)))
5235 || debug_info_level != DINFO_LEVEL_NONE))
5236 TYPE_BINFO (type) = NULL;
5237 }
5238 }
5239 else
5240 {
5241 /* For non-aggregate types, clear out the language slot (which
5242 overloads TYPE_BINFO). */
5243 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5244
5245 if (INTEGRAL_TYPE_P (type)
5246 || SCALAR_FLOAT_TYPE_P (type)
5247 || FIXED_POINT_TYPE_P (type))
5248 {
5249 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5250 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5251 }
5252 }
5253
5254 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5255 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5256
5257 if (TYPE_CONTEXT (type)
5258 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5259 {
5260 tree ctx = TYPE_CONTEXT (type);
5261 do
5262 {
5263 ctx = BLOCK_SUPERCONTEXT (ctx);
5264 }
5265 while (ctx && TREE_CODE (ctx) == BLOCK);
5266 TYPE_CONTEXT (type) = ctx;
5267 }
5268 }
5269
5270
5271 /* Return true if DECL may need an assembler name to be set. */
5272
5273 static inline bool
5274 need_assembler_name_p (tree decl)
5275 {
5276 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5277 Rule merging. This makes type_odr_p return true on those types during
5278 LTO and, by comparing the mangled names, we can say which types are intended
5279 to be equivalent across compilation units.
5280
5281 We do not store names of type_in_anonymous_namespace_p.
5282
5283 Record, union and enumeration types have linkage that allows us
5284 to check type_in_anonymous_namespace_p. We do not mangle compound types
5285 that can always be compared structurally.
5286
5287 Similarly for builtin types, we compare properties of their main variant.
5288 A special case are integer types, where mangling does distinguish
5289 between char/signed char/unsigned char etc. Storing names for these lets
5290 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5291 See cp/mangle.c:write_builtin_type for details. */
5292
5293 if (flag_lto_odr_type_mering
5294 && TREE_CODE (decl) == TYPE_DECL
5295 && DECL_NAME (decl)
5296 && decl == TYPE_NAME (TREE_TYPE (decl))
5297 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5298 && (type_with_linkage_p (TREE_TYPE (decl))
5299 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5300 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5301 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5302 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5303 if (TREE_CODE (decl) != FUNCTION_DECL
5304 && TREE_CODE (decl) != VAR_DECL)
5305 return false;
5306
5307 /* If DECL already has its assembler name set, it does not need a
5308 new one. */
5309 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5310 || DECL_ASSEMBLER_NAME_SET_P (decl))
5311 return false;
5312
5313 /* Abstract decls do not need an assembler name. */
5314 if (DECL_ABSTRACT_P (decl))
5315 return false;
5316
5317 /* For VAR_DECLs, only static, public and external symbols need an
5318 assembler name. */
5319 if (TREE_CODE (decl) == VAR_DECL
5320 && !TREE_STATIC (decl)
5321 && !TREE_PUBLIC (decl)
5322 && !DECL_EXTERNAL (decl))
5323 return false;
5324
5325 if (TREE_CODE (decl) == FUNCTION_DECL)
5326 {
5327 /* Do not set assembler name on builtins. Allow RTL expansion to
5328 decide whether to expand inline or via a regular call. */
5329 if (DECL_BUILT_IN (decl)
5330 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5331 return false;
5332
5333 /* Functions represented in the callgraph need an assembler name. */
5334 if (cgraph_node::get (decl) != NULL)
5335 return true;
5336
5337 /* Unused and non-public functions don't need an assembler name. */
5338 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5339 return false;
5340 }
5341
5342 return true;
5343 }
5344
5345
5346 /* Reset all language specific information still present in symbol
5347 DECL. */
5348
5349 static void
5350 free_lang_data_in_decl (tree decl)
5351 {
5352 gcc_assert (DECL_P (decl));
5353
5354 /* Give the FE a chance to remove its own data first. */
5355 lang_hooks.free_lang_data (decl);
5356
5357 TREE_LANG_FLAG_0 (decl) = 0;
5358 TREE_LANG_FLAG_1 (decl) = 0;
5359 TREE_LANG_FLAG_2 (decl) = 0;
5360 TREE_LANG_FLAG_3 (decl) = 0;
5361 TREE_LANG_FLAG_4 (decl) = 0;
5362 TREE_LANG_FLAG_5 (decl) = 0;
5363 TREE_LANG_FLAG_6 (decl) = 0;
5364
5365 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5366 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5367 if (TREE_CODE (decl) == FIELD_DECL)
5368 {
5369 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5370 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5371 DECL_QUALIFIER (decl) = NULL_TREE;
5372 }
5373
5374 if (TREE_CODE (decl) == FUNCTION_DECL)
5375 {
5376 struct cgraph_node *node;
5377 if (!(node = cgraph_node::get (decl))
5378 || (!node->definition && !node->clones))
5379 {
5380 if (node)
5381 node->release_body ();
5382 else
5383 {
5384 release_function_body (decl);
5385 DECL_ARGUMENTS (decl) = NULL;
5386 DECL_RESULT (decl) = NULL;
5387 DECL_INITIAL (decl) = error_mark_node;
5388 }
5389 }
5390 if (gimple_has_body_p (decl))
5391 {
5392 tree t;
5393
5394 /* If DECL has a gimple body, then the context for its
5395 arguments must be DECL. Otherwise, it doesn't really
5396 matter, as we will not be emitting any code for DECL. In
5397 general, there may be other instances of DECL created by
5398 the front end and since PARM_DECLs are generally shared,
5399 their DECL_CONTEXT changes as the replicas of DECL are
5400 created. The only time where DECL_CONTEXT is important
5401 is for the FUNCTION_DECLs that have a gimple body (since
5402 the PARM_DECL will be used in the function's body). */
5403 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5404 DECL_CONTEXT (t) = decl;
5405 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5406 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5407 = target_option_default_node;
5408 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5409 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5410 = optimization_default_node;
5411 }
5412
5413 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5414 At this point, it is not needed anymore. */
5415 DECL_SAVED_TREE (decl) = NULL_TREE;
5416
5417 /* Clear the abstract origin if it refers to a method. Otherwise
5418 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5419 origin will not be output correctly. */
5420 if (DECL_ABSTRACT_ORIGIN (decl)
5421 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5422 && RECORD_OR_UNION_TYPE_P
5423 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5424 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5425
5426 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5427 DECL_VINDEX referring to itself into a vtable slot number as it
5428 should. Happens with functions that are copied and then forgotten
5429 about. Just clear it, it won't matter anymore. */
5430 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5431 DECL_VINDEX (decl) = NULL_TREE;
5432 }
5433 else if (TREE_CODE (decl) == VAR_DECL)
5434 {
5435 if ((DECL_EXTERNAL (decl)
5436 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5437 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5438 DECL_INITIAL (decl) = NULL_TREE;
5439 }
5440 else if (TREE_CODE (decl) == TYPE_DECL
5441 || TREE_CODE (decl) == FIELD_DECL)
5442 DECL_INITIAL (decl) = NULL_TREE;
5443 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5444 && DECL_INITIAL (decl)
5445 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5446 {
5447 /* Strip builtins from the translation-unit BLOCK. We still have targets
5448 without builtin_decl_explicit support, and builtins are shared
5449 nodes, so we can't use TREE_CHAIN in multiple lists. */
5450 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5451 while (*nextp)
5452 {
5453 tree var = *nextp;
5454 if (TREE_CODE (var) == FUNCTION_DECL
5455 && DECL_BUILT_IN (var))
5456 *nextp = TREE_CHAIN (var);
5457 else
5458 nextp = &TREE_CHAIN (var);
5459 }
5460 }
5461 }
5462
5463
5464 /* Data used when collecting DECLs and TYPEs for language data removal. */
5465
5466 struct free_lang_data_d
5467 {
5468 /* Worklist to avoid excessive recursion. */
5469 vec<tree> worklist;
5470
5471 /* Set of traversed objects. Used to avoid duplicate visits. */
5472 hash_set<tree> *pset;
5473
5474 /* Array of symbols to process with free_lang_data_in_decl. */
5475 vec<tree> decls;
5476
5477 /* Array of types to process with free_lang_data_in_type. */
5478 vec<tree> types;
5479 };
5480
5481
5482 /* Save all language fields needed to generate proper debug information
5483 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5484
5485 static void
5486 save_debug_info_for_decl (tree t)
5487 {
5488 /*struct saved_debug_info_d *sdi;*/
5489
5490 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5491
5492 /* FIXME. Partial implementation for saving debug info removed. */
5493 }
5494
5495
5496 /* Save all language fields needed to generate proper debug information
5497 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5498
5499 static void
5500 save_debug_info_for_type (tree t)
5501 {
5502 /*struct saved_debug_info_d *sdi;*/
5503
5504 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5505
5506 /* FIXME. Partial implementation for saving debug info removed. */
5507 }
5508
5509
5510 /* Add type or decl T to one of the lists of tree nodes that need their
5511 language data removed. The lists are held inside FLD. */
5512
5513 static void
5514 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5515 {
5516 if (DECL_P (t))
5517 {
5518 fld->decls.safe_push (t);
5519 if (debug_info_level > DINFO_LEVEL_TERSE)
5520 save_debug_info_for_decl (t);
5521 }
5522 else if (TYPE_P (t))
5523 {
5524 fld->types.safe_push (t);
5525 if (debug_info_level > DINFO_LEVEL_TERSE)
5526 save_debug_info_for_type (t);
5527 }
5528 else
5529 gcc_unreachable ();
5530 }
5531
5532 /* Push tree node T into FLD->WORKLIST. */
5533
5534 static inline void
5535 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5536 {
5537 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5538 fld->worklist.safe_push ((t));
5539 }
5540
5541
5542 /* Operand callback helper for free_lang_data_in_node. *TP is the
5543 subtree operand being considered. */
5544
5545 static tree
5546 find_decls_types_r (tree *tp, int *ws, void *data)
5547 {
5548 tree t = *tp;
5549 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5550
5551 if (TREE_CODE (t) == TREE_LIST)
5552 return NULL_TREE;
5553
5554 /* Language specific nodes will be removed, so there is no need
5555 to gather anything under them. */
5556 if (is_lang_specific (t))
5557 {
5558 *ws = 0;
5559 return NULL_TREE;
5560 }
5561
5562 if (DECL_P (t))
5563 {
5564 /* Note that walk_tree does not traverse every possible field in
5565 decls, so we have to do our own traversals here. */
5566 add_tree_to_fld_list (t, fld);
5567
5568 fld_worklist_push (DECL_NAME (t), fld);
5569 fld_worklist_push (DECL_CONTEXT (t), fld);
5570 fld_worklist_push (DECL_SIZE (t), fld);
5571 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5572
5573 /* We are going to remove everything under DECL_INITIAL for
5574 TYPE_DECLs. No point walking them. */
5575 if (TREE_CODE (t) != TYPE_DECL)
5576 fld_worklist_push (DECL_INITIAL (t), fld);
5577
5578 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5579 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5580
5581 if (TREE_CODE (t) == FUNCTION_DECL)
5582 {
5583 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5584 fld_worklist_push (DECL_RESULT (t), fld);
5585 }
5586 else if (TREE_CODE (t) == TYPE_DECL)
5587 {
5588 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5589 }
5590 else if (TREE_CODE (t) == FIELD_DECL)
5591 {
5592 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5593 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5594 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5595 fld_worklist_push (DECL_FCONTEXT (t), fld);
5596 }
5597
5598 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5599 && DECL_HAS_VALUE_EXPR_P (t))
5600 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5601
5602 if (TREE_CODE (t) != FIELD_DECL
5603 && TREE_CODE (t) != TYPE_DECL)
5604 fld_worklist_push (TREE_CHAIN (t), fld);
5605 *ws = 0;
5606 }
5607 else if (TYPE_P (t))
5608 {
5609 /* Note that walk_tree does not traverse every possible field in
5610 types, so we have to do our own traversals here. */
5611 add_tree_to_fld_list (t, fld);
5612
5613 if (!RECORD_OR_UNION_TYPE_P (t))
5614 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5615 fld_worklist_push (TYPE_SIZE (t), fld);
5616 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5617 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5618 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5619 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5620 fld_worklist_push (TYPE_NAME (t), fld);
5621 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5622 them and thus neither do nor want to reach unused pointer types
5623 this way. */
5624 if (!POINTER_TYPE_P (t))
5625 fld_worklist_push (TYPE_MINVAL (t), fld);
5626 if (!RECORD_OR_UNION_TYPE_P (t))
5627 fld_worklist_push (TYPE_MAXVAL (t), fld);
5628 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5629 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5630 neither do nor want to reach unused variants this way. */
5631 if (TYPE_CONTEXT (t))
5632 {
5633 tree ctx = TYPE_CONTEXT (t);
5634 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5635 So push that instead. */
5636 while (ctx && TREE_CODE (ctx) == BLOCK)
5637 ctx = BLOCK_SUPERCONTEXT (ctx);
5638 fld_worklist_push (ctx, fld);
5639 }
5640 /* Do not walk TYPE_CANONICAL. We do not stream it and thus neither
5641 do nor want to reach unused types this way. */
5642
5643 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5644 {
5645 unsigned i;
5646 tree tem;
5647 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5648 fld_worklist_push (TREE_TYPE (tem), fld);
5649 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5650 if (tem
5651 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5652 && TREE_CODE (tem) == TREE_LIST)
5653 do
5654 {
5655 fld_worklist_push (TREE_VALUE (tem), fld);
5656 tem = TREE_CHAIN (tem);
5657 }
5658 while (tem);
5659 }
5660 if (RECORD_OR_UNION_TYPE_P (t))
5661 {
5662 tree tem;
5663 /* Push all TYPE_FIELDS - there can be interleaved interesting
5664 and non-interesting things. */
5665 tem = TYPE_FIELDS (t);
5666 while (tem)
5667 {
5668 if (TREE_CODE (tem) == FIELD_DECL
5669 || TREE_CODE (tem) == TYPE_DECL)
5670 fld_worklist_push (tem, fld);
5671 tem = TREE_CHAIN (tem);
5672 }
5673 }
5674
5675 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5676 *ws = 0;
5677 }
5678 else if (TREE_CODE (t) == BLOCK)
5679 {
5680 tree tem;
5681 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5682 fld_worklist_push (tem, fld);
5683 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5684 fld_worklist_push (tem, fld);
5685 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5686 }
5687
5688 if (TREE_CODE (t) != IDENTIFIER_NODE
5689 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5690 fld_worklist_push (TREE_TYPE (t), fld);
5691
5692 return NULL_TREE;
5693 }
5694
5695
5696 /* Find decls and types in T. */
5697
5698 static void
5699 find_decls_types (tree t, struct free_lang_data_d *fld)
5700 {
5701 while (1)
5702 {
5703 if (!fld->pset->contains (t))
5704 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5705 if (fld->worklist.is_empty ())
5706 break;
5707 t = fld->worklist.pop ();
5708 }
5709 }
5710
5711 /* Translate all the types in LIST into the corresponding runtime
5712 types. */
5713
5714 static tree
5715 get_eh_types_for_runtime (tree list)
5716 {
5717 tree head, prev;
5718
5719 if (list == NULL_TREE)
5720 return NULL_TREE;
5721
5722 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5723 prev = head;
5724 list = TREE_CHAIN (list);
5725 while (list)
5726 {
5727 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5728 TREE_CHAIN (prev) = n;
5729 prev = TREE_CHAIN (prev);
5730 list = TREE_CHAIN (list);
5731 }
5732
5733 return head;
5734 }
5735
5736
5737 /* Find decls and types referenced in EH region R and store them in
5738 FLD->DECLS and FLD->TYPES. */
5739
5740 static void
5741 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5742 {
5743 switch (r->type)
5744 {
5745 case ERT_CLEANUP:
5746 break;
5747
5748 case ERT_TRY:
5749 {
5750 eh_catch c;
5751
5752 /* The types referenced in each catch must first be changed to the
5753 EH types used at runtime. This removes references to FE types
5754 in the region. */
5755 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5756 {
5757 c->type_list = get_eh_types_for_runtime (c->type_list);
5758 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5759 }
5760 }
5761 break;
5762
5763 case ERT_ALLOWED_EXCEPTIONS:
5764 r->u.allowed.type_list
5765 = get_eh_types_for_runtime (r->u.allowed.type_list);
5766 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5767 break;
5768
5769 case ERT_MUST_NOT_THROW:
5770 walk_tree (&r->u.must_not_throw.failure_decl,
5771 find_decls_types_r, fld, fld->pset);
5772 break;
5773 }
5774 }
5775
5776
5777 /* Find decls and types referenced in cgraph node N and store them in
5778 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5779 look for *every* kind of DECL and TYPE node reachable from N,
5780 including those embedded inside types and decls (i.e., TYPE_DECLs,
5781 NAMESPACE_DECLs, etc). */
5782
5783 static void
5784 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5785 {
5786 basic_block bb;
5787 struct function *fn;
5788 unsigned ix;
5789 tree t;
5790
5791 find_decls_types (n->decl, fld);
5792
5793 if (!gimple_has_body_p (n->decl))
5794 return;
5795
5796 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5797
5798 fn = DECL_STRUCT_FUNCTION (n->decl);
5799
5800 /* Traverse locals. */
5801 FOR_EACH_LOCAL_DECL (fn, ix, t)
5802 find_decls_types (t, fld);
5803
5804 /* Traverse EH regions in FN. */
5805 {
5806 eh_region r;
5807 FOR_ALL_EH_REGION_FN (r, fn)
5808 find_decls_types_in_eh_region (r, fld);
5809 }
5810
5811 /* Traverse every statement in FN. */
5812 FOR_EACH_BB_FN (bb, fn)
5813 {
5814 gphi_iterator psi;
5815 gimple_stmt_iterator si;
5816 unsigned i;
5817
5818 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5819 {
5820 gphi *phi = psi.phi ();
5821
5822 for (i = 0; i < gimple_phi_num_args (phi); i++)
5823 {
5824 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5825 find_decls_types (*arg_p, fld);
5826 }
5827 }
5828
5829 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5830 {
5831 gimple *stmt = gsi_stmt (si);
5832
5833 if (is_gimple_call (stmt))
5834 find_decls_types (gimple_call_fntype (stmt), fld);
5835
5836 for (i = 0; i < gimple_num_ops (stmt); i++)
5837 {
5838 tree arg = gimple_op (stmt, i);
5839 find_decls_types (arg, fld);
5840 }
5841 }
5842 }
5843 }
5844
5845
5846 /* Find decls and types referenced in varpool node N and store them in
5847 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5848 look for *every* kind of DECL and TYPE node reachable from N,
5849 including those embedded inside types and decls (i.e., TYPE_DECLs,
5850 NAMESPACE_DECLs, etc). */
5851
5852 static void
5853 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5854 {
5855 find_decls_types (v->decl, fld);
5856 }
5857
5858 /* If T needs an assembler name, have one created for it. */
5859
5860 void
5861 assign_assembler_name_if_neeeded (tree t)
5862 {
5863 if (need_assembler_name_p (t))
5864 {
5865 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5866 diagnostics that use input_location to show locus
5867 information. The problem here is that, at this point,
5868 input_location is generally anchored to the end of the file
5869 (since the parser is long gone), so we don't have a good
5870 position to pin it to.
5871
5872 To alleviate this problem, this uses the location of T's
5873 declaration. Examples of this are
5874 testsuite/g++.dg/template/cond2.C and
5875 testsuite/g++.dg/template/pr35240.C. */
5876 location_t saved_location = input_location;
5877 input_location = DECL_SOURCE_LOCATION (t);
5878
5879 decl_assembler_name (t);
5880
5881 input_location = saved_location;
5882 }
5883 }
5884
5885
5886 /* Free language specific information for every operand and expression
5887 in every node of the call graph. This process operates in three stages:
5888
5889 1- Every callgraph node and varpool node is traversed looking for
5890 decls and types embedded in them. This is a more exhaustive
5891 search than that done by find_referenced_vars, because it will
5892 also collect individual fields, decls embedded in types, etc.
5893
5894 2- All the decls found are sent to free_lang_data_in_decl.
5895
5896 3- All the types found are sent to free_lang_data_in_type.
5897
5898 The ordering between decls and types is important because
5899 free_lang_data_in_decl sets assembler names, which includes
5900 mangling. So types cannot be freed up until assembler names have
5901 been set up. */
5902
5903 static void
5904 free_lang_data_in_cgraph (void)
5905 {
5906 struct cgraph_node *n;
5907 varpool_node *v;
5908 struct free_lang_data_d fld;
5909 tree t;
5910 unsigned i;
5911 alias_pair *p;
5912
5913 /* Initialize sets and arrays to store referenced decls and types. */
5914 fld.pset = new hash_set<tree>;
5915 fld.worklist.create (0);
5916 fld.decls.create (100);
5917 fld.types.create (100);
5918
5919 /* Find decls and types in the body of every function in the callgraph. */
5920 FOR_EACH_FUNCTION (n)
5921 find_decls_types_in_node (n, &fld);
5922
5923 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5924 find_decls_types (p->decl, &fld);
5925
5926 /* Find decls and types in every varpool symbol. */
5927 FOR_EACH_VARIABLE (v)
5928 find_decls_types_in_var (v, &fld);
5929
5930 /* Set the assembler name on every decl found. We need to do this
5931 now because free_lang_data_in_decl will invalidate data needed
5932 for mangling, which would break mangling of interdependent decls. */
5933 FOR_EACH_VEC_ELT (fld.decls, i, t)
5934 assign_assembler_name_if_neeeded (t);
5935
5936 /* Traverse every decl found freeing its language data. */
5937 FOR_EACH_VEC_ELT (fld.decls, i, t)
5938 free_lang_data_in_decl (t);
5939
5940 /* Traverse every type found freeing its language data. */
5941 FOR_EACH_VEC_ELT (fld.types, i, t)
5942 free_lang_data_in_type (t);
5943 if (flag_checking)
5944 {
5945 FOR_EACH_VEC_ELT (fld.types, i, t)
5946 verify_type (t);
5947 }
5948
5949 delete fld.pset;
5950 fld.worklist.release ();
5951 fld.decls.release ();
5952 fld.types.release ();
5953 }
5954
5955
5956 /* Free resources that are used by the frontend but are not needed once it is done. */
5957
5958 static unsigned
5959 free_lang_data (void)
5960 {
5961 unsigned i;
5962
5963 /* If we are the LTO frontend, we have freed lang-specific data already. */
5964 if (in_lto_p
5965 || (!flag_generate_lto && !flag_generate_offload))
5966 return 0;
5967
5968 /* Allocate and assign alias sets to the standard integer types
5969 while the slots are still in the way the frontends generated them. */
5970 for (i = 0; i < itk_none; ++i)
5971 if (integer_types[i])
5972 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5973
5974 /* Traverse the IL resetting language specific information for
5975 operands, expressions, etc. */
5976 free_lang_data_in_cgraph ();
5977
5978 /* Create gimple variants for common types. */
5979 ptrdiff_type_node = integer_type_node;
5980 fileptr_type_node = ptr_type_node;
5981
5982 /* Reset some langhooks. Do not reset types_compatible_p, it may
5983 still be used indirectly via the get_alias_set langhook. */
5984 lang_hooks.dwarf_name = lhd_dwarf_name;
5985 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5986 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5987
5988 /* We do not want the default decl_assembler_name implementation;
5989 rather, once everything is fixed up, we want a wrapper around it
5990 that asserts that all non-local symbols already have their assembler
5991 names and that produces assembler names only for local symbols. Or
5992 better, make sure we never call decl_assembler_name on local symbols
5993 and devise a separate, middle-end private scheme for it. */
5994
5995 /* Reset diagnostic machinery. */
5996 tree_diagnostics_defaults (global_dc);
5997
5998 return 0;
5999 }
6000
6001
6002 namespace {
6003
6004 const pass_data pass_data_ipa_free_lang_data =
6005 {
6006 SIMPLE_IPA_PASS, /* type */
6007 "*free_lang_data", /* name */
6008 OPTGROUP_NONE, /* optinfo_flags */
6009 TV_IPA_FREE_LANG_DATA, /* tv_id */
6010 0, /* properties_required */
6011 0, /* properties_provided */
6012 0, /* properties_destroyed */
6013 0, /* todo_flags_start */
6014 0, /* todo_flags_finish */
6015 };
6016
6017 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6018 {
6019 public:
6020 pass_ipa_free_lang_data (gcc::context *ctxt)
6021 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6022 {}
6023
6024 /* opt_pass methods: */
6025 virtual unsigned int execute (function *) { return free_lang_data (); }
6026
6027 }; // class pass_ipa_free_lang_data
6028
6029 } // anon namespace
6030
6031 simple_ipa_opt_pass *
6032 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6033 {
6034 return new pass_ipa_free_lang_data (ctxt);
6035 }
6036
6037 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6038 ATTR_NAME. Also used internally by remove_attribute(). */
6039 bool
6040 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6041 {
6042 size_t ident_len = IDENTIFIER_LENGTH (ident);
6043
6044 if (ident_len == attr_len)
6045 {
6046 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6047 return true;
6048 }
6049 else if (ident_len == attr_len + 4)
6050 {
6051 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
6052 '__text__'. */
6053 const char *p = IDENTIFIER_POINTER (ident);
6054 if (p[0] == '_' && p[1] == '_'
6055 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6056 && strncmp (attr_name, p + 2, attr_len) == 0)
6057 return true;
6058 }
6059
6060 return false;
6061 }
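
/* Illustrative sketch, not part of the original sources: is_attribute_p
   (a wrapper declared in tree.h) ends up in the routine above, so both
   spellings of an attribute identifier match the canonical name:

     tree id1 = get_identifier ("noreturn");
     tree id2 = get_identifier ("__noreturn__");
     bool a = private_is_attribute_p ("noreturn", strlen ("noreturn"), id1);
     bool b = private_is_attribute_p ("noreturn", strlen ("noreturn"), id2);

   Both A and B are true; the second branch above strips the leading and
   trailing '__' before comparing.  */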
6062
6063 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6064 of ATTR_NAME, and LIST is not NULL_TREE. */
6065 tree
6066 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6067 {
6068 while (list)
6069 {
6070 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6071
6072 if (ident_len == attr_len)
6073 {
6074 if (!strcmp (attr_name,
6075 IDENTIFIER_POINTER (get_attribute_name (list))))
6076 break;
6077 }
6078 /* TODO: If we made sure that attributes were stored in the
6079 canonical form without '__...__' (ie, as in 'text' as opposed
6080 to '__text__') then we could avoid the following case. */
6081 else if (ident_len == attr_len + 4)
6082 {
6083 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6084 if (p[0] == '_' && p[1] == '_'
6085 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6086 && strncmp (attr_name, p + 2, attr_len) == 0)
6087 break;
6088 }
6089 list = TREE_CHAIN (list);
6090 }
6091
6092 return list;
6093 }
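
/* Illustrative sketch, assumed usage: lookup_attribute (declared in
   tree.h) forwards to the routine above, so a caller scanning a decl's
   attribute list can write

     tree attrs = DECL_ATTRIBUTES (decl);
     if (lookup_attribute ("noinline", attrs) != NULL_TREE)
       mark_the_decl_somehow (decl);

   where mark_the_decl_somehow stands in for whatever the caller does.
   The returned value is the matching TREE_LIST node itself, so its
   TREE_CHAIN can be passed back in to find further occurrences.  */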
6094
6095 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6096 return a pointer to the first element of LIST whose attribute name
6097 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6098 '__text__'). */
6099
6100 tree
6101 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6102 tree list)
6103 {
6104 while (list)
6105 {
6106 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6107
6108 if (attr_len > ident_len)
6109 {
6110 list = TREE_CHAIN (list);
6111 continue;
6112 }
6113
6114 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6115
6116 if (strncmp (attr_name, p, attr_len) == 0)
6117 break;
6118
6119 /* TODO: If we made sure that attributes were stored in the
6120 canonical form without '__...__' (ie, as in 'text' as opposed
6121 to '__text__') then we could avoid the following case. */
6122 if (p[0] == '_' && p[1] == '_' &&
6123 strncmp (attr_name, p + 2, attr_len) == 0)
6124 break;
6125
6126 list = TREE_CHAIN (list);
6127 }
6128
6129 return list;
6130 }
6131
6132
6133 /* A variant of lookup_attribute() that can be used with an identifier
6134 as the first argument, and where the identifier can be either
6135 'text' or '__text__'.
6136
6137 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6138 return a pointer to the attribute's list element if the attribute
6139 is part of the list, or NULL_TREE if not found. If the attribute
6140 appears more than once, this only returns the first occurrence; the
6141 TREE_CHAIN of the return value should be passed back in if further
6142 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6143 can be in the form 'text' or '__text__'. */
6144 static tree
6145 lookup_ident_attribute (tree attr_identifier, tree list)
6146 {
6147 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6148
6149 while (list)
6150 {
6151 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6152 == IDENTIFIER_NODE);
6153
6154 if (cmp_attrib_identifiers (attr_identifier,
6155 get_attribute_name (list)))
6156 /* Found it. */
6157 break;
6158 list = TREE_CHAIN (list);
6159 }
6160
6161 return list;
6162 }
6163
6164 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6165 modified list. */
6166
6167 tree
6168 remove_attribute (const char *attr_name, tree list)
6169 {
6170 tree *p;
6171 size_t attr_len = strlen (attr_name);
6172
6173 gcc_checking_assert (attr_name[0] != '_');
6174
6175 for (p = &list; *p; )
6176 {
6177 tree l = *p;
6178 /* TODO: If we were storing attributes in normalized form, here
6179 we could use a simple strcmp(). */
6180 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6181 *p = TREE_CHAIN (l);
6182 else
6183 p = &TREE_CHAIN (l);
6184 }
6185
6186 return list;
6187 }
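
/* Illustrative sketch, assumed usage: remove_attribute returns the
   possibly-new list head, so the result must be stored back:

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   Per the assert above, ATTR_NAME is expected in its canonical 'text'
   spelling, never '__text__'.  */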
6188
6189 /* Return an attribute list that is the union of a1 and a2. */
6190
6191 tree
6192 merge_attributes (tree a1, tree a2)
6193 {
6194 tree attributes;
6195
6196 /* Either one unset? Take the set one. */
6197
6198 if ((attributes = a1) == 0)
6199 attributes = a2;
6200
6201 /* One that completely contains the other? Take it. */
6202
6203 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6204 {
6205 if (attribute_list_contained (a2, a1))
6206 attributes = a2;
6207 else
6208 {
6209 /* Pick the longest list, and hang on the other list. */
6210
6211 if (list_length (a1) < list_length (a2))
6212 attributes = a2, a2 = a1;
6213
6214 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6215 {
6216 tree a;
6217 for (a = lookup_ident_attribute (get_attribute_name (a2),
6218 attributes);
6219 a != NULL_TREE && !attribute_value_equal (a, a2);
6220 a = lookup_ident_attribute (get_attribute_name (a2),
6221 TREE_CHAIN (a)))
6222 ;
6223 if (a == NULL_TREE)
6224 {
6225 a1 = copy_node (a2);
6226 TREE_CHAIN (a1) = attributes;
6227 attributes = a1;
6228 }
6229 }
6230 }
6231 }
6232 return attributes;
6233 }
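
/* Worked example, illustrative only: if A1 is ((packed)) and A2 is
   ((aligned (8))), neither list contains the other, so the shorter list
   is copied node by node onto the longer one and the result mentions
   both attributes.  If instead A1 were ((aligned (8))) and A2 were
   ((aligned (8)) (packed)), A1 is contained in A2 and the result is
   simply A2; attributes present in both lists with equal values are
   kept only once.  */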
6234
6235 /* Given types T1 and T2, merge their attributes and return
6236 the result. */
6237
6238 tree
6239 merge_type_attributes (tree t1, tree t2)
6240 {
6241 return merge_attributes (TYPE_ATTRIBUTES (t1),
6242 TYPE_ATTRIBUTES (t2));
6243 }
6244
6245 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6246 the result. */
6247
6248 tree
6249 merge_decl_attributes (tree olddecl, tree newdecl)
6250 {
6251 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6252 DECL_ATTRIBUTES (newdecl));
6253 }
6254
6255 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6256
6257 /* Specialization of merge_decl_attributes for various Windows targets.
6258
6259 This handles the following situation:
6260
6261 __declspec (dllimport) int foo;
6262 int foo;
6263
6264 The second instance of `foo' nullifies the dllimport. */
6265
6266 tree
6267 merge_dllimport_decl_attributes (tree old, tree new_tree)
6268 {
6269 tree a;
6270 int delete_dllimport_p = 1;
6271
6272 /* What we need to do here is remove from `old' dllimport if it doesn't
6273 appear in `new'. dllimport behaves like extern: if a declaration is
6274 marked dllimport and a definition appears later, then the object
6275 is not dllimport'd. We also remove a `new' dllimport if the old list
6276 contains dllexport: dllexport always overrides dllimport, regardless
6277 of the order of declaration. */
6278 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6279 delete_dllimport_p = 0;
6280 else if (DECL_DLLIMPORT_P (new_tree)
6281 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6282 {
6283 DECL_DLLIMPORT_P (new_tree) = 0;
6284 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6285 "dllimport ignored", new_tree);
6286 }
6287 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6288 {
6289 /* Warn about overriding a symbol that has already been used, e.g.:
6290 extern int __attribute__ ((dllimport)) foo;
6291 int* bar () {return &foo;}
6292 int foo;
6293 */
6294 if (TREE_USED (old))
6295 {
6296 warning (0, "%q+D redeclared without dllimport attribute "
6297 "after being referenced with dll linkage", new_tree);
6298 /* If we have used a variable's address with dllimport linkage,
6299 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6300 decl may already have had TREE_CONSTANT computed.
6301 We still remove the attribute so that assembler code refers
6302 to '&foo' rather than '_imp__foo'. */
6303 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6304 DECL_DLLIMPORT_P (new_tree) = 1;
6305 }
6306
6307 /* Let an inline definition silently override the external reference,
6308 but otherwise warn about attribute inconsistency. */
6309 else if (TREE_CODE (new_tree) == VAR_DECL
6310 || !DECL_DECLARED_INLINE_P (new_tree))
6311 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6312 "previous dllimport ignored", new_tree);
6313 }
6314 else
6315 delete_dllimport_p = 0;
6316
6317 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6318
6319 if (delete_dllimport_p)
6320 a = remove_attribute ("dllimport", a);
6321
6322 return a;
6323 }
6324
6325 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6326 struct attribute_spec.handler. */
6327
6328 tree
6329 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6330 bool *no_add_attrs)
6331 {
6332 tree node = *pnode;
6333 bool is_dllimport;
6334
6335 /* These attributes may apply to structure and union types being created,
6336 but otherwise should pass to the declaration involved. */
6337 if (!DECL_P (node))
6338 {
6339 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6340 | (int) ATTR_FLAG_ARRAY_NEXT))
6341 {
6342 *no_add_attrs = true;
6343 return tree_cons (name, args, NULL_TREE);
6344 }
6345 if (TREE_CODE (node) == RECORD_TYPE
6346 || TREE_CODE (node) == UNION_TYPE)
6347 {
6348 node = TYPE_NAME (node);
6349 if (!node)
6350 return NULL_TREE;
6351 }
6352 else
6353 {
6354 warning (OPT_Wattributes, "%qE attribute ignored",
6355 name);
6356 *no_add_attrs = true;
6357 return NULL_TREE;
6358 }
6359 }
6360
6361 if (TREE_CODE (node) != FUNCTION_DECL
6362 && TREE_CODE (node) != VAR_DECL
6363 && TREE_CODE (node) != TYPE_DECL)
6364 {
6365 *no_add_attrs = true;
6366 warning (OPT_Wattributes, "%qE attribute ignored",
6367 name);
6368 return NULL_TREE;
6369 }
6370
6371 if (TREE_CODE (node) == TYPE_DECL
6372 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6373 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6374 {
6375 *no_add_attrs = true;
6376 warning (OPT_Wattributes, "%qE attribute ignored",
6377 name);
6378 return NULL_TREE;
6379 }
6380
6381 is_dllimport = is_attribute_p ("dllimport", name);
6382
6383 /* Report error on dllimport ambiguities seen now before they cause
6384 any damage. */
6385 if (is_dllimport)
6386 {
6387 /* Honor any target-specific overrides. */
6388 if (!targetm.valid_dllimport_attribute_p (node))
6389 *no_add_attrs = true;
6390
6391 else if (TREE_CODE (node) == FUNCTION_DECL
6392 && DECL_DECLARED_INLINE_P (node))
6393 {
6394 warning (OPT_Wattributes, "inline function %q+D declared as "
6395 "dllimport: attribute ignored", node);
6396 *no_add_attrs = true;
6397 }
6398 /* Like MS, treat definition of dllimported variables and
6399 non-inlined functions on declaration as syntax errors. */
6400 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6401 {
6402 error ("function %q+D definition is marked dllimport", node);
6403 *no_add_attrs = true;
6404 }
6405
6406 else if (TREE_CODE (node) == VAR_DECL)
6407 {
6408 if (DECL_INITIAL (node))
6409 {
6410 error ("variable %q+D definition is marked dllimport",
6411 node);
6412 *no_add_attrs = true;
6413 }
6414
6415 /* `extern' needn't be specified with dllimport.
6416 Specify `extern' now and hope for the best. Sigh. */
6417 DECL_EXTERNAL (node) = 1;
6418 /* Also, implicitly give global scope to dllimport'd variables
6419 declared within a function, unless they are declared static. */
6420 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6421 TREE_PUBLIC (node) = 1;
6422 }
6423
6424 if (*no_add_attrs == false)
6425 DECL_DLLIMPORT_P (node) = 1;
6426 }
6427 else if (TREE_CODE (node) == FUNCTION_DECL
6428 && DECL_DECLARED_INLINE_P (node)
6429 && flag_keep_inline_dllexport)
6430 /* An exported function, even if inline, must be emitted. */
6431 DECL_EXTERNAL (node) = 0;
6432
6433 /* Report error if symbol is not accessible at global scope. */
6434 if (!TREE_PUBLIC (node)
6435 && (TREE_CODE (node) == VAR_DECL
6436 || TREE_CODE (node) == FUNCTION_DECL))
6437 {
6438 error ("external linkage required for symbol %q+D because of "
6439 "%qE attribute", node, name);
6440 *no_add_attrs = true;
6441 }
6442
6443 /* A dllexport'd entity must have default visibility so that other
6444 program units (shared libraries or the main executable) can see
6445 it. A dllimport'd entity must have default visibility so that
6446 the linker knows that undefined references within this program
6447 unit can be resolved by the dynamic linker. */
6448 if (!*no_add_attrs)
6449 {
6450 if (DECL_VISIBILITY_SPECIFIED (node)
6451 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6452 error ("%qE implies default visibility, but %qD has already "
6453 "been declared with a different visibility",
6454 name, node);
6455 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6456 DECL_VISIBILITY_SPECIFIED (node) = 1;
6457 }
6458
6459 return NULL_TREE;
6460 }
6461
6462 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6463 \f
6464 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6465 of the various TYPE_QUAL values. */
6466
6467 static void
6468 set_type_quals (tree type, int type_quals)
6469 {
6470 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6471 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6472 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6473 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6474 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6475 }
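
/* Illustrative sketch, assumed usage: TYPE_QUALS values are bitmasks, so
   the qualifier set of a 'const volatile int' variant is

     int quals = TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE;

   and set_type_quals simply unpacks each bit into the corresponding
   TYPE_* flag, plus the address space encoded in the upper bits.  */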
6476
6477 /* Returns true iff unqualified CAND and BASE are equivalent. */
6478
6479 bool
6480 check_base_type (const_tree cand, const_tree base)
6481 {
6482 return (TYPE_NAME (cand) == TYPE_NAME (base)
6483 /* Apparently this is needed for Objective-C. */
6484 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6485 /* Check alignment. */
6486 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6487 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6488 TYPE_ATTRIBUTES (base)));
6489 }
6490
6491 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6492
6493 bool
6494 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6495 {
6496 return (TYPE_QUALS (cand) == type_quals
6497 && check_base_type (cand, base));
6498 }
6499
6500 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6501
6502 static bool
6503 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6504 {
6505 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6506 && TYPE_NAME (cand) == TYPE_NAME (base)
6507 /* Apparently this is needed for Objective-C. */
6508 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6509 /* Check alignment. */
6510 && TYPE_ALIGN (cand) == align
6511 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6512 TYPE_ATTRIBUTES (base)));
6513 }
6514
6515 /* This function checks to see if TYPE matches the size of one of the
6516 built-in atomic types, and returns that core atomic type. */
6517
6518 static tree
6519 find_atomic_core_type (tree type)
6520 {
6521 tree base_atomic_type;
6522
6523 /* Only handle complete types. */
6524 if (TYPE_SIZE (type) == NULL_TREE)
6525 return NULL_TREE;
6526
6527 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6528 switch (type_size)
6529 {
6530 case 8:
6531 base_atomic_type = atomicQI_type_node;
6532 break;
6533
6534 case 16:
6535 base_atomic_type = atomicHI_type_node;
6536 break;
6537
6538 case 32:
6539 base_atomic_type = atomicSI_type_node;
6540 break;
6541
6542 case 64:
6543 base_atomic_type = atomicDI_type_node;
6544 break;
6545
6546 case 128:
6547 base_atomic_type = atomicTI_type_node;
6548 break;
6549
6550 default:
6551 base_atomic_type = NULL_TREE;
6552 }
6553
6554 return base_atomic_type;
6555 }
6556
6557 /* Return a version of the TYPE, qualified as indicated by the
6558 TYPE_QUALS, if one exists. If no qualified version exists yet,
6559 return NULL_TREE. */
6560
6561 tree
6562 get_qualified_type (tree type, int type_quals)
6563 {
6564 tree t;
6565
6566 if (TYPE_QUALS (type) == type_quals)
6567 return type;
6568
6569 /* Search the chain of variants to see if there is already one there just
6570 like the one we need to have. If so, use that existing one. We must
6571 preserve the TYPE_NAME, since there is code that depends on this. */
6572 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6573 if (check_qualified_type (t, type, type_quals))
6574 return t;
6575
6576 return NULL_TREE;
6577 }
6578
6579 /* Like get_qualified_type, but creates the type if it does not
6580 exist. This function never returns NULL_TREE. */
6581
6582 tree
6583 build_qualified_type (tree type, int type_quals)
6584 {
6585 tree t;
6586
6587 /* See if we already have the appropriate qualified variant. */
6588 t = get_qualified_type (type, type_quals);
6589
6590 /* If not, build it. */
6591 if (!t)
6592 {
6593 t = build_variant_type_copy (type);
6594 set_type_quals (t, type_quals);
6595
6596 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6597 {
6598 /* See if this object can map to a basic atomic type. */
6599 tree atomic_type = find_atomic_core_type (type);
6600 if (atomic_type)
6601 {
6602 /* Ensure the alignment of this type is compatible with
6603 the required alignment of the atomic type. */
6604 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6605 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6606 }
6607 }
6608
6609 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6610 /* Propagate structural equality. */
6611 SET_TYPE_STRUCTURAL_EQUALITY (t);
6612 else if (TYPE_CANONICAL (type) != type)
6613 /* Build the underlying canonical type, since it is different
6614 from TYPE. */
6615 {
6616 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6617 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6618 }
6619 else
6620 /* T is its own canonical type. */
6621 TYPE_CANONICAL (t) = t;
6622
6623 }
6624
6625 return t;
6626 }
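
/* Illustrative sketch, assumed usage: to obtain a 'const int' type one
   writes

     tree cint = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);

   A second call with the same arguments returns the same node, because
   get_qualified_type finds it on integer_type_node's variant chain.  */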
6627
6628 /* Create a variant of TYPE with alignment ALIGN. */
6629
6630 tree
6631 build_aligned_type (tree type, unsigned int align)
6632 {
6633 tree t;
6634
6635 if (TYPE_PACKED (type)
6636 || TYPE_ALIGN (type) == align)
6637 return type;
6638
6639 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6640 if (check_aligned_type (t, type, align))
6641 return t;
6642
6643 t = build_variant_type_copy (type);
6644 TYPE_ALIGN (t) = align;
6645
6646 return t;
6647 }
6648
6649 /* Create a new distinct copy of TYPE. The new type is made its own
6650 MAIN_VARIANT. If TYPE requires structural equality checks, the
6651 resulting type requires structural equality checks; otherwise, its
6652 TYPE_CANONICAL points to itself. */
6653
6654 tree
6655 build_distinct_type_copy (tree type)
6656 {
6657 tree t = copy_node (type);
6658
6659 TYPE_POINTER_TO (t) = 0;
6660 TYPE_REFERENCE_TO (t) = 0;
6661
6662 /* Set the canonical type either to a new equivalence class, or
6663 propagate the need for structural equality checks. */
6664 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6665 SET_TYPE_STRUCTURAL_EQUALITY (t);
6666 else
6667 TYPE_CANONICAL (t) = t;
6668
6669 /* Make it its own variant. */
6670 TYPE_MAIN_VARIANT (t) = t;
6671 TYPE_NEXT_VARIANT (t) = 0;
6672
6673 /* We do not record methods in type copies or variants,
6674 so we do not need to keep them up to date when a new
6675 method is inserted. */
6676 if (RECORD_OR_UNION_TYPE_P (t))
6677 TYPE_METHODS (t) = NULL_TREE;
6678
6679 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6680 whose TREE_TYPE is not t. This can also happen in the Ada
6681 frontend when using subtypes. */
6682
6683 return t;
6684 }
6685
6686 /* Create a new variant of TYPE, equivalent but distinct. This is so
6687 the caller can modify it. TYPE_CANONICAL for the return type will
6688 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6689 are considered equal by the language itself (or that both types
6690 require structural equality checks). */
6691
6692 tree
6693 build_variant_type_copy (tree type)
6694 {
6695 tree t, m = TYPE_MAIN_VARIANT (type);
6696
6697 t = build_distinct_type_copy (type);
6698
6699 /* Since we're building a variant, assume that it is a non-semantic
6700 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6701 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6702 /* Type variants have no alias set defined. */
6703 TYPE_ALIAS_SET (t) = -1;
6704
6705 /* Add the new type to the chain of variants of TYPE. */
6706 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6707 TYPE_NEXT_VARIANT (m) = t;
6708 TYPE_MAIN_VARIANT (t) = m;
6709
6710 return t;
6711 }
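
/* Illustrative note on the two copy routines above: given some type T,

     tree d = build_distinct_type_copy (t);
     tree v = build_variant_type_copy (t);

   D starts a new main variant and a new canonical equivalence class,
   while V is chained onto T's variant list and shares T's canonical
   type, i.e. it is merely a differently-qualified or
   differently-attributed view of T.  */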
6712 \f
6713 /* Return true if the from trees in both tree maps are equal. */
6714
6715 int
6716 tree_map_base_eq (const void *va, const void *vb)
6717 {
6718 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6719 *const b = (const struct tree_map_base *) vb;
6720 return (a->from == b->from);
6721 }
6722
6723 /* Hash a from tree in a tree_map_base. */
6724
6725 unsigned int
6726 tree_map_base_hash (const void *item)
6727 {
6728 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6729 }
6730
6731 /* Return true if this tree map structure is marked for garbage collection
6732 purposes. We simply return true if the from tree is marked, so that this
6733 structure goes away when the from tree goes away. */
6734
6735 int
6736 tree_map_base_marked_p (const void *p)
6737 {
6738 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6739 }
6740
6741 /* Hash a from tree in a tree_map. */
6742
6743 unsigned int
6744 tree_map_hash (const void *item)
6745 {
6746 return (((const struct tree_map *) item)->hash);
6747 }
6748
6749 /* Hash a from tree in a tree_decl_map. */
6750
6751 unsigned int
6752 tree_decl_map_hash (const void *item)
6753 {
6754 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6755 }
6756
6757 /* Return the initialization priority for DECL. */
6758
6759 priority_type
6760 decl_init_priority_lookup (tree decl)
6761 {
6762 symtab_node *snode = symtab_node::get (decl);
6763
6764 if (!snode)
6765 return DEFAULT_INIT_PRIORITY;
6766 return
6767 snode->get_init_priority ();
6768 }
6769
6770 /* Return the finalization priority for DECL. */
6771
6772 priority_type
6773 decl_fini_priority_lookup (tree decl)
6774 {
6775 cgraph_node *node = cgraph_node::get (decl);
6776
6777 if (!node)
6778 return DEFAULT_INIT_PRIORITY;
6779 return
6780 node->get_fini_priority ();
6781 }
6782
6783 /* Set the initialization priority for DECL to PRIORITY. */
6784
6785 void
6786 decl_init_priority_insert (tree decl, priority_type priority)
6787 {
6788 struct symtab_node *snode;
6789
6790 if (priority == DEFAULT_INIT_PRIORITY)
6791 {
6792 snode = symtab_node::get (decl);
6793 if (!snode)
6794 return;
6795 }
6796 else if (TREE_CODE (decl) == VAR_DECL)
6797 snode = varpool_node::get_create (decl);
6798 else
6799 snode = cgraph_node::get_create (decl);
6800 snode->set_init_priority (priority);
6801 }
6802
6803 /* Set the finalization priority for DECL to PRIORITY. */
6804
6805 void
6806 decl_fini_priority_insert (tree decl, priority_type priority)
6807 {
6808 struct cgraph_node *node;
6809
6810 if (priority == DEFAULT_INIT_PRIORITY)
6811 {
6812 node = cgraph_node::get (decl);
6813 if (!node)
6814 return;
6815 }
6816 else
6817 node = cgraph_node::get_create (decl);
6818 node->set_fini_priority (priority);
6819 }
6820
6821 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6822
6823 static void
6824 print_debug_expr_statistics (void)
6825 {
6826 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6827 (long) debug_expr_for_decl->size (),
6828 (long) debug_expr_for_decl->elements (),
6829 debug_expr_for_decl->collisions ());
6830 }
6831
6832 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6833
6834 static void
6835 print_value_expr_statistics (void)
6836 {
6837 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6838 (long) value_expr_for_decl->size (),
6839 (long) value_expr_for_decl->elements (),
6840 value_expr_for_decl->collisions ());
6841 }
6842
6843 /* Lookup a debug expression for FROM, and return it if we find one. */
6844
6845 tree
6846 decl_debug_expr_lookup (tree from)
6847 {
6848 struct tree_decl_map *h, in;
6849 in.base.from = from;
6850
6851 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6852 if (h)
6853 return h->to;
6854 return NULL_TREE;
6855 }
6856
6857 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6858
6859 void
6860 decl_debug_expr_insert (tree from, tree to)
6861 {
6862 struct tree_decl_map *h;
6863
6864 h = ggc_alloc<tree_decl_map> ();
6865 h->base.from = from;
6866 h->to = to;
6867 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6868 }
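
/* Illustrative sketch, assumed usage: the two routines above implement a
   DECL_UID-keyed side table, so a typical client pairs them as

     decl_debug_expr_insert (var, expr);
     ...
     tree e = decl_debug_expr_lookup (var);

   getting back EXPR, or NULL_TREE if no mapping was ever recorded.  The
   value-expression table below follows the same pattern.  */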
6869
6870 /* Lookup a value expression for FROM, and return it if we find one. */
6871
6872 tree
6873 decl_value_expr_lookup (tree from)
6874 {
6875 struct tree_decl_map *h, in;
6876 in.base.from = from;
6877
6878 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6879 if (h)
6880 return h->to;
6881 return NULL_TREE;
6882 }
6883
6884 /* Insert a mapping FROM->TO in the value expression hashtable. */
6885
6886 void
6887 decl_value_expr_insert (tree from, tree to)
6888 {
6889 struct tree_decl_map *h;
6890
6891 h = ggc_alloc<tree_decl_map> ();
6892 h->base.from = from;
6893 h->to = to;
6894 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6895 }
6896
6897 /* Lookup a vector of debug arguments for FROM, and return it if we
6898 find one. */
6899
6900 vec<tree, va_gc> **
6901 decl_debug_args_lookup (tree from)
6902 {
6903 struct tree_vec_map *h, in;
6904
6905 if (!DECL_HAS_DEBUG_ARGS_P (from))
6906 return NULL;
6907 gcc_checking_assert (debug_args_for_decl != NULL);
6908 in.base.from = from;
6909 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6910 if (h)
6911 return &h->to;
6912 return NULL;
6913 }
6914
6915 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6916 arguments hashtable. */
6917
6918 vec<tree, va_gc> **
6919 decl_debug_args_insert (tree from)
6920 {
6921 struct tree_vec_map *h;
6922 tree_vec_map **loc;
6923
6924 if (DECL_HAS_DEBUG_ARGS_P (from))
6925 return decl_debug_args_lookup (from);
6926 if (debug_args_for_decl == NULL)
6927 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6928 h = ggc_alloc<tree_vec_map> ();
6929 h->base.from = from;
6930 h->to = NULL;
6931 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6932 *loc = h;
6933 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6934 return &h->to;
6935 }
6936
6937 /* Hashing of types so that we don't make duplicates.
6938 The entry point is `type_hash_canon'. */
6939
6940 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6941 with types in the TREE_VALUE slots), by adding the hash codes
6942 of the individual types. */
6943
6944 static void
6945 type_hash_list (const_tree list, inchash::hash &hstate)
6946 {
6947 const_tree tail;
6948
6949 for (tail = list; tail; tail = TREE_CHAIN (tail))
6950 if (TREE_VALUE (tail) != error_mark_node)
6951 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6952 }
6953
6954 /* These are the Hashtable callback functions. */
6955
6956 /* Returns true iff the types are equivalent. */
6957
6958 bool
6959 type_cache_hasher::equal (type_hash *a, type_hash *b)
6960 {
6961 /* First test the things that are the same for all types. */
6962 if (a->hash != b->hash
6963 || TREE_CODE (a->type) != TREE_CODE (b->type)
6964 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6965 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6966 TYPE_ATTRIBUTES (b->type))
6967 || (TREE_CODE (a->type) != COMPLEX_TYPE
6968 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6969 return 0;
6970
6971 /* Be careful about comparing arrays before and after the element type
6972 has been completed; don't compare TYPE_ALIGN unless both types are
6973 complete. */
6974 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6975 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6976 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6977 return 0;
6978
6979 switch (TREE_CODE (a->type))
6980 {
6981 case VOID_TYPE:
6982 case COMPLEX_TYPE:
6983 case POINTER_TYPE:
6984 case REFERENCE_TYPE:
6985 case NULLPTR_TYPE:
6986 return 1;
6987
6988 case VECTOR_TYPE:
6989 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6990
6991 case ENUMERAL_TYPE:
6992 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6993 && !(TYPE_VALUES (a->type)
6994 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6995 && TYPE_VALUES (b->type)
6996 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6997 && type_list_equal (TYPE_VALUES (a->type),
6998 TYPE_VALUES (b->type))))
6999 return 0;
7000
7001 /* ... fall through ... */
7002
7003 case INTEGER_TYPE:
7004 case REAL_TYPE:
7005 case BOOLEAN_TYPE:
7006 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7007 return false;
7008 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7009 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7010 TYPE_MAX_VALUE (b->type)))
7011 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7012 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7013 TYPE_MIN_VALUE (b->type))));
7014
7015 case FIXED_POINT_TYPE:
7016 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7017
7018 case OFFSET_TYPE:
7019 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7020
7021 case METHOD_TYPE:
7022 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7023 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7024 || (TYPE_ARG_TYPES (a->type)
7025 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7026 && TYPE_ARG_TYPES (b->type)
7027 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7028 && type_list_equal (TYPE_ARG_TYPES (a->type),
7029 TYPE_ARG_TYPES (b->type)))))
7030 break;
7031 return 0;
7032 case ARRAY_TYPE:
7033 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7034
7035 case RECORD_TYPE:
7036 case UNION_TYPE:
7037 case QUAL_UNION_TYPE:
7038 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7039 || (TYPE_FIELDS (a->type)
7040 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7041 && TYPE_FIELDS (b->type)
7042 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7043 && type_list_equal (TYPE_FIELDS (a->type),
7044 TYPE_FIELDS (b->type))));
7045
7046 case FUNCTION_TYPE:
7047 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7048 || (TYPE_ARG_TYPES (a->type)
7049 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7050 && TYPE_ARG_TYPES (b->type)
7051 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7052 && type_list_equal (TYPE_ARG_TYPES (a->type),
7053 TYPE_ARG_TYPES (b->type))))
7054 break;
7055 return 0;
7056
7057 default:
7058 return 0;
7059 }
7060
7061 if (lang_hooks.types.type_hash_eq != NULL)
7062 return lang_hooks.types.type_hash_eq (a->type, b->type);
7063
7064 return 1;
7065 }
7066
7067 /* Given TYPE, and HASHCODE its hash code, return the canonical
7068 object for an identical type if one already exists.
7069 Otherwise, return TYPE, and record it as the canonical object.
7070
7071 To use this function, first create a type of the sort you want.
7072 Then compute its hash code from the fields of the type that
7073 make it different from other similar types.
7074 Then call this function and use the value. */
7075
7076 tree
7077 type_hash_canon (unsigned int hashcode, tree type)
7078 {
7079 type_hash in;
7080 type_hash **loc;
7081
7082 /* The hash table only contains main variants, so ensure that's what we're
7083 being passed. */
7084 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7085
7086 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7087 must call that routine before comparing TYPE_ALIGNs. */
7088 layout_type (type);
7089
7090 in.hash = hashcode;
7091 in.type = type;
7092
7093 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7094 if (*loc)
7095 {
7096 tree t1 = ((type_hash *) *loc)->type;
7097 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7098 free_node (type);
7099 return t1;
7100 }
7101 else
7102 {
7103 struct type_hash *h;
7104
7105 h = ggc_alloc<type_hash> ();
7106 h->hash = hashcode;
7107 h->type = type;
7108 *loc = h;
7109
7110 return type;
7111 }
7112 }
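
/* Illustrative sketch, assuming the pattern used by the type builders in
   this file: a caller of type_hash_canon typically does

     tree t = make_node (FUNCTION_TYPE);
     TREE_TYPE (t) = return_type;
     TYPE_ARG_TYPES (t) = arg_types;
     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (return_type));
     type_hash_list (arg_types, hstate);
     t = type_hash_canon (hstate.end (), t);

   where RETURN_TYPE and ARG_TYPES stand for whatever the caller has
   built.  If an equivalent type is already in the table, the freshly
   built node is freed and the canonical one is returned instead.  */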
7113
7114 static void
7115 print_type_hash_statistics (void)
7116 {
7117 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7118 (long) type_hash_table->size (),
7119 (long) type_hash_table->elements (),
7120 type_hash_table->collisions ());
7121 }
7122
7123 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7124 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7125 by adding the hash codes of the individual attributes. */
7126
7127 static void
7128 attribute_hash_list (const_tree list, inchash::hash &hstate)
7129 {
7130 const_tree tail;
7131
7132 for (tail = list; tail; tail = TREE_CHAIN (tail))
7133 /* ??? Do we want to add in TREE_VALUE too? */
7134 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7135 }
7136
7137 /* Given two lists of attributes, return true if list L2 is
7138 equivalent to L1. */
7139
7140 int
7141 attribute_list_equal (const_tree l1, const_tree l2)
7142 {
7143 if (l1 == l2)
7144 return 1;
7145
7146 return attribute_list_contained (l1, l2)
7147 && attribute_list_contained (l2, l1);
7148 }
7149
7150 /* Given two lists of attributes, return true if list L2 is
7151 completely contained within L1. */
7152 /* ??? This would be faster if attribute names were stored in a canonicalized
7153 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7154 must be used to show these elements are equivalent (which they are). */
7155 /* ??? It's not clear that attributes with arguments will always be handled
7156 correctly. */
7157
7158 int
7159 attribute_list_contained (const_tree l1, const_tree l2)
7160 {
7161 const_tree t1, t2;
7162
7163 /* First check the obvious, maybe the lists are identical. */
7164 if (l1 == l2)
7165 return 1;
7166
7167 /* Maybe the lists are similar. */
7168 for (t1 = l1, t2 = l2;
7169 t1 != 0 && t2 != 0
7170 && get_attribute_name (t1) == get_attribute_name (t2)
7171 && TREE_VALUE (t1) == TREE_VALUE (t2);
7172 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7173 ;
7174
7175 /* Maybe the lists are equal. */
7176 if (t1 == 0 && t2 == 0)
7177 return 1;
7178
7179 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7180 {
7181 const_tree attr;
7182 /* This CONST_CAST is okay because lookup_attribute does not
7183 modify its argument and the return value is assigned to a
7184 const_tree. */
7185 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7186 CONST_CAST_TREE (l1));
7187 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7188 attr = lookup_ident_attribute (get_attribute_name (t2),
7189 TREE_CHAIN (attr)))
7190 ;
7191
7192 if (attr == NULL_TREE)
7193 return 0;
7194 }
7195
7196 return 1;
7197 }
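
/* Worked example: with L1 = ((packed) (aligned (4))) and L2 = ((packed)),
   attribute_list_contained (l1, l2) returns 1, since every attribute of
   L2 appears in L1 with an equal value, while attribute_list_equal
   returns 0 because L1 is not contained in L2.  */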
7198
7199 /* Given two lists of types
7200 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7201 return 1 if the lists contain the same types in the same order.
7202 Also, the TREE_PURPOSEs must match. */
7203
7204 int
7205 type_list_equal (const_tree l1, const_tree l2)
7206 {
7207 const_tree t1, t2;
7208
7209 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7210 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7211 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7212 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7213 && (TREE_TYPE (TREE_PURPOSE (t1))
7214 == TREE_TYPE (TREE_PURPOSE (t2))))))
7215 return 0;
7216
7217 return t1 == t2;
7218 }
7219
7220 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7221 given by TYPE. If the argument list accepts variable arguments,
7222 then this function counts only the ordinary arguments. */
7223
7224 int
7225 type_num_arguments (const_tree type)
7226 {
7227 int i = 0;
7228 tree t;
7229
7230 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7231 /* If the function does not take a variable number of arguments,
7232 the last element in the list will have type `void'. */
7233 if (VOID_TYPE_P (TREE_VALUE (t)))
7234 break;
7235 else
7236 ++i;
7237
7238 return i;
7239 }
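
/* Illustrative note: for a prototyped 'int f (int, char *)' the
   TYPE_ARG_TYPES chain is int -> char * -> void, so type_num_arguments
   returns 2; for a varargs 'int g (int, ...)' the chain has no
   terminating void node and the result is 1, counting only the ordinary
   argument.  */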
7240
7241 /* Nonzero if integer constants T1 and T2
7242 represent the same constant value. */
7243
7244 int
7245 tree_int_cst_equal (const_tree t1, const_tree t2)
7246 {
7247 if (t1 == t2)
7248 return 1;
7249
7250 if (t1 == 0 || t2 == 0)
7251 return 0;
7252
7253 if (TREE_CODE (t1) == INTEGER_CST
7254 && TREE_CODE (t2) == INTEGER_CST
7255 && wi::to_widest (t1) == wi::to_widest (t2))
7256 return 1;
7257
7258 return 0;
7259 }
7260
7261 /* Return true if T is an INTEGER_CST whose numerical value (extended
7262 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7263
7264 bool
7265 tree_fits_shwi_p (const_tree t)
7266 {
7267 return (t != NULL_TREE
7268 && TREE_CODE (t) == INTEGER_CST
7269 && wi::fits_shwi_p (wi::to_widest (t)));
7270 }
7271
7272 /* Return true if T is an INTEGER_CST whose numerical value (extended
7273 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7274
7275 bool
7276 tree_fits_uhwi_p (const_tree t)
7277 {
7278 return (t != NULL_TREE
7279 && TREE_CODE (t) == INTEGER_CST
7280 && wi::fits_uhwi_p (wi::to_widest (t)));
7281 }
7282
7283 /* T is an INTEGER_CST whose numerical value (extended according to
7284 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7285 HOST_WIDE_INT. */
7286
7287 HOST_WIDE_INT
7288 tree_to_shwi (const_tree t)
7289 {
7290 gcc_assert (tree_fits_shwi_p (t));
7291 return TREE_INT_CST_LOW (t);
7292 }
7293
7294 /* T is an INTEGER_CST whose numerical value (extended according to
7295 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7296 HOST_WIDE_INT. */
7297
7298 unsigned HOST_WIDE_INT
7299 tree_to_uhwi (const_tree t)
7300 {
7301 gcc_assert (tree_fits_uhwi_p (t));
7302 return TREE_INT_CST_LOW (t);
7303 }
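
/* Illustrative sketch, assumed usage: the fits/to pairs above are meant
   to be used together, e.g.

     if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
       {
         unsigned HOST_WIDE_INT bytes = tree_to_uhwi (TYPE_SIZE_UNIT (type));
         ...
       }

   Calling tree_to_uhwi on a constant that does not fit trips the
   assertion above.  */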
7304
7305 /* Return the most significant (sign) bit of T. */
7306
7307 int
7308 tree_int_cst_sign_bit (const_tree t)
7309 {
7310 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7311
7312 return wi::extract_uhwi (t, bitno, 1);
7313 }
7314
7315 /* Return an indication of the sign of the integer constant T.
7316 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7317 Note that -1 will never be returned if T's type is unsigned. */
7318
7319 int
7320 tree_int_cst_sgn (const_tree t)
7321 {
7322 if (wi::eq_p (t, 0))
7323 return 0;
7324 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7325 return 1;
7326 else if (wi::neg_p (t))
7327 return -1;
7328 else
7329 return 1;
7330 }
7331
7332 /* Return the minimum number of bits needed to represent VALUE in a
7333 signed or unsigned type; SGN says which. */
7334
7335 unsigned int
7336 tree_int_cst_min_precision (tree value, signop sgn)
7337 {
7338 /* If the value is negative, compute its negative minus 1. The latter
7339 adjustment is because the absolute value of the largest negative value
7340 is one larger than the largest positive value. This is equivalent to
7341 a bit-wise negation, so use that operation instead. */
7342
7343 if (tree_int_cst_sgn (value) < 0)
7344 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7345
7346 /* Return the number of bits needed, taking into account the fact
7347 that we need one more bit for a signed than unsigned type.
7348 If VALUE is 0 or -1, the minimum precision is 1 no matter
7349 what SGN is. */
7350
7351 if (integer_zerop (value))
7352 return 1;
7353 else
7354 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7355 }
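
/* Worked example: for VALUE 5 (binary 101), tree_floor_log2 (5) is 2, so
   the result is 3 bits when SGN is UNSIGNED and 4 bits when SGN is
   SIGNED.  For VALUE -3 with SGN == SIGNED, the bit-wise negation gives
   2, and the result is 1 + 1 + 1 = 3 bits, enough for the two's
   complement pattern 101.  */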
7356
7357 /* Return truthvalue of whether T1 is the same tree structure as T2.
7358 Return 1 if they are the same.
7359 Return 0 if they are understandably different.
7360 Return -1 if either contains tree structure not understood by
7361 this function. */
7362
7363 int
7364 simple_cst_equal (const_tree t1, const_tree t2)
7365 {
7366 enum tree_code code1, code2;
7367 int cmp;
7368 int i;
7369
7370 if (t1 == t2)
7371 return 1;
7372 if (t1 == 0 || t2 == 0)
7373 return 0;
7374
7375 code1 = TREE_CODE (t1);
7376 code2 = TREE_CODE (t2);
7377
7378 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7379 {
7380 if (CONVERT_EXPR_CODE_P (code2)
7381 || code2 == NON_LVALUE_EXPR)
7382 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7383 else
7384 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7385 }
7386
7387 else if (CONVERT_EXPR_CODE_P (code2)
7388 || code2 == NON_LVALUE_EXPR)
7389 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7390
7391 if (code1 != code2)
7392 return 0;
7393
7394 switch (code1)
7395 {
7396 case INTEGER_CST:
7397 return wi::to_widest (t1) == wi::to_widest (t2);
7398
7399 case REAL_CST:
7400 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7401
7402 case FIXED_CST:
7403 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7404
7405 case STRING_CST:
7406 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7407 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7408 TREE_STRING_LENGTH (t1)));
7409
7410 case CONSTRUCTOR:
7411 {
7412 unsigned HOST_WIDE_INT idx;
7413 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7414 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7415
7416 if (vec_safe_length (v1) != vec_safe_length (v2))
7417 return false;
7418
7419 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7420 /* ??? Should we also handle fields here? */
7421 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7422 return false;
7423 return true;
7424 }
7425
7426 case SAVE_EXPR:
7427 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7428
7429 case CALL_EXPR:
7430 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7431 if (cmp <= 0)
7432 return cmp;
7433 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7434 return 0;
7435 {
7436 const_tree arg1, arg2;
7437 const_call_expr_arg_iterator iter1, iter2;
7438 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7439 arg2 = first_const_call_expr_arg (t2, &iter2);
7440 arg1 && arg2;
7441 arg1 = next_const_call_expr_arg (&iter1),
7442 arg2 = next_const_call_expr_arg (&iter2))
7443 {
7444 cmp = simple_cst_equal (arg1, arg2);
7445 if (cmp <= 0)
7446 return cmp;
7447 }
7448 return arg1 == arg2;
7449 }
7450
7451 case TARGET_EXPR:
7452 /* Special case: if either target is an unallocated VAR_DECL,
7453 it means that it's going to be unified with whatever the
7454 TARGET_EXPR is really supposed to initialize, so treat it
7455 as being equivalent to anything. */
7456 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7457 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7458 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7459 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7460 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7461 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7462 cmp = 1;
7463 else
7464 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7465
7466 if (cmp <= 0)
7467 return cmp;
7468
7469 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7470
7471 case WITH_CLEANUP_EXPR:
7472 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7473 if (cmp <= 0)
7474 return cmp;
7475
7476 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7477
7478 case COMPONENT_REF:
7479 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7480 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7481
7482 return 0;
7483
7484 case VAR_DECL:
7485 case PARM_DECL:
7486 case CONST_DECL:
7487 case FUNCTION_DECL:
7488 return 0;
7489
7490 default:
7491 break;
7492 }
7493
7494 /* This general rule works for most tree codes. All exceptions should be
7495 handled above. If this is a language-specific tree code, we can't
7496 trust what might be in the operand, so say we don't know
7497 the situation. */
7498 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7499 return -1;
7500
7501 switch (TREE_CODE_CLASS (code1))
7502 {
7503 case tcc_unary:
7504 case tcc_binary:
7505 case tcc_comparison:
7506 case tcc_expression:
7507 case tcc_reference:
7508 case tcc_statement:
7509 cmp = 1;
7510 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7511 {
7512 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7513 if (cmp <= 0)
7514 return cmp;
7515 }
7516
7517 return cmp;
7518
7519 default:
7520 return -1;
7521 }
7522 }
7523
7524 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7525 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7526 than U, respectively. */
7527
7528 int
7529 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7530 {
7531 if (tree_int_cst_sgn (t) < 0)
7532 return -1;
7533 else if (!tree_fits_uhwi_p (t))
7534 return 1;
7535 else if (TREE_INT_CST_LOW (t) == u)
7536 return 0;
7537 else if (TREE_INT_CST_LOW (t) < u)
7538 return -1;
7539 else
7540 return 1;
7541 }
7542
7543 /* Return true if SIZE represents a constant size that is in bounds of
7544 what the middle-end and the backend accepts (covering not more than
7545 half of the address-space). */
7546
7547 bool
7548 valid_constant_size_p (const_tree size)
7549 {
7550 if (! tree_fits_uhwi_p (size)
7551 || TREE_OVERFLOW (size)
7552 || tree_int_cst_sign_bit (size) != 0)
7553 return false;
7554 return true;
7555 }
7556
7557 /* Return the precision of the type, or for a complex or vector type the
7558 precision of the type of its elements. */
7559
7560 unsigned int
7561 element_precision (const_tree type)
7562 {
7563 if (!TYPE_P (type))
7564 type = TREE_TYPE (type);
7565 enum tree_code code = TREE_CODE (type);
7566 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7567 type = TREE_TYPE (type);
7568
7569 return TYPE_PRECISION (type);
7570 }
7571
7572 /* Return true if CODE represents an associative tree code. Otherwise
7573 return false. */
7574 bool
7575 associative_tree_code (enum tree_code code)
7576 {
7577 switch (code)
7578 {
7579 case BIT_IOR_EXPR:
7580 case BIT_AND_EXPR:
7581 case BIT_XOR_EXPR:
7582 case PLUS_EXPR:
7583 case MULT_EXPR:
7584 case MIN_EXPR:
7585 case MAX_EXPR:
7586 return true;
7587
7588 default:
7589 break;
7590 }
7591 return false;
7592 }
7593
7594 /* Return true if CODE represents a commutative tree code. Otherwise
7595 return false. */
7596 bool
7597 commutative_tree_code (enum tree_code code)
7598 {
7599 switch (code)
7600 {
7601 case PLUS_EXPR:
7602 case MULT_EXPR:
7603 case MULT_HIGHPART_EXPR:
7604 case MIN_EXPR:
7605 case MAX_EXPR:
7606 case BIT_IOR_EXPR:
7607 case BIT_XOR_EXPR:
7608 case BIT_AND_EXPR:
7609 case NE_EXPR:
7610 case EQ_EXPR:
7611 case UNORDERED_EXPR:
7612 case ORDERED_EXPR:
7613 case UNEQ_EXPR:
7614 case LTGT_EXPR:
7615 case TRUTH_AND_EXPR:
7616 case TRUTH_XOR_EXPR:
7617 case TRUTH_OR_EXPR:
7618 case WIDEN_MULT_EXPR:
7619 case VEC_WIDEN_MULT_HI_EXPR:
7620 case VEC_WIDEN_MULT_LO_EXPR:
7621 case VEC_WIDEN_MULT_EVEN_EXPR:
7622 case VEC_WIDEN_MULT_ODD_EXPR:
7623 return true;
7624
7625 default:
7626 break;
7627 }
7628 return false;
7629 }
7630
7631 /* Return true if CODE represents a ternary tree code for which the
7632 first two operands are commutative. Otherwise return false. */
7633 bool
7634 commutative_ternary_tree_code (enum tree_code code)
7635 {
7636 switch (code)
7637 {
7638 case WIDEN_MULT_PLUS_EXPR:
7639 case WIDEN_MULT_MINUS_EXPR:
7640 case DOT_PROD_EXPR:
7641 case FMA_EXPR:
7642 return true;
7643
7644 default:
7645 break;
7646 }
7647 return false;
7648 }
7649
7650 /* Returns true if CODE can overflow. */
7651
7652 bool
7653 operation_can_overflow (enum tree_code code)
7654 {
7655 switch (code)
7656 {
7657 case PLUS_EXPR:
7658 case MINUS_EXPR:
7659 case MULT_EXPR:
7660 case LSHIFT_EXPR:
7661 /* Can overflow in various ways. */
7662 return true;
7663 case TRUNC_DIV_EXPR:
7664 case EXACT_DIV_EXPR:
7665 case FLOOR_DIV_EXPR:
7666 case CEIL_DIV_EXPR:
7667 /* For INT_MIN / -1. */
7668 return true;
7669 case NEGATE_EXPR:
7670 case ABS_EXPR:
7671 /* For -INT_MIN. */
7672 return true;
7673 default:
7674 /* These operators cannot overflow. */
7675 return false;
7676 }
7677 }
7678
7679 /* Returns true if CODE operating on operands of type TYPE doesn't overflow,
7680 or if -ftrapv doesn't generate trapping insns for CODE. */
7681
7682 bool
7683 operation_no_trapping_overflow (tree type, enum tree_code code)
7684 {
7685 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7686
7687 /* We don't generate instructions that trap on overflow for complex or vector
7688 types. */
7689 if (!INTEGRAL_TYPE_P (type))
7690 return true;
7691
7692 if (!TYPE_OVERFLOW_TRAPS (type))
7693 return true;
7694
7695 switch (code)
7696 {
7697 case PLUS_EXPR:
7698 case MINUS_EXPR:
7699 case MULT_EXPR:
7700 case NEGATE_EXPR:
7701 case ABS_EXPR:
7702 /* These operators can overflow, and -ftrapv generates trapping code for
7703 these. */
7704 return false;
7705 case TRUNC_DIV_EXPR:
7706 case EXACT_DIV_EXPR:
7707 case FLOOR_DIV_EXPR:
7708 case CEIL_DIV_EXPR:
7709 case LSHIFT_EXPR:
7710 /* These operators can overflow, but -ftrapv does not generate trapping
7711 code for these. */
7712 return true;
7713 default:
7714 /* These operators cannot overflow. */
7715 return true;
7716 }
7717 }
7718
7719 namespace inchash
7720 {
7721
7722 /* Generate a hash value for an expression. This can be used iteratively
7723 by passing a previous result as the HSTATE argument.
7724
7725 This function is intended to produce the same hash for expressions which
7726 would compare equal using operand_equal_p. */
7727 void
7728 add_expr (const_tree t, inchash::hash &hstate)
7729 {
7730 int i;
7731 enum tree_code code;
7732 enum tree_code_class tclass;
7733
7734 if (t == NULL_TREE)
7735 {
7736 hstate.merge_hash (0);
7737 return;
7738 }
7739
7740 code = TREE_CODE (t);
7741
7742 switch (code)
7743 {
7744 /* Alas, constants aren't shared, so we can't rely on pointer
7745 identity. */
7746 case VOID_CST:
7747 hstate.merge_hash (0);
7748 return;
7749 case INTEGER_CST:
7750 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7751 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7752 return;
7753 case REAL_CST:
7754 {
7755 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7756 hstate.merge_hash (val2);
7757 return;
7758 }
7759 case FIXED_CST:
7760 {
7761 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7762 hstate.merge_hash (val2);
7763 return;
7764 }
7765 case STRING_CST:
7766 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7767 return;
7768 case COMPLEX_CST:
7769 inchash::add_expr (TREE_REALPART (t), hstate);
7770 inchash::add_expr (TREE_IMAGPART (t), hstate);
7771 return;
7772 case VECTOR_CST:
7773 {
7774 unsigned i;
7775 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7776 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7777 return;
7778 }
7779 case SSA_NAME:
7780 /* We can just compare by pointer. */
7781 hstate.add_wide_int (SSA_NAME_VERSION (t));
7782 return;
7783 case PLACEHOLDER_EXPR:
7784 /* The node itself doesn't matter. */
7785 return;
7786 case TREE_LIST:
7787 /* A list of expressions, for a CALL_EXPR or as the elements of a
7788 VECTOR_CST. */
7789 for (; t; t = TREE_CHAIN (t))
7790 inchash::add_expr (TREE_VALUE (t), hstate);
7791 return;
7792 case CONSTRUCTOR:
7793 {
7794 unsigned HOST_WIDE_INT idx;
7795 tree field, value;
7796 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7797 {
7798 inchash::add_expr (field, hstate);
7799 inchash::add_expr (value, hstate);
7800 }
7801 return;
7802 }
7803 case FUNCTION_DECL:
7804 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7805 Otherwise nodes that compare equal according to operand_equal_p might
7806 get different hash codes. However, don't do this for machine specific
7807 or front end builtins, since the function code is overloaded in those
7808 cases. */
7809 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7810 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7811 {
7812 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7813 code = TREE_CODE (t);
7814 }
7815 /* FALL THROUGH */
7816 default:
7817 tclass = TREE_CODE_CLASS (code);
7818
7819 if (tclass == tcc_declaration)
7820 {
7821 /* DECLs have a unique ID. */
7822 hstate.add_wide_int (DECL_UID (t));
7823 }
7824 else
7825 {
7826 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7827
7828 hstate.add_object (code);
7829
7830 /* Don't hash the type; that can lead to nodes which compare
7831 equal according to operand_equal_p but which have different
7832 hash codes. */
7833 if (CONVERT_EXPR_CODE_P (code)
7834 || code == NON_LVALUE_EXPR)
7835 {
7836 /* Make sure to include signedness in the hash computation. */
7837 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7838 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7839 }
7840
7841 else if (commutative_tree_code (code))
7842 {
7843 /* It's a commutative expression. We want to hash it the same
7844 however it appears. We do this by first hashing both operands
7845 and then rehashing based on the order of their independent
7846 hashes. */
7847 inchash::hash one, two;
7848 inchash::add_expr (TREE_OPERAND (t, 0), one);
7849 inchash::add_expr (TREE_OPERAND (t, 1), two);
7850 hstate.add_commutative (one, two);
7851 }
7852 else
7853 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7854 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7855 }
7856 return;
7857 }
7858 }
7859
7860 }
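
/* Editorial usage sketch, not part of upstream tree.c: producing a hash
   value for a single expression with the iterative API above.  Expressions
   that operand_equal_p treats as equal are intended to hash identically.
   The function name is hypothetical.  */
static hashval_t
example_hash_expr (const_tree expr)
{
  inchash::hash hstate;
  inchash::add_expr (expr, hstate);
  return hstate.end ();
}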
7861
7862 /* Constructors for pointer, array and function types.
7863 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7864 constructed by language-dependent code, not here.) */
7865
7866 /* Construct, lay out and return the type of pointers to TO_TYPE with
7867 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7868 reference all of memory. If such a type has already been
7869 constructed, reuse it. */
7870
7871 tree
7872 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7873 bool can_alias_all)
7874 {
7875 tree t;
7876 bool could_alias = can_alias_all;
7877
7878 if (to_type == error_mark_node)
7879 return error_mark_node;
7880
7881 /* If the pointed-to type has the may_alias attribute set, force
7882 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7883 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7884 can_alias_all = true;
7885
7886 /* In some cases, languages will have things that aren't a POINTER_TYPE
7887 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7888 In that case, return that type without regard to the rest of our
7889 operands.
7890
7891 ??? This is a kludge, but consistent with the way this function has
7892 always operated and there doesn't seem to be a good way to avoid this
7893 at the moment. */
7894 if (TYPE_POINTER_TO (to_type) != 0
7895 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7896 return TYPE_POINTER_TO (to_type);
7897
7898 /* First, if we already have a type for pointers to TO_TYPE and it's
7899 the proper mode, use it. */
7900 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7901 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7902 return t;
7903
7904 t = make_node (POINTER_TYPE);
7905
7906 TREE_TYPE (t) = to_type;
7907 SET_TYPE_MODE (t, mode);
7908 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7909 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7910 TYPE_POINTER_TO (to_type) = t;
7911
7912 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7913 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7914 SET_TYPE_STRUCTURAL_EQUALITY (t);
7915 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7916 TYPE_CANONICAL (t)
7917 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7918 mode, false);
7919
7920 /* Lay out the type. This function has many callers that are concerned
7921 with expression-construction, and this simplifies them all. */
7922 layout_type (t);
7923
7924 return t;
7925 }
7926
7927 /* Build pointers in the default pointer mode of TO_TYPE's address space. */
7928
7929 tree
7930 build_pointer_type (tree to_type)
7931 {
7932 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7933 : TYPE_ADDR_SPACE (to_type);
7934 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7935 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7936 }
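
/* Editorial usage sketch, not part of upstream tree.c: the middle-end node
   for the C type "int *".  Repeated calls reuse the node chained off
   TYPE_POINTER_TO, so this is cheap.  The function name is hypothetical.  */
static tree
example_int_pointer_type (void)
{
  return build_pointer_type (integer_type_node);
}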
7937
7938 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7939
7940 tree
7941 build_reference_type_for_mode (tree to_type, machine_mode mode,
7942 bool can_alias_all)
7943 {
7944 tree t;
7945 bool could_alias = can_alias_all;
7946
7947 if (to_type == error_mark_node)
7948 return error_mark_node;
7949
7950 /* If the pointed-to type has the may_alias attribute set, force
7951 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7952 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7953 can_alias_all = true;
7954
7955 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7956 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7957 In that case, return that type without regard to the rest of our
7958 operands.
7959
7960 ??? This is a kludge, but consistent with the way this function has
7961 always operated and there doesn't seem to be a good way to avoid this
7962 at the moment. */
7963 if (TYPE_REFERENCE_TO (to_type) != 0
7964 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7965 return TYPE_REFERENCE_TO (to_type);
7966
7967 /* First, if we already have a type for references to TO_TYPE and it's
7968 the proper mode, use it. */
7969 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7970 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7971 return t;
7972
7973 t = make_node (REFERENCE_TYPE);
7974
7975 TREE_TYPE (t) = to_type;
7976 SET_TYPE_MODE (t, mode);
7977 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7978 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7979 TYPE_REFERENCE_TO (to_type) = t;
7980
7981 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7982 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7983 SET_TYPE_STRUCTURAL_EQUALITY (t);
7984 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7985 TYPE_CANONICAL (t)
7986 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7987 mode, false);
7988
7989 layout_type (t);
7990
7991 return t;
7992 }
7993
7994
7995 /* Build the node for the type of references-to-TO_TYPE, using the
7996 default pointer mode of TO_TYPE's address space. */
7997
7998 tree
7999 build_reference_type (tree to_type)
8000 {
8001 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8002 : TYPE_ADDR_SPACE (to_type);
8003 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8004 return build_reference_type_for_mode (to_type, pointer_mode, false);
8005 }
8006
8007 #define MAX_INT_CACHED_PREC \
8008 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8009 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8010
8011 /* Builds a signed or unsigned integer type of precision PRECISION.
8012 Used for C bitfields whose precision does not match that of
8013 built-in target types. */
8014 tree
8015 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8016 int unsignedp)
8017 {
8018 tree itype, ret;
8019
8020 if (unsignedp)
8021 unsignedp = MAX_INT_CACHED_PREC + 1;
8022
8023 if (precision <= MAX_INT_CACHED_PREC)
8024 {
8025 itype = nonstandard_integer_type_cache[precision + unsignedp];
8026 if (itype)
8027 return itype;
8028 }
8029
8030 itype = make_node (INTEGER_TYPE);
8031 TYPE_PRECISION (itype) = precision;
8032
8033 if (unsignedp)
8034 fixup_unsigned_type (itype);
8035 else
8036 fixup_signed_type (itype);
8037
8038 ret = itype;
8039 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8040 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8041 if (precision <= MAX_INT_CACHED_PREC)
8042 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8043
8044 return ret;
8045 }
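
/* Editorial usage sketch, not part of upstream tree.c: the 24-bit unsigned
   type a front end might request for a bit-field of that width; results are
   memoized in nonstandard_integer_type_cache.  The function name is
   hypothetical.  */
static tree
example_uint24_type (void)
{
  return build_nonstandard_integer_type (24, /*unsignedp=*/1);
}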
8046
8047 #define MAX_BOOL_CACHED_PREC \
8048 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8049 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8050
8051 /* Builds a boolean type of precision PRECISION.
8052 Used for boolean vectors to choose proper vector element size. */
8053 tree
8054 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8055 {
8056 tree type;
8057
8058 if (precision <= MAX_BOOL_CACHED_PREC)
8059 {
8060 type = nonstandard_boolean_type_cache[precision];
8061 if (type)
8062 return type;
8063 }
8064
8065 type = make_node (BOOLEAN_TYPE);
8066 TYPE_PRECISION (type) = precision;
8067 fixup_signed_type (type);
8068
8069 if (precision <= MAX_BOOL_CACHED_PREC)
8070 nonstandard_boolean_type_cache[precision] = type;
8071
8072 return type;
8073 }
8074
8075 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8076 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8077 is true, reuse such a type that has already been constructed. */
8078
8079 static tree
8080 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8081 {
8082 tree itype = make_node (INTEGER_TYPE);
8083 inchash::hash hstate;
8084
8085 TREE_TYPE (itype) = type;
8086
8087 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8088 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8089
8090 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8091 SET_TYPE_MODE (itype, TYPE_MODE (type));
8092 TYPE_SIZE (itype) = TYPE_SIZE (type);
8093 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8094 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
8095 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8096
8097 if (!shared)
8098 return itype;
8099
8100 if ((TYPE_MIN_VALUE (itype)
8101 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8102 || (TYPE_MAX_VALUE (itype)
8103 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8104 {
8105 /* Since we cannot reliably merge this type, we need to compare it using
8106 structural equality checks. */
8107 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8108 return itype;
8109 }
8110
8111 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8112 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8113 hstate.merge_hash (TYPE_HASH (type));
8114 itype = type_hash_canon (hstate.end (), itype);
8115
8116 return itype;
8117 }
8118
8119 /* Wrapper around build_range_type_1 with SHARED set to true. */
8120
8121 tree
8122 build_range_type (tree type, tree lowval, tree highval)
8123 {
8124 return build_range_type_1 (type, lowval, highval, true);
8125 }
8126
8127 /* Wrapper around build_range_type_1 with SHARED set to false. */
8128
8129 tree
8130 build_nonshared_range_type (tree type, tree lowval, tree highval)
8131 {
8132 return build_range_type_1 (type, lowval, highval, false);
8133 }
8134
8135 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8136 MAXVAL should be the maximum value in the domain
8137 (one less than the length of the array).
8138
8139 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8140 We don't enforce this limit, that is up to caller (e.g. language front end).
8141 The limit exists because the result is a signed type and we don't handle
8142 sizes that use more than one HOST_WIDE_INT. */
8143
8144 tree
8145 build_index_type (tree maxval)
8146 {
8147 return build_range_type (sizetype, size_zero_node, maxval);
8148 }
8149
8150 /* Return true if the debug information for TYPE, a subtype, should be emitted
8151 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8152 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8153 debug info and doesn't reflect the source code. */
8154
8155 bool
8156 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8157 {
8158 tree base_type = TREE_TYPE (type), low, high;
8159
8160 /* Subrange types have a base type which is an integral type. */
8161 if (!INTEGRAL_TYPE_P (base_type))
8162 return false;
8163
8164 /* Get the real bounds of the subtype. */
8165 if (lang_hooks.types.get_subrange_bounds)
8166 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8167 else
8168 {
8169 low = TYPE_MIN_VALUE (type);
8170 high = TYPE_MAX_VALUE (type);
8171 }
8172
8173 /* If the type and its base type have the same representation and the same
8174 name, then the type is not a subrange but a copy of the base type. */
8175 if ((TREE_CODE (base_type) == INTEGER_TYPE
8176 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8177 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8178 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8179 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8180 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8181 return false;
8182
8183 if (lowval)
8184 *lowval = low;
8185 if (highval)
8186 *highval = high;
8187 return true;
8188 }
8189
8190 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8191 and number of elements specified by the range of values of INDEX_TYPE.
8192 If SHARED is true, reuse such a type that has already been constructed. */
8193
8194 static tree
8195 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8196 {
8197 tree t;
8198
8199 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8200 {
8201 error ("arrays of functions are not meaningful");
8202 elt_type = integer_type_node;
8203 }
8204
8205 t = make_node (ARRAY_TYPE);
8206 TREE_TYPE (t) = elt_type;
8207 TYPE_DOMAIN (t) = index_type;
8208 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8209 layout_type (t);
8210
8211 /* If the element type is incomplete at this point we get marked for
8212 structural equality. Do not record these types in the canonical
8213 type hashtable. */
8214 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8215 return t;
8216
8217 if (shared)
8218 {
8219 inchash::hash hstate;
8220 hstate.add_object (TYPE_HASH (elt_type));
8221 if (index_type)
8222 hstate.add_object (TYPE_HASH (index_type));
8223 t = type_hash_canon (hstate.end (), t);
8224 }
8225
8226 if (TYPE_CANONICAL (t) == t)
8227 {
8228 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8229 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8230 || in_lto_p)
8231 SET_TYPE_STRUCTURAL_EQUALITY (t);
8232 else if (TYPE_CANONICAL (elt_type) != elt_type
8233 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8234 TYPE_CANONICAL (t)
8235 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8236 index_type
8237 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8238 shared);
8239 }
8240
8241 return t;
8242 }
8243
8244 /* Wrapper around build_array_type_1 with SHARED set to true. */
8245
8246 tree
8247 build_array_type (tree elt_type, tree index_type)
8248 {
8249 return build_array_type_1 (elt_type, index_type, true);
8250 }
8251
8252 /* Wrapper around build_array_type_1 with SHARED set to false. */
8253
8254 tree
8255 build_nonshared_array_type (tree elt_type, tree index_type)
8256 {
8257 return build_array_type_1 (elt_type, index_type, false);
8258 }
8259
8260 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8261 sizetype. */
8262
8263 tree
8264 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8265 {
8266 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8267 }
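
/* Editorial usage sketch, not part of upstream tree.c: two ways of building
   the type "int[10]".  Both go through type_hash_canon, so they normally
   yield the same shared node.  The function name is hypothetical.  */
static tree
example_int_array_10_type (void)
{
  /* Spell out the sizetype domain 0..9 ...  */
  tree domain = build_index_type (size_int (9));
  tree a1 = build_array_type (integer_type_node, domain);
  /* ... or derive it from the element count.  */
  tree a2 = build_array_type_nelts (integer_type_node, 10);
  /* The type hash table normally makes these the very same node.  */
  gcc_checking_assert (a1 == a2);
  return a2;
}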
8268
8269 /* Strip ARRAY_TYPE wrappers from TYPE until a non-array element type
8270 is found, and return that element type. */
8271
8272 tree
8273 strip_array_types (tree type)
8274 {
8275 while (TREE_CODE (type) == ARRAY_TYPE)
8276 type = TREE_TYPE (type);
8277
8278 return type;
8279 }
8280
8281 /* Computes the canonical argument types from the argument type list
8282 ARGTYPES.
8283
8284 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8285 on entry to this function, or if any of the ARGTYPES are
8286 structural.
8287
8288 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8289 true on entry to this function, or if any of the ARGTYPES are
8290 non-canonical.
8291
8292 Returns a canonical argument list, which may be ARGTYPES when the
8293 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8294 true) or would not differ from ARGTYPES. */
8295
8296 static tree
8297 maybe_canonicalize_argtypes (tree argtypes,
8298 bool *any_structural_p,
8299 bool *any_noncanonical_p)
8300 {
8301 tree arg;
8302 bool any_noncanonical_argtypes_p = false;
8303
8304 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8305 {
8306 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8307 /* Fail gracefully by stating that the type is structural. */
8308 *any_structural_p = true;
8309 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8310 *any_structural_p = true;
8311 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8312 || TREE_PURPOSE (arg))
8313 /* If the argument has a default argument, we consider it
8314 non-canonical even though the type itself is canonical.
8315 That way, different variants of function and method types
8316 with default arguments will all point to the variant with
8317 no defaults as their canonical type. */
8318 any_noncanonical_argtypes_p = true;
8319 }
8320
8321 if (*any_structural_p)
8322 return argtypes;
8323
8324 if (any_noncanonical_argtypes_p)
8325 {
8326 /* Build the canonical list of argument types. */
8327 tree canon_argtypes = NULL_TREE;
8328 bool is_void = false;
8329
8330 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8331 {
8332 if (arg == void_list_node)
8333 is_void = true;
8334 else
8335 canon_argtypes = tree_cons (NULL_TREE,
8336 TYPE_CANONICAL (TREE_VALUE (arg)),
8337 canon_argtypes);
8338 }
8339
8340 canon_argtypes = nreverse (canon_argtypes);
8341 if (is_void)
8342 canon_argtypes = chainon (canon_argtypes, void_list_node);
8343
8344 /* There is a non-canonical type. */
8345 *any_noncanonical_p = true;
8346 return canon_argtypes;
8347 }
8348
8349 /* The canonical argument types are the same as ARGTYPES. */
8350 return argtypes;
8351 }
8352
8353 /* Construct, lay out and return
8354 the type of functions returning type VALUE_TYPE
8355 given arguments of types ARG_TYPES.
8356 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8357 are data type nodes for the arguments of the function.
8358 If such a type has already been constructed, reuse it. */
8359
8360 tree
8361 build_function_type (tree value_type, tree arg_types)
8362 {
8363 tree t;
8364 inchash::hash hstate;
8365 bool any_structural_p, any_noncanonical_p;
8366 tree canon_argtypes;
8367
8368 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8369 {
8370 error ("function return type cannot be function");
8371 value_type = integer_type_node;
8372 }
8373
8374 /* Make a node of the sort we want. */
8375 t = make_node (FUNCTION_TYPE);
8376 TREE_TYPE (t) = value_type;
8377 TYPE_ARG_TYPES (t) = arg_types;
8378
8379 /* If we already have such a type, use the old one. */
8380 hstate.add_object (TYPE_HASH (value_type));
8381 type_hash_list (arg_types, hstate);
8382 t = type_hash_canon (hstate.end (), t);
8383
8384 /* Set up the canonical type. */
8385 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8386 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8387 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8388 &any_structural_p,
8389 &any_noncanonical_p);
8390 if (any_structural_p)
8391 SET_TYPE_STRUCTURAL_EQUALITY (t);
8392 else if (any_noncanonical_p)
8393 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8394 canon_argtypes);
8395
8396 if (!COMPLETE_TYPE_P (t))
8397 layout_type (t);
8398 return t;
8399 }
8400
8401 /* Build a function type. The RETURN_TYPE is the type returned by the
8402 function. If VAARGS is set, no void_type_node is appended to
8403 the list. ARGP must always be terminated by a NULL_TREE. */
8404
8405 static tree
8406 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8407 {
8408 tree t, args, last;
8409
8410 t = va_arg (argp, tree);
8411 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8412 args = tree_cons (NULL_TREE, t, args);
8413
8414 if (vaargs)
8415 {
8416 last = args;
8417 if (args != NULL_TREE)
8418 args = nreverse (args);
8419 gcc_assert (last != void_list_node);
8420 }
8421 else if (args == NULL_TREE)
8422 args = void_list_node;
8423 else
8424 {
8425 last = args;
8426 args = nreverse (args);
8427 TREE_CHAIN (last) = void_list_node;
8428 }
8429 args = build_function_type (return_type, args);
8430
8431 return args;
8432 }
8433
8434 /* Build a function type. The RETURN_TYPE is the type returned by the
8435 function. If additional arguments are provided, they are
8436 additional argument types. The list of argument types must always
8437 be terminated by NULL_TREE. */
8438
8439 tree
8440 build_function_type_list (tree return_type, ...)
8441 {
8442 tree args;
8443 va_list p;
8444
8445 va_start (p, return_type);
8446 args = build_function_type_list_1 (false, return_type, p);
8447 va_end (p);
8448 return args;
8449 }
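
/* Editorial usage sketch, not part of upstream tree.c: the FUNCTION_TYPE of
   "int f (char *, int)", as one might build when declaring a built-in.  The
   argument list is NULL_TREE-terminated; the function name is
   hypothetical.  */
static tree
example_fn_type (void)
{
  return build_function_type_list (integer_type_node,
				   build_pointer_type (char_type_node),
				   integer_type_node,
				   NULL_TREE);
}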
8450
8451 /* Build a variable argument function type. The RETURN_TYPE is the
8452 type returned by the function. If additional arguments are provided,
8453 they are additional argument types. The list of argument types must
8454 always be terminated by NULL_TREE. */
8455
8456 tree
8457 build_varargs_function_type_list (tree return_type, ...)
8458 {
8459 tree args;
8460 va_list p;
8461
8462 va_start (p, return_type);
8463 args = build_function_type_list_1 (true, return_type, p);
8464 va_end (p);
8465
8466 return args;
8467 }
8468
8469 /* Build a function type. RETURN_TYPE is the type returned by the
8470 function; VAARGS indicates whether the function takes varargs. The
8471 function takes N named arguments, the types of which are provided in
8472 ARG_TYPES. */
8473
8474 static tree
8475 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8476 tree *arg_types)
8477 {
8478 int i;
8479 tree t = vaargs ? NULL_TREE : void_list_node;
8480
8481 for (i = n - 1; i >= 0; i--)
8482 t = tree_cons (NULL_TREE, arg_types[i], t);
8483
8484 return build_function_type (return_type, t);
8485 }
8486
8487 /* Build a function type. RETURN_TYPE is the type returned by the
8488 function. The function takes N named arguments, the types of which
8489 are provided in ARG_TYPES. */
8490
8491 tree
8492 build_function_type_array (tree return_type, int n, tree *arg_types)
8493 {
8494 return build_function_type_array_1 (false, return_type, n, arg_types);
8495 }
8496
8497 /* Build a variable argument function type. RETURN_TYPE is the type
8498 returned by the function. The function takes N named arguments, the
8499 types of which are provided in ARG_TYPES. */
8500
8501 tree
8502 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8503 {
8504 return build_function_type_array_1 (true, return_type, n, arg_types);
8505 }
8506
8507 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8508 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8509 for the method. An implicit additional parameter (of type
8510 pointer-to-BASETYPE) is added to the ARGTYPES. */
8511
8512 tree
8513 build_method_type_directly (tree basetype,
8514 tree rettype,
8515 tree argtypes)
8516 {
8517 tree t;
8518 tree ptype;
8519 inchash::hash hstate;
8520 bool any_structural_p, any_noncanonical_p;
8521 tree canon_argtypes;
8522
8523 /* Make a node of the sort we want. */
8524 t = make_node (METHOD_TYPE);
8525
8526 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8527 TREE_TYPE (t) = rettype;
8528 ptype = build_pointer_type (basetype);
8529
8530 /* The actual arglist for this function includes a "hidden" argument
8531 which is "this". Put it into the list of argument types. */
8532 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8533 TYPE_ARG_TYPES (t) = argtypes;
8534
8535 /* If we already have such a type, use the old one. */
8536 hstate.add_object (TYPE_HASH (basetype));
8537 hstate.add_object (TYPE_HASH (rettype));
8538 type_hash_list (argtypes, hstate);
8539 t = type_hash_canon (hstate.end (), t);
8540
8541 /* Set up the canonical type. */
8542 any_structural_p
8543 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8544 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8545 any_noncanonical_p
8546 = (TYPE_CANONICAL (basetype) != basetype
8547 || TYPE_CANONICAL (rettype) != rettype);
8548 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8549 &any_structural_p,
8550 &any_noncanonical_p);
8551 if (any_structural_p)
8552 SET_TYPE_STRUCTURAL_EQUALITY (t);
8553 else if (any_noncanonical_p)
8554 TYPE_CANONICAL (t)
8555 = build_method_type_directly (TYPE_CANONICAL (basetype),
8556 TYPE_CANONICAL (rettype),
8557 canon_argtypes);
8558 if (!COMPLETE_TYPE_P (t))
8559 layout_type (t);
8560
8561 return t;
8562 }
8563
8564 /* Construct, lay out and return the type of methods belonging to class
8565 BASETYPE and whose arguments and values are described by TYPE.
8566 If that type exists already, reuse it.
8567 TYPE must be a FUNCTION_TYPE node. */
8568
8569 tree
8570 build_method_type (tree basetype, tree type)
8571 {
8572 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8573
8574 return build_method_type_directly (basetype,
8575 TREE_TYPE (type),
8576 TYPE_ARG_TYPES (type));
8577 }
8578
8579 /* Construct, lay out and return the type of offsets to a value
8580 of type TYPE, within an object of type BASETYPE.
8581 If a suitable offset type exists already, reuse it. */
8582
8583 tree
8584 build_offset_type (tree basetype, tree type)
8585 {
8586 tree t;
8587 inchash::hash hstate;
8588
8589 /* Make a node of the sort we want. */
8590 t = make_node (OFFSET_TYPE);
8591
8592 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8593 TREE_TYPE (t) = type;
8594
8595 /* If we already have such a type, use the old one. */
8596 hstate.add_object (TYPE_HASH (basetype));
8597 hstate.add_object (TYPE_HASH (type));
8598 t = type_hash_canon (hstate.end (), t);
8599
8600 if (!COMPLETE_TYPE_P (t))
8601 layout_type (t);
8602
8603 if (TYPE_CANONICAL (t) == t)
8604 {
8605 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8606 || TYPE_STRUCTURAL_EQUALITY_P (type))
8607 SET_TYPE_STRUCTURAL_EQUALITY (t);
8608 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8609 || TYPE_CANONICAL (type) != type)
8610 TYPE_CANONICAL (t)
8611 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8612 TYPE_CANONICAL (type));
8613 }
8614
8615 return t;
8616 }
8617
8618 /* Create a complex type whose components are COMPONENT_TYPE. */
8619
8620 tree
8621 build_complex_type (tree component_type)
8622 {
8623 tree t;
8624 inchash::hash hstate;
8625
8626 gcc_assert (INTEGRAL_TYPE_P (component_type)
8627 || SCALAR_FLOAT_TYPE_P (component_type)
8628 || FIXED_POINT_TYPE_P (component_type));
8629
8630 /* Make a node of the sort we want. */
8631 t = make_node (COMPLEX_TYPE);
8632
8633 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8634
8635 /* If we already have such a type, use the old one. */
8636 hstate.add_object (TYPE_HASH (component_type));
8637 t = type_hash_canon (hstate.end (), t);
8638
8639 if (!COMPLETE_TYPE_P (t))
8640 layout_type (t);
8641
8642 if (TYPE_CANONICAL (t) == t)
8643 {
8644 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8645 SET_TYPE_STRUCTURAL_EQUALITY (t);
8646 else if (TYPE_CANONICAL (component_type) != component_type)
8647 TYPE_CANONICAL (t)
8648 = build_complex_type (TYPE_CANONICAL (component_type));
8649 }
8650
8651 /* We need to create a name, since complex is a fundamental type. */
8652 if (! TYPE_NAME (t))
8653 {
8654 const char *name;
8655 if (component_type == char_type_node)
8656 name = "complex char";
8657 else if (component_type == signed_char_type_node)
8658 name = "complex signed char";
8659 else if (component_type == unsigned_char_type_node)
8660 name = "complex unsigned char";
8661 else if (component_type == short_integer_type_node)
8662 name = "complex short int";
8663 else if (component_type == short_unsigned_type_node)
8664 name = "complex short unsigned int";
8665 else if (component_type == integer_type_node)
8666 name = "complex int";
8667 else if (component_type == unsigned_type_node)
8668 name = "complex unsigned int";
8669 else if (component_type == long_integer_type_node)
8670 name = "complex long int";
8671 else if (component_type == long_unsigned_type_node)
8672 name = "complex long unsigned int";
8673 else if (component_type == long_long_integer_type_node)
8674 name = "complex long long int";
8675 else if (component_type == long_long_unsigned_type_node)
8676 name = "complex long long unsigned int";
8677 else
8678 name = 0;
8679
8680 if (name != 0)
8681 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8682 get_identifier (name), t);
8683 }
8684
8685 return build_qualified_type (t, TYPE_QUALS (component_type));
8686 }
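
/* Editorial usage sketch, not part of upstream tree.c: the node for the C
   type "_Complex double"; the type hash table shares it with any node built
   earlier for the same component type.  The function name is
   hypothetical.  */
static tree
example_complex_double_type (void)
{
  return build_complex_type (double_type_node);
}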
8687
8688 /* If TYPE is a real or complex floating-point type and the target
8689 does not directly support arithmetic on TYPE then return the wider
8690 type to be used for arithmetic on TYPE. Otherwise, return
8691 NULL_TREE. */
8692
8693 tree
8694 excess_precision_type (tree type)
8695 {
8696 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8697 {
8698 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8699 switch (TREE_CODE (type))
8700 {
8701 case REAL_TYPE:
8702 switch (flt_eval_method)
8703 {
8704 case 1:
8705 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8706 return double_type_node;
8707 break;
8708 case 2:
8709 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8710 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8711 return long_double_type_node;
8712 break;
8713 default:
8714 gcc_unreachable ();
8715 }
8716 break;
8717 case COMPLEX_TYPE:
8718 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8719 return NULL_TREE;
8720 switch (flt_eval_method)
8721 {
8722 case 1:
8723 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8724 return complex_double_type_node;
8725 break;
8726 case 2:
8727 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8728 || (TYPE_MODE (TREE_TYPE (type))
8729 == TYPE_MODE (double_type_node)))
8730 return complex_long_double_type_node;
8731 break;
8732 default:
8733 gcc_unreachable ();
8734 }
8735 break;
8736 default:
8737 break;
8738 }
8739 }
8740 return NULL_TREE;
8741 }
8742 \f
8743 /* Return OP, stripped of any conversions to wider types as much as is safe.
8744 Converting the value back to OP's type makes a value equivalent to OP.
8745
8746 If FOR_TYPE is nonzero, we return a value which, if converted to
8747 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8748
8749 OP must have integer, real or enumeral type. Pointers are not allowed!
8750
8751 There are some cases where the obvious value we could return
8752 would regenerate to OP if converted to OP's type,
8753 but would not extend like OP to wider types.
8754 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8755 For example, if OP is (unsigned short)(signed char)-1,
8756 we avoid returning (signed char)-1 if FOR_TYPE is int,
8757 even though extending that to an unsigned short would regenerate OP,
8758 since the result of extending (signed char)-1 to (int)
8759 is different from (int) OP. */
8760
8761 tree
8762 get_unwidened (tree op, tree for_type)
8763 {
8764 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8765 tree type = TREE_TYPE (op);
8766 unsigned final_prec
8767 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8768 int uns
8769 = (for_type != 0 && for_type != type
8770 && final_prec > TYPE_PRECISION (type)
8771 && TYPE_UNSIGNED (type));
8772 tree win = op;
8773
8774 while (CONVERT_EXPR_P (op))
8775 {
8776 int bitschange;
8777
8778 /* TYPE_PRECISION on vector types has different meaning
8779 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8780 so avoid them here. */
8781 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8782 break;
8783
8784 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8785 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8786
8787 /* Truncations are many-one so cannot be removed, unless we are
8788 later going to truncate down even further. */
8789 if (bitschange < 0
8790 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8791 break;
8792
8793 /* See what's inside this conversion. If we decide to strip it,
8794 we will set WIN. */
8795 op = TREE_OPERAND (op, 0);
8796
8797 /* If we have not stripped any zero-extensions (uns is 0),
8798 we can strip any kind of extension.
8799 If we have previously stripped a zero-extension,
8800 only zero-extensions can safely be stripped.
8801 Any extension can be stripped if the bits it would produce
8802 are all going to be discarded later by truncating to FOR_TYPE. */
8803
8804 if (bitschange > 0)
8805 {
8806 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8807 win = op;
8808 /* TYPE_UNSIGNED says whether this is a zero-extension.
8809 Let's avoid computing it if it does not affect WIN
8810 and if UNS will not be needed again. */
8811 if ((uns
8812 || CONVERT_EXPR_P (op))
8813 && TYPE_UNSIGNED (TREE_TYPE (op)))
8814 {
8815 uns = 1;
8816 win = op;
8817 }
8818 }
8819 }
8820
8821 /* If we finally reach a constant see if it fits in for_type and
8822 in that case convert it. */
8823 if (for_type
8824 && TREE_CODE (win) == INTEGER_CST
8825 && TREE_TYPE (win) != for_type
8826 && int_fits_type_p (win, for_type))
8827 win = fold_convert (for_type, win);
8828
8829 return win;
8830 }
8831 \f
8832 /* Return OP or a simpler expression for a narrower value
8833 which can be sign-extended or zero-extended to give back OP.
8834 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8835 or 0 if the value should be sign-extended. */
8836
8837 tree
8838 get_narrower (tree op, int *unsignedp_ptr)
8839 {
8840 int uns = 0;
8841 int first = 1;
8842 tree win = op;
8843 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8844
8845 while (TREE_CODE (op) == NOP_EXPR)
8846 {
8847 int bitschange
8848 = (TYPE_PRECISION (TREE_TYPE (op))
8849 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8850
8851 /* Truncations are many-one so cannot be removed. */
8852 if (bitschange < 0)
8853 break;
8854
8855 /* See what's inside this conversion. If we decide to strip it,
8856 we will set WIN. */
8857
8858 if (bitschange > 0)
8859 {
8860 op = TREE_OPERAND (op, 0);
8861 /* An extension: the outermost one can be stripped,
8862 but remember whether it is zero or sign extension. */
8863 if (first)
8864 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8865 /* Otherwise, if a sign extension has been stripped,
8866 only sign extensions can now be stripped;
8867 if a zero extension has been stripped, only zero-extensions. */
8868 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8869 break;
8870 first = 0;
8871 }
8872 else /* bitschange == 0 */
8873 {
8874 /* A change in nominal type can always be stripped, but we must
8875 preserve the unsignedness. */
8876 if (first)
8877 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8878 first = 0;
8879 op = TREE_OPERAND (op, 0);
8880 /* Keep trying to narrow, but don't assign op to win if it
8881 would turn an integral type into something else. */
8882 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8883 continue;
8884 }
8885
8886 win = op;
8887 }
8888
8889 if (TREE_CODE (op) == COMPONENT_REF
8890 /* Since type_for_size always gives an integer type. */
8891 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8892 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8893 /* Ensure field is laid out already. */
8894 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8895 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8896 {
8897 unsigned HOST_WIDE_INT innerprec
8898 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8899 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8900 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8901 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8902
8903 /* We can get this structure field in a narrower type that fits it,
8904 but the resulting extension to its nominal type (a fullword type)
8905 must satisfy the same conditions as for other extensions.
8906
8907 Do this only for fields that are aligned (not bit-fields),
8908 because when bit-field insns will be used there is no
8909 advantage in doing this. */
8910
8911 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8912 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8913 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8914 && type != 0)
8915 {
8916 if (first)
8917 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8918 win = fold_convert (type, op);
8919 }
8920 }
8921
8922 *unsignedp_ptr = uns;
8923 return win;
8924 }
8925 \f
8926 /* Returns true if integer constant C has a value that is permissible
8927 for type TYPE (an INTEGER_TYPE). */
8928
8929 bool
8930 int_fits_type_p (const_tree c, const_tree type)
8931 {
8932 tree type_low_bound, type_high_bound;
8933 bool ok_for_low_bound, ok_for_high_bound;
8934 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8935
8936 retry:
8937 type_low_bound = TYPE_MIN_VALUE (type);
8938 type_high_bound = TYPE_MAX_VALUE (type);
8939
8940 /* If at least one bound of the type is a constant integer, we can check
8941 ourselves and maybe make a decision. If no such decision is possible, but
8942 this type is a subtype, try checking against that. Otherwise, use
8943 fits_to_tree_p, which checks against the precision.
8944
8945 Compute the status for each possibly constant bound, and return if we see
8946 one that does not match. Use ok_for_xxx_bound for this purpose: it is
8947 false when no decision can be made from that bound (it is not constant),
8948 and true when the constant is known to fit it. */
8949
8950 /* Check if c >= type_low_bound. */
8951 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8952 {
8953 if (tree_int_cst_lt (c, type_low_bound))
8954 return false;
8955 ok_for_low_bound = true;
8956 }
8957 else
8958 ok_for_low_bound = false;
8959
8960 /* Check if c <= type_high_bound. */
8961 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8962 {
8963 if (tree_int_cst_lt (type_high_bound, c))
8964 return false;
8965 ok_for_high_bound = true;
8966 }
8967 else
8968 ok_for_high_bound = false;
8969
8970 /* If the constant fits both bounds, the result is known. */
8971 if (ok_for_low_bound && ok_for_high_bound)
8972 return true;
8973
8974 /* Perform some generic filtering which may allow making a decision
8975 even if the bounds are not constant. First, negative integers
8976 never fit in unsigned types. */
8977 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8978 return false;
8979
8980 /* Second, narrower types always fit in wider ones. */
8981 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8982 return true;
8983
8984 /* Third, unsigned integers with top bit set never fit signed types. */
8985 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8986 {
8987 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8988 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8989 {
8990 /* When a tree_cst is converted to a wide-int, the precision
8991 is taken from the type. However, if the precision of the
8992 mode underneath the type is smaller than that, it is
8993 possible that the value will not fit. The test below
8994 fails if any bit is set between the sign bit of the
8995 underlying mode and the top bit of the type. */
8996 if (wi::ne_p (wi::zext (c, prec - 1), c))
8997 return false;
8998 }
8999 else if (wi::neg_p (c))
9000 return false;
9001 }
9002
9003 /* If we haven't been able to decide at this point, there is nothing more we
9004 can check ourselves here. Look at the base type if we have one and it
9005 has the same precision. */
9006 if (TREE_CODE (type) == INTEGER_TYPE
9007 && TREE_TYPE (type) != 0
9008 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9009 {
9010 type = TREE_TYPE (type);
9011 goto retry;
9012 }
9013
9014 /* Or to fits_to_tree_p, if nothing else. */
9015 return wi::fits_to_tree_p (c, type);
9016 }
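
/* Editorial usage sketch, not part of upstream tree.c: 255 fits in an
   unsigned 8-bit type while 256 and -1 do not.  The function name is
   hypothetical.  */
static bool
example_fits_uint8_p (HOST_WIDE_INT value)
{
  tree uint8 = build_nonstandard_integer_type (8, /*unsignedp=*/1);
  return int_fits_type_p (build_int_cst (integer_type_node, value), uint8);
}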
9017
9018 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9019 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9020 represented (assuming two's-complement arithmetic) within the bit
9021 precision of the type are returned instead. */
9022
9023 void
9024 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9025 {
9026 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9027 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9028 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9029 else
9030 {
9031 if (TYPE_UNSIGNED (type))
9032 mpz_set_ui (min, 0);
9033 else
9034 {
9035 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9036 wi::to_mpz (mn, min, SIGNED);
9037 }
9038 }
9039
9040 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9041 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9042 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9043 else
9044 {
9045 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9046 wi::to_mpz (mn, max, TYPE_SIGN (type));
9047 }
9048 }
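
/* Editorial usage sketch, not part of upstream tree.c: reading a type's
   static bounds into GMP integers, the way the loop optimizers consume
   them.  The function name is hypothetical.  */
static bool
example_type_bounds_sane_p (const_tree type)
{
  mpz_t lo, hi;
  bool ok;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  /* The static minimum never exceeds the static maximum.  */
  ok = (mpz_cmp (lo, hi) <= 0);
  mpz_clear (lo);
  mpz_clear (hi);
  return ok;
}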
9049
9050 /* Return true if VAR is an automatic variable defined in function FN. */
9051
9052 bool
9053 auto_var_in_fn_p (const_tree var, const_tree fn)
9054 {
9055 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9056 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9057 || TREE_CODE (var) == PARM_DECL)
9058 && ! TREE_STATIC (var))
9059 || TREE_CODE (var) == LABEL_DECL
9060 || TREE_CODE (var) == RESULT_DECL));
9061 }
9062
9063 /* Subprogram of following function. Called by walk_tree.
9064
9065 Return *TP if it is an automatic variable or parameter of the
9066 function passed in as DATA. */
9067
9068 static tree
9069 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9070 {
9071 tree fn = (tree) data;
9072
9073 if (TYPE_P (*tp))
9074 *walk_subtrees = 0;
9075
9076 else if (DECL_P (*tp)
9077 && auto_var_in_fn_p (*tp, fn))
9078 return *tp;
9079
9080 return NULL_TREE;
9081 }
9082
9083 /* Returns true if T is, contains, or refers to a type with variable
9084 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9085 arguments, but not the return type. If FN is nonzero, only return
9086 true if a modifier of the type or position of FN is a variable or
9087 parameter inside FN.
9088
9089 This concept is more general than that of C99 'variably modified types':
9090 in C99, a struct type is never variably modified because a VLA may not
9091 appear as a structure member. However, in GNU C, code like:
9092
9093 struct S { int i[f()]; };
9094
9095 is valid, and other languages may define similar constructs. */
9096
9097 bool
9098 variably_modified_type_p (tree type, tree fn)
9099 {
9100 tree t;
9101
9102 /* Test if T is either variable (if FN is zero) or an expression containing
9103 a variable in FN. If TYPE isn't gimplified, return true also if
9104 gimplify_one_sizepos would gimplify the expression into a local
9105 variable. */
9106 #define RETURN_TRUE_IF_VAR(T) \
9107 do { tree _t = (T); \
9108 if (_t != NULL_TREE \
9109 && _t != error_mark_node \
9110 && TREE_CODE (_t) != INTEGER_CST \
9111 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9112 && (!fn \
9113 || (!TYPE_SIZES_GIMPLIFIED (type) \
9114 && !is_gimple_sizepos (_t)) \
9115 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9116 return true; } while (0)
9117
9118 if (type == error_mark_node)
9119 return false;
9120
9121 /* If TYPE itself has variable size, it is variably modified. */
9122 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9123 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9124
9125 switch (TREE_CODE (type))
9126 {
9127 case POINTER_TYPE:
9128 case REFERENCE_TYPE:
9129 case VECTOR_TYPE:
9130 if (variably_modified_type_p (TREE_TYPE (type), fn))
9131 return true;
9132 break;
9133
9134 case FUNCTION_TYPE:
9135 case METHOD_TYPE:
9136 /* If TYPE is a function type, it is variably modified if the
9137 return type is variably modified. */
9138 if (variably_modified_type_p (TREE_TYPE (type), fn))
9139 return true;
9140 break;
9141
9142 case INTEGER_TYPE:
9143 case REAL_TYPE:
9144 case FIXED_POINT_TYPE:
9145 case ENUMERAL_TYPE:
9146 case BOOLEAN_TYPE:
9147 /* Scalar types are variably modified if their end points
9148 aren't constant. */
9149 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9150 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9151 break;
9152
9153 case RECORD_TYPE:
9154 case UNION_TYPE:
9155 case QUAL_UNION_TYPE:
9156 /* We can't see if any of the fields are variably-modified by the
9157 definition we normally use, since that would produce infinite
9158 recursion via pointers. */
9159 /* This is variably modified if some field's type is. */
9160 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9161 if (TREE_CODE (t) == FIELD_DECL)
9162 {
9163 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9164 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9165 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9166
9167 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9168 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9169 }
9170 break;
9171
9172 case ARRAY_TYPE:
9173 /* Do not call ourselves to avoid infinite recursion. This is
9174 variably modified if the element type is. */
9175 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9176 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9177 break;
9178
9179 default:
9180 break;
9181 }
9182
9183 /* The current language may have other cases to check, but in general,
9184 all other types are not variably modified. */
9185 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9186
9187 #undef RETURN_TRUE_IF_VAR
9188 }
9189
9190 /* Given a DECL or TYPE, return the scope in which it was declared, or
9191 NULL_TREE if there is no containing scope. */
9192
9193 tree
9194 get_containing_scope (const_tree t)
9195 {
9196 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9197 }
9198
9199 /* Return the innermost context enclosing DECL that is
9200 a FUNCTION_DECL, or zero if none. */
9201
9202 tree
9203 decl_function_context (const_tree decl)
9204 {
9205 tree context;
9206
9207 if (TREE_CODE (decl) == ERROR_MARK)
9208 return 0;
9209
9210 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9211 where we look up the function at runtime. Such functions always take
9212 a first argument of type 'pointer to real context'.
9213
9214 C++ should really be fixed to use DECL_CONTEXT for the real context,
9215 and use something else for the "virtual context". */
9216 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9217 context
9218 = TYPE_MAIN_VARIANT
9219 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9220 else
9221 context = DECL_CONTEXT (decl);
9222
9223 while (context && TREE_CODE (context) != FUNCTION_DECL)
9224 {
9225 if (TREE_CODE (context) == BLOCK)
9226 context = BLOCK_SUPERCONTEXT (context);
9227 else
9228 context = get_containing_scope (context);
9229 }
9230
9231 return context;
9232 }
9233
9234 /* Return the innermost context enclosing DECL that is
9235 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9236 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9237
9238 tree
9239 decl_type_context (const_tree decl)
9240 {
9241 tree context = DECL_CONTEXT (decl);
9242
9243 while (context)
9244 switch (TREE_CODE (context))
9245 {
9246 case NAMESPACE_DECL:
9247 case TRANSLATION_UNIT_DECL:
9248 return NULL_TREE;
9249
9250 case RECORD_TYPE:
9251 case UNION_TYPE:
9252 case QUAL_UNION_TYPE:
9253 return context;
9254
9255 case TYPE_DECL:
9256 case FUNCTION_DECL:
9257 context = DECL_CONTEXT (context);
9258 break;
9259
9260 case BLOCK:
9261 context = BLOCK_SUPERCONTEXT (context);
9262 break;
9263
9264 default:
9265 gcc_unreachable ();
9266 }
9267
9268 return NULL_TREE;
9269 }
9270
9271 /* CALL is a CALL_EXPR. Return the declaration for the function
9272 called, or NULL_TREE if the called function cannot be
9273 determined. */
9274
9275 tree
9276 get_callee_fndecl (const_tree call)
9277 {
9278 tree addr;
9279
9280 if (call == error_mark_node)
9281 return error_mark_node;
9282
9283 /* It's invalid to call this function with anything but a
9284 CALL_EXPR. */
9285 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9286
9287 /* The first operand to the CALL is the address of the function
9288 called. */
9289 addr = CALL_EXPR_FN (call);
9290
9291 /* If there is no function, return early. */
9292 if (addr == NULL_TREE)
9293 return NULL_TREE;
9294
9295 STRIP_NOPS (addr);
9296
9297 /* If this is a readonly function pointer, extract its initial value. */
9298 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9299 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9300 && DECL_INITIAL (addr))
9301 addr = DECL_INITIAL (addr);
9302
9303 /* If the address is just `&f' for some function `f', then we know
9304 that `f' is being called. */
9305 if (TREE_CODE (addr) == ADDR_EXPR
9306 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9307 return TREE_OPERAND (addr, 0);
9308
9309 /* We couldn't figure out what was being called. */
9310 return NULL_TREE;
9311 }
9312
9313 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9314 return the associated function code, otherwise return CFN_LAST. */
9315
9316 combined_fn
9317 get_call_combined_fn (const_tree call)
9318 {
9319 /* It's invalid to call this function with anything but a CALL_EXPR. */
9320 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9321
9322 if (!CALL_EXPR_FN (call))
9323 return as_combined_fn (CALL_EXPR_IFN (call));
9324
9325 tree fndecl = get_callee_fndecl (call);
9326 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9327 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9328
9329 return CFN_LAST;
9330 }
9331
9332 #define TREE_MEM_USAGE_SPACES 40
9333
9334 /* Print debugging information about tree nodes generated during the compile,
9335 and any language-specific information. */
9336
9337 void
9338 dump_tree_statistics (void)
9339 {
9340 if (GATHER_STATISTICS)
9341 {
9342 int i;
9343 int total_nodes, total_bytes;
9344 fprintf (stderr, "\nKind Nodes Bytes\n");
9345 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9346 total_nodes = total_bytes = 0;
9347 for (i = 0; i < (int) all_kinds; i++)
9348 {
9349 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9350 tree_node_counts[i], tree_node_sizes[i]);
9351 total_nodes += tree_node_counts[i];
9352 total_bytes += tree_node_sizes[i];
9353 }
9354 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9355 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9356 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9357 fprintf (stderr, "Code Nodes\n");
9358 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9359 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9360 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9361 tree_code_counts[i]);
9362 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9363 fprintf (stderr, "\n");
9364 ssanames_print_statistics ();
9365 fprintf (stderr, "\n");
9366 phinodes_print_statistics ();
9367 fprintf (stderr, "\n");
9368 }
9369 else
9370 fprintf (stderr, "(No per-node statistics)\n");
9371
9372 print_type_hash_statistics ();
9373 print_debug_expr_statistics ();
9374 print_value_expr_statistics ();
9375 lang_hooks.print_statistics ();
9376 }
9377 \f
9378 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9379
9380 /* Generate a crc32 of a byte. */
9381
9382 static unsigned
9383 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9384 {
9385 unsigned ix;
9386
9387 for (ix = bits; ix--; value <<= 1)
9388 {
9389 unsigned feedback;
9390
9391 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9392 chksum <<= 1;
9393 chksum ^= feedback;
9394 }
9395 return chksum;
9396 }
9397
9398 /* Generate a crc32 of a 32-bit unsigned. */
9399
9400 unsigned
9401 crc32_unsigned (unsigned chksum, unsigned value)
9402 {
9403 return crc32_unsigned_bits (chksum, value, 32);
9404 }
9405
9406 /* Generate a crc32 of a byte. */
9407
9408 unsigned
9409 crc32_byte (unsigned chksum, char byte)
9410 {
9411 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9412 }
9413
9414 /* Generate a crc32 of a string. */
9415
9416 unsigned
9417 crc32_string (unsigned chksum, const char *string)
9418 {
9419 do
9420 {
9421 chksum = crc32_byte (chksum, *string);
9422 }
9423 while (*string++);
9424 return chksum;
9425 }
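
/* Editorial usage sketch, not part of upstream tree.c: folding a string and
   a 32-bit value into one running checksum, the same way
   get_file_function_name below mixes a symbol name into its unique suffix.
   The function name is hypothetical.  */
static unsigned
example_checksum (const char *name, unsigned extra)
{
  unsigned chksum = crc32_string (0, name);
  return crc32_unsigned (chksum, extra);
}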
9426
9427 /* P is a string that will be used in a symbol. Mask out any characters
9428 that are not valid in that context. */
9429
9430 void
9431 clean_symbol_name (char *p)
9432 {
9433 for (; *p; p++)
9434 if (! (ISALNUM (*p)
9435 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9436 || *p == '$'
9437 #endif
9438 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9439 || *p == '.'
9440 #endif
9441 ))
9442 *p = '_';
9443 }
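
/* Editorial usage sketch, not part of upstream tree.c: sanitizing a file
   name for use inside an assembler symbol.  On hosts where neither
   NO_DOT_IN_LABEL nor NO_DOLLAR_IN_LABEL is defined, "foo-bar.c" becomes
   "foo_bar.c".  The caller frees the result; the function name is
   hypothetical.  */
static char *
example_symbol_from_file (const char *file)
{
  char *p = xstrdup (file);
  clean_symbol_name (p);
  return p;
}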
9444
9445 /* For anonymous aggregate types, we need some sort of name to
9446 hold on to. In practice, this should not appear, but it should
9447 not be harmful if it does. */
9448 bool
9449 anon_aggrname_p(const_tree id_node)
9450 {
9451 #ifndef NO_DOT_IN_LABEL
9452 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9453 && IDENTIFIER_POINTER (id_node)[1] == '_');
9454 #else /* NO_DOT_IN_LABEL */
9455 #ifndef NO_DOLLAR_IN_LABEL
9456 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9457 && IDENTIFIER_POINTER (id_node)[1] == '_');
9458 #else /* NO_DOLLAR_IN_LABEL */
9459 #define ANON_AGGRNAME_PREFIX "__anon_"
9460 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9461 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9462 #endif /* NO_DOLLAR_IN_LABEL */
9463 #endif /* NO_DOT_IN_LABEL */
9464 }
9465
9466 /* Return a format for an anonymous aggregate name. */
9467 const char *
9468 anon_aggrname_format()
9469 {
9470 #ifndef NO_DOT_IN_LABEL
9471 return "._%d";
9472 #else /* NO_DOT_IN_LABEL */
9473 #ifndef NO_DOLLAR_IN_LABEL
9474 return "$_%d";
9475 #else /* NO_DOLLAR_IN_LABEL */
9476 return "__anon_%d";
9477 #endif /* NO_DOLLAR_IN_LABEL */
9478 #endif /* NO_DOT_IN_LABEL */
9479 }
9480
9481 /* Generate a name for a special-purpose function.
9482 The generated name may need to be unique across the whole link.
9483 Changes to this function may also require corresponding changes to
9484 xstrdup_mask_random.
9485 TYPE is some string to identify the purpose of this function to the
9486 linker or collect2; it must start with an uppercase letter,
9487 one of:
9488 I - for constructors
9489 D - for destructors
9490 N - for C++ anonymous namespaces
9491 F - for DWARF unwind frame information. */
9492
9493 tree
9494 get_file_function_name (const char *type)
9495 {
9496 char *buf;
9497 const char *p;
9498 char *q;
9499
9500 /* If we already have a name we know to be unique, just use that. */
9501 if (first_global_object_name)
9502 p = q = ASTRDUP (first_global_object_name);
9503 /* If the target is handling the constructors/destructors, they
9504 will be local to this file and the name is only necessary for
9505 debugging purposes.
9506 We also assign sub_I and sub_D suffixes to constructors called from
9507 the global static constructors. These are always local. */
9508 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9509 || (strncmp (type, "sub_", 4) == 0
9510 && (type[4] == 'I' || type[4] == 'D')))
9511 {
9512 const char *file = main_input_filename;
9513 if (! file)
9514 file = LOCATION_FILE (input_location);
9515 /* Just use the file's basename, because the full pathname
9516 might be quite long. */
9517 p = q = ASTRDUP (lbasename (file));
9518 }
9519 else
9520 {
9521 /* Otherwise, the name must be unique across the entire link.
9522 We don't have anything that we know to be unique to this translation
9523 unit, so use what we do have and throw in some randomness. */
9524 unsigned len;
9525 const char *name = weak_global_object_name;
9526 const char *file = main_input_filename;
9527
9528 if (! name)
9529 name = "";
9530 if (! file)
9531 file = LOCATION_FILE (input_location);
9532
9533 len = strlen (file);
9534 q = (char *) alloca (9 + 17 + len + 1);
9535 memcpy (q, file, len + 1);
9536
9537 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9538 crc32_string (0, name), get_random_seed (false));
9539
9540 p = q;
9541 }
9542
9543 clean_symbol_name (q);
9544 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9545 + strlen (type));
9546
9547 /* Set up the name of the file-level functions we may need.
9548 Use a global object (which is already required to be unique over
9549 the program) rather than the file name (which imposes extra
9550 constraints). */
9551 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9552
9553 return get_identifier (buf);
9554 }
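
/* Illustrative note (assuming the common definition of FILE_FUNCTION_FORMAT,
"_GLOBAL__%s_%s"): when a unique global name such as "foo" has been seen,
get_file_function_name ("I") yields an identifier like "_GLOBAL__I_foo".
In the fallback case the cleaned file basename, a CRC of the weak global
name and a random seed are combined instead, so identically named files in
different directories are unlikely to collide.  */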
9555 \f
9556 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9557
9558 /* Complain that the tree code of NODE does not match any code in the
9559 expected zero-terminated list of trailing codes.  The trailing code list
9560 can be empty, for a more vague error message.  FILE, LINE, and FUNCTION
9561 are those of the caller.  */
9562
9563 void
9564 tree_check_failed (const_tree node, const char *file,
9565 int line, const char *function, ...)
9566 {
9567 va_list args;
9568 const char *buffer;
9569 unsigned length = 0;
9570 enum tree_code code;
9571
9572 va_start (args, function);
9573 while ((code = (enum tree_code) va_arg (args, int)))
9574 length += 4 + strlen (get_tree_code_name (code));
9575 va_end (args);
9576 if (length)
9577 {
9578 char *tmp;
9579 va_start (args, function);
9580 length += strlen ("expected ");
9581 buffer = tmp = (char *) alloca (length);
9582 length = 0;
9583 while ((code = (enum tree_code) va_arg (args, int)))
9584 {
9585 const char *prefix = length ? " or " : "expected ";
9586
9587 strcpy (tmp + length, prefix);
9588 length += strlen (prefix);
9589 strcpy (tmp + length, get_tree_code_name (code));
9590 length += strlen (get_tree_code_name (code));
9591 }
9592 va_end (args);
9593 }
9594 else
9595 buffer = "unexpected node";
9596
9597 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9598 buffer, get_tree_code_name (TREE_CODE (node)),
9599 function, trim_filename (file), line);
9600 }
9601
9602 /* Complain that the tree code of NODE does match one of the codes in the
9603 zero-terminated list of trailing codes, when it was expected not to.
9604 FILE, LINE, and FUNCTION are those of the caller.  */
9605
9606 void
9607 tree_not_check_failed (const_tree node, const char *file,
9608 int line, const char *function, ...)
9609 {
9610 va_list args;
9611 char *buffer;
9612 unsigned length = 0;
9613 enum tree_code code;
9614
9615 va_start (args, function);
9616 while ((code = (enum tree_code) va_arg (args, int)))
9617 length += 4 + strlen (get_tree_code_name (code));
9618 va_end (args);
9619 va_start (args, function);
9620 buffer = (char *) alloca (length);
9621 length = 0;
9622 while ((code = (enum tree_code) va_arg (args, int)))
9623 {
9624 if (length)
9625 {
9626 strcpy (buffer + length, " or ");
9627 length += 4;
9628 }
9629 strcpy (buffer + length, get_tree_code_name (code));
9630 length += strlen (get_tree_code_name (code));
9631 }
9632 va_end (args);
9633
9634 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9635 buffer, get_tree_code_name (TREE_CODE (node)),
9636 function, trim_filename (file), line);
9637 }
9638
9639 /* Similar to tree_check_failed, except that we check for a class of tree
9640 code, given in CL. */
9641
9642 void
9643 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9644 const char *file, int line, const char *function)
9645 {
9646 internal_error
9647 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9648 TREE_CODE_CLASS_STRING (cl),
9649 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9650 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9651 }
9652
9653 /* Similar to tree_check_failed, except that instead of listing each code
9654 individually, we use the fact that the codes form the contiguous range C1..C2.  */
9655
9656 void
9657 tree_range_check_failed (const_tree node, const char *file, int line,
9658 const char *function, enum tree_code c1,
9659 enum tree_code c2)
9660 {
9661 char *buffer;
9662 unsigned length = 0;
9663 unsigned int c;
9664
9665 for (c = c1; c <= c2; ++c)
9666 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9667
9668 length += strlen ("expected ");
9669 buffer = (char *) alloca (length);
9670 length = 0;
9671
9672 for (c = c1; c <= c2; ++c)
9673 {
9674 const char *prefix = length ? " or " : "expected ";
9675
9676 strcpy (buffer + length, prefix);
9677 length += strlen (prefix);
9678 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9679 length += strlen (get_tree_code_name ((enum tree_code) c));
9680 }
9681
9682 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9683 buffer, get_tree_code_name (TREE_CODE (node)),
9684 function, trim_filename (file), line);
9685 }
9686
9687
9688 /* Similar to tree_check_failed, except that we check that a tree does
9689 not belong to the specified tree code class, given in CL.  */
9690
9691 void
9692 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9693 const char *file, int line, const char *function)
9694 {
9695 internal_error
9696 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9697 TREE_CODE_CLASS_STRING (cl),
9698 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9699 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9700 }
9701
9702
9703 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9704
9705 void
9706 omp_clause_check_failed (const_tree node, const char *file, int line,
9707 const char *function, enum omp_clause_code code)
9708 {
9709 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9710 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9711 function, trim_filename (file), line);
9712 }
9713
9714
9715 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9716
9717 void
9718 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9719 const char *function, enum omp_clause_code c1,
9720 enum omp_clause_code c2)
9721 {
9722 char *buffer;
9723 unsigned length = 0;
9724 unsigned int c;
9725
9726 for (c = c1; c <= c2; ++c)
9727 length += 4 + strlen (omp_clause_code_name[c]);
9728
9729 length += strlen ("expected ");
9730 buffer = (char *) alloca (length);
9731 length = 0;
9732
9733 for (c = c1; c <= c2; ++c)
9734 {
9735 const char *prefix = length ? " or " : "expected ";
9736
9737 strcpy (buffer + length, prefix);
9738 length += strlen (prefix);
9739 strcpy (buffer + length, omp_clause_code_name[c]);
9740 length += strlen (omp_clause_code_name[c]);
9741 }
9742
9743 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9744 buffer, omp_clause_code_name[TREE_CODE (node)],
9745 function, trim_filename (file), line);
9746 }
9747
9748
9749 #undef DEFTREESTRUCT
9750 #define DEFTREESTRUCT(VAL, NAME) NAME,
9751
9752 static const char *ts_enum_names[] = {
9753 #include "treestruct.def"
9754 };
9755 #undef DEFTREESTRUCT
9756
9757 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9758
9759 /* Similar to tree_class_check_failed, except that we check whether the
9760 code of NODE contains the tree structure identified by EN.  */
9761
9762 void
9763 tree_contains_struct_check_failed (const_tree node,
9764 const enum tree_node_structure_enum en,
9765 const char *file, int line,
9766 const char *function)
9767 {
9768 internal_error
9769 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9770 TS_ENUM_NAME (en),
9771 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9772 }
9773
9774
9775 /* Similar to above, except that the check is for the bounds of an
9776 INTEGER_CST's (dynamically sized) value vector.  */
9777
9778 void
9779 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9780 const char *function)
9781 {
9782 internal_error
9783 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9784 idx + 1, len, function, trim_filename (file), line);
9785 }
9786
9787 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9788 (dynamically sized) vector. */
9789
9790 void
9791 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9792 const char *function)
9793 {
9794 internal_error
9795 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9796 idx + 1, len, function, trim_filename (file), line);
9797 }
9798
9799 /* Similar to above, except that the check is for the bounds of the operand
9800 vector of an expression node EXP. */
9801
9802 void
9803 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9804 int line, const char *function)
9805 {
9806 enum tree_code code = TREE_CODE (exp);
9807 internal_error
9808 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9809 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9810 function, trim_filename (file), line);
9811 }
9812
9813 /* Similar to above, except that the check is for the number of
9814 operands of an OMP_CLAUSE node. */
9815
9816 void
9817 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9818 int line, const char *function)
9819 {
9820 internal_error
9821 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9822 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9823 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9824 trim_filename (file), line);
9825 }
9826 #endif /* ENABLE_TREE_CHECKING */
9827 \f
9828 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9829 and mapped to the machine mode MODE. Initialize its fields and build
9830 the information necessary for debugging output. */
9831
9832 static tree
9833 make_vector_type (tree innertype, int nunits, machine_mode mode)
9834 {
9835 tree t;
9836 inchash::hash hstate;
9837 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9838
9839 t = make_node (VECTOR_TYPE);
9840 TREE_TYPE (t) = mv_innertype;
9841 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9842 SET_TYPE_MODE (t, mode);
9843
9844 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9845 SET_TYPE_STRUCTURAL_EQUALITY (t);
9846 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9847 || mode != VOIDmode)
9848 && !VECTOR_BOOLEAN_TYPE_P (t))
9849 TYPE_CANONICAL (t)
9850 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9851
9852 layout_type (t);
9853
9854 hstate.add_wide_int (VECTOR_TYPE);
9855 hstate.add_wide_int (nunits);
9856 hstate.add_wide_int (mode);
9857 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9858 t = type_hash_canon (hstate.end (), t);
9859
9860 /* We have built a main variant, based on the main variant of the
9861 inner type. Use it to build the variant we return. */
9862 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9863 && TREE_TYPE (t) != innertype)
9864 return build_type_attribute_qual_variant (t,
9865 TYPE_ATTRIBUTES (innertype),
9866 TYPE_QUALS (innertype));
9867
9868 return t;
9869 }
9870
9871 static tree
9872 make_or_reuse_type (unsigned size, int unsignedp)
9873 {
9874 int i;
9875
9876 if (size == INT_TYPE_SIZE)
9877 return unsignedp ? unsigned_type_node : integer_type_node;
9878 if (size == CHAR_TYPE_SIZE)
9879 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9880 if (size == SHORT_TYPE_SIZE)
9881 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9882 if (size == LONG_TYPE_SIZE)
9883 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9884 if (size == LONG_LONG_TYPE_SIZE)
9885 return (unsignedp ? long_long_unsigned_type_node
9886 : long_long_integer_type_node);
9887
9888 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9889 if (size == int_n_data[i].bitsize
9890 && int_n_enabled_p[i])
9891 return (unsignedp ? int_n_trees[i].unsigned_type
9892 : int_n_trees[i].signed_type);
9893
9894 if (unsignedp)
9895 return make_unsigned_type (size);
9896 else
9897 return make_signed_type (size);
9898 }
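
/* For illustration only, assuming a target where INT_TYPE_SIZE is 32,
LONG_TYPE_SIZE is 64 and no __intN type matches:

   make_or_reuse_type (32, 1);   returns unsigned_type_node
   make_or_reuse_type (64, 0);   returns long_integer_type_node
   make_or_reuse_type (24, 0);   falls through to make_signed_type (24)

The checks are ordered, so the first matching standard C type wins.  */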
9899
9900 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9901
9902 static tree
9903 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9904 {
9905 if (satp)
9906 {
9907 if (size == SHORT_FRACT_TYPE_SIZE)
9908 return unsignedp ? sat_unsigned_short_fract_type_node
9909 : sat_short_fract_type_node;
9910 if (size == FRACT_TYPE_SIZE)
9911 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9912 if (size == LONG_FRACT_TYPE_SIZE)
9913 return unsignedp ? sat_unsigned_long_fract_type_node
9914 : sat_long_fract_type_node;
9915 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9916 return unsignedp ? sat_unsigned_long_long_fract_type_node
9917 : sat_long_long_fract_type_node;
9918 }
9919 else
9920 {
9921 if (size == SHORT_FRACT_TYPE_SIZE)
9922 return unsignedp ? unsigned_short_fract_type_node
9923 : short_fract_type_node;
9924 if (size == FRACT_TYPE_SIZE)
9925 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9926 if (size == LONG_FRACT_TYPE_SIZE)
9927 return unsignedp ? unsigned_long_fract_type_node
9928 : long_fract_type_node;
9929 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9930 return unsignedp ? unsigned_long_long_fract_type_node
9931 : long_long_fract_type_node;
9932 }
9933
9934 return make_fract_type (size, unsignedp, satp);
9935 }
9936
9937 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9938
9939 static tree
9940 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9941 {
9942 if (satp)
9943 {
9944 if (size == SHORT_ACCUM_TYPE_SIZE)
9945 return unsignedp ? sat_unsigned_short_accum_type_node
9946 : sat_short_accum_type_node;
9947 if (size == ACCUM_TYPE_SIZE)
9948 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9949 if (size == LONG_ACCUM_TYPE_SIZE)
9950 return unsignedp ? sat_unsigned_long_accum_type_node
9951 : sat_long_accum_type_node;
9952 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9953 return unsignedp ? sat_unsigned_long_long_accum_type_node
9954 : sat_long_long_accum_type_node;
9955 }
9956 else
9957 {
9958 if (size == SHORT_ACCUM_TYPE_SIZE)
9959 return unsignedp ? unsigned_short_accum_type_node
9960 : short_accum_type_node;
9961 if (size == ACCUM_TYPE_SIZE)
9962 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9963 if (size == LONG_ACCUM_TYPE_SIZE)
9964 return unsignedp ? unsigned_long_accum_type_node
9965 : long_accum_type_node;
9966 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9967 return unsignedp ? unsigned_long_long_accum_type_node
9968 : long_long_accum_type_node;
9969 }
9970
9971 return make_accum_type (size, unsignedp, satp);
9972 }
9973
9974
9975 /* Create an atomic variant node for TYPE. This routine is called
9976 during initialization of data types to create the 5 basic atomic
9977 types. The generic build_variant_type function requires these to
9978 already be set up in order to function properly, so it cannot be
9979 called from there.  If ALIGN is non-zero, then ensure the alignment is
9980 overridden to this value. */
9981
9982 static tree
9983 build_atomic_base (tree type, unsigned int align)
9984 {
9985 tree t;
9986
9987 /* Make sure it's not already registered.  */
9988 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9989 return t;
9990
9991 t = build_variant_type_copy (type);
9992 set_type_quals (t, TYPE_QUAL_ATOMIC);
9993
9994 if (align)
9995 TYPE_ALIGN (t) = align;
9996
9997 return t;
9998 }
9999
10000 /* Create nodes for all integer types (and error_mark_node) using the sizes
10001 of C datatypes. SIGNED_CHAR specifies whether char is signed,
10002 SHORT_DOUBLE specifies whether double should be of the same precision
10003 as float. */
10004
10005 void
10006 build_common_tree_nodes (bool signed_char, bool short_double)
10007 {
10008 int i;
10009
10010 error_mark_node = make_node (ERROR_MARK);
10011 TREE_TYPE (error_mark_node) = error_mark_node;
10012
10013 initialize_sizetypes ();
10014
10015 /* Define both `signed char' and `unsigned char'. */
10016 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10017 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10018 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10019 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10020
10021 /* Define `char', which is like either `signed char' or `unsigned char'
10022 but not the same as either. */
10023 char_type_node
10024 = (signed_char
10025 ? make_signed_type (CHAR_TYPE_SIZE)
10026 : make_unsigned_type (CHAR_TYPE_SIZE));
10027 TYPE_STRING_FLAG (char_type_node) = 1;
10028
10029 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10030 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10031 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10032 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10033 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10034 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10035 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10036 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10037
10038 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10039 {
10040 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10041 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10042 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10043 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10044
10045 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10046 && int_n_enabled_p[i])
10047 {
10048 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10049 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10050 }
10051 }
10052
10053 /* Define a boolean type. This type only represents boolean values but
10054 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10055 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10056 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10057 TYPE_PRECISION (boolean_type_node) = 1;
10058 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10059
10060 /* Define what type to use for size_t. */
10061 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10062 size_type_node = unsigned_type_node;
10063 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10064 size_type_node = long_unsigned_type_node;
10065 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10066 size_type_node = long_long_unsigned_type_node;
10067 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10068 size_type_node = short_unsigned_type_node;
10069 else
10070 {
10071 int i;
10072
10073 size_type_node = NULL_TREE;
10074 for (i = 0; i < NUM_INT_N_ENTS; i++)
10075 if (int_n_enabled_p[i])
10076 {
10077 char name[50];
10078 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10079
10080 if (strcmp (name, SIZE_TYPE) == 0)
10081 {
10082 size_type_node = int_n_trees[i].unsigned_type;
10083 }
10084 }
10085 if (size_type_node == NULL_TREE)
10086 gcc_unreachable ();
10087 }
10088
10089 /* Fill in the rest of the sized types. Reuse existing type nodes
10090 when possible. */
10091 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10092 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10093 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10094 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10095 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10096
10097 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10098 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10099 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10100 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10101 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10102
10103 /* Don't call build_qualified_type for atomics.  That routine does
10104 special processing for atomics, and until they are initialized
10105 it's better not to make that call.
10106
10107 Check to see if there is a target override for atomic types. */
10108
10109 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10110 targetm.atomic_align_for_mode (QImode));
10111 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10112 targetm.atomic_align_for_mode (HImode));
10113 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10114 targetm.atomic_align_for_mode (SImode));
10115 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10116 targetm.atomic_align_for_mode (DImode));
10117 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10118 targetm.atomic_align_for_mode (TImode));
10119
10120 access_public_node = get_identifier ("public");
10121 access_protected_node = get_identifier ("protected");
10122 access_private_node = get_identifier ("private");
10123
10124 /* Define these next since types below may use them.  */
10125 integer_zero_node = build_int_cst (integer_type_node, 0);
10126 integer_one_node = build_int_cst (integer_type_node, 1);
10127 integer_three_node = build_int_cst (integer_type_node, 3);
10128 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10129
10130 size_zero_node = size_int (0);
10131 size_one_node = size_int (1);
10132 bitsize_zero_node = bitsize_int (0);
10133 bitsize_one_node = bitsize_int (1);
10134 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10135
10136 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10137 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10138
10139 void_type_node = make_node (VOID_TYPE);
10140 layout_type (void_type_node);
10141
10142 pointer_bounds_type_node = targetm.chkp_bound_type ();
10143
10144 /* We are not going to have real types in C with less than byte alignment,
10145 so we might as well not have any types that claim to have it. */
10146 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10147 TYPE_USER_ALIGN (void_type_node) = 0;
10148
10149 void_node = make_node (VOID_CST);
10150 TREE_TYPE (void_node) = void_type_node;
10151
10152 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10153 layout_type (TREE_TYPE (null_pointer_node));
10154
10155 ptr_type_node = build_pointer_type (void_type_node);
10156 const_ptr_type_node
10157 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10158 fileptr_type_node = ptr_type_node;
10159
10160 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10161
10162 float_type_node = make_node (REAL_TYPE);
10163 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10164 layout_type (float_type_node);
10165
10166 double_type_node = make_node (REAL_TYPE);
10167 if (short_double)
10168 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
10169 else
10170 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10171 layout_type (double_type_node);
10172
10173 long_double_type_node = make_node (REAL_TYPE);
10174 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10175 layout_type (long_double_type_node);
10176
10177 float_ptr_type_node = build_pointer_type (float_type_node);
10178 double_ptr_type_node = build_pointer_type (double_type_node);
10179 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10180 integer_ptr_type_node = build_pointer_type (integer_type_node);
10181
10182 /* Fixed size integer types. */
10183 uint16_type_node = make_or_reuse_type (16, 1);
10184 uint32_type_node = make_or_reuse_type (32, 1);
10185 uint64_type_node = make_or_reuse_type (64, 1);
10186
10187 /* Decimal float types. */
10188 dfloat32_type_node = make_node (REAL_TYPE);
10189 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10190 layout_type (dfloat32_type_node);
10191 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10192 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10193
10194 dfloat64_type_node = make_node (REAL_TYPE);
10195 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10196 layout_type (dfloat64_type_node);
10197 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10198 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10199
10200 dfloat128_type_node = make_node (REAL_TYPE);
10201 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10202 layout_type (dfloat128_type_node);
10203 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10204 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10205
10206 complex_integer_type_node = build_complex_type (integer_type_node);
10207 complex_float_type_node = build_complex_type (float_type_node);
10208 complex_double_type_node = build_complex_type (double_type_node);
10209 complex_long_double_type_node = build_complex_type (long_double_type_node);
10210
10211 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10212 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10213 sat_ ## KIND ## _type_node = \
10214 make_sat_signed_ ## KIND ## _type (SIZE); \
10215 sat_unsigned_ ## KIND ## _type_node = \
10216 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10217 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10218 unsigned_ ## KIND ## _type_node = \
10219 make_unsigned_ ## KIND ## _type (SIZE);
10220
10221 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10222 sat_ ## WIDTH ## KIND ## _type_node = \
10223 make_sat_signed_ ## KIND ## _type (SIZE); \
10224 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10225 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10226 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10227 unsigned_ ## WIDTH ## KIND ## _type_node = \
10228 make_unsigned_ ## KIND ## _type (SIZE);
10229
10230 /* Make fixed-point type nodes based on four different widths. */
10231 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10232 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10233 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10234 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10235 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10236
10237 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10238 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10239 NAME ## _type_node = \
10240 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10241 u ## NAME ## _type_node = \
10242 make_or_reuse_unsigned_ ## KIND ## _type \
10243 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10244 sat_ ## NAME ## _type_node = \
10245 make_or_reuse_sat_signed_ ## KIND ## _type \
10246 (GET_MODE_BITSIZE (MODE ## mode)); \
10247 sat_u ## NAME ## _type_node = \
10248 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10249 (GET_MODE_BITSIZE (U ## MODE ## mode));
10250
10251 /* Fixed-point type and mode nodes. */
10252 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10253 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10254 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10255 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10256 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10257 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10258 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10259 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10260 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10261 MAKE_FIXED_MODE_NODE (accum, da, DA)
10262 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10263
10264 {
10265 tree t = targetm.build_builtin_va_list ();
10266
10267 /* Many back-ends define record types without setting TYPE_NAME.
10268 If we copied the record type here, we'd keep the original
10269 record type without a name. This breaks name mangling. So,
10270 don't copy record types and let c_common_nodes_and_builtins()
10271 declare the type to be __builtin_va_list. */
10272 if (TREE_CODE (t) != RECORD_TYPE)
10273 t = build_variant_type_copy (t);
10274
10275 va_list_type_node = t;
10276 }
10277 }
10278
10279 /* Modify DECL for given flags.
10280 TM_PURE attribute is set only on types, so the function will modify
10281 DECL's type when ECF_TM_PURE is used. */
10282
10283 void
10284 set_call_expr_flags (tree decl, int flags)
10285 {
10286 if (flags & ECF_NOTHROW)
10287 TREE_NOTHROW (decl) = 1;
10288 if (flags & ECF_CONST)
10289 TREE_READONLY (decl) = 1;
10290 if (flags & ECF_PURE)
10291 DECL_PURE_P (decl) = 1;
10292 if (flags & ECF_LOOPING_CONST_OR_PURE)
10293 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10294 if (flags & ECF_NOVOPS)
10295 DECL_IS_NOVOPS (decl) = 1;
10296 if (flags & ECF_NORETURN)
10297 TREE_THIS_VOLATILE (decl) = 1;
10298 if (flags & ECF_MALLOC)
10299 DECL_IS_MALLOC (decl) = 1;
10300 if (flags & ECF_RETURNS_TWICE)
10301 DECL_IS_RETURNS_TWICE (decl) = 1;
10302 if (flags & ECF_LEAF)
10303 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10304 NULL, DECL_ATTRIBUTES (decl));
10305 if ((flags & ECF_TM_PURE) && flag_tm)
10306 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10307 /* Looping const or pure is implied by noreturn.
10308 There is currently no way to declare looping const or looping pure alone. */
10309 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10310 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10311 }
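
/* A minimal usage sketch (DECL here is a hypothetical FUNCTION_DECL for a
runtime routine that never returns):

   set_call_expr_flags (decl, ECF_NORETURN | ECF_NOTHROW | ECF_LEAF);

This sets TREE_THIS_VOLATILE and TREE_NOTHROW on DECL and attaches the
"leaf" attribute, exactly as local_define_builtin below does when it is
given the same flags.  */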
10312
10313
10314 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10315
10316 static void
10317 local_define_builtin (const char *name, tree type, enum built_in_function code,
10318 const char *library_name, int ecf_flags)
10319 {
10320 tree decl;
10321
10322 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10323 library_name, NULL_TREE);
10324 set_call_expr_flags (decl, ecf_flags);
10325
10326 set_builtin_decl (code, decl, true);
10327 }
10328
10329 /* Call this function after instantiating all builtins that the language
10330 front end cares about. This will build the rest of the builtins
10331 and internal functions that are relied upon by the tree optimizers and
10332 the middle-end. */
10333
10334 void
10335 build_common_builtin_nodes (void)
10336 {
10337 tree tmp, ftype;
10338 int ecf_flags;
10339
10340 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10341 {
10342 ftype = build_function_type (void_type_node, void_list_node);
10343 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10344 "__builtin_unreachable",
10345 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10346 | ECF_CONST);
10347 }
10348
10349 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10350 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10351 {
10352 ftype = build_function_type_list (ptr_type_node,
10353 ptr_type_node, const_ptr_type_node,
10354 size_type_node, NULL_TREE);
10355
10356 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10357 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10358 "memcpy", ECF_NOTHROW | ECF_LEAF);
10359 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10360 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10361 "memmove", ECF_NOTHROW | ECF_LEAF);
10362 }
10363
10364 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10365 {
10366 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10367 const_ptr_type_node, size_type_node,
10368 NULL_TREE);
10369 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10370 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10371 }
10372
10373 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10374 {
10375 ftype = build_function_type_list (ptr_type_node,
10376 ptr_type_node, integer_type_node,
10377 size_type_node, NULL_TREE);
10378 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10379 "memset", ECF_NOTHROW | ECF_LEAF);
10380 }
10381
10382 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10383 {
10384 ftype = build_function_type_list (ptr_type_node,
10385 size_type_node, NULL_TREE);
10386 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10387 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10388 }
10389
10390 ftype = build_function_type_list (ptr_type_node, size_type_node,
10391 size_type_node, NULL_TREE);
10392 local_define_builtin ("__builtin_alloca_with_align", ftype,
10393 BUILT_IN_ALLOCA_WITH_ALIGN,
10394 "__builtin_alloca_with_align",
10395 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10396
10397 /* If we're checking the stack, `alloca' can throw. */
10398 if (flag_stack_check)
10399 {
10400 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10401 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10402 }
10403
10404 ftype = build_function_type_list (void_type_node,
10405 ptr_type_node, ptr_type_node,
10406 ptr_type_node, NULL_TREE);
10407 local_define_builtin ("__builtin_init_trampoline", ftype,
10408 BUILT_IN_INIT_TRAMPOLINE,
10409 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10410 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10411 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10412 "__builtin_init_heap_trampoline",
10413 ECF_NOTHROW | ECF_LEAF);
10414
10415 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10416 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10417 BUILT_IN_ADJUST_TRAMPOLINE,
10418 "__builtin_adjust_trampoline",
10419 ECF_CONST | ECF_NOTHROW);
10420
10421 ftype = build_function_type_list (void_type_node,
10422 ptr_type_node, ptr_type_node, NULL_TREE);
10423 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10424 BUILT_IN_NONLOCAL_GOTO,
10425 "__builtin_nonlocal_goto",
10426 ECF_NORETURN | ECF_NOTHROW);
10427
10428 ftype = build_function_type_list (void_type_node,
10429 ptr_type_node, ptr_type_node, NULL_TREE);
10430 local_define_builtin ("__builtin_setjmp_setup", ftype,
10431 BUILT_IN_SETJMP_SETUP,
10432 "__builtin_setjmp_setup", ECF_NOTHROW);
10433
10434 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10435 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10436 BUILT_IN_SETJMP_RECEIVER,
10437 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10438
10439 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10440 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10441 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10442
10443 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10444 local_define_builtin ("__builtin_stack_restore", ftype,
10445 BUILT_IN_STACK_RESTORE,
10446 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10447
10448 /* If there's a possibility that we might use the ARM EABI, build the
10449 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10450 if (targetm.arm_eabi_unwinder)
10451 {
10452 ftype = build_function_type_list (void_type_node, NULL_TREE);
10453 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10454 BUILT_IN_CXA_END_CLEANUP,
10455 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10456 }
10457
10458 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10459 local_define_builtin ("__builtin_unwind_resume", ftype,
10460 BUILT_IN_UNWIND_RESUME,
10461 ((targetm_common.except_unwind_info (&global_options)
10462 == UI_SJLJ)
10463 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10464 ECF_NORETURN);
10465
10466 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10467 {
10468 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10469 NULL_TREE);
10470 local_define_builtin ("__builtin_return_address", ftype,
10471 BUILT_IN_RETURN_ADDRESS,
10472 "__builtin_return_address",
10473 ECF_NOTHROW);
10474 }
10475
10476 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10477 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10478 {
10479 ftype = build_function_type_list (void_type_node, ptr_type_node,
10480 ptr_type_node, NULL_TREE);
10481 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10482 local_define_builtin ("__cyg_profile_func_enter", ftype,
10483 BUILT_IN_PROFILE_FUNC_ENTER,
10484 "__cyg_profile_func_enter", 0);
10485 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10486 local_define_builtin ("__cyg_profile_func_exit", ftype,
10487 BUILT_IN_PROFILE_FUNC_EXIT,
10488 "__cyg_profile_func_exit", 0);
10489 }
10490
10491 /* The exception object and filter values from the runtime. The argument
10492 must be zero before exception lowering, i.e. from the front end. After
10493 exception lowering, it will be the region number for the exception
10494 landing pad. These functions are PURE instead of CONST to prevent
10495 them from being hoisted past the exception edge that will initialize
10496 their value in the landing pad.  */
10497 ftype = build_function_type_list (ptr_type_node,
10498 integer_type_node, NULL_TREE);
10499 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10500 /* Only use TM_PURE if we have TM language support. */
10501 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10502 ecf_flags |= ECF_TM_PURE;
10503 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10504 "__builtin_eh_pointer", ecf_flags);
10505
10506 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10507 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10508 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10509 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10510
10511 ftype = build_function_type_list (void_type_node,
10512 integer_type_node, integer_type_node,
10513 NULL_TREE);
10514 local_define_builtin ("__builtin_eh_copy_values", ftype,
10515 BUILT_IN_EH_COPY_VALUES,
10516 "__builtin_eh_copy_values", ECF_NOTHROW);
10517
10518 /* Complex multiplication and division. These are handled as builtins
10519 rather than optabs because emit_library_call_value doesn't support
10520 complex. Further, we can do slightly better with folding these
10521 beasties if the real and imaginary parts of the arguments are separate.  */
10522 {
10523 int mode;
10524
10525 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10526 {
10527 char mode_name_buf[4], *q;
10528 const char *p;
10529 enum built_in_function mcode, dcode;
10530 tree type, inner_type;
10531 const char *prefix = "__";
10532
10533 if (targetm.libfunc_gnu_prefix)
10534 prefix = "__gnu_";
10535
10536 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10537 if (type == NULL)
10538 continue;
10539 inner_type = TREE_TYPE (type);
10540
10541 ftype = build_function_type_list (type, inner_type, inner_type,
10542 inner_type, inner_type, NULL_TREE);
10543
10544 mcode = ((enum built_in_function)
10545 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10546 dcode = ((enum built_in_function)
10547 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10548
10549 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10550 *q = TOLOWER (*p);
10551 *q = '\0';
10552
10553 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10554 NULL);
10555 local_define_builtin (built_in_names[mcode], ftype, mcode,
10556 built_in_names[mcode],
10557 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10558
10559 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10560 NULL);
10561 local_define_builtin (built_in_names[dcode], ftype, dcode,
10562 built_in_names[dcode],
10563 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10564 }
10565 }
10566
10567 init_internal_fns ();
10568 }
10569
10570 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10571 better way.
10572
10573 If we requested a pointer to a vector, build up the pointers that
10574 we stripped off while looking for the inner type. Similarly for
10575 return values from functions.
10576
10577 The argument TYPE is the top of the chain, and BOTTOM is the
10578 new type which we will point to. */
10579
10580 tree
10581 reconstruct_complex_type (tree type, tree bottom)
10582 {
10583 tree inner, outer;
10584
10585 if (TREE_CODE (type) == POINTER_TYPE)
10586 {
10587 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10588 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10589 TYPE_REF_CAN_ALIAS_ALL (type));
10590 }
10591 else if (TREE_CODE (type) == REFERENCE_TYPE)
10592 {
10593 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10594 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10595 TYPE_REF_CAN_ALIAS_ALL (type));
10596 }
10597 else if (TREE_CODE (type) == ARRAY_TYPE)
10598 {
10599 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10600 outer = build_array_type (inner, TYPE_DOMAIN (type));
10601 }
10602 else if (TREE_CODE (type) == FUNCTION_TYPE)
10603 {
10604 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10605 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10606 }
10607 else if (TREE_CODE (type) == METHOD_TYPE)
10608 {
10609 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10610 /* The build_method_type_directly () routine prepends 'this' to the
10611 argument list, so we must compensate by getting rid of it here.  */
10612 outer
10613 = build_method_type_directly
10614 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10615 inner,
10616 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10617 }
10618 else if (TREE_CODE (type) == OFFSET_TYPE)
10619 {
10620 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10621 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10622 }
10623 else
10624 return bottom;
10625
10626 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10627 TYPE_QUALS (type));
10628 }
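
/* For instance (illustrative): if TYPE is "float *" and BOTTOM is a vector
type, the result is a pointer to that vector type; the pointer, reference,
array, function, method and offset layers of TYPE are rebuilt around
BOTTOM, and any other TYPE is simply replaced by BOTTOM.  */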
10629
10630 /* Returns a vector tree node given a vector or integer machine mode MODE
10631 and the inner type INNERTYPE.  */
10632 tree
10633 build_vector_type_for_mode (tree innertype, machine_mode mode)
10634 {
10635 int nunits;
10636
10637 switch (GET_MODE_CLASS (mode))
10638 {
10639 case MODE_VECTOR_INT:
10640 case MODE_VECTOR_FLOAT:
10641 case MODE_VECTOR_FRACT:
10642 case MODE_VECTOR_UFRACT:
10643 case MODE_VECTOR_ACCUM:
10644 case MODE_VECTOR_UACCUM:
10645 nunits = GET_MODE_NUNITS (mode);
10646 break;
10647
10648 case MODE_INT:
10649 /* Check that there are no leftover bits. */
10650 gcc_assert (GET_MODE_BITSIZE (mode)
10651 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10652
10653 nunits = GET_MODE_BITSIZE (mode)
10654 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10655 break;
10656
10657 default:
10658 gcc_unreachable ();
10659 }
10660
10661 return make_vector_type (innertype, nunits, mode);
10662 }
10663
10664 /* Similarly, but takes the inner type and number of units, which must be
10665 a power of two. */
10666
10667 tree
10668 build_vector_type (tree innertype, int nunits)
10669 {
10670 return make_vector_type (innertype, nunits, VOIDmode);
10671 }
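
/* For example (illustrative):

   tree v4si = build_vector_type (intSI_type_node, 4);

builds a vector of four 32-bit integers; layout_type picks the machine
mode, typically V4SImode on targets that support it and BLKmode
otherwise.  */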
10672
10673 /* Build a truth (boolean) vector type with NUNITS units, sized to mask a vector of VECTOR_SIZE bytes.  */
10674
10675 tree
10676 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10677 {
10678 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10679 vector_size);
10680
10681 gcc_assert (mask_mode != VOIDmode);
10682
10683 unsigned HOST_WIDE_INT vsize;
10684 if (mask_mode == BLKmode)
10685 vsize = vector_size * BITS_PER_UNIT;
10686 else
10687 vsize = GET_MODE_BITSIZE (mask_mode);
10688
10689 unsigned HOST_WIDE_INT esize = vsize / nunits;
10690 gcc_assert (esize * nunits == vsize);
10691
10692 tree bool_type = build_nonstandard_boolean_type (esize);
10693
10694 return make_vector_type (bool_type, nunits, mask_mode);
10695 }
10696
10697 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10698
10699 tree
10700 build_same_sized_truth_vector_type (tree vectype)
10701 {
10702 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10703 return vectype;
10704
10705 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10706
10707 if (!size)
10708 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10709
10710 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10711 }
10712
10713 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10714
10715 tree
10716 build_opaque_vector_type (tree innertype, int nunits)
10717 {
10718 tree t = make_vector_type (innertype, nunits, VOIDmode);
10719 tree cand;
10720 /* We always build the non-opaque variant before the opaque one,
10721 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10722 cand = TYPE_NEXT_VARIANT (t);
10723 if (cand
10724 && TYPE_VECTOR_OPAQUE (cand)
10725 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10726 return cand;
10727 /* Otherwise build a variant type and make sure to queue it after
10728 the non-opaque type. */
10729 cand = build_distinct_type_copy (t);
10730 TYPE_VECTOR_OPAQUE (cand) = true;
10731 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10732 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10733 TYPE_NEXT_VARIANT (t) = cand;
10734 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10735 return cand;
10736 }
10737
10738
10739 /* Given an initializer INIT, return TRUE if INIT is zero or some
10740 aggregate of zeros. Otherwise return FALSE. */
10741 bool
10742 initializer_zerop (const_tree init)
10743 {
10744 tree elt;
10745
10746 STRIP_NOPS (init);
10747
10748 switch (TREE_CODE (init))
10749 {
10750 case INTEGER_CST:
10751 return integer_zerop (init);
10752
10753 case REAL_CST:
10754 /* ??? Note that this is not correct for C4X float formats. There,
10755 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10756 negative exponent. */
10757 return real_zerop (init)
10758 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10759
10760 case FIXED_CST:
10761 return fixed_zerop (init);
10762
10763 case COMPLEX_CST:
10764 return integer_zerop (init)
10765 || (real_zerop (init)
10766 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10767 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10768
10769 case VECTOR_CST:
10770 {
10771 unsigned i;
10772 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10773 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10774 return false;
10775 return true;
10776 }
10777
10778 case CONSTRUCTOR:
10779 {
10780 unsigned HOST_WIDE_INT idx;
10781
10782 if (TREE_CLOBBER_P (init))
10783 return false;
10784 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10785 if (!initializer_zerop (elt))
10786 return false;
10787 return true;
10788 }
10789
10790 case STRING_CST:
10791 {
10792 int i;
10793
10794 /* We need to loop through all elements to handle cases like
10795 "\0" and "\0foobar". */
10796 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10797 if (TREE_STRING_POINTER (init)[i] != '\0')
10798 return false;
10799
10800 return true;
10801 }
10802
10803 default:
10804 return false;
10805 }
10806 }
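
/* Illustrative examples: the C initializers 0, 0.0, (struct s){ 0 } and
the string "\0\0" all count as zero here, whereas -0.0 and a CONSTRUCTOR
marked as a clobber do not.  */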
10807
10808 /* Check whether vector VEC consists entirely of equal elements and
10809 that the number of elements corresponds to the type of VEC.
10810 The function returns the first element of the vector,
10811 or NULL_TREE if the vector is not uniform.  */
10812 tree
10813 uniform_vector_p (const_tree vec)
10814 {
10815 tree first, t;
10816 unsigned i;
10817
10818 if (vec == NULL_TREE)
10819 return NULL_TREE;
10820
10821 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10822
10823 if (TREE_CODE (vec) == VECTOR_CST)
10824 {
10825 first = VECTOR_CST_ELT (vec, 0);
10826 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10827 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10828 return NULL_TREE;
10829
10830 return first;
10831 }
10832
10833 else if (TREE_CODE (vec) == CONSTRUCTOR)
10834 {
10835 first = error_mark_node;
10836
10837 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10838 {
10839 if (i == 0)
10840 {
10841 first = t;
10842 continue;
10843 }
10844 if (!operand_equal_p (first, t, 0))
10845 return NULL_TREE;
10846 }
10847 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10848 return NULL_TREE;
10849
10850 return first;
10851 }
10852
10853 return NULL_TREE;
10854 }
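
/* For example (illustrative): for a VECTOR_CST { 3, 3, 3, 3 } this returns
the INTEGER_CST 3, while { 3, 3, 3, 0 } and a CONSTRUCTOR with fewer
elements than TYPE_VECTOR_SUBPARTS both yield NULL_TREE.  */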
10855
10856 /* Build an empty statement at location LOC. */
10857
10858 tree
10859 build_empty_stmt (location_t loc)
10860 {
10861 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10862 SET_EXPR_LOCATION (t, loc);
10863 return t;
10864 }
10865
10866
10867 /* Build an OpenMP clause with code CODE. LOC is the location of the
10868 clause. */
10869
10870 tree
10871 build_omp_clause (location_t loc, enum omp_clause_code code)
10872 {
10873 tree t;
10874 int size, length;
10875
10876 length = omp_clause_num_ops[code];
10877 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10878
10879 record_node_allocation_statistics (OMP_CLAUSE, size);
10880
10881 t = (tree) ggc_internal_alloc (size);
10882 memset (t, 0, size);
10883 TREE_SET_CODE (t, OMP_CLAUSE);
10884 OMP_CLAUSE_SET_CODE (t, code);
10885 OMP_CLAUSE_LOCATION (t) = loc;
10886
10887 return t;
10888 }
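
/* A small usage sketch (DECL and LIST are hypothetical trees): a private
clause is typically built as

   tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
   OMP_CLAUSE_DECL (c) = decl;
   OMP_CLAUSE_CHAIN (c) = list;

with omp_clause_num_ops supplying the operand count for each clause code.  */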
10889
10890 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10891 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10892 Except for the CODE and operand count field, other storage for the
10893 object is initialized to zeros. */
10894
10895 tree
10896 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10897 {
10898 tree t;
10899 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10900
10901 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10902 gcc_assert (len >= 1);
10903
10904 record_node_allocation_statistics (code, length);
10905
10906 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10907
10908 TREE_SET_CODE (t, code);
10909
10910 /* Can't use TREE_OPERAND to store the length because if checking is
10911 enabled, it will try to check the length before we store it. :-P */
10912 t->exp.operands[0] = build_int_cst (sizetype, len);
10913
10914 return t;
10915 }
10916
10917 /* Helper function for build_call_* functions; build a CALL_EXPR with
10918 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10919 the argument slots. */
10920
10921 static tree
10922 build_call_1 (tree return_type, tree fn, int nargs)
10923 {
10924 tree t;
10925
10926 t = build_vl_exp (CALL_EXPR, nargs + 3);
10927 TREE_TYPE (t) = return_type;
10928 CALL_EXPR_FN (t) = fn;
10929 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10930
10931 return t;
10932 }
10933
10934 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10935 FN and a null static chain slot. NARGS is the number of call arguments
10936 which are specified as "..." arguments. */
10937
10938 tree
10939 build_call_nary (tree return_type, tree fn, int nargs, ...)
10940 {
10941 tree ret;
10942 va_list args;
10943 va_start (args, nargs);
10944 ret = build_call_valist (return_type, fn, nargs, args);
10945 va_end (args);
10946 return ret;
10947 }
10948
10949 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10950 FN and a null static chain slot. NARGS is the number of call arguments
10951 which are specified as a va_list ARGS. */
10952
10953 tree
10954 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10955 {
10956 tree t;
10957 int i;
10958
10959 t = build_call_1 (return_type, fn, nargs);
10960 for (i = 0; i < nargs; i++)
10961 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10962 process_call_operands (t);
10963 return t;
10964 }
10965
10966 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10967 FN and a null static chain slot. NARGS is the number of call arguments
10968 which are specified as a tree array ARGS. */
10969
10970 tree
10971 build_call_array_loc (location_t loc, tree return_type, tree fn,
10972 int nargs, const tree *args)
10973 {
10974 tree t;
10975 int i;
10976
10977 t = build_call_1 (return_type, fn, nargs);
10978 for (i = 0; i < nargs; i++)
10979 CALL_EXPR_ARG (t, i) = args[i];
10980 process_call_operands (t);
10981 SET_EXPR_LOCATION (t, loc);
10982 return t;
10983 }
10984
10985 /* Like build_call_array, but takes a vec. */
10986
10987 tree
10988 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10989 {
10990 tree ret, t;
10991 unsigned int ix;
10992
10993 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10994 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10995 CALL_EXPR_ARG (ret, ix) = t;
10996 process_call_operands (ret);
10997 return ret;
10998 }
10999
11000 /* Conveniently construct a function call expression. FNDECL names the
11001 function to be called and N arguments are passed in the array
11002 ARGARRAY. */
11003
11004 tree
11005 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11006 {
11007 tree fntype = TREE_TYPE (fndecl);
11008 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11009
11010 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11011 }
11012
11013 /* Conveniently construct a function call expression. FNDECL names the
11014 function to be called and the arguments are passed in the vector
11015 VEC. */
11016
11017 tree
11018 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11019 {
11020 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11021 vec_safe_address (vec));
11022 }
11023
11024
11025 /* Conveniently construct a function call expression. FNDECL names the
11026 function to be called, N is the number of arguments, and the "..."
11027 parameters are the argument expressions. */
11028
11029 tree
11030 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11031 {
11032 va_list ap;
11033 tree *argarray = XALLOCAVEC (tree, n);
11034 int i;
11035
11036 va_start (ap, n);
11037 for (i = 0; i < n; i++)
11038 argarray[i] = va_arg (ap, tree);
11039 va_end (ap);
11040 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11041 }
11042
11043 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11044 varargs macros aren't supported by all bootstrap compilers. */
11045
11046 tree
11047 build_call_expr (tree fndecl, int n, ...)
11048 {
11049 va_list ap;
11050 tree *argarray = XALLOCAVEC (tree, n);
11051 int i;
11052
11053 va_start (ap, n);
11054 for (i = 0; i < n; i++)
11055 argarray[i] = va_arg (ap, tree);
11056 va_end (ap);
11057 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11058 }
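
/* Usage sketch (DST, SRC and LEN are assumed to be trees of the
appropriate types):

   tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
   tree call = build_call_expr (fn, 3, dst, src, len);

Because this goes through build_call_expr_loc_array, the result is
folded whenever fold_build_call_array_loc can simplify it.  */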
11059
11060 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11061 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11062 It will get gimplified later into an ordinary internal function. */
11063
11064 tree
11065 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11066 tree type, int n, const tree *args)
11067 {
11068 tree t = build_call_1 (type, NULL_TREE, n);
11069 for (int i = 0; i < n; ++i)
11070 CALL_EXPR_ARG (t, i) = args[i];
11071 SET_EXPR_LOCATION (t, loc);
11072 CALL_EXPR_IFN (t) = ifn;
11073 return t;
11074 }
11075
11076 /* Build an internal call expression.  This is just like CALL_EXPR, except
11077 its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
11078 internal function.  */
11079
11080 tree
11081 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11082 tree type, int n, ...)
11083 {
11084 va_list ap;
11085 tree *argarray = XALLOCAVEC (tree, n);
11086 int i;
11087
11088 va_start (ap, n);
11089 for (i = 0; i < n; i++)
11090 argarray[i] = va_arg (ap, tree);
11091 va_end (ap);
11092 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11093 }
11094
11095 /* Return a function call to FN, if the target is guaranteed to support it,
11096 or null otherwise.
11097
11098 N is the number of arguments, passed in the "...", and TYPE is the
11099 type of the return value. */
11100
11101 tree
11102 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11103 int n, ...)
11104 {
11105 va_list ap;
11106 tree *argarray = XALLOCAVEC (tree, n);
11107 int i;
11108
11109 va_start (ap, n);
11110 for (i = 0; i < n; i++)
11111 argarray[i] = va_arg (ap, tree);
11112 va_end (ap);
11113 if (internal_fn_p (fn))
11114 {
11115 internal_fn ifn = as_internal_fn (fn);
11116 if (direct_internal_fn_p (ifn))
11117 {
11118 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11119 if (!direct_internal_fn_supported_p (ifn, types,
11120 OPTIMIZE_FOR_BOTH))
11121 return NULL_TREE;
11122 }
11123 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11124 }
11125 else
11126 {
11127 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11128 if (!fndecl)
11129 return NULL_TREE;
11130 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11131 }
11132 }
11133
11134 /* Create a new constant string literal and return a char* pointer to it.
11135 The STRING_CST value is the LEN characters at STR. */
11136 tree
11137 build_string_literal (int len, const char *str)
11138 {
11139 tree t, elem, index, type;
11140
11141 t = build_string (len, str);
11142 elem = build_type_variant (char_type_node, 1, 0);
11143 index = build_index_type (size_int (len - 1));
11144 type = build_array_type (elem, index);
11145 TREE_TYPE (t) = type;
11146 TREE_CONSTANT (t) = 1;
11147 TREE_READONLY (t) = 1;
11148 TREE_STATIC (t) = 1;
11149
11150 type = build_pointer_type (elem);
11151 t = build1 (ADDR_EXPR, type,
11152 build4 (ARRAY_REF, elem,
11153 t, integer_zero_node, NULL_TREE, NULL_TREE));
11154 return t;
11155 }
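/* Usage sketch, illustrative only: pair build_string_literal with
   build_call_expr to emit a call to __builtin_puts, assuming the builtin
   has an implicit declaration.  Callers normally include the terminating
   NUL in LEN:

     tree msg = build_string_literal (strlen ("hello") + 1, "hello");
     tree fn = builtin_decl_implicit (BUILT_IN_PUTS);
     if (fn)
       {
	 tree call = build_call_expr (fn, 1, msg);
	 ... append CALL to the statement sequence being built ...
       }
   */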
11156
11157
11158
11159 /* Return true if T (assumed to be a DECL) must be assigned a memory
11160 location. */
11161
11162 bool
11163 needs_to_live_in_memory (const_tree t)
11164 {
11165 return (TREE_ADDRESSABLE (t)
11166 || is_global_var (t)
11167 || (TREE_CODE (t) == RESULT_DECL
11168 && !DECL_BY_REFERENCE (t)
11169 && aggregate_value_p (t, current_function_decl)));
11170 }
11171
11172 /* Return the value of the constant X, sign-extended. */
11173
11174 HOST_WIDE_INT
11175 int_cst_value (const_tree x)
11176 {
11177 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11178 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11179
11180 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11181 gcc_assert (cst_and_fits_in_hwi (x));
11182
11183 if (bits < HOST_BITS_PER_WIDE_INT)
11184 {
11185 bool negative = ((val >> (bits - 1)) & 1) != 0;
11186 if (negative)
11187 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11188 else
11189 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11190 }
11191
11192 return val;
11193 }
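/* Worked example, illustrative only: for a constant of an 8-bit type the
   sign-extension code above turns the low word 0xff into -1 (bit 7 is
   set, so all higher bits are ORed in) and leaves 0x7f as 127 (bit 7 is
   clear, so the higher bits are masked off).  */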
11194
11195 /* If TYPE is an integral or pointer type, return an integer type with
11196 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11197 if TYPE is already an integer type of signedness UNSIGNEDP. */
11198
11199 tree
11200 signed_or_unsigned_type_for (int unsignedp, tree type)
11201 {
11202 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11203 return type;
11204
11205 if (TREE_CODE (type) == VECTOR_TYPE)
11206 {
11207 tree inner = TREE_TYPE (type);
11208 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11209 if (!inner2)
11210 return NULL_TREE;
11211 if (inner == inner2)
11212 return type;
11213 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11214 }
11215
11216 if (!INTEGRAL_TYPE_P (type)
11217 && !POINTER_TYPE_P (type)
11218 && TREE_CODE (type) != OFFSET_TYPE)
11219 return NULL_TREE;
11220
11221 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11222 }
11223
11224 /* If TYPE is an integral or pointer type, return an integer type with
11225 the same precision which is unsigned, or itself if TYPE is already an
11226 unsigned integer type. */
11227
11228 tree
11229 unsigned_type_for (tree type)
11230 {
11231 return signed_or_unsigned_type_for (1, type);
11232 }
11233
11234 /* If TYPE is an integral or pointer type, return an integer type with
11235 the same precision which is signed, or itself if TYPE is already a
11236 signed integer type. */
11237
11238 tree
11239 signed_type_for (tree type)
11240 {
11241 return signed_or_unsigned_type_for (0, type);
11242 }
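/* Usage sketch, illustrative only: a common idiom is to rewrite arithmetic
   in the unsigned counterpart of a type so that overflow is well defined,
   e.g.

     tree utype = unsigned_type_for (TREE_TYPE (op0));
     tree sum = fold_build2 (PLUS_EXPR, utype,
			     fold_convert (utype, op0),
			     fold_convert (utype, op1));
     ... fold_convert (TREE_TYPE (op0), sum) ...

   For pointer types the result is an integer type of the pointer's
   precision, not another pointer type.  */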
11243
11244 /* If TYPE is a vector type, return a signed integer vector type with the
11245 same width and number of subparts. Otherwise return boolean_type_node. */
11246
11247 tree
11248 truth_type_for (tree type)
11249 {
11250 if (TREE_CODE (type) == VECTOR_TYPE)
11251 {
11252 if (VECTOR_BOOLEAN_TYPE_P (type))
11253 return type;
11254 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11255 GET_MODE_SIZE (TYPE_MODE (type)));
11256 }
11257 else
11258 return boolean_type_node;
11259 }
11260
11261 /* Returns the largest value obtainable by casting something in INNER type to
11262 OUTER type. */
11263
11264 tree
11265 upper_bound_in_type (tree outer, tree inner)
11266 {
11267 unsigned int det = 0;
11268 unsigned oprec = TYPE_PRECISION (outer);
11269 unsigned iprec = TYPE_PRECISION (inner);
11270 unsigned prec;
11271
11272 /* Compute a unique number for every combination. */
11273 det |= (oprec > iprec) ? 4 : 0;
11274 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11275 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11276
11277 /* Determine the exponent to use. */
11278 switch (det)
11279 {
11280 case 0:
11281 case 1:
11282 /* oprec <= iprec, outer: signed, inner: don't care. */
11283 prec = oprec - 1;
11284 break;
11285 case 2:
11286 case 3:
11287 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11288 prec = oprec;
11289 break;
11290 case 4:
11291 /* oprec > iprec, outer: signed, inner: signed. */
11292 prec = iprec - 1;
11293 break;
11294 case 5:
11295 /* oprec > iprec, outer: signed, inner: unsigned. */
11296 prec = iprec;
11297 break;
11298 case 6:
11299 /* oprec > iprec, outer: unsigned, inner: signed. */
11300 prec = oprec;
11301 break;
11302 case 7:
11303 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11304 prec = iprec;
11305 break;
11306 default:
11307 gcc_unreachable ();
11308 }
11309
11310 return wide_int_to_tree (outer,
11311 wi::mask (prec, false, TYPE_PRECISION (outer)));
11312 }
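/* Worked examples of the DET encoding above, illustrative only:
   OUTER unsigned 16-bit, INNER signed 8-bit gives det = 4|2|0 = 6, so
   prec = oprec and the bound is 0xffff (e.g. (unsigned short) (signed
   char) -1); OUTER signed 16-bit, INNER unsigned 8-bit gives
   det = 4|0|1 = 5, so prec = iprec and the bound is 255.  */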
11313
11314 /* Returns the smallest value obtainable by casting something in INNER type to
11315 OUTER type. */
11316
11317 tree
11318 lower_bound_in_type (tree outer, tree inner)
11319 {
11320 unsigned oprec = TYPE_PRECISION (outer);
11321 unsigned iprec = TYPE_PRECISION (inner);
11322
11323 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11324 and obtain 0. */
11325 if (TYPE_UNSIGNED (outer)
11326 /* If we are widening something of an unsigned type, OUTER type
11327 contains all values of INNER type. In particular, both INNER
11328 and OUTER types have zero in common. */
11329 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11330 return build_int_cst (outer, 0);
11331 else
11332 {
11333 /* If we are widening a signed type to another signed type, we
11334 want to obtain -2^^(iprec-1). If we are keeping the
11335 precision or narrowing to a signed type, we want to obtain
11336 -2^(oprec-1). */
11337 unsigned prec = oprec > iprec ? iprec : oprec;
11338 return wide_int_to_tree (outer,
11339 wi::mask (prec - 1, true,
11340 TYPE_PRECISION (outer)));
11341 }
11342 }
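/* Worked example, illustrative only: for OUTER signed 16-bit and INNER
   signed 8-bit we have oprec > iprec, so prec = iprec and the result is
   wi::mask (7, true, 16), i.e. -128, the smallest value an 8-bit signed
   quantity can produce when widened.  */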
11343
11344 /* Return nonzero if two operands that are suitable for PHI nodes are
11345 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11346 SSA_NAME or invariant. Note that this is strictly an optimization.
11347 That is, callers of this function can directly call operand_equal_p
11348 and get the same result, only slower. */
11349
11350 int
11351 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11352 {
11353 if (arg0 == arg1)
11354 return 1;
11355 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11356 return 0;
11357 return operand_equal_p (arg0, arg1, 0);
11358 }
11359
11360 /* Returns the number of zeros at the end of the binary representation of X. */
11361
11362 tree
11363 num_ending_zeros (const_tree x)
11364 {
11365 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11366 }
11367
11368
11369 #define WALK_SUBTREE(NODE) \
11370 do \
11371 { \
11372 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11373 if (result) \
11374 return result; \
11375 } \
11376 while (0)
11377
11378 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11379 to be walked whenever a type is seen in the tree. The rest of the operands
11380 and the return value are as for walk_tree. */
11381
11382 static tree
11383 walk_type_fields (tree type, walk_tree_fn func, void *data,
11384 hash_set<tree> *pset, walk_tree_lh lh)
11385 {
11386 tree result = NULL_TREE;
11387
11388 switch (TREE_CODE (type))
11389 {
11390 case POINTER_TYPE:
11391 case REFERENCE_TYPE:
11392 case VECTOR_TYPE:
11393 /* We have to worry about mutually recursive pointers. These can't
11394 be written in C. They can in Ada. It's pathological, but
11395 there's an ACATS test (c38102a) that checks it. Deal with this
11396 by checking if we're pointing to another pointer, that one
11397 points to another pointer, that one does too, and we have no htab.
11398 If so, get a hash table. We check three levels deep to avoid
11399 the cost of the hash table if we don't need one. */
11400 if (POINTER_TYPE_P (TREE_TYPE (type))
11401 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11402 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11403 && !pset)
11404 {
11405 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11406 func, data);
11407 if (result)
11408 return result;
11409
11410 break;
11411 }
11412
11413 /* ... fall through ... */
11414
11415 case COMPLEX_TYPE:
11416 WALK_SUBTREE (TREE_TYPE (type));
11417 break;
11418
11419 case METHOD_TYPE:
11420 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11421
11422 /* Fall through. */
11423
11424 case FUNCTION_TYPE:
11425 WALK_SUBTREE (TREE_TYPE (type));
11426 {
11427 tree arg;
11428
11429 /* We never want to walk into default arguments. */
11430 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11431 WALK_SUBTREE (TREE_VALUE (arg));
11432 }
11433 break;
11434
11435 case ARRAY_TYPE:
11436 /* Don't follow this node's type if it is a pointer, for fear that
11437 we'll have infinite recursion. If we have a PSET, then we
11438 need not fear. */
11439 if (pset
11440 || (!POINTER_TYPE_P (TREE_TYPE (type))
11441 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11442 WALK_SUBTREE (TREE_TYPE (type));
11443 WALK_SUBTREE (TYPE_DOMAIN (type));
11444 break;
11445
11446 case OFFSET_TYPE:
11447 WALK_SUBTREE (TREE_TYPE (type));
11448 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11449 break;
11450
11451 default:
11452 break;
11453 }
11454
11455 return NULL_TREE;
11456 }
11457
11458 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11459 called with the DATA and the address of each sub-tree. If FUNC returns a
11460 non-NULL value, the traversal is stopped, and the value returned by FUNC
11461 is returned. If PSET is non-NULL it is used to record the nodes visited,
11462 and to avoid visiting a node more than once. */
11463
11464 tree
11465 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11466 hash_set<tree> *pset, walk_tree_lh lh)
11467 {
11468 enum tree_code code;
11469 int walk_subtrees;
11470 tree result;
11471
11472 #define WALK_SUBTREE_TAIL(NODE) \
11473 do \
11474 { \
11475 tp = & (NODE); \
11476 goto tail_recurse; \
11477 } \
11478 while (0)
11479
11480 tail_recurse:
11481 /* Skip empty subtrees. */
11482 if (!*tp)
11483 return NULL_TREE;
11484
11485 /* Don't walk the same tree twice, if the user has requested
11486 that we avoid doing so. */
11487 if (pset && pset->add (*tp))
11488 return NULL_TREE;
11489
11490 /* Call the function. */
11491 walk_subtrees = 1;
11492 result = (*func) (tp, &walk_subtrees, data);
11493
11494 /* If we found something, return it. */
11495 if (result)
11496 return result;
11497
11498 code = TREE_CODE (*tp);
11499
11500 /* Even if we didn't, FUNC may have decided that there was nothing
11501 interesting below this point in the tree. */
11502 if (!walk_subtrees)
11503 {
11504 /* But we still need to check our siblings. */
11505 if (code == TREE_LIST)
11506 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11507 else if (code == OMP_CLAUSE)
11508 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11509 else
11510 return NULL_TREE;
11511 }
11512
11513 if (lh)
11514 {
11515 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11516 if (result || !walk_subtrees)
11517 return result;
11518 }
11519
11520 switch (code)
11521 {
11522 case ERROR_MARK:
11523 case IDENTIFIER_NODE:
11524 case INTEGER_CST:
11525 case REAL_CST:
11526 case FIXED_CST:
11527 case VECTOR_CST:
11528 case STRING_CST:
11529 case BLOCK:
11530 case PLACEHOLDER_EXPR:
11531 case SSA_NAME:
11532 case FIELD_DECL:
11533 case RESULT_DECL:
11534 /* None of these have subtrees other than those already walked
11535 above. */
11536 break;
11537
11538 case TREE_LIST:
11539 WALK_SUBTREE (TREE_VALUE (*tp));
11540 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11541 break;
11542
11543 case TREE_VEC:
11544 {
11545 int len = TREE_VEC_LENGTH (*tp);
11546
11547 if (len == 0)
11548 break;
11549
11550 /* Walk all elements but the first. */
11551 while (--len)
11552 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11553
11554 /* Now walk the first one as a tail call. */
11555 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11556 }
11557
11558 case COMPLEX_CST:
11559 WALK_SUBTREE (TREE_REALPART (*tp));
11560 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11561
11562 case CONSTRUCTOR:
11563 {
11564 unsigned HOST_WIDE_INT idx;
11565 constructor_elt *ce;
11566
11567 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11568 idx++)
11569 WALK_SUBTREE (ce->value);
11570 }
11571 break;
11572
11573 case SAVE_EXPR:
11574 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11575
11576 case BIND_EXPR:
11577 {
11578 tree decl;
11579 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11580 {
11581 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11582 into declarations that are just mentioned, rather than
11583 declared; they don't really belong to this part of the tree.
11584 And, we can see cycles: the initializer for a declaration
11585 can refer to the declaration itself. */
11586 WALK_SUBTREE (DECL_INITIAL (decl));
11587 WALK_SUBTREE (DECL_SIZE (decl));
11588 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11589 }
11590 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11591 }
11592
11593 case STATEMENT_LIST:
11594 {
11595 tree_stmt_iterator i;
11596 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11597 WALK_SUBTREE (*tsi_stmt_ptr (i));
11598 }
11599 break;
11600
11601 case OMP_CLAUSE:
11602 switch (OMP_CLAUSE_CODE (*tp))
11603 {
11604 case OMP_CLAUSE_GANG:
11605 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11606 /* FALLTHRU */
11607
11608 case OMP_CLAUSE_DEVICE_RESIDENT:
11609 case OMP_CLAUSE_USE_DEVICE:
11610 case OMP_CLAUSE_ASYNC:
11611 case OMP_CLAUSE_WAIT:
11612 case OMP_CLAUSE_WORKER:
11613 case OMP_CLAUSE_VECTOR:
11614 case OMP_CLAUSE_NUM_GANGS:
11615 case OMP_CLAUSE_NUM_WORKERS:
11616 case OMP_CLAUSE_VECTOR_LENGTH:
11617 case OMP_CLAUSE_PRIVATE:
11618 case OMP_CLAUSE_SHARED:
11619 case OMP_CLAUSE_FIRSTPRIVATE:
11620 case OMP_CLAUSE_COPYIN:
11621 case OMP_CLAUSE_COPYPRIVATE:
11622 case OMP_CLAUSE_FINAL:
11623 case OMP_CLAUSE_IF:
11624 case OMP_CLAUSE_NUM_THREADS:
11625 case OMP_CLAUSE_SCHEDULE:
11626 case OMP_CLAUSE_UNIFORM:
11627 case OMP_CLAUSE_DEPEND:
11628 case OMP_CLAUSE_NUM_TEAMS:
11629 case OMP_CLAUSE_THREAD_LIMIT:
11630 case OMP_CLAUSE_DEVICE:
11631 case OMP_CLAUSE_DIST_SCHEDULE:
11632 case OMP_CLAUSE_SAFELEN:
11633 case OMP_CLAUSE_SIMDLEN:
11634 case OMP_CLAUSE_ORDERED:
11635 case OMP_CLAUSE_PRIORITY:
11636 case OMP_CLAUSE_GRAINSIZE:
11637 case OMP_CLAUSE_NUM_TASKS:
11638 case OMP_CLAUSE_HINT:
11639 case OMP_CLAUSE_TO_DECLARE:
11640 case OMP_CLAUSE_LINK:
11641 case OMP_CLAUSE_USE_DEVICE_PTR:
11642 case OMP_CLAUSE_IS_DEVICE_PTR:
11643 case OMP_CLAUSE__LOOPTEMP_:
11644 case OMP_CLAUSE__SIMDUID_:
11645 case OMP_CLAUSE__CILK_FOR_COUNT_:
11646 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11647 /* FALLTHRU */
11648
11649 case OMP_CLAUSE_INDEPENDENT:
11650 case OMP_CLAUSE_NOWAIT:
11651 case OMP_CLAUSE_DEFAULT:
11652 case OMP_CLAUSE_UNTIED:
11653 case OMP_CLAUSE_MERGEABLE:
11654 case OMP_CLAUSE_PROC_BIND:
11655 case OMP_CLAUSE_INBRANCH:
11656 case OMP_CLAUSE_NOTINBRANCH:
11657 case OMP_CLAUSE_FOR:
11658 case OMP_CLAUSE_PARALLEL:
11659 case OMP_CLAUSE_SECTIONS:
11660 case OMP_CLAUSE_TASKGROUP:
11661 case OMP_CLAUSE_NOGROUP:
11662 case OMP_CLAUSE_THREADS:
11663 case OMP_CLAUSE_SIMD:
11664 case OMP_CLAUSE_DEFAULTMAP:
11665 case OMP_CLAUSE_AUTO:
11666 case OMP_CLAUSE_SEQ:
11667 case OMP_CLAUSE_TILE:
11668 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11669
11670 case OMP_CLAUSE_LASTPRIVATE:
11671 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11672 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11673 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11674
11675 case OMP_CLAUSE_COLLAPSE:
11676 {
11677 int i;
11678 for (i = 0; i < 3; i++)
11679 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11680 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11681 }
11682
11683 case OMP_CLAUSE_LINEAR:
11684 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11685 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11686 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11687 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11688
11689 case OMP_CLAUSE_ALIGNED:
11690 case OMP_CLAUSE_FROM:
11691 case OMP_CLAUSE_TO:
11692 case OMP_CLAUSE_MAP:
11693 case OMP_CLAUSE__CACHE_:
11694 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11695 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11696 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11697
11698 case OMP_CLAUSE_REDUCTION:
11699 {
11700 int i;
11701 for (i = 0; i < 5; i++)
11702 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11703 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11704 }
11705
11706 default:
11707 gcc_unreachable ();
11708 }
11709 break;
11710
11711 case TARGET_EXPR:
11712 {
11713 int i, len;
11714
11715 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11716 But, we only want to walk once. */
11717 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11718 for (i = 0; i < len; ++i)
11719 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11720 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11721 }
11722
11723 case DECL_EXPR:
11724 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11725 defining. We only want to walk into these fields of a type in this
11726 case and not in the general case of a mere reference to the type.
11727
11728 The criterion is as follows: if the field can be an expression, it
11729 must be walked only here. This should be in keeping with the fields
11730 that are directly gimplified in gimplify_type_sizes in order for the
11731 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11732 variable-sized types.
11733
11734 Note that DECLs get walked as part of processing the BIND_EXPR. */
11735 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11736 {
11737 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11738 if (TREE_CODE (*type_p) == ERROR_MARK)
11739 return NULL_TREE;
11740
11741 /* Call the function for the type. See if it returns anything or
11742 doesn't want us to continue. If we are to continue, walk both
11743 the normal fields and those for the declaration case. */
11744 result = (*func) (type_p, &walk_subtrees, data);
11745 if (result || !walk_subtrees)
11746 return result;
11747
11748 /* But do not walk a pointed-to type since it may itself need to
11749 be walked in the declaration case if it isn't anonymous. */
11750 if (!POINTER_TYPE_P (*type_p))
11751 {
11752 result = walk_type_fields (*type_p, func, data, pset, lh);
11753 if (result)
11754 return result;
11755 }
11756
11757 /* If this is a record type, also walk the fields. */
11758 if (RECORD_OR_UNION_TYPE_P (*type_p))
11759 {
11760 tree field;
11761
11762 for (field = TYPE_FIELDS (*type_p); field;
11763 field = DECL_CHAIN (field))
11764 {
11765 /* We'd like to look at the type of the field, but we can
11766 easily get infinite recursion. So assume it's pointed
11767 to elsewhere in the tree. Also, ignore things that
11768 aren't fields. */
11769 if (TREE_CODE (field) != FIELD_DECL)
11770 continue;
11771
11772 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11773 WALK_SUBTREE (DECL_SIZE (field));
11774 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11775 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11776 WALK_SUBTREE (DECL_QUALIFIER (field));
11777 }
11778 }
11779
11780 /* Same for scalar types. */
11781 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11782 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11783 || TREE_CODE (*type_p) == INTEGER_TYPE
11784 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11785 || TREE_CODE (*type_p) == REAL_TYPE)
11786 {
11787 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11788 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11789 }
11790
11791 WALK_SUBTREE (TYPE_SIZE (*type_p));
11792 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11793 }
11794 /* FALLTHRU */
11795
11796 default:
11797 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11798 {
11799 int i, len;
11800
11801 /* Walk over all the sub-trees of this operand. */
11802 len = TREE_OPERAND_LENGTH (*tp);
11803
11804 /* Go through the subtrees. We need to do this in forward order so
11805 that the scope of a FOR_EXPR is handled properly. */
11806 if (len)
11807 {
11808 for (i = 0; i < len - 1; ++i)
11809 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11810 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11811 }
11812 }
11813 /* If this is a type, walk the needed fields in the type. */
11814 else if (TYPE_P (*tp))
11815 return walk_type_fields (*tp, func, data, pset, lh);
11816 break;
11817 }
11818
11819 /* We didn't find what we were looking for. */
11820 return NULL_TREE;
11821
11822 #undef WALK_SUBTREE_TAIL
11823 }
11824 #undef WALK_SUBTREE
11825
11826 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11827
11828 tree
11829 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11830 walk_tree_lh lh)
11831 {
11832 tree result;
11833
11834 hash_set<tree> pset;
11835 result = walk_tree_1 (tp, func, data, &pset, lh);
11836 return result;
11837 }
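/* Usage sketch, illustrative only: a walk_tree_fn callback that returns
   the first CALL_EXPR found in an expression, pruning walks into types:

     static tree
     find_call_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
	 return *tp;
       if (TYPE_P (*tp))
	 *walk_subtrees = 0;
       return NULL_TREE;
     }

     tree call = walk_tree_1 (&expr, find_call_r, NULL, NULL, NULL);

   A non-NULL return value from the callback stops the walk and becomes
   the overall result; clearing *WALK_SUBTREES skips the operands of the
   current node while still visiting its siblings.  */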
11838
11839
11840 tree
11841 tree_block (tree t)
11842 {
11843 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11844
11845 if (IS_EXPR_CODE_CLASS (c))
11846 return LOCATION_BLOCK (t->exp.locus);
11847 gcc_unreachable ();
11848 return NULL;
11849 }
11850
11851 void
11852 tree_set_block (tree t, tree b)
11853 {
11854 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11855
11856 if (IS_EXPR_CODE_CLASS (c))
11857 {
11858 t->exp.locus = set_block (t->exp.locus, b);
11859 }
11860 else
11861 gcc_unreachable ();
11862 }
11863
11864 /* Create a nameless artificial label and put it in the current
11865 function context. The label has a location of LOC. Returns the
11866 newly created label. */
11867
11868 tree
11869 create_artificial_label (location_t loc)
11870 {
11871 tree lab = build_decl (loc,
11872 LABEL_DECL, NULL_TREE, void_type_node);
11873
11874 DECL_ARTIFICIAL (lab) = 1;
11875 DECL_IGNORED_P (lab) = 1;
11876 DECL_CONTEXT (lab) = current_function_decl;
11877 return lab;
11878 }
11879
11880 /* Given a tree, try to return a useful variable name that we can use
11881 to prefix a temporary that is being assigned the value of the tree.
11882 E.g. given <temp> = &A, return A. */
11883
11884 const char *
11885 get_name (tree t)
11886 {
11887 tree stripped_decl;
11888
11889 stripped_decl = t;
11890 STRIP_NOPS (stripped_decl);
11891 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11892 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11893 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11894 {
11895 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11896 if (!name)
11897 return NULL;
11898 return IDENTIFIER_POINTER (name);
11899 }
11900 else
11901 {
11902 switch (TREE_CODE (stripped_decl))
11903 {
11904 case ADDR_EXPR:
11905 return get_name (TREE_OPERAND (stripped_decl, 0));
11906 default:
11907 return NULL;
11908 }
11909 }
11910 }
11911
11912 /* Return true if FNTYPE has a variable argument list. */
11913
11914 bool
11915 stdarg_p (const_tree fntype)
11916 {
11917 function_args_iterator args_iter;
11918 tree n = NULL_TREE, t;
11919
11920 if (!fntype)
11921 return false;
11922
11923 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11924 {
11925 n = t;
11926 }
11927
11928 return n != NULL_TREE && n != void_type_node;
11929 }
11930
11931 /* Return true if FNTYPE has a prototype. */
11932
11933 bool
11934 prototype_p (const_tree fntype)
11935 {
11936 tree t;
11937
11938 gcc_assert (fntype != NULL_TREE);
11939
11940 t = TYPE_ARG_TYPES (fntype);
11941 return (t != NULL_TREE);
11942 }
11943
11944 /* If BLOCK is inlined from an __attribute__((__artificial__))
11945 routine, return a pointer to the location from which it has been
11946 called. */
11947 location_t *
11948 block_nonartificial_location (tree block)
11949 {
11950 location_t *ret = NULL;
11951
11952 while (block && TREE_CODE (block) == BLOCK
11953 && BLOCK_ABSTRACT_ORIGIN (block))
11954 {
11955 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11956
11957 while (TREE_CODE (ao) == BLOCK
11958 && BLOCK_ABSTRACT_ORIGIN (ao)
11959 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11960 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11961
11962 if (TREE_CODE (ao) == FUNCTION_DECL)
11963 {
11964 /* If AO is an artificial inline, point RET to the
11965 call site locus at which it has been inlined and continue
11966 the loop, in case AO's caller is also an artificial
11967 inline. */
11968 if (DECL_DECLARED_INLINE_P (ao)
11969 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11970 ret = &BLOCK_SOURCE_LOCATION (block);
11971 else
11972 break;
11973 }
11974 else if (TREE_CODE (ao) != BLOCK)
11975 break;
11976
11977 block = BLOCK_SUPERCONTEXT (block);
11978 }
11979 return ret;
11980 }
11981
11982
11983 /* If EXP is inlined from an __attribute__((__artificial__))
11984 function, return the location of the original call expression. */
11985
11986 location_t
11987 tree_nonartificial_location (tree exp)
11988 {
11989 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11990
11991 if (loc)
11992 return *loc;
11993 else
11994 return EXPR_LOCATION (exp);
11995 }
11996
11997
11998 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11999 nodes. */
12000
12001 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
12002
12003 hashval_t
12004 cl_option_hasher::hash (tree x)
12005 {
12006 const_tree const t = x;
12007 const char *p;
12008 size_t i;
12009 size_t len = 0;
12010 hashval_t hash = 0;
12011
12012 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12013 {
12014 p = (const char *)TREE_OPTIMIZATION (t);
12015 len = sizeof (struct cl_optimization);
12016 }
12017
12018 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12019 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12020
12021 else
12022 gcc_unreachable ();
12023
12024 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
12025 something else. */
12026 for (i = 0; i < len; i++)
12027 if (p[i])
12028 hash = (hash << 4) ^ ((i << 2) | p[i]);
12029
12030 return hash;
12031 }
12032
12033 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12034 TARGET_OPTION tree node) is the same as that given by *Y, a node of the
12035 same kind. */
12036
12037 bool
12038 cl_option_hasher::equal (tree x, tree y)
12039 {
12040 const_tree const xt = x;
12041 const_tree const yt = y;
12042 const char *xp;
12043 const char *yp;
12044 size_t len;
12045
12046 if (TREE_CODE (xt) != TREE_CODE (yt))
12047 return 0;
12048
12049 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12050 {
12051 xp = (const char *)TREE_OPTIMIZATION (xt);
12052 yp = (const char *)TREE_OPTIMIZATION (yt);
12053 len = sizeof (struct cl_optimization);
12054 }
12055
12056 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12057 {
12058 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12059 TREE_TARGET_OPTION (yt));
12060 }
12061
12062 else
12063 gcc_unreachable ();
12064
12065 return (memcmp (xp, yp, len) == 0);
12066 }
12067
12068 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12069
12070 tree
12071 build_optimization_node (struct gcc_options *opts)
12072 {
12073 tree t;
12074
12075 /* Use the cache of optimization nodes. */
12076
12077 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12078 opts);
12079
12080 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12081 t = *slot;
12082 if (!t)
12083 {
12084 /* Insert this one into the hash table. */
12085 t = cl_optimization_node;
12086 *slot = t;
12087
12088 /* Make a new node for next time round. */
12089 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12090 }
12091
12092 return t;
12093 }
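/* Usage sketch, illustrative only: record the current options on a
   function declaration, assuming global_options holds them:

     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&global_options);

   Because equal option sets are interned to the same cached node, a
   simple pointer comparison of such nodes tells whether two functions
   use identical optimization settings.  */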
12094
12095 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12096
12097 tree
12098 build_target_option_node (struct gcc_options *opts)
12099 {
12100 tree t;
12101
12102 /* Use the cache of optimization nodes. */
12103
12104 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12105 opts);
12106
12107 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12108 t = *slot;
12109 if (!t)
12110 {
12111 /* Insert this one into the hash table. */
12112 t = cl_target_option_node;
12113 *slot = t;
12114
12115 /* Make a new node for next time round. */
12116 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12117 }
12118
12119 return t;
12120 }
12121
12122 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12123 so that they aren't saved during PCH writing. */
12124
12125 void
12126 prepare_target_option_nodes_for_pch (void)
12127 {
12128 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12129 for (; iter != cl_option_hash_table->end (); ++iter)
12130 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12131 TREE_TARGET_GLOBALS (*iter) = NULL;
12132 }
12133
12134 /* Determine the "ultimate origin" of a block. The block may be an inlined
12135 instance of an inlined instance of a block which is local to an inline
12136 function, so we have to trace all of the way back through the origin chain
12137 to find out what sort of node actually served as the original seed for the
12138 given block. */
12139
12140 tree
12141 block_ultimate_origin (const_tree block)
12142 {
12143 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12144
12145 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12146 we're trying to output the abstract instance of this function. */
12147 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12148 return NULL_TREE;
12149
12150 if (immediate_origin == NULL_TREE)
12151 return NULL_TREE;
12152 else
12153 {
12154 tree ret_val;
12155 tree lookahead = immediate_origin;
12156
12157 do
12158 {
12159 ret_val = lookahead;
12160 lookahead = (TREE_CODE (ret_val) == BLOCK
12161 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12162 }
12163 while (lookahead != NULL && lookahead != ret_val);
12164
12165 /* The block's abstract origin chain may not be the *ultimate* origin of
12166 the block. It could lead to a DECL that has an abstract origin set.
12167 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12168 will give us if it has one). Note that DECL's abstract origins are
12169 supposed to be the most distant ancestor (or so decl_ultimate_origin
12170 claims), so we don't need to loop following the DECL origins. */
12171 if (DECL_P (ret_val))
12172 return DECL_ORIGIN (ret_val);
12173
12174 return ret_val;
12175 }
12176 }
12177
12178 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12179 no instruction. */
12180
12181 bool
12182 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12183 {
12184 /* Use precision rather than machine mode when we can, which gives
12185 the correct answer even for submode (bit-field) types. */
12186 if ((INTEGRAL_TYPE_P (outer_type)
12187 || POINTER_TYPE_P (outer_type)
12188 || TREE_CODE (outer_type) == OFFSET_TYPE)
12189 && (INTEGRAL_TYPE_P (inner_type)
12190 || POINTER_TYPE_P (inner_type)
12191 || TREE_CODE (inner_type) == OFFSET_TYPE))
12192 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12193
12194 /* Otherwise fall back on comparing machine modes (e.g. for
12195 aggregate types, floats). */
12196 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12197 }
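/* Examples, illustrative only:
     tree_nop_conversion_p (unsigned_type_node, integer_type_node)
   is true because the precisions match, whereas converting between int
   and a wider type such as long long is not a nop; for aggregates and
   floats the decision falls back to comparing TYPE_MODE.  */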
12198
12199 /* Return true iff conversion in EXP generates no instruction. Mark
12200 it inline so that we fully inline into the stripping functions even
12201 though we have two uses of this function. */
12202
12203 static inline bool
12204 tree_nop_conversion (const_tree exp)
12205 {
12206 tree outer_type, inner_type;
12207
12208 if (!CONVERT_EXPR_P (exp)
12209 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12210 return false;
12211 if (TREE_OPERAND (exp, 0) == error_mark_node)
12212 return false;
12213
12214 outer_type = TREE_TYPE (exp);
12215 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12216
12217 if (!inner_type)
12218 return false;
12219
12220 return tree_nop_conversion_p (outer_type, inner_type);
12221 }
12222
12223 /* Return true iff conversion in EXP generates no instruction. Don't
12224 consider conversions changing the signedness. */
12225
12226 static bool
12227 tree_sign_nop_conversion (const_tree exp)
12228 {
12229 tree outer_type, inner_type;
12230
12231 if (!tree_nop_conversion (exp))
12232 return false;
12233
12234 outer_type = TREE_TYPE (exp);
12235 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12236
12237 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12238 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12239 }
12240
12241 /* Strip conversions from EXP according to tree_nop_conversion and
12242 return the resulting expression. */
12243
12244 tree
12245 tree_strip_nop_conversions (tree exp)
12246 {
12247 while (tree_nop_conversion (exp))
12248 exp = TREE_OPERAND (exp, 0);
12249 return exp;
12250 }
12251
12252 /* Strip conversions from EXP according to tree_sign_nop_conversion
12253 and return the resulting expression. */
12254
12255 tree
12256 tree_strip_sign_nop_conversions (tree exp)
12257 {
12258 while (tree_sign_nop_conversion (exp))
12259 exp = TREE_OPERAND (exp, 0);
12260 return exp;
12261 }
12262
12263 /* Avoid any floating point extensions from EXP. */
12264 tree
12265 strip_float_extensions (tree exp)
12266 {
12267 tree sub, expt, subt;
12268
12269 /* For a floating point constant, look up the narrowest type that can hold
12270 it properly and handle it like (type)(narrowest_type)constant.
12271 This way we can optimize for instance a=a*2.0 where "a" is float
12272 but 2.0 is a double constant. */
12273 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12274 {
12275 REAL_VALUE_TYPE orig;
12276 tree type = NULL;
12277
12278 orig = TREE_REAL_CST (exp);
12279 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12280 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12281 type = float_type_node;
12282 else if (TYPE_PRECISION (TREE_TYPE (exp))
12283 > TYPE_PRECISION (double_type_node)
12284 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12285 type = double_type_node;
12286 if (type)
12287 return build_real_truncate (type, orig);
12288 }
12289
12290 if (!CONVERT_EXPR_P (exp))
12291 return exp;
12292
12293 sub = TREE_OPERAND (exp, 0);
12294 subt = TREE_TYPE (sub);
12295 expt = TREE_TYPE (exp);
12296
12297 if (!FLOAT_TYPE_P (subt))
12298 return exp;
12299
12300 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12301 return exp;
12302
12303 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12304 return exp;
12305
12306 return strip_float_extensions (sub);
12307 }
12308
12309 /* Strip out all handled components that produce invariant
12310 offsets. */
12311
12312 const_tree
12313 strip_invariant_refs (const_tree op)
12314 {
12315 while (handled_component_p (op))
12316 {
12317 switch (TREE_CODE (op))
12318 {
12319 case ARRAY_REF:
12320 case ARRAY_RANGE_REF:
12321 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12322 || TREE_OPERAND (op, 2) != NULL_TREE
12323 || TREE_OPERAND (op, 3) != NULL_TREE)
12324 return NULL;
12325 break;
12326
12327 case COMPONENT_REF:
12328 if (TREE_OPERAND (op, 2) != NULL_TREE)
12329 return NULL;
12330 break;
12331
12332 default:;
12333 }
12334 op = TREE_OPERAND (op, 0);
12335 }
12336
12337 return op;
12338 }
12339
12340 static GTY(()) tree gcc_eh_personality_decl;
12341
12342 /* Return the GCC personality function decl. */
12343
12344 tree
12345 lhd_gcc_personality (void)
12346 {
12347 if (!gcc_eh_personality_decl)
12348 gcc_eh_personality_decl = build_personality_function ("gcc");
12349 return gcc_eh_personality_decl;
12350 }
12351
12352 /* TARGET is a call target of a GIMPLE call statement
12353 (obtained by gimple_call_fn). Return true if it is an
12354 OBJ_TYPE_REF representing a virtual call of a C++ method.
12355 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12356 through a cast, where the middle-end devirtualization machinery
12357 can't apply.) */
12358
12359 bool
12360 virtual_method_call_p (const_tree target)
12361 {
12362 if (TREE_CODE (target) != OBJ_TYPE_REF)
12363 return false;
12364 tree t = TREE_TYPE (target);
12365 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12366 t = TREE_TYPE (t);
12367 if (TREE_CODE (t) == FUNCTION_TYPE)
12368 return false;
12369 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12370 /* If we do not have BINFO associated, it means that type was built
12371 without devirtualization enabled. Do not consider this a virtual
12372 call. */
12373 if (!TYPE_BINFO (obj_type_ref_class (target)))
12374 return false;
12375 return true;
12376 }
12377
12378 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12379
12380 tree
12381 obj_type_ref_class (const_tree ref)
12382 {
12383 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12384 ref = TREE_TYPE (ref);
12385 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12386 ref = TREE_TYPE (ref);
12387 /* We look for the type that THIS points to. ObjC also builds
12388 OBJ_TYPE_REF with non-method calls; their first parameter
12389 ID however also corresponds to the class type. */
12390 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12391 || TREE_CODE (ref) == FUNCTION_TYPE);
12392 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12393 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12394 return TREE_TYPE (ref);
12395 }
12396
12397 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12398
12399 static tree
12400 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12401 {
12402 unsigned int i;
12403 tree base_binfo, b;
12404
12405 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12406 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12407 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12408 return base_binfo;
12409 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12410 return b;
12411 return NULL;
12412 }
12413
12414 /* Try to find a base info of BINFO that would have its field decl at offset
12415 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12416 found, return it; otherwise return NULL_TREE. */
12417
12418 tree
12419 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12420 {
12421 tree type = BINFO_TYPE (binfo);
12422
12423 while (true)
12424 {
12425 HOST_WIDE_INT pos, size;
12426 tree fld;
12427 int i;
12428
12429 if (types_same_for_odr (type, expected_type))
12430 return binfo;
12431 if (offset < 0)
12432 return NULL_TREE;
12433
12434 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12435 {
12436 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12437 continue;
12438
12439 pos = int_bit_position (fld);
12440 size = tree_to_uhwi (DECL_SIZE (fld));
12441 if (pos <= offset && (pos + size) > offset)
12442 break;
12443 }
12444 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12445 return NULL_TREE;
12446
12447 /* Offset 0 indicates the primary base, whose vtable contents are
12448 represented in the binfo for the derived class. */
12449 else if (offset != 0)
12450 {
12451 tree found_binfo = NULL, base_binfo;
12452 /* Offsets in BINFO are in bytes relative to the whole structure
12453 while POS is in bits relative to the containing field. */
12454 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12455 / BITS_PER_UNIT);
12456
12457 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12458 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12459 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12460 {
12461 found_binfo = base_binfo;
12462 break;
12463 }
12464 if (found_binfo)
12465 binfo = found_binfo;
12466 else
12467 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12468 binfo_offset);
12469 }
12470
12471 type = TREE_TYPE (fld);
12472 offset -= pos;
12473 }
12474 }
12475
12476 /* Returns true if X is a typedef decl. */
12477
12478 bool
12479 is_typedef_decl (const_tree x)
12480 {
12481 return (x && TREE_CODE (x) == TYPE_DECL
12482 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12483 }
12484
12485 /* Returns true iff TYPE is a type variant created for a typedef. */
12486
12487 bool
12488 typedef_variant_p (const_tree type)
12489 {
12490 return is_typedef_decl (TYPE_NAME (type));
12491 }
12492
12493 /* Warn about a use of an identifier which was marked deprecated. */
12494 void
12495 warn_deprecated_use (tree node, tree attr)
12496 {
12497 const char *msg;
12498
12499 if (node == 0 || !warn_deprecated_decl)
12500 return;
12501
12502 if (!attr)
12503 {
12504 if (DECL_P (node))
12505 attr = DECL_ATTRIBUTES (node);
12506 else if (TYPE_P (node))
12507 {
12508 tree decl = TYPE_STUB_DECL (node);
12509 if (decl)
12510 attr = lookup_attribute ("deprecated",
12511 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12512 }
12513 }
12514
12515 if (attr)
12516 attr = lookup_attribute ("deprecated", attr);
12517
12518 if (attr)
12519 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12520 else
12521 msg = NULL;
12522
12523 bool w;
12524 if (DECL_P (node))
12525 {
12526 if (msg)
12527 w = warning (OPT_Wdeprecated_declarations,
12528 "%qD is deprecated: %s", node, msg);
12529 else
12530 w = warning (OPT_Wdeprecated_declarations,
12531 "%qD is deprecated", node);
12532 if (w)
12533 inform (DECL_SOURCE_LOCATION (node), "declared here");
12534 }
12535 else if (TYPE_P (node))
12536 {
12537 tree what = NULL_TREE;
12538 tree decl = TYPE_STUB_DECL (node);
12539
12540 if (TYPE_NAME (node))
12541 {
12542 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12543 what = TYPE_NAME (node);
12544 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12545 && DECL_NAME (TYPE_NAME (node)))
12546 what = DECL_NAME (TYPE_NAME (node));
12547 }
12548
12549 if (decl)
12550 {
12551 if (what)
12552 {
12553 if (msg)
12554 w = warning (OPT_Wdeprecated_declarations,
12555 "%qE is deprecated: %s", what, msg);
12556 else
12557 w = warning (OPT_Wdeprecated_declarations,
12558 "%qE is deprecated", what);
12559 }
12560 else
12561 {
12562 if (msg)
12563 w = warning (OPT_Wdeprecated_declarations,
12564 "type is deprecated: %s", msg);
12565 else
12566 w = warning (OPT_Wdeprecated_declarations,
12567 "type is deprecated");
12568 }
12569 if (w)
12570 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12571 }
12572 else
12573 {
12574 if (what)
12575 {
12576 if (msg)
12577 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12578 what, msg);
12579 else
12580 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12581 }
12582 else
12583 {
12584 if (msg)
12585 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12586 msg);
12587 else
12588 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12589 }
12590 }
12591 }
12592 }
12593
12594 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12595 somewhere in it. */
12596
12597 bool
12598 contains_bitfld_component_ref_p (const_tree ref)
12599 {
12600 while (handled_component_p (ref))
12601 {
12602 if (TREE_CODE (ref) == COMPONENT_REF
12603 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12604 return true;
12605 ref = TREE_OPERAND (ref, 0);
12606 }
12607
12608 return false;
12609 }
12610
12611 /* Try to determine whether a TRY_CATCH expression can fall through.
12612 This is a subroutine of block_may_fallthru. */
12613
12614 static bool
12615 try_catch_may_fallthru (const_tree stmt)
12616 {
12617 tree_stmt_iterator i;
12618
12619 /* If the TRY block can fall through, the whole TRY_CATCH can
12620 fall through. */
12621 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12622 return true;
12623
12624 i = tsi_start (TREE_OPERAND (stmt, 1));
12625 switch (TREE_CODE (tsi_stmt (i)))
12626 {
12627 case CATCH_EXPR:
12628 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12629 catch expression and a body. The whole TRY_CATCH may fall
12630 through iff any of the catch bodies falls through. */
12631 for (; !tsi_end_p (i); tsi_next (&i))
12632 {
12633 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12634 return true;
12635 }
12636 return false;
12637
12638 case EH_FILTER_EXPR:
12639 /* The exception filter expression only matters if there is an
12640 exception. If the exception does not match EH_FILTER_TYPES,
12641 we will execute EH_FILTER_FAILURE, and we will fall through
12642 if that falls through. If the exception does match
12643 EH_FILTER_TYPES, the stack unwinder will continue up the
12644 stack, so we will not fall through. We don't know whether we
12645 will throw an exception which matches EH_FILTER_TYPES or not,
12646 so we just ignore EH_FILTER_TYPES and assume that we might
12647 throw an exception which doesn't match. */
12648 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12649
12650 default:
12651 /* This case represents statements to be executed when an
12652 exception occurs. Those statements are implicitly followed
12653 by a RESX statement to resume execution after the exception.
12654 So in this case the TRY_CATCH never falls through. */
12655 return false;
12656 }
12657 }
12658
12659 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12660 need not be 100% accurate; simply be conservative and return true if we
12661 don't know. This is used only to avoid stupidly generating extra code.
12662 If we're wrong, we'll just delete the extra code later. */
12663
12664 bool
12665 block_may_fallthru (const_tree block)
12666 {
12667 /* This CONST_CAST is okay because expr_last returns its argument
12668 unmodified and we assign it to a const_tree. */
12669 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12670
12671 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12672 {
12673 case GOTO_EXPR:
12674 case RETURN_EXPR:
12675 /* Easy cases. If the last statement of the block implies
12676 control transfer, then we can't fall through. */
12677 return false;
12678
12679 case SWITCH_EXPR:
12680 /* If SWITCH_LABELS is set, this is lowered, and represents a
12681 branch to a selected label and hence cannot fall through.
12682 Otherwise SWITCH_BODY is set, and the switch can fall
12683 through. */
12684 return SWITCH_LABELS (stmt) == NULL_TREE;
12685
12686 case COND_EXPR:
12687 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12688 return true;
12689 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12690
12691 case BIND_EXPR:
12692 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12693
12694 case TRY_CATCH_EXPR:
12695 return try_catch_may_fallthru (stmt);
12696
12697 case TRY_FINALLY_EXPR:
12698 /* The finally clause is always executed after the try clause,
12699 so if it does not fall through, then the try-finally will not
12700 fall through. Otherwise, if the try clause does not fall
12701 through, then when the finally clause falls through it will
12702 resume execution wherever the try clause was going. So the
12703 whole try-finally will only fall through if both the try
12704 clause and the finally clause fall through. */
12705 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12706 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12707
12708 case MODIFY_EXPR:
12709 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12710 stmt = TREE_OPERAND (stmt, 1);
12711 else
12712 return true;
12713 /* FALLTHRU */
12714
12715 case CALL_EXPR:
12716 /* Functions that do not return do not fall through. */
12717 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12718
12719 case CLEANUP_POINT_EXPR:
12720 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12721
12722 case TARGET_EXPR:
12723 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12724
12725 case ERROR_MARK:
12726 return true;
12727
12728 default:
12729 return lang_hooks.block_may_fallthru (stmt);
12730 }
12731 }
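/* Examples, illustrative only: a block ending in a RETURN_EXPR or
   GOTO_EXPR cannot fall through; one ending in a call to a noreturn
   function such as abort cannot either, because ECF_NORETURN is set on
   the call; an empty block or one ending in a plain MODIFY_EXPR is
   conservatively assumed to fall through.  */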
12732
12733 /* True if we are using EH to handle cleanups. */
12734 static bool using_eh_for_cleanups_flag = false;
12735
12736 /* This routine is called from front ends to indicate eh should be used for
12737 cleanups. */
12738 void
12739 using_eh_for_cleanups (void)
12740 {
12741 using_eh_for_cleanups_flag = true;
12742 }
12743
12744 /* Query whether EH is used for cleanups. */
12745 bool
12746 using_eh_for_cleanups_p (void)
12747 {
12748 return using_eh_for_cleanups_flag;
12749 }
12750
12751 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12752 const char *
12753 get_tree_code_name (enum tree_code code)
12754 {
12755 const char *invalid = "<invalid tree code>";
12756
12757 if (code >= MAX_TREE_CODES)
12758 return invalid;
12759
12760 return tree_code_name[code];
12761 }
12762
12763 /* Drops the TREE_OVERFLOW flag from T. */
12764
12765 tree
12766 drop_tree_overflow (tree t)
12767 {
12768 gcc_checking_assert (TREE_OVERFLOW (t));
12769
12770 /* For tree codes with a sharing machinery re-build the result. */
12771 if (TREE_CODE (t) == INTEGER_CST)
12772 return wide_int_to_tree (TREE_TYPE (t), t);
12773
12774 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12775 and drop the flag. */
12776 t = copy_node (t);
12777 TREE_OVERFLOW (t) = 0;
12778 return t;
12779 }
12780
12781 /* Given a memory reference expression T, return its base address.
12782 The base address of a memory reference expression is the main
12783 object being referenced. For instance, the base address for
12784 'array[i].fld[j]' is 'array'. You can think of this as stripping
12785 away the offset part from a memory address.
12786
12787 This function calls handled_component_p to strip away all the inner
12788 parts of the memory reference until it reaches the base object. */
12789
12790 tree
12791 get_base_address (tree t)
12792 {
12793 while (handled_component_p (t))
12794 t = TREE_OPERAND (t, 0);
12795
12796 if ((TREE_CODE (t) == MEM_REF
12797 || TREE_CODE (t) == TARGET_MEM_REF)
12798 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12799 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12800
12801 /* ??? Either the alias oracle or all callers need to properly deal
12802 with WITH_SIZE_EXPRs before we can look through those. */
12803 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12804 return NULL_TREE;
12805
12806 return t;
12807 }
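/* Examples, illustrative only: for a.b.c[i] the base address is the
   VAR_DECL a; for MEM_REF [&x, 16] the ADDR_EXPR is looked through and
   the base is x; a WITH_SIZE_EXPR wrapper makes the function give up and
   return NULL_TREE.  */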
12808
12809 /* Return a tree of sizetype representing the size, in bytes, of the element
12810 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12811
12812 tree
12813 array_ref_element_size (tree exp)
12814 {
12815 tree aligned_size = TREE_OPERAND (exp, 3);
12816 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12817 location_t loc = EXPR_LOCATION (exp);
12818
12819 /* If a size was specified in the ARRAY_REF, it's the size measured
12820 in alignment units of the element type. So multiply by that value. */
12821 if (aligned_size)
12822 {
12823 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12824 sizetype from another type of the same width and signedness. */
12825 if (TREE_TYPE (aligned_size) != sizetype)
12826 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12827 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12828 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12829 }
12830
12831 /* Otherwise, take the size from that of the element type. Substitute
12832 any PLACEHOLDER_EXPR that we have. */
12833 else
12834 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12835 }
12836
12837 /* Return a tree representing the lower bound of the array mentioned in
12838 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12839
12840 tree
12841 array_ref_low_bound (tree exp)
12842 {
12843 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12844
12845 /* If a lower bound is specified in EXP, use it. */
12846 if (TREE_OPERAND (exp, 2))
12847 return TREE_OPERAND (exp, 2);
12848
12849 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12850 substituting for a PLACEHOLDER_EXPR as needed. */
12851 if (domain_type && TYPE_MIN_VALUE (domain_type))
12852 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12853
12854 /* Otherwise, return a zero of the appropriate type. */
12855 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12856 }
12857
12858 /* Return a tree representing the upper bound of the array mentioned in
12859 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12860
12861 tree
12862 array_ref_up_bound (tree exp)
12863 {
12864 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12865
12866 /* If there is a domain type and it has an upper bound, use it, substituting
12867 for a PLACEHOLDER_EXPR as needed. */
12868 if (domain_type && TYPE_MAX_VALUE (domain_type))
12869 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12870
12871 /* Otherwise fail. */
12872 return NULL_TREE;
12873 }
12874
12875 /* Returns true if REF is an array reference to an array at the end of
12876 a structure. If this is the case, the array may be allocated larger
12877 than its upper bound implies. */
12878
12879 bool
12880 array_at_struct_end_p (tree ref)
12881 {
12882 if (TREE_CODE (ref) != ARRAY_REF
12883 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12884 return false;
12885
12886 while (handled_component_p (ref))
12887 {
12888 /* If the reference chain contains a component reference to a
12889 non-union type and there follows another field the reference
12890 is not at the end of a structure. */
12891 if (TREE_CODE (ref) == COMPONENT_REF
12892 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12893 {
12894 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12895 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12896 nextf = DECL_CHAIN (nextf);
12897 if (nextf)
12898 return false;
12899 }
12900
12901 ref = TREE_OPERAND (ref, 0);
12902 }
12903
12904 /* If the reference is based on a declared entity, the size of the array
12905 is constrained by its given domain. */
12906 if (DECL_P (ref))
12907 return false;
12908
12909 return true;
12910 }
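/* Example, illustrative only: given
     struct s { int n; int tail[1]; } *p;
   the reference p->tail[i] satisfies array_at_struct_end_p, because TAIL
   is the last field and the base is not a declared object, so the array
   may have been allocated larger than its bound; for a declared object
   'struct s x', x.tail[i] does not, since the size of X fixes the
   array's extent.  */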
12911
12912 /* Return a tree representing the offset, in bytes, of the field referenced
12913 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12914
12915 tree
12916 component_ref_field_offset (tree exp)
12917 {
12918 tree aligned_offset = TREE_OPERAND (exp, 2);
12919 tree field = TREE_OPERAND (exp, 1);
12920 location_t loc = EXPR_LOCATION (exp);
12921
12922 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12923 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12924 value. */
12925 if (aligned_offset)
12926 {
12927 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12928 sizetype from another type of the same width and signedness. */
12929 if (TREE_TYPE (aligned_offset) != sizetype)
12930 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12931 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12932 size_int (DECL_OFFSET_ALIGN (field)
12933 / BITS_PER_UNIT));
12934 }
12935
12936 /* Otherwise, take the offset from that of the field. Substitute
12937 any PLACEHOLDER_EXPR that we have. */
12938 else
12939 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12940 }
12941
12942 /* Return the machine mode of T. For vectors, returns the mode of the
12943 inner type. The main use case is to feed the result to HONOR_NANS,
12944 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12945
12946 machine_mode
12947 element_mode (const_tree t)
12948 {
12949 if (!TYPE_P (t))
12950 t = TREE_TYPE (t);
12951 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12952 t = TREE_TYPE (t);
12953 return TYPE_MODE (t);
12954 }
12955
12956
12957 /* Verify that basic properties of T match TV and thus T can be a variant of
12958 TV. TV should be the more specific variant (i.e. the main variant). */
12959
12960 static bool
12961 verify_type_variant (const_tree t, tree tv)
12962 {
12963 /* Type variant can differ by:
12964
12965 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12966 ENCODE_QUAL_ADDR_SPACE.
12967 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12968 in this case some values may not be set in the variant types
12969 (see TYPE_COMPLETE_P checks).
12970 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12971 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12972 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12973 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12974 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12975 this is necessary to make it possible to merge types from different TUs
12976 - arrays, pointers and references may have TREE_TYPE that is a variant
12977 of TREE_TYPE of their main variants.
12978 - aggregates may have new TYPE_FIELDS list that list variants of
12979 the main variant TYPE_FIELDS.
12980 - vector types may differ by TYPE_VECTOR_OPAQUE
12981 - TYPE_METHODS is always NULL for variant types and maintained for
12982 the main variant only.
12983 */
12984
12985 /* Convenience macro for matching individual fields. */
12986 #define verify_variant_match(flag) \
12987 do { \
12988 if (flag (tv) != flag (t)) \
12989 { \
12990 error ("type variant differs by " #flag "."); \
12991 debug_tree (tv); \
12992 return false; \
12993 } \
12994 } while (false)
12995
12996 /* tree_base checks. */
12997
12998 verify_variant_match (TREE_CODE);
12999 /* FIXME: Ada builds non-artificial variants of artificial types. */
13000 if (TYPE_ARTIFICIAL (tv) && 0)
13001 verify_variant_match (TYPE_ARTIFICIAL);
13002 if (POINTER_TYPE_P (tv))
13003 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13004 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13005 verify_variant_match (TYPE_UNSIGNED);
13006 verify_variant_match (TYPE_ALIGN_OK);
13007 verify_variant_match (TYPE_PACKED);
13008 if (TREE_CODE (t) == REFERENCE_TYPE)
13009 verify_variant_match (TYPE_REF_IS_RVALUE);
13010 if (AGGREGATE_TYPE_P (t))
13011 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13012 else
13013 verify_variant_match (TYPE_SATURATING);
13014 /* FIXME: This check triggers during the libstdc++ build. */
13015 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13016 verify_variant_match (TYPE_FINAL_P);
13017
13018 /* tree_type_common checks. */
13019
13020 if (COMPLETE_TYPE_P (t))
13021 {
13022 verify_variant_match (TYPE_SIZE);
13023 verify_variant_match (TYPE_MODE);
13024 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
13025 /* FIXME: ideally we should compare pointer equality, but the Java FE
13026 produces variants where the size is an INTEGER_CST of a different type
13027 (int wrt size_type) during the libjava build. */
13028 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
13029 {
13030 error ("type variant has different TYPE_SIZE_UNIT");
13031 debug_tree (tv);
13032 error ("type variant's TYPE_SIZE_UNIT");
13033 debug_tree (TYPE_SIZE_UNIT (tv));
13034 error ("type's TYPE_SIZE_UNIT");
13035 debug_tree (TYPE_SIZE_UNIT (t));
13036 return false;
13037 }
13038 }
13039 verify_variant_match (TYPE_PRECISION);
13040 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13041 if (RECORD_OR_UNION_TYPE_P (t))
13042 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13043 else if (TREE_CODE (t) == ARRAY_TYPE)
13044 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13045 /* During LTO we merge variant lists from different translation units
13046 that may differ by TYPE_CONTEXT, which in turn may point
13047 to TRANSLATION_UNIT_DECL.
13048 Ada also builds variants of types with different TYPE_CONTEXT. */
13049 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13050 verify_variant_match (TYPE_CONTEXT);
13051 verify_variant_match (TYPE_STRING_FLAG);
13052 if (TYPE_ALIAS_SET_KNOWN_P (t))
13053 {
13054 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13055 debug_tree (tv);
13056 return false;
13057 }
13058
13059 /* tree_type_non_common checks. */
13060
13061 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13062 and dangles the pointer from time to time. */
13063 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13064 && (in_lto_p || !TYPE_VFIELD (tv)
13065 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13066 {
13067 error ("type variant has different TYPE_VFIELD");
13068 debug_tree (tv);
13069 return false;
13070 }
13071 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13072 || TREE_CODE (t) == INTEGER_TYPE
13073 || TREE_CODE (t) == BOOLEAN_TYPE
13074 || TREE_CODE (t) == REAL_TYPE
13075 || TREE_CODE (t) == FIXED_POINT_TYPE)
13076 {
13077 verify_variant_match (TYPE_MAX_VALUE);
13078 verify_variant_match (TYPE_MIN_VALUE);
13079 }
13080 if (TREE_CODE (t) == METHOD_TYPE)
13081 verify_variant_match (TYPE_METHOD_BASETYPE);
13082 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13083 {
13084 error ("type variant has TYPE_METHODS");
13085 debug_tree (tv);
13086 return false;
13087 }
13088 if (TREE_CODE (t) == OFFSET_TYPE)
13089 verify_variant_match (TYPE_OFFSET_BASETYPE);
13090 if (TREE_CODE (t) == ARRAY_TYPE)
13091 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13092 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13093 or even in the type's main variant. This is needed to make bootstrap pass,
13094 and the bug seems new in GCC 5.
13095 The C++ FE should be updated to make this consistent and we should check
13096 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and that otherwise it
13097 matches the main variant.
13098
13099 Also disable the check for Java for now because of a parser hack that builds
13100 first a dummy BINFO and then sometimes replaces it by the real BINFO in some
13101 of the copies. */
13102 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13103 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13104 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13105 Since there is no cheap way to tell a C++ type from a Java type w/o LTO,
13106 do the checking at LTO time only. */
13107 && (in_lto_p && odr_type_p (t)))
13108 {
13109 error ("type variant has different TYPE_BINFO");
13110 debug_tree (tv);
13111 error ("type variant's TYPE_BINFO");
13112 debug_tree (TYPE_BINFO (tv));
13113 error ("type's TYPE_BINFO");
13114 debug_tree (TYPE_BINFO (t));
13115 return false;
13116 }
13117
13118 /* Check various uses of TYPE_VALUES_RAW. */
13119 if (TREE_CODE (t) == ENUMERAL_TYPE)
13120 verify_variant_match (TYPE_VALUES);
13121 else if (TREE_CODE (t) == ARRAY_TYPE)
13122 verify_variant_match (TYPE_DOMAIN);
13123 /* Permit incomplete variants of complete type. While FEs may complete
13124 all variants, this does not happen for C++ templates in all cases. */
13125 else if (RECORD_OR_UNION_TYPE_P (t)
13126 && COMPLETE_TYPE_P (t)
13127 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13128 {
13129 tree f1, f2;
13130
13131 /* Fortran builds qualified variants as new records with items of
13132 qualified type. Verify that they look the same. */
13133 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13134 f1 && f2;
13135 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13136 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13137 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13138 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13139 /* FIXME: gfc_nonrestricted_type builds all types as variants
13140 with the exception of pointer types. It deeply copies the type,
13141 which means that we may end up with a variant type
13142 referring to a non-variant pointer. We may change it to
13143 produce types as variants, too, like
13144 objc_get_protocol_qualified_type does. */
13145 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13146 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13147 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13148 break;
13149 if (f1 || f2)
13150 {
13151 error ("type variant has different TYPE_FIELDS");
13152 debug_tree (tv);
13153 error ("first mismatch is field");
13154 debug_tree (f1);
13155 error ("and field");
13156 debug_tree (f2);
13157 return false;
13158 }
13159 }
13160 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13161 verify_variant_match (TYPE_ARG_TYPES);
13162 /* For C++ the qualified variant of an array type is really an array type
13163 of the qualified TREE_TYPE.
13164 ObjC builds variants of pointers where the pointed-to type is a variant,
13165 too, in objc_get_protocol_qualified_type. */
13166 if (TREE_TYPE (t) != TREE_TYPE (tv)
13167 && ((TREE_CODE (t) != ARRAY_TYPE
13168 && !POINTER_TYPE_P (t))
13169 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13170 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13171 {
13172 error ("type variant has different TREE_TYPE");
13173 debug_tree (tv);
13174 error ("type variant's TREE_TYPE");
13175 debug_tree (TREE_TYPE (tv));
13176 error ("type's TREE_TYPE");
13177 debug_tree (TREE_TYPE (t));
13178 return false;
13179 }
13180 if (type_with_alias_set_p (t)
13181 && !gimple_canonical_types_compatible_p (t, tv, false))
13182 {
13183 error ("type is not compatible with its vairant");
13184 debug_tree (tv);
13185 error ("type variant's TREE_TYPE");
13186 debug_tree (TREE_TYPE (tv));
13187 error ("type's TREE_TYPE");
13188 debug_tree (TREE_TYPE (t));
13189 return false;
13190 }
13191 return true;
13192 #undef verify_variant_match
13193 }
13194
13195
13196 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13197 the middle-end types_compatible_p function. It needs to avoid
13198 claiming types are different for types that should be treated
13199 the same with respect to TBAA. Canonical types are also used
13200 for IL consistency checks via the useless_type_conversion_p
13201 predicate which does not handle all type kinds itself but falls
13202 back to pointer-comparison of TYPE_CANONICAL for aggregates
13203 for example. */
13204
13205 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13206 type calculation because we need to allow inter-operability between signed
13207 and unsigned variants. */
13208
13209 bool
13210 type_with_interoperable_signedness (const_tree type)
13211 {
13212 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13213 signed char and unsigned char. Similarly the Fortran FE builds
13214 C_SIZE_T as a signed type, while C defines it as unsigned. */
13215
13216 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13217 == INTEGER_TYPE
13218 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13219 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13220 }
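/* For example (illustrative only), a Fortran C_SIGNED_CHAR type and the C
   "unsigned char" type differ in TYPE_UNSIGNED but share the precision of
   signed_char_type_node, so a caller comparing types for canonical merging
   would accept the signedness mismatch:

     if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
         && !type_with_interoperable_signedness (t1))
       return false;

   which is how gimple_canonical_types_compatible_p below uses it.  */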
13221
13222 /* Return true iff T1 and T2 are structurally identical as far as
13223 TBAA is concerned.
13224 This function is used both by lto.c canonical type merging and by the
13225 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
13226 types that have TYPE_CANONICAL defined and assume them equivalent. This is
13227 useful only for LTO because only in those cases TYPE_CANONICAL equivalence
13228 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13229
13230 bool
13231 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13232 bool trust_type_canonical)
13233 {
13234 /* Type variants should be the same as the main variant. When not doing
13235 sanity checking to verify this fact, go to main variants and save work. */
13236 if (trust_type_canonical)
13237 {
13238 t1 = TYPE_MAIN_VARIANT (t1);
13239 t2 = TYPE_MAIN_VARIANT (t2);
13240 }
13241
13242 /* Check first for the obvious case of pointer identity. */
13243 if (t1 == t2)
13244 return true;
13245
13246 /* Check that we have two types to compare. */
13247 if (t1 == NULL_TREE || t2 == NULL_TREE)
13248 return false;
13249
13250 /* We consider complete types always compatible with incomplete types.
13251 This does not make sense for canonical type calculation and thus we
13252 need to ensure that we are never called on such types.
13253
13254 FIXME: For more correctness the function probably should have three modes
13255 1) a mode assuming that types are complete, matching their structure
13256 2) a mode allowing incomplete types but producing equivalence classes
13257 and thus ignoring all info from complete types
13258 3) a mode allowing incomplete types to match complete ones but checking
13259 compatibility between complete types.
13260
13261 1 and 2 can be used for canonical type calculation. 3 is the real
13262 definition of type compatibility that can be used e.g. for warnings during
13263 declaration merging. */
13264
13265 gcc_assert (!trust_type_canonical
13266 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13267 /* If the types have been previously registered and found equal
13268 they still are. */
13269
13270 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13271 && trust_type_canonical)
13272 {
13273 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13274 they are always NULL, but they are set to non-NULL for types
13275 constructed by build_pointer_type and variants. In this case the
13276 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13277 all pointers are considered equal). Be sure not to return false
13278 negatives. */
13279 gcc_checking_assert (canonical_type_used_p (t1)
13280 && canonical_type_used_p (t2));
13281 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13282 }
13283
13284 /* Can't be the same type if the types don't have the same code. */
13285 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13286 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13287 return false;
13288
13289 /* Qualifiers do not matter for canonical type comparison purposes. */
13290
13291 /* Void types and nullptr types are always the same. */
13292 if (TREE_CODE (t1) == VOID_TYPE
13293 || TREE_CODE (t1) == NULLPTR_TYPE)
13294 return true;
13295
13296 /* Can't be the same type if they have different mode. */
13297 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13298 return false;
13299
13300 /* Non-aggregate types can be handled cheaply. */
13301 if (INTEGRAL_TYPE_P (t1)
13302 || SCALAR_FLOAT_TYPE_P (t1)
13303 || FIXED_POINT_TYPE_P (t1)
13304 || TREE_CODE (t1) == VECTOR_TYPE
13305 || TREE_CODE (t1) == COMPLEX_TYPE
13306 || TREE_CODE (t1) == OFFSET_TYPE
13307 || POINTER_TYPE_P (t1))
13308 {
13309 /* Can't be the same type if they have different precision. */
13310 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13311 return false;
13312
13313 /* In some cases the signed and unsigned types are required to be
13314 inter-operable. */
13315 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13316 && !type_with_interoperable_signedness (t1))
13317 return false;
13318
13319 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13320 interoperable with "signed char". Unless all frontends are revisited
13321 to agree on these types, we must ignore the flag completely. */
13322
13323 /* The Fortran standard defines the C_PTR type to be compatible with every
13324 C pointer. For this reason we need to glob all pointers into one.
13325 Still, pointers in different address spaces are not compatible. */
13326 if (POINTER_TYPE_P (t1))
13327 {
13328 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13329 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13330 return false;
13331 }
13332
13333 /* Tail-recurse to components. */
13334 if (TREE_CODE (t1) == VECTOR_TYPE
13335 || TREE_CODE (t1) == COMPLEX_TYPE)
13336 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13337 TREE_TYPE (t2),
13338 trust_type_canonical);
13339
13340 return true;
13341 }
13342
13343 /* Do type-specific comparisons. */
13344 switch (TREE_CODE (t1))
13345 {
13346 case ARRAY_TYPE:
13347 /* Array types are the same if the element types are the same and
13348 the number of elements is the same. */
13349 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13350 trust_type_canonical)
13351 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13352 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13353 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13354 return false;
13355 else
13356 {
13357 tree i1 = TYPE_DOMAIN (t1);
13358 tree i2 = TYPE_DOMAIN (t2);
13359
13360 /* For an incomplete external array, the type domain can be
13361 NULL_TREE. Check this condition also. */
13362 if (i1 == NULL_TREE && i2 == NULL_TREE)
13363 return true;
13364 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13365 return false;
13366 else
13367 {
13368 tree min1 = TYPE_MIN_VALUE (i1);
13369 tree min2 = TYPE_MIN_VALUE (i2);
13370 tree max1 = TYPE_MAX_VALUE (i1);
13371 tree max2 = TYPE_MAX_VALUE (i2);
13372
13373 /* The minimum/maximum values have to be the same. */
13374 if ((min1 == min2
13375 || (min1 && min2
13376 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13377 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13378 || operand_equal_p (min1, min2, 0))))
13379 && (max1 == max2
13380 || (max1 && max2
13381 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13382 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13383 || operand_equal_p (max1, max2, 0)))))
13384 return true;
13385 else
13386 return false;
13387 }
13388 }
13389
13390 case METHOD_TYPE:
13391 case FUNCTION_TYPE:
13392 /* Function types are the same if the return type and argument types
13393 are the same. */
13394 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13395 trust_type_canonical))
13396 return false;
13397
13398 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13399 return true;
13400 else
13401 {
13402 tree parms1, parms2;
13403
13404 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13405 parms1 && parms2;
13406 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13407 {
13408 if (!gimple_canonical_types_compatible_p
13409 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13410 trust_type_canonical))
13411 return false;
13412 }
13413
13414 if (parms1 || parms2)
13415 return false;
13416
13417 return true;
13418 }
13419
13420 case RECORD_TYPE:
13421 case UNION_TYPE:
13422 case QUAL_UNION_TYPE:
13423 {
13424 tree f1, f2;
13425
13426 /* Don't try to compare variants of an incomplete type, before
13427 TYPE_FIELDS has been copied around. */
13428 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13429 return true;
13430
13431
13432 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13433 return false;
13434
13435 /* For aggregate types, all the fields must be the same. */
13436 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13437 f1 || f2;
13438 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13439 {
13440 /* Skip non-fields. */
13441 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13442 f1 = TREE_CHAIN (f1);
13443 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13444 f2 = TREE_CHAIN (f2);
13445 if (!f1 || !f2)
13446 break;
13447 /* The fields must have the same name, offset and type. */
13448 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13449 || !gimple_compare_field_offset (f1, f2)
13450 || !gimple_canonical_types_compatible_p
13451 (TREE_TYPE (f1), TREE_TYPE (f2),
13452 trust_type_canonical))
13453 return false;
13454 }
13455
13456 /* If one aggregate has more fields than the other, they
13457 are not the same. */
13458 if (f1 || f2)
13459 return false;
13460
13461 return true;
13462 }
13463
13464 default:
13465 /* Consider all types with language specific trees in them mutually
13466 compatible. This is executed only from verify_type and false
13467 positives can be tolerated. */
13468 gcc_assert (!in_lto_p);
13469 return true;
13470 }
13471 }
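/* An illustrative (hypothetical) use mirroring what the verifier below does:

     if (TYPE_CANONICAL (t)
         && !gimple_canonical_types_compatible_p (t, TYPE_CANONICAL (t), false))
       ;  report that TYPE_CANONICAL is not TBAA-compatible with T

   Passing false for TRUST_TYPE_CANONICAL forces a structural walk instead of
   short-circuiting through the TYPE_CANONICAL pointers themselves.  */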
13472
13473 /* Verify type T. */
13474
13475 void
13476 verify_type (const_tree t)
13477 {
13478 bool error_found = false;
13479 tree mv = TYPE_MAIN_VARIANT (t);
13480 if (!mv)
13481 {
13482 error ("Main variant is not defined");
13483 error_found = true;
13484 }
13485 else if (mv != TYPE_MAIN_VARIANT (mv))
13486 {
13487 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13488 debug_tree (mv);
13489 error_found = true;
13490 }
13491 else if (t != mv && !verify_type_variant (t, mv))
13492 error_found = true;
13493
13494 tree ct = TYPE_CANONICAL (t);
13495 if (!ct)
13496 ;
13497 else if (TYPE_CANONICAL (t) != ct)
13498 {
13499 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13500 debug_tree (ct);
13501 error_found = true;
13502 }
13503 /* Method and function types cannot be used to address memory and thus
13504 TYPE_CANONICAL really matters only for determining useless conversions.
13505
13506 FIXME: The C++ FE produces declarations of builtin functions that are not
13507 compatible with their main variants. */
13508 else if (TREE_CODE (t) == FUNCTION_TYPE)
13509 ;
13510 else if (t != ct
13511 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13512 with variably sized arrays because their sizes are possibly
13513 gimplified to different variables. */
13514 && !variably_modified_type_p (ct, NULL)
13515 && !gimple_canonical_types_compatible_p (t, ct, false))
13516 {
13517 error ("TYPE_CANONICAL is not compatible");
13518 debug_tree (ct);
13519 error_found = true;
13520 }
13521
13522 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13523 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13524 {
13525 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13526 debug_tree (ct);
13527 error_found = true;
13528 }
13529 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13530 {
13531 error ("TYPE_CANONICAL of main variant is not main variant");
13532 debug_tree (ct);
13533 debug_tree (TYPE_MAIN_VARIANT (ct));
13534 error_found = true;
13535 }
13536
13537
13538 /* Check various uses of TYPE_MINVAL. */
13539 if (RECORD_OR_UNION_TYPE_P (t))
13540 {
13541 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13542 and dangles the pointer from time to time. */
13543 if (TYPE_VFIELD (t)
13544 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13545 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13546 {
13547 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13548 debug_tree (TYPE_VFIELD (t));
13549 error_found = true;
13550 }
13551 }
13552 else if (TREE_CODE (t) == POINTER_TYPE)
13553 {
13554 if (TYPE_NEXT_PTR_TO (t)
13555 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13556 {
13557 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13558 debug_tree (TYPE_NEXT_PTR_TO (t));
13559 error_found = true;
13560 }
13561 }
13562 else if (TREE_CODE (t) == REFERENCE_TYPE)
13563 {
13564 if (TYPE_NEXT_REF_TO (t)
13565 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13566 {
13567 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13568 debug_tree (TYPE_NEXT_REF_TO (t));
13569 error_found = true;
13570 }
13571 }
13572 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13573 || TREE_CODE (t) == FIXED_POINT_TYPE)
13574 {
13575 /* FIXME: The following check should pass:
13576 useless_type_conversion_p (const_cast <tree> (t),
13577 TREE_TYPE (TYPE_MIN_VALUE (t)))
13578 but does not for C sizetypes in LTO. */
13579 }
13580 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13581 else if (TYPE_MINVAL (t)
13582 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13583 || in_lto_p))
13584 {
13585 error ("TYPE_MINVAL non-NULL");
13586 debug_tree (TYPE_MINVAL (t));
13587 error_found = true;
13588 }
13589
13590 /* Check various uses of TYPE_MAXVAL. */
13591 if (RECORD_OR_UNION_TYPE_P (t))
13592 {
13593 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13594 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13595 && TYPE_METHODS (t) != error_mark_node)
13596 {
13597 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13598 debug_tree (TYPE_METHODS (t));
13599 error_found = true;
13600 }
13601 }
13602 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13603 {
13604 if (TYPE_METHOD_BASETYPE (t)
13605 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13606 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13607 {
13608 error ("TYPE_METHOD_BASETYPE is not record nor union");
13609 debug_tree (TYPE_METHOD_BASETYPE (t));
13610 error_found = true;
13611 }
13612 }
13613 else if (TREE_CODE (t) == OFFSET_TYPE)
13614 {
13615 if (TYPE_OFFSET_BASETYPE (t)
13616 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13617 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13618 {
13619 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13620 debug_tree (TYPE_OFFSET_BASETYPE (t));
13621 error_found = true;
13622 }
13623 }
13624 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13625 || TREE_CODE (t) == FIXED_POINT_TYPE)
13626 {
13627 /* FIXME: The following check should pass:
13628 useless_type_conversion_p (const_cast <tree> (t),
13629 TREE_TYPE (TYPE_MAX_VALUE (t)))
13630 but does not for C sizetypes in LTO. */
13631 }
13632 else if (TREE_CODE (t) == ARRAY_TYPE)
13633 {
13634 if (TYPE_ARRAY_MAX_SIZE (t)
13635 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13636 {
13637 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13638 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13639 error_found = true;
13640 }
13641 }
13642 else if (TYPE_MAXVAL (t))
13643 {
13644 error ("TYPE_MAXVAL non-NULL");
13645 debug_tree (TYPE_MAXVAL (t));
13646 error_found = true;
13647 }
13648
13649 /* Check various uses of TYPE_BINFO. */
13650 if (RECORD_OR_UNION_TYPE_P (t))
13651 {
13652 if (!TYPE_BINFO (t))
13653 ;
13654 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13655 {
13656 error ("TYPE_BINFO is not TREE_BINFO");
13657 debug_tree (TYPE_BINFO (t));
13658 error_found = true;
13659 }
13660 /* FIXME: Java builds invalid empty binfos that do not have
13661 TREE_TYPE set. */
13662 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13663 {
13664 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13665 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13666 error_found = true;
13667 }
13668 }
13669 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13670 {
13671 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13672 debug_tree (TYPE_LANG_SLOT_1 (t));
13673 error_found = true;
13674 }
13675
13676 /* Check various uses of TYPE_VALUES_RAW. */
13677 if (TREE_CODE (t) == ENUMERAL_TYPE)
13678 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13679 {
13680 tree value = TREE_VALUE (l);
13681 tree name = TREE_PURPOSE (l);
13682
13683 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
13684 CONST_DECLs of ENUMERAL_TYPE. */
13685 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13686 {
13687 error ("Enum value is not CONST_DECL or INTEGER_CST");
13688 debug_tree (value);
13689 debug_tree (name);
13690 error_found = true;
13691 }
13692 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13693 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13694 {
13695 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13696 debug_tree (value);
13697 debug_tree (name);
13698 error_found = true;
13699 }
13700 if (TREE_CODE (name) != IDENTIFIER_NODE)
13701 {
13702 error ("Enum value name is not IDENTIFIER_NODE");
13703 debug_tree (value);
13704 debug_tree (name);
13705 error_found = true;
13706 }
13707 }
13708 else if (TREE_CODE (t) == ARRAY_TYPE)
13709 {
13710 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13711 {
13712 error ("Array TYPE_DOMAIN is not integer type");
13713 debug_tree (TYPE_DOMAIN (t));
13714 error_found = true;
13715 }
13716 }
13717 else if (RECORD_OR_UNION_TYPE_P (t))
13718 {
13719 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13720 {
13721 error ("TYPE_FIELDS defined in incomplete type");
13722 error_found = true;
13723 }
13724 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13725 {
13726 /* TODO: verify properties of decls. */
13727 if (TREE_CODE (fld) == FIELD_DECL)
13728 ;
13729 else if (TREE_CODE (fld) == TYPE_DECL)
13730 ;
13731 else if (TREE_CODE (fld) == CONST_DECL)
13732 ;
13733 else if (TREE_CODE (fld) == VAR_DECL)
13734 ;
13735 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13736 ;
13737 else if (TREE_CODE (fld) == USING_DECL)
13738 ;
13739 else
13740 {
13741 error ("Wrong tree in TYPE_FIELDS list");
13742 debug_tree (fld);
13743 error_found = true;
13744 }
13745 }
13746 }
13747 else if (TREE_CODE (t) == INTEGER_TYPE
13748 || TREE_CODE (t) == BOOLEAN_TYPE
13749 || TREE_CODE (t) == OFFSET_TYPE
13750 || TREE_CODE (t) == REFERENCE_TYPE
13751 || TREE_CODE (t) == NULLPTR_TYPE
13752 || TREE_CODE (t) == POINTER_TYPE)
13753 {
13754 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13755 {
13756 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13757 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13758 error_found = true;
13759 }
13760 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13761 {
13762 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13763 debug_tree (TYPE_CACHED_VALUES (t));
13764 error_found = true;
13765 }
13766 /* Verify just enough of the cache to ensure that no one copied it to a new
13767 type. All copying should go through copy_node, which should clear it. */
13768 else if (TYPE_CACHED_VALUES_P (t))
13769 {
13770 int i;
13771 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13772 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13773 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13774 {
13775 error ("wrong TYPE_CACHED_VALUES entry");
13776 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13777 error_found = true;
13778 break;
13779 }
13780 }
13781 }
13782 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13783 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13784 {
13785 /* C++ FE uses TREE_PURPOSE to store initial values. */
13786 if (TREE_PURPOSE (l) && in_lto_p)
13787 {
13788 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13789 debug_tree (l);
13790 error_found = true;
13791 }
13792 if (!TYPE_P (TREE_VALUE (l)))
13793 {
13794 error ("Wrong entry in TYPE_ARG_TYPES list");
13795 debug_tree (l);
13796 error_found = true;
13797 }
13798 }
13799 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13800 {
13801 error ("TYPE_VALUES_RAW field is non-NULL");
13802 debug_tree (TYPE_VALUES_RAW (t));
13803 error_found = true;
13804 }
13805 if (TREE_CODE (t) != INTEGER_TYPE
13806 && TREE_CODE (t) != BOOLEAN_TYPE
13807 && TREE_CODE (t) != OFFSET_TYPE
13808 && TREE_CODE (t) != REFERENCE_TYPE
13809 && TREE_CODE (t) != NULLPTR_TYPE
13810 && TREE_CODE (t) != POINTER_TYPE
13811 && TYPE_CACHED_VALUES_P (t))
13812 {
13813 error ("TYPE_CACHED_VALUES_P is set while it should not");
13814 error_found = true;
13815 }
13816 if (TYPE_STRING_FLAG (t)
13817 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13818 {
13819 error ("TYPE_STRING_FLAG is set on wrong type code");
13820 error_found = true;
13821 }
13822 else if (TYPE_STRING_FLAG (t))
13823 {
13824 const_tree b = t;
13825 if (TREE_CODE (b) == ARRAY_TYPE)
13826 b = TREE_TYPE (t);
13827 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13828 that is 32 bits wide. */
13829 if (TREE_CODE (b) != INTEGER_TYPE)
13830 {
13831 error ("TYPE_STRING_FLAG is set on type that does not look like "
13832 "char nor array of chars");
13833 error_found = true;
13834 }
13835 }
13836
13837 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13838 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13839 of a type. */
13840 if (TREE_CODE (t) == METHOD_TYPE
13841 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13842 {
13843 error ("TYPE_METHOD_BASETYPE is not main variant");
13844 error_found = true;
13845 }
13846
13847 if (error_found)
13848 {
13849 debug_tree (const_cast <tree> (t));
13850 internal_error ("verify_type failed");
13851 }
13852 }
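/* verify_type is typically invoked from checking code, e.g. (illustrative
   sketch only; the guard and call site are hypothetical):

     if (flag_checking)
       verify_type (t);

   It aborts via internal_error when an inconsistency is found, so there is
   no return value for the caller to test.  */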
13853
13854
13855 /* Return true if ARG is marked with the nonnull attribute in the
13856 current function signature. */
13857
13858 bool
13859 nonnull_arg_p (const_tree arg)
13860 {
13861 tree t, attrs, fntype;
13862 unsigned HOST_WIDE_INT arg_num;
13863
13864 gcc_assert (TREE_CODE (arg) == PARM_DECL
13865 && (POINTER_TYPE_P (TREE_TYPE (arg))
13866 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13867
13868 /* The static chain decl is always non-null. */
13869 if (arg == cfun->static_chain_decl)
13870 return true;
13871
13872 /* The THIS argument of a method is always non-NULL. */
13873 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13874 && arg == DECL_ARGUMENTS (cfun->decl)
13875 && flag_delete_null_pointer_checks)
13876 return true;
13877
13878 /* Values passed by reference are always non-NULL. */
13879 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13880 && flag_delete_null_pointer_checks)
13881 return true;
13882
13883 fntype = TREE_TYPE (cfun->decl);
13884 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13885 {
13886 attrs = lookup_attribute ("nonnull", attrs);
13887
13888 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13889 if (attrs == NULL_TREE)
13890 return false;
13891
13892 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13893 if (TREE_VALUE (attrs) == NULL_TREE)
13894 return true;
13895
13896 /* Get the position number for ARG in the function signature. */
13897 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13898 t;
13899 t = DECL_CHAIN (t), arg_num++)
13900 {
13901 if (t == arg)
13902 break;
13903 }
13904
13905 gcc_assert (t == arg);
13906
13907 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13908 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13909 {
13910 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13911 return true;
13912 }
13913 }
13914
13915 return false;
13916 }
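/* Illustrative use from an optimization pass (hypothetical; NAME and the
   surrounding pass context are not part of this function):

     tree var = SSA_NAME_VAR (name);
     if (var
         && TREE_CODE (var) == PARM_DECL
         && POINTER_TYPE_P (TREE_TYPE (var))
         && nonnull_arg_p (var))
       ;  the pointer NAME is known to be non-NULL on function entry  */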
13917
13918 /* Given location LOC, strip away any packed range information
13919 or ad-hoc information. */
13920
13921 location_t
13922 get_pure_location (location_t loc)
13923 {
13924 if (IS_ADHOC_LOC (loc))
13925 loc
13926 = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;
13927
13928 if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
13929 return loc;
13930
13931 if (loc < RESERVED_LOCATION_COUNT)
13932 return loc;
13933
13934 const line_map *map = linemap_lookup (line_table, loc);
13935 const line_map_ordinary *ordmap = linemap_check_ordinary (map);
13936
13937 return loc & ~((1 << ordmap->m_range_bits) - 1);
13938 }
13939
13940 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13941 information. */
13942
13943 location_t
13944 set_block (location_t loc, tree block)
13945 {
13946 location_t pure_loc = get_pure_location (loc);
13947 source_range src_range = get_range_from_loc (line_table, loc);
13948 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13949 }
13950
13951 location_t
13952 set_source_range (tree expr, location_t start, location_t finish)
13953 {
13954 source_range src_range;
13955 src_range.m_start = start;
13956 src_range.m_finish = finish;
13957 return set_source_range (expr, src_range);
13958 }
13959
13960 location_t
13961 set_source_range (tree expr, source_range src_range)
13962 {
13963 if (!EXPR_P (expr))
13964 return UNKNOWN_LOCATION;
13965
13966 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13967 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13968 pure_loc,
13969 src_range,
13970 NULL);
13971 SET_EXPR_LOCATION (expr, adhoc);
13972 return adhoc;
13973 }
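/* Illustrative caller (hypothetical) recording the full extent of a
   parenthesized expression during parsing:

     set_source_range (expr, open_paren_loc, close_paren_loc);

   This only has an effect when EXPR can carry a location (EXPR_P).  */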
13974
13975 location_t
13976 make_location (location_t caret, location_t start, location_t finish)
13977 {
13978 location_t pure_loc = get_pure_location (caret);
13979 source_range src_range;
13980 src_range.m_start = start;
13981 src_range.m_finish = finish;
13982 location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
13983 pure_loc,
13984 src_range,
13985 NULL);
13986 return combined_loc;
13987 }
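/* A typical (illustrative) use when diagnosing a binary expression, with the
   caret on the operator and the range covering both operands:

     location_t loc = make_location (operator_loc,
                                     EXPR_LOCATION (lhs),
                                     EXPR_LOCATION (rhs));

   OPERATOR_LOC, LHS and RHS are placeholders for whatever the caller has at
   hand; the combined location can then be passed to warning_at or error_at.  */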
13988
13989 /* Return the name of combined function FN, for debugging purposes. */
13990
13991 const char *
13992 combined_fn_name (combined_fn fn)
13993 {
13994 if (builtin_fn_p (fn))
13995 {
13996 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
13997 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
13998 }
13999 else
14000 return internal_fn_name (as_internal_fn (fn));
14001 }
14002
14003 #include "gt-tree.h"