* doc/invoke.texi (Warning Options): Document -Winvalid-memory-model.
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64
/* Tree code classes.  Three parallel tables below are generated by
   expanding all-tree.def three times with different DEFTREECODE
   definitions; they must therefore always stay index-compatible with
   the tree_code enumeration.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

/* Table indexed by tree code giving the tcc_* class of each code.  */
const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
102
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries — the array is
   indexed by the tcc_* enumerators, so the order here must match the
   declaration order of enum tree_code_class.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
120
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  Updated by
   record_node_allocation_statistics and reported at end of
   compilation; only meaningful when GATHER_STATISTICS is enabled.  */

/* Allocation count per tree code.  */
static int tree_code_counts[MAX_TREE_CODES];
/* Allocation count and cumulative size per tree_node_kind.  */
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];

/* Human-readable labels for the statistics report.
   Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
149
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) int next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  Entries pair a type with its precomputed hash
   value.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;	/* Cached hash value of TYPE.  */
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000
168
/* Hasher for the type hash table.  The hash value is the one cached in
   the type_hash entry (types cannot be rehashed once entered).  Being
   a GC *cache*, entries are dropped when the type itself is no longer
   live.  */

struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Keep a cache entry only while its type is still marked by the GC.  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
180
/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.
   int_cst_node is a scratch node reused while probing the table.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
216
/* General tree->tree mapping structure for use in hash tables.
   These caches associate a decl with its DECL_DEBUG_EXPR /
   DECL_VALUE_EXPR / debug args; entries live only while the source
   decl is GC-live.  */


static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

/* Hasher for tree->vec-of-trees maps, keyed by DECL_UID of the source
   decl.  */
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  /* Keep an entry only while the decl it describes is still GC-live.  */
  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
245
/* Forward declarations of file-local helpers.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
static void type_hash_list (const_tree, inchash::hash &);
static void attribute_hash_list (const_tree, inchash::hash &);

/* Well-known trees (error_mark_node, sizetype, ...) indexed by tree_index.  */
tree global_trees[TI_MAX];
/* Standard integer types indexed by integer_type_kind.  */
tree integer_types[itk_none];

/* Availability and type nodes for the target's __intN types.  */
bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* tree_contains_struct[CODE][TS] is nonzero when nodes of code CODE
   contain tree structure TS; filled in by
   initialize_tree_contains_struct.  */
unsigned char tree_contains_struct[MAX_TREE_CODES][64];
260
/* Number of operands for each OpenMP clause.  Indexed by
   omp_clause_code; must stay in the same order as that enumeration
   (and as omp_clause_code_name below).  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  2, /* OMP_CLAUSE__CACHE_  */
  1, /* OMP_CLAUSE_DEVICE_RESIDENT  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  1, /* OMP_CLAUSE__CILK_FOR_COUNT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  1, /* OMP_CLAUSE_TILE  */
};
332
/* Printable names of the OpenMP/OpenACC clauses, indexed by
   omp_clause_code and kept in the same order as omp_clause_num_ops
   above.  Note "to" appears twice: the first entry corresponds to the
   OMP_CLAUSE_TO_DECLARE slot and the second to OMP_CLAUSE_TO (see the
   ordering of the operand-count table) — presumably both print as
   "to" in source syntax.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "uniform",
  "to",
  "link",
  "from",
  "to",
  "map",
  "use_device_ptr",
  "is_device_ptr",
  "_cache_",
  "device_resident",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "_simduid_",
  "_Cilk_for_count_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile"
};
403
404
/* Return the tree node structure used by tree code CODE.

   The mapping is: decls dispatch on the specific decl code, all types
   share TS_TYPE_NON_COMMON, all expression-like classes share TS_EXP,
   and constants/exceptional codes are enumerated individually.  Any
   code not covered is a hard error (gcc_unreachable).  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
	switch (code)
	  {
	  case FIELD_DECL:
	    return TS_FIELD_DECL;
	  case PARM_DECL:
	    return TS_PARM_DECL;
	  case VAR_DECL:
	    return TS_VAR_DECL;
	  case LABEL_DECL:
	    return TS_LABEL_DECL;
	  case RESULT_DECL:
	    return TS_RESULT_DECL;
	  case DEBUG_EXPR_DECL:
	    return TS_DECL_WRTL;
	  case CONST_DECL:
	    return TS_CONST_DECL;
	  case TYPE_DECL:
	    return TS_TYPE_DECL;
	  case FUNCTION_DECL:
	    return TS_FUNCTION_DECL;
	  case TRANSLATION_UNIT_DECL:
	    return TS_TRANSLATION_UNIT_DECL;
	  default:
	    /* Language-specific decl codes fall back to the generic
	       non-common decl structure.  */
	    return TS_DECL_NON_COMMON;
	  }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case VOID_CST:		return TS_TYPED;
    case INTEGER_CST:		return TS_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case FIXED_CST:		return TS_FIXED_CST;
    case COMPLEX_CST:		return TS_COMPLEX;
    case VECTOR_CST:		return TS_VECTOR;
    case STRING_CST:		return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;
    case SSA_NAME:		return TS_SSA_NAME;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case TREE_BINFO:		return TS_BINFO;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;

    default:
      gcc_unreachable ();
    }
}
482
483
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.

   For every tree code we mark the TS structure returned by
   tree_node_structure_for_code, then walk up the structure-inheritance
   chain via the MARK_TS_* macros so that every base structure is
   recorded too.  The gcc_asserts at the end spot-check containments
   relied on by fold and friends.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
629
630
/* Init tree.c: allocate the GC-owned hash tables and scratch nodes
   used throughout this file, then set up the tree_contains_struct
   matrix (including any language-specific additions via
   lang_hooks.init_ts).  Must run before any tree node is built.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  /* Scratch node used when probing int_cst_hash_table.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
659
660 \f
661 /* The name of the object as the assembler will see it (but before any
662 translations made by ASM_OUTPUT_LABELREF). Often this is the same
663 as DECL_NAME. It is an IDENTIFIER_NODE. */
664 tree
665 decl_assembler_name (tree decl)
666 {
667 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
668 lang_hooks.set_decl_assembler_name (decl);
669 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
670 }
671
672 /* When the target supports COMDAT groups, this indicates which group the
673 DECL is associated with. This can be either an IDENTIFIER_NODE or a
674 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
675 tree
676 decl_comdat_group (const_tree node)
677 {
678 struct symtab_node *snode = symtab_node::get (node);
679 if (!snode)
680 return NULL;
681 return snode->get_comdat_group ();
682 }
683
684 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
685 tree
686 decl_comdat_group_id (const_tree node)
687 {
688 struct symtab_node *snode = symtab_node::get (node);
689 if (!snode)
690 return NULL;
691 return snode->get_comdat_group_id ();
692 }
693
694 /* When the target supports named section, return its name as IDENTIFIER_NODE
695 or NULL if it is in no section. */
696 const char *
697 decl_section_name (const_tree node)
698 {
699 struct symtab_node *snode = symtab_node::get (node);
700 if (!snode)
701 return NULL;
702 return snode->get_section ();
703 }
704
705 /* Set section name of NODE to VALUE (that is expected to be
706 identifier node) */
707 void
708 set_decl_section_name (tree node, const char *value)
709 {
710 struct symtab_node *snode;
711
712 if (value == NULL)
713 {
714 snode = symtab_node::get (node);
715 if (!snode)
716 return;
717 }
718 else if (TREE_CODE (node) == VAR_DECL)
719 snode = varpool_node::get_create (node);
720 else
721 snode = cgraph_node::get_create (node);
722 snode->set_section (value);
723 }
724
725 /* Return TLS model of a variable NODE. */
726 enum tls_model
727 decl_tls_model (const_tree node)
728 {
729 struct varpool_node *snode = varpool_node::get (node);
730 if (!snode)
731 return TLS_MODEL_NONE;
732 return snode->tls_model;
733 }
734
735 /* Set TLS model of variable NODE to MODEL. */
736 void
737 set_decl_tls_model (tree node, enum tls_model model)
738 {
739 struct varpool_node *vnode;
740
741 if (model == TLS_MODEL_NONE)
742 {
743 vnode = varpool_node::get (node);
744 if (!vnode)
745 return;
746 }
747 else
748 vnode = varpool_node::get_create (node);
749 vnode->tls_model = model;
750 }
751
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR
   (such codes hit gcc_unreachable below; use tree_size on a concrete
   node instead).  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      {
	switch (code)
	  {
	  case FIELD_DECL:
	    return sizeof (struct tree_field_decl);
	  case PARM_DECL:
	    return sizeof (struct tree_parm_decl);
	  case VAR_DECL:
	    return sizeof (struct tree_var_decl);
	  case LABEL_DECL:
	    return sizeof (struct tree_label_decl);
	  case RESULT_DECL:
	    return sizeof (struct tree_result_decl);
	  case CONST_DECL:
	    return sizeof (struct tree_const_decl);
	  case TYPE_DECL:
	    return sizeof (struct tree_type_decl);
	  case FUNCTION_DECL:
	    return sizeof (struct tree_function_decl);
	  case DEBUG_EXPR_DECL:
	    return sizeof (struct tree_decl_with_rtl);
	  case TRANSLATION_UNIT_DECL:
	    return sizeof (struct tree_translation_unit_decl);
	  case NAMESPACE_DECL:
	  case IMPORTED_DECL:
	  case NAMELIST_DECL:
	    return sizeof (struct tree_decl_non_common);
	  default:
	    /* Front-end specific decl codes; ask the language.  */
	    return lang_hooks.tree_size (code);
	  }
      }

    case tcc_type:  /* a type node */
      return sizeof (struct tree_type_non_common);

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp embeds one operand; add space for the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (struct tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case REAL_CST:		return sizeof (struct tree_real_cst);
	case FIXED_CST:		return sizeof (struct tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (struct tree_complex);
	case VECTOR_CST:	return sizeof (struct tree_vector);
	case STRING_CST:	gcc_unreachable ();
	default:
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (struct tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (struct tree_common);

	case TREE_VEC:
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (struct tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (struct tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (struct tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (struct tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);

	default:
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
847
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes:
   those are measured from the node's own length fields.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* One HOST_WIDE_INT is embedded; add space for the rest.  */
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      /* The base-binfo vector is embedded at the tail of the node.  */
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
	      + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));

    case STRING_CST:
      /* +1 for the terminating NUL of the embedded string.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
	        * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	/* Variable-length expressions (e.g. calls) store their operand
	   count in the node itself.  */
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
889
/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  A no-op unless GATHER_STATISTICS is enabled.  Maps the
   tree code class (and, for tcc_exceptional, the individual code) to a
   tree_node_kind bucket and bumps the per-code and per-kind
   counters.  */

static void
record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
				   size_t length ATTRIBUTE_UNUSED)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);
  tree_node_kind kind;

  if (!GATHER_STATISTICS)
    return;

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      kind = d_kind;
      break;

    case tcc_type:  /* a type node */
      kind = t_kind;
      break;

    case tcc_statement:  /* an expression with side effects */
      kind = s_kind;
      break;

    case tcc_reference:  /* a reference */
      kind = r_kind;
      break;

    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      kind = e_kind;
      break;

    case tcc_constant:  /* a constant */
      kind = c_kind;
      break;

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  kind = id_kind;
	  break;

	case TREE_VEC:
	  kind = vec_kind;
	  break;

	case TREE_BINFO:
	  kind = binfo_kind;
	  break;

	case SSA_NAME:
	  kind = ssa_name_kind;
	  break;

	case BLOCK:
	  kind = b_kind;
	  break;

	case CONSTRUCTOR:
	  kind = constr_kind;
	  break;

	case OMP_CLAUSE:
	  kind = omp_clause_kind;
	  break;

	default:
	  kind = x_kind;
	  break;
	}
      break;

    case tcc_vl_exp:
      kind = e_kind;
      break;

    default:
      gcc_unreachable ();
    }

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}
981
982 /* Allocate and return a new UID from the DECL_UID namespace. */
983
984 int
985 allocate_decl_uid (void)
986 {
987 return next_decl_uid++;
988 }
989
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node_stat (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* GC allocation is zero-initialized; only non-zero defaults need to
     be set below.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      DECL_ALIGN (t) = FUNCTION_BOUNDARY;
	      DECL_MODE (t) = FUNCTION_MODE;
	    }
	  else
	    DECL_ALIGN (t) = 1;
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* Debug decls draw from a separate, negative UID space so that
	 erroneous uses stand out.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      TYPE_ALIGN (t) = BITS_PER_UNIT;
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1104
1105 /* Free tree node. */
1106
1107 void
1108 free_node (tree node)
1109 {
1110 enum tree_code code = TREE_CODE (node);
1111 if (GATHER_STATISTICS)
1112 {
1113 tree_code_counts[(int) TREE_CODE (node)]--;
1114 tree_node_counts[(int) t_kind]--;
1115 tree_node_sizes[(int) t_kind] -= tree_code_size (TREE_CODE (node));
1116 }
1117 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1118 vec_free (CONSTRUCTOR_ELTS (node));
1119 else if (code == BLOCK)
1120 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1121 else if (code == TREE_BINFO)
1122 vec_free (BINFO_BASE_ACCESSES (node));
1123 ggc_free (node);
1124 }
1125 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node_stat (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* STATEMENT_LISTs have their own copying machinery; a bitwise copy
     would share the interior statement chain.  */
  gcc_assert (code != STATEMENT_LIST);

  /* Allocate a node of the same (possibly variable) size and make a
     bitwise copy, then patch up the fields that must not be shared.  */
  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* Give the copy its own UID.  Debug decls count downward so they
	 do not perturb the normal decl UID sequence.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (TREE_CODE (node) == VAR_DECL)
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  /* The copy is not yet in the symbol table.  */
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The function body/cfg and symtab entry belong to NODE.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the embedded option structure so the copy can be
	 modified independently.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1214
1215 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1216 For example, this can copy a list made of TREE_LIST nodes. */
1217
1218 tree
1219 copy_list (tree list)
1220 {
1221 tree head;
1222 tree prev, next;
1223
1224 if (list == 0)
1225 return 0;
1226
1227 head = prev = copy_node (list);
1228 next = TREE_CHAIN (list);
1229 while (next)
1230 {
1231 TREE_CHAIN (prev) = copy_node (next);
1232 prev = TREE_CHAIN (prev);
1233 next = TREE_CHAIN (next);
1234 }
1235 return head;
1236 }
1237
1238 \f
1239 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1240 INTEGER_CST with value CST and type TYPE. */
1241
1242 static unsigned int
1243 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1244 {
1245 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1246 /* We need extra HWIs if CST is an unsigned integer with its
1247 upper bit set. */
1248 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1249 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1250 return cst.get_len ();
1251 }
1252
/* Return a new INTEGER_CST with value CST and type TYPE.  The node is
   freshly allocated (not shared); callers that want sharing go through
   wide_int_to_tree.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* get_int_cst_ext_nunits asked for extra elements: CST is an
	 unsigned value with its top bit set.  Fill the extension
	 elements so the extended representation is all-ones within the
	 precision (zero above it, via zext_hwi).  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Unsigned type whose precision does not fill the top element:
	 zero-extend the most significant element to the precision
	 boundary.  (LEN is decremented so the verbatim copy below skips
	 the element patched here.)  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the remaining elements verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1284
1285 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1286
1287 tree
1288 build_int_cst (tree type, HOST_WIDE_INT low)
1289 {
1290 /* Support legacy code. */
1291 if (!type)
1292 type = integer_type_node;
1293
1294 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1295 }
1296
1297 tree
1298 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1299 {
1300 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1301 }
1302
1303 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1304
1305 tree
1306 build_int_cst_type (tree type, HOST_WIDE_INT low)
1307 {
1308 gcc_assert (type);
1309 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1310 }
1311
1312 /* Constructs tree in type TYPE from with value given by CST. Signedness
1313 of CST is assumed to be the same as the signedness of TYPE. */
1314
1315 tree
1316 double_int_to_tree (tree type, double_int cst)
1317 {
1318 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1319 }
1320
1321 /* We force the wide_int CST to the range of the type TYPE by sign or
1322 zero extending it. OVERFLOWABLE indicates if we are interested in
1323 overflow of the value, when >0 we are only interested in signed
1324 overflow, for <0 we are interested in any overflow. OVERFLOWED
1325 indicates whether overflow has already occurred. CONST_OVERFLOWED
1326 indicates whether constant overflow has already occurred. We force
1327 T's value to be within range of T's type (by setting to 0 or 1 all
1328 the bits outside the type's range). We set TREE_OVERFLOWED if,
1329 OVERFLOWED is nonzero,
1330 or OVERFLOWABLE is >0 and signed overflow occurs
1331 or OVERFLOWABLE is <0 and any overflow occurs
1332 We return a new tree node for the extended wide_int. The node
1333 is shared if no overflow flags are set. */
1334
1335
1336 tree
1337 force_fit_type (tree type, const wide_int_ref &cst,
1338 int overflowable, bool overflowed)
1339 {
1340 signop sign = TYPE_SIGN (type);
1341
1342 /* If we need to set overflow flags, return a new unshared node. */
1343 if (overflowed || !wi::fits_to_tree_p (cst, type))
1344 {
1345 if (overflowed
1346 || overflowable < 0
1347 || (overflowable > 0 && sign == SIGNED))
1348 {
1349 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1350 tree t = build_new_int_cst (type, tmp);
1351 TREE_OVERFLOW (t) = 1;
1352 return t;
1353 }
1354 }
1355
1356 /* Else build a shared node. */
1357 return wide_int_to_tree (type, cst);
1358 }
1359
1360 /* These are the hash table functions for the hash table of INTEGER_CST
1361 nodes of a sizetype. */
1362
1363 /* Return the hash code X, an INTEGER_CST. */
1364
1365 hashval_t
1366 int_cst_hasher::hash (tree x)
1367 {
1368 const_tree const t = x;
1369 hashval_t code = TYPE_UID (TREE_TYPE (t));
1370 int i;
1371
1372 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1373 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1374
1375 return code;
1376 }
1377
1378 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1379 is the same as that given by *Y, which is the same. */
1380
1381 bool
1382 int_cst_hasher::equal (tree x, tree y)
1383 {
1384 const_tree const xt = x;
1385 const_tree const yt = y;
1386
1387 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1388 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1389 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1390 return false;
1391
1392 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1393 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1394 return false;
1395
1396 return true;
1397 }
1398
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

tree
wide_int_to_tree (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;	/* Index into the per-type small-value cache, or -1.  */
  int limit = 0;	/* Size of that cache for this type.  */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      /* A redundant top element must carry sign information for the
	 element below it; otherwise the representation is not minimal.  */
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  /* Extend PCST to the type's precision per the type's sign.  */
  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether this value belongs in the per-type small-value
	 cache, and at which index.  */
      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case POINTER_BOUNDS_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = INTEGER_SHARE_LIMIT;
	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N); slot 0 holds -1.  */
	      limit = INTEGER_SHARE_LIMIT + 1;
	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
    }

  return t;
}
1552
/* Insert the INTEGER_CST T into the shared-constant caches for its
   type: the per-type vector of small values when T is in the cached
   range, otherwise the global hash table of larger shared ints.  T must
   not have TREE_OVERFLOW set.  The index computation mirrors
   wide_int_to_tree so lookups there will find T.  */

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  int ix = -1;	/* Index into the per-type small-value cache, or -1.  */
  int limit = 0;	/* Size of that cache for this type.  */
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (t, 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = INTEGER_SHARE_LIMIT;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N; slot 0 holds -1.  */
	  limit = INTEGER_SHARE_LIMIT + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (t))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      /* The slot must be empty — each small value is cached once.  */
      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::eq_p (tree (*slot), t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}
1656
1657
1658 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1659 and the rest are zeros. */
1660
1661 tree
1662 build_low_bits_mask (tree type, unsigned bits)
1663 {
1664 gcc_assert (bits <= TYPE_PRECISION (type));
1665
1666 return wide_int_to_tree (type, wi::mask (bits, false,
1667 TYPE_PRECISION (type)));
1668 }
1669
1670 /* Checks that X is integer constant that can be expressed in (unsigned)
1671 HOST_WIDE_INT without loss of precision. */
1672
1673 bool
1674 cst_and_fits_in_hwi (const_tree x)
1675 {
1676 if (TREE_CODE (x) != INTEGER_CST)
1677 return false;
1678
1679 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1680 return false;
1681
1682 return TREE_INT_CST_NUNITS (x) == 1;
1683 }
1684
1685 /* Build a newly constructed VECTOR_CST node of length LEN. */
1686
1687 tree
1688 make_vector_stat (unsigned len MEM_STAT_DECL)
1689 {
1690 tree t;
1691 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1692
1693 record_node_allocation_statistics (VECTOR_CST, length);
1694
1695 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1696
1697 TREE_SET_CODE (t, VECTOR_CST);
1698 TREE_CONSTANT (t) = 1;
1699
1700 return t;
1701 }
1702
1703 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1704 are in a list pointed to by VALS. */
1705
1706 tree
1707 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1708 {
1709 int over = 0;
1710 unsigned cnt = 0;
1711 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1712 TREE_TYPE (v) = type;
1713
1714 /* Iterate through elements and check for overflow. */
1715 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1716 {
1717 tree value = vals[cnt];
1718
1719 VECTOR_CST_ELT (v, cnt) = value;
1720
1721 /* Don't crash if we get an address constant. */
1722 if (!CONSTANT_CLASS_P (value))
1723 continue;
1724
1725 over |= TREE_OVERFLOW (value);
1726 }
1727
1728 TREE_OVERFLOW (v) = over;
1729 return v;
1730 }
1731
1732 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1733 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1734
1735 tree
1736 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1737 {
1738 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1739 unsigned HOST_WIDE_INT idx, pos = 0;
1740 tree value;
1741
1742 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1743 {
1744 if (TREE_CODE (value) == VECTOR_CST)
1745 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1746 vec[pos++] = VECTOR_CST_ELT (value, i);
1747 else
1748 vec[pos++] = value;
1749 }
1750 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1751 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1752
1753 return build_vector (type, vec);
1754 }
1755
1756 /* Build a vector of type VECTYPE where all the elements are SCs. */
1757 tree
1758 build_vector_from_val (tree vectype, tree sc)
1759 {
1760 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1761
1762 if (sc == error_mark_node)
1763 return sc;
1764
1765 /* Verify that the vector type is suitable for SC. Note that there
1766 is some inconsistency in the type-system with respect to restrict
1767 qualifications of pointers. Vector types always have a main-variant
1768 element type and the qualification is applied to the vector-type.
1769 So TREE_TYPE (vector-type) does not return a properly qualified
1770 vector element-type. */
1771 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1772 TREE_TYPE (vectype)));
1773
1774 if (CONSTANT_CLASS_P (sc))
1775 {
1776 tree *v = XALLOCAVEC (tree, nunits);
1777 for (i = 0; i < nunits; ++i)
1778 v[i] = sc;
1779 return build_vector (vectype, v);
1780 }
1781 else
1782 {
1783 vec<constructor_elt, va_gc> *v;
1784 vec_alloc (v, nunits);
1785 for (i = 0; i < nunits; ++i)
1786 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1787 return build_constructor (vectype, v);
1788 }
1789 }
1790
1791 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1792 are in the vec pointed to by VALS. */
1793 tree
1794 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1795 {
1796 tree c = make_node (CONSTRUCTOR);
1797 unsigned int i;
1798 constructor_elt *elt;
1799 bool constant_p = true;
1800 bool side_effects_p = false;
1801
1802 TREE_TYPE (c) = type;
1803 CONSTRUCTOR_ELTS (c) = vals;
1804
1805 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1806 {
1807 /* Mostly ctors will have elts that don't have side-effects, so
1808 the usual case is to scan all the elements. Hence a single
1809 loop for both const and side effects, rather than one loop
1810 each (with early outs). */
1811 if (!TREE_CONSTANT (elt->value))
1812 constant_p = false;
1813 if (TREE_SIDE_EFFECTS (elt->value))
1814 side_effects_p = true;
1815 }
1816
1817 TREE_SIDE_EFFECTS (c) = side_effects_p;
1818 TREE_CONSTANT (c) = constant_p;
1819
1820 return c;
1821 }
1822
1823 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1824 INDEX and VALUE. */
1825 tree
1826 build_constructor_single (tree type, tree index, tree value)
1827 {
1828 vec<constructor_elt, va_gc> *v;
1829 constructor_elt elt = {index, value};
1830
1831 vec_alloc (v, 1);
1832 v->quick_push (elt);
1833
1834 return build_constructor (type, v);
1835 }
1836
1837
1838 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1839 are in a list pointed to by VALS. */
1840 tree
1841 build_constructor_from_list (tree type, tree vals)
1842 {
1843 tree t;
1844 vec<constructor_elt, va_gc> *v = NULL;
1845
1846 if (vals)
1847 {
1848 vec_alloc (v, list_length (vals));
1849 for (t = vals; t; t = TREE_CHAIN (t))
1850 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1851 }
1852
1853 return build_constructor (type, v);
1854 }
1855
1856 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1857 of elements, provided as index/value pairs. */
1858
1859 tree
1860 build_constructor_va (tree type, int nelts, ...)
1861 {
1862 vec<constructor_elt, va_gc> *v = NULL;
1863 va_list p;
1864
1865 va_start (p, nelts);
1866 vec_alloc (v, nelts);
1867 while (nelts--)
1868 {
1869 tree index = va_arg (p, tree);
1870 tree value = va_arg (p, tree);
1871 CONSTRUCTOR_APPEND_ELT (v, index, value);
1872 }
1873 va_end (p);
1874 return build_constructor (type, v);
1875 }
1876
1877 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1878
1879 tree
1880 build_fixed (tree type, FIXED_VALUE_TYPE f)
1881 {
1882 tree v;
1883 FIXED_VALUE_TYPE *fp;
1884
1885 v = make_node (FIXED_CST);
1886 fp = ggc_alloc<fixed_value> ();
1887 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1888
1889 TREE_TYPE (v) = type;
1890 TREE_FIXED_CST_PTR (v) = fp;
1891 return v;
1892 }
1893
1894 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1895
1896 tree
1897 build_real (tree type, REAL_VALUE_TYPE d)
1898 {
1899 tree v;
1900 REAL_VALUE_TYPE *dp;
1901 int overflow = 0;
1902
1903 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1904 Consider doing it via real_convert now. */
1905
1906 v = make_node (REAL_CST);
1907 dp = ggc_alloc<real_value> ();
1908 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1909
1910 TREE_TYPE (v) = type;
1911 TREE_REAL_CST_PTR (v) = dp;
1912 TREE_OVERFLOW (v) = overflow;
1913 return v;
1914 }
1915
1916 /* Like build_real, but first truncate D to the type. */
1917
1918 tree
1919 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1920 {
1921 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1922 }
1923
1924 /* Return a new REAL_CST node whose type is TYPE
1925 and whose value is the integer value of the INTEGER_CST node I. */
1926
1927 REAL_VALUE_TYPE
1928 real_value_from_int_cst (const_tree type, const_tree i)
1929 {
1930 REAL_VALUE_TYPE d;
1931
1932 /* Clear all bits of the real value type so that we can later do
1933 bitwise comparisons to see if two values are the same. */
1934 memset (&d, 0, sizeof d);
1935
1936 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1937 TYPE_SIGN (TREE_TYPE (i)));
1938 return d;
1939 }
1940
1941 /* Given a tree representing an integer constant I, return a tree
1942 representing the same value as a floating-point constant of type TYPE. */
1943
1944 tree
1945 build_real_from_int_cst (tree type, const_tree i)
1946 {
1947 tree v;
1948 int overflow = TREE_OVERFLOW (i);
1949
1950 v = build_real (type, real_value_from_int_cst (type, i));
1951
1952 TREE_OVERFLOW (v) |= overflow;
1953 return v;
1954 }
1955
1956 /* Return a newly constructed STRING_CST node whose value is
1957 the LEN characters at STR.
1958 Note that for a C string literal, LEN should include the trailing NUL.
1959 The TREE_TYPE is not initialized. */
1960
1961 tree
1962 build_string (int len, const char *str)
1963 {
1964 tree s;
1965 size_t length;
1966
1967 /* Do not waste bytes provided by padding of struct tree_string. */
1968 length = len + offsetof (struct tree_string, str) + 1;
1969
1970 record_node_allocation_statistics (STRING_CST, length);
1971
1972 s = (tree) ggc_internal_alloc (length);
1973
1974 memset (s, 0, sizeof (struct tree_typed));
1975 TREE_SET_CODE (s, STRING_CST);
1976 TREE_CONSTANT (s) = 1;
1977 TREE_STRING_LENGTH (s) = len;
1978 memcpy (s->string.str, str, len);
1979 s->string.str[len] = '\0';
1980
1981 return s;
1982 }
1983
1984 /* Return a newly constructed COMPLEX_CST node whose value is
1985 specified by the real and imaginary parts REAL and IMAG.
1986 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1987 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1988
1989 tree
1990 build_complex (tree type, tree real, tree imag)
1991 {
1992 tree t = make_node (COMPLEX_CST);
1993
1994 TREE_REALPART (t) = real;
1995 TREE_IMAGPART (t) = imag;
1996 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1997 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1998 return t;
1999 }
2000
2001 /* Build a complex (inf +- 0i), such as for the result of cproj.
2002 TYPE is the complex tree type of the result. If NEG is true, the
2003 imaginary zero is negative. */
2004
2005 tree
2006 build_complex_inf (tree type, bool neg)
2007 {
2008 REAL_VALUE_TYPE rinf, rzero = dconst0;
2009
2010 real_inf (&rinf);
2011 rzero.sign = neg;
2012 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2013 build_real (TREE_TYPE (type), rzero));
2014 }
2015
2016 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2017 element is set to 1. In particular, this is 1 + i for complex types. */
2018
2019 tree
2020 build_each_one_cst (tree type)
2021 {
2022 if (TREE_CODE (type) == COMPLEX_TYPE)
2023 {
2024 tree scalar = build_one_cst (TREE_TYPE (type));
2025 return build_complex (type, scalar, scalar);
2026 }
2027 else
2028 return build_one_cst (type);
2029 }
2030
2031 /* Return a constant of arithmetic type TYPE which is the
2032 multiplicative identity of the set TYPE. */
2033
2034 tree
2035 build_one_cst (tree type)
2036 {
2037 switch (TREE_CODE (type))
2038 {
2039 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2040 case POINTER_TYPE: case REFERENCE_TYPE:
2041 case OFFSET_TYPE:
2042 return build_int_cst (type, 1);
2043
2044 case REAL_TYPE:
2045 return build_real (type, dconst1);
2046
2047 case FIXED_POINT_TYPE:
2048 /* We can only generate 1 for accum types. */
2049 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2050 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2051
2052 case VECTOR_TYPE:
2053 {
2054 tree scalar = build_one_cst (TREE_TYPE (type));
2055
2056 return build_vector_from_val (type, scalar);
2057 }
2058
2059 case COMPLEX_TYPE:
2060 return build_complex (type,
2061 build_one_cst (TREE_TYPE (type)),
2062 build_zero_cst (TREE_TYPE (type)));
2063
2064 default:
2065 gcc_unreachable ();
2066 }
2067 }
2068
2069 /* Return an integer of type TYPE containing all 1's in as much precision as
2070 it contains, or a complex or vector whose subparts are such integers. */
2071
2072 tree
2073 build_all_ones_cst (tree type)
2074 {
2075 if (TREE_CODE (type) == COMPLEX_TYPE)
2076 {
2077 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2078 return build_complex (type, scalar, scalar);
2079 }
2080 else
2081 return build_minus_one_cst (type);
2082 }
2083
2084 /* Return a constant of arithmetic type TYPE which is the
2085 opposite of the multiplicative identity of the set TYPE. */
2086
2087 tree
2088 build_minus_one_cst (tree type)
2089 {
2090 switch (TREE_CODE (type))
2091 {
2092 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2093 case POINTER_TYPE: case REFERENCE_TYPE:
2094 case OFFSET_TYPE:
2095 return build_int_cst (type, -1);
2096
2097 case REAL_TYPE:
2098 return build_real (type, dconstm1);
2099
2100 case FIXED_POINT_TYPE:
2101 /* We can only generate 1 for accum types. */
2102 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2103 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2104 TYPE_MODE (type)));
2105
2106 case VECTOR_TYPE:
2107 {
2108 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2109
2110 return build_vector_from_val (type, scalar);
2111 }
2112
2113 case COMPLEX_TYPE:
2114 return build_complex (type,
2115 build_minus_one_cst (TREE_TYPE (type)),
2116 build_zero_cst (TREE_TYPE (type)));
2117
2118 default:
2119 gcc_unreachable ();
2120 }
2121 }
2122
2123 /* Build 0 constant of type TYPE. This is used by constructor folding
2124 and thus the constant should be represented in memory by
2125 zero(es). */
2126
2127 tree
2128 build_zero_cst (tree type)
2129 {
2130 switch (TREE_CODE (type))
2131 {
2132 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2133 case POINTER_TYPE: case REFERENCE_TYPE:
2134 case OFFSET_TYPE: case NULLPTR_TYPE:
2135 return build_int_cst (type, 0);
2136
2137 case REAL_TYPE:
2138 return build_real (type, dconst0);
2139
2140 case FIXED_POINT_TYPE:
2141 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2142
2143 case VECTOR_TYPE:
2144 {
2145 tree scalar = build_zero_cst (TREE_TYPE (type));
2146
2147 return build_vector_from_val (type, scalar);
2148 }
2149
2150 case COMPLEX_TYPE:
2151 {
2152 tree zero = build_zero_cst (TREE_TYPE (type));
2153
2154 return build_complex (type, zero, zero);
2155 }
2156
2157 default:
2158 if (!AGGREGATE_TYPE_P (type))
2159 return fold_convert (type, integer_zero_node);
2160 return build_constructor (type, NULL);
2161 }
2162 }
2163
2164
/* Build a TREE_BINFO node with room for BASE_BINFOS base-binfo slots in
   its embedded vector.  */

tree
make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The base-binfo vec is embedded at the tail of struct tree_binfo;
     size the allocation for the header plus the embedded vector.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Clear only the header; the embedded vector is initialized below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2186
2187 /* Create a CASE_LABEL_EXPR tree node and return it. */
2188
2189 tree
2190 build_case_label (tree low_value, tree high_value, tree label_decl)
2191 {
2192 tree t = make_node (CASE_LABEL_EXPR);
2193
2194 TREE_TYPE (t) = void_type_node;
2195 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2196
2197 CASE_LOW (t) = low_value;
2198 CASE_HIGH (t) = high_value;
2199 CASE_LABEL (t) = label_decl;
2200 CASE_CHAIN (t) = NULL_TREE;
2201
2202 return t;
2203 }
2204
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_int_cst already embeds one HOST_WIDE_INT element.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2236
2237 /* Build a newly constructed TREE_VEC node of length LEN. */
2238
2239 tree
2240 make_tree_vec_stat (int len MEM_STAT_DECL)
2241 {
2242 tree t;
2243 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2244
2245 record_node_allocation_statistics (TREE_VEC, length);
2246
2247 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2248
2249 TREE_SET_CODE (t, TREE_VEC);
2250 TREE_VEC_LENGTH (t) = len;
2251
2252 return t;
2253 }
2254
2255 /* Grow a TREE_VEC node to new length LEN. */
2256
2257 tree
2258 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2259 {
2260 gcc_assert (TREE_CODE (v) == TREE_VEC);
2261
2262 int oldlen = TREE_VEC_LENGTH (v);
2263 gcc_assert (len > oldlen);
2264
2265 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2266 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2267
2268 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2269
2270 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2271
2272 TREE_VEC_LENGTH (v) = len;
2273
2274 return v;
2275 }
2276 \f
2277 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2278 fixed, and scalar, complex or vector. */
2279
2280 int
2281 zerop (const_tree expr)
2282 {
2283 return (integer_zerop (expr)
2284 || real_zerop (expr)
2285 || fixed_zerop (expr));
2286 }
2287
2288 /* Return 1 if EXPR is the integer constant zero or a complex constant
2289 of zero. */
2290
2291 int
2292 integer_zerop (const_tree expr)
2293 {
2294 switch (TREE_CODE (expr))
2295 {
2296 case INTEGER_CST:
2297 return wi::eq_p (expr, 0);
2298 case COMPLEX_CST:
2299 return (integer_zerop (TREE_REALPART (expr))
2300 && integer_zerop (TREE_IMAGPART (expr)));
2301 case VECTOR_CST:
2302 {
2303 unsigned i;
2304 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2305 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2306 return false;
2307 return true;
2308 }
2309 default:
2310 return false;
2311 }
2312 }
2313
2314 /* Return 1 if EXPR is the integer constant one or the corresponding
2315 complex constant. */
2316
2317 int
2318 integer_onep (const_tree expr)
2319 {
2320 switch (TREE_CODE (expr))
2321 {
2322 case INTEGER_CST:
2323 return wi::eq_p (wi::to_widest (expr), 1);
2324 case COMPLEX_CST:
2325 return (integer_onep (TREE_REALPART (expr))
2326 && integer_zerop (TREE_IMAGPART (expr)));
2327 case VECTOR_CST:
2328 {
2329 unsigned i;
2330 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2331 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2332 return false;
2333 return true;
2334 }
2335 default:
2336 return false;
2337 }
2338 }
2339
2340 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2341 return 1 if every piece is the integer constant one. */
2342
2343 int
2344 integer_each_onep (const_tree expr)
2345 {
2346 if (TREE_CODE (expr) == COMPLEX_CST)
2347 return (integer_onep (TREE_REALPART (expr))
2348 && integer_onep (TREE_IMAGPART (expr)));
2349 else
2350 return integer_onep (expr);
2351 }
2352
2353 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2354 it contains, or a complex or vector whose subparts are such integers. */
2355
2356 int
2357 integer_all_onesp (const_tree expr)
2358 {
2359 if (TREE_CODE (expr) == COMPLEX_CST
2360 && integer_all_onesp (TREE_REALPART (expr))
2361 && integer_all_onesp (TREE_IMAGPART (expr)))
2362 return 1;
2363
2364 else if (TREE_CODE (expr) == VECTOR_CST)
2365 {
2366 unsigned i;
2367 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2368 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2369 return 0;
2370 return 1;
2371 }
2372
2373 else if (TREE_CODE (expr) != INTEGER_CST)
2374 return 0;
2375
2376 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2377 }
2378
2379 /* Return 1 if EXPR is the integer constant minus one. */
2380
2381 int
2382 integer_minus_onep (const_tree expr)
2383 {
2384 if (TREE_CODE (expr) == COMPLEX_CST)
2385 return (integer_all_onesp (TREE_REALPART (expr))
2386 && integer_zerop (TREE_IMAGPART (expr)));
2387 else
2388 return integer_all_onesp (expr);
2389 }
2390
2391 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2392 one bit on). */
2393
2394 int
2395 integer_pow2p (const_tree expr)
2396 {
2397 if (TREE_CODE (expr) == COMPLEX_CST
2398 && integer_pow2p (TREE_REALPART (expr))
2399 && integer_zerop (TREE_IMAGPART (expr)))
2400 return 1;
2401
2402 if (TREE_CODE (expr) != INTEGER_CST)
2403 return 0;
2404
2405 return wi::popcount (expr) == 1;
2406 }
2407
2408 /* Return 1 if EXPR is an integer constant other than zero or a
2409 complex constant other than zero. */
2410
2411 int
2412 integer_nonzerop (const_tree expr)
2413 {
2414 return ((TREE_CODE (expr) == INTEGER_CST
2415 && !wi::eq_p (expr, 0))
2416 || (TREE_CODE (expr) == COMPLEX_CST
2417 && (integer_nonzerop (TREE_REALPART (expr))
2418 || integer_nonzerop (TREE_IMAGPART (expr)))));
2419 }
2420
2421 /* Return 1 if EXPR is the integer constant one. For vector,
2422 return 1 if every piece is the integer constant minus one
2423 (representing the value TRUE). */
2424
2425 int
2426 integer_truep (const_tree expr)
2427 {
2428 if (TREE_CODE (expr) == VECTOR_CST)
2429 return integer_all_onesp (expr);
2430 return integer_onep (expr);
2431 }
2432
2433 /* Return 1 if EXPR is the fixed-point constant zero. */
2434
2435 int
2436 fixed_zerop (const_tree expr)
2437 {
2438 return (TREE_CODE (expr) == FIXED_CST
2439 && TREE_FIXED_CST (expr).data.is_zero ());
2440 }
2441
2442 /* Return the power of two represented by a tree node known to be a
2443 power of two. */
2444
2445 int
2446 tree_log2 (const_tree expr)
2447 {
2448 if (TREE_CODE (expr) == COMPLEX_CST)
2449 return tree_log2 (TREE_REALPART (expr));
2450
2451 return wi::exact_log2 (expr);
2452 }
2453
/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  /* NOTE(review): for COMPLEX_CST this delegates to tree_log2 (exact
     log2) rather than applying floor semantics to the real part — TODO
     confirm that is intentional and not a copy-paste of tree_log2.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (expr);
}
2465
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have a meaningful trailing-zero
     count.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (expr);
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use the nonzero-bits mask recorded on the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve at least the trailing zeros common to both
	 operands.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so take the better of the operands.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros of a product add up.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A constant in-range shift count adds that many zeros.  */
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A constant in-range shift count consumes that many zeros.  */
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two behaves like a right
	 shift.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If ctz equals the inner precision, the operand is known zero,
	 so the converted value is zero in the full new precision.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must guarantee the trailing zeros.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is its second operand.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
2576
2577 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2578 decimal float constants, so don't return 1 for them. */
2579
2580 int
2581 real_zerop (const_tree expr)
2582 {
2583 switch (TREE_CODE (expr))
2584 {
2585 case REAL_CST:
2586 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2587 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2588 case COMPLEX_CST:
2589 return real_zerop (TREE_REALPART (expr))
2590 && real_zerop (TREE_IMAGPART (expr));
2591 case VECTOR_CST:
2592 {
2593 unsigned i;
2594 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2595 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2596 return false;
2597 return true;
2598 }
2599 default:
2600 return false;
2601 }
2602 }
2603
2604 /* Return 1 if EXPR is the real constant one in real or complex form.
2605 Trailing zeroes matter for decimal float constants, so don't return
2606 1 for them. */
2607
2608 int
2609 real_onep (const_tree expr)
2610 {
2611 switch (TREE_CODE (expr))
2612 {
2613 case REAL_CST:
2614 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2615 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2616 case COMPLEX_CST:
2617 return real_onep (TREE_REALPART (expr))
2618 && real_zerop (TREE_IMAGPART (expr));
2619 case VECTOR_CST:
2620 {
2621 unsigned i;
2622 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2623 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2624 return false;
2625 return true;
2626 }
2627 default:
2628 return false;
2629 }
2630 }
2631
2632 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2633 matter for decimal float constants, so don't return 1 for them. */
2634
2635 int
2636 real_minus_onep (const_tree expr)
2637 {
2638 switch (TREE_CODE (expr))
2639 {
2640 case REAL_CST:
2641 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2642 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2643 case COMPLEX_CST:
2644 return real_minus_onep (TREE_REALPART (expr))
2645 && real_zerop (TREE_IMAGPART (expr));
2646 case VECTOR_CST:
2647 {
2648 unsigned i;
2649 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2650 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2651 return false;
2652 return true;
2653 }
2654 default:
2655 return false;
2656 }
2657 }
2658
2659 /* Nonzero if EXP is a constant or a cast of a constant. */
2660
2661 int
2662 really_constant_p (const_tree exp)
2663 {
2664 /* This is not quite the same as STRIP_NOPS. It does more. */
2665 while (CONVERT_EXPR_P (exp)
2666 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2667 exp = TREE_OPERAND (exp, 0);
2668 return TREE_CONSTANT (exp);
2669 }
2670 \f
2671 /* Return first list element whose TREE_VALUE is ELEM.
2672 Return 0 if ELEM is not in LIST. */
2673
2674 tree
2675 value_member (tree elem, tree list)
2676 {
2677 while (list)
2678 {
2679 if (elem == TREE_VALUE (list))
2680 return list;
2681 list = TREE_CHAIN (list);
2682 }
2683 return NULL_TREE;
2684 }
2685
2686 /* Return first list element whose TREE_PURPOSE is ELEM.
2687 Return 0 if ELEM is not in LIST. */
2688
2689 tree
2690 purpose_member (const_tree elem, tree list)
2691 {
2692 while (list)
2693 {
2694 if (elem == TREE_PURPOSE (list))
2695 return list;
2696 list = TREE_CHAIN (list);
2697 }
2698 return NULL_TREE;
2699 }
2700
2701 /* Return true if ELEM is in V. */
2702
2703 bool
2704 vec_member (const_tree elem, vec<tree, va_gc> *v)
2705 {
2706 unsigned ix;
2707 tree t;
2708 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2709 if (elem == t)
2710 return true;
2711 return false;
2712 }
2713
2714 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2715 NULL_TREE. */
2716
2717 tree
2718 chain_index (int idx, tree chain)
2719 {
2720 for (; chain && idx > 0; --idx)
2721 chain = TREE_CHAIN (chain);
2722 return chain;
2723 }
2724
2725 /* Return nonzero if ELEM is part of the chain CHAIN. */
2726
2727 int
2728 chain_member (const_tree elem, const_tree chain)
2729 {
2730 while (chain)
2731 {
2732 if (elem == chain)
2733 return 1;
2734 chain = DECL_CHAIN (chain);
2735 }
2736
2737 return 0;
2738 }
2739
/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q advances at half the speed of P; if the chain is circular the
     two pointers must eventually coincide (tortoise-and-hare cycle
     detection), which the assert below catches.  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
2766
2767 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2768 UNION_TYPE TYPE, or NULL_TREE if none. */
2769
2770 tree
2771 first_field (const_tree type)
2772 {
2773 tree t = TYPE_FIELDS (type);
2774 while (t && TREE_CODE (t) != FIELD_DECL)
2775 t = TREE_CHAIN (t);
2776 return t;
2777 }
2778
2779 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2780 by modifying the last node in chain 1 to point to chain 2.
2781 This is the Lisp primitive `nconc'. */
2782
2783 tree
2784 chainon (tree op1, tree op2)
2785 {
2786 tree t1;
2787
2788 if (!op1)
2789 return op2;
2790 if (!op2)
2791 return op1;
2792
2793 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2794 continue;
2795 TREE_CHAIN (t1) = op2;
2796
2797 #ifdef ENABLE_TREE_CHECKING
2798 {
2799 tree t2;
2800 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2801 gcc_assert (t2 != t1);
2802 }
2803 #endif
2804
2805 return op1;
2806 }
2807
2808 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2809
2810 tree
2811 tree_last (tree chain)
2812 {
2813 tree next;
2814 if (chain)
2815 while ((next = TREE_CHAIN (chain)))
2816 chain = next;
2817 return chain;
2818 }
2819
2820 /* Reverse the order of elements in the chain T,
2821 and return the new head of the chain (old last element). */
2822
2823 tree
2824 nreverse (tree t)
2825 {
2826 tree prev = 0, decl, next;
2827 for (decl = t; decl; decl = next)
2828 {
2829 /* We shouldn't be using this function to reverse BLOCK chains; we
2830 have blocks_nreverse for that. */
2831 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2832 next = TREE_CHAIN (decl);
2833 TREE_CHAIN (decl) = prev;
2834 prev = decl;
2835 }
2836 return prev;
2837 }
2838 \f
2839 /* Return a newly created TREE_LIST node whose
2840 purpose and value fields are PARM and VALUE. */
2841
2842 tree
2843 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2844 {
2845 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2846 TREE_PURPOSE (t) = parm;
2847 TREE_VALUE (t) = value;
2848 return t;
2849 }
2850
2851 /* Build a chain of TREE_LIST nodes from a vector. */
2852
2853 tree
2854 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2855 {
2856 tree ret = NULL_TREE;
2857 tree *pp = &ret;
2858 unsigned int i;
2859 tree t;
2860 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2861 {
2862 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2863 pp = &TREE_CHAIN (*pp);
2864 }
2865 return ret;
2866 }
2867
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing: every field of the
     tree_list-specific part is assigned explicitly below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
2888
2889 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2890 trees. */
2891
2892 vec<tree, va_gc> *
2893 ctor_to_vec (tree ctor)
2894 {
2895 vec<tree, va_gc> *vec;
2896 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2897 unsigned int ix;
2898 tree val;
2899
2900 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2901 vec->quick_push (val);
2902
2903 return vec;
2904 }
2905 \f
2906 /* Return the size nominally occupied by an object of type TYPE
2907 when it resides in memory. The value is measured in units of bytes,
2908 and its data type is that normally used for type sizes
2909 (which is the first type created by make_signed_type or
2910 make_unsigned_type). */
2911
2912 tree
2913 size_in_bytes (const_tree type)
2914 {
2915 tree t;
2916
2917 if (type == error_mark_node)
2918 return integer_zero_node;
2919
2920 type = TYPE_MAIN_VARIANT (type);
2921 t = TYPE_SIZE_UNIT (type);
2922
2923 if (t == 0)
2924 {
2925 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2926 return size_zero_node;
2927 }
2928
2929 return t;
2930 }
2931
2932 /* Return the size of TYPE (in bytes) as a wide integer
2933 or return -1 if the size can vary or is larger than an integer. */
2934
2935 HOST_WIDE_INT
2936 int_size_in_bytes (const_tree type)
2937 {
2938 tree t;
2939
2940 if (type == error_mark_node)
2941 return 0;
2942
2943 type = TYPE_MAIN_VARIANT (type);
2944 t = TYPE_SIZE_UNIT (type);
2945
2946 if (t && tree_fits_uhwi_p (t))
2947 return TREE_INT_CST_LOW (t);
2948 else
2949 return -1;
2950 }
2951
2952 /* Return the maximum size of TYPE (in bytes) as a wide integer
2953 or return -1 if the size can vary or is larger than an integer. */
2954
2955 HOST_WIDE_INT
2956 max_int_size_in_bytes (const_tree type)
2957 {
2958 HOST_WIDE_INT size = -1;
2959 tree size_tree;
2960
2961 /* If this is an array type, check for a possible MAX_SIZE attached. */
2962
2963 if (TREE_CODE (type) == ARRAY_TYPE)
2964 {
2965 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2966
2967 if (size_tree && tree_fits_uhwi_p (size_tree))
2968 size = tree_to_uhwi (size_tree);
2969 }
2970
2971 /* If we still haven't been able to get a size, see if the language
2972 can compute a maximum size. */
2973
2974 if (size == -1)
2975 {
2976 size_tree = lang_hooks.types.max_size (type);
2977
2978 if (size_tree && tree_fits_uhwi_p (size_tree))
2979 size = tree_to_uhwi (size_tree);
2980 }
2981
2982 return size;
2983 }
2984 \f
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  */

tree
bit_position (const_tree field)
{
  /* Combine the byte-aligned offset of the containing unit with the
     bit offset within that unit.  */
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}
2994 \f
/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  */

tree
byte_position (const_tree field)
{
  /* Combine the byte-aligned offset of the containing unit with the
     bit offset within that unit, reduced to bytes.  */
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}
3004
/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
3014 \f
/* Return the strictest alignment, in bits, that T is known to have.
   Recurses through expression wrappers down to a DECL, falling back
   to the alignment of T's type.  */

unsigned int
expr_align (const_tree t)
{
  unsigned int align0, align1;

  switch (TREE_CODE (t))
    {
    CASE_CONVERT:  case NON_LVALUE_EXPR:
      /* If we have conversions, we know that the alignment of the
	 object must meet each of the alignments of the types.  */
      align0 = expr_align (TREE_OPERAND (t, 0));
      align1 = TYPE_ALIGN (TREE_TYPE (t));
      return MAX (align0, align1);

    case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
    case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      /* These don't change the alignment of an object.  */
      return expr_align (TREE_OPERAND (t, 0));

    case COND_EXPR:
      /* The best we can do is say that the alignment is the least aligned
	 of the two arms.  */
      align0 = expr_align (TREE_OPERAND (t, 1));
      align1 = expr_align (TREE_OPERAND (t, 2));
      return MIN (align0, align1);

      /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
	 meaningfully, it's always 1.  */
    case LABEL_DECL:     case CONST_DECL:
    case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
    case FUNCTION_DECL:
      gcc_assert (DECL_ALIGN (t) != 0);
      return DECL_ALIGN (t);

    default:
      break;
    }

  /* Otherwise take the alignment from that of the type.  */
  return TYPE_ALIGN (TREE_TYPE (t));
}
3059 \f
3060 /* Return, as a tree node, the number of elements for TYPE (which is an
3061 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3062
3063 tree
3064 array_type_nelts (const_tree type)
3065 {
3066 tree index_type, min, max;
3067
3068 /* If they did it with unspecified bounds, then we should have already
3069 given an error about it before we got here. */
3070 if (! TYPE_DOMAIN (type))
3071 return error_mark_node;
3072
3073 index_type = TYPE_DOMAIN (type);
3074 min = TYPE_MIN_VALUE (index_type);
3075 max = TYPE_MAX_VALUE (index_type);
3076
3077 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3078 if (!max)
3079 return error_mark_node;
3080
3081 return (integer_zerop (min)
3082 ? max
3083 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3084 }
3085 \f
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* Thread-locals and dllimported variables have no fixed address
	 known at compile/link time, so they don't qualify.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      /* A dereference is static iff the address being dereferenced is a
	 constant.  */
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Only constant-size elements with a constant index give a
	 compile-time-known address.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3151
3152 \f
3153
3154
3155 /* Return whether OP is a DECL whose address is function-invariant. */
3156
3157 bool
3158 decl_address_invariant_p (const_tree op)
3159 {
3160 /* The conditions below are slightly less strict than the one in
3161 staticp. */
3162
3163 switch (TREE_CODE (op))
3164 {
3165 case PARM_DECL:
3166 case RESULT_DECL:
3167 case LABEL_DECL:
3168 case FUNCTION_DECL:
3169 return true;
3170
3171 case VAR_DECL:
3172 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3173 || DECL_THREAD_LOCAL_P (op)
3174 || DECL_CONTEXT (op) == current_function_decl
3175 || decl_function_context (op) == current_function_decl)
3176 return true;
3177 break;
3178
3179 case CONST_DECL:
3180 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3181 || decl_function_context (op) == current_function_decl)
3182 return true;
3183 break;
3184
3185 default:
3186 break;
3187 }
3188
3189 return false;
3190 }
3191
3192 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3193
3194 bool
3195 decl_address_ip_invariant_p (const_tree op)
3196 {
3197 /* The conditions below are slightly less strict than the one in
3198 staticp. */
3199
3200 switch (TREE_CODE (op))
3201 {
3202 case LABEL_DECL:
3203 case FUNCTION_DECL:
3204 case STRING_CST:
3205 return true;
3206
3207 case VAR_DECL:
3208 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3209 && !DECL_DLLIMPORT_P (op))
3210 || DECL_THREAD_LOCAL_P (op))
3211 return true;
3212 break;
3213
3214 case CONST_DECL:
3215 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3216 return true;
3217 break;
3218
3219 default:
3220 break;
3221 }
3222
3223 return false;
3224 }
3225
3226
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants and side-effect-free read-only values are trivially
     invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      /* A SAVE_EXPR is evaluated once; its value cannot change.  */
      return true;

    case ADDR_EXPR:
      /* Walk down through the component references; the address is
	 invariant only if every index on the way is invariant and no
	 variable offsets/sizes (operands 2/3) are involved.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      /* The base must be a constant or a decl with an invariant
	 address.  */
      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3277
3278 /* Return true if T is function-invariant. */
3279
3280 bool
3281 tree_invariant_p (tree t)
3282 {
3283 tree inner = skip_simple_arithmetic (t);
3284 return tree_invariant_p_1 (inner);
3285 }
3286
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree t = fold (expr);
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (t);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  if (tree_invariant_p_1 (inner))
    return t;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return t;

  t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
  SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (t) = 1;
  return t;
}
3348
3349 /* Look inside EXPR into any simple arithmetic operations. Return the
3350 outermost non-arithmetic or non-invariant node. */
3351
3352 tree
3353 skip_simple_arithmetic (tree expr)
3354 {
3355 /* We don't care about whether this can be used as an lvalue in this
3356 context. */
3357 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3358 expr = TREE_OPERAND (expr, 0);
3359
3360 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3361 a constant, it will be more efficient to not make another SAVE_EXPR since
3362 it will allow better simplification and GCSE will be able to merge the
3363 computations if they actually occur. */
3364 while (true)
3365 {
3366 if (UNARY_CLASS_P (expr))
3367 expr = TREE_OPERAND (expr, 0);
3368 else if (BINARY_CLASS_P (expr))
3369 {
3370 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3371 expr = TREE_OPERAND (expr, 0);
3372 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3373 expr = TREE_OPERAND (expr, 1);
3374 else
3375 break;
3376 }
3377 else
3378 break;
3379 }
3380
3381 return expr;
3382 }
3383
3384 /* Look inside EXPR into simple arithmetic operations involving constants.
3385 Return the outermost non-arithmetic or non-constant node. */
3386
3387 tree
3388 skip_simple_constant_arithmetic (tree expr)
3389 {
3390 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3391 expr = TREE_OPERAND (expr, 0);
3392
3393 while (true)
3394 {
3395 if (UNARY_CLASS_P (expr))
3396 expr = TREE_OPERAND (expr, 0);
3397 else if (BINARY_CLASS_P (expr))
3398 {
3399 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3400 expr = TREE_OPERAND (expr, 0);
3401 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3402 expr = TREE_OPERAND (expr, 1);
3403 else
3404 break;
3405 }
3406 else
3407 break;
3408 }
3409
3410 return expr;
3411 }
3412
3413 /* Return which tree structure is used by T. */
3414
3415 enum tree_node_structure_enum
3416 tree_node_structure (const_tree t)
3417 {
3418 const enum tree_code code = TREE_CODE (t);
3419 return tree_node_structure_for_code (code);
3420 }
3421
/* Set various status flags when building a CALL_EXPR object T.  */

static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  /* Scan the operands from index 1 onward (operand 0 of a CALL_EXPR is
     skipped here; the callee and arguments start at 1 — see the
     CALL_EXPR layout in tree.h).  Any operand with side effects makes
     the call have side effects; any non-constant, non-readonly operand
     clears read_only.  */
  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
3451 \f
3452 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3453 size or offset that depends on a field within a record. */
3454
3455 bool
3456 contains_placeholder_p (const_tree exp)
3457 {
3458 enum tree_code code;
3459
3460 if (!exp)
3461 return 0;
3462
3463 code = TREE_CODE (exp);
3464 if (code == PLACEHOLDER_EXPR)
3465 return 1;
3466
3467 switch (TREE_CODE_CLASS (code))
3468 {
3469 case tcc_reference:
3470 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3471 position computations since they will be converted into a
3472 WITH_RECORD_EXPR involving the reference, which will assume
3473 here will be valid. */
3474 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3475
3476 case tcc_exceptional:
3477 if (code == TREE_LIST)
3478 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3479 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3480 break;
3481
3482 case tcc_unary:
3483 case tcc_binary:
3484 case tcc_comparison:
3485 case tcc_expression:
3486 switch (code)
3487 {
3488 case COMPOUND_EXPR:
3489 /* Ignoring the first operand isn't quite right, but works best. */
3490 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3491
3492 case COND_EXPR:
3493 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3494 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3495 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3496
3497 case SAVE_EXPR:
3498 /* The save_expr function never wraps anything containing
3499 a PLACEHOLDER_EXPR. */
3500 return 0;
3501
3502 default:
3503 break;
3504 }
3505
3506 switch (TREE_CODE_LENGTH (code))
3507 {
3508 case 1:
3509 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3510 case 2:
3511 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3512 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3513 default:
3514 return 0;
3515 }
3516
3517 case tcc_vl_exp:
3518 switch (code)
3519 {
3520 case CALL_EXPR:
3521 {
3522 const_tree arg;
3523 const_call_expr_arg_iterator iter;
3524 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3525 if (CONTAINS_PLACEHOLDER_P (arg))
3526 return 1;
3527 return 0;
3528 }
3529 default:
3530 return 0;
3531 }
3532
3533 default:
3534 return 0;
3535 }
3536 return 0;
3537 }
3538
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  Helper for type_contains_placeholder_p, which caches
   the result; call that wrapper instead of this function.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case POINTER_BOUNDS_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* Fully covered by the size/component-type checks above.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, qualifier (QUAL_UNION_TYPE only)
	   and type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
3607
/* Wrapper around above function used to cache its result.

   The cache lives in TYPE_CONTAINS_PLACEHOLDER_INTERNAL, encoded as:
   0 = not yet computed, 1 = false, 2 = true (i.e. result + 1).  */

bool
type_contains_placeholder_p (tree type)
{
  bool result;

  /* If the contains_placeholder_bits field has been initialized,
     then we know the answer.  */
  if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
    return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;

  /* Indicate that we've seen this type node, and the answer is false.
     This is what we want to return if we run into recursion via fields.
     Note this must be stored BEFORE calling type_contains_placeholder_1,
     which may recurse back to this function for self-referential types.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;

  /* Compute the real value.  */
  result = type_contains_placeholder_1 (type);

  /* Store the real value.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;

  return result;
}
3632 \f
3633 /* Push tree EXP onto vector QUEUE if it is not already present. */
3634
3635 static void
3636 push_without_duplicates (tree exp, vec<tree> *queue)
3637 {
3638 unsigned int i;
3639 tree iter;
3640
3641 FOR_EACH_VEC_ELT (*queue, i, iter)
3642 if (simple_cst_equal (iter, exp) == 1)
3643 break;
3644
3645 if (!iter)
3646 queue->safe_push (exp);
3647 }
3648
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  Recursion happens through the
   FIND_PLACEHOLDER_IN_EXPR macro, which skips null subtrees.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk down the chain of references to find the innermost base.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* Record the whole COMPONENT_REF when it hangs off a placeholder;
	 otherwise keep scanning its base.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through...  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Start at 1: operand 0 of a vl_exp node is its operand count,
	   not a real argument.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
3722
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.

   When no operand actually changes, EXP itself is returned unchanged
   so that unmodified subtrees remain shared.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
   }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through...  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Substitute into each operand; rebuild (and fold) the node only
	   if at least one operand changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
	    {
	      tree t = maybe_inline_call_in_expr (exp);
	      if (t)
		return SUBSTITUTE_IN_EXPR (t, f, r);
	    }

	  /* Copy the node lazily: only when some operand changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* Preserve the no-trap property of memory references.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
3904
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.

   As with substitute_in_expr, EXP itself is returned unchanged when no
   subtree was actually replaced, so unmodified subtrees remain shared.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: look for an element whose type matches directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: look for a pointer to the needed type, and
	 dereference it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Substitute into each operand; rebuild (and fold) the node only
	   if at least one operand changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Copy the node lazily: only when some operand changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* Preserve the no-trap property of memory references.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4075 \f
4076
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* Copy type and status flags from the original onto the rebuilt node.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4157
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference underneath.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      /* The address expression gets the full SAVE_EXPR treatment.  */
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* The base is a reference; the index is an arbitrary expression.  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* Copy type and status flags from the original onto the rebuilt node.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
4240 \f
4241 /* Low-level constructors for expressions. */
4242
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR T by examining the object whose
   address is taken and all offset computations along the way.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Clear TC / set SE according to the flags of NODE (null NODE is a no-op).  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Let the language map the innermost node to a declaration if it can;
     the hook may also adjust TC and SE.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4319
4320 /* Build an expression of code CODE, data type TYPE, and operands as
4321 specified. Expressions and reference nodes can be created this way.
4322 Constants, decls, types and misc nodes cannot be.
4323
4324 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4325 enough for all extant tree codes. */
4326
4327 tree
4328 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4329 {
4330 tree t;
4331
4332 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4333
4334 t = make_node_stat (code PASS_MEM_STAT);
4335 TREE_TYPE (t) = tt;
4336
4337 return t;
4338 }
4339
/* Build a one-operand expression of code CODE, type TYPE and operand NODE.
   Allocates a tree_exp node directly (rather than via make_node) and then
   derives TREE_SIDE_EFFECTS, TREE_READONLY, TREE_CONSTANT and
   TREE_THIS_VOLATILE from CODE and NODE.  */

tree
build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common part needs clearing; the rest is set below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  /* Inherit flags from the operand, unless it is a type node (whose flag
     bits have different meanings).  */
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    TREE_SIDE_EFFECTS (t) = 1;
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4400
/* Helper for the build<N>_stat functions below: store ARG<N> as operand N
   of T and downgrade the local SIDE_EFFECTS / READ_ONLY / CONSTANT
   accumulators according to the corresponding flags of the argument.
   Type nodes are skipped since their flag bits have different meanings.
   Note the macro only ever writes `read_only' and `constant'; callers
   that don't use those accumulators leave them untouched otherwise.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4415
/* Build a two-operand expression of code CODE, type TT and operands
   ARG0/ARG1, deriving TREE_SIDE_EFFECTS, TREE_READONLY, TREE_CONSTANT
   and TREE_THIS_VOLATILE from the operands via PROCESS_ARG.  */

tree
build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* Plain arithmetic on pointer-typed results is only allowed on
     constants; variable pointer arithmetic must use POINTER_PLUS_EXPR.  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
         we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &OBJ inherits readonly/volatile from OBJ itself.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      TREE_CONSTANT (t) = constant;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
4476
4477
/* Build a three-operand expression of code CODE, type TT and operands
   ARG0/ARG1/ARG2.  `constant' is only written by PROCESS_ARG here and its
   value is never used by this function.  */

tree
build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4518
/* Build a 4-operand expression of code CODE and type TT with operands
   ARG0 through ARG3.  The CONSTANT and READ_ONLY locals are required
   by the PROCESS_ARG macro, which stores each operand and merges its
   flags into them (only SIDE_EFFECTS is consumed here).  */

tree
build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  /* Reference nodes inherit volatility from the object referenced.  */
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4545
/* Build a 5-operand expression of code CODE and type TT with operands
   ARG0 through ARG4.  The CONSTANT and READ_ONLY locals are required
   by the PROCESS_ARG macro, which stores each operand and merges its
   flags into them.  */

tree
build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* A TARGET_MEM_REF of a decl's address inherits READONLY and
	 VOLATILE from the decl itself.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    /* Other reference nodes inherit volatility from the object
       referenced.  */
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4583
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  LOC is the source location to set on the
   resulting tree.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  HOST_WIDE_INT offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* Operand 1 carries the collapsed byte offset; it is built with the
     original pointer type PTYPE (presumably significant for alias
     analysis -- confirm against the MEM_REF documentation).  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
4609
/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T,
   i.e. operand 1, interpreted as a signed value.  */

offset_int
mem_ref_offset (const_tree t)
{
  return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
}
4617
4618 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4619 offsetted by OFFSET units. */
4620
4621 tree
4622 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4623 {
4624 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4625 build_fold_addr_expr (base),
4626 build_int_cst (ptr_type_node, offset));
4627 tree addr = build1 (ADDR_EXPR, type, ref);
4628 recompute_tree_invariant_for_addr_expr (addr);
4629 return addr;
4630 }
4631
4632 /* Similar except don't specify the TREE_TYPE
4633 and leave the TREE_SIDE_EFFECTS as 0.
4634 It is permissible for arguments to be null,
4635 or even garbage if their values do not matter. */
4636
4637 tree
4638 build_nt (enum tree_code code, ...)
4639 {
4640 tree t;
4641 int length;
4642 int i;
4643 va_list p;
4644
4645 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4646
4647 va_start (p, code);
4648
4649 t = make_node (code);
4650 length = TREE_CODE_LENGTH (code);
4651
4652 for (i = 0; i < length; i++)
4653 TREE_OPERAND (t, i) = va_arg (p, tree);
4654
4655 va_end (p);
4656 return t;
4657 }
4658
4659 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4660 tree vec. */
4661
4662 tree
4663 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4664 {
4665 tree ret, t;
4666 unsigned int ix;
4667
4668 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4669 CALL_EXPR_FN (ret) = fn;
4670 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4671 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4672 CALL_EXPR_ARG (ret, ix) = t;
4673 return ret;
4674 }
4675 \f
4676 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4677 We do NOT enter this node in any sort of symbol table.
4678
4679 LOC is the location of the decl.
4680
4681 layout_decl is used to set up the decl's storage layout.
4682 Other slots are initialized to 0 or null pointers. */
4683
4684 tree
4685 build_decl_stat (location_t loc, enum tree_code code, tree name,
4686 tree type MEM_STAT_DECL)
4687 {
4688 tree t;
4689
4690 t = make_node_stat (code PASS_MEM_STAT);
4691 DECL_SOURCE_LOCATION (t) = loc;
4692
4693 /* if (type == error_mark_node)
4694 type = integer_type_node; */
4695 /* That is not done, deliberately, so that having error_mark_node
4696 as the type can suppress useless errors in the use of this variable. */
4697
4698 DECL_NAME (t) = name;
4699 TREE_TYPE (t) = type;
4700
4701 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4702 layout_decl (t, 0);
4703
4704 return t;
4705 }
4706
4707 /* Builds and returns function declaration with NAME and TYPE. */
4708
4709 tree
4710 build_fn_decl (const char *name, tree type)
4711 {
4712 tree id = get_identifier (name);
4713 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4714
4715 DECL_EXTERNAL (decl) = 1;
4716 TREE_PUBLIC (decl) = 1;
4717 DECL_ARTIFICIAL (decl) = 1;
4718 TREE_NOTHROW (decl) = 1;
4719
4720 return decl;
4721 }
4722
/* Global list of every translation-unit decl built so far; pushed to
   by build_translation_unit_decl.  */
vec<tree, va_gc> *all_translation_units;
4724
4725 /* Builds a new translation-unit decl with name NAME, queues it in the
4726 global list of translation-unit decls and returns it. */
4727
4728 tree
4729 build_translation_unit_decl (tree name)
4730 {
4731 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4732 name, NULL_TREE);
4733 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4734 vec_safe_push (all_translation_units, tu);
4735 return tu;
4736 }
4737
4738 \f
4739 /* BLOCK nodes are used to represent the structure of binding contours
4740 and declarations, once those contours have been exited and their contents
4741 compiled. This information is used for outputting debugging info. */
4742
4743 tree
4744 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4745 {
4746 tree block = make_node (BLOCK);
4747
4748 BLOCK_VARS (block) = vars;
4749 BLOCK_SUBBLOCKS (block) = subblocks;
4750 BLOCK_SUPERCONTEXT (block) = supercontext;
4751 BLOCK_CHAIN (block) = chain;
4752 return block;
4753 }
4754
4755 \f
4756 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4757
4758 LOC is the location to use in tree T. */
4759
4760 void
4761 protected_set_expr_location (tree t, location_t loc)
4762 {
4763 if (CAN_HAVE_LOCATION_P (t))
4764 SET_EXPR_LOCATION (t, loc);
4765 }
4766 \f
/* Return a declaration like DDECL except that its DECL_ATTRIBUTES
   is ATTRIBUTE.

   Note that DDECL itself is modified in place and returned; no copy
   is made, and any previous attribute list is replaced wholesale.  */

tree
build_decl_attribute_variant (tree ddecl, tree attribute)
{
  DECL_ATTRIBUTES (ddecl) = attribute;
  return ddecl;
}
4776
/* Return a type like TTYPE except that its TYPE_ATTRIBUTE
   is ATTRIBUTE and its qualifiers are QUALS.

   Record such modified types already made so we don't make duplicates.  */

tree
build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
{
  if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
    {
      inchash::hash hstate;
      tree ntype;
      int i;
      tree t;
      enum tree_code code = TREE_CODE (ttype);

      /* Building a distinct copy of a tagged type is inappropriate; it
	 causes breakage in code that expects there to be a one-to-one
	 relationship between a struct and its fields.
	 build_duplicate_type is another solution (as used in
	 handle_transparent_union_attribute), but that doesn't play well
	 with the stronger C++ type identity model.  */
      if (TREE_CODE (ttype) == RECORD_TYPE
	  || TREE_CODE (ttype) == UNION_TYPE
	  || TREE_CODE (ttype) == QUAL_UNION_TYPE
	  || TREE_CODE (ttype) == ENUMERAL_TYPE)
	{
	  warning (OPT_Wattributes,
		   "ignoring attributes applied to %qT after definition",
		   TYPE_MAIN_VARIANT (ttype));
	  return build_qualified_type (ttype, quals);
	}

      /* Build the attribute variant on the unqualified type, re-applying
	 QUALS only at the very end.  */
      ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
      ntype = build_distinct_type_copy (ttype);

      TYPE_ATTRIBUTES (ntype) = attribute;

      /* Hash the new variant for the type hash table: the tree code,
	 the target type, the attribute list, and code-specific
	 distinguishing properties.  */
      hstate.add_int (code);
      if (TREE_TYPE (ntype))
	hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
      attribute_hash_list (attribute, hstate);

      switch (TREE_CODE (ntype))
	{
	case FUNCTION_TYPE:
	  type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
	  break;
	case ARRAY_TYPE:
	  if (TYPE_DOMAIN (ntype))
	    hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
	  break;
	case INTEGER_TYPE:
	  /* Hash the max value an element (wide-int chunk) at a time.  */
	  t = TYPE_MAX_VALUE (ntype);
	  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	    hstate.add_object (TREE_INT_CST_ELT (t, i));
	  break;
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  {
	    unsigned int precision = TYPE_PRECISION (ntype);
	    hstate.add_object (precision);
	  }
	  break;
	default:
	  break;
	}

      /* Reuse an existing equivalent type if one is already recorded.  */
      ntype = type_hash_canon (hstate.end(), ntype);

      /* If the target-dependent attributes make NTYPE different from
	 its canonical type, we will need to use structural equality
	 checks for this type.  */
      if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
	  || !comp_type_attributes (ntype, ttype))
	SET_TYPE_STRUCTURAL_EQUALITY (ntype);
      else if (TYPE_CANONICAL (ntype) == ntype)
	TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);

      ttype = build_qualified_type (ntype, quals);
    }
  else if (TYPE_QUALS (ttype) != quals)
    ttype = build_qualified_type (ttype, quals);

  return ttype;
}
4863
/* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
   the same.  */

static bool
omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
{
  tree cl1, cl2;
  /* Walk the two clause chains in lockstep; any structural difference
     makes them unequal.  */
  for (cl1 = clauses1, cl2 = clauses2;
       cl1 && cl2;
       cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
    {
      if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
	return false;
      /* All clauses except SIMDLEN are compared by their decl first.  */
      if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
	{
	  if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
				OMP_CLAUSE_DECL (cl2)) != 1)
	    return false;
	}
      /* Then compare the clause-specific operands.  */
      switch (OMP_CLAUSE_CODE (cl1))
	{
	case OMP_CLAUSE_ALIGNED:
	  if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
				OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
	    return false;
	  break;
	case OMP_CLAUSE_LINEAR:
	  if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
				OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
	    return false;
	  break;
	case OMP_CLAUSE_SIMDLEN:
	  if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
				OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
	    return false;
	  /* FALLTHRU -- benign, the default label only breaks.  */
	default:
	  break;
	}
    }
  /* NOTE(review): the loop exits when either chain ends, yet we return
     true unconditionally -- chains of different lengths compare equal.
     Confirm callers guarantee equal lengths.  */
  return true;
}
4905
4906 /* Compare two constructor-element-type constants. Return 1 if the lists
4907 are known to be equal; otherwise return 0. */
4908
4909 static bool
4910 simple_cst_list_equal (const_tree l1, const_tree l2)
4911 {
4912 while (l1 != NULL_TREE && l2 != NULL_TREE)
4913 {
4914 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4915 return false;
4916
4917 l1 = TREE_CHAIN (l1);
4918 l2 = TREE_CHAIN (l2);
4919 }
4920
4921 return l1 == l2;
4922 }
4923
4924 /* Compare two identifier nodes representing attributes. Either one may
4925 be in wrapped __ATTR__ form. Return true if they are the same, false
4926 otherwise. */
4927
4928 static bool
4929 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4930 {
4931 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4932 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4933 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4934
4935 /* Identifiers can be compared directly for equality. */
4936 if (attr1 == attr2)
4937 return true;
4938
4939 /* If they are not equal, they may still be one in the form
4940 'text' while the other one is in the form '__text__'. TODO:
4941 If we were storing attributes in normalized 'text' form, then
4942 this could all go away and we could take full advantage of
4943 the fact that we're comparing identifiers. :-) */
4944 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4945 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4946
4947 if (attr2_len == attr1_len + 4)
4948 {
4949 const char *p = IDENTIFIER_POINTER (attr2);
4950 const char *q = IDENTIFIER_POINTER (attr1);
4951 if (p[0] == '_' && p[1] == '_'
4952 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4953 && strncmp (q, p + 2, attr1_len) == 0)
4954 return true;;
4955 }
4956 else if (attr2_len + 4 == attr1_len)
4957 {
4958 const char *p = IDENTIFIER_POINTER (attr2);
4959 const char *q = IDENTIFIER_POINTER (attr1);
4960 if (q[0] == '_' && q[1] == '_'
4961 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4962 && strncmp (q + 2, p, attr2_len) == 0)
4963 return true;
4964 }
4965
4966 return false;
4967 }
4968
/* Compare two attributes for their value identity.  Return true if the
   attribute values are known to be equal; otherwise return false.  */

bool
attribute_value_equal (const_tree attr1, const_tree attr2)
{
  /* Identical value trees (including both NULL) are trivially equal.  */
  if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
    return true;

  if (TREE_VALUE (attr1) != NULL_TREE
      && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
      && TREE_VALUE (attr2) != NULL_TREE
      && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
    {
      /* Handle attribute format.  */
      if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
	{
	  attr1 = TREE_VALUE (attr1);
	  attr2 = TREE_VALUE (attr2);
	  /* Compare the archetypes (printf/scanf/strftime/...).  */
	  if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
				       TREE_VALUE (attr2)))
	    return false;
	  /* Archetypes are the same.  Compare the rest.  */
	  return (simple_cst_list_equal (TREE_CHAIN (attr1),
					 TREE_CHAIN (attr2)) == 1);
	}
      return (simple_cst_list_equal (TREE_VALUE (attr1),
				     TREE_VALUE (attr2)) == 1);
    }

  /* With OpenMP enabled, "omp declare simd" values are OMP_CLAUSE
     chains and need clause-wise comparison.  */
  if ((flag_openmp || flag_openmp_simd)
      && TREE_VALUE (attr1) && TREE_VALUE (attr2)
      && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
      && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
    return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
					   TREE_VALUE (attr2));

  return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
}
5009
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
int
comp_type_attributes (const_tree type1, const_tree type2)
{
  const_tree a1 = TYPE_ATTRIBUTES (type1);
  const_tree a2 = TYPE_ATTRIBUTES (type2);
  const_tree a;

  if (a1 == a2)
    return 1;
  /* Check that every type-identity-affecting attribute of TYPE1 has an
     equal-valued counterpart on TYPE2; A is left pointing at the first
     mismatch (or NULL_TREE if none).  */
  for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
    {
      const struct attribute_spec *as;
      const_tree attr;

      as = lookup_attribute_spec (get_attribute_name (a));
      if (!as || as->affects_type_identity == false)
	continue;

      attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
      if (!attr || !attribute_value_equal (a, attr))
	break;
    }
  if (!a)
    {
      /* Conversely check that TYPE2 has no identity-affecting attribute
	 that TYPE1 lacks.  */
      for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
	{
	  const struct attribute_spec *as;

	  as = lookup_attribute_spec (get_attribute_name (a));
	  if (!as || as->affects_type_identity == false)
	    continue;

	  if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
	    break;
	  /* We don't need to compare trees again, as we did this
	     already in first loop.  */
	}
      /* All types - affecting identity - are equal, so
	 there is no need to call target hook for comparison.  */
      if (!a)
	return 1;
    }
  /* A is the mismatching attribute (or the remainder of its chain);
     a transaction_safe mismatch is always incompatible.  */
  if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
    return 0;
  /* As some type combinations - like default calling-convention - might
     be compatible, we have to call the target hook to get the final result.  */
  return targetm.comp_type_attributes (type1, type2);
}
5061
5062 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5063 is ATTRIBUTE.
5064
5065 Record such modified types already made so we don't make duplicates. */
5066
5067 tree
5068 build_type_attribute_variant (tree ttype, tree attribute)
5069 {
5070 return build_type_attribute_qual_variant (ttype, attribute,
5071 TYPE_QUALS (ttype));
5072 }
5073
5074
/* Reset the expression *EXPR_P, a size or position.

   ??? We could reset all non-constant sizes or positions.  But it's cheap
   enough to not do so and refrain from adding workarounds to dwarf2out.c.

   We need to reset self-referential sizes or positions because they cannot
   be gimplified and thus can contain a CALL_EXPR after the gimplification
   is finished, which will run afoul of LTO streaming.  And they need to be
   reset to something essentially dummy but not constant, so as to preserve
   the properties of the object they are attached to.  */

static inline void
free_lang_data_in_one_sizepos (tree *expr_p)
{
  tree expr = *expr_p;
  /* Replace self-referential expressions by a bare PLACEHOLDER_EXPR
     of the same type.  */
  if (CONTAINS_PLACEHOLDER_P (expr))
    *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
}
5093
5094
/* Reset all the fields in a binfo node BINFO.  We only keep
   BINFO_VTABLE, which is used by gimple_fold_obj_type_ref.  */

static void
free_lang_data_in_binfo (tree binfo)
{
  unsigned i;
  tree t;

  gcc_assert (TREE_CODE (binfo) == TREE_BINFO);

  BINFO_VIRTUALS (binfo) = NULL_TREE;
  BINFO_BASE_ACCESSES (binfo) = NULL;
  BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
  BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;

  /* Recurse into every base binfo.  */
  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
    free_lang_data_in_binfo (t);
}
5114
5115
/* Reset all language specific information still present in TYPE.  */

static void
free_lang_data_in_type (tree type)
{
  gcc_assert (TYPE_P (type));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (type);

  TREE_LANG_FLAG_0 (type) = 0;
  TREE_LANG_FLAG_1 (type) = 0;
  TREE_LANG_FLAG_2 (type) = 0;
  TREE_LANG_FLAG_3 (type) = 0;
  TREE_LANG_FLAG_4 (type) = 0;
  TREE_LANG_FLAG_5 (type) = 0;
  TREE_LANG_FLAG_6 (type) = 0;

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      /* Remove the const and volatile qualifiers from arguments.  The
	 C++ front end removes them, but the C front end does not,
	 leading to false ODR violation errors when merging two
	 instances of the same function signature compiled by
	 different front ends.  */
      tree p;

      for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  tree arg_type = TREE_VALUE (p);

	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
	    {
	      int quals = TYPE_QUALS (arg_type)
			  & ~TYPE_QUAL_CONST
			  & ~TYPE_QUAL_VOLATILE;
	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
	      free_lang_data_in_type (TREE_VALUE (p));
	    }
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_PURPOSE (p) = NULL;
	}
      /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE.  */
      TYPE_MINVAL (type) = NULL;
    }
  if (TREE_CODE (type) == METHOD_TYPE)
    {
      tree p;

      for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_PURPOSE (p) = NULL;
	}
      /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE.  */
      TYPE_MINVAL (type) = NULL;
    }

  /* Remove members that are not actually FIELD_DECLs from the field
     list of an aggregate.  These occur in C++.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree prev, member;

      /* Note that TYPE_FIELDS can be shared across distinct
	 TREE_TYPEs.  Therefore, if the first field of TYPE_FIELDS is
	 to be removed, we cannot set its TREE_CHAIN to NULL.
	 Otherwise, we would not be able to find all the other fields
	 in the other instances of this TREE_TYPE.

	 This was causing an ICE in testsuite/g++.dg/lto/20080915.C.  */
      prev = NULL_TREE;
      member = TYPE_FIELDS (type);
      while (member)
	{
	  /* Keep FIELD_DECLs, and TYPE_DECLs that debug info still
	     needs (visible, non-terse debug level, non-redundant
	     typedef); relink everything else out of the list.  */
	  if (TREE_CODE (member) == FIELD_DECL
	      || (TREE_CODE (member) == TYPE_DECL
		  && !DECL_IGNORED_P (member)
		  && debug_info_level > DINFO_LEVEL_TERSE
		  && !is_redundant_typedef (member)))
	    {
	      if (prev)
		TREE_CHAIN (prev) = member;
	      else
		TYPE_FIELDS (type) = member;
	      prev = member;
	    }

	  member = TREE_CHAIN (member);
	}

      if (prev)
	TREE_CHAIN (prev) = NULL_TREE;
      else
	TYPE_FIELDS (type) = NULL_TREE;

      /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
	 and dangle the pointer from time to time.  */
      if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
	TYPE_VFIELD (type) = NULL_TREE;

      /* Remove TYPE_METHODS list.  While it would be nice to keep it
	 to enable ODR warnings about different method lists, doing so
	 seems to impractically increase size of LTO data streamed.
	 Keep the information if TYPE_METHODS was non-NULL. This is used
	 by function.c and pretty printers.  */
      if (TYPE_METHODS (type))
	TYPE_METHODS (type) = error_mark_node;
      if (TYPE_BINFO (type))
	{
	  free_lang_data_in_binfo (TYPE_BINFO (type));
	  /* We need to preserve link to bases and virtual table for all
	     polymorphic types to make devirtualization machinery working.
	     Debug output cares only about bases, but output also
	     virtual table pointers so merging of -fdevirtualize and
	     -fno-devirtualize units is easier.  */
	  if ((!BINFO_VTABLE (TYPE_BINFO (type))
	       || !flag_devirtualize)
	      && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
		   && !BINFO_VTABLE (TYPE_BINFO (type)))
		  || debug_info_level != DINFO_LEVEL_NONE))
	    TYPE_BINFO (type) = NULL;
	}
    }
  else
    {
      /* For non-aggregate types, clear out the language slot (which
	 overloads TYPE_BINFO).  */
      TYPE_LANG_SLOT_1 (type) = NULL_TREE;

      if (INTEGRAL_TYPE_P (type)
	  || SCALAR_FLOAT_TYPE_P (type)
	  || FIXED_POINT_TYPE_P (type))
	{
	  free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
	  free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
	}
    }

  free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
  free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));

  /* Replace a BLOCK context by the nearest enclosing non-BLOCK
     supercontext.  */
  if (TYPE_CONTEXT (type)
      && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
    {
      tree ctx = TYPE_CONTEXT (type);
      do
	{
	  ctx = BLOCK_SUPERCONTEXT (ctx);
	}
      while (ctx && TREE_CODE (ctx) == BLOCK);
      TYPE_CONTEXT (type) = ctx;
    }
}
5270
5271
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (flag_lto_odr_type_mering
      && TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl)
      && decl == TYPE_NAME (TREE_TYPE (decl))
      && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
      && (type_with_linkage_p (TREE_TYPE (decl))
	  || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
      && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    return !DECL_ASSEMBLER_NAME_SET_P (decl);
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (TREE_CODE (decl) != FUNCTION_DECL
      && TREE_CODE (decl) != VAR_DECL)
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (DECL_BUILT_IN (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
5345
5346
/* Reset all language specific information still present in symbol
   DECL.  */

static void
free_lang_data_in_decl (tree decl)
{
  gcc_assert (DECL_P (decl));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (decl);

  TREE_LANG_FLAG_0 (decl) = 0;
  TREE_LANG_FLAG_1 (decl) = 0;
  TREE_LANG_FLAG_2 (decl) = 0;
  TREE_LANG_FLAG_3 (decl) = 0;
  TREE_LANG_FLAG_4 (decl) = 0;
  TREE_LANG_FLAG_5 (decl) = 0;
  TREE_LANG_FLAG_6 (decl) = 0;

  free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
  free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
      if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (decl) = NULL_TREE;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      struct cgraph_node *node;
      /* Release the body of functions that will not be emitted: those
	 without a cgraph node, or whose node has neither a definition
	 nor clones.  */
      if (!(node = cgraph_node::get (decl))
	  || (!node->definition && !node->clones))
	{
	  if (node)
	    node->release_body ();
	  else
	    {
	      release_function_body (decl);
	      DECL_ARGUMENTS (decl) = NULL;
	      DECL_RESULT (decl) = NULL;
	      DECL_INITIAL (decl) = error_mark_node;
	    }
	}
      if (gimple_has_body_p (decl))
	{
	  tree t;

	  /* If DECL has a gimple body, then the context for its
	     arguments must be DECL.  Otherwise, it doesn't really
	     matter, as we will not be emitting any code for DECL.  In
	     general, there may be other instances of DECL created by
	     the front end and since PARM_DECLs are generally shared,
	     their DECL_CONTEXT changes as the replicas of DECL are
	     created.  The only time where DECL_CONTEXT is important
	     is for the FUNCTION_DECLs that have a gimple body (since
	     the PARM_DECL will be used in the function's body).  */
	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
	    DECL_CONTEXT (t) = decl;
	  /* Fill in default target/optimization option nodes where the
	     FE left none, so streaming always has something to emit.  */
	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
	      = target_option_default_node;
	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
	      = optimization_default_node;
	}

      /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
	 At this point, it is not needed anymore.  */
      DECL_SAVED_TREE (decl) = NULL_TREE;

      /* Clear the abstract origin if it refers to a method.  Otherwise
	 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
	 origin will not be output correctly.  */
      if (DECL_ABSTRACT_ORIGIN (decl)
	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
	  && RECORD_OR_UNION_TYPE_P
	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;

      /* Sometimes the C++ frontend doesn't manage to transform a temporary
	 DECL_VINDEX referring to itself into a vtable slot number as it
	 should.  Happens with functions that are copied and then forgotten
	 about.  Just clear it, it won't matter anymore.  */
      if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
	DECL_VINDEX (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      /* Drop initializers of non-constant externals and of automatic
	 (function-scope, non-static) variables.  */
      if ((DECL_EXTERNAL (decl)
	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
	DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL
	   || TREE_CODE (decl) == FIELD_DECL)
    DECL_INITIAL (decl) = NULL_TREE;
  else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
	   && DECL_INITIAL (decl)
	   && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
    {
      /* Strip builtins from the translation-unit BLOCK.  We still have targets
	 without builtin_decl_explicit support and also builtins are shared
	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
      tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
      while (*nextp)
	{
	  tree var = *nextp;
	  if (TREE_CODE (var) == FUNCTION_DECL
	      && DECL_BUILT_IN (var))
	    *nextp = TREE_CHAIN (var);
	  else
	    nextp = &TREE_CHAIN (var);
	}
    }
}
5463
5464
/* Data used when collecting DECLs and TYPEs for language data removal.
   Populated while walking the IL (see find_decls_types_r below).  */

struct free_lang_data_d
{
  /* Worklist to avoid excessive recursion.  */
  vec<tree> worklist;

  /* Set of traversed objects.  Used to avoid duplicate visits.  */
  hash_set<tree> *pset;

  /* Array of symbols to process with free_lang_data_in_decl.  */
  vec<tree> decls;

  /* Array of types to process with free_lang_data_in_type.  */
  vec<tree> types;
};
5481
5482
/* Save all language fields needed to generate proper debug information
   for DECL.  This saves most fields cleared out by free_lang_data_in_decl.

   NOTE(review): currently a stub -- only the precondition is asserted
   and nothing is actually saved.  */

static void
save_debug_info_for_decl (tree t)
{
  /*struct saved_debug_info_d *sdi;*/

  gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));

  /* FIXME.  Partial implementation for saving debug info removed.  */
}
5495
5496
/* Save all language fields needed to generate proper debug information
   for TYPE.  This saves most fields cleared out by free_lang_data_in_type.

   NOTE(review): currently a stub -- only the precondition is asserted
   and nothing is actually saved.  */

static void
save_debug_info_for_type (tree t)
{
  /*struct saved_debug_info_d *sdi;*/

  gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));

  /* FIXME.  Partial implementation for saving debug info removed.  */
}
5509
5510
/* Add type or decl T to one of the list of tree nodes that need their
   language data removed.  The lists are held inside FLD.  Anything
   that is neither a decl nor a type is a caller bug.  */

static void
add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
{
  if (DECL_P (t))
    {
      fld->decls.safe_push (t);
      /* Preserve debug-relevant fields before they are cleared
	 (currently a stub, see save_debug_info_for_decl).  */
      if (debug_info_level > DINFO_LEVEL_TERSE)
	save_debug_info_for_decl (t);
    }
  else if (TYPE_P (t))
    {
      fld->types.safe_push (t);
      if (debug_info_level > DINFO_LEVEL_TERSE)
	save_debug_info_for_type (t);
    }
  else
    gcc_unreachable ();
}
5532
5533 /* Push tree node T into FLD->WORKLIST. */
5534
5535 static inline void
5536 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5537 {
5538 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5539 fld->worklist.safe_push ((t));
5540 }
5541
5542
5543 /* Operand callback helper for free_lang_data_in_node. *TP is the
5544 subtree operand being considered. */
5545
/* TP points at the subtree being considered, WS is walk_tree's
   walk-subtrees flag (cleared below whenever we do our own traversal),
   and DATA is the free_lang_data_d collector.  Always returns
   NULL_TREE so the walk never terminates early.  */

static tree
find_decls_types_r (tree *tp, int *ws, void *data)
{
  tree t = *tp;
  struct free_lang_data_d *fld = (struct free_lang_data_d *) data;

  /* Nothing to collect for a TREE_LIST container itself.  */
  if (TREE_CODE (t) == TREE_LIST)
    return NULL_TREE;

  /* Language specific nodes will be removed, so there is no need
     to gather anything under them.  */
  if (is_lang_specific (t))
    {
      *ws = 0;
      return NULL_TREE;
    }

  if (DECL_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 decls, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      fld_worklist_push (DECL_NAME (t), fld);
      fld_worklist_push (DECL_CONTEXT (t), fld);
      fld_worklist_push (DECL_SIZE (t), fld);
      fld_worklist_push (DECL_SIZE_UNIT (t), fld);

      /* We are going to remove everything under DECL_INITIAL for
	 TYPE_DECLs.  No point walking them.  */
      if (TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (DECL_INITIAL (t), fld);

      fld_worklist_push (DECL_ATTRIBUTES (t), fld);
      fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);

      /* Decl-kind specific fields.  */
      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
	  fld_worklist_push (DECL_RESULT (t), fld);
	}
      else if (TREE_CODE (t) == TYPE_DECL)
	{
	  fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
	}
      else if (TREE_CODE (t) == FIELD_DECL)
	{
	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
	  fld_worklist_push (DECL_FCONTEXT (t), fld);
	}

      if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	fld_worklist_push (DECL_VALUE_EXPR (t), fld);

      if (TREE_CODE (t) != FIELD_DECL
	  && TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (TREE_CHAIN (t), fld);
      /* Everything interesting was queued above; stop walk_tree from
	 recursing into T's subtrees.  */
      *ws = 0;
    }
  else if (TYPE_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 types, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
      fld_worklist_push (TYPE_SIZE (t), fld);
      fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
      fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
      fld_worklist_push (TYPE_POINTER_TO (t), fld);
      fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
      fld_worklist_push (TYPE_NAME (t), fld);
      /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
	 them and thus do not and want not to reach unused pointer types
	 this way.  */
      if (!POINTER_TYPE_P (t))
	fld_worklist_push (TYPE_MINVAL (t), fld);
      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_MAXVAL (t), fld);
      fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
	 do not and want not to reach unused variants this way.  */
      if (TYPE_CONTEXT (t))
	{
	  tree ctx = TYPE_CONTEXT (t);
	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
	     So push that instead.  */
	  while (ctx && TREE_CODE (ctx) == BLOCK)
	    ctx = BLOCK_SUPERCONTEXT (ctx);
	  fld_worklist_push (ctx, fld);
	}
      /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do not
	 and want not to reach unused types this way.  */

      if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
	{
	  unsigned i;
	  tree tem;
	  /* Base types and the virtual method table entries.  */
	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
	    fld_worklist_push (TREE_TYPE (tem), fld);
	  tem = BINFO_VIRTUALS (TYPE_BINFO (t));
	  if (tem
	      /* The Java FE overloads BINFO_VIRTUALS for its own purpose.  */
	      && TREE_CODE (tem) == TREE_LIST)
	    do
	      {
		fld_worklist_push (TREE_VALUE (tem), fld);
		tem = TREE_CHAIN (tem);
	      }
	    while (tem);
	}
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  tree tem;
	  /* Push all TYPE_FIELDS - there can be interleaving interesting
	     and non-interesting things.  */
	  tem = TYPE_FIELDS (t);
	  while (tem)
	    {
	      if (TREE_CODE (tem) == FIELD_DECL
		  || (TREE_CODE (tem) == TYPE_DECL
		      && !DECL_IGNORED_P (tem)
		      && debug_info_level > DINFO_LEVEL_TERSE
		      && !is_redundant_typedef (tem)))
		fld_worklist_push (tem, fld);
	      tem = TREE_CHAIN (tem);
	    }
	}

      fld_worklist_push (TYPE_STUB_DECL (t), fld);
      /* As for decls: the manual traversal above replaces recursion.  */
      *ws = 0;
    }
  else if (TREE_CODE (t) == BLOCK)
    {
      tree tem;
      for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
	fld_worklist_push (tem, fld);
      for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
	fld_worklist_push (tem, fld);
      fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
    }

  /* Finally queue T's own type, for any node kind that has one.  */
  if (TREE_CODE (t) != IDENTIFIER_NODE
      && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
    fld_worklist_push (TREE_TYPE (t), fld);

  return NULL_TREE;
}
5698
5699
5700 /* Find decls and types in T. */
5701
5702 static void
5703 find_decls_types (tree t, struct free_lang_data_d *fld)
5704 {
5705 while (1)
5706 {
5707 if (!fld->pset->contains (t))
5708 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5709 if (fld->worklist.is_empty ())
5710 break;
5711 t = fld->worklist.pop ();
5712 }
5713 }
5714
5715 /* Translate all the types in LIST with the corresponding runtime
5716 types. */
5717
5718 static tree
5719 get_eh_types_for_runtime (tree list)
5720 {
5721 tree head, prev;
5722
5723 if (list == NULL_TREE)
5724 return NULL_TREE;
5725
5726 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5727 prev = head;
5728 list = TREE_CHAIN (list);
5729 while (list)
5730 {
5731 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5732 TREE_CHAIN (prev) = n;
5733 prev = TREE_CHAIN (prev);
5734 list = TREE_CHAIN (list);
5735 }
5736
5737 return head;
5738 }
5739
5740
5741 /* Find decls and types referenced in EH region R and store them in
5742 FLD->DECLS and FLD->TYPES. */
5743
static void
find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
{
  switch (r->type)
    {
    case ERT_CLEANUP:
      /* Cleanup regions carry no front-end trees to collect.  */
      break;

    case ERT_TRY:
      {
	eh_catch c;

	/* The types referenced in each catch must first be changed to the
	   EH types used at runtime.  This removes references to FE types
	   in the region.  */
	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	  {
	    c->type_list = get_eh_types_for_runtime (c->type_list);
	    walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      /* Likewise translate the allowed-exceptions list to runtime
	 types before walking it.  */
      r->u.allowed.type_list
	= get_eh_types_for_runtime (r->u.allowed.type_list);
      walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
      break;

    case ERT_MUST_NOT_THROW:
      walk_tree (&r->u.must_not_throw.failure_decl,
		 find_decls_types_r, fld, fld->pset);
      break;
    }
}
5779
5780
5781 /* Find decls and types referenced in cgraph node N and store them in
5782 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5783 look for *every* kind of DECL and TYPE node reachable from N,
5784 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5785 NAMESPACE_DECLs, etc). */
5786
static void
find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
{
  basic_block bb;
  struct function *fn;
  unsigned ix;
  tree t;

  /* Start with the function declaration itself.  */
  find_decls_types (n->decl, fld);

  /* Without a GIMPLE body there is nothing more to scan.  */
  if (!gimple_has_body_p (n->decl))
    return;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  fn = DECL_STRUCT_FUNCTION (n->decl);

  /* Traverse locals. */
  FOR_EACH_LOCAL_DECL (fn, ix, t)
    find_decls_types (t, fld);

  /* Traverse EH regions in FN.  */
  {
    eh_region r;
    FOR_ALL_EH_REGION_FN (r, fn)
      find_decls_types_in_eh_region (r, fld);
  }

  /* Traverse every statement in FN.  */
  FOR_EACH_BB_FN (bb, fn)
    {
      gphi_iterator psi;
      gimple_stmt_iterator si;
      unsigned i;

      /* PHI nodes: scan every argument.  */
      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
	{
	  gphi *phi = psi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
	      find_decls_types (*arg_p, fld);
	    }
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);

	  /* Call statements also carry a function type; walk it
	     explicitly in addition to the operands below.  */
	  if (is_gimple_call (stmt))
	    find_decls_types (gimple_call_fntype (stmt), fld);

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree arg = gimple_op (stmt, i);
	      find_decls_types (arg, fld);
	    }
	}
    }
}
5848
5849
5850 /* Find decls and types referenced in varpool node N and store them in
5851 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5852 look for *every* kind of DECL and TYPE node reachable from N,
5853 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5854 NAMESPACE_DECLs, etc). */
5855
static void
find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
{
  /* Everything reachable from the variable's declaration.  */
  find_decls_types (v->decl, fld);
}
5861
5862 /* If T needs an assembler name, have one created for it. */
5863
5864 void
5865 assign_assembler_name_if_neeeded (tree t)
5866 {
5867 if (need_assembler_name_p (t))
5868 {
5869 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5870 diagnostics that use input_location to show locus
5871 information. The problem here is that, at this point,
5872 input_location is generally anchored to the end of the file
5873 (since the parser is long gone), so we don't have a good
5874 position to pin it to.
5875
5876 To alleviate this problem, this uses the location of T's
5877 declaration. Examples of this are
5878 testsuite/g++.dg/template/cond2.C and
5879 testsuite/g++.dg/template/pr35240.C. */
5880 location_t saved_location = input_location;
5881 input_location = DECL_SOURCE_LOCATION (t);
5882
5883 decl_assembler_name (t);
5884
5885 input_location = saved_location;
5886 }
5887 }
5888
5889
5890 /* Free language specific information for every operand and expression
5891 in every node of the call graph. This process operates in three stages:
5892
5893 1- Every callgraph node and varpool node is traversed looking for
5894 decls and types embedded in them. This is a more exhaustive
5895 search than that done by find_referenced_vars, because it will
5896 also collect individual fields, decls embedded in types, etc.
5897
5898 2- All the decls found are sent to free_lang_data_in_decl.
5899
5900 3- All the types found are sent to free_lang_data_in_type.
5901
5902 The ordering between decls and types is important because
5903 free_lang_data_in_decl sets assembler names, which includes
5904 mangling. So types cannot be freed up until assembler names have
5905 been set up. */
5906
static void
free_lang_data_in_cgraph (void)
{
  struct cgraph_node *n;
  varpool_node *v;
  struct free_lang_data_d fld;
  tree t;
  unsigned i;
  alias_pair *p;

  /* Initialize sets and arrays to store referenced decls and types.  */
  fld.pset = new hash_set<tree>;
  fld.worklist.create (0);
  fld.decls.create (100);
  fld.types.create (100);

  /* Find decls and types in the body of every function in the callgraph.  */
  FOR_EACH_FUNCTION (n)
    find_decls_types_in_node (n, &fld);

  /* Also collect decls referenced through alias pairs.  */
  FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
    find_decls_types (p->decl, &fld);

  /* Find decls and types in every varpool symbol.  */
  FOR_EACH_VARIABLE (v)
    find_decls_types_in_var (v, &fld);

  /* Set the assembler name on every decl found.  We need to do this
     now because free_lang_data_in_decl will invalidate data needed
     for mangling.  This breaks mangling on interdependent decls.  */
  FOR_EACH_VEC_ELT (fld.decls, i, t)
    assign_assembler_name_if_neeeded (t);

  /* Traverse every decl found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld.decls, i, t)
    free_lang_data_in_decl (t);

  /* Traverse every type found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld.types, i, t)
    free_lang_data_in_type (t);
  /* With internal checking enabled, verify the freed types are still
     consistent.  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (fld.types, i, t)
	verify_type (t);
    }

  /* Release the collector's storage.  */
  delete fld.pset;
  fld.worklist.release ();
  fld.decls.release ();
  fld.types.release ();
}
5958
5959
5960 /* Free resources that are used by FE but are not needed once they are done. */
5961
/* Execute function for the pass below.  Always returns 0 (no extra
   TODO flags).  */

static unsigned
free_lang_data (void)
{
  unsigned i;

  /* If we are the LTO frontend we have freed lang-specific data already.  */
  if (in_lto_p
      || (!flag_generate_lto && !flag_generate_offload))
    return 0;

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph ();

  /* Create gimple variants for common types.  */
  ptrdiff_type_node = integer_type_node;
  fileptr_type_node = ptr_type_node;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  lang_hooks.gimplify_expr = lhd_gimplify_expr;

  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  tree_diagnostics_defaults (global_dc);

  return 0;
}
6004
6005
namespace {

/* Pass descriptor for the *free_lang_data simple IPA pass.  */

const pass_data pass_data_ipa_free_lang_data =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_lang_data", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_LANG_DATA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that dispatches to free_lang_data above.  */

class pass_ipa_free_lang_data : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_lang_data (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return free_lang_data (); }

}; // class pass_ipa_free_lang_data

} // anon namespace
6034
/* Factory for the *free_lang_data simple IPA pass.  */

simple_ipa_opt_pass *
make_pass_ipa_free_lang_data (gcc::context *ctxt)
{
  return new pass_ipa_free_lang_data (ctxt);
}
6040
6041 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6042 ATTR_NAME. Also used internally by remove_attribute(). */
6043 bool
6044 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6045 {
6046 size_t ident_len = IDENTIFIER_LENGTH (ident);
6047
6048 if (ident_len == attr_len)
6049 {
6050 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6051 return true;
6052 }
6053 else if (ident_len == attr_len + 4)
6054 {
6055 /* There is the possibility that ATTR is 'text' and IDENT is
6056 '__text__'. */
6057 const char *p = IDENTIFIER_POINTER (ident);
6058 if (p[0] == '_' && p[1] == '_'
6059 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6060 && strncmp (attr_name, p + 2, attr_len) == 0)
6061 return true;
6062 }
6063
6064 return false;
6065 }
6066
6067 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6068 of ATTR_NAME, and LIST is not NULL_TREE. */
6069 tree
6070 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6071 {
6072 while (list)
6073 {
6074 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6075
6076 if (ident_len == attr_len)
6077 {
6078 if (!strcmp (attr_name,
6079 IDENTIFIER_POINTER (get_attribute_name (list))))
6080 break;
6081 }
6082 /* TODO: If we made sure that attributes were stored in the
6083 canonical form without '__...__' (ie, as in 'text' as opposed
6084 to '__text__') then we could avoid the following case. */
6085 else if (ident_len == attr_len + 4)
6086 {
6087 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6088 if (p[0] == '_' && p[1] == '_'
6089 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6090 && strncmp (attr_name, p + 2, attr_len) == 0)
6091 break;
6092 }
6093 list = TREE_CHAIN (list);
6094 }
6095
6096 return list;
6097 }
6098
6099 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6100 return a pointer to the attribute's list first element if the attribute
6101 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6102 '__text__'). */
6103
6104 tree
6105 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6106 tree list)
6107 {
6108 while (list)
6109 {
6110 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6111
6112 if (attr_len > ident_len)
6113 {
6114 list = TREE_CHAIN (list);
6115 continue;
6116 }
6117
6118 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6119
6120 if (strncmp (attr_name, p, attr_len) == 0)
6121 break;
6122
6123 /* TODO: If we made sure that attributes were stored in the
6124 canonical form without '__...__' (ie, as in 'text' as opposed
6125 to '__text__') then we could avoid the following case. */
6126 if (p[0] == '_' && p[1] == '_' &&
6127 strncmp (attr_name, p + 2, attr_len) == 0)
6128 break;
6129
6130 list = TREE_CHAIN (list);
6131 }
6132
6133 return list;
6134 }
6135
6136
6137 /* A variant of lookup_attribute() that can be used with an identifier
6138 as the first argument, and where the identifier can be either
6139 'text' or '__text__'.
6140
6141 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6142 return a pointer to the attribute's list element if the attribute
6143 is part of the list, or NULL_TREE if not found. If the attribute
6144 appears more than once, this only returns the first occurrence; the
6145 TREE_CHAIN of the return value should be passed back in if further
6146 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6147 can be in the form 'text' or '__text__'. */
6148 static tree
6149 lookup_ident_attribute (tree attr_identifier, tree list)
6150 {
6151 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6152
6153 while (list)
6154 {
6155 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6156 == IDENTIFIER_NODE);
6157
6158 if (cmp_attrib_identifiers (attr_identifier,
6159 get_attribute_name (list)))
6160 /* Found it. */
6161 break;
6162 list = TREE_CHAIN (list);
6163 }
6164
6165 return list;
6166 }
6167
6168 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6169 modified list. */
6170
6171 tree
6172 remove_attribute (const char *attr_name, tree list)
6173 {
6174 tree *p;
6175 size_t attr_len = strlen (attr_name);
6176
6177 gcc_checking_assert (attr_name[0] != '_');
6178
6179 for (p = &list; *p; )
6180 {
6181 tree l = *p;
6182 /* TODO: If we were storing attributes in normalized form, here
6183 we could use a simple strcmp(). */
6184 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6185 *p = TREE_CHAIN (l);
6186 else
6187 p = &TREE_CHAIN (l);
6188 }
6189
6190 return list;
6191 }
6192
/* Return an attribute list that is the union of a1 and a2.  The result
   may share structure with either input; no input node is modified.  */

tree
merge_attributes (tree a1, tree a2)
{
  tree attributes;

  /* Either one unset?  Take the set one.  */

  if ((attributes = a1) == 0)
    attributes = a2;

  /* One that completely contains the other?  Take it.  */

  else if (a2 != 0 && ! attribute_list_contained (a1, a2))
    {
      if (attribute_list_contained (a2, a1))
	attributes = a2;
      else
	{
	  /* Pick the longest list, and hang on the other list.  */

	  if (list_length (a1) < list_length (a2))
	    attributes = a2, a2 = a1;

	  for (; a2 != 0; a2 = TREE_CHAIN (a2))
	    {
	      tree a;
	      /* Search ATTRIBUTES for an entry with the same name AND
		 an equal value; the empty loop body just advances A.  */
	      for (a = lookup_ident_attribute (get_attribute_name (a2),
					       attributes);
		   a != NULL_TREE && !attribute_value_equal (a, a2);
		   a = lookup_ident_attribute (get_attribute_name (a2),
					       TREE_CHAIN (a)))
		;
	      /* Not present yet: prepend a copy of this attribute.  */
	      if (a == NULL_TREE)
		{
		  a1 = copy_node (a2);
		  TREE_CHAIN (a1) = attributes;
		  attributes = a1;
		}
	    }
	}
    }
  return attributes;
}
6238
/* Given types T1 and T2, merge their attributes and return
   the result (the union of both attribute lists).  */

tree
merge_type_attributes (tree t1, tree t2)
{
  return merge_attributes (TYPE_ATTRIBUTES (t1),
			   TYPE_ATTRIBUTES (t2));
}
6248
/* Given decls OLDDECL and NEWDECL, merge their attributes and return
   the result (the union of both attribute lists).  */

tree
merge_decl_attributes (tree olddecl, tree newdecl)
{
  return merge_attributes (DECL_ATTRIBUTES (olddecl),
			   DECL_ATTRIBUTES (newdecl));
}
6258
6259 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6260
/* Specialization of merge_decl_attributes for various Windows targets.

   This handles the following situation:

     __declspec (dllimport) int foo;
     int foo;

   The second instance of `foo' nullifies the dllimport.  Returns the
   merged attribute list for NEW_TREE.  May emit warnings and clear or
   set DECL_DLLIMPORT_P on NEW_TREE as a side effect.  */

tree
merge_dllimport_decl_attributes (tree old, tree new_tree)
{
  tree a;
  int delete_dllimport_p = 1;

  /* What we need to do here is remove from `old' dllimport if it doesn't
     appear in `new'.  dllimport behaves like extern: if a declaration is
     marked dllimport and a definition appears later, then the object
     is not dllimport'd.  We also remove a `new' dllimport if the old list
     contains dllexport:  dllexport always overrides dllimport, regardless
     of the order of declaration.  */
  if (!VAR_OR_FUNCTION_DECL_P (new_tree))
    delete_dllimport_p = 0;
  else if (DECL_DLLIMPORT_P (new_tree)
	   && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
    {
      DECL_DLLIMPORT_P (new_tree) = 0;
      warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
	       "dllimport ignored", new_tree);
    }
  else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
    {
      /* Warn about overriding a symbol that has already been used, e.g.:
	   extern int __attribute__ ((dllimport)) foo;
	   int* bar () {return &foo;}
	   int foo;
      */
      if (TREE_USED (old))
	{
	  warning (0, "%q+D redeclared without dllimport attribute "
		   "after being referenced with dll linkage", new_tree);
	  /* If we have used a variable's address with dllimport linkage,
	      keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
	      decl may already have had TREE_CONSTANT computed.
	      We still remove the attribute so that assembler code refers
	      to '&foo rather than '_imp__foo'.  */
	  if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
	    DECL_DLLIMPORT_P (new_tree) = 1;
	}

      /* Let an inline definition silently override the external reference,
	 but otherwise warn about attribute inconsistency.  */
      else if (TREE_CODE (new_tree) == VAR_DECL
	       || !DECL_DECLARED_INLINE_P (new_tree))
	warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
		 "previous dllimport ignored", new_tree);
    }
  else
    delete_dllimport_p = 0;

  /* Merge the two attribute lists, then drop dllimport if flagged
     above.  */
  a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));

  if (delete_dllimport_p)
    a = remove_attribute ("dllimport", a);

  return a;
}
6328
/* Handle a "dllimport" or "dllexport" attribute; arguments as in
   struct attribute_spec.handler.  PNODE points at the decl or type the
   attribute was applied to; NAME is the attribute identifier; ARGS its
   argument list; FLAGS the ATTR_FLAG_* context bits.  Sets
   *NO_ADD_ATTRS when the attribute should not be attached.  */

tree
handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
		      bool *no_add_attrs)
{
  tree node = *pnode;
  bool is_dllimport;

  /* These attributes may apply to structure and union types being created,
     but otherwise should pass to the declaration involved.  */
  if (!DECL_P (node))
    {
      if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
		   | (int) ATTR_FLAG_ARRAY_NEXT))
	{
	  /* Defer to an enclosing declaration: hand the attribute back.  */
	  *no_add_attrs = true;
	  return tree_cons (name, args, NULL_TREE);
	}
      if (TREE_CODE (node) == RECORD_TYPE
	  || TREE_CODE (node) == UNION_TYPE)
	{
	  /* Attach to the type's TYPE_DECL instead.  */
	  node = TYPE_NAME (node);
	  if (!node)
	    return NULL_TREE;
	}
      else
	{
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}
    }

  /* Only functions, variables and (class) type names may carry the
     attribute.  */
  if (TREE_CODE (node) != FUNCTION_DECL
      && TREE_CODE (node) != VAR_DECL
      && TREE_CODE (node) != TYPE_DECL)
    {
      *no_add_attrs = true;
      warning (OPT_Wattributes, "%qE attribute ignored",
	       name);
      return NULL_TREE;
    }

  /* A TYPE_DECL is only acceptable when it names a struct or union.  */
  if (TREE_CODE (node) == TYPE_DECL
      && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
      && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
    {
      *no_add_attrs = true;
      warning (OPT_Wattributes, "%qE attribute ignored",
	       name);
      return NULL_TREE;
    }

  is_dllimport = is_attribute_p ("dllimport", name);

  /* Report error on dllimport ambiguities seen now before they cause
     any damage.  */
  if (is_dllimport)
    {
      /* Honor any target-specific overrides.  */
      if (!targetm.valid_dllimport_attribute_p (node))
	*no_add_attrs = true;

     else if (TREE_CODE (node) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (node))
	{
	  warning (OPT_Wattributes, "inline function %q+D declared as "
		  " dllimport: attribute ignored", node);
	  *no_add_attrs = true;
	}
      /* Like MS, treat definition of dllimported variables and
	 non-inlined functions on declaration as syntax errors.  */
     else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
	{
	  error ("function %q+D definition is marked dllimport", node);
	  *no_add_attrs = true;
	}

     else if (TREE_CODE (node) == VAR_DECL)
	{
	  if (DECL_INITIAL (node))
	    {
	      error ("variable %q+D definition is marked dllimport",
		     node);
	      *no_add_attrs = true;
	    }

	  /* `extern' needn't be specified with dllimport.
	     Specify `extern' now and hope for the best.  Sigh.  */
	  DECL_EXTERNAL (node) = 1;
	  /* Also, implicitly give dllimport'd variables declared within
	     a function global scope, unless declared static.  */
	  if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
	    TREE_PUBLIC (node) = 1;
	}

      if (*no_add_attrs == false)
	DECL_DLLIMPORT_P (node) = 1;
    }
  else if (TREE_CODE (node) == FUNCTION_DECL
	   && DECL_DECLARED_INLINE_P (node)
	   && flag_keep_inline_dllexport)
    /* An exported function, even if inline, must be emitted.  */
    DECL_EXTERNAL (node) = 0;

  /* Report error if symbol is not accessible at global scope.  */
  if (!TREE_PUBLIC (node)
      && (TREE_CODE (node) == VAR_DECL
	  || TREE_CODE (node) == FUNCTION_DECL))
    {
      error ("external linkage required for symbol %q+D because of "
	     "%qE attribute", node, name);
      *no_add_attrs = true;
    }

  /* A dllexport'd entity must have default visibility so that other
     program units (shared libraries or the main executable) can see
     it.  A dllimport'd entity must have default visibility so that
     the linker knows that undefined references within this program
     unit can be resolved by the dynamic linker.  */
  if (!*no_add_attrs)
    {
      if (DECL_VISIBILITY_SPECIFIED (node)
	  && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
	error ("%qE implies default visibility, but %qD has already "
	       "been declared with a different visibility",
	       name, node);
      DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (node) = 1;
    }

  return NULL_TREE;
}
6465
6466 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6467 \f
/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
   of the various TYPE_QUAL values.  The address space is carried in
   separate bits of TYPE_QUALS and decoded via DECODE_QUAL_ADDR_SPACE.  */

static void
set_type_quals (tree type, int type_quals)
{
  TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
  TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
  TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
  TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
  TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
}
6480
6481 /* Returns true iff unqualified CAND and BASE are equivalent. */
6482
6483 bool
6484 check_base_type (const_tree cand, const_tree base)
6485 {
6486 return (TYPE_NAME (cand) == TYPE_NAME (base)
6487 /* Apparently this is needed for Objective-C. */
6488 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6489 /* Check alignment. */
6490 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6491 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6492 TYPE_ATTRIBUTES (base)));
6493 }
6494
6495 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6496
6497 bool
6498 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6499 {
6500 return (TYPE_QUALS (cand) == type_quals
6501 && check_base_type (cand, base));
6502 }
6503
6504 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6505
6506 static bool
6507 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6508 {
6509 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6510 && TYPE_NAME (cand) == TYPE_NAME (base)
6511 /* Apparently this is needed for Objective-C. */
6512 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6513 /* Check alignment. */
6514 && TYPE_ALIGN (cand) == align
6515 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6516 TYPE_ATTRIBUTES (base)));
6517 }
6518
6519 /* This function checks to see if TYPE matches the size one of the built-in
6520 atomic types, and returns that core atomic type. */
6521
6522 static tree
6523 find_atomic_core_type (tree type)
6524 {
6525 tree base_atomic_type;
6526
6527 /* Only handle complete types. */
6528 if (TYPE_SIZE (type) == NULL_TREE)
6529 return NULL_TREE;
6530
6531 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6532 switch (type_size)
6533 {
6534 case 8:
6535 base_atomic_type = atomicQI_type_node;
6536 break;
6537
6538 case 16:
6539 base_atomic_type = atomicHI_type_node;
6540 break;
6541
6542 case 32:
6543 base_atomic_type = atomicSI_type_node;
6544 break;
6545
6546 case 64:
6547 base_atomic_type = atomicDI_type_node;
6548 break;
6549
6550 case 128:
6551 base_atomic_type = atomicTI_type_node;
6552 break;
6553
6554 default:
6555 base_atomic_type = NULL_TREE;
6556 }
6557
6558 return base_atomic_type;
6559 }
6560
6561 /* Return a version of the TYPE, qualified as indicated by the
6562 TYPE_QUALS, if one exists. If no qualified version exists yet,
6563 return NULL_TREE. */
6564
6565 tree
6566 get_qualified_type (tree type, int type_quals)
6567 {
6568 tree t;
6569
6570 if (TYPE_QUALS (type) == type_quals)
6571 return type;
6572
6573 /* Search the chain of variants to see if there is already one there just
6574 like the one we need to have. If so, use that existing one. We must
6575 preserve the TYPE_NAME, since there is code that depends on this. */
6576 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6577 if (check_qualified_type (t, type, type_quals))
6578 return t;
6579
6580 return NULL_TREE;
6581 }
6582
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		/* TYPE_ALIGN is a writable accessor here.  */
		TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	{
	  /* Recurse so the canonical variant also carries TYPE_QUALS.  */
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
6631
6632 /* Create a variant of type T with alignment ALIGN. */
6633
6634 tree
6635 build_aligned_type (tree type, unsigned int align)
6636 {
6637 tree t;
6638
6639 if (TYPE_PACKED (type)
6640 || TYPE_ALIGN (type) == align)
6641 return type;
6642
6643 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6644 if (check_aligned_type (t, type, align))
6645 return t;
6646
6647 t = build_variant_type_copy (type);
6648 TYPE_ALIGN (t) = align;
6649
6650 return t;
6651 }
6652
/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT.  If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself.  */

tree
build_distinct_type_copy (tree type)
{
  tree t = copy_node (type);

  /* The copy must grow its own pointer/reference cache; the original's
     cached POINTER_TYPE/REFERENCE_TYPE nodes point back at TYPE.  */
  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* We do not record methods in type copies nor variants
     so we do not need to keep them up to date when new method
     is inserted.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    TYPE_METHODS (t) = NULL_TREE;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}
6689
/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks).  */

tree
build_variant_type_copy (tree type)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  /* Start from a distinct copy (its own main variant, canonical reset),
     then re-attach it to TYPE's variant chain below.  */
  t = build_distinct_type_copy (type);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P.  */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
6716 \f
6717 /* Return true if the from tree in both tree maps are equal. */
6718
6719 int
6720 tree_map_base_eq (const void *va, const void *vb)
6721 {
6722 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6723 *const b = (const struct tree_map_base *) vb;
6724 return (a->from == b->from);
6725 }
6726
6727 /* Hash a from tree in a tree_base_map. */
6728
6729 unsigned int
6730 tree_map_base_hash (const void *item)
6731 {
6732 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6733 }
6734
6735 /* Return true if this tree map structure is marked for garbage collection
6736 purposes. We simply return true if the from tree is marked, so that this
6737 structure goes away when the from tree goes away. */
6738
6739 int
6740 tree_map_base_marked_p (const void *p)
6741 {
6742 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6743 }
6744
6745 /* Hash a from tree in a tree_map. */
6746
6747 unsigned int
6748 tree_map_hash (const void *item)
6749 {
6750 return (((const struct tree_map *) item)->hash);
6751 }
6752
6753 /* Hash a from tree in a tree_decl_map. */
6754
6755 unsigned int
6756 tree_decl_map_hash (const void *item)
6757 {
6758 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6759 }
6760
6761 /* Return the initialization priority for DECL. */
6762
6763 priority_type
6764 decl_init_priority_lookup (tree decl)
6765 {
6766 symtab_node *snode = symtab_node::get (decl);
6767
6768 if (!snode)
6769 return DEFAULT_INIT_PRIORITY;
6770 return
6771 snode->get_init_priority ();
6772 }
6773
6774 /* Return the finalization priority for DECL. */
6775
6776 priority_type
6777 decl_fini_priority_lookup (tree decl)
6778 {
6779 cgraph_node *node = cgraph_node::get (decl);
6780
6781 if (!node)
6782 return DEFAULT_INIT_PRIORITY;
6783 return
6784 node->get_fini_priority ();
6785 }
6786
6787 /* Set the initialization priority for DECL to PRIORITY. */
6788
6789 void
6790 decl_init_priority_insert (tree decl, priority_type priority)
6791 {
6792 struct symtab_node *snode;
6793
6794 if (priority == DEFAULT_INIT_PRIORITY)
6795 {
6796 snode = symtab_node::get (decl);
6797 if (!snode)
6798 return;
6799 }
6800 else if (TREE_CODE (decl) == VAR_DECL)
6801 snode = varpool_node::get_create (decl);
6802 else
6803 snode = cgraph_node::get_create (decl);
6804 snode->set_init_priority (priority);
6805 }
6806
6807 /* Set the finalization priority for DECL to PRIORITY. */
6808
6809 void
6810 decl_fini_priority_insert (tree decl, priority_type priority)
6811 {
6812 struct cgraph_node *node;
6813
6814 if (priority == DEFAULT_INIT_PRIORITY)
6815 {
6816 node = cgraph_node::get (decl);
6817 if (!node)
6818 return;
6819 }
6820 else
6821 node = cgraph_node::get_create (decl);
6822 node->set_fini_priority (priority);
6823 }
6824
6825 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6826
6827 static void
6828 print_debug_expr_statistics (void)
6829 {
6830 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6831 (long) debug_expr_for_decl->size (),
6832 (long) debug_expr_for_decl->elements (),
6833 debug_expr_for_decl->collisions ());
6834 }
6835
6836 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6837
6838 static void
6839 print_value_expr_statistics (void)
6840 {
6841 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6842 (long) value_expr_for_decl->size (),
6843 (long) value_expr_for_decl->elements (),
6844 value_expr_for_decl->collisions ());
6845 }
6846
6847 /* Lookup a debug expression for FROM, and return it if we find one. */
6848
6849 tree
6850 decl_debug_expr_lookup (tree from)
6851 {
6852 struct tree_decl_map *h, in;
6853 in.base.from = from;
6854
6855 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6856 if (h)
6857 return h->to;
6858 return NULL_TREE;
6859 }
6860
6861 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6862
6863 void
6864 decl_debug_expr_insert (tree from, tree to)
6865 {
6866 struct tree_decl_map *h;
6867
6868 h = ggc_alloc<tree_decl_map> ();
6869 h->base.from = from;
6870 h->to = to;
6871 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6872 }
6873
6874 /* Lookup a value expression for FROM, and return it if we find one. */
6875
6876 tree
6877 decl_value_expr_lookup (tree from)
6878 {
6879 struct tree_decl_map *h, in;
6880 in.base.from = from;
6881
6882 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6883 if (h)
6884 return h->to;
6885 return NULL_TREE;
6886 }
6887
6888 /* Insert a mapping FROM->TO in the value expression hashtable. */
6889
6890 void
6891 decl_value_expr_insert (tree from, tree to)
6892 {
6893 struct tree_decl_map *h;
6894
6895 h = ggc_alloc<tree_decl_map> ();
6896 h->base.from = from;
6897 h->to = to;
6898 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6899 }
6900
6901 /* Lookup a vector of debug arguments for FROM, and return it if we
6902 find one. */
6903
6904 vec<tree, va_gc> **
6905 decl_debug_args_lookup (tree from)
6906 {
6907 struct tree_vec_map *h, in;
6908
6909 if (!DECL_HAS_DEBUG_ARGS_P (from))
6910 return NULL;
6911 gcc_checking_assert (debug_args_for_decl != NULL);
6912 in.base.from = from;
6913 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6914 if (h)
6915 return &h->to;
6916 return NULL;
6917 }
6918
/* Insert a mapping FROM->empty vector of debug arguments in the value
   expression hashtable.  */

vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
  struct tree_vec_map *h;
  tree_vec_map **loc;

  /* If FROM already has debug args, return the existing vector slot.  */
  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  /* The table is created lazily on first insertion.  */
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  h->to = NULL;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  *loc = h;
  /* Set the flag only after the entry exists, so lookups stay consistent.  */
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
  return &h->to;
}
6940
6941 /* Hashing of types so that we don't make duplicates.
6942 The entry point is `type_hash_canon'. */
6943
6944 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6945 with types in the TREE_VALUE slots), by adding the hash codes
6946 of the individual types. */
6947
6948 static void
6949 type_hash_list (const_tree list, inchash::hash &hstate)
6950 {
6951 const_tree tail;
6952
6953 for (tail = list; tail; tail = TREE_CHAIN (tail))
6954 if (TREE_VALUE (tail) != error_mark_node)
6955 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6956 }
6957
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				 TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
          && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Per-code comparison.  Cases that `return' decide the answer fully;
     cases that `break' have matched so far and additionally run the
     language hook after the switch.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);

    case ENUMERAL_TYPE:
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds compare equal if identical nodes or equal constants.  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;			/* Matched; consult language hook below.  */
      return 0;
    case ARRAY_TYPE:
      return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;			/* Matched; consult language hook below.  */
      return 0;

    default:
      return 0;
    }

  /* The frontend may impose additional equality constraints.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
7070
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.

   NOTE: on a hash-table hit, TYPE itself is freed (free_node) and the
   previously recorded node is returned, so the caller must not use
   TYPE afterward.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An identical type is already recorded: discard TYPE, hand back
	 the canonical node.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
      free_node (type);
      return t1;
    }
  else
    {
      /* First time we see this type: record it as canonical.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
7117
7118 static void
7119 print_type_hash_statistics (void)
7120 {
7121 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7122 (long) type_hash_table->size (),
7123 (long) type_hash_table->elements (),
7124 type_hash_table->collisions ());
7125 }
7126
7127 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7128 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7129 by adding the hash codes of the individual attributes. */
7130
7131 static void
7132 attribute_hash_list (const_tree list, inchash::hash &hstate)
7133 {
7134 const_tree tail;
7135
7136 for (tail = list; tail; tail = TREE_CHAIN (tail))
7137 /* ??? Do we want to add in TREE_VALUE too? */
7138 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7139 }
7140
7141 /* Given two lists of attributes, return true if list l2 is
7142 equivalent to l1. */
7143
7144 int
7145 attribute_list_equal (const_tree l1, const_tree l2)
7146 {
7147 if (l1 == l2)
7148 return 1;
7149
7150 return attribute_list_contained (l1, l2)
7151 && attribute_list_contained (l2, l1);
7152 }
7153
/* Given two lists of attributes, return true if list L2 is
   completely contained within L1.  */
/* ??? This would be faster if attribute names were stored in a canonicalized
   form.  Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
   must be used to show these elements are equivalent (which they are).  */
/* ??? It's not clear that attributes with arguments will always be handled
   correctly.  */

int
attribute_list_contained (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  /* First check the obvious, maybe the lists are identical.  */
  if (l1 == l2)
    return 1;

  /* Maybe the lists are similar.  */
  /* Walk both lists in lockstep as long as corresponding entries have the
     same name node and identical argument tree.  */
  for (t1 = l1, t2 = l2;
       t1 != 0 && t2 != 0
        && get_attribute_name (t1) == get_attribute_name (t2)
        && TREE_VALUE (t1) == TREE_VALUE (t2);
       t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    ;

  /* Maybe the lists are equal.  */
  if (t1 == 0 && t2 == 0)
    return 1;

  /* Fallback: for each remaining L2 attribute, scan all of L1 for a
     same-named attribute with an equal value (quadratic, but lists are
     short in practice).  */
  for (; t2 != 0; t2 = TREE_CHAIN (t2))
    {
      const_tree attr;
      /* This CONST_CAST is okay because lookup_attribute does not
	 modify its argument and the return value is assigned to a
	 const_tree.  */
      for (attr = lookup_ident_attribute (get_attribute_name (t2),
					  CONST_CAST_TREE (l1));
	   attr != NULL_TREE && !attribute_value_equal (t2, attr);
	   attr = lookup_ident_attribute (get_attribute_name (t2),
					  TREE_CHAIN (attr)))
	;

      if (attr == NULL_TREE)
	return 0;
    }

  return 1;
}
7202
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

int
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    /* Types must be pointer-identical; purposes may either be identical
       nodes or structurally equal constants of the same type (e.g. C++
       default arguments).  */
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
      return 0;

  /* Equal only if both lists ended together (both pointers NULL).  */
  return t1 == t2;
}
7223
7224 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7225 given by TYPE. If the argument list accepts variable arguments,
7226 then this function counts only the ordinary arguments. */
7227
7228 int
7229 type_num_arguments (const_tree type)
7230 {
7231 int i = 0;
7232 tree t;
7233
7234 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7235 /* If the function does not take a variable number of arguments,
7236 the last element in the list will have type `void'. */
7237 if (VOID_TYPE_P (TREE_VALUE (t)))
7238 break;
7239 else
7240 ++i;
7241
7242 return i;
7243 }
7244
7245 /* Nonzero if integer constants T1 and T2
7246 represent the same constant value. */
7247
7248 int
7249 tree_int_cst_equal (const_tree t1, const_tree t2)
7250 {
7251 if (t1 == t2)
7252 return 1;
7253
7254 if (t1 == 0 || t2 == 0)
7255 return 0;
7256
7257 if (TREE_CODE (t1) == INTEGER_CST
7258 && TREE_CODE (t2) == INTEGER_CST
7259 && wi::to_widest (t1) == wi::to_widest (t2))
7260 return 1;
7261
7262 return 0;
7263 }
7264
7265 /* Return true if T is an INTEGER_CST whose numerical value (extended
7266 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7267
7268 bool
7269 tree_fits_shwi_p (const_tree t)
7270 {
7271 return (t != NULL_TREE
7272 && TREE_CODE (t) == INTEGER_CST
7273 && wi::fits_shwi_p (wi::to_widest (t)));
7274 }
7275
7276 /* Return true if T is an INTEGER_CST whose numerical value (extended
7277 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7278
7279 bool
7280 tree_fits_uhwi_p (const_tree t)
7281 {
7282 return (t != NULL_TREE
7283 && TREE_CODE (t) == INTEGER_CST
7284 && wi::fits_uhwi_p (wi::to_widest (t)));
7285 }
7286
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  Asserts (via gcc_assert) if T does not qualify.  */

HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  gcc_assert (tree_fits_shwi_p (t));
  /* The low word alone is sufficient once the fits check passed.  */
  return TREE_INT_CST_LOW (t);
}
7297
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  Asserts (via gcc_assert) if T does not qualify.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  gcc_assert (tree_fits_uhwi_p (t));
  /* The low word alone is sufficient once the fits check passed.  */
  return TREE_INT_CST_LOW (t);
}
7308
7309 /* Return the most significant (sign) bit of T. */
7310
7311 int
7312 tree_int_cst_sign_bit (const_tree t)
7313 {
7314 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7315
7316 return wi::extract_uhwi (t, bitno, 1);
7317 }
7318
7319 /* Return an indication of the sign of the integer constant T.
7320 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7321 Note that -1 will never be returned if T's type is unsigned. */
7322
7323 int
7324 tree_int_cst_sgn (const_tree t)
7325 {
7326 if (wi::eq_p (t, 0))
7327 return 0;
7328 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7329 return 1;
7330 else if (wi::neg_p (t))
7331 return -1;
7332 else
7333 return 1;
7334 }
7335
/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type; SGN says which.  */

unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
{
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 no matter
     which sign SGN requests.  */

  if (integer_zerop (value))
    return 1;
  else
    return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
}
7360
7361 /* Return truthvalue of whether T1 is the same tree structure as T2.
7362 Return 1 if they are the same.
7363 Return 0 if they are understandably different.
7364 Return -1 if either contains tree structure not understood by
7365 this function. */
7366
7367 int
7368 simple_cst_equal (const_tree t1, const_tree t2)
7369 {
7370 enum tree_code code1, code2;
7371 int cmp;
7372 int i;
7373
7374 if (t1 == t2)
7375 return 1;
7376 if (t1 == 0 || t2 == 0)
7377 return 0;
7378
7379 code1 = TREE_CODE (t1);
7380 code2 = TREE_CODE (t2);
7381
7382 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7383 {
7384 if (CONVERT_EXPR_CODE_P (code2)
7385 || code2 == NON_LVALUE_EXPR)
7386 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7387 else
7388 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7389 }
7390
7391 else if (CONVERT_EXPR_CODE_P (code2)
7392 || code2 == NON_LVALUE_EXPR)
7393 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7394
7395 if (code1 != code2)
7396 return 0;
7397
7398 switch (code1)
7399 {
7400 case INTEGER_CST:
7401 return wi::to_widest (t1) == wi::to_widest (t2);
7402
7403 case REAL_CST:
7404 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7405
7406 case FIXED_CST:
7407 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7408
7409 case STRING_CST:
7410 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7411 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7412 TREE_STRING_LENGTH (t1)));
7413
7414 case CONSTRUCTOR:
7415 {
7416 unsigned HOST_WIDE_INT idx;
7417 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7418 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7419
7420 if (vec_safe_length (v1) != vec_safe_length (v2))
7421 return false;
7422
7423 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7424 /* ??? Should we handle also fields here? */
7425 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7426 return false;
7427 return true;
7428 }
7429
7430 case SAVE_EXPR:
7431 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7432
7433 case CALL_EXPR:
7434 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7435 if (cmp <= 0)
7436 return cmp;
7437 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7438 return 0;
7439 {
7440 const_tree arg1, arg2;
7441 const_call_expr_arg_iterator iter1, iter2;
7442 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7443 arg2 = first_const_call_expr_arg (t2, &iter2);
7444 arg1 && arg2;
7445 arg1 = next_const_call_expr_arg (&iter1),
7446 arg2 = next_const_call_expr_arg (&iter2))
7447 {
7448 cmp = simple_cst_equal (arg1, arg2);
7449 if (cmp <= 0)
7450 return cmp;
7451 }
7452 return arg1 == arg2;
7453 }
7454
7455 case TARGET_EXPR:
7456 /* Special case: if either target is an unallocated VAR_DECL,
7457 it means that it's going to be unified with whatever the
7458 TARGET_EXPR is really supposed to initialize, so treat it
7459 as being equivalent to anything. */
7460 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7461 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7462 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7463 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7464 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7465 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7466 cmp = 1;
7467 else
7468 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7469
7470 if (cmp <= 0)
7471 return cmp;
7472
7473 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7474
7475 case WITH_CLEANUP_EXPR:
7476 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7477 if (cmp <= 0)
7478 return cmp;
7479
7480 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7481
7482 case COMPONENT_REF:
7483 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7484 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7485
7486 return 0;
7487
7488 case VAR_DECL:
7489 case PARM_DECL:
7490 case CONST_DECL:
7491 case FUNCTION_DECL:
7492 return 0;
7493
7494 default:
7495 break;
7496 }
7497
7498 /* This general rule works for most tree codes. All exceptions should be
7499 handled above. If this is a language-specific tree code, we can't
7500 trust what might be in the operand, so say we don't know
7501 the situation. */
7502 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7503 return -1;
7504
7505 switch (TREE_CODE_CLASS (code1))
7506 {
7507 case tcc_unary:
7508 case tcc_binary:
7509 case tcc_comparison:
7510 case tcc_expression:
7511 case tcc_reference:
7512 case tcc_statement:
7513 cmp = 1;
7514 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7515 {
7516 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7517 if (cmp <= 0)
7518 return cmp;
7519 }
7520
7521 return cmp;
7522
7523 default:
7524 return -1;
7525 }
7526 }
7527
7528 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7529 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7530 than U, respectively. */
7531
7532 int
7533 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7534 {
7535 if (tree_int_cst_sgn (t) < 0)
7536 return -1;
7537 else if (!tree_fits_uhwi_p (t))
7538 return 1;
7539 else if (TREE_INT_CST_LOW (t) == u)
7540 return 0;
7541 else if (TREE_INT_CST_LOW (t) < u)
7542 return -1;
7543 else
7544 return 1;
7545 }
7546
7547 /* Return true if SIZE represents a constant size that is in bounds of
7548 what the middle-end and the backend accepts (covering not more than
7549 half of the address-space). */
7550
7551 bool
7552 valid_constant_size_p (const_tree size)
7553 {
7554 if (! tree_fits_uhwi_p (size)
7555 || TREE_OVERFLOW (size)
7556 || tree_int_cst_sign_bit (size) != 0)
7557 return false;
7558 return true;
7559 }
7560
7561 /* Return the precision of the type, or for a complex or vector type the
7562 precision of the type of its elements. */
7563
7564 unsigned int
7565 element_precision (const_tree type)
7566 {
7567 if (!TYPE_P (type))
7568 type = TREE_TYPE (type);
7569 enum tree_code code = TREE_CODE (type);
7570 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7571 type = TREE_TYPE (type);
7572
7573 return TYPE_PRECISION (type);
7574 }
7575
7576 /* Return true if CODE represents an associative tree code. Otherwise
7577 return false. */
7578 bool
7579 associative_tree_code (enum tree_code code)
7580 {
7581 switch (code)
7582 {
7583 case BIT_IOR_EXPR:
7584 case BIT_AND_EXPR:
7585 case BIT_XOR_EXPR:
7586 case PLUS_EXPR:
7587 case MULT_EXPR:
7588 case MIN_EXPR:
7589 case MAX_EXPR:
7590 return true;
7591
7592 default:
7593 break;
7594 }
7595 return false;
7596 }
7597
7598 /* Return true if CODE represents a commutative tree code. Otherwise
7599 return false. */
7600 bool
7601 commutative_tree_code (enum tree_code code)
7602 {
7603 switch (code)
7604 {
7605 case PLUS_EXPR:
7606 case MULT_EXPR:
7607 case MULT_HIGHPART_EXPR:
7608 case MIN_EXPR:
7609 case MAX_EXPR:
7610 case BIT_IOR_EXPR:
7611 case BIT_XOR_EXPR:
7612 case BIT_AND_EXPR:
7613 case NE_EXPR:
7614 case EQ_EXPR:
7615 case UNORDERED_EXPR:
7616 case ORDERED_EXPR:
7617 case UNEQ_EXPR:
7618 case LTGT_EXPR:
7619 case TRUTH_AND_EXPR:
7620 case TRUTH_XOR_EXPR:
7621 case TRUTH_OR_EXPR:
7622 case WIDEN_MULT_EXPR:
7623 case VEC_WIDEN_MULT_HI_EXPR:
7624 case VEC_WIDEN_MULT_LO_EXPR:
7625 case VEC_WIDEN_MULT_EVEN_EXPR:
7626 case VEC_WIDEN_MULT_ODD_EXPR:
7627 return true;
7628
7629 default:
7630 break;
7631 }
7632 return false;
7633 }
7634
7635 /* Return true if CODE represents a ternary tree code for which the
7636 first two operands are commutative. Otherwise return false. */
7637 bool
7638 commutative_ternary_tree_code (enum tree_code code)
7639 {
7640 switch (code)
7641 {
7642 case WIDEN_MULT_PLUS_EXPR:
7643 case WIDEN_MULT_MINUS_EXPR:
7644 case DOT_PROD_EXPR:
7645 case FMA_EXPR:
7646 return true;
7647
7648 default:
7649 break;
7650 }
7651 return false;
7652 }
7653
7654 /* Returns true if CODE can overflow. */
7655
7656 bool
7657 operation_can_overflow (enum tree_code code)
7658 {
7659 switch (code)
7660 {
7661 case PLUS_EXPR:
7662 case MINUS_EXPR:
7663 case MULT_EXPR:
7664 case LSHIFT_EXPR:
7665 /* Can overflow in various ways. */
7666 return true;
7667 case TRUNC_DIV_EXPR:
7668 case EXACT_DIV_EXPR:
7669 case FLOOR_DIV_EXPR:
7670 case CEIL_DIV_EXPR:
7671 /* For INT_MIN / -1. */
7672 return true;
7673 case NEGATE_EXPR:
7674 case ABS_EXPR:
7675 /* For -INT_MIN. */
7676 return true;
7677 default:
7678 /* These operators cannot overflow. */
7679 return false;
7680 }
7681 }
7682
7683 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7684 ftrapv doesn't generate trapping insns for CODE. */
7685
7686 bool
7687 operation_no_trapping_overflow (tree type, enum tree_code code)
7688 {
7689 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7690
7691 /* We don't generate instructions that trap on overflow for complex or vector
7692 types. */
7693 if (!INTEGRAL_TYPE_P (type))
7694 return true;
7695
7696 if (!TYPE_OVERFLOW_TRAPS (type))
7697 return true;
7698
7699 switch (code)
7700 {
7701 case PLUS_EXPR:
7702 case MINUS_EXPR:
7703 case MULT_EXPR:
7704 case NEGATE_EXPR:
7705 case ABS_EXPR:
7706 /* These operators can overflow, and -ftrapv generates trapping code for
7707 these. */
7708 return false;
7709 case TRUNC_DIV_EXPR:
7710 case EXACT_DIV_EXPR:
7711 case FLOOR_DIV_EXPR:
7712 case CEIL_DIV_EXPR:
7713 case LSHIFT_EXPR:
7714 /* These operators can overflow, but -ftrapv does not generate trapping
7715 code for these. */
7716 return true;
7717 default:
7718 /* These operators cannot overflow. */
7719 return true;
7720 }
7721 }
7722
namespace inchash
{

/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p.  */
void
add_expr (const_tree t, inchash::hash &hstate)
{
  int i;
  enum tree_code code;
  enum tree_code_class tclass;

  /* A null tree contributes the fixed value 0.  */
  if (t == NULL_TREE)
    {
      hstate.merge_hash (0);
      return;
    }

  code = TREE_CODE (t);

  switch (code)
    {
    /* Alas, constants aren't shared, so we can't rely on pointer
       identity.  */
    case VOID_CST:
      hstate.merge_hash (0);
      return;
    case INTEGER_CST:
      /* Mix in every HOST_WIDE_INT element of the constant.  */
      for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
      return;
    case REAL_CST:
      {
	unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case FIXED_CST:
      {
	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case STRING_CST:
      /* Hash the raw bytes, including any embedded NULs.  */
      hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
      return;
    case COMPLEX_CST:
      inchash::add_expr (TREE_REALPART (t), hstate);
      inchash::add_expr (TREE_IMAGPART (t), hstate);
      return;
    case VECTOR_CST:
      {
	unsigned i;
	/* Hash the elements in order.  */
	for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
	  inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
	return;
      }
    case SSA_NAME:
      /* We can just compare by pointer.  */
      hstate.add_wide_int (SSA_NAME_VERSION (t));
      return;
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter.  */
      return;
    case TREE_LIST:
      /* A list of expressions, for a CALL_EXPR or as the elements of a
	 VECTOR_CST.  */
      for (; t; t = TREE_CHAIN (t))
	inchash::add_expr (TREE_VALUE (t), hstate);
      return;
    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	/* Hash each (index/field, value) pair, in order.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  {
	    inchash::add_expr (field, hstate);
	    inchash::add_expr (value, hstate);
	  }
	return;
      }
    case FUNCTION_DECL:
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes.  However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
	 cases.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	{
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);
	}
      /* FALL THROUGH */
    default:
      tclass = TREE_CODE_CLASS (code);

      if (tclass == tcc_declaration)
	{
	  /* DECL's have a unique ID */
	  hstate.add_wide_int (DECL_UID (t));
	}
      else
	{
	  gcc_assert (IS_EXPR_CODE_CLASS (tclass));

	  hstate.add_object (code);

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  */
	  if (CONVERT_EXPR_CODE_P (code)
	      || code == NON_LVALUE_EXPR)
	    {
	      /* Make sure to include signness in the hash computation.  */
	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	      inchash::add_expr (TREE_OPERAND (t, 0), hstate);
	    }

	  else if (commutative_tree_code (code))
	    {
	      /* It's a commutative expression.  We want to hash it the same
		 however it appears.  We do this by first hashing both operands
		 and then rehashing based on the order of their independent
		 hashes.  */
	      inchash::hash one, two;
	      inchash::add_expr (TREE_OPERAND (t, 0), one);
	      inchash::add_expr (TREE_OPERAND (t, 1), two);
	      hstate.add_commutative (one, two);
	    }
	  else
	    /* Non-commutative: hash the operands back to front.  */
	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	      inchash::add_expr (TREE_OPERAND (t, i), hstate);
	}
      return;
    }
}

}
7865
/* Constructors for pointer, array and function types.
   (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
   constructed by language-dependent code, not here.)  */

/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
   reference all of memory.  If such a type has already been
   constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request before the may_alias attribute
     check below may force CAN_ALIAS_ALL on; it controls whether a separate
     canonical type is built further down.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  The pointer types to TO_TYPE are chained
     through TYPE_NEXT_PTR_TO.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type at the head of TO_TYPE's pointer-type chain.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    /* The canonical form is the plain (non-alias-all) pointer to the
       canonical pointed-to type.  */
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
7930
7931 /* By default build pointers in ptr_mode. */
7932
7933 tree
7934 build_pointer_type (tree to_type)
7935 {
7936 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7937 : TYPE_ADDR_SPACE (to_type);
7938 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7939 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7940 }
7941
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request before the may_alias attribute
     check below may force CAN_ALIAS_ALL on; it controls whether a separate
     canonical type is built further down.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  The reference types to TO_TYPE are chained
     through TYPE_NEXT_REF_TO.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type at the head of TO_TYPE's reference-type chain.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    /* The canonical form is the plain (non-alias-all) reference to the
       canonical referenced type.  */
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
7997
7998
7999 /* Build the node for the type of references-to-TO_TYPE by default
8000 in ptr_mode. */
8001
8002 tree
8003 build_reference_type (tree to_type)
8004 {
8005 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8006 : TYPE_ADDR_SPACE (to_type);
8007 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8008 return build_reference_type_for_mode (to_type, pointer_mode, false);
8009 }
8010
#define MAX_INT_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
/* Cache of small-precision integer types: slots [0, MAX_INT_CACHED_PREC]
   hold signed types, the second half holds unsigned ones.  */
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];

/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* Reuse UNSIGNEDP as the offset into the unsigned half of the cache.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  /* Fast path: return a previously built type of this precision and
     signedness.  */
  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Set the bounds, size and mode from the precision and signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  ret = itype;
  /* Share the node with any structurally identical type already built,
     keyed on the maximum value when it fits a HOST_WIDE_INT.  */
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
    ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
8050
8051 #define MAX_BOOL_CACHED_PREC \
8052 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8053 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8054
8055 /* Builds a boolean type of precision PRECISION.
8056 Used for boolean vectors to choose proper vector element size. */
8057 tree
8058 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8059 {
8060 tree type;
8061
8062 if (precision <= MAX_BOOL_CACHED_PREC)
8063 {
8064 type = nonstandard_boolean_type_cache[precision];
8065 if (type)
8066 return type;
8067 }
8068
8069 type = make_node (BOOLEAN_TYPE);
8070 TYPE_PRECISION (type) = precision;
8071 fixup_signed_type (type);
8072
8073 if (precision <= MAX_INT_CACHED_PREC)
8074 nonstandard_boolean_type_cache[precision] = type;
8075
8076 return type;
8077 }
8078
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);
  inchash::hash hstate;

  TREE_TYPE (itype) = type;

  /* Convert the bounds to the base type; HIGHVAL may be null.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type shares the base type's representation: precision,
     mode, size and alignment.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  TYPE_ALIGN (itype) = TYPE_ALIGN (type);
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);

  if (!shared)
    return itype;

  /* Non-constant bounds (e.g. expressions) cannot be hashed reliably.  */
  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Hash-cons on the bounds and the base type so identical ranges share
     a single node.  */
  inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  hstate.merge_hash (TYPE_HASH (type));
  itype = type_hash_canon (hstate.end (), itype);

  return itype;
}
8122
8123 /* Wrapper around build_range_type_1 with SHARED set to true. */
8124
8125 tree
8126 build_range_type (tree type, tree lowval, tree highval)
8127 {
8128 return build_range_type_1 (type, lowval, highval, true);
8129 }
8130
8131 /* Wrapper around build_range_type_1 with SHARED set to false. */
8132
8133 tree
8134 build_nonshared_range_type (tree type, tree lowval, tree highval)
8135 {
8136 return build_range_type_1 (type, lowval, highval, false);
8137 }
8138
8139 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8140 MAXVAL should be the maximum value in the domain
8141 (one less than the length of the array).
8142
8143 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8144 We don't enforce this limit, that is up to caller (e.g. language front end).
8145 The limit exists because the result is a signed type and we don't handle
8146 sizes that use more than one HOST_WIDE_INT. */
8147
8148 tree
8149 build_index_type (tree maxval)
8150 {
8151 return build_range_type (sizetype, size_zero_node, maxval);
8152 }
8153
/* Return true if the debug information for TYPE, a subtype, should be emitted
   as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
   high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
   debug info and doesn't reflect the source code.  */

bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
{
  tree base_type = TREE_TYPE (type), low, high;

  /* Subrange types have a base type which is an integral type.  */
  if (!INTEGRAL_TYPE_P (base_type))
    return false;

  /* Get the real bounds of the subtype.  Prefer the language hook,
     which may know bounds the generic TYPE_MIN/MAX_VALUE do not.  */
  if (lang_hooks.types.get_subrange_bounds)
    lang_hooks.types.get_subrange_bounds (type, &low, &high);
  else
    {
      low = TYPE_MIN_VALUE (type);
      high = TYPE_MAX_VALUE (type);
    }

  /* If the type and its base type have the same representation and the same
     name, then the type is not a subrange but a copy of the base type.  */
  if ((TREE_CODE (base_type) == INTEGER_TYPE
       || TREE_CODE (base_type) == BOOLEAN_TYPE)
      && int_size_in_bytes (type) == int_size_in_bytes (base_type)
      && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
      && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
      && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
    return false;

  /* Output parameters are optional; fill only those requested.  */
  if (lowval)
    *lowval = low;
  if (highval)
    *highval = high;
  return true;
}
8193
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If SHARED is true, reuse such a type that has already been constructed.  */

static tree
build_array_type_1 (tree elt_type, tree index_type, bool shared)
{
  tree t;

  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      /* Error recovery: fall back to int elements so layout can proceed.  */
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  layout_type (t);

  /* If the element type is incomplete at this point we get marked for
     structural equality.  Do not record these types in the canonical
     type hashtable.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    return t;

  if (shared)
    {
      /* Hash-cons on the element type and (optional) domain.  */
      inchash::hash hstate;
      hstate.add_object (TYPE_HASH (elt_type));
      if (index_type)
	hstate.add_object (TYPE_HASH (index_type));
      t = type_hash_canon (hstate.end (), t);
    }

  /* Only a freshly canonical node needs its TYPE_CANONICAL computed.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Build the canonical array from the canonical element and
	   domain types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				shared);
    }

  return t;
}
8247
8248 /* Wrapper around build_array_type_1 with SHARED set to true. */
8249
8250 tree
8251 build_array_type (tree elt_type, tree index_type)
8252 {
8253 return build_array_type_1 (elt_type, index_type, true);
8254 }
8255
8256 /* Wrapper around build_array_type_1 with SHARED set to false. */
8257
8258 tree
8259 build_nonshared_array_type (tree elt_type, tree index_type)
8260 {
8261 return build_array_type_1 (elt_type, index_type, false);
8262 }
8263
8264 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8265 sizetype. */
8266
8267 tree
8268 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8269 {
8270 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8271 }
8272
8273 /* Recursively examines the array elements of TYPE, until a non-array
8274 element type is found. */
8275
8276 tree
8277 strip_array_types (tree type)
8278 {
8279 while (TREE_CODE (type) == ARRAY_TYPE)
8280 type = TREE_TYPE (type);
8281
8282 return type;
8283 }
8284
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the list.  Stop early once a structural type
     is found, since the result will be ARGTYPES anyway.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    /* Remember the void terminator; re-append it below rather
	       than consing a copy.  */
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
8356
/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types)
{
  tree t;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      /* Error recovery: substitute int so construction can continue.  */
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* If we already have such a type, use the old one.  Hash on the
     return type and the whole argument list.  */
  hstate.add_object (TYPE_HASH (value_type));
  type_hash_list (arg_types, hstate);
  t = type_hash_canon (hstate.end (), t);

  /* Set up the canonical type.  */
  any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    /* Recurse with the canonical return and argument types.  */
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
					      canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}
8404
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types into a TREE_LIST, in reverse order.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      /* A varargs list must not already be void-terminated.  */
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* No arguments at all: the prototype is (void).  */
    args = void_list_node;
  else
    {
      /* Restore source order and append the void terminator, which marks
	 the type as non-varargs.  LAST is the list tail after nreverse.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
8437
8438 /* Build a function type. The RETURN_TYPE is the type returned by the
8439 function. If additional arguments are provided, they are
8440 additional argument types. The list of argument types must always
8441 be terminated by NULL_TREE. */
8442
8443 tree
8444 build_function_type_list (tree return_type, ...)
8445 {
8446 tree args;
8447 va_list p;
8448
8449 va_start (p, return_type);
8450 args = build_function_type_list_1 (false, return_type, p);
8451 va_end (p);
8452 return args;
8453 }
8454
8455 /* Build a variable argument function type. The RETURN_TYPE is the
8456 type returned by the function. If additional arguments are provided,
8457 they are additional argument types. The list of argument types must
8458 always be terminated by NULL_TREE. */
8459
8460 tree
8461 build_varargs_function_type_list (tree return_type, ...)
8462 {
8463 tree args;
8464 va_list p;
8465
8466 va_start (p, return_type);
8467 args = build_function_type_list_1 (true, return_type, p);
8468 va_end (p);
8469
8470 return args;
8471 }
8472
8473 /* Build a function type. RETURN_TYPE is the type returned by the
8474 function; VAARGS indicates whether the function takes varargs. The
8475 function takes N named arguments, the types of which are provided in
8476 ARG_TYPES. */
8477
8478 static tree
8479 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8480 tree *arg_types)
8481 {
8482 int i;
8483 tree t = vaargs ? NULL_TREE : void_list_node;
8484
8485 for (i = n - 1; i >= 0; i--)
8486 t = tree_cons (NULL_TREE, arg_types[i], t);
8487
8488 return build_function_type (return_type, t);
8489 }
8490
8491 /* Build a function type. RETURN_TYPE is the type returned by the
8492 function. The function takes N named arguments, the types of which
8493 are provided in ARG_TYPES. */
8494
8495 tree
8496 build_function_type_array (tree return_type, int n, tree *arg_types)
8497 {
8498 return build_function_type_array_1 (false, return_type, n, arg_types);
8499 }
8500
8501 /* Build a variable argument function type. RETURN_TYPE is the type
8502 returned by the function. The function takes N named arguments, the
8503 types of which are provided in ARG_TYPES. */
8504
8505 tree
8506 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8507 {
8508 return build_function_type_array_1 (true, return_type, n, arg_types);
8509 }
8510
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  Hash on the base
     type, the return type and the full (this-augmented) argument list.  */
  hstate.add_object (TYPE_HASH (basetype));
  hstate.add_object (TYPE_HASH (rettype));
  type_hash_list (argtypes, hstate);
  t = type_hash_canon (hstate.end (), t);

  /* Set up the canonical type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  /* Skip the implicit "this" argument (the list head) when
     canonicalizing; the recursive call re-adds it.  */
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
8567
8568 /* Construct, lay out and return the type of methods belonging to class
8569 BASETYPE and whose arguments and values are described by TYPE.
8570 If that type exists already, reuse it.
8571 TYPE must be a FUNCTION_TYPE node. */
8572
8573 tree
8574 build_method_type (tree basetype, tree type)
8575 {
8576 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8577
8578 return build_method_type_directly (basetype,
8579 TREE_TYPE (type),
8580 TYPE_ARG_TYPES (type));
8581 }
8582
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;
  inchash::hash hstate;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  Hash on both
     the base type and the member type.  */
  hstate.add_object (TYPE_HASH (basetype));
  hstate.add_object (TYPE_HASH (type));
  t = type_hash_canon (hstate.end (), t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only a freshly canonical node needs its TYPE_CANONICAL computed.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	/* Canonicalize via the main variant of the base type, matching
	   the TYPE_OFFSET_BASETYPE assignment above.  */
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
8621
/* Create a complex type whose components are COMPONENT_TYPE.
   The result carries COMPONENT_TYPE's qualifiers.  */

tree
build_complex_type (tree component_type)
{
  tree t;
  inchash::hash hstate;

  /* Only integral, real and fixed-point component types are valid.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  t = make_node (COMPLEX_TYPE);

  TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hstate.add_object (TYPE_HASH (component_type));
  t = type_hash_canon (hstate.end (), t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only a freshly canonical node needs its TYPE_CANONICAL computed.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (component_type) != component_type)
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (component_type));
    }

  /* We need to create a name, since complex is a fundamental type.  */
  if (! TYPE_NAME (t))
    {
      const char *name;
      if (component_type == char_type_node)
	name = "complex char";
      else if (component_type == signed_char_type_node)
	name = "complex signed char";
      else if (component_type == unsigned_char_type_node)
	name = "complex unsigned char";
      else if (component_type == short_integer_type_node)
	name = "complex short int";
      else if (component_type == short_unsigned_type_node)
	name = "complex short unsigned int";
      else if (component_type == integer_type_node)
	name = "complex int";
      else if (component_type == unsigned_type_node)
	name = "complex unsigned int";
      else if (component_type == long_integer_type_node)
	name = "complex long int";
      else if (component_type == long_unsigned_type_node)
	name = "complex long unsigned int";
      else if (component_type == long_long_integer_type_node)
	name = "complex long long int";
      else if (component_type == long_long_unsigned_type_node)
	name = "complex long long unsigned int";
      else
	/* Non-standard component types get no name.  */
	name = 0;

      if (name != 0)
	TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
				    get_identifier (name), t);
    }

  /* Propagate the component type's qualifiers onto the result.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
8691
8692 /* If TYPE is a real or complex floating-point type and the target
8693 does not directly support arithmetic on TYPE then return the wider
8694 type to be used for arithmetic on TYPE. Otherwise, return
8695 NULL_TREE. */
8696
8697 tree
8698 excess_precision_type (tree type)
8699 {
8700 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8701 {
8702 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8703 switch (TREE_CODE (type))
8704 {
8705 case REAL_TYPE:
8706 switch (flt_eval_method)
8707 {
8708 case 1:
8709 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8710 return double_type_node;
8711 break;
8712 case 2:
8713 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8714 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8715 return long_double_type_node;
8716 break;
8717 default:
8718 gcc_unreachable ();
8719 }
8720 break;
8721 case COMPLEX_TYPE:
8722 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8723 return NULL_TREE;
8724 switch (flt_eval_method)
8725 {
8726 case 1:
8727 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8728 return complex_double_type_node;
8729 break;
8730 case 2:
8731 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8732 || (TYPE_MODE (TREE_TYPE (type))
8733 == TYPE_MODE (double_type_node)))
8734 return complex_long_double_type_node;
8735 break;
8736 default:
8737 gcc_unreachable ();
8738 }
8739 break;
8740 default:
8741 break;
8742 }
8743 }
8744 return NULL_TREE;
8745 }
8746 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  /* WIN is the innermost expression we have decided it is safe to
     strip down to; OP walks further inward speculatively.  */
  tree win = op;

  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion is a widening;
	 negative means it is a truncation.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in FOR_TYPE and
     in that case convert it.  */
  if (for_type
      && TREE_CODE (win) == INTEGER_CST
      && TREE_TYPE (win) != for_type
      && int_fits_type_p (win, for_type))
    win = fold_convert (for_type, win);

  return win;
}
8835 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  /* UNS records whether the (single kind of) extension we have stripped
     so far was a zero-extension; FIRST is set until the first one.  */
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  while (TREE_CODE (op) == NOP_EXPR)
    {
      /* Positive BITSCHANGE means this NOP_EXPR widens its operand.  */
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
8929 \f
/* Returns true if integer constant C has a value that is permissible
   for type TYPE (an INTEGER_TYPE).  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  /* Signedness of C's own type, used when comparing against bounds.  */
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  (The -1/0/1 encoding mentioned historically is gone;
     OK_FOR_*_BOUND are now plain bools: true means "constant known to fit
     that bound", false means "unknown".)  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types, */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      /* PREC is the sign-bit position of the underlying mode.  */
      int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::ne_p (wi::zext (c, prec - 1), c))
	    return false;
	}
      else if (wi::neg_p (c))
	return false;
    }

  /* If we haven't been able to decide at this point, there is nothing more
     we can check ourselves here.  Look at the base type if we have one and
     it has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (c, type);
}
9021
9022 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9023 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9024 represented (assuming two's-complement arithmetic) within the bit
9025 precision of the type are returned instead. */
9026
9027 void
9028 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9029 {
9030 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9031 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9032 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9033 else
9034 {
9035 if (TYPE_UNSIGNED (type))
9036 mpz_set_ui (min, 0);
9037 else
9038 {
9039 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9040 wi::to_mpz (mn, min, SIGNED);
9041 }
9042 }
9043
9044 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9045 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9046 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9047 else
9048 {
9049 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9050 wi::to_mpz (mn, max, TYPE_SIGN (type));
9051 }
9052 }
9053
9054 /* Return true if VAR is an automatic variable defined in function FN. */
9055
9056 bool
9057 auto_var_in_fn_p (const_tree var, const_tree fn)
9058 {
9059 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9060 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9061 || TREE_CODE (var) == PARM_DECL)
9062 && ! TREE_STATIC (var))
9063 || TREE_CODE (var) == LABEL_DECL
9064 || TREE_CODE (var) == RESULT_DECL));
9065 }
9066
9067 /* Subprogram of following function. Called by walk_tree.
9068
9069 Return *TP if it is an automatic variable or parameter of the
9070 function passed in as DATA. */
9071
9072 static tree
9073 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9074 {
9075 tree fn = (tree) data;
9076
9077 if (TYPE_P (*tp))
9078 *walk_subtrees = 0;
9079
9080 else if (DECL_P (*tp)
9081 && auto_var_in_fn_p (*tp, fn))
9082 return *tp;
9083
9084 return NULL_TREE;
9085 }
9086
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

/* Test if T is either variable (if FN is zero) or an expression containing
   a variable in FN.  If TYPE isn't gimplified, return true also if
   gimplify_one_sizepos would gimplify the expression into a local
   variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& TREE_CODE (_t) != INTEGER_CST				\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& !is_gimple_sizepos (_t))				\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* A pointer/reference/vector type is variably modified if the
	 pointed-to/element type is.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* For a QUAL_UNION_TYPE the member-selection qualifier
	       expressions may also reference local variables.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
9193
9194 /* Given a DECL or TYPE, return the scope in which it was declared, or
9195 NULL_TREE if there is no containing scope. */
9196
9197 tree
9198 get_containing_scope (const_tree t)
9199 {
9200 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9201 }
9202
9203 /* Return the innermost context enclosing DECL that is
9204 a FUNCTION_DECL, or zero if none. */
9205
9206 tree
9207 decl_function_context (const_tree decl)
9208 {
9209 tree context;
9210
9211 if (TREE_CODE (decl) == ERROR_MARK)
9212 return 0;
9213
9214 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9215 where we look up the function at runtime. Such functions always take
9216 a first argument of type 'pointer to real context'.
9217
9218 C++ should really be fixed to use DECL_CONTEXT for the real context,
9219 and use something else for the "virtual context". */
9220 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9221 context
9222 = TYPE_MAIN_VARIANT
9223 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9224 else
9225 context = DECL_CONTEXT (decl);
9226
9227 while (context && TREE_CODE (context) != FUNCTION_DECL)
9228 {
9229 if (TREE_CODE (context) == BLOCK)
9230 context = BLOCK_SUPERCONTEXT (context);
9231 else
9232 context = get_containing_scope (context);
9233 }
9234
9235 return context;
9236 }
9237
9238 /* Return the innermost context enclosing DECL that is
9239 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9240 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9241
9242 tree
9243 decl_type_context (const_tree decl)
9244 {
9245 tree context = DECL_CONTEXT (decl);
9246
9247 while (context)
9248 switch (TREE_CODE (context))
9249 {
9250 case NAMESPACE_DECL:
9251 case TRANSLATION_UNIT_DECL:
9252 return NULL_TREE;
9253
9254 case RECORD_TYPE:
9255 case UNION_TYPE:
9256 case QUAL_UNION_TYPE:
9257 return context;
9258
9259 case TYPE_DECL:
9260 case FUNCTION_DECL:
9261 context = DECL_CONTEXT (context);
9262 break;
9263
9264 case BLOCK:
9265 context = BLOCK_SUPERCONTEXT (context);
9266 break;
9267
9268 default:
9269 gcc_unreachable ();
9270 }
9271
9272 return NULL_TREE;
9273 }
9274
9275 /* CALL is a CALL_EXPR. Return the declaration for the function
9276 called, or NULL_TREE if the called function cannot be
9277 determined. */
9278
9279 tree
9280 get_callee_fndecl (const_tree call)
9281 {
9282 tree addr;
9283
9284 if (call == error_mark_node)
9285 return error_mark_node;
9286
9287 /* It's invalid to call this function with anything but a
9288 CALL_EXPR. */
9289 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9290
9291 /* The first operand to the CALL is the address of the function
9292 called. */
9293 addr = CALL_EXPR_FN (call);
9294
9295 /* If there is no function, return early. */
9296 if (addr == NULL_TREE)
9297 return NULL_TREE;
9298
9299 STRIP_NOPS (addr);
9300
9301 /* If this is a readonly function pointer, extract its initial value. */
9302 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9303 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9304 && DECL_INITIAL (addr))
9305 addr = DECL_INITIAL (addr);
9306
9307 /* If the address is just `&f' for some function `f', then we know
9308 that `f' is being called. */
9309 if (TREE_CODE (addr) == ADDR_EXPR
9310 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9311 return TREE_OPERAND (addr, 0);
9312
9313 /* We couldn't figure out what was being called. */
9314 return NULL_TREE;
9315 }
9316
9317 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9318 return the associated function code, otherwise return CFN_LAST. */
9319
9320 combined_fn
9321 get_call_combined_fn (const_tree call)
9322 {
9323 /* It's invalid to call this function with anything but a CALL_EXPR. */
9324 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9325
9326 if (!CALL_EXPR_FN (call))
9327 return as_combined_fn (CALL_EXPR_IFN (call));
9328
9329 tree fndecl = get_callee_fndecl (call);
9330 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9331 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9332
9333 return CFN_LAST;
9334 }
9335
9336 #define TREE_MEM_USAGE_SPACES 40
9337
/* Print debugging information about tree nodes generated during the compile,
   and any language-specific information.  */

void
dump_tree_statistics (void)
{
  if (GATHER_STATISTICS)
    {
      int i;
      int total_nodes, total_bytes;
      /* Per-kind node counts and byte totals.  */
      fprintf (stderr, "\nKind Nodes Bytes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      total_nodes = total_bytes = 0;
      for (i = 0; i < (int) all_kinds; i++)
	{
	  fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
		   tree_node_counts[i], tree_node_sizes[i]);
	  total_nodes += tree_node_counts[i];
	  total_bytes += tree_node_sizes[i];
	}
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      /* Per-tree-code allocation counts.  */
      fprintf (stderr, "Code Nodes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      for (i = 0; i < (int) MAX_TREE_CODES; i++)
	fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
		 tree_code_counts[i]);
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      fprintf (stderr, "\n");
      ssanames_print_statistics ();
      fprintf (stderr, "\n");
      phinodes_print_statistics ();
      fprintf (stderr, "\n");
    }
  else
    fprintf (stderr, "(No per-node statistics)\n");

  /* These are collected regardless of GATHER_STATISTICS.  */
  print_type_hash_statistics ();
  print_debug_expr_statistics ();
  print_value_expr_statistics ();
  lang_hooks.print_statistics ();
}
9381 \f
9382 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9383
/* Fold the BITS high-order bits of VALUE, most significant first, into
   the running CRC-32 CHKSUM (polynomial 0x04c11db7) and return the
   updated checksum.  */

static unsigned
crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
{
  while (bits--)
    {
      /* If the incoming bit differs from the checksum's top bit,
	 fold in the CRC-32 polynomial.  */
      unsigned feedback
	= ((value ^ chksum) & 0x80000000) ? 0x04c11db7 : 0;
      chksum = (chksum << 1) ^ feedback;
      value <<= 1;
    }
  return chksum;
}
9401
/* Generate a crc32 of a 32-bit unsigned.  Folds all 32 bits of VALUE,
   most significant first, into CHKSUM and returns the new checksum.  */

unsigned
crc32_unsigned (unsigned chksum, unsigned value)
{
  return crc32_unsigned_bits (chksum, value, 32);
}
9409
/* Generate a crc32 of a byte.  BYTE is shifted to the top of the word
   because crc32_unsigned_bits consumes the most significant bits.  */

unsigned
crc32_byte (unsigned chksum, char byte)
{
  return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
}
9417
9418 /* Generate a crc32 of a string. */
9419
9420 unsigned
9421 crc32_string (unsigned chksum, const char *string)
9422 {
9423 do
9424 {
9425 chksum = crc32_byte (chksum, *string);
9426 }
9427 while (*string++);
9428 return chksum;
9429 }
9430
9431 /* P is a string that will be used in a symbol. Mask out any characters
9432 that are not valid in that context. */
9433
9434 void
9435 clean_symbol_name (char *p)
9436 {
9437 for (; *p; p++)
9438 if (! (ISALNUM (*p)
9439 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9440 || *p == '$'
9441 #endif
9442 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9443 || *p == '.'
9444 #endif
9445 ))
9446 *p = '_';
9447 }
9448
9449 /* For anonymous aggregate types, we need some sort of name to
9450 hold on to. In practice, this should not appear, but it should
9451 not be harmful if it does. */
9452 bool
9453 anon_aggrname_p(const_tree id_node)
9454 {
9455 #ifndef NO_DOT_IN_LABEL
9456 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9457 && IDENTIFIER_POINTER (id_node)[1] == '_');
9458 #else /* NO_DOT_IN_LABEL */
9459 #ifndef NO_DOLLAR_IN_LABEL
9460 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9461 && IDENTIFIER_POINTER (id_node)[1] == '_');
9462 #else /* NO_DOLLAR_IN_LABEL */
9463 #define ANON_AGGRNAME_PREFIX "__anon_"
9464 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9465 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9466 #endif /* NO_DOLLAR_IN_LABEL */
9467 #endif /* NO_DOT_IN_LABEL */
9468 }
9469
/* Return a printf-style format for building an anonymous aggregate
   name; it contains one %d for a uniquifying counter.  The prefix
   character depends on what the target allows in labels.  */
const char *
anon_aggrname_format ()
{
#ifndef NO_DOT_IN_LABEL
  static const char fmt[] = "._%d";
#else /* NO_DOT_IN_LABEL */
#ifndef NO_DOLLAR_IN_LABEL
  static const char fmt[] = "$_%d";
#else /* NO_DOLLAR_IN_LABEL */
  static const char fmt[] = "__anon_%d";
#endif /* NO_DOLLAR_IN_LABEL */
#endif /* NO_DOT_IN_LABEL */
  return fmt;
}
9484
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D sufixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (strncmp (type, "sub_", 4) == 0
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      /* 9 bytes for "_%08X_", 17 for the HOST_WIDE_INT hex seed, plus
	 the file name and its NUL.  */
      len = strlen (file);
      q = (char *) alloca (9 + 17 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace characters not valid in a symbol with '_'.  */
  clean_symbol_name (q);

  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
9559 \f
9560 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9561
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: compute the buffer size needed for
     the "expected A or B or ..." message.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build the message into an alloca'd buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* No expected codes were supplied; give the vaguer message.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9605
/* Complain that the tree code of NODE does match the expected 0
   terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
   the caller.  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: size the message buffer.
     NOTE(review): unlike tree_check_failed there is no fallback for an
     empty code list; callers appear to always pass at least one code —
     confirm before relying on an empty list here.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build the " A or B or ..." list in place.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9642
/* Similar to tree_check_failed, except that we check for a class of tree
   code, given in CL.  Reports the expected class, and the actual class
   and code of NODE, then aborts via internal_error.  */

void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9656
9657 /* Similar to tree_check_failed, except that instead of specifying a
9658 dozen codes, use the knowledge that they're all sequential. */
9659
void
tree_range_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum tree_code c1,
			 enum tree_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* First pass: compute an upper bound on the message size.  Each name
     is charged 4 separator bytes (" or "); adding strlen ("expected ")
     below covers the longer first prefix, and the unused separator
     slot of the first entry covers the trailing NUL.  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (get_tree_code_name ((enum tree_code) c));

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Second pass: build "expected A or B or ...".  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
      length += strlen (get_tree_code_name ((enum tree_code) c));
    }

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9690
9691
9692 /* Similar to tree_check_failed, except that we check that a tree does
9693 not have the specified code, given in CL. */
9694
9695 void
9696 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9697 const char *file, int line, const char *function)
9698 {
9699 internal_error
9700 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9701 TREE_CODE_CLASS_STRING (cl),
9702 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9703 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9704 }
9705
9706
9707 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9708
9709 void
9710 omp_clause_check_failed (const_tree node, const char *file, int line,
9711 const char *function, enum omp_clause_code code)
9712 {
9713 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9714 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9715 function, trim_filename (file), line);
9716 }
9717
9718
9719 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9720
9721 void
9722 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9723 const char *function, enum omp_clause_code c1,
9724 enum omp_clause_code c2)
9725 {
9726 char *buffer;
9727 unsigned length = 0;
9728 unsigned int c;
9729
9730 for (c = c1; c <= c2; ++c)
9731 length += 4 + strlen (omp_clause_code_name[c]);
9732
9733 length += strlen ("expected ");
9734 buffer = (char *) alloca (length);
9735 length = 0;
9736
9737 for (c = c1; c <= c2; ++c)
9738 {
9739 const char *prefix = length ? " or " : "expected ";
9740
9741 strcpy (buffer + length, prefix);
9742 length += strlen (prefix);
9743 strcpy (buffer + length, omp_clause_code_name[c]);
9744 length += strlen (omp_clause_code_name[c]);
9745 }
9746
9747 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9748 buffer, omp_clause_code_name[TREE_CODE (node)],
9749 function, trim_filename (file), line);
9750 }
9751
9752
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

/* Printable names for the tree_node_structure_enum values, generated
   from treestruct.def by expanding each DEFTREESTRUCT entry to its
   NAME string.  */
static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value EN to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9762
9763 /* Similar to tree_class_check_failed, except that we check for
9764 whether CODE contains the tree structure identified by EN. */
9765
9766 void
9767 tree_contains_struct_check_failed (const_tree node,
9768 const enum tree_node_structure_enum en,
9769 const char *file, int line,
9770 const char *function)
9771 {
9772 internal_error
9773 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9774 TS_ENUM_NAME (en),
9775 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9776 }
9777
9778
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) vector of elements.  */
9781
void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  /* IDX is the 0-based element index accessed; it is reported 1-based.  */
  internal_error
    ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9790
9791 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9792 (dynamically sized) vector. */
9793
void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  /* IDX is the 0-based element index accessed; it is reported 1-based.  */
  internal_error
    ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9802
9803 /* Similar to above, except that the check is for the bounds of the operand
9804 vector of an expression node EXP. */
9805
9806 void
9807 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9808 int line, const char *function)
9809 {
9810 enum tree_code code = TREE_CODE (exp);
9811 internal_error
9812 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9813 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9814 function, trim_filename (file), line);
9815 }
9816
9817 /* Similar to above, except that the check is for the number of
9818 operands of an OMP_CLAUSE node. */
9819
9820 void
9821 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9822 int line, const char *function)
9823 {
9824 internal_error
9825 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9826 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9827 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9828 trim_filename (file), line);
9829 }
9830 #endif /* ENABLE_TREE_CHECKING */
9831 \f
9832 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9833 and mapped to the machine mode MODE. Initialize its fields and build
9834 the information necessary for debugging output. */
9835
static tree
make_vector_type (tree innertype, int nunits, machine_mode mode)
{
  tree t;
  inchash::hash hstate;
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  /* Build the vector on the main variant of the element type; any
     qualifiers/attributes of INNERTYPE are reapplied at the end.  */
  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* Recurse with VOIDmode to build the canonical type from the
       canonical element type.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Hash code, unit count, mode and element type so identical vector
     types are shared through type_hash_canon.  */
  hstate.add_wide_int (VECTOR_TYPE);
  hstate.add_wide_int (nunits);
  hstate.add_wide_int (mode);
  hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
  t = type_hash_canon (hstate.end (), t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
9874
/* Return an integer type node of precision SIZE (unsigned when
   UNSIGNEDP), reusing a pre-built standard node when one matches.  */

static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  /* The checks are ordered, so when several C type sizes coincide the
     earlier one is returned.  */
  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
            : long_long_integer_type_node);

  /* Next try the target's enabled __intN types.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (size == int_n_data[i].bitsize
	&& int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
	      : int_n_trees[i].signed_type);

  /* No existing node matched; build a fresh type of this precision.  */
  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}
9903
9904 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9905
static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  /* Reuse a pre-built fract type node when SIZE matches a standard
     width; the checks are ordered, so if widths coincide the first
     match wins.  */
  if (satp)
    {
      /* Saturating variants.  */
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  else
    {
      /* Non-saturating variants.  */
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  /* No pre-built node matched; build a fresh fract type.  */
  return make_fract_type (size, unsignedp, satp);
}
9940
9941 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9942
static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  /* Reuse a pre-built accum type node when SIZE matches a standard
     width; the checks are ordered, so if widths coincide the first
     match wins.  */
  if (satp)
    {
      /* Saturating variants.  */
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  else
    {
      /* Non-saturating variants.  */
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  /* No pre-built node matched; build a fresh accum type.  */
  return make_accum_type (size, unsignedp, satp);
}
9977
9978
9979 /* Create an atomic variant node for TYPE. This routine is called
9980 during initialization of data types to create the 5 basic atomic
9981 types. The generic build_variant_type function requires these to
9982 already be set up in order to function properly, so cannot be
9983 called from there. If ALIGN is non-zero, then ensure alignment is
9984 overridden to this value. */
9985
static tree
build_atomic_base (tree type, unsigned int align)
{
  tree t;

  /* Make sure it's not already registered.  */
  if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
    return t;

  t = build_variant_type_copy (type);
  set_type_quals (t, TYPE_QUAL_ATOMIC);

  /* If ALIGN is non-zero, override the variant's natural alignment.  */
  if (align)
    TYPE_ALIGN (t) = align;

  return t;
}
10003
10004 /* Create nodes for all integer types (and error_mark_node) using the sizes
10005 of C datatypes. SIGNED_CHAR specifies whether char is signed,
10006 SHORT_DOUBLE specifies whether double should be of the same precision
10007 as float. */
10008
void
build_common_tree_nodes (bool signed_char, bool short_double)
{
  int i;

  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Build the target's __intN types and record in integer_types[]
     those that are enabled and wider than long long.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
      TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
      TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);

      if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
	  && int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      int i;

      /* Otherwise SIZE_TYPE must name one of the enabled __intN
	 unsigned types; anything else is a configuration error.  */
      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  pointer_bounds_type_node = targetm.chkp_bound_type ();

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  fileptr_type_node = ptr_type_node;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  /* SHORT_DOUBLE makes double the same precision as float.  */
  if (short_double)
    TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
  else
    TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);

  /* Decimal float types.  */
  dfloat32_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
  layout_type (dfloat32_type_node);
  SET_TYPE_MODE (dfloat32_type_node, SDmode);
  dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);

  dfloat64_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
  layout_type (dfloat64_type_node);
  SET_TYPE_MODE (dfloat64_type_node, DDmode);
  dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);

  dfloat128_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
  layout_type (dfloat128_type_node);
  SET_TYPE_MODE (dfloat128_type_node, TDmode);
  dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);

  complex_integer_type_node = build_complex_type (integer_type_node);
  complex_float_type_node = build_complex_type (float_type_node);
  complex_double_type_node = build_complex_type (double_type_node);
  complex_long_double_type_node = build_complex_type (long_double_type_node);

  /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
		(GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }
}
10282
10283 /* Modify DECL for given flags.
10284 TM_PURE attribute is set only on types, so the function will modify
10285 DECL's type when ECF_TM_PURE is used. */
10286
void
set_call_expr_flags (tree decl, int flags)
{
  /* Translate each ECF_* bit in FLAGS into the corresponding bit or
     attribute on DECL (or, for ECF_TM_PURE, on DECL's type).  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* A noreturn function is represented by TREE_THIS_VOLATILE on its
     decl.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* ECF_LEAF is represented as a "leaf" attribute on the decl.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
10316
10317
10318 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10319
10320 static void
10321 local_define_builtin (const char *name, tree type, enum built_in_function code,
10322 const char *library_name, int ecf_flags)
10323 {
10324 tree decl;
10325
10326 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10327 library_name, NULL_TREE);
10328 set_call_expr_flags (decl, ecf_flags);
10329
10330 set_builtin_decl (code, decl, true);
10331 }
10332
10333 /* Call this function after instantiating all builtins that the language
10334 front end cares about. This will build the rest of the builtins
10335 and internal functions that are relied upon by the tree optimizers and
10336 the middle-end. */
10337
10338 void
10339 build_common_builtin_nodes (void)
10340 {
10341 tree tmp, ftype;
10342 int ecf_flags;
10343
10344 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10345 {
10346 ftype = build_function_type (void_type_node, void_list_node);
10347 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10348 "__builtin_unreachable",
10349 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10350 | ECF_CONST);
10351 }
10352
10353 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10354 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10355 {
10356 ftype = build_function_type_list (ptr_type_node,
10357 ptr_type_node, const_ptr_type_node,
10358 size_type_node, NULL_TREE);
10359
10360 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10361 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10362 "memcpy", ECF_NOTHROW | ECF_LEAF);
10363 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10364 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10365 "memmove", ECF_NOTHROW | ECF_LEAF);
10366 }
10367
10368 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10369 {
10370 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10371 const_ptr_type_node, size_type_node,
10372 NULL_TREE);
10373 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10374 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10375 }
10376
10377 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10378 {
10379 ftype = build_function_type_list (ptr_type_node,
10380 ptr_type_node, integer_type_node,
10381 size_type_node, NULL_TREE);
10382 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10383 "memset", ECF_NOTHROW | ECF_LEAF);
10384 }
10385
10386 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10387 {
10388 ftype = build_function_type_list (ptr_type_node,
10389 size_type_node, NULL_TREE);
10390 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10391 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10392 }
10393
10394 ftype = build_function_type_list (ptr_type_node, size_type_node,
10395 size_type_node, NULL_TREE);
10396 local_define_builtin ("__builtin_alloca_with_align", ftype,
10397 BUILT_IN_ALLOCA_WITH_ALIGN,
10398 "__builtin_alloca_with_align",
10399 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10400
10401 /* If we're checking the stack, `alloca' can throw. */
10402 if (flag_stack_check)
10403 {
10404 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10405 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10406 }
10407
10408 ftype = build_function_type_list (void_type_node,
10409 ptr_type_node, ptr_type_node,
10410 ptr_type_node, NULL_TREE);
10411 local_define_builtin ("__builtin_init_trampoline", ftype,
10412 BUILT_IN_INIT_TRAMPOLINE,
10413 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10414 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10415 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10416 "__builtin_init_heap_trampoline",
10417 ECF_NOTHROW | ECF_LEAF);
10418
10419 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10420 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10421 BUILT_IN_ADJUST_TRAMPOLINE,
10422 "__builtin_adjust_trampoline",
10423 ECF_CONST | ECF_NOTHROW);
10424
10425 ftype = build_function_type_list (void_type_node,
10426 ptr_type_node, ptr_type_node, NULL_TREE);
10427 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10428 BUILT_IN_NONLOCAL_GOTO,
10429 "__builtin_nonlocal_goto",
10430 ECF_NORETURN | ECF_NOTHROW);
10431
10432 ftype = build_function_type_list (void_type_node,
10433 ptr_type_node, ptr_type_node, NULL_TREE);
10434 local_define_builtin ("__builtin_setjmp_setup", ftype,
10435 BUILT_IN_SETJMP_SETUP,
10436 "__builtin_setjmp_setup", ECF_NOTHROW);
10437
10438 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10439 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10440 BUILT_IN_SETJMP_RECEIVER,
10441 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10442
10443 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10444 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10445 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10446
10447 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10448 local_define_builtin ("__builtin_stack_restore", ftype,
10449 BUILT_IN_STACK_RESTORE,
10450 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10451
10452 /* If there's a possibility that we might use the ARM EABI, build the
10453 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10454 if (targetm.arm_eabi_unwinder)
10455 {
10456 ftype = build_function_type_list (void_type_node, NULL_TREE);
10457 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10458 BUILT_IN_CXA_END_CLEANUP,
10459 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10460 }
10461
10462 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10463 local_define_builtin ("__builtin_unwind_resume", ftype,
10464 BUILT_IN_UNWIND_RESUME,
10465 ((targetm_common.except_unwind_info (&global_options)
10466 == UI_SJLJ)
10467 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10468 ECF_NORETURN);
10469
10470 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10471 {
10472 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10473 NULL_TREE);
10474 local_define_builtin ("__builtin_return_address", ftype,
10475 BUILT_IN_RETURN_ADDRESS,
10476 "__builtin_return_address",
10477 ECF_NOTHROW);
10478 }
10479
10480 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10481 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10482 {
10483 ftype = build_function_type_list (void_type_node, ptr_type_node,
10484 ptr_type_node, NULL_TREE);
10485 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10486 local_define_builtin ("__cyg_profile_func_enter", ftype,
10487 BUILT_IN_PROFILE_FUNC_ENTER,
10488 "__cyg_profile_func_enter", 0);
10489 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10490 local_define_builtin ("__cyg_profile_func_exit", ftype,
10491 BUILT_IN_PROFILE_FUNC_EXIT,
10492 "__cyg_profile_func_exit", 0);
10493 }
10494
10495 /* The exception object and filter values from the runtime. The argument
10496 must be zero before exception lowering, i.e. from the front end. After
10497 exception lowering, it will be the region number for the exception
10498 landing pad. These functions are PURE instead of CONST to prevent
10499 them from being hoisted past the exception edge that will initialize
10500 its value in the landing pad. */
10501 ftype = build_function_type_list (ptr_type_node,
10502 integer_type_node, NULL_TREE);
10503 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10504 /* Only use TM_PURE if we have TM language support. */
10505 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10506 ecf_flags |= ECF_TM_PURE;
10507 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10508 "__builtin_eh_pointer", ecf_flags);
10509
10510 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10511 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10512 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10513 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10514
10515 ftype = build_function_type_list (void_type_node,
10516 integer_type_node, integer_type_node,
10517 NULL_TREE);
10518 local_define_builtin ("__builtin_eh_copy_values", ftype,
10519 BUILT_IN_EH_COPY_VALUES,
10520 "__builtin_eh_copy_values", ECF_NOTHROW);
10521
10522 /* Complex multiplication and division. These are handled as builtins
10523 rather than optabs because emit_library_call_value doesn't support
10524 complex. Further, we can do slightly better with folding these
10525 beasties if the real and complex parts of the arguments are separate. */
10526 {
10527 int mode;
10528
10529 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10530 {
10531 char mode_name_buf[4], *q;
10532 const char *p;
10533 enum built_in_function mcode, dcode;
10534 tree type, inner_type;
10535 const char *prefix = "__";
10536
10537 if (targetm.libfunc_gnu_prefix)
10538 prefix = "__gnu_";
10539
10540 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10541 if (type == NULL)
10542 continue;
10543 inner_type = TREE_TYPE (type);
10544
10545 ftype = build_function_type_list (type, inner_type, inner_type,
10546 inner_type, inner_type, NULL_TREE);
10547
10548 mcode = ((enum built_in_function)
10549 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10550 dcode = ((enum built_in_function)
10551 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10552
10553 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10554 *q = TOLOWER (*p);
10555 *q = '\0';
10556
10557 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10558 NULL);
10559 local_define_builtin (built_in_names[mcode], ftype, mcode,
10560 built_in_names[mcode],
10561 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10562
10563 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10564 NULL);
10565 local_define_builtin (built_in_names[dcode], ftype, dcode,
10566 built_in_names[dcode],
10567 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10568 }
10569 }
10570
10571 init_internal_fns ();
10572 }
10573
10574 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10575 better way.
10576
10577 If we requested a pointer to a vector, build up the pointers that
10578 we stripped off while looking for the inner type. Similarly for
10579 return values from functions.
10580
10581 The argument TYPE is the top of the chain, and BOTTOM is the
10582 new type which we will point to. */
10583
tree
reconstruct_complex_type (tree type, tree bottom)
{
  tree inner, outer;

  /* Peel one wrapper level off TYPE, recursively rebuild the inner part
     with BOTTOM at the core, then re-apply the same kind of wrapper,
     preserving the mode and the can-alias-all flag where applicable.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
					   TYPE_REF_CAN_ALIAS_ALL (type));
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
					     TYPE_REF_CAN_ALIAS_ALL (type));
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_array_type (inner, TYPE_DOMAIN (type));
    }
  else if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_function_type (inner, TYPE_ARG_TYPES (type));
    }
  else if (TREE_CODE (type) == METHOD_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      /* The build_method_type_directly() routine prepends 'this' to argument list,
	 so we must compensate by getting rid of it.  */
      outer
	= build_method_type_directly
	    (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
	     inner,
	     TREE_CHAIN (TYPE_ARG_TYPES (type)));
    }
  else if (TREE_CODE (type) == OFFSET_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
    }
  /* Base case: nothing left to peel, substitute BOTTOM itself.  */
  else
    return bottom;

  /* Carry over this level's attributes and qualifiers to the rebuilt type.  */
  return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
					    TYPE_QUALS (type));
}
10633
10634 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10635 the inner type. */
10636 tree
10637 build_vector_type_for_mode (tree innertype, machine_mode mode)
10638 {
10639 int nunits;
10640
10641 switch (GET_MODE_CLASS (mode))
10642 {
10643 case MODE_VECTOR_INT:
10644 case MODE_VECTOR_FLOAT:
10645 case MODE_VECTOR_FRACT:
10646 case MODE_VECTOR_UFRACT:
10647 case MODE_VECTOR_ACCUM:
10648 case MODE_VECTOR_UACCUM:
10649 nunits = GET_MODE_NUNITS (mode);
10650 break;
10651
10652 case MODE_INT:
10653 /* Check that there are no leftover bits. */
10654 gcc_assert (GET_MODE_BITSIZE (mode)
10655 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10656
10657 nunits = GET_MODE_BITSIZE (mode)
10658 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10659 break;
10660
10661 default:
10662 gcc_unreachable ();
10663 }
10664
10665 return make_vector_type (innertype, nunits, mode);
10666 }
10667
10668 /* Similarly, but takes the inner type and number of units, which must be
10669 a power of two. */
10670
tree
build_vector_type (tree innertype, int nunits)
{
  /* VOIDmode tells make_vector_type to deduce the vector mode from the
     inner type's mode and NUNITS.  */
  return make_vector_type (innertype, nunits, VOIDmode);
}
10676
10677 /* Build truth vector with specified length and number of units. */
10678
10679 tree
10680 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10681 {
10682 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10683 vector_size);
10684
10685 gcc_assert (mask_mode != VOIDmode);
10686
10687 unsigned HOST_WIDE_INT vsize;
10688 if (mask_mode == BLKmode)
10689 vsize = vector_size * BITS_PER_UNIT;
10690 else
10691 vsize = GET_MODE_BITSIZE (mask_mode);
10692
10693 unsigned HOST_WIDE_INT esize = vsize / nunits;
10694 gcc_assert (esize * nunits == vsize);
10695
10696 tree bool_type = build_nonstandard_boolean_type (esize);
10697
10698 return make_vector_type (bool_type, nunits, mask_mode);
10699 }
10700
10701 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10702
10703 tree
10704 build_same_sized_truth_vector_type (tree vectype)
10705 {
10706 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10707 return vectype;
10708
10709 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10710
10711 if (!size)
10712 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10713
10714 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10715 }
10716
10717 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10718
tree
build_opaque_vector_type (tree innertype, int nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  /* Splice CAND into T's variant list immediately after T and make it a
     variant of T's main variant (order of these stores matters: CAND's
     next pointer must be captured before T's is overwritten).  */
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}
10741
10742
10743 /* Given an initializer INIT, return TRUE if INIT is zero or some
10744 aggregate of zeros. Otherwise return FALSE. */
bool
initializer_zerop (const_tree init)
{
  tree elt;

  STRIP_NOPS (init);

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      return integer_zerop (init);

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      return real_zerop (init)
	&& ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));

    case FIXED_CST:
      return fixed_zerop (init);

    case COMPLEX_CST:
      /* Both real and imaginary parts must be zero; for floats, neither
	 part may be a negative zero.  */
      return integer_zerop (init)
	|| (real_zerop (init)
	    && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	    && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));

    case VECTOR_CST:
      {
	/* A vector is zero iff every element is zero.  */
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
	  if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
	    return false;
	return true;
      }

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;

	/* A clobber marks the value as undefined, not zero.  */
	if (TREE_CLOBBER_P (init))
	  return false;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt))
	    return false;
	return true;
      }

    case STRING_CST:
      {
	int i;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    return false;

	return true;
      }

    default:
      return false;
    }
}
10811
10812 /* Check if vector VEC consists of all the equal elements and
10813 that the number of elements corresponds to the type of VEC.
10814 The function returns first element of the vector
10815 or NULL_TREE if the vector is not uniform. */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned i;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* Compare every later element against element 0.  */
      first = VECTOR_CST_ELT (vec, 0);
      for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
	if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
	  return NULL_TREE;

      return first;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR)
    {
      first = error_mark_node;

      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
        {
          if (i == 0)
            {
              first = t;
              continue;
            }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
        }
      /* A CONSTRUCTOR may have fewer elements than the vector type;
	 in that case the remainder is implicit and the vector is not
	 considered uniform.  */
      if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
10859
10860 /* Build an empty statement at location LOC. */
10861
10862 tree
10863 build_empty_stmt (location_t loc)
10864 {
10865 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10866 SET_EXPR_LOCATION (t, loc);
10867 return t;
10868 }
10869
10870
10871 /* Build an OpenMP clause with code CODE. LOC is the location of the
10872 clause. */
10873
tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  /* CODE determines how many operands trail the clause; one operand
     slot is already counted inside struct tree_omp_clause, hence
     "length - 1".  */
  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  /* Allocate from the GC heap and zero everything before setting the
     tree code and clause fields.  */
  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}
10893
10894 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10895 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10896 Except for the CODE and operand count field, other storage for the
10897 object is initialized to zeros. */
10898
tree
build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* One operand slot is part of struct tree_exp already.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
10920
10921 /* Helper function for build_call_* functions; build a CALL_EXPR with
10922 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10923 the argument slots. */
10924
10925 static tree
10926 build_call_1 (tree return_type, tree fn, int nargs)
10927 {
10928 tree t;
10929
10930 t = build_vl_exp (CALL_EXPR, nargs + 3);
10931 TREE_TYPE (t) = return_type;
10932 CALL_EXPR_FN (t) = fn;
10933 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10934
10935 return t;
10936 }
10937
10938 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10939 FN and a null static chain slot. NARGS is the number of call arguments
10940 which are specified as "..." arguments. */
10941
10942 tree
10943 build_call_nary (tree return_type, tree fn, int nargs, ...)
10944 {
10945 tree ret;
10946 va_list args;
10947 va_start (args, nargs);
10948 ret = build_call_valist (return_type, fn, nargs, args);
10949 va_end (args);
10950 return ret;
10951 }
10952
10953 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10954 FN and a null static chain slot. NARGS is the number of call arguments
10955 which are specified as a va_list ARGS. */
10956
10957 tree
10958 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10959 {
10960 tree t;
10961 int i;
10962
10963 t = build_call_1 (return_type, fn, nargs);
10964 for (i = 0; i < nargs; i++)
10965 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10966 process_call_operands (t);
10967 return t;
10968 }
10969
10970 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10971 FN and a null static chain slot. NARGS is the number of call arguments
10972 which are specified as a tree array ARGS. */
10973
10974 tree
10975 build_call_array_loc (location_t loc, tree return_type, tree fn,
10976 int nargs, const tree *args)
10977 {
10978 tree t;
10979 int i;
10980
10981 t = build_call_1 (return_type, fn, nargs);
10982 for (i = 0; i < nargs; i++)
10983 CALL_EXPR_ARG (t, i) = args[i];
10984 process_call_operands (t);
10985 SET_EXPR_LOCATION (t, loc);
10986 return t;
10987 }
10988
10989 /* Like build_call_array, but takes a vec. */
10990
10991 tree
10992 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10993 {
10994 tree ret, t;
10995 unsigned int ix;
10996
10997 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10998 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10999 CALL_EXPR_ARG (ret, ix) = t;
11000 process_call_operands (ret);
11001 return ret;
11002 }
11003
11004 /* Conveniently construct a function call expression. FNDECL names the
11005 function to be called and N arguments are passed in the array
11006 ARGARRAY. */
11007
11008 tree
11009 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11010 {
11011 tree fntype = TREE_TYPE (fndecl);
11012 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11013
11014 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11015 }
11016
11017 /* Conveniently construct a function call expression. FNDECL names the
11018 function to be called and the arguments are passed in the vector
11019 VEC. */
11020
11021 tree
11022 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11023 {
11024 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11025 vec_safe_address (vec));
11026 }
11027
11028
11029 /* Conveniently construct a function call expression. FNDECL names the
11030 function to be called, N is the number of arguments, and the "..."
11031 parameters are the argument expressions. */
11032
11033 tree
11034 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11035 {
11036 va_list ap;
11037 tree *argarray = XALLOCAVEC (tree, n);
11038 int i;
11039
11040 va_start (ap, n);
11041 for (i = 0; i < n; i++)
11042 argarray[i] = va_arg (ap, tree);
11043 va_end (ap);
11044 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11045 }
11046
11047 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11048 varargs macros aren't supported by all bootstrap compilers. */
11049
11050 tree
11051 build_call_expr (tree fndecl, int n, ...)
11052 {
11053 va_list ap;
11054 tree *argarray = XALLOCAVEC (tree, n);
11055 int i;
11056
11057 va_start (ap, n);
11058 for (i = 0; i < n; i++)
11059 argarray[i] = va_arg (ap, tree);
11060 va_end (ap);
11061 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11062 }
11063
11064 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11065 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11066 It will get gimplified later into an ordinary internal function. */
11067
11068 tree
11069 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11070 tree type, int n, const tree *args)
11071 {
11072 tree t = build_call_1 (type, NULL_TREE, n);
11073 for (int i = 0; i < n; ++i)
11074 CALL_EXPR_ARG (t, i) = args[i];
11075 SET_EXPR_LOCATION (t, loc);
11076 CALL_EXPR_IFN (t) = ifn;
11077 return t;
11078 }
11079
11080 /* Build internal call expression. This is just like CALL_EXPR, except
11081 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11082 internal function. */
11083
11084 tree
11085 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11086 tree type, int n, ...)
11087 {
11088 va_list ap;
11089 tree *argarray = XALLOCAVEC (tree, n);
11090 int i;
11091
11092 va_start (ap, n);
11093 for (i = 0; i < n; i++)
11094 argarray[i] = va_arg (ap, tree);
11095 va_end (ap);
11096 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11097 }
11098
11099 /* Return a function call to FN, if the target is guaranteed to support it,
11100 or null otherwise.
11101
11102 N is the number of arguments, passed in the "...", and TYPE is the
11103 type of the return value. */
11104
tree
maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
			   int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  /* Gather the N "..." arguments into ARGARRAY.  */
  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  if (internal_fn_p (fn))
    {
      internal_fn ifn = as_internal_fn (fn);
      if (direct_internal_fn_p (ifn))
	{
	  /* A directly-mapped internal function is only usable when the
	     target supports it for the deduced types; otherwise signal
	     failure by returning null.  */
	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
	  if (!direct_internal_fn_supported_p (ifn, types,
					       OPTIMIZE_FOR_BOTH))
	    return NULL_TREE;
	}
      return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
    }
  else
    {
      /* A built-in function: usable only if its implicit decl exists.  */
      tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
      if (!fndecl)
	return NULL_TREE;
      return build_call_expr_loc_array (loc, fndecl, n, argarray);
    }
}
11137
11138 /* Create a new constant string literal and return a char* pointer to it.
11139 The STRING_CST value is the LEN characters at STR. */
tree
build_string_literal (int len, const char *str)
{
  tree t, elem, index, type;

  t = build_string (len, str);
  /* Give the literal type "const char[LEN]": a const-qualified char
     element over the index range [0, LEN-1].  */
  elem = build_type_variant (char_type_node, 1, 0);
  index = build_index_type (size_int (len - 1));
  type = build_array_type (elem, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &literal[0], a pointer to the first character.  */
  type = build_pointer_type (elem);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, elem,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
11160
11161
11162
11163 /* Return true if T (assumed to be a DECL) must be assigned a memory
11164 location. */
11165
11166 bool
11167 needs_to_live_in_memory (const_tree t)
11168 {
11169 return (TREE_ADDRESSABLE (t)
11170 || is_global_var (t)
11171 || (TREE_CODE (t) == RESULT_DECL
11172 && !DECL_BY_REFERENCE (t)
11173 && aggregate_value_p (t, current_function_decl)));
11174 }
11175
11176 /* Return value of a constant X and sign-extend it. */
11177
HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Extend from BITS to the full host-wide width.  The mask for the
	 high bits is built with two shifts ("<< (bits - 1) << 1") so no
	 single shift count ever reaches the operand's width.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
      else
	val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
    }

  return val;
}
11198
11199 /* If TYPE is an integral or pointer type, return an integer type with
11200 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11201 if TYPE is already an integer type of signedness UNSIGNEDP. */
11202
11203 tree
11204 signed_or_unsigned_type_for (int unsignedp, tree type)
11205 {
11206 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11207 return type;
11208
11209 if (TREE_CODE (type) == VECTOR_TYPE)
11210 {
11211 tree inner = TREE_TYPE (type);
11212 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11213 if (!inner2)
11214 return NULL_TREE;
11215 if (inner == inner2)
11216 return type;
11217 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11218 }
11219
11220 if (!INTEGRAL_TYPE_P (type)
11221 && !POINTER_TYPE_P (type)
11222 && TREE_CODE (type) != OFFSET_TYPE)
11223 return NULL_TREE;
11224
11225 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11226 }
11227
11228 /* If TYPE is an integral or pointer type, return an integer type with
11229 the same precision which is unsigned, or itself if TYPE is already an
11230 unsigned integer type. */
11231
tree
unsigned_type_for (tree type)
{
  /* Thin wrapper: request the unsigned counterpart.  */
  return signed_or_unsigned_type_for (1, type);
}
11237
11238 /* If TYPE is an integral or pointer type, return an integer type with
11239 the same precision which is signed, or itself if TYPE is already a
11240 signed integer type. */
11241
tree
signed_type_for (tree type)
{
  /* Thin wrapper: request the signed counterpart.  */
  return signed_or_unsigned_type_for (0, type);
}
11247
11248 /* If TYPE is a vector type, return a signed integer vector type with the
11249 same width and number of subparts. Otherwise return boolean_type_node. */
11250
11251 tree
11252 truth_type_for (tree type)
11253 {
11254 if (TREE_CODE (type) == VECTOR_TYPE)
11255 {
11256 if (VECTOR_BOOLEAN_TYPE_P (type))
11257 return type;
11258 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11259 GET_MODE_SIZE (TYPE_MODE (type)));
11260 }
11261 else
11262 return boolean_type_node;
11263 }
11264
11265 /* Returns the largest value obtainable by casting something in INNER type to
11266 OUTER type. */
11267
tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination: bit 2 is "OUTER is
     wider", bit 1 is "OUTER unsigned", bit 0 is "INNER unsigned".  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use: the bound is 2^PREC - 1.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* The mask of the low PREC bits is exactly 2^PREC - 1 in OUTER.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
11317
11318 /* Returns the smallest value obtainable by casting something in INNER type to
11319 OUTER type. */
11320
tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
	 contains all values of INNER type.  In particular, both INNER
	 and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
	 want to obtain -2^(iprec-1).  If we are keeping the
	 precision or narrowing to a signed type, we want to obtain
	 -2^(oprec-1).  A mask of the bits from PREC-1 upward encodes
	 that value in OUTER.  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      return wide_int_to_tree (outer,
			       wi::mask (prec - 1, true,
					 TYPE_PRECISION (outer)));
    }
}
11347
11348 /* Return nonzero if two operands that are suitable for PHI nodes are
11349 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11350 SSA_NAME or invariant. Note that this is strictly an optimization.
11351 That is, callers of this function can directly call operand_equal_p
11352 and get the same result, only slower. */
11353
11354 int
11355 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11356 {
11357 if (arg0 == arg1)
11358 return 1;
11359 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11360 return 0;
11361 return operand_equal_p (arg0, arg1, 0);
11362 }
11363
11364 /* Returns number of zeros at the end of binary representation of X. */
11365
tree
num_ending_zeros (const_tree x)
{
  /* wi::ctz counts trailing zero bits; package the count as an
     INTEGER_CST of X's own type.  */
  return build_int_cst (TREE_TYPE (x), wi::ctz (x));
}
11371
11372
11373 #define WALK_SUBTREE(NODE) \
11374 do \
11375 { \
11376 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11377 if (result) \
11378 return result; \
11379 } \
11380 while (0)
11381
11382 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11383 be walked whenever a type is seen in the tree. Rest of operands and return
11384 value are as for walk_tree. */
11385
static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* ... fall through ...  */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
11461
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  LH, if non-NULL, is a
   language-specific hook called after FUNC; if it returns non-NULL or
   clears *WALK_SUBTREES, the walk below the current node stops.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

  /* Walk NODE by looping back to the top of this function instead of
     recursing, so deep right-leaning trees do not overflow the stack.
     NOTE: this macro transfers control unconditionally via goto, so a
     case label placed after a use of it is NOT fallen into.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the language-specific hook a chance to handle this node.  */
  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case VECTOR_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }
      /* WALK_SUBTREE_TAIL jumps away; the next case is not fallen into.  */

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      /* Walk the clause's operands (how many depends on the clause code),
	 then tail-walk the next clause in the chain.  */
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	/* Clauses with a single operand at index 0.  */
	case OMP_CLAUSE_DEVICE_RESIDENT:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	/* Clauses with no operands of their own.  */
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 5; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
11827 #undef WALK_SUBTREE
11828
11829 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11830
11831 tree
11832 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11833 walk_tree_lh lh)
11834 {
11835 tree result;
11836
11837 hash_set<tree> pset;
11838 result = walk_tree_1 (tp, func, data, &pset, lh);
11839 return result;
11840 }
11841
11842
11843 tree
11844 tree_block (tree t)
11845 {
11846 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11847
11848 if (IS_EXPR_CODE_CLASS (c))
11849 return LOCATION_BLOCK (t->exp.locus);
11850 gcc_unreachable ();
11851 return NULL;
11852 }
11853
11854 void
11855 tree_set_block (tree t, tree b)
11856 {
11857 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11858
11859 if (IS_EXPR_CODE_CLASS (c))
11860 {
11861 t->exp.locus = set_block (t->exp.locus, b);
11862 }
11863 else
11864 gcc_unreachable ();
11865 }
11866
11867 /* Create a nameless artificial label and put it in the current
11868 function context. The label has a location of LOC. Returns the
11869 newly created label. */
11870
11871 tree
11872 create_artificial_label (location_t loc)
11873 {
11874 tree lab = build_decl (loc,
11875 LABEL_DECL, NULL_TREE, void_type_node);
11876
11877 DECL_ARTIFICIAL (lab) = 1;
11878 DECL_IGNORED_P (lab) = 1;
11879 DECL_CONTEXT (lab) = current_function_decl;
11880 return lab;
11881 }
11882
11883 /* Given a tree, try to return a useful variable name that we can use
11884 to prefix a temporary that is being assigned the value of the tree.
11885 I.E. given <temp> = &A, return A. */
11886
11887 const char *
11888 get_name (tree t)
11889 {
11890 tree stripped_decl;
11891
11892 stripped_decl = t;
11893 STRIP_NOPS (stripped_decl);
11894 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11895 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11896 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11897 {
11898 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11899 if (!name)
11900 return NULL;
11901 return IDENTIFIER_POINTER (name);
11902 }
11903 else
11904 {
11905 switch (TREE_CODE (stripped_decl))
11906 {
11907 case ADDR_EXPR:
11908 return get_name (TREE_OPERAND (stripped_decl, 0));
11909 default:
11910 return NULL;
11911 }
11912 }
11913 }
11914
11915 /* Return true if TYPE has a variable argument list. */
11916
11917 bool
11918 stdarg_p (const_tree fntype)
11919 {
11920 function_args_iterator args_iter;
11921 tree n = NULL_TREE, t;
11922
11923 if (!fntype)
11924 return false;
11925
11926 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11927 {
11928 n = t;
11929 }
11930
11931 return n != NULL_TREE && n != void_type_node;
11932 }
11933
11934 /* Return true if TYPE has a prototype. */
11935
11936 bool
11937 prototype_p (const_tree fntype)
11938 {
11939 tree t;
11940
11941 gcc_assert (fntype != NULL_TREE);
11942
11943 t = TYPE_ARG_TYPES (fntype);
11944 return (t != NULL_TREE);
11945 }
11946
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called.  Returns NULL when BLOCK is not inlined from such a
   routine.  */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk outward through the block tree as long as each block was
     produced by inlining.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);

      /* Follow the abstract-origin chain to its end; it may pass
	 through several BLOCKs and can point to itself.  */
      while (TREE_CODE (ao) == BLOCK
	     && BLOCK_ABSTRACT_ORIGIN (ao)
	     && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	ao = BLOCK_ABSTRACT_ORIGIN (ao);

      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
11984
11985
11986 /* If EXP is inlined from an __attribute__((__artificial__))
11987 function, return the location of the original call expression. */
11988
11989 location_t
11990 tree_nonartificial_location (tree exp)
11991 {
11992 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11993
11994 if (loc)
11995 return *loc;
11996 else
11997 return EXPR_LOCATION (exp);
11998 }
11999
12000
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */
12003
/* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION node.  */

hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      /* Hash the raw bytes of the embedded cl_optimization structure.  */
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else; skipping zero bytes makes sparse settings cheap.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}
12035
/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that represented by *Y,
   a node of the same kind.  */
12039
12040 bool
12041 cl_option_hasher::equal (tree x, tree y)
12042 {
12043 const_tree const xt = x;
12044 const_tree const yt = y;
12045 const char *xp;
12046 const char *yp;
12047 size_t len;
12048
12049 if (TREE_CODE (xt) != TREE_CODE (yt))
12050 return 0;
12051
12052 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12053 {
12054 xp = (const char *)TREE_OPTIMIZATION (xt);
12055 yp = (const char *)TREE_OPTIMIZATION (yt);
12056 len = sizeof (struct cl_optimization);
12057 }
12058
12059 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12060 {
12061 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12062 TREE_TARGET_OPTION (yt));
12063 }
12064
12065 else
12066 gcc_unreachable ();
12067
12068 return (memcmp (xp, yp, len) == 0);
12069 }
12070
/* Build an OPTIMIZATION_NODE based on the options in OPTS.  Equal nodes
   are shared via a hash table.  */

tree
build_optimization_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Fill the pre-allocated scratch node with OPTS, then look it up; an
     equal node already in the hash table is returned instead.  */
  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}
12097
/* Build a TARGET_OPTION_NODE based on the options in OPTS.  Equal nodes
   are shared via a hash table.  */

tree
build_target_option_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of target option nodes.  */

  /* Fill the pre-allocated scratch node with OPTS, then look it up; an
     equal node already in the hash table is returned instead.  */
  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}
12124
12125 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12126 so that they aren't saved during PCH writing. */
12127
12128 void
12129 prepare_target_option_nodes_for_pch (void)
12130 {
12131 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12132 for (; iter != cl_option_hash_table->end (); ++iter)
12133 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12134 TREE_TARGET_GLOBALS (*iter) = NULL;
12135 }
12136
/* Determine the "ultimate origin" of a block.  The block may be an inlined
   instance of an inlined instance of a block which is local to an inline
   function, so we have to trace all of the way back through the origin chain
   to find out what sort of node actually served as the original seed for the
   given block.  Returns NULL_TREE when the block has no origin (or is its
   own abstract instance).  */

tree
block_ultimate_origin (const_tree block)
{
  tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);

  /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (BLOCK_ABSTRACT (block) && immediate_origin == block)
    return NULL_TREE;

  if (immediate_origin == NULL_TREE)
    return NULL_TREE;
  else
    {
      tree ret_val;
      tree lookahead = immediate_origin;

      /* Follow the chain of BLOCK origins until it ends (at a non-BLOCK
	 node or a self-referencing BLOCK).  */
      do
	{
	  ret_val = lookahead;
	  lookahead = (TREE_CODE (ret_val) == BLOCK
		       ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
	}
      while (lookahead != NULL && lookahead != ret_val);

      /* The block's abstract origin chain may not be the *ultimate* origin of
	 the block.  It could lead to a DECL that has an abstract origin set.
	 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
	 will give us if it has one).  Note that DECL's abstract origins are
	 supposed to be the most distant ancestor (or so decl_ultimate_origin
	 claims), so we don't need to loop following the DECL origins.  */
      if (DECL_P (ret_val))
	return DECL_ORIGIN (ret_val);

      return ret_val;
    }
}
12180
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
12201
12202 /* Return true iff conversion in EXP generates no instruction. Mark
12203 it inline so that we fully inline into the stripping functions even
12204 though we have two uses of this function. */
12205
12206 static inline bool
12207 tree_nop_conversion (const_tree exp)
12208 {
12209 tree outer_type, inner_type;
12210
12211 if (!CONVERT_EXPR_P (exp)
12212 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12213 return false;
12214 if (TREE_OPERAND (exp, 0) == error_mark_node)
12215 return false;
12216
12217 outer_type = TREE_TYPE (exp);
12218 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12219
12220 if (!inner_type)
12221 return false;
12222
12223 return tree_nop_conversion_p (outer_type, inner_type);
12224 }
12225
12226 /* Return true iff conversion in EXP generates no instruction. Don't
12227 consider conversions changing the signedness. */
12228
12229 static bool
12230 tree_sign_nop_conversion (const_tree exp)
12231 {
12232 tree outer_type, inner_type;
12233
12234 if (!tree_nop_conversion (exp))
12235 return false;
12236
12237 outer_type = TREE_TYPE (exp);
12238 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12239
12240 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12241 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12242 }
12243
12244 /* Strip conversions from EXP according to tree_nop_conversion and
12245 return the resulting expression. */
12246
12247 tree
12248 tree_strip_nop_conversions (tree exp)
12249 {
12250 while (tree_nop_conversion (exp))
12251 exp = TREE_OPERAND (exp, 0);
12252 return exp;
12253 }
12254
12255 /* Strip conversions from EXP according to tree_sign_nop_conversion
12256 and return the resulting expression. */
12257
12258 tree
12259 tree_strip_sign_nop_conversions (tree exp)
12260 {
12261 while (tree_sign_nop_conversion (exp))
12262 exp = TREE_OPERAND (exp, 0);
12263 return exp;
12264 }
12265
/* Avoid any floating point extensions from EXP.  Returns the stripped
   expression, or EXP itself when nothing can be stripped.  */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double, keeping the narrowest type that
	 represents the value exactly.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* Never mix decimal and binary floating point.  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* A narrowing conversion is not an extension; keep it.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* EXP widens SUB; strip it and look for further extensions below.  */
  return strip_float_extensions (sub);
}
12311
12312 /* Strip out all handled components that produce invariant
12313 offsets. */
12314
12315 const_tree
12316 strip_invariant_refs (const_tree op)
12317 {
12318 while (handled_component_p (op))
12319 {
12320 switch (TREE_CODE (op))
12321 {
12322 case ARRAY_REF:
12323 case ARRAY_RANGE_REF:
12324 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12325 || TREE_OPERAND (op, 2) != NULL_TREE
12326 || TREE_OPERAND (op, 3) != NULL_TREE)
12327 return NULL;
12328 break;
12329
12330 case COMPONENT_REF:
12331 if (TREE_OPERAND (op, 2) != NULL_TREE)
12332 return NULL;
12333 break;
12334
12335 default:;
12336 }
12337 op = TREE_OPERAND (op, 0);
12338 }
12339
12340 return op;
12341 }
12342
12343 static GTY(()) tree gcc_eh_personality_decl;
12344
12345 /* Return the GCC personality function decl. */
12346
12347 tree
12348 lhd_gcc_personality (void)
12349 {
12350 if (!gcc_eh_personality_decl)
12351 gcc_eh_personality_decl = build_personality_function ("gcc");
12352 return gcc_eh_personality_decl;
12353 }
12354
/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   OBJ_TYPE_REF representing a virtual call of C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  */

bool
virtual_method_call_p (const_tree target)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* TARGET's type is a pointer to the function type.  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  /* ObjC-style calls point to a plain FUNCTION_TYPE; only METHOD_TYPE
     indicates a C++ virtual call.  */
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target)))
    return false;
  return true;
}
12380
/* REF is OBJ_TYPE_REF, return the class the ref corresponds to.  */

tree
obj_type_ref_class (const_tree ref)
{
  gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
  /* Strip the pointer-to-function/method type off the reference.  */
  ref = TREE_TYPE (ref);
  gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
  ref = TREE_TYPE (ref);
  /* We look for type THIS points to.  ObjC also builds
     OBJ_TYPE_REF with non-method calls; their first parameter
     ID however also corresponds to class type.  */
  gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
		       || TREE_CODE (ref) == FUNCTION_TYPE);
  ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
  gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
  return TREE_TYPE (ref);
}
12399
12400 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12401
12402 static tree
12403 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12404 {
12405 unsigned int i;
12406 tree base_binfo, b;
12407
12408 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12409 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12410 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12411 return base_binfo;
12412 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12413 return b;
12414 return NULL;
12415 }
12416
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return, otherwise return NULL_TREE.  OFFSET is in bits.  */

tree
get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  /* Descend one sub-object per iteration until the type matches
     EXPECTED_TYPE or no containing field can be found.  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (offset < 0)
	return NULL_TREE;

      /* Find the artificial field (a base sub-object) whose bit range
	 contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (offset != 0)
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  /* No direct base matched; search recursively for a deeper one.  */
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
12478
12479 /* Returns true if X is a typedef decl. */
12480
12481 bool
12482 is_typedef_decl (const_tree x)
12483 {
12484 return (x && TREE_CODE (x) == TYPE_DECL
12485 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12486 }
12487
12488 /* Returns true iff TYPE is a type variant created for a typedef. */
12489
12490 bool
12491 typedef_variant_p (const_tree type)
12492 {
12493 return is_typedef_decl (TYPE_NAME (type));
12494 }
12495
/* Warn about a use of an identifier which was marked deprecated.  NODE is
   the declaration or type being used.  ATTR, if non-NULL, is the attribute
   list in which to look up the "deprecated" attribute; otherwise it is
   found from NODE itself.  Emits nothing when -Wdeprecated-declarations
   is disabled.  */
void
warn_deprecated_use (tree node, tree attr)
{
  const char *msg;

  if (node == 0 || !warn_deprecated_decl)
    return;

  /* When no attribute list was supplied, take it from the decl, or from
     the stub decl of a type.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* Extract the optional user-supplied message from the attribute's
     arguments.  */
  if (attr)
    msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
  else
    msg = NULL;

  bool w;
  if (DECL_P (node))
    {
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      /* Find a name to print for the type, if it has one.  */
      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      if (decl)
	{
	  if (what)
	    {
	      if (msg)
		w = warning (OPT_Wdeprecated_declarations,
			     "%qE is deprecated: %s", what, msg);
	      else
		w = warning (OPT_Wdeprecated_declarations,
			     "%qE is deprecated", what);
	    }
	  else
	    {
	      if (msg)
		w = warning (OPT_Wdeprecated_declarations,
			     "type is deprecated: %s", msg);
	      else
		w = warning (OPT_Wdeprecated_declarations,
			     "type is deprecated");
	    }
	  if (w)
	    inform (DECL_SOURCE_LOCATION (decl), "declared here");
	}
      else
	{
	  /* No stub decl: warn without a "declared here" note.  */
	  if (what)
	    {
	      if (msg)
		warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
			 what, msg);
	      else
		warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
	    }
	  else
	    {
	      if (msg)
		warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
			 msg);
	      else
		warning (OPT_Wdeprecated_declarations, "type is deprecated");
	    }
	}
    }
}
12596
12597 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12598 somewhere in it. */
12599
12600 bool
12601 contains_bitfld_component_ref_p (const_tree ref)
12602 {
12603 while (handled_component_p (ref))
12604 {
12605 if (TREE_CODE (ref) == COMPONENT_REF
12606 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12607 return true;
12608 ref = TREE_OPERAND (ref, 0);
12609 }
12610
12611 return false;
12612 }
12613
12614 /* Try to determine whether a TRY_CATCH expression can fall through.
12615 This is a subroutine of block_may_fallthru. */
12616
12617 static bool
12618 try_catch_may_fallthru (const_tree stmt)
12619 {
12620 tree_stmt_iterator i;
12621
12622 /* If the TRY block can fall through, the whole TRY_CATCH can
12623 fall through. */
12624 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12625 return true;
12626
12627 i = tsi_start (TREE_OPERAND (stmt, 1));
12628 switch (TREE_CODE (tsi_stmt (i)))
12629 {
12630 case CATCH_EXPR:
12631 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12632 catch expression and a body. The whole TRY_CATCH may fall
12633 through iff any of the catch bodies falls through. */
12634 for (; !tsi_end_p (i); tsi_next (&i))
12635 {
12636 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12637 return true;
12638 }
12639 return false;
12640
12641 case EH_FILTER_EXPR:
12642 /* The exception filter expression only matters if there is an
12643 exception. If the exception does not match EH_FILTER_TYPES,
12644 we will execute EH_FILTER_FAILURE, and we will fall through
12645 if that falls through. If the exception does match
12646 EH_FILTER_TYPES, the stack unwinder will continue up the
12647 stack, so we will not fall through. We don't know whether we
12648 will throw an exception which matches EH_FILTER_TYPES or not,
12649 so we just ignore EH_FILTER_TYPES and assume that we might
12650 throw an exception which doesn't match. */
12651 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12652
12653 default:
12654 /* This case represents statements to be executed when an
12655 exception occurs. Those statements are implicitly followed
12656 by a RESX statement to resume execution after the exception.
12657 So in this case the TRY_CATCH never falls through. */
12658 return false;
12659 }
12660 }
12661
12662 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12663 need not be 100% accurate; simply be conservative and return true if we
12664 don't know. This is used only to avoid stupidly generating extra code.
12665 If we're wrong, we'll just delete the extra code later. */
12666
12667 bool
12668 block_may_fallthru (const_tree block)
12669 {
12670 /* This CONST_CAST is okay because expr_last returns its argument
12671 unmodified and we assign it to a const_tree. */
12672 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12673
12674 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12675 {
12676 case GOTO_EXPR:
12677 case RETURN_EXPR:
12678 /* Easy cases. If the last statement of the block implies
12679 control transfer, then we can't fall through. */
12680 return false;
12681
12682 case SWITCH_EXPR:
12683 /* If SWITCH_LABELS is set, this is lowered, and represents a
12684 branch to a selected label and hence can not fall through.
12685 Otherwise SWITCH_BODY is set, and the switch can fall
12686 through. */
12687 return SWITCH_LABELS (stmt) == NULL_TREE;
12688
12689 case COND_EXPR:
12690 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12691 return true;
12692 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12693
12694 case BIND_EXPR:
12695 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12696
12697 case TRY_CATCH_EXPR:
12698 return try_catch_may_fallthru (stmt);
12699
12700 case TRY_FINALLY_EXPR:
12701 /* The finally clause is always executed after the try clause,
12702 so if it does not fall through, then the try-finally will not
12703 fall through. Otherwise, if the try clause does not fall
12704 through, then when the finally clause falls through it will
12705 resume execution wherever the try clause was going. So the
12706 whole try-finally will only fall through if both the try
12707 clause and the finally clause fall through. */
12708 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12709 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12710
12711 case MODIFY_EXPR:
12712 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12713 stmt = TREE_OPERAND (stmt, 1);
12714 else
12715 return true;
12716 /* FALLTHRU */
12717
12718 case CALL_EXPR:
12719 /* Functions that do not return do not fall through. */
12720 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12721
12722 case CLEANUP_POINT_EXPR:
12723 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12724
12725 case TARGET_EXPR:
12726 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12727
12728 case ERROR_MARK:
12729 return true;
12730
12731 default:
12732 return lang_hooks.block_may_fallthru (stmt);
12733 }
12734 }
12735
12736 /* True if we are using EH to handle cleanups. */
12737 static bool using_eh_for_cleanups_flag = false;
12738
12739 /* This routine is called from front ends to indicate eh should be used for
12740 cleanups. */
12741 void
12742 using_eh_for_cleanups (void)
12743 {
12744 using_eh_for_cleanups_flag = true;
12745 }
12746
12747 /* Query whether EH is used for cleanups. */
12748 bool
12749 using_eh_for_cleanups_p (void)
12750 {
12751 return using_eh_for_cleanups_flag;
12752 }
12753
12754 /* Wrapper for tree_code_name to ensure that tree code is valid */
12755 const char *
12756 get_tree_code_name (enum tree_code code)
12757 {
12758 const char *invalid = "<invalid tree code>";
12759
12760 if (code >= MAX_TREE_CODES)
12761 return invalid;
12762
12763 return tree_code_name[code];
12764 }
12765
12766 /* Drops the TREE_OVERFLOW flag from T. */
12767
12768 tree
12769 drop_tree_overflow (tree t)
12770 {
12771 gcc_checking_assert (TREE_OVERFLOW (t));
12772
12773 /* For tree codes with a sharing machinery re-build the result. */
12774 if (TREE_CODE (t) == INTEGER_CST)
12775 return wide_int_to_tree (TREE_TYPE (t), t);
12776
12777 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12778 and drop the flag. */
12779 t = copy_node (t);
12780 TREE_OVERFLOW (t) = 0;
12781 return t;
12782 }
12783
12784 /* Given a memory reference expression T, return its base address.
12785 The base address of a memory reference expression is the main
12786 object being referenced. For instance, the base address for
12787 'array[i].fld[j]' is 'array'. You can think of this as stripping
12788 away the offset part from a memory address.
12789
12790 This function calls handled_component_p to strip away all the inner
12791 parts of the memory reference until it reaches the base object. */
12792
12793 tree
12794 get_base_address (tree t)
12795 {
12796 while (handled_component_p (t))
12797 t = TREE_OPERAND (t, 0);
12798
12799 if ((TREE_CODE (t) == MEM_REF
12800 || TREE_CODE (t) == TARGET_MEM_REF)
12801 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12802 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12803
12804 /* ??? Either the alias oracle or all callers need to properly deal
12805 with WITH_SIZE_EXPRs before we can look through those. */
12806 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12807 return NULL_TREE;
12808
12809 return t;
12810 }
12811
12812 /* Return a tree of sizetype representing the size, in bytes, of the element
12813 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12814
12815 tree
12816 array_ref_element_size (tree exp)
12817 {
12818 tree aligned_size = TREE_OPERAND (exp, 3);
12819 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12820 location_t loc = EXPR_LOCATION (exp);
12821
12822 /* If a size was specified in the ARRAY_REF, it's the size measured
12823 in alignment units of the element type. So multiply by that value. */
12824 if (aligned_size)
12825 {
12826 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12827 sizetype from another type of the same width and signedness. */
12828 if (TREE_TYPE (aligned_size) != sizetype)
12829 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12830 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12831 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12832 }
12833
12834 /* Otherwise, take the size from that of the element type. Substitute
12835 any PLACEHOLDER_EXPR that we have. */
12836 else
12837 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12838 }
12839
12840 /* Return a tree representing the lower bound of the array mentioned in
12841 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12842
12843 tree
12844 array_ref_low_bound (tree exp)
12845 {
12846 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12847
12848 /* If a lower bound is specified in EXP, use it. */
12849 if (TREE_OPERAND (exp, 2))
12850 return TREE_OPERAND (exp, 2);
12851
12852 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12853 substituting for a PLACEHOLDER_EXPR as needed. */
12854 if (domain_type && TYPE_MIN_VALUE (domain_type))
12855 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12856
12857 /* Otherwise, return a zero of the appropriate type. */
12858 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12859 }
12860
12861 /* Return a tree representing the upper bound of the array mentioned in
12862 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12863
12864 tree
12865 array_ref_up_bound (tree exp)
12866 {
12867 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12868
12869 /* If there is a domain type and it has an upper bound, use it, substituting
12870 for a PLACEHOLDER_EXPR as needed. */
12871 if (domain_type && TYPE_MAX_VALUE (domain_type))
12872 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12873
12874 /* Otherwise fail. */
12875 return NULL_TREE;
12876 }
12877
12878 /* Returns true if REF is an array reference to an array at the end of
12879 a structure. If this is the case, the array may be allocated larger
12880 than its upper bound implies. */
12881
12882 bool
12883 array_at_struct_end_p (tree ref)
12884 {
12885 if (TREE_CODE (ref) != ARRAY_REF
12886 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12887 return false;
12888
12889 while (handled_component_p (ref))
12890 {
12891 /* If the reference chain contains a component reference to a
12892 non-union type and there follows another field the reference
12893 is not at the end of a structure. */
12894 if (TREE_CODE (ref) == COMPONENT_REF
12895 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12896 {
12897 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12898 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12899 nextf = DECL_CHAIN (nextf);
12900 if (nextf)
12901 return false;
12902 }
12903
12904 ref = TREE_OPERAND (ref, 0);
12905 }
12906
12907 /* If the reference is based on a declared entity, the size of the array
12908 is constrained by its given domain. */
12909 if (DECL_P (ref))
12910 return false;
12911
12912 return true;
12913 }
12914
12915 /* Return a tree representing the offset, in bytes, of the field referenced
12916 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12917
12918 tree
12919 component_ref_field_offset (tree exp)
12920 {
12921 tree aligned_offset = TREE_OPERAND (exp, 2);
12922 tree field = TREE_OPERAND (exp, 1);
12923 location_t loc = EXPR_LOCATION (exp);
12924
12925 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12926 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12927 value. */
12928 if (aligned_offset)
12929 {
12930 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12931 sizetype from another type of the same width and signedness. */
12932 if (TREE_TYPE (aligned_offset) != sizetype)
12933 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12934 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12935 size_int (DECL_OFFSET_ALIGN (field)
12936 / BITS_PER_UNIT));
12937 }
12938
12939 /* Otherwise, take the offset from that of the field. Substitute
12940 any PLACEHOLDER_EXPR that we have. */
12941 else
12942 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12943 }
12944
12945 /* Return the machine mode of T. For vectors, returns the mode of the
12946 inner type. The main use case is to feed the result to HONOR_NANS,
12947 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12948
12949 machine_mode
12950 element_mode (const_tree t)
12951 {
12952 if (!TYPE_P (t))
12953 t = TREE_TYPE (t);
12954 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12955 t = TREE_TYPE (t);
12956 return TYPE_MODE (t);
12957 }
12958
12959
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
     - TYPE_METHODS is always NULL for variant types and maintained for
       main variant only.
   */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by " #flag ".");			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
  if (TYPE_ARTIFICIAL (tv) && 0)
    verify_variant_match (TYPE_ARTIFICIAL);
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_ALIGN_OK);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during the libstdc++ build.  */
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
    verify_variant_match (TYPE_FINAL_P);

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_SIZE);
      verify_variant_match (TYPE_MODE);
      if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
	  /* FIXME: ideally we should compare pointer equality, but java FE
	     produces variants where size is INTEGER_CST of different type
	     (int wrt size_type) during the libjava build.  */
	  && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
	{
	  error ("type variant has different TYPE_SIZE_UNIT");
	  debug_tree (tv);
	  error ("type variant's TYPE_SIZE_UNIT");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type's TYPE_SIZE_UNIT");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
    }
  verify_variant_match (TYPE_PRECISION);
  verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ BY TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.   */
  if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
    verify_variant_match (TYPE_CONTEXT);
  verify_variant_match (TYPE_STRING_FLAG);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different TYPE_VFIELD");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
       || TREE_CODE (t) == INTEGER_TYPE
       || TREE_CODE (t) == BOOLEAN_TYPE
       || TREE_CODE (t) == REAL_TYPE
       || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
    {
      error ("type variant has TYPE_METHODS");
      debug_tree (tv);
      return false;
    }
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that
     builds first a dummy BINFO and then sometimes replaces it by a real
     BINFO in some of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do
	 checking at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different TYPE_BINFO");
      debug_tree (tv);
      error ("type variant's TYPE_BINFO");
      debug_tree (TYPE_BINFO (tv));
      error ("type's TYPE_BINFO");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE)
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different TYPE_FIELDS");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different TREE_TYPE");
      debug_tree (tv);
      error ("type variant's TREE_TYPE");
      debug_tree (TREE_TYPE (tv));
      error ("type's TREE_TYPE");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its vairant");
      debug_tree (tv);
      error ("type variant's TREE_TYPE");
      debug_tree (TREE_TYPE (tv));
      error ("type's TREE_TYPE");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
13197
13198
13199 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13200 the middle-end types_compatible_p function. It needs to avoid
13201 claiming types are different for types that should be treated
13202 the same with respect to TBAA. Canonical types are also used
13203 for IL consistency checks via the useless_type_conversion_p
13204 predicate which does not handle all type kinds itself but falls
13205 back to pointer-comparison of TYPE_CANONICAL for aggregates
13206 for example. */
13207
13208 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13209 type calculation because we need to allow inter-operability between signed
13210 and unsigned variants. */
13211
13212 bool
13213 type_with_interoperable_signedness (const_tree type)
13214 {
13215 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13216 signed char and unsigned char. Similarly fortran FE builds
13217 C_SIZE_T as signed type, while C defines it unsigned. */
13218
13219 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13220 == INTEGER_TYPE
13221 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13222 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13223 }
13224
/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.
   This function is used both by lto.c canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
   that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
   only for LTO because only in these cases TYPE_CANONICAL equivalence
   corresponds to one defined by gimple_canonical_types_compatible_p.  */

bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
				     bool trust_type_canonical)
{
  /* Type variants should be same as the main variant.  When not doing sanity
     checking to verify this fact, go to main variants and save some work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete type.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
	1) mode assuming that types are complete matching their structure
	2) mode allowing incomplete types but producing equivalence classes
	   and thus ignoring all info from complete types
	3) mode allowing incomplete types to match complete but checking
	   compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used i.e. for warnings during
     declaration merging.  */

  gcc_assert (!trust_type_canonical
	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
  /* If the types have been previously registered and found equal
     they still are.  */

  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure to not return
	 false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
			   && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different mode.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	return false;

      /* In some cases the signed and unsigned types are required to be
	 inter-operable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
	  && !type_with_interoperable_signedness (t1))
	return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
	 interoperable with "signed char".  Unless all frontends are revisited
	 to agree on these types, we must ignore the flag completely.  */

      /* The Fortran standard defines a C_PTR type that is compatible with
	 every C pointer.  For this reason we need to glob all pointers into
	 one.  Still pointers in different address spaces are not
	 compatible.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2),
						    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      trust_type_canonical))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* Don't try to compare variants of an incomplete type, before
	   TYPE_FIELDS has been copied around.  */
	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
	  return true;


	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
	  return false;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2),
		       trust_type_canonical))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
	 compatible.  This is executed only from verify_type and false
	 positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}
13475
13476 /* Verify type T. */
13477
13478 void
13479 verify_type (const_tree t)
13480 {
13481 bool error_found = false;
13482 tree mv = TYPE_MAIN_VARIANT (t);
13483 if (!mv)
13484 {
13485 error ("Main variant is not defined");
13486 error_found = true;
13487 }
13488 else if (mv != TYPE_MAIN_VARIANT (mv))
13489 {
13490 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13491 debug_tree (mv);
13492 error_found = true;
13493 }
13494 else if (t != mv && !verify_type_variant (t, mv))
13495 error_found = true;
13496
13497 tree ct = TYPE_CANONICAL (t);
13498 if (!ct)
13499 ;
13500 else if (TYPE_CANONICAL (t) != ct)
13501 {
13502 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13503 debug_tree (ct);
13504 error_found = true;
13505 }
13506 /* Method and function types can not be used to address memory and thus
13507 TYPE_CANONICAL really matters only for determining useless conversions.
13508
13509 FIXME: C++ FE produce declarations of builtin functions that are not
13510 compatible with main variants. */
13511 else if (TREE_CODE (t) == FUNCTION_TYPE)
13512 ;
13513 else if (t != ct
13514 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13515 with variably sized arrays because their sizes possibly
13516 gimplified to different variables. */
13517 && !variably_modified_type_p (ct, NULL)
13518 && !gimple_canonical_types_compatible_p (t, ct, false))
13519 {
13520 error ("TYPE_CANONICAL is not compatible");
13521 debug_tree (ct);
13522 error_found = true;
13523 }
13524
13525 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13526 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13527 {
13528 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13529 debug_tree (ct);
13530 error_found = true;
13531 }
13532 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13533 {
13534 error ("TYPE_CANONICAL of main variant is not main variant");
13535 debug_tree (ct);
13536 debug_tree (TYPE_MAIN_VARIANT (ct));
13537 error_found = true;
13538 }
13539
13540
13541 /* Check various uses of TYPE_MINVAL. */
13542 if (RECORD_OR_UNION_TYPE_P (t))
13543 {
13544 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13545 and danagle the pointer from time to time. */
13546 if (TYPE_VFIELD (t)
13547 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13548 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13549 {
13550 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13551 debug_tree (TYPE_VFIELD (t));
13552 error_found = true;
13553 }
13554 }
13555 else if (TREE_CODE (t) == POINTER_TYPE)
13556 {
13557 if (TYPE_NEXT_PTR_TO (t)
13558 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13559 {
13560 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13561 debug_tree (TYPE_NEXT_PTR_TO (t));
13562 error_found = true;
13563 }
13564 }
13565 else if (TREE_CODE (t) == REFERENCE_TYPE)
13566 {
13567 if (TYPE_NEXT_REF_TO (t)
13568 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13569 {
13570 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13571 debug_tree (TYPE_NEXT_REF_TO (t));
13572 error_found = true;
13573 }
13574 }
13575 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13576 || TREE_CODE (t) == FIXED_POINT_TYPE)
13577 {
13578 /* FIXME: The following check should pass:
13579 useless_type_conversion_p (const_cast <tree> (t),
13580 TREE_TYPE (TYPE_MIN_VALUE (t))
13581 but does not for C sizetypes in LTO. */
13582 }
13583 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13584 else if (TYPE_MINVAL (t)
13585 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13586 || in_lto_p))
13587 {
13588 error ("TYPE_MINVAL non-NULL");
13589 debug_tree (TYPE_MINVAL (t));
13590 error_found = true;
13591 }
13592
13593 /* Check various uses of TYPE_MAXVAL. */
13594 if (RECORD_OR_UNION_TYPE_P (t))
13595 {
13596 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13597 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13598 && TYPE_METHODS (t) != error_mark_node)
13599 {
13600 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13601 debug_tree (TYPE_METHODS (t));
13602 error_found = true;
13603 }
13604 }
13605 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13606 {
13607 if (TYPE_METHOD_BASETYPE (t)
13608 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13609 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13610 {
13611 error ("TYPE_METHOD_BASETYPE is not record nor union");
13612 debug_tree (TYPE_METHOD_BASETYPE (t));
13613 error_found = true;
13614 }
13615 }
13616 else if (TREE_CODE (t) == OFFSET_TYPE)
13617 {
13618 if (TYPE_OFFSET_BASETYPE (t)
13619 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13620 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13621 {
13622 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13623 debug_tree (TYPE_OFFSET_BASETYPE (t));
13624 error_found = true;
13625 }
13626 }
13627 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13628 || TREE_CODE (t) == FIXED_POINT_TYPE)
13629 {
13630 /* FIXME: The following check should pass:
13631 useless_type_conversion_p (const_cast <tree> (t),
13632 TREE_TYPE (TYPE_MAX_VALUE (t))
13633 but does not for C sizetypes in LTO. */
13634 }
13635 else if (TREE_CODE (t) == ARRAY_TYPE)
13636 {
13637 if (TYPE_ARRAY_MAX_SIZE (t)
13638 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13639 {
13640 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13641 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13642 error_found = true;
13643 }
13644 }
13645 else if (TYPE_MAXVAL (t))
13646 {
13647 error ("TYPE_MAXVAL non-NULL");
13648 debug_tree (TYPE_MAXVAL (t));
13649 error_found = true;
13650 }
13651
13652 /* Check various uses of TYPE_BINFO. */
13653 if (RECORD_OR_UNION_TYPE_P (t))
13654 {
13655 if (!TYPE_BINFO (t))
13656 ;
13657 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13658 {
13659 error ("TYPE_BINFO is not TREE_BINFO");
13660 debug_tree (TYPE_BINFO (t));
13661 error_found = true;
13662 }
13663 /* FIXME: Java builds invalid empty binfos that do not have
13664 TREE_TYPE set. */
13665 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13666 {
13667 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13668 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13669 error_found = true;
13670 }
13671 }
13672 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13673 {
13674 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13675 debug_tree (TYPE_LANG_SLOT_1 (t));
13676 error_found = true;
13677 }
13678
13679 /* Check various uses of TYPE_VALUES_RAW. */
13680 if (TREE_CODE (t) == ENUMERAL_TYPE)
13681 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13682 {
13683 tree value = TREE_VALUE (l);
13684 tree name = TREE_PURPOSE (l);
13685
13686 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13687 CONST_DECL of ENUMERAL TYPE. */
13688 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13689 {
13690 error ("Enum value is not CONST_DECL or INTEGER_CST");
13691 debug_tree (value);
13692 debug_tree (name);
13693 error_found = true;
13694 }
13695 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13696 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13697 {
13698 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13699 debug_tree (value);
13700 debug_tree (name);
13701 error_found = true;
13702 }
13703 if (TREE_CODE (name) != IDENTIFIER_NODE)
13704 {
13705 error ("Enum value name is not IDENTIFIER_NODE");
13706 debug_tree (value);
13707 debug_tree (name);
13708 error_found = true;
13709 }
13710 }
13711 else if (TREE_CODE (t) == ARRAY_TYPE)
13712 {
13713 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13714 {
13715 error ("Array TYPE_DOMAIN is not integer type");
13716 debug_tree (TYPE_DOMAIN (t));
13717 error_found = true;
13718 }
13719 }
13720 else if (RECORD_OR_UNION_TYPE_P (t))
13721 {
13722 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13723 {
13724 error ("TYPE_FIELDS defined in incomplete type");
13725 error_found = true;
13726 }
13727 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13728 {
13729 /* TODO: verify properties of decls. */
13730 if (TREE_CODE (fld) == FIELD_DECL)
13731 ;
13732 else if (TREE_CODE (fld) == TYPE_DECL)
13733 ;
13734 else if (TREE_CODE (fld) == CONST_DECL)
13735 ;
13736 else if (TREE_CODE (fld) == VAR_DECL)
13737 ;
13738 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13739 ;
13740 else if (TREE_CODE (fld) == USING_DECL)
13741 ;
13742 else
13743 {
13744 error ("Wrong tree in TYPE_FIELDS list");
13745 debug_tree (fld);
13746 error_found = true;
13747 }
13748 }
13749 }
13750 else if (TREE_CODE (t) == INTEGER_TYPE
13751 || TREE_CODE (t) == BOOLEAN_TYPE
13752 || TREE_CODE (t) == OFFSET_TYPE
13753 || TREE_CODE (t) == REFERENCE_TYPE
13754 || TREE_CODE (t) == NULLPTR_TYPE
13755 || TREE_CODE (t) == POINTER_TYPE)
13756 {
13757 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13758 {
13759 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13760 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13761 error_found = true;
13762 }
13763 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13764 {
13765 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13766 debug_tree (TYPE_CACHED_VALUES (t));
13767 error_found = true;
13768 }
13769 /* Verify just enough of cache to ensure that no one copied it to new type.
13770 All copying should go by copy_node that should clear it. */
13771 else if (TYPE_CACHED_VALUES_P (t))
13772 {
13773 int i;
13774 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13775 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13776 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13777 {
13778 error ("wrong TYPE_CACHED_VALUES entry");
13779 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13780 error_found = true;
13781 break;
13782 }
13783 }
13784 }
13785 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13786 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13787 {
13788 /* C++ FE uses TREE_PURPOSE to store initial values. */
13789 if (TREE_PURPOSE (l) && in_lto_p)
13790 {
13791 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13792 debug_tree (l);
13793 error_found = true;
13794 }
13795 if (!TYPE_P (TREE_VALUE (l)))
13796 {
13797 error ("Wrong entry in TYPE_ARG_TYPES list");
13798 debug_tree (l);
13799 error_found = true;
13800 }
13801 }
13802 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13803 {
13804 error ("TYPE_VALUES_RAW field is non-NULL");
13805 debug_tree (TYPE_VALUES_RAW (t));
13806 error_found = true;
13807 }
13808 if (TREE_CODE (t) != INTEGER_TYPE
13809 && TREE_CODE (t) != BOOLEAN_TYPE
13810 && TREE_CODE (t) != OFFSET_TYPE
13811 && TREE_CODE (t) != REFERENCE_TYPE
13812 && TREE_CODE (t) != NULLPTR_TYPE
13813 && TREE_CODE (t) != POINTER_TYPE
13814 && TYPE_CACHED_VALUES_P (t))
13815 {
13816 error ("TYPE_CACHED_VALUES_P is set while it should not");
13817 error_found = true;
13818 }
13819 if (TYPE_STRING_FLAG (t)
13820 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13821 {
13822 error ("TYPE_STRING_FLAG is set on wrong type code");
13823 error_found = true;
13824 }
13825 else if (TYPE_STRING_FLAG (t))
13826 {
13827 const_tree b = t;
13828 if (TREE_CODE (b) == ARRAY_TYPE)
13829 b = TREE_TYPE (t);
13830 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13831 that is 32bits. */
13832 if (TREE_CODE (b) != INTEGER_TYPE)
13833 {
13834 error ("TYPE_STRING_FLAG is set on type that does not look like "
13835 "char nor array of chars");
13836 error_found = true;
13837 }
13838 }
13839
13840 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13841 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13842 of a type. */
13843 if (TREE_CODE (t) == METHOD_TYPE
13844 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13845 {
13846 error ("TYPE_METHOD_BASETYPE is not main variant");
13847 error_found = true;
13848 }
13849
13850 if (error_found)
13851 {
13852 debug_tree (const_cast <tree> (t));
13853 internal_error ("verify_type failed");
13854 }
13855 }
13856
13857
13858 /* Return true if ARG is marked with the nonnull attribute in the
13859 current function signature. */
13860
13861 bool
13862 nonnull_arg_p (const_tree arg)
13863 {
13864 tree t, attrs, fntype;
13865 unsigned HOST_WIDE_INT arg_num;
13866
13867 gcc_assert (TREE_CODE (arg) == PARM_DECL
13868 && (POINTER_TYPE_P (TREE_TYPE (arg))
13869 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13870
13871 /* The static chain decl is always non null. */
13872 if (arg == cfun->static_chain_decl)
13873 return true;
13874
13875 /* THIS argument of method is always non-NULL. */
13876 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13877 && arg == DECL_ARGUMENTS (cfun->decl)
13878 && flag_delete_null_pointer_checks)
13879 return true;
13880
13881 /* Values passed by reference are always non-NULL. */
13882 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13883 && flag_delete_null_pointer_checks)
13884 return true;
13885
13886 fntype = TREE_TYPE (cfun->decl);
13887 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13888 {
13889 attrs = lookup_attribute ("nonnull", attrs);
13890
13891 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13892 if (attrs == NULL_TREE)
13893 return false;
13894
13895 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13896 if (TREE_VALUE (attrs) == NULL_TREE)
13897 return true;
13898
13899 /* Get the position number for ARG in the function signature. */
13900 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13901 t;
13902 t = DECL_CHAIN (t), arg_num++)
13903 {
13904 if (t == arg)
13905 break;
13906 }
13907
13908 gcc_assert (t == arg);
13909
13910 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13911 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13912 {
13913 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13914 return true;
13915 }
13916 }
13917
13918 return false;
13919 }
13920
13921 /* Given location LOC, strip away any packed range information
13922 or ad-hoc information. */
13923
13924 location_t
13925 get_pure_location (location_t loc)
13926 {
13927 if (IS_ADHOC_LOC (loc))
13928 loc
13929 = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;
13930
13931 if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
13932 return loc;
13933
13934 if (loc < RESERVED_LOCATION_COUNT)
13935 return loc;
13936
13937 const line_map *map = linemap_lookup (line_table, loc);
13938 const line_map_ordinary *ordmap = linemap_check_ordinary (map);
13939
13940 return loc & ~((1 << ordmap->m_range_bits) - 1);
13941 }
13942
13943 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13944 information. */
13945
13946 location_t
13947 set_block (location_t loc, tree block)
13948 {
13949 location_t pure_loc = get_pure_location (loc);
13950 source_range src_range = get_range_from_loc (line_table, loc);
13951 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13952 }
13953
13954 location_t
13955 set_source_range (tree expr, location_t start, location_t finish)
13956 {
13957 source_range src_range;
13958 src_range.m_start = start;
13959 src_range.m_finish = finish;
13960 return set_source_range (expr, src_range);
13961 }
13962
13963 location_t
13964 set_source_range (tree expr, source_range src_range)
13965 {
13966 if (!EXPR_P (expr))
13967 return UNKNOWN_LOCATION;
13968
13969 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13970 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13971 pure_loc,
13972 src_range,
13973 NULL);
13974 SET_EXPR_LOCATION (expr, adhoc);
13975 return adhoc;
13976 }
13977
13978 location_t
13979 make_location (location_t caret, location_t start, location_t finish)
13980 {
13981 location_t pure_loc = get_pure_location (caret);
13982 source_range src_range;
13983 src_range.m_start = start;
13984 src_range.m_finish = finish;
13985 location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
13986 pure_loc,
13987 src_range,
13988 NULL);
13989 return combined_loc;
13990 }
13991
13992 /* Return the name of combined function FN, for debugging purposes. */
13993
13994 const char *
13995 combined_fn_name (combined_fn fn)
13996 {
13997 if (builtin_fn_p (fn))
13998 {
13999 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14000 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14001 }
14002 else
14003 return internal_fn_name (as_internal_fn (fn));
14004 }
14005
14006 #include "gt-tree.h"