re PR middle-end/92231 (ICE in gimple_fold_stmt_to_constant_1)
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
70 #include "gimple-fold.h"
71 #include "escaped_string.h"
72
/* Tree code classes.  Expands the fourth DEFTREECODE argument (TYPE)
   for every code in all-tree.def, yielding a table indexed by tree
   code.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
84
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  Expands the LENGTH argument of each
   DEFTREECODE entry in all-tree.def.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
98
/* Names of tree components.
   Used for printing out the tree and error messages.  Expands the NAME
   argument of each DEFTREECODE entry in all-tree.def.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
110
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries (i.e. the
   order here must match enum tree_code_class).  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
128
129 /* obstack.[ch] explicitly declined to prototype this. */
130 extern int _obstack_allocated_p (struct obstack *h, void *obj);
131
132 /* Statistics-gathering stuff. */
133
134 static uint64_t tree_code_counts[MAX_TREE_CODES];
135 uint64_t tree_node_counts[(int) all_kinds];
136 uint64_t tree_node_sizes[(int) all_kinds];
137
/* Human-readable labels for the allocation-statistics buckets.
   Keep in sync with tree.h:enum tree_node_kind — the order here must
   match that enum exactly.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
157
158 /* Unique id for next decl created. */
159 static GTY(()) int next_decl_uid;
160 /* Unique id for next type created. */
161 static GTY(()) unsigned next_type_uid = 1;
162 /* Unique id for next debug decl created. Use negative numbers,
163 to catch erroneous uses. */
164 static GTY(()) int next_debug_decl_uid;
165
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;	/* Hash code TYPE was entered under.  */
  tree type;		/* The cached type itself.  */
};
173
174 /* Initial size of the hash table (rounded to next prime). */
175 #define TYPE_HASH_INITIAL_SIZE 1000
176
/* Hasher for the GC-aware type cache.  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  /* Reuse the hash code remembered at insertion time.  */
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Keep an entry across a collection only while its type is still
     live (marked by the garbage collector).  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
188
189 /* Now here is the hash table. When recording a type, it is added to
190 the slot whose index is the hash code. Note that the hash table is
191 used for several kinds of types (function types, array types and
192 array index range types, for now). While all these live in the
193 same table, they are completely independent, and the hash code is
194 computed differently for each of these. */
195
196 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
197
198 /* Hash table and temporary node for larger integer const values. */
199 static GTY (()) tree int_cst_node;
200
/* Hasher for the shared INTEGER_CST cache; hash and equal are defined
   out of line.  */
struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};
206
207 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
208
/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  Lookups compare an existing node against a
   (type, value) pair, hence the pair compare_type.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};
217
218 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
219
220 /* Hash table for optimization flags and target option flags. Use the same
221 hash table for both sets of options. Nodes for building the current
222 optimization and target option nodes. The assumption is most of the time
223 the options created will already be in the hash table, so we avoid
224 allocating and freeing up a node repeatably. */
225 static GTY (()) tree cl_optimization_node;
226 static GTY (()) tree cl_target_option_node;
227
/* Hasher for the shared optimization/target-option node cache; hash
   and equal are defined out of line.  */
struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};
233
234 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
235
236 /* General tree->tree mapping structure for use in hash tables. */
237
238
239 static GTY ((cache))
240 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
241
242 static GTY ((cache))
243 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
244
/* Hasher for decl -> vector-of-trees maps (e.g. debug args), keyed on
   the source decl.  */
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  /* Hash on the DECL_UID of the decl the vector is mapped from.  */
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  /* Two entries are equal iff they map from the same decl.  */
  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  /* Retain a cache entry across GC only while its key decl is live.  */
  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};
261
262 static GTY ((cache))
263 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
264
265 static void set_type_quals (tree, int);
266 static void print_type_hash_statistics (void);
267 static void print_debug_expr_statistics (void);
268 static void print_value_expr_statistics (void);
269
270 static tree build_array_type_1 (tree, tree, bool, bool);
271
272 tree global_trees[TI_MAX];
273 tree integer_types[itk_none];
274
275 bool int_n_enabled_p[NUM_INT_N_ENTS];
276 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
277
278 bool tree_contains_struct[MAX_TREE_CODES][64];
279
/* Number of operands for each OpenMP clause.  Indexed by clause code;
   the entry order must match enum omp_clause_code (see the per-entry
   comments) and stay in sync with omp_clause_code_name below.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  2, /* OMP_CLAUSE__GRIDDIM_  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
};
365
/* Keyword names for each OpenMP clause, in the same entry order as
   omp_clause_num_ops above.  Note that "to" appears twice:
   OMP_CLAUSE_TO_DECLARE (declare target to) and OMP_CLAUSE_TO both
   print as "to".  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "nontemporal",
  "uniform",
  "to",
  "link",
  "from",
  "to",
  "map",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "_griddim_",
  "if_present",
  "finalize",
};
450
451
/* Return the tree node structure used by tree code CODE.  Dispatches
   first on the code's class; tcc_constant and tcc_exceptional codes
   are then mapped individually.  Aborts on codes with no mapping.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      /* Each known decl code has its own TS structure; anything else
	 (including language-specific decls) uses the generic
	 non-common decl layout.  */
      switch (code)
	{
	case CONST_DECL: return TS_CONST_DECL;
	case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
	case FIELD_DECL: return TS_FIELD_DECL;
	case FUNCTION_DECL: return TS_FUNCTION_DECL;
	case LABEL_DECL: return TS_LABEL_DECL;
	case PARM_DECL: return TS_PARM_DECL;
	case RESULT_DECL: return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL: return TS_TYPE_DECL;
	case VAR_DECL: return TS_VAR_DECL;
	default: return TS_DECL_NON_COMMON;
	}

    case tcc_type: return TS_TYPE_NON_COMMON;

    /* All expression-like classes share the TS_EXP layout.  */
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
    case tcc_unary:
    case tcc_vl_exp: return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      break;
    }

  switch (code)
    {
      /* tcc_constant cases.  */
    case COMPLEX_CST: return TS_COMPLEX;
    case FIXED_CST: return TS_FIXED_CST;
    case INTEGER_CST: return TS_INT_CST;
    case POLY_INT_CST: return TS_POLY_INT_CST;
    case REAL_CST: return TS_REAL_CST;
    case STRING_CST: return TS_STRING;
    case VECTOR_CST: return TS_VECTOR;
    case VOID_CST: return TS_TYPED;

      /* tcc_exceptional cases.  */
    case BLOCK: return TS_BLOCK;
    case CONSTRUCTOR: return TS_CONSTRUCTOR;
    case ERROR_MARK: return TS_COMMON;
    case IDENTIFIER_NODE: return TS_IDENTIFIER;
    case OMP_CLAUSE: return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
    case PLACEHOLDER_EXPR: return TS_COMMON;
    case SSA_NAME: return TS_SSA_NAME;
    case STATEMENT_LIST: return TS_STATEMENT_LIST;
    case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
    case TREE_BINFO: return TS_BINFO;
    case TREE_LIST: return TS_LIST;
    case TREE_VEC: return TS_VEC;

    default:
      gcc_unreachable ();
    }
}
520
521
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  For every tree code, mark the TS structure it uses and then
   (via the MARK_TS_* macros) every structure that one derives from,
   so CODE_CONTAINS_STRUCT queries work for the whole chain.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  Each case
	 marks the immediate base; the MARK_TS_* macros recurse up the
	 hierarchy from there.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
668
669
/* Init tree.c: create the various GC-rooted caches and the
   tree_contains_struct table.  Must run before any tree nodes are
   built.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* Temporary node used when looking up larger integer constants.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  /* Scratch nodes used when building option nodes before consulting
     the cache.  */
  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
700
701 \f
702 /* The name of the object as the assembler will see it (but before any
703 translations made by ASM_OUTPUT_LABELREF). Often this is the same
704 as DECL_NAME. It is an IDENTIFIER_NODE. */
705 tree
706 decl_assembler_name (tree decl)
707 {
708 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
709 lang_hooks.set_decl_assembler_name (decl);
710 return DECL_ASSEMBLER_NAME_RAW (decl);
711 }
712
713 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
714 (either of which may be NULL). Inform the FE, if this changes the
715 name. */
716
717 void
718 overwrite_decl_assembler_name (tree decl, tree name)
719 {
720 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
721 lang_hooks.overwrite_decl_assembler_name (decl, name);
722 }
723
724 /* When the target supports COMDAT groups, this indicates which group the
725 DECL is associated with. This can be either an IDENTIFIER_NODE or a
726 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
727 tree
728 decl_comdat_group (const_tree node)
729 {
730 struct symtab_node *snode = symtab_node::get (node);
731 if (!snode)
732 return NULL;
733 return snode->get_comdat_group ();
734 }
735
736 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
737 tree
738 decl_comdat_group_id (const_tree node)
739 {
740 struct symtab_node *snode = symtab_node::get (node);
741 if (!snode)
742 return NULL;
743 return snode->get_comdat_group_id ();
744 }
745
/* When the target supports named sections, return the name of the
   section NODE is placed in (as a plain string, not an
   IDENTIFIER_NODE), or NULL if it is in no section or has no symtab
   entry.  */
const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}
756
/* Set section name of NODE to VALUE (a plain string, not an
   identifier node).  A NULL VALUE clears the section on an existing
   symtab node; if NODE has no symtab node yet there is nothing to
   clear, so return without creating one.  */
void
set_decl_section_name (tree node, const char *value)
{
  struct symtab_node *snode;

  if (value == NULL)
    {
      snode = symtab_node::get (node);
      if (!snode)
	return;
    }
  else if (VAR_P (node))
    snode = varpool_node::get_create (node);
  else
    snode = cgraph_node::get_create (node);
  snode->set_section (value);
}
776
777 /* Return TLS model of a variable NODE. */
778 enum tls_model
779 decl_tls_model (const_tree node)
780 {
781 struct varpool_node *snode = varpool_node::get (node);
782 if (!snode)
783 return TLS_MODEL_NONE;
784 return snode->tls_model;
785 }
786
787 /* Set TLS model of variable NODE to MODEL. */
788 void
789 set_decl_tls_model (tree node, enum tls_model model)
790 {
791 struct varpool_node *vnode;
792
793 if (model == TLS_MODEL_NONE)
794 {
795 vnode = varpool_node::get (node);
796 if (!vnode)
797 return;
798 }
799 else
800 vnode = varpool_node::get_create (node);
801 vnode->tls_model = model;
802 }
803
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR (those
   trip gcc_unreachable below; use tree_size for an existing node).
   Codes at or beyond NUM_TREE_CODES are language-specific and sized
   by the frontend's tree_size hook.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL: return sizeof (tree_field_decl);
	case PARM_DECL: return sizeof (tree_parm_decl);
	case VAR_DECL: return sizeof (tree_var_decl);
	case LABEL_DECL: return sizeof (tree_label_decl);
	case RESULT_DECL: return sizeof (tree_result_decl);
	case CONST_DECL: return sizeof (tree_const_decl);
	case TYPE_DECL: return sizeof (tree_type_decl);
	case FUNCTION_DECL: return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL: return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE: return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* One operand is embedded in tree_exp; add space for the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST: return sizeof (tree_typed);
	case INTEGER_CST: gcc_unreachable ();
	case POLY_INT_CST: return sizeof (tree_poly_int_cst);
	case REAL_CST: return sizeof (tree_real_cst);
	case FIXED_CST: return sizeof (tree_fixed_cst);
	case COMPLEX_CST: return sizeof (tree_complex);
	case VECTOR_CST: gcc_unreachable ();
	case STRING_CST: gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE: return lang_hooks.identifier_size;
	case TREE_LIST: return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR: return sizeof (tree_common);

	case TREE_VEC: gcc_unreachable ();
	case OMP_CLAUSE: gcc_unreachable ();

	case SSA_NAME: return sizeof (tree_ssa_name);

	case STATEMENT_LIST: return sizeof (tree_statement_list);
	case BLOCK: return sizeof (struct tree_block);
	case CONSTRUCTOR: return sizeof (tree_constructor);
	case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
914
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes,
   which are measured from the node's own length fields.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* One HOST_WIDE_INT is embedded in tree_int_cst; add the rest.  */
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      /* Only the encoded elements are stored.  */
      return (sizeof (struct tree_vector)
	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));

    case STRING_CST:
      /* The +1 leaves room for a byte beyond TREE_STRING_LENGTH
	 (presumably the terminating NUL — see the str member of
	 tree_string).  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
		* sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
956
/* Return tree node kind (the statistics bucket, see tree.h:enum
   tree_node_kind) based on tree CODE.  */

static tree_node_kind
get_stats_node_kind (enum tree_code code)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      return d_kind;
    case tcc_type:  /* a type node */
      return t_kind;
    case tcc_statement:  /* an expression with side effects */
      return s_kind;
    case tcc_reference:  /* a reference */
      return r_kind;
    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:  /* a unary arithmetic expression */
    case tcc_binary:  /* a binary arithmetic expression */
      return e_kind;
    case tcc_constant:  /* a constant */
      return c_kind;
    case tcc_exceptional:  /* something random, like an identifier.  */
      /* Several exceptional codes get their own bucket; the rest fall
	 into the catch-all x_kind.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  return id_kind;
	case TREE_VEC:
	  return vec_kind;
	case TREE_BINFO:
	  return binfo_kind;
	case SSA_NAME:
	  return ssa_name_kind;
	case BLOCK:
	  return b_kind;
	case CONSTRUCTOR:
	  return constr_kind;
	case OMP_CLAUSE:
	  return omp_clause_kind;
	default:
	  return x_kind;
	}
      break;
    case tcc_vl_exp:
      return e_kind;
    default:
      gcc_unreachable ();
    }
}
1008
1009 /* Record interesting allocation statistics for a tree node with CODE
1010 and LENGTH. */
1011
1012 static void
1013 record_node_allocation_statistics (enum tree_code code, size_t length)
1014 {
1015 if (!GATHER_STATISTICS)
1016 return;
1017
1018 tree_node_kind kind = get_stats_node_kind (code);
1019
1020 tree_code_counts[(int) code]++;
1021 tree_node_counts[(int) kind]++;
1022 tree_node_sizes[(int) kind] += length;
1023 }
1024
1025 /* Allocate and return a new UID from the DECL_UID namespace. */
1026
1027 int
1028 allocate_decl_uid (void)
1029 {
1030 return next_decl_uid++;
1031 }
1032
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* GGC hands back zeroed storage, so all fields start out cleared.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	/* Debug decls draw from a separate, negative UID namespace to
	   catch erroneous uses.  */
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      /* Option nodes own separately allocated option structures.  */
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1148
1149 /* Free tree node. */
1150
void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      enum tree_node_kind kind = get_stats_node_kind (code);

      /* Undo what record_node_allocation_statistics counted when the
	 node was allocated; the counters must not underflow.  */
      gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
      gcc_checking_assert (tree_node_counts[(int) kind] != 0);
      gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));

      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) kind]--;
      tree_node_sizes[(int) kind] -= tree_size (node);
    }
  /* Release out-of-line vectors owned by the node before freeing the
     node itself.  */
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  ggc_free (node);
}
1175 \f
1176 /* Return a new node with the same contents as NODE except that its
1177 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1178
tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  /* The copy starts life unchained and without the assembler-written /
     visited marks of the original.  */
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* The copy gets a fresh UID; debug decls use a separate,
	 descending counter.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    /* Preserve the original's points-to UID.  */
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* DECL_VALUE_EXPR lives in a side table keyed by the decl, so
	     it must be re-registered for the copy.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitely by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy is not associated with the original's function
	     body or symbol-table entry.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the out-of-line option blob so the copy can be
	 modified independently of the original.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1264
1265 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1266 For example, this can copy a list made of TREE_LIST nodes. */
1267
1268 tree
1269 copy_list (tree list)
1270 {
1271 tree head;
1272 tree prev, next;
1273
1274 if (list == 0)
1275 return 0;
1276
1277 head = prev = copy_node (list);
1278 next = TREE_CHAIN (list);
1279 while (next)
1280 {
1281 TREE_CHAIN (prev) = copy_node (next);
1282 prev = TREE_CHAIN (prev);
1283 next = TREE_CHAIN (next);
1284 }
1285 return head;
1286 }
1287
1288 \f
1289 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1290 INTEGER_CST with value CST and type TYPE. */
1291
1292 static unsigned int
1293 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1294 {
1295 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1296 /* We need extra HWIs if CST is an unsigned integer with its
1297 upper bit set. */
1298 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1299 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1300 return cst.get_len ();
1301 }
1302
1303 /* Return a new INTEGER_CST with value CST and type TYPE. */
1304
static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Unsigned constant with its top bit set (see
	 get_int_cst_ext_nunits): fill the EXT_LEN - LEN extra
	 elements, masking the topmost one to the bits left over
	 beyond a whole number of HOST_WIDE_INTs.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Zero-extend the partial top element of an unsigned constant;
	 the remaining elements are copied verbatim below.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1334
1335 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1336
1337 static tree
1338 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1339 CXX_MEM_STAT_INFO)
1340 {
1341 size_t length = sizeof (struct tree_poly_int_cst);
1342 record_node_allocation_statistics (POLY_INT_CST, length);
1343
1344 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1345
1346 TREE_SET_CODE (t, POLY_INT_CST);
1347 TREE_CONSTANT (t) = 1;
1348 TREE_TYPE (t) = type;
1349 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1350 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1351 return t;
1352 }
1353
1354 /* Create a constant tree that contains CST sign-extended to TYPE. */
1355
1356 tree
1357 build_int_cst (tree type, poly_int64 cst)
1358 {
1359 /* Support legacy code. */
1360 if (!type)
1361 type = integer_type_node;
1362
1363 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1364 }
1365
1366 /* Create a constant tree that contains CST zero-extended to TYPE. */
1367
1368 tree
1369 build_int_cstu (tree type, poly_uint64 cst)
1370 {
1371 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1372 }
1373
1374 /* Create a constant tree that contains CST sign-extended to TYPE. */
1375
1376 tree
1377 build_int_cst_type (tree type, poly_int64 cst)
1378 {
1379 gcc_assert (type);
1380 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1381 }
1382
1383 /* Constructs tree in type TYPE from with value given by CST. Signedness
1384 of CST is assumed to be the same as the signedness of TYPE. */
1385
1386 tree
1387 double_int_to_tree (tree type, double_int cst)
1388 {
1389 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1390 }
1391
1392 /* We force the wide_int CST to the range of the type TYPE by sign or
1393 zero extending it. OVERFLOWABLE indicates if we are interested in
1394 overflow of the value, when >0 we are only interested in signed
1395 overflow, for <0 we are interested in any overflow. OVERFLOWED
1396 indicates whether overflow has already occurred. CONST_OVERFLOWED
1397 indicates whether constant overflow has already occurred. We force
1398 T's value to be within range of T's type (by setting to 0 or 1 all
1399 the bits outside the type's range). We set TREE_OVERFLOWED if,
1400 OVERFLOWED is nonzero,
1401 or OVERFLOWABLE is >0 and signed overflow occurs
1402 or OVERFLOWABLE is <0 and any overflow occurs
1403 We return a new tree node for the extended wide_int. The node
1404 is shared if no overflow flags are set. */
1405
1406
tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to the type's precision before
	     building the node.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* For a polynomial constant, each coefficient gets the
		 overflow flag as well as the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1443
1444 /* These are the hash table functions for the hash table of INTEGER_CST
1445 nodes of a sizetype. */
1446
1447 /* Return the hash code X, an INTEGER_CST. */
1448
1449 hashval_t
1450 int_cst_hasher::hash (tree x)
1451 {
1452 const_tree const t = x;
1453 hashval_t code = TYPE_UID (TREE_TYPE (t));
1454 int i;
1455
1456 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1457 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1458
1459 return code;
1460 }
1461
1462 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1463 is the same as that given by *Y, which is the same. */
1464
1465 bool
1466 int_cst_hasher::equal (tree x, tree y)
1467 {
1468 const_tree const xt = x;
1469 const_tree const yt = y;
1470
1471 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1472 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1473 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1474 return false;
1475
1476 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1477 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1478 return false;
1479
1480 return true;
1481 }
1482
1483 /* Create an INT_CST node of TYPE and value CST.
1484 The returned node is always shared. For small integers we use a
1485 per-type vector cache, for larger ones we use a single hash table.
1486 The value is extended from its precision according to the sign of
1487 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1488 the upper bits and ensures that hashing and value equality based
1489 upon the underlying HOST_WIDE_INTs works without masking. */
1490
static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether this value is eligible for the per-type vector
	 of small shared constants, and at which index (IX < 0 means
	 not eligible; LIMIT is the vector size).  */
      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = INTEGER_SHARE_LIMIT;
	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N); index 0 holds -1.  */
	      limit = INTEGER_SHARE_LIMIT + 1;
	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal constant is already shared; discard the fresh
	   duplicate.  */
	ggc_free (nt);
    }

  return t;
}
1637
1638 hashval_t
1639 poly_int_cst_hasher::hash (tree t)
1640 {
1641 inchash::hash hstate;
1642
1643 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1644 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1645 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1646
1647 return hstate.end ();
1648 }
1649
1650 bool
1651 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1652 {
1653 if (TREE_TYPE (x) != y.first)
1654 return false;
1655 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1656 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1657 return false;
1658 return true;
1659 }
1660
1661 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1662 The elements must also have type TYPE. */
1663
tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  /* Canonicalize the coefficients to the type's precision before
     hashing, so equal values hash and compare equal.  */
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							     INSERT);
  /* Only build a new node when no equal one is already shared.  */
  if (*slot == NULL_TREE)
    {
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
1687
1688 /* Create a constant tree with value VALUE in type TYPE. */
1689
1690 tree
1691 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1692 {
1693 if (value.is_constant ())
1694 return wide_int_to_tree_1 (type, value.coeffs[0]);
1695 return build_poly_int_cst (type, value);
1696 }
1697
void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* Mirror the eligibility rules of wide_int_to_tree_1: decide whether
     T belongs in the per-type vector of small shared constants (IX)
     or in the global hash table.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = INTEGER_SHARE_LIMIT;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N */
	  limit = INTEGER_SHARE_LIMIT + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      /* The slot must not already hold a different shared constant.  */
      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}
1801
1802
1803 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1804 and the rest are zeros. */
1805
1806 tree
1807 build_low_bits_mask (tree type, unsigned bits)
1808 {
1809 gcc_assert (bits <= TYPE_PRECISION (type));
1810
1811 return wide_int_to_tree (type, wi::mask (bits, false,
1812 TYPE_PRECISION (type)));
1813 }
1814
1815 /* Checks that X is integer constant that can be expressed in (unsigned)
1816 HOST_WIDE_INT without loss of precision. */
1817
1818 bool
1819 cst_and_fits_in_hwi (const_tree x)
1820 {
1821 return (TREE_CODE (x) == INTEGER_CST
1822 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1823 }
1824
1825 /* Build a newly constructed VECTOR_CST with the given values of
1826 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1827
1828 tree
1829 make_vector (unsigned log2_npatterns,
1830 unsigned int nelts_per_pattern MEM_STAT_DECL)
1831 {
1832 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1833 tree t;
1834 unsigned npatterns = 1 << log2_npatterns;
1835 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1836 unsigned length = (sizeof (struct tree_vector)
1837 + (encoded_nelts - 1) * sizeof (tree));
1838
1839 record_node_allocation_statistics (VECTOR_CST, length);
1840
1841 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1842
1843 TREE_SET_CODE (t, VECTOR_CST);
1844 TREE_CONSTANT (t) = 1;
1845 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1846 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1847
1848 return t;
1849 }
1850
1851 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1852 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1853
1854 tree
1855 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1856 {
1857 unsigned HOST_WIDE_INT idx, nelts;
1858 tree value;
1859
1860 /* We can't construct a VECTOR_CST for a variable number of elements. */
1861 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1862 tree_vector_builder vec (type, nelts, 1);
1863 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1864 {
1865 if (TREE_CODE (value) == VECTOR_CST)
1866 {
1867 /* If NELTS is constant then this must be too. */
1868 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1869 for (unsigned i = 0; i < sub_nelts; ++i)
1870 vec.quick_push (VECTOR_CST_ELT (value, i));
1871 }
1872 else
1873 vec.quick_push (value);
1874 }
1875 while (vec.length () < nelts)
1876 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1877
1878 return vec.build ();
1879 }
1880
1881 /* Build a vector of type VECTYPE where all the elements are SCs. */
1882 tree
1883 build_vector_from_val (tree vectype, tree sc)
1884 {
1885 unsigned HOST_WIDE_INT i, nunits;
1886
1887 if (sc == error_mark_node)
1888 return sc;
1889
1890 /* Verify that the vector type is suitable for SC. Note that there
1891 is some inconsistency in the type-system with respect to restrict
1892 qualifications of pointers. Vector types always have a main-variant
1893 element type and the qualification is applied to the vector-type.
1894 So TREE_TYPE (vector-type) does not return a properly qualified
1895 vector element-type. */
1896 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1897 TREE_TYPE (vectype)));
1898
1899 if (CONSTANT_CLASS_P (sc))
1900 {
1901 tree_vector_builder v (vectype, 1, 1);
1902 v.quick_push (sc);
1903 return v.build ();
1904 }
1905 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1906 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1907 else
1908 {
1909 vec<constructor_elt, va_gc> *v;
1910 vec_alloc (v, nunits);
1911 for (i = 0; i < nunits; ++i)
1912 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1913 return build_constructor (vectype, v);
1914 }
1915 }
1916
1917 /* If TYPE is not a vector type, just return SC, otherwise return
1918 build_vector_from_val (TYPE, SC). */
1919
1920 tree
1921 build_uniform_cst (tree type, tree sc)
1922 {
1923 if (!VECTOR_TYPE_P (type))
1924 return sc;
1925
1926 return build_vector_from_val (type, sc);
1927 }
1928
1929 /* Build a vector series of type TYPE in which element I has the value
1930 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1931 and a VEC_SERIES_EXPR otherwise. */
1932
1933 tree
1934 build_vec_series (tree type, tree base, tree step)
1935 {
1936 if (integer_zerop (step))
1937 return build_vector_from_val (type, base);
1938 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1939 {
1940 tree_vector_builder builder (type, 1, 3);
1941 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1942 wi::to_wide (base) + wi::to_wide (step));
1943 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1944 wi::to_wide (elt1) + wi::to_wide (step));
1945 builder.quick_push (base);
1946 builder.quick_push (elt1);
1947 builder.quick_push (elt2);
1948 return builder.build ();
1949 }
1950 return build2 (VEC_SERIES_EXPR, type, base, step);
1951 }
1952
1953 /* Return a vector with the same number of units and number of bits
1954 as VEC_TYPE, but in which the elements are a linear series of unsigned
1955 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1956
1957 tree
1958 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1959 {
1960 tree index_vec_type = vec_type;
1961 tree index_elt_type = TREE_TYPE (vec_type);
1962 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1963 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1964 {
1965 index_elt_type = build_nonstandard_integer_type
1966 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1967 index_vec_type = build_vector_type (index_elt_type, nunits);
1968 }
1969
1970 tree_vector_builder v (index_vec_type, 1, 3);
1971 for (unsigned int i = 0; i < 3; ++i)
1972 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1973 return v.build ();
1974 }
1975
1976 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
1977 elements are A and the rest are B. */
1978
tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case: with an even, fully-constant element
     count, half as many two-element patterns suffice.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  /* COUNT patterns of two elements each cover COUNT * 2 encoded
     elements: A for the first NUM_A positions, B afterwards.  */
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
1992
1993 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1994 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1995
1996 void
1997 recompute_constructor_flags (tree c)
1998 {
1999 unsigned int i;
2000 tree val;
2001 bool constant_p = true;
2002 bool side_effects_p = false;
2003 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2004
2005 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2006 {
2007 /* Mostly ctors will have elts that don't have side-effects, so
2008 the usual case is to scan all the elements. Hence a single
2009 loop for both const and side effects, rather than one loop
2010 each (with early outs). */
2011 if (!TREE_CONSTANT (val))
2012 constant_p = false;
2013 if (TREE_SIDE_EFFECTS (val))
2014 side_effects_p = true;
2015 }
2016
2017 TREE_SIDE_EFFECTS (c) = side_effects_p;
2018 TREE_CONSTANT (c) = constant_p;
2019 }
2020
2021 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2022 CONSTRUCTOR C. */
2023
2024 void
2025 verify_constructor_flags (tree c)
2026 {
2027 unsigned int i;
2028 tree val;
2029 bool constant_p = TREE_CONSTANT (c);
2030 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2031 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2032
2033 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2034 {
2035 if (constant_p && !TREE_CONSTANT (val))
2036 internal_error ("non-constant element in constant CONSTRUCTOR");
2037 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2038 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2039 }
2040 }
2041
2042 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2043 are in the vec pointed to by VALS. */
2044 tree
2045 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2046 {
2047 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2048
2049 TREE_TYPE (c) = type;
2050 CONSTRUCTOR_ELTS (c) = vals;
2051
2052 recompute_constructor_flags (c);
2053
2054 return c;
2055 }
2056
2057 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2058 INDEX and VALUE. */
2059 tree
2060 build_constructor_single (tree type, tree index, tree value)
2061 {
2062 vec<constructor_elt, va_gc> *v;
2063 constructor_elt elt = {index, value};
2064
2065 vec_alloc (v, 1);
2066 v->quick_push (elt);
2067
2068 return build_constructor (type, v);
2069 }
2070
2071
2072 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2073 are in a list pointed to by VALS. */
2074 tree
2075 build_constructor_from_list (tree type, tree vals)
2076 {
2077 tree t;
2078 vec<constructor_elt, va_gc> *v = NULL;
2079
2080 if (vals)
2081 {
2082 vec_alloc (v, list_length (vals));
2083 for (t = vals; t; t = TREE_CHAIN (t))
2084 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2085 }
2086
2087 return build_constructor (type, v);
2088 }
2089
2090 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2091 of elements, provided as index/value pairs. */
2092
2093 tree
2094 build_constructor_va (tree type, int nelts, ...)
2095 {
2096 vec<constructor_elt, va_gc> *v = NULL;
2097 va_list p;
2098
2099 va_start (p, nelts);
2100 vec_alloc (v, nelts);
2101 while (nelts--)
2102 {
2103 tree index = va_arg (p, tree);
2104 tree value = va_arg (p, tree);
2105 CONSTRUCTOR_APPEND_ELT (v, index, value);
2106 }
2107 va_end (p);
2108 return build_constructor (type, v);
2109 }
2110
2111 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2112
2113 tree
2114 build_clobber (tree type)
2115 {
2116 tree clobber = build_constructor (type, NULL);
2117 TREE_THIS_VOLATILE (clobber) = true;
2118 return clobber;
2119 }
2120
2121 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2122
2123 tree
2124 build_fixed (tree type, FIXED_VALUE_TYPE f)
2125 {
2126 tree v;
2127 FIXED_VALUE_TYPE *fp;
2128
2129 v = make_node (FIXED_CST);
2130 fp = ggc_alloc<fixed_value> ();
2131 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2132
2133 TREE_TYPE (v) = type;
2134 TREE_FIXED_CST_PTR (v) = fp;
2135 return v;
2136 }
2137
2138 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2139
2140 tree
2141 build_real (tree type, REAL_VALUE_TYPE d)
2142 {
2143 tree v;
2144 REAL_VALUE_TYPE *dp;
2145 int overflow = 0;
2146
2147 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2148 Consider doing it via real_convert now. */
2149
2150 v = make_node (REAL_CST);
2151 dp = ggc_alloc<real_value> ();
2152 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2153
2154 TREE_TYPE (v) = type;
2155 TREE_REAL_CST_PTR (v) = dp;
2156 TREE_OVERFLOW (v) = overflow;
2157 return v;
2158 }
2159
2160 /* Like build_real, but first truncate D to the type. */
2161
2162 tree
2163 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2164 {
2165 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2166 }
2167
2168 /* Return a new REAL_CST node whose type is TYPE
2169 and whose value is the integer value of the INTEGER_CST node I. */
2170
2171 REAL_VALUE_TYPE
2172 real_value_from_int_cst (const_tree type, const_tree i)
2173 {
2174 REAL_VALUE_TYPE d;
2175
2176 /* Clear all bits of the real value type so that we can later do
2177 bitwise comparisons to see if two values are the same. */
2178 memset (&d, 0, sizeof d);
2179
2180 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2181 TYPE_SIGN (TREE_TYPE (i)));
2182 return d;
2183 }
2184
2185 /* Given a tree representing an integer constant I, return a tree
2186 representing the same value as a floating-point constant of type TYPE. */
2187
2188 tree
2189 build_real_from_int_cst (tree type, const_tree i)
2190 {
2191 tree v;
2192 int overflow = TREE_OVERFLOW (i);
2193
2194 v = build_real (type, real_value_from_int_cst (type, i));
2195
2196 TREE_OVERFLOW (v) |= overflow;
2197 return v;
2198 }
2199
2200 /* Return a newly constructed STRING_CST node whose value is
2201 the LEN characters at STR.
2202 Note that for a C string literal, LEN should include the trailing NUL.
2203 The TREE_TYPE is not initialized. */
2204
tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.  */
  /* +1 for the NUL appended below.  NOTE(review): LEN is assumed
     non-negative; a negative LEN would wrap LENGTH -- confirm callers
     never pass one.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  s = (tree) ggc_internal_alloc (length);

  /* Clear only the tree_typed header; the string payload is
     overwritten just below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  /* Always NUL-terminate, even if STR was not.  */
  s->string.str[len] = '\0';

  return s;
}
2227
2228 /* Return a newly constructed COMPLEX_CST node whose value is
2229 specified by the real and imaginary parts REAL and IMAG.
2230 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2231 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2232
2233 tree
2234 build_complex (tree type, tree real, tree imag)
2235 {
2236 gcc_assert (CONSTANT_CLASS_P (real));
2237 gcc_assert (CONSTANT_CLASS_P (imag));
2238
2239 tree t = make_node (COMPLEX_CST);
2240
2241 TREE_REALPART (t) = real;
2242 TREE_IMAGPART (t) = imag;
2243 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2244 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2245 return t;
2246 }
2247
2248 /* Build a complex (inf +- 0i), such as for the result of cproj.
2249 TYPE is the complex tree type of the result. If NEG is true, the
2250 imaginary zero is negative. */
2251
2252 tree
2253 build_complex_inf (tree type, bool neg)
2254 {
2255 REAL_VALUE_TYPE rinf, rzero = dconst0;
2256
2257 real_inf (&rinf);
2258 rzero.sign = neg;
2259 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2260 build_real (TREE_TYPE (type), rzero));
2261 }
2262
2263 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2264 element is set to 1. In particular, this is 1 + i for complex types. */
2265
2266 tree
2267 build_each_one_cst (tree type)
2268 {
2269 if (TREE_CODE (type) == COMPLEX_TYPE)
2270 {
2271 tree scalar = build_one_cst (TREE_TYPE (type));
2272 return build_complex (type, scalar, scalar);
2273 }
2274 else
2275 return build_one_cst (type);
2276 }
2277
2278 /* Return a constant of arithmetic type TYPE which is the
2279 multiplicative identity of the set TYPE. */
2280
tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Every element of the vector is one.  */
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The multiplicative identity of the complex numbers is 1 + 0i.  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2315
2316 /* Return an integer of type TYPE containing all 1's in as much precision as
2317 it contains, or a complex or vector whose subparts are such integers. */
2318
2319 tree
2320 build_all_ones_cst (tree type)
2321 {
2322 if (TREE_CODE (type) == COMPLEX_TYPE)
2323 {
2324 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2325 return build_complex (type, scalar, scalar);
2326 }
2327 else
2328 return build_minus_one_cst (type);
2329 }
2330
2331 /* Return a constant of arithmetic type TYPE which is the
2332 opposite of the multiplicative identity of the set TYPE. */
2333
tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate -1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Every element of the vector is minus one.  */
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* -1 as a complex number is -1 + 0i.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2370
2371 /* Build 0 constant of type TYPE. This is used by constructor folding
2372 and thus the constant should be represented in memory by
2373 zero(es). */
2374
tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      /* +0.0, which is all-zero bits in binary formats.  */
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* Aggregates are zeroed with an empty CONSTRUCTOR; any other
	 scalar type is converted from integer zero.  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
2411
2412
2413 /* Build a BINFO with LEN language slots. */
2414
tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The base-binfo vector is allocated inline at the tail of the node,
     sized for BASE_BINFOS elements.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Clear only the fixed header; embedded_init sets up the vector.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2434
2435 /* Create a CASE_LABEL_EXPR tree node and return it. */
2436
2437 tree
2438 build_case_label (tree low_value, tree high_value, tree label_decl)
2439 {
2440 tree t = make_node (CASE_LABEL_EXPR);
2441
2442 TREE_TYPE (t) = void_type_node;
2443 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2444
2445 CASE_LOW (t) = low_value;
2446 CASE_HIGH (t) = high_value;
2447 CASE_LABEL (t) = label_decl;
2448 CASE_CHAIN (t) = NULL_TREE;
2449
2450 return t;
2451 }
2452
2453 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2454 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2455 The latter determines the length of the HOST_WIDE_INT vector. */
2456
tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* One HOST_WIDE_INT element is already part of struct tree_int_cst.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2484
2485 /* Build a newly constructed TREE_VEC node of length LEN. */
2486
tree
make_tree_vec (int len MEM_STAT_DECL)
{
  tree t;
  /* One tree slot is already part of struct tree_vec.  */
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length);

  /* Cleared allocation: all LEN slots start as NULL_TREE.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, TREE_VEC);
  TREE_VEC_LENGTH (t) = len;

  return t;
}
2502
2503 /* Grow a TREE_VEC node to new length LEN. */
2504
tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  /* Only growing is supported.  */
  gcc_assert (len > oldlen);

  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Account only for the size delta in the statistics.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  /* ggc_realloc may move the node; callers must use the returned
     pointer, not the original V.  */
  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
2524 \f
2525 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2526 fixed, and scalar, complex or vector. */
2527
2528 bool
2529 zerop (const_tree expr)
2530 {
2531 return (integer_zerop (expr)
2532 || real_zerop (expr)
2533 || fixed_zerop (expr));
2534 }
2535
2536 /* Return 1 if EXPR is the integer constant zero or a complex constant
2537 of zero, or a location wrapper for such a constant. */
2538
bool
integer_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::to_wide (expr) == 0;
    case COMPLEX_CST:
      /* Both parts must be zero.  */
      return (integer_zerop (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      /* A zero vector is encoded as a single duplicated zero element.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2559
2560 /* Return 1 if EXPR is the integer constant one or the corresponding
2561 complex constant, or a location wrapper for such a constant. */
2562
bool
integer_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      /* to_widest sign-extends, so this works for any precision,
	 including a signed 1-bit type where 1 is represented as -1.  */
      return wi::eq_p (wi::to_widest (expr), 1);
    case COMPLEX_CST:
      /* 1 as a complex number is 1 + 0i.  */
      return (integer_onep (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2583
2584 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2585 return 1 if every piece is the integer constant one.
2586 Also return 1 for location wrappers for such a constant. */
2587
2588 bool
2589 integer_each_onep (const_tree expr)
2590 {
2591 STRIP_ANY_LOCATION_WRAPPER (expr);
2592
2593 if (TREE_CODE (expr) == COMPLEX_CST)
2594 return (integer_onep (TREE_REALPART (expr))
2595 && integer_onep (TREE_IMAGPART (expr)));
2596 else
2597 return integer_onep (expr);
2598 }
2599
2600 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2601 it contains, or a complex or vector whose subparts are such integers,
2602 or a location wrapper for such a constant. */
2603
bool
integer_all_onesp (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_all_onesp (TREE_REALPART (expr))
      && integer_all_onesp (TREE_IMAGPART (expr)))
    return true;

  else if (TREE_CODE (expr) == VECTOR_CST)
    return (VECTOR_CST_NPATTERNS (expr) == 1
	    && VECTOR_CST_DUPLICATE_P (expr)
	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));

  else if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  /* All-ones means the value equals the unsigned maximum for the
     type's precision.  */
  return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
	  == wi::to_wide (expr));
}
2625
2626 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2627 for such a constant. */
2628
2629 bool
2630 integer_minus_onep (const_tree expr)
2631 {
2632 STRIP_ANY_LOCATION_WRAPPER (expr);
2633
2634 if (TREE_CODE (expr) == COMPLEX_CST)
2635 return (integer_all_onesp (TREE_REALPART (expr))
2636 && integer_zerop (TREE_IMAGPART (expr)));
2637 else
2638 return integer_all_onesp (expr);
2639 }
2640
2641 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2642 one bit on), or a location wrapper for such a constant. */
2643
2644 bool
2645 integer_pow2p (const_tree expr)
2646 {
2647 STRIP_ANY_LOCATION_WRAPPER (expr);
2648
2649 if (TREE_CODE (expr) == COMPLEX_CST
2650 && integer_pow2p (TREE_REALPART (expr))
2651 && integer_zerop (TREE_IMAGPART (expr)))
2652 return true;
2653
2654 if (TREE_CODE (expr) != INTEGER_CST)
2655 return false;
2656
2657 return wi::popcount (wi::to_wide (expr)) == 1;
2658 }
2659
2660 /* Return 1 if EXPR is an integer constant other than zero or a
2661 complex constant other than zero, or a location wrapper for such a
2662 constant. */
2663
2664 bool
2665 integer_nonzerop (const_tree expr)
2666 {
2667 STRIP_ANY_LOCATION_WRAPPER (expr);
2668
2669 return ((TREE_CODE (expr) == INTEGER_CST
2670 && wi::to_wide (expr) != 0)
2671 || (TREE_CODE (expr) == COMPLEX_CST
2672 && (integer_nonzerop (TREE_REALPART (expr))
2673 || integer_nonzerop (TREE_IMAGPART (expr)))));
2674 }
2675
2676 /* Return 1 if EXPR is the integer constant one. For vector,
2677 return 1 if every piece is the integer constant minus one
2678 (representing the value TRUE).
2679 Also return 1 for location wrappers for such a constant. */
2680
2681 bool
2682 integer_truep (const_tree expr)
2683 {
2684 STRIP_ANY_LOCATION_WRAPPER (expr);
2685
2686 if (TREE_CODE (expr) == VECTOR_CST)
2687 return integer_all_onesp (expr);
2688 return integer_onep (expr);
2689 }
2690
2691 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2692 for such a constant. */
2693
2694 bool
2695 fixed_zerop (const_tree expr)
2696 {
2697 STRIP_ANY_LOCATION_WRAPPER (expr);
2698
2699 return (TREE_CODE (expr) == FIXED_CST
2700 && TREE_FIXED_CST (expr).data.is_zero ());
2701 }
2702
2703 /* Return the power of two represented by a tree node known to be a
2704 power of two. */
2705
2706 int
2707 tree_log2 (const_tree expr)
2708 {
2709 if (TREE_CODE (expr) == COMPLEX_CST)
2710 return tree_log2 (TREE_REALPART (expr));
2711
2712 return wi::exact_log2 (wi::to_wide (expr));
2713 }
2714
2715 /* Similar, but return the largest integer Y such that 2 ** Y is less
2716 than or equal to EXPR. */
2717
int
tree_floor_log2 (const_tree expr)
{
  /* NOTE(review): this delegates to tree_log2 (exact log2), not to
     tree_floor_log2, for the real part -- a complex constant whose
     real part is not an exact power of two yields -1 rather than the
     floor.  Presumably callers only pass exact powers of two here;
     confirm before changing.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (wi::to_wide (expr));
}
2726
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  */
2729
unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have meaningful trailing zeros.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use whatever nonzero-bits information the optimizers recorded.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve the common low zero bits of both operands.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only add low zero bits, so take the larger count.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros of a product add up, capped at the precision.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* Only use a constant shift count when it is in range.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two behaves like a right
	 shift for the purpose of trailing zeros.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If the whole inner value is known zero, the conversion is too.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* The result is at least as aligned as the less-aligned arm.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
2837
2838 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2839 decimal float constants, so don't return 1 for them.
2840 Also return 1 for location wrappers around such a constant. */
2841
bool
real_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      /* Decimal floats distinguish 0.00 from 0.0, so exclude them.  */
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	/* Don't simply check for a duplicate because the predicate
	   accepts both +0.0 and -0.0.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}
2869
2870 /* Return 1 if EXPR is the real constant one in real or complex form.
2871 Trailing zeroes matter for decimal float constants, so don't return
2872 1 for them.
2873 Also return 1 for location wrappers around such a constant. */
2874
bool
real_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      /* Decimal floats distinguish trailing zeroes, so exclude them.  */
      return real_equal (&TREE_REAL_CST (expr), &dconst1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      /* 1 as a complex number is 1 + 0i.  */
      return real_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2896
2897 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2898 matter for decimal float constants, so don't return 1 for them.
2899 Also return 1 for location wrappers around such a constant. */
2900
bool
real_minus_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      /* Decimal floats distinguish trailing zeroes, so exclude them.  */
      return real_equal (&TREE_REAL_CST (expr), &dconstm1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      /* -1 as a complex number is -1 + 0i.  */
      return real_minus_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2922
2923 /* Nonzero if EXP is a constant or a cast of a constant. */
2924
2925 bool
2926 really_constant_p (const_tree exp)
2927 {
2928 /* This is not quite the same as STRIP_NOPS. It does more. */
2929 while (CONVERT_EXPR_P (exp)
2930 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2931 exp = TREE_OPERAND (exp, 0);
2932 return TREE_CONSTANT (exp);
2933 }
2934
2935 /* Return true if T holds a polynomial pointer difference, storing it in
2936 *VALUE if so. A true return means that T's precision is no greater
2937 than 64 bits, which is the largest address space we support, so *VALUE
2938 never loses precision. However, the signedness of the result does
2939 not necessarily match the signedness of T: sometimes an unsigned type
2940 like sizetype is used to encode a value that is actually negative. */
2941
bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Validate every coefficient before writing any, so *VALUE is
	 left untouched on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
2965
2966 poly_int64
2967 tree_to_poly_int64 (const_tree t)
2968 {
2969 gcc_assert (tree_fits_poly_int64_p (t));
2970 if (POLY_INT_CST_P (t))
2971 return poly_int_cst_value (t).force_shwi ();
2972 return TREE_INT_CST_LOW (t);
2973 }
2974
2975 poly_uint64
2976 tree_to_poly_uint64 (const_tree t)
2977 {
2978 gcc_assert (tree_fits_poly_uint64_p (t));
2979 if (POLY_INT_CST_P (t))
2980 return poly_int_cst_value (t).force_uhwi ();
2981 return TREE_INT_CST_LOW (t);
2982 }
2983 \f
2984 /* Return first list element whose TREE_VALUE is ELEM.
2985 Return 0 if ELEM is not in LIST. */
2986
2987 tree
2988 value_member (tree elem, tree list)
2989 {
2990 while (list)
2991 {
2992 if (elem == TREE_VALUE (list))
2993 return list;
2994 list = TREE_CHAIN (list);
2995 }
2996 return NULL_TREE;
2997 }
2998
2999 /* Return first list element whose TREE_PURPOSE is ELEM.
3000 Return 0 if ELEM is not in LIST. */
3001
3002 tree
3003 purpose_member (const_tree elem, tree list)
3004 {
3005 while (list)
3006 {
3007 if (elem == TREE_PURPOSE (list))
3008 return list;
3009 list = TREE_CHAIN (list);
3010 }
3011 return NULL_TREE;
3012 }
3013
3014 /* Return true if ELEM is in V. */
3015
3016 bool
3017 vec_member (const_tree elem, vec<tree, va_gc> *v)
3018 {
3019 unsigned ix;
3020 tree t;
3021 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3022 if (elem == t)
3023 return true;
3024 return false;
3025 }
3026
3027 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3028 NULL_TREE. */
3029
3030 tree
3031 chain_index (int idx, tree chain)
3032 {
3033 for (; chain && idx > 0; --idx)
3034 chain = TREE_CHAIN (chain);
3035 return chain;
3036 }
3037
3038 /* Return nonzero if ELEM is part of the chain CHAIN. */
3039
3040 bool
3041 chain_member (const_tree elem, const_tree chain)
3042 {
3043 while (chain)
3044 {
3045 if (elem == chain)
3046 return true;
3047 chain = DECL_CHAIN (chain);
3048 }
3049
3050 return false;
3051 }
3052
3053 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3054 We expect a null pointer to mark the end of the chain.
3055 This is the Lisp primitive `length'. */
3056
int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      /* Tortoise-and-hare cycle check: Q advances at half P's speed,
	 so a circular chain would make them collide and trip the
	 assert instead of looping forever.  */
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
3079
3080 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3081 UNION_TYPE TYPE, or NULL_TREE if none. */
3082
3083 tree
3084 first_field (const_tree type)
3085 {
3086 tree t = TYPE_FIELDS (type);
3087 while (t && TREE_CODE (t) != FIELD_DECL)
3088 t = TREE_CHAIN (t);
3089 return t;
3090 }
3091
3092 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3093 by modifying the last node in chain 1 to point to chain 2.
3094 This is the Lisp primitive `nconc'. */
3095
tree
chainon (tree op1, tree op2)
{
  tree t1;

  /* An empty chain on either side leaves the other unchanged.  */
  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Find the last node of OP1 and splice OP2 after it.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    /* Catch circularity: OP2 must not already contain OP1's tail.  */
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
3120
3121 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3122
3123 tree
3124 tree_last (tree chain)
3125 {
3126 tree next;
3127 if (chain)
3128 while ((next = TREE_CHAIN (chain)))
3129 chain = next;
3130 return chain;
3131 }
3132
3133 /* Reverse the order of elements in the chain T,
3134 and return the new head of the chain (old last element). */
3135
3136 tree
3137 nreverse (tree t)
3138 {
3139 tree prev = 0, decl, next;
3140 for (decl = t; decl; decl = next)
3141 {
3142 /* We shouldn't be using this function to reverse BLOCK chains; we
3143 have blocks_nreverse for that. */
3144 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3145 next = TREE_CHAIN (decl);
3146 TREE_CHAIN (decl) = prev;
3147 prev = decl;
3148 }
3149 return prev;
3150 }
3151 \f
3152 /* Return a newly created TREE_LIST node whose
3153 purpose and value fields are PARM and VALUE. */
3154
3155 tree
3156 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3157 {
3158 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3159 TREE_PURPOSE (t) = parm;
3160 TREE_VALUE (t) = value;
3161 return t;
3162 }
3163
3164 /* Build a chain of TREE_LIST nodes from a vector. */
3165
tree
build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  /* PP always points at the slot for the next cell, so the list comes
     out in the same order as VEC.  */
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}
3180
3181 /* Return a newly created TREE_LIST node whose
3182 purpose and value fields are PURPOSE and VALUE
3183 and whose TREE_CHAIN is CHAIN. */
3184
tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Clear only the common header; every other field is assigned
     below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
3201
3202 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3203 trees. */
3204
vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  /* Reserve the exact element count up front so quick_push is safe.  */
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  /* Copy only the values; indexes/purposes are dropped.  */
  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}
3218 \f
3219 /* Return the size nominally occupied by an object of type TYPE
3220 when it resides in memory. The value is measured in units of bytes,
3221 and its data type is that normally used for type sizes
3222 (which is the first type created by make_signed_type or
3223 make_unsigned_type). */
3224
tree
size_in_bytes_loc (location_t loc, const_tree type)
{
  tree t;

  /* Be tolerant of erroneous types.  */
  if (type == error_mark_node)
    return integer_zero_node;

  /* Sizes are recorded on the main variant.  */
  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t == 0)
    {
      /* Incomplete type: diagnose at LOC and recover with size 0.  */
      lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
      return size_zero_node;
    }

  return t;
}
3244
3245 /* Return the size of TYPE (in bytes) as a wide integer
3246 or return -1 if the size can vary or is larger than an integer. */
3247
3248 HOST_WIDE_INT
3249 int_size_in_bytes (const_tree type)
3250 {
3251 tree t;
3252
3253 if (type == error_mark_node)
3254 return 0;
3255
3256 type = TYPE_MAIN_VARIANT (type);
3257 t = TYPE_SIZE_UNIT (type);
3258
3259 if (t && tree_fits_uhwi_p (t))
3260 return TREE_INT_CST_LOW (t);
3261 else
3262 return -1;
3263 }
3264
3265 /* Return the maximum size of TYPE (in bytes) as a wide integer
3266 or return -1 if the size can vary or is larger than an integer. */
3267
HOST_WIDE_INT
max_int_size_in_bytes (const_tree type)
{
  HOST_WIDE_INT size = -1;
  tree size_tree;

  /* If this is an array type, check for a possible MAX_SIZE attached.  */

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      size_tree = TYPE_ARRAY_MAX_SIZE (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  /* If we still haven't been able to get a size, see if the language
     can compute a maximum size.  */

  if (size == -1)
    {
      size_tree = lang_hooks.types.max_size (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  /* -1 means no usable maximum was found.  */
  return size;
}
3297 \f
3298 /* Return the bit position of FIELD, in bits from the start of the record.
3299 This is a tree of type bitsizetype. */
3300
tree
bit_position (const_tree field)
{
  /* Combine the byte-aligned offset with the residual bit offset.  */
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}
3307 \f
3308 /* Return the byte position of FIELD, in bytes from the start of the record.
3309 This is a tree of type sizetype. */
3310
tree
byte_position (const_tree field)
{
  /* Combine the byte-aligned offset with the residual bit offset,
     expressed in bytes.  */
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}
3317
/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */
3321
HOST_WIDE_INT
int_byte_position (const_tree field)
{
  /* tree_to_shwi asserts the position fits in a signed HWI.  */
  return tree_to_shwi (byte_position (field));
}
3327 \f
3328 /* Return the strictest alignment, in bits, that T is known to have. */
3329
unsigned int
expr_align (const_tree t)
{
  unsigned int align0, align1;

  switch (TREE_CODE (t))
    {
    CASE_CONVERT:  case NON_LVALUE_EXPR:
      /* If we have conversions, we know that the alignment of the
	 object must meet each of the alignments of the types.  */
      align0 = expr_align (TREE_OPERAND (t, 0));
      align1 = TYPE_ALIGN (TREE_TYPE (t));
      return MAX (align0, align1);

    case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
    case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      /* These don't change the alignment of an object.  */
      return expr_align (TREE_OPERAND (t, 0));

    case COND_EXPR:
      /* The best we can do is say that the alignment is the least aligned
	 of the two arms.  */
      align0 = expr_align (TREE_OPERAND (t, 1));
      align1 = expr_align (TREE_OPERAND (t, 2));
      return MIN (align0, align1);

      /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
	 meaningfully, it's always 1.  */
    case LABEL_DECL:     case CONST_DECL:
    case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
    case FUNCTION_DECL:
      gcc_assert (DECL_ALIGN (t) != 0);
      return DECL_ALIGN (t);

    default:
      break;
    }

  /* Otherwise take the alignment from that of the type.  */
  return TYPE_ALIGN (TREE_TYPE (t));
}
3372 \f
3373 /* Return, as a tree node, the number of elements for TYPE (which is an
3374 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3375
3376 tree
3377 array_type_nelts (const_tree type)
3378 {
3379 tree index_type, min, max;
3380
3381 /* If they did it with unspecified bounds, then we should have already
3382 given an error about it before we got here. */
3383 if (! TYPE_DOMAIN (type))
3384 return error_mark_node;
3385
3386 index_type = TYPE_DOMAIN (type);
3387 min = TYPE_MIN_VALUE (index_type);
3388 max = TYPE_MAX_VALUE (index_type);
3389
3390 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3391 if (!max)
3392 return error_mark_node;
3393
3394 return (integer_zerop (min)
3395 ? max
3396 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3397 }
3398 \f
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* A variable qualifies if it lives in static storage and is neither
	 thread-local nor a DLL import (whose address is only fixed at load
	 time).  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* A constant index into an array of constant-size elements may be
	 folded to a static address; recurse on the base object.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3464
3465 \f
3466
3467
3468 /* Return whether OP is a DECL whose address is function-invariant. */
3469
3470 bool
3471 decl_address_invariant_p (const_tree op)
3472 {
3473 /* The conditions below are slightly less strict than the one in
3474 staticp. */
3475
3476 switch (TREE_CODE (op))
3477 {
3478 case PARM_DECL:
3479 case RESULT_DECL:
3480 case LABEL_DECL:
3481 case FUNCTION_DECL:
3482 return true;
3483
3484 case VAR_DECL:
3485 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3486 || DECL_THREAD_LOCAL_P (op)
3487 || DECL_CONTEXT (op) == current_function_decl
3488 || decl_function_context (op) == current_function_decl)
3489 return true;
3490 break;
3491
3492 case CONST_DECL:
3493 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3494 || decl_function_context (op) == current_function_decl)
3495 return true;
3496 break;
3497
3498 default:
3499 break;
3500 }
3501
3502 return false;
3503 }
3504
3505 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3506
3507 bool
3508 decl_address_ip_invariant_p (const_tree op)
3509 {
3510 /* The conditions below are slightly less strict than the one in
3511 staticp. */
3512
3513 switch (TREE_CODE (op))
3514 {
3515 case LABEL_DECL:
3516 case FUNCTION_DECL:
3517 case STRING_CST:
3518 return true;
3519
3520 case VAR_DECL:
3521 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3522 && !DECL_DLLIMPORT_P (op))
3523 || DECL_THREAD_LOCAL_P (op))
3524 return true;
3525 break;
3526
3527 case CONST_DECL:
3528 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3529 return true;
3530 break;
3531
3532 default:
3533 break;
3534 }
3535
3536 return false;
3537 }
3538
3539
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants, and read-only trees without side effects, are invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      /* The address is invariant if the base object's address is
	 invariant and every handled component on the path down uses
	 only invariant operands.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* The index must be invariant, and the lower-bound and
		 element-size operands (2 and 3) must be absent, i.e.
		 implied by the array type.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      /* Operand 2 is an explicit field offset, which might vary.  */
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3590
3591 /* Return true if T is function-invariant. */
3592
3593 bool
3594 tree_invariant_p (tree t)
3595 {
3596 tree inner = skip_simple_arithmetic (t);
3597 return tree_invariant_p_1 (inner);
3598 }
3599
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
3659
3660 /* Look inside EXPR into any simple arithmetic operations. Return the
3661 outermost non-arithmetic or non-invariant node. */
3662
3663 tree
3664 skip_simple_arithmetic (tree expr)
3665 {
3666 /* We don't care about whether this can be used as an lvalue in this
3667 context. */
3668 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3669 expr = TREE_OPERAND (expr, 0);
3670
3671 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3672 a constant, it will be more efficient to not make another SAVE_EXPR since
3673 it will allow better simplification and GCSE will be able to merge the
3674 computations if they actually occur. */
3675 while (true)
3676 {
3677 if (UNARY_CLASS_P (expr))
3678 expr = TREE_OPERAND (expr, 0);
3679 else if (BINARY_CLASS_P (expr))
3680 {
3681 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3682 expr = TREE_OPERAND (expr, 0);
3683 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3684 expr = TREE_OPERAND (expr, 1);
3685 else
3686 break;
3687 }
3688 else
3689 break;
3690 }
3691
3692 return expr;
3693 }
3694
3695 /* Look inside EXPR into simple arithmetic operations involving constants.
3696 Return the outermost non-arithmetic or non-constant node. */
3697
3698 tree
3699 skip_simple_constant_arithmetic (tree expr)
3700 {
3701 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3702 expr = TREE_OPERAND (expr, 0);
3703
3704 while (true)
3705 {
3706 if (UNARY_CLASS_P (expr))
3707 expr = TREE_OPERAND (expr, 0);
3708 else if (BINARY_CLASS_P (expr))
3709 {
3710 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3711 expr = TREE_OPERAND (expr, 0);
3712 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3713 expr = TREE_OPERAND (expr, 1);
3714 else
3715 break;
3716 }
3717 else
3718 break;
3719 }
3720
3721 return expr;
3722 }
3723
3724 /* Return which tree structure is used by T. */
3725
3726 enum tree_node_structure_enum
3727 tree_node_structure (const_tree t)
3728 {
3729 const enum tree_code code = TREE_CODE (t);
3730 return tree_node_structure_for_code (code);
3731 }
3732
3733 /* Set various status flags when building a CALL_EXPR object T. */
3734
3735 static void
3736 process_call_operands (tree t)
3737 {
3738 bool side_effects = TREE_SIDE_EFFECTS (t);
3739 bool read_only = false;
3740 int i = call_expr_flags (t);
3741
3742 /* Calls have side-effects, except those to const or pure functions. */
3743 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3744 side_effects = true;
3745 /* Propagate TREE_READONLY of arguments for const functions. */
3746 if (i & ECF_CONST)
3747 read_only = true;
3748
3749 if (!side_effects || read_only)
3750 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3751 {
3752 tree op = TREE_OPERAND (t, i);
3753 if (op && TREE_SIDE_EFFECTS (op))
3754 side_effects = true;
3755 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3756 read_only = false;
3757 }
3758
3759 TREE_SIDE_EFFECTS (t) = side_effects;
3760 TREE_READONLY (t) = read_only;
3761 }
3762 \f
3763 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3764 size or offset that depends on a field within a record. */
3765
3766 bool
3767 contains_placeholder_p (const_tree exp)
3768 {
3769 enum tree_code code;
3770
3771 if (!exp)
3772 return 0;
3773
3774 code = TREE_CODE (exp);
3775 if (code == PLACEHOLDER_EXPR)
3776 return 1;
3777
3778 switch (TREE_CODE_CLASS (code))
3779 {
3780 case tcc_reference:
3781 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3782 position computations since they will be converted into a
3783 WITH_RECORD_EXPR involving the reference, which will assume
3784 here will be valid. */
3785 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3786
3787 case tcc_exceptional:
3788 if (code == TREE_LIST)
3789 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3790 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3791 break;
3792
3793 case tcc_unary:
3794 case tcc_binary:
3795 case tcc_comparison:
3796 case tcc_expression:
3797 switch (code)
3798 {
3799 case COMPOUND_EXPR:
3800 /* Ignoring the first operand isn't quite right, but works best. */
3801 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3802
3803 case COND_EXPR:
3804 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3805 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3806 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3807
3808 case SAVE_EXPR:
3809 /* The save_expr function never wraps anything containing
3810 a PLACEHOLDER_EXPR. */
3811 return 0;
3812
3813 default:
3814 break;
3815 }
3816
3817 switch (TREE_CODE_LENGTH (code))
3818 {
3819 case 1:
3820 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3821 case 2:
3822 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3823 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3824 default:
3825 return 0;
3826 }
3827
3828 case tcc_vl_exp:
3829 switch (code)
3830 {
3831 case CALL_EXPR:
3832 {
3833 const_tree arg;
3834 const_call_expr_arg_iterator iter;
3835 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3836 if (CONTAINS_PLACEHOLDER_P (arg))
3837 return 1;
3838 return 0;
3839 }
3840 default:
3841 return 0;
3842 }
3843
3844 default:
3845 return 0;
3846 }
3847 return 0;
3848 }
3849
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* These have no type-specific size expressions of their own.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's position, qualifier (QUAL_UNION_TYPE only),
	   and type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
3917
3918 /* Wrapper around above function used to cache its result. */
3919
3920 bool
3921 type_contains_placeholder_p (tree type)
3922 {
3923 bool result;
3924
3925 /* If the contains_placeholder_bits field has been initialized,
3926 then we know the answer. */
3927 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3928 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3929
3930 /* Indicate that we've seen this type node, and the answer is false.
3931 This is what we want to return if we run into recursion via fields. */
3932 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3933
3934 /* Compute the real value. */
3935 result = type_contains_placeholder_1 (type);
3936
3937 /* Store the real value. */
3938 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3939
3940 return result;
3941 }
3942 \f
3943 /* Push tree EXP onto vector QUEUE if it is not already present. */
3944
3945 static void
3946 push_without_duplicates (tree exp, vec<tree> *queue)
3947 {
3948 unsigned int i;
3949 tree iter;
3950
3951 FOR_EACH_VEC_ELT (*queue, i, iter)
3952 if (simple_cst_equal (iter, exp) == 1)
3953 break;
3954
3955 if (!iter)
3956 queue->safe_push (exp);
3957 }
3958
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk to the innermost base of the reference chain.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* A field reference rooted at a PLACEHOLDER_EXPR is what we are
	 collecting; otherwise keep searching in the base.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Operand 0 is the operand count; scan the rest.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4032
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Rebuild the node only if some operand actually changed;
	   otherwise return EXP unchanged to preserve sharing.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		/* Operands 0..2 of a CALL_EXPR are length, function and
		   static chain; the arguments start at 3.  */
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* Preserve the no-trap property on memory references.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4228
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: look for an element of the chain whose type matches
	 the placeholder's type directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: also accept a pointer to the needed type, wrapping
	 it in an INDIRECT_REF.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Rebuild the node only if some operand actually changed;
	   otherwise return EXP unchanged to preserve sharing.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Operand 0 is the operand count; substitute in the rest.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* Preserve the no-trap property on memory references.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4399 \f
4400
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt does not set type or flags; copy them from the original.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4486
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference they are applied to.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* The base is stabilized as a reference, the index as a plain
	 subtree (it may contain arithmetic with side-effects).  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

    /* If arg isn't a kind of lvalue we recognize, make no change.
       Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* Copy type and flags from the original reference, since build_nt
     does not set them.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
4569 \f
4570 /* Low-level constructors for expressions. */
4571
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR T by walking its operand.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Fold NODE's constancy and side-effect flags into the TC/SE locals.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }

  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4648
4649 /* Build an expression of code CODE, data type TYPE, and operands as
4650 specified. Expressions and reference nodes can be created this way.
4651 Constants, decls, types and misc nodes cannot be.
4652
4653 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4654 enough for all extant tree codes. */
4655
4656 tree
4657 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4658 {
4659 tree t;
4660
4661 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4662
4663 t = make_node (code PASS_MEM_STAT);
4664 TREE_TYPE (t) = tt;
4665
4666 return t;
4667 }
4668
/* Build a one-operand expression node of code CODE, type TYPE and
   operand NODE.  Flags are seeded from the operand and then adjusted
   by code-specific rules below.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  /* One-operand expressions always fit in a tree_exp.  */
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common part needs clearing; the rest is set below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      /* Inherit side-effects and readonly-ness from the operand.  */
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4732
/* Helper for build2 through build5: store ARG##N as operand N of T and
   fold the argument's TREE_SIDE_EFFECTS / TREE_READONLY / TREE_CONSTANT
   contributions into the caller's SIDE_EFFECTS, READ_ONLY and CONSTANT
   locals.  Arguments that are types are skipped, since these flags do
   not have the same meaning on types.  */
#define PROCESS_ARG(N)			\
  do {					\
    TREE_OPERAND (t, N) = arg##N;	\
    if (arg##N &&!TYPE_P (arg##N))	\
      {					\
	if (TREE_SIDE_EFFECTS (arg##N))	\
	  side_effects = 1;		\
	if (!TREE_READONLY (arg##N)	\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);	\
	if (!TREE_CONSTANT (arg##N))	\
	  (void) (constant = 0);	\
      }					\
  } while (0)
4747
/* Build a two-operand expression node of code CODE and type TT from
   ARG0 and ARG1.  TREE_READONLY, TREE_CONSTANT, TREE_SIDE_EFFECTS and
   TREE_THIS_VOLATILE are computed from the operands.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* A pointer-typed PLUS/MINUS/MULT is only expected on INTEGER_CST
     operands (use POINTER_PLUS_EXPR for real pointer arithmetic).  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  /* Detect division/modulo by a literal zero so the result is never
     marked TREE_CONSTANT below.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &X takes its readonly/volatile flags from X.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
4826
4827
/* Build a three-operand expression node of code CODE and type TT from
   ARG0, ARG1 and ARG2.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  /* CONSTANT is only written by PROCESS_ARG; its value is deliberately
     unused for three-operand codes.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  /* Only COND_EXPR propagates the accumulated readonly flag.  */
  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4868
/* Build a four-operand expression node of code CODE and type TT from
   ARG0 through ARG3.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  /* CONSTANT and READ_ONLY are only written by PROCESS_ARG; their
     values are deliberately unused for four-operand codes.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4895
/* Build a five-operand expression node of code CODE and type TT from
   ARG0 through ARG4.  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  /* CONSTANT and READ_ONLY are only written by PROCESS_ARG; their
     values are deliberately unused for five-operand codes.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* A TARGET_MEM_REF of &X takes its readonly/volatile flags
	 from X.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4933
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  /* Fold the inner MEM_REF's constant offset into ours.  */
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
4965
4966 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4967
4968 poly_offset_int
4969 mem_ref_offset (const_tree t)
4970 {
4971 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4972 SIGNED);
4973 }
4974
4975 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4976 offsetted by OFFSET units. */
4977
4978 tree
4979 build_invariant_address (tree type, tree base, poly_int64 offset)
4980 {
4981 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4982 build_fold_addr_expr (base),
4983 build_int_cst (ptr_type_node, offset));
4984 tree addr = build1 (ADDR_EXPR, type, ref);
4985 recompute_tree_invariant_for_addr_expr (addr);
4986 return addr;
4987 }
4988
/* Like the buildN functions above, but don't specify the TREE_TYPE
   and leave the TREE_SIDE_EFFECTS as 0.
   It is permissible for arguments to be null,
   or even garbage if their values do not matter.  */

tree
build_nt (enum tree_code code, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  /* CALL_EXPRs are variable-length; use build_nt_call_vec instead.  */
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, code);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);

  for (i = 0; i < length; i++)
    TREE_OPERAND (t, i) = va_arg (p, tree);

  va_end (p);
  return t;
}
5015
5016 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5017 tree vec. */
5018
5019 tree
5020 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5021 {
5022 tree ret, t;
5023 unsigned int ix;
5024
5025 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5026 CALL_EXPR_FN (ret) = fn;
5027 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5028 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5029 CALL_EXPR_ARG (ret, ix) = t;
5030 return ret;
5031 }
5032 \f
5033 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5034 and data type TYPE.
5035 We do NOT enter this node in any sort of symbol table.
5036
5037 LOC is the location of the decl.
5038
5039 layout_decl is used to set up the decl's storage layout.
5040 Other slots are initialized to 0 or null pointers. */
5041
5042 tree
5043 build_decl (location_t loc, enum tree_code code, tree name,
5044 tree type MEM_STAT_DECL)
5045 {
5046 tree t;
5047
5048 t = make_node (code PASS_MEM_STAT);
5049 DECL_SOURCE_LOCATION (t) = loc;
5050
5051 /* if (type == error_mark_node)
5052 type = integer_type_node; */
5053 /* That is not done, deliberately, so that having error_mark_node
5054 as the type can suppress useless errors in the use of this variable. */
5055
5056 DECL_NAME (t) = name;
5057 TREE_TYPE (t) = type;
5058
5059 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5060 layout_decl (t, 0);
5061
5062 return t;
5063 }
5064
5065 /* Builds and returns function declaration with NAME and TYPE. */
5066
5067 tree
5068 build_fn_decl (const char *name, tree type)
5069 {
5070 tree id = get_identifier (name);
5071 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5072
5073 DECL_EXTERNAL (decl) = 1;
5074 TREE_PUBLIC (decl) = 1;
5075 DECL_ARTIFICIAL (decl) = 1;
5076 TREE_NOTHROW (decl) = 1;
5077
5078 return decl;
5079 }
5080
/* All TRANSLATION_UNIT_DECLs created so far; pushed to by
   build_translation_unit_decl.  */
vec<tree, va_gc> *all_translation_units;
5082
5083 /* Builds a new translation-unit decl with name NAME, queues it in the
5084 global list of translation-unit decls and returns it. */
5085
5086 tree
5087 build_translation_unit_decl (tree name)
5088 {
5089 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5090 name, NULL_TREE);
5091 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5092 vec_safe_push (all_translation_units, tu);
5093 return tu;
5094 }
5095
5096 \f
5097 /* BLOCK nodes are used to represent the structure of binding contours
5098 and declarations, once those contours have been exited and their contents
5099 compiled. This information is used for outputting debugging info. */
5100
5101 tree
5102 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5103 {
5104 tree block = make_node (BLOCK);
5105
5106 BLOCK_VARS (block) = vars;
5107 BLOCK_SUBBLOCKS (block) = subblocks;
5108 BLOCK_SUPERCONTEXT (block) = supercontext;
5109 BLOCK_CHAIN (block) = chain;
5110 return block;
5111 }
5112
5113 \f
5114 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5115
5116 LOC is the location to use in tree T. */
5117
5118 void
5119 protected_set_expr_location (tree t, location_t loc)
5120 {
5121 if (CAN_HAVE_LOCATION_P (t))
5122 SET_EXPR_LOCATION (t, loc);
5123 }
5124
/* Data used when collecting DECLs and TYPEs for language data removal.  */

class free_lang_data_d
{
public:
  /* Reserve room for 100 decls and 100 types up front.  */
  free_lang_data_d () : decls (100), types (100) {}

  /* Worklist to avoid excessive recursion.  */
  auto_vec<tree> worklist;

  /* Set of traversed objects.  Used to avoid duplicate visits.  */
  hash_set<tree> pset;

  /* Array of symbols to process with free_lang_data_in_decl.  */
  auto_vec<tree> decls;

  /* Array of types to process with free_lang_data_in_type.  */
  auto_vec<tree> types;
};
5144
5145
5146 /* Add type or decl T to one of the list of tree nodes that need their
5147 language data removed. The lists are held inside FLD. */
5148
5149 static void
5150 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5151 {
5152 if (DECL_P (t))
5153 fld->decls.safe_push (t);
5154 else if (TYPE_P (t))
5155 fld->types.safe_push (t);
5156 else
5157 gcc_unreachable ();
5158 }
5159
5160 /* Push tree node T into FLD->WORKLIST. */
5161
5162 static inline void
5163 fld_worklist_push (tree t, class free_lang_data_d *fld)
5164 {
5165 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5166 fld->worklist.safe_push ((t));
5167 }
5168
5169
5170 \f
/* Return simplified TYPE_NAME of TYPE.  */

static tree
fld_simplified_type_name (tree type)
{
  if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
    return TYPE_NAME (type);
  /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
     TYPE_DECL if the type doesn't have linkage.
     NOTE(review): the original comment here was truncated ("this must
     match fld_"); presumably this criterion has to stay in sync with the
     name comparison in fld_type_variant_equal_p -- confirm.  */
  if (type != TYPE_MAIN_VARIANT (type)
      || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
	  && (TREE_CODE (type) != RECORD_TYPE
	      || !TYPE_BINFO (type)
	      || !BINFO_VTABLE (TYPE_BINFO (type)))))
    return DECL_NAME (TYPE_NAME (type));
  return TYPE_NAME (type);
}
5189
/* Do same comparison as check_qualified_type skipping lang part of type
   and be more permissive about type names: we only care that names are
   same (for diagnostics) and that ODR names are the same.
   If INNER_TYPE is non-NULL, be sure that TREE_TYPE match it.  */

static bool
fld_type_variant_equal_p (tree t, tree v, tree inner_type)
{
  if (TYPE_QUALS (t) != TYPE_QUALS (v)
      /* We want to match incomplete variants with complete types.
	 In this case we need to ignore alignment.  */
      || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
	  && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
	      || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
      || fld_simplified_type_name (t) != fld_simplified_type_name (v)
      || !attribute_list_equal (TYPE_ATTRIBUTES (t),
				TYPE_ATTRIBUTES (v))
      || (inner_type && TREE_TYPE (v) != inner_type))
    return false;

  return true;
}
5212
/* Find variant of FIRST that match T and create new one if necessary.
   Set TREE_TYPE to INNER_TYPE if non-NULL.  */

static tree
fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
		  tree inner_type = NULL)
{
  if (first == TYPE_MAIN_VARIANT (t))
    return t;
  /* Reuse an existing equivalent variant if there is one.  */
  for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
    if (fld_type_variant_equal_p (t, v, inner_type))
      return v;
  /* None found: build a fresh variant and copy T's qualifiers, name,
     attributes and canonical type over.  */
  tree v = build_variant_type_copy (first);
  TYPE_READONLY (v) = TYPE_READONLY (t);
  TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
  TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
  TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
  TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
  TYPE_NAME (v) = TYPE_NAME (t);
  TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
  TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
  /* Variants of incomplete types should have alignment
     set to BITS_PER_UNIT.  Do not copy the actual alignment.  */
  if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
    {
      SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
      TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
    }
  if (inner_type)
    TREE_TYPE (v) = inner_type;
  gcc_checking_assert (fld_type_variant_equal_p (t, v, inner_type));
  /* The new variant may not have been seen by free_lang_data yet.  */
  if (!fld->pset.add (v))
    add_tree_to_fld_list (v, fld);
  return v;
}
5248
/* Map complete types to incomplete types; used by fld_incomplete_type_of
   (via get_or_insert) to avoid creating duplicate incomplete copies.  */

static hash_map<tree, tree> *fld_incomplete_types;

/* Map types to simplified types; used by fld_simplified_type.  */

static hash_map<tree, tree> *fld_simplified_types;
5256
/* Produce variant of T whose TREE_TYPE is T2.  If it is main variant,
   use MAP to prevent duplicates.  */

static tree
fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
			class free_lang_data_d *fld)
{
  if (TREE_TYPE (t) == t2)
    return t;

  /* For a non-main variant, rebuild the main variant first and then
     find/create the matching variant of the result.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    {
      return fld_type_variant
	       (fld_process_array_type (TYPE_MAIN_VARIANT (t),
					TYPE_MAIN_VARIANT (t2), map, fld),
		t, fld, t2);
    }

  bool existed;
  tree &array
    = map->get_or_insert (t, &existed);
  if (!existed)
    {
      array = build_array_type_1 (t2, TYPE_DOMAIN (t),
				  TYPE_TYPELESS_STORAGE (t), false);
      TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
      /* The new array type may not have been seen by free_lang_data
	 yet.  */
      if (!fld->pset.add (array))
	add_tree_to_fld_list (array, fld);
    }
  return array;
}
5288
5289 /* Return CTX after removal of contexts that are not relevant */
5290
5291 static tree
5292 fld_decl_context (tree ctx)
5293 {
5294 /* Variably modified types are needed for tree_is_indexable to decide
5295 whether the type needs to go to local or global section.
5296 This code is semi-broken but for now it is easiest to keep contexts
5297 as expected. */
5298 if (ctx && TYPE_P (ctx)
5299 && !variably_modified_type_p (ctx, NULL_TREE))
5300 {
5301 while (ctx && TYPE_P (ctx))
5302 ctx = TYPE_CONTEXT (ctx);
5303 }
5304 return ctx;
5305 }
5306
/* For T being aggregate type try to turn it into a incomplete variant.
   Return T if no simplification is possible.  */

static tree
fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
{
  if (!t)
    return NULL;
  if (POINTER_TYPE_P (t))
    {
      /* For pointers and references, simplify the pointed-to type and
	 rebuild a pointer/reference to the simplified type.  */
      tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
      if (t2 != TREE_TYPE (t))
	{
	  tree first;
	  if (TREE_CODE (t) == POINTER_TYPE)
	    first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
						 TYPE_REF_CAN_ALIAS_ALL (t));
	  else
	    first = build_reference_type_for_mode (t2, TYPE_MODE (t),
						   TYPE_REF_CAN_ALIAS_ALL (t));
	  gcc_assert (TYPE_CANONICAL (t2) != t2
		      && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
	  /* The new pointer type may not have been seen by
	     free_lang_data yet.  */
	  if (!fld->pset.add (first))
	    add_tree_to_fld_list (first, fld);
	  return fld_type_variant (first, t, fld);
	}
      return t;
    }
  if (TREE_CODE (t) == ARRAY_TYPE)
    return fld_process_array_type (t,
				   fld_incomplete_type_of (TREE_TYPE (t), fld),
				   fld_incomplete_types, fld);
  if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
      || !COMPLETE_TYPE_P (t))
    return t;
  if (TYPE_MAIN_VARIANT (t) == t)
    {
      bool existed;
      tree &copy
	= fld_incomplete_types->get_or_insert (t, &existed);

      if (!existed)
	{
	  copy = build_distinct_type_copy (t);

	  /* It is possible that type was not seen by free_lang_data yet.  */
	  if (!fld->pset.add (copy))
	    add_tree_to_fld_list (copy, fld);
	  /* Clear everything that makes the type complete.  */
	  TYPE_SIZE (copy) = NULL;
	  TYPE_USER_ALIGN (copy) = 0;
	  TYPE_SIZE_UNIT (copy) = NULL;
	  TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
	  TREE_ADDRESSABLE (copy) = 0;
	  if (AGGREGATE_TYPE_P (t))
	    {
	      SET_TYPE_MODE (copy, VOIDmode);
	      SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
	      TYPE_TYPELESS_STORAGE (copy) = 0;
	      TYPE_FIELDS (copy) = NULL;
	      TYPE_BINFO (copy) = NULL;
	    }
	  else
	    TYPE_VALUES (copy) = NULL;

	  /* Build copy of TYPE_DECL in TYPE_NAME if necessary.
	     This is needed for ODR violation warnings to come out right (we
	     want duplicate TYPE_DECLs whenever the type is duplicated because
	     of ODR violation.  Because lang data in the TYPE_DECL may not
	     have been freed yet, rebuild it from scratch and copy relevant
	     fields.  */
	  TYPE_NAME (copy) = fld_simplified_type_name (copy);
	  tree name = TYPE_NAME (copy);

	  if (name && TREE_CODE (name) == TYPE_DECL)
	    {
	      gcc_checking_assert (TREE_TYPE (name) == t);
	      tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
				       DECL_NAME (name), copy);
	      if (DECL_ASSEMBLER_NAME_SET_P (name))
		SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
	      SET_DECL_ALIGN (name2, 0);
	      DECL_CONTEXT (name2) = fld_decl_context
					 (DECL_CONTEXT (name));
	      TYPE_NAME (copy) = name2;
	    }
	}
      return copy;
    }
  /* Non-main variant: simplify the main variant and pick the matching
     variant of the result.  */
  return (fld_type_variant
	  (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
}
5398
/* Simplify type T for scenarios where we do not need complete pointer
   types.  */

static tree
fld_simplified_type (tree t, class free_lang_data_d *fld)
{
  if (!t)
    return t;
  if (POINTER_TYPE_P (t))
    return fld_incomplete_type_of (t, fld);
  /* FIXME: This triggers verification error, see PR88140.
     The "&& 0" below deliberately disables the array path until that
     is resolved.  */
  if (TREE_CODE (t) == ARRAY_TYPE && 0)
    return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
				   fld_simplified_types, fld);
  return t;
}
5415
5416 /* Reset the expression *EXPR_P, a size or position.
5417
5418 ??? We could reset all non-constant sizes or positions. But it's cheap
5419 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5420
5421 We need to reset self-referential sizes or positions because they cannot
5422 be gimplified and thus can contain a CALL_EXPR after the gimplification
5423 is finished, which will run afoul of LTO streaming. And they need to be
5424 reset to something essentially dummy but not constant, so as to preserve
5425 the properties of the object they are attached to. */
5426
5427 static inline void
5428 free_lang_data_in_one_sizepos (tree *expr_p)
5429 {
5430 tree expr = *expr_p;
5431 if (CONTAINS_PLACEHOLDER_P (expr))
5432 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5433 }
5434
5435
5436 /* Reset all the fields in a binfo node BINFO. We only keep
5437 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5438
5439 static void
5440 free_lang_data_in_binfo (tree binfo)
5441 {
5442 unsigned i;
5443 tree t;
5444
5445 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5446
5447 BINFO_VIRTUALS (binfo) = NULL_TREE;
5448 BINFO_BASE_ACCESSES (binfo) = NULL;
5449 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5450 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5451 BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5452
5453 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5454 free_lang_data_in_binfo (t);
5455 }
5456
5457
/* Reset all language specific information still present in TYPE, using
   FLD to track nodes already processed and to cache simplified types.
   Runs in the free_lang_data pass so that only middle-end-relevant
   data survives into LTO streaming.  */

static void
free_lang_data_in_type (tree type, class free_lang_data_d *fld)
{
  gcc_assert (TYPE_P (type));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (type);

  /* Language-specific flags have no meaning past this point.  */
  TREE_LANG_FLAG_0 (type) = 0;
  TREE_LANG_FLAG_1 (type) = 0;
  TREE_LANG_FLAG_2 (type) = 0;
  TREE_LANG_FLAG_3 (type) = 0;
  TREE_LANG_FLAG_4 (type) = 0;
  TREE_LANG_FLAG_5 (type) = 0;
  TREE_LANG_FLAG_6 (type) = 0;

  TYPE_NEEDS_CONSTRUCTING (type) = 0;

  /* Purge non-marked variants from the variants chain, so that they
     don't reappear in the IL after free_lang_data.  */
  while (TYPE_NEXT_VARIANT (type)
	 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
    {
      tree t = TYPE_NEXT_VARIANT (type);
      TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
      /* Turn the removed types into distinct types.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_NEXT_VARIANT (t) = NULL_TREE;
    }

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
      /* Remove the const and volatile qualifiers from arguments.  The
	 C++ front end removes them, but the C front end does not,
	 leading to false ODR violation errors when merging two
	 instances of the same function signature compiled by
	 different front ends.  */
      for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
	  tree arg_type = TREE_VALUE (p);

	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
	    {
	      int quals = TYPE_QUALS (arg_type)
			  & ~TYPE_QUAL_CONST
			  & ~TYPE_QUAL_VOLATILE;
	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
	      /* The qualified variant may be a fresh node; scrub it too,
		 but only if it was not seen before.  */
	      if (!fld->pset.add (TREE_VALUE (p)))
		free_lang_data_in_type (TREE_VALUE (p), fld);
	    }
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_PURPOSE (p) = NULL;
	}
    }
  else if (TREE_CODE (type) == METHOD_TYPE)
    {
      TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
      for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
	  TREE_PURPOSE (p) = NULL;
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Remove members that are not FIELD_DECLs from the field list
	 of an aggregate.  These occur in C++.  */
      for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
	if (TREE_CODE (member) == FIELD_DECL)
	  prev = &DECL_CHAIN (member);
	else
	  *prev = DECL_CHAIN (member);

      TYPE_VFIELD (type) = NULL_TREE;

      if (TYPE_BINFO (type))
	{
	  free_lang_data_in_binfo (TYPE_BINFO (type));
	  /* We need to preserve link to bases and virtual table for all
	     polymorphic types to make devirtualization machinery working.  */
	  if (!BINFO_VTABLE (TYPE_BINFO (type)))
	    TYPE_BINFO (type) = NULL;
	}
    }
  else if (INTEGRAL_TYPE_P (type)
	   || SCALAR_FLOAT_TYPE_P (type)
	   || FIXED_POINT_TYPE_P (type))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE)
	{
	  /* Type values are used only for C++ ODR checking.  Drop them
	     for all type variants and non-ODR types.
	     For ODR types the data is freed in free_odr_warning_data.  */
	  if (TYPE_MAIN_VARIANT (type) != type
	      || !type_with_linkage_p (type))
	    TYPE_VALUES (type) = NULL;
	  else
	    /* Simplify representation by recording only values rather
	       than const decls.  */
	    for (tree e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
	      if (TREE_CODE (TREE_VALUE (e)) == CONST_DECL)
		TREE_VALUE (e) = DECL_INITIAL (TREE_VALUE (e));
	}
      free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
      free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
    }

  TYPE_LANG_SLOT_1 (type) = NULL_TREE;

  free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
  free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));

  /* BLOCK contexts are not streamed; hoist the context to the
     innermost non-BLOCK supercontext.  */
  if (TYPE_CONTEXT (type)
      && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
    {
      tree ctx = TYPE_CONTEXT (type);
      do
	{
	  ctx = BLOCK_SUPERCONTEXT (ctx);
	}
      while (ctx && TREE_CODE (ctx) == BLOCK);
      TYPE_CONTEXT (type) = ctx;
    }

  TYPE_STUB_DECL (type) = NULL;
  TYPE_NAME (type) = fld_simplified_type_name (type);
}
5590
5591
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows use
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g.  -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only the main TYPE_DECL of a non-artificial, non-variably-modified
	 type with linkage (or an INTEGER_TYPE) gets a mangled name, and
	 only if it does not have one already.  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
5670
5671
/* Reset all language specific information still present in symbol
   DECL, using FLD for the pointer set and type simplification caches.  */

static void
free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
{
  gcc_assert (DECL_P (decl));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (decl);

  /* Language-specific flags have no meaning past this point.  */
  TREE_LANG_FLAG_0 (decl) = 0;
  TREE_LANG_FLAG_1 (decl) = 0;
  TREE_LANG_FLAG_2 (decl) = 0;
  TREE_LANG_FLAG_3 (decl) = 0;
  TREE_LANG_FLAG_4 (decl) = 0;
  TREE_LANG_FLAG_5 (decl) = 0;
  TREE_LANG_FLAG_6 (decl) = 0;

  free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
  free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      DECL_FCONTEXT (decl) = NULL;
      free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
      if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (decl) = NULL_TREE;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      struct cgraph_node *node;
      /* Frontends do not set TREE_ADDRESSABLE on public variables even though
	 the address may be taken in other unit, so this flag has no practical
	 use for middle-end.

	 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
	 for public objects that indeed cannot be addressed, but it is not
	 the case.  Set the flag to true so we do not get merge failures for
	 i.e. virtual tables between units that take address of it and
	 units that don't.  */
      if (TREE_PUBLIC (decl))
	TREE_ADDRESSABLE (decl) = true;
      TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
      /* Functions with no callgraph node, or with neither a definition
	 nor clones, will not be emitted; drop their bodies.  */
      if (!(node = cgraph_node::get (decl))
	  || (!node->definition && !node->clones))
	{
	  if (node)
	    node->release_body ();
	  else
	    {
	      release_function_body (decl);
	      DECL_ARGUMENTS (decl) = NULL;
	      DECL_RESULT (decl) = NULL;
	      DECL_INITIAL (decl) = error_mark_node;
	    }
	}
      if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
	{
	  tree t;

	  /* If DECL has a gimple body, then the context for its
	     arguments must be DECL.  Otherwise, it doesn't really
	     matter, as we will not be emitting any code for DECL.  In
	     general, there may be other instances of DECL created by
	     the front end and since PARM_DECLs are generally shared,
	     their DECL_CONTEXT changes as the replicas of DECL are
	     created.  The only time where DECL_CONTEXT is important
	     is for the FUNCTION_DECLs that have a gimple body (since
	     the PARM_DECL will be used in the function's body).  */
	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
	    DECL_CONTEXT (t) = decl;
	  /* Provide default target/optimization nodes so later passes
	     do not need to handle NULL.  */
	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
	      = target_option_default_node;
	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
	      = optimization_default_node;
	}

      /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
	 At this point, it is not needed anymore.  */
      DECL_SAVED_TREE (decl) = NULL_TREE;

      /* Clear the abstract origin if it refers to a method.
	 Otherwise dwarf2out.c will ICE as we splice functions out of
	 TYPE_FIELDS and thus the origin will not be output
	 correctly.  */
      if (DECL_ABSTRACT_ORIGIN (decl)
	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
	  && RECORD_OR_UNION_TYPE_P
	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;

      DECL_VINDEX (decl) = NULL_TREE;
    }
  else if (VAR_P (decl))
    {
      /* See comment above why we set the flag for functions.  */
      if (TREE_PUBLIC (decl))
	TREE_ADDRESSABLE (decl) = true;
      /* Drop initializers of non-readonly externals and of
	 function-local non-static variables; they are not needed.  */
      if ((DECL_EXTERNAL (decl)
	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
	DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (decl) = 0;
      TREE_PUBLIC (decl) = 0;
      TREE_PRIVATE (decl) = 0;
      DECL_ARTIFICIAL (decl) = 0;
      TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
      DECL_INITIAL (decl) = NULL_TREE;
      DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
      DECL_MODE (decl) = VOIDmode;
      SET_DECL_ALIGN (decl, 0);
      /* TREE_TYPE is cleared at WPA time in free_odr_warning_data.  */
    }
  else if (TREE_CODE (decl) == FIELD_DECL)
    {
      TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
      DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
	   && DECL_INITIAL (decl)
	   && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
    {
      /* Strip builtins from the translation-unit BLOCK.  We still have targets
	 without builtin_decl_explicit support and also builtins are shared
	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
      tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
      while (*nextp)
	{
	  tree var = *nextp;
	  if (TREE_CODE (var) == FUNCTION_DECL
	      && fndecl_built_in_p (var))
	    *nextp = TREE_CHAIN (var);
	  else
	    nextp = &TREE_CHAIN (var);
	}
    }
  /* We need to keep field decls associated with their trees.  Otherwise tree
     merging may merge some fields and keep others disjoint which in turn will
     not do well with TREE_CHAIN pointers linking them.

     Also do not drop containing types for virtual methods and tables because
     these are needed by devirtualization.
     C++ destructors are special because C++ frontends sometimes produce
     virtual destructor as an alias of non-virtual destructor.  In
     devirtualization code we always walk through aliases and we need
     context to be preserved too.  See PR89335.  */
  if (TREE_CODE (decl) != FIELD_DECL
      && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
	  || (!DECL_VIRTUAL_P (decl)
	      && (TREE_CODE (decl) != FUNCTION_DECL
		  || !DECL_CXX_DESTRUCTOR_P (decl)))))
    DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
}
5832
5833
/* Operand callback helper for free_lang_data_in_node.  *TP is the
   subtree operand being considered, *WS is cleared to stop walk_tree
   from descending further when we handle a subtree ourselves, and DATA
   points to the free_lang_data_d state.  Always returns NULL_TREE so
   the walk continues.  */

static tree
find_decls_types_r (tree *tp, int *ws, void *data)
{
  tree t = *tp;
  class free_lang_data_d *fld = (class free_lang_data_d *) data;

  if (TREE_CODE (t) == TREE_LIST)
    return NULL_TREE;

  /* Language specific nodes will be removed, so there is no need
     to gather anything under them.  */
  if (is_lang_specific (t))
    {
      *ws = 0;
      return NULL_TREE;
    }

  if (DECL_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 decls, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      fld_worklist_push (DECL_NAME (t), fld);
      fld_worklist_push (DECL_CONTEXT (t), fld);
      fld_worklist_push (DECL_SIZE (t), fld);
      fld_worklist_push (DECL_SIZE_UNIT (t), fld);

      /* We are going to remove everything under DECL_INITIAL for
	 TYPE_DECLs.  No point walking them.  */
      if (TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (DECL_INITIAL (t), fld);

      fld_worklist_push (DECL_ATTRIBUTES (t), fld);
      fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);

      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
	  fld_worklist_push (DECL_RESULT (t), fld);
	}
      else if (TREE_CODE (t) == FIELD_DECL)
	{
	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
	  fld_worklist_push (DECL_FCONTEXT (t), fld);
	}

      if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	fld_worklist_push (DECL_VALUE_EXPR (t), fld);

      if (TREE_CODE (t) != FIELD_DECL
	  && TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (TREE_CHAIN (t), fld);
      *ws = 0;
    }
  else if (TYPE_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 types, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
      fld_worklist_push (TYPE_SIZE (t), fld);
      fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
      fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
      fld_worklist_push (TYPE_POINTER_TO (t), fld);
      fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
      fld_worklist_push (TYPE_NAME (t), fld);
      /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
	 lists, we may look types up in these lists and use them while
	 optimizing the function body.  Thus we need to free lang data
	 in them.  */
      if (TREE_CODE (t) == POINTER_TYPE)
	fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
      if (TREE_CODE (t) == REFERENCE_TYPE)
	fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
      if (!POINTER_TYPE_P (t))
	fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
      /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types.  */
      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
      fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
	 do not and want not to reach unused variants this way.  */
      if (TYPE_CONTEXT (t))
	{
	  tree ctx = TYPE_CONTEXT (t);
	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
	     So push that instead.  */
	  while (ctx && TREE_CODE (ctx) == BLOCK)
	    ctx = BLOCK_SUPERCONTEXT (ctx);
	  fld_worklist_push (ctx, fld);
	}
      fld_worklist_push (TYPE_CANONICAL (t), fld);

      if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
	{
	  unsigned i;
	  tree tem;
	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
	    fld_worklist_push (TREE_TYPE (tem), fld);
	  fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
	  fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
	}
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  tree tem;
	  /* Push all TYPE_FIELDS - there can be interleaving interesting
	     and non-interesting things.  */
	  tem = TYPE_FIELDS (t);
	  while (tem)
	    {
	      if (TREE_CODE (tem) == FIELD_DECL)
		fld_worklist_push (tem, fld);
	      tem = TREE_CHAIN (tem);
	    }
	}
      if (FUNC_OR_METHOD_TYPE_P (t))
	fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);

      fld_worklist_push (TYPE_STUB_DECL (t), fld);
      *ws = 0;
    }
  else if (TREE_CODE (t) == BLOCK)
    {
      /* Keep only LABEL_DECLs and function-local automatic VAR_DECLs
	 in BLOCK_VARS; splice everything else out of the chain.  */
      for (tree *tem = &BLOCK_VARS (t); *tem; )
	{
	  if (TREE_CODE (*tem) != LABEL_DECL
	      && (TREE_CODE (*tem) != VAR_DECL
		  || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem))))
	    {
	      gcc_assert (TREE_CODE (*tem) != RESULT_DECL
			  && TREE_CODE (*tem) != PARM_DECL);
	      *tem = TREE_CHAIN (*tem);
	    }
	  else
	    {
	      fld_worklist_push (*tem, fld);
	      tem = &TREE_CHAIN (*tem);
	    }
	}
      for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
	fld_worklist_push (tem, fld);
      fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
    }

  if (TREE_CODE (t) != IDENTIFIER_NODE
      && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
    fld_worklist_push (TREE_TYPE (t), fld);

  return NULL_TREE;
}
5993
5994
5995 /* Find decls and types in T. */
5996
5997 static void
5998 find_decls_types (tree t, class free_lang_data_d *fld)
5999 {
6000 while (1)
6001 {
6002 if (!fld->pset.contains (t))
6003 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6004 if (fld->worklist.is_empty ())
6005 break;
6006 t = fld->worklist.pop ();
6007 }
6008 }
6009
6010 /* Translate all the types in LIST with the corresponding runtime
6011 types. */
6012
6013 static tree
6014 get_eh_types_for_runtime (tree list)
6015 {
6016 tree head, prev;
6017
6018 if (list == NULL_TREE)
6019 return NULL_TREE;
6020
6021 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6022 prev = head;
6023 list = TREE_CHAIN (list);
6024 while (list)
6025 {
6026 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6027 TREE_CHAIN (prev) = n;
6028 prev = TREE_CHAIN (prev);
6029 list = TREE_CHAIN (list);
6030 }
6031
6032 return head;
6033 }
6034
6035
/* Find decls and types referenced in EH region R and store them in
   FLD->DECLS and FLD->TYPES.  */

static void
find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
{
  switch (r->type)
    {
    case ERT_CLEANUP:
      /* Cleanup regions carry no type lists or decls to collect.  */
      break;

    case ERT_TRY:
      {
	eh_catch c;

	/* The types referenced in each catch must first be changed to the
	   EH types used at runtime.  This removes references to FE types
	   in the region.  */
	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	  {
	    c->type_list = get_eh_types_for_runtime (c->type_list);
	    walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      /* Same runtime-type translation for the allowed-exceptions list.  */
      r->u.allowed.type_list
	= get_eh_types_for_runtime (r->u.allowed.type_list);
      walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
      break;

    case ERT_MUST_NOT_THROW:
      walk_tree (&r->u.must_not_throw.failure_decl,
		 find_decls_types_r, fld, &fld->pset);
      break;
    }
}
6074
6075
/* Find decls and types referenced in cgraph node N and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from N,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
{
  basic_block bb;
  struct function *fn;
  unsigned ix;
  tree t;

  find_decls_types (n->decl, fld);

  /* Without a gimple body there is nothing more to scan.  */
  if (!gimple_has_body_p (n->decl))
    return;

  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  fn = DECL_STRUCT_FUNCTION (n->decl);

  /* Traverse locals. */
  FOR_EACH_LOCAL_DECL (fn, ix, t)
    find_decls_types (t, fld);

  /* Traverse EH regions in FN.  */
  {
    eh_region r;
    FOR_ALL_EH_REGION_FN (r, fn)
      find_decls_types_in_eh_region (r, fld);
  }

  /* Traverse every statement in FN.  */
  FOR_EACH_BB_FN (bb, fn)
    {
      gphi_iterator psi;
      gimple_stmt_iterator si;
      unsigned i;

      /* PHI nodes are not regular statements; scan their arguments.  */
      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
	{
	  gphi *phi = psi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
	      find_decls_types (*arg_p, fld);
	    }
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);

	  /* The call's function type is not among the operands.  */
	  if (is_gimple_call (stmt))
	    find_decls_types (gimple_call_fntype (stmt), fld);

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree arg = gimple_op (stmt, i);
	      find_decls_types (arg, fld);
	      /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
		 which we need for asm stmts.  */
	      if (arg
		  && TREE_CODE (arg) == TREE_LIST
		  && TREE_PURPOSE (arg)
		  && gimple_code (stmt) == GIMPLE_ASM)
		find_decls_types (TREE_PURPOSE (arg), fld);
	    }
	}
    }
}
6150
6151
/* Find decls and types referenced in varpool node V and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from V,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
{
  find_decls_types (v->decl, fld);
}
6163
6164 /* If T needs an assembler name, have one created for it. */
6165
6166 void
6167 assign_assembler_name_if_needed (tree t)
6168 {
6169 if (need_assembler_name_p (t))
6170 {
6171 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6172 diagnostics that use input_location to show locus
6173 information. The problem here is that, at this point,
6174 input_location is generally anchored to the end of the file
6175 (since the parser is long gone), so we don't have a good
6176 position to pin it to.
6177
6178 To alleviate this problem, this uses the location of T's
6179 declaration. Examples of this are
6180 testsuite/g++.dg/template/cond2.C and
6181 testsuite/g++.dg/template/pr35240.C. */
6182 location_t saved_location = input_location;
6183 input_location = DECL_SOURCE_LOCATION (t);
6184
6185 decl_assembler_name (t);
6186
6187 input_location = saved_location;
6188 }
6189 }
6190
6191
/* Free language specific information for every operand and expression
   in every node of the call graph, using FLD as scratch state.  This
   process operates in three stages:

   1- Every callgraph node and varpool node is traversed looking for
      decls and types embedded in them.  This is a more exhaustive
      search than that done by find_referenced_vars, because it will
      also collect individual fields, decls embedded in types, etc.

   2- All the decls found are sent to free_lang_data_in_decl.

   3- All the types found are sent to free_lang_data_in_type.

   The ordering between decls and types is important because
   free_lang_data_in_decl sets assembler names, which includes
   mangling.  So types cannot be freed up until assembler names have
   been set up.  */

static void
free_lang_data_in_cgraph (class free_lang_data_d *fld)
{
  struct cgraph_node *n;
  varpool_node *v;
  tree t;
  unsigned i;
  alias_pair *p;

  /* Find decls and types in the body of every function in the callgraph.  */
  FOR_EACH_FUNCTION (n)
    find_decls_types_in_node (n, fld);

  /* Alias pairs reference decls that may not be in the callgraph yet.  */
  FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
    find_decls_types (p->decl, fld);

  /* Find decls and types in every varpool symbol.  */
  FOR_EACH_VARIABLE (v)
    find_decls_types_in_var (v, fld);

  /* Set the assembler name on every decl found.  We need to do this
     now because free_lang_data_in_decl will invalidate data needed
     for mangling.  This breaks mangling on interdependent decls.  */
  FOR_EACH_VEC_ELT (fld->decls, i, t)
    assign_assembler_name_if_needed (t);

  /* Traverse every decl found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld->decls, i, t)
    free_lang_data_in_decl (t, fld);

  /* Traverse every type found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld->types, i, t)
    free_lang_data_in_type (t, fld);
}
6243
6244
/* Free resources that are used by FE but are not needed once they are done.
   Entry point of the free_lang_data pass; returns 0 (no TODO flags).  */

static unsigned
free_lang_data (void)
{
  unsigned i;
  class free_lang_data_d fld;

  /* If we are the LTO frontend we have freed lang-specific data already.  */
  if (in_lto_p
      || (!flag_generate_lto && !flag_generate_offload))
    {
      /* Rebuild type inheritance graph even when not doing LTO to get
	 consistent profile data.  */
      rebuild_type_inheritance_graph ();
      return 0;
    }

  fld_incomplete_types = new hash_map<tree, tree>;
  fld_simplified_types = new hash_map<tree, tree>;

  /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one.  */
  if (vec_safe_is_empty (all_translation_units))
    build_translation_unit_decl (NULL_TREE);

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph (&fld);

  /* Create gimple variants for common types.  */
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  lang_hooks.gimplify_expr = lhd_gimplify_expr;
  lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
  lang_hooks.print_xnode = lhd_print_tree_nothing;
  lang_hooks.print_decl = lhd_print_tree_nothing;
  lang_hooks.print_type = lhd_print_tree_nothing;
  lang_hooks.print_identifier = lhd_print_tree_nothing;

  lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;

  if (flag_checking)
    {
      /* NOTE: this I shadows the function-scope unsigned I above;
	 harmless, but worth being aware of.  */
      int i;
      tree t;

      FOR_EACH_VEC_ELT (fld.types, i, t)
	verify_type (t);
    }

  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  tree_diagnostics_defaults (global_dc);

  rebuild_type_inheritance_graph ();

  delete fld_incomplete_types;
  delete fld_simplified_types;

  return 0;
}
6325
6326
namespace {

/* Pass descriptor for the free_lang_data simple IPA pass.  */

const pass_data pass_data_ipa_free_lang_data =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_lang_data", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_LANG_DATA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Simple IPA pass that runs free_lang_data once, after the front end
   has finished producing the IL.  */

class pass_ipa_free_lang_data : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_lang_data (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return free_lang_data (); }

}; // class pass_ipa_free_lang_data

} // anon namespace
6355
/* Factory function for the free_lang_data pass.  CTXT is the compiler
   context the pass is registered with.  */

simple_ipa_opt_pass *
make_pass_ipa_free_lang_data (gcc::context *ctxt)
{
  return new pass_ipa_free_lang_data (ctxt);
}
6361 \f
6362 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6363 of the various TYPE_QUAL values. */
6364
6365 static void
6366 set_type_quals (tree type, int type_quals)
6367 {
6368 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6369 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6370 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6371 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6372 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6373 }
6374
6375 /* Returns true iff CAND and BASE have equivalent language-specific
6376 qualifiers. */
6377
6378 bool
6379 check_lang_type (const_tree cand, const_tree base)
6380 {
6381 if (lang_hooks.types.type_hash_eq == NULL)
6382 return true;
6383 /* type_hash_eq currently only applies to these types. */
6384 if (TREE_CODE (cand) != FUNCTION_TYPE
6385 && TREE_CODE (cand) != METHOD_TYPE)
6386 return true;
6387 return lang_hooks.types.type_hash_eq (cand, base);
6388 }
6389
6390 /* This function checks to see if TYPE matches the size one of the built-in
6391 atomic types, and returns that core atomic type. */
6392
6393 static tree
6394 find_atomic_core_type (const_tree type)
6395 {
6396 tree base_atomic_type;
6397
6398 /* Only handle complete types. */
6399 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6400 return NULL_TREE;
6401
6402 switch (tree_to_uhwi (TYPE_SIZE (type)))
6403 {
6404 case 8:
6405 base_atomic_type = atomicQI_type_node;
6406 break;
6407
6408 case 16:
6409 base_atomic_type = atomicHI_type_node;
6410 break;
6411
6412 case 32:
6413 base_atomic_type = atomicSI_type_node;
6414 break;
6415
6416 case 64:
6417 base_atomic_type = atomicDI_type_node;
6418 break;
6419
6420 case 128:
6421 base_atomic_type = atomicTI_type_node;
6422 break;
6423
6424 default:
6425 base_atomic_type = NULL_TREE;
6426 }
6427
6428 return base_atomic_type;
6429 }
6430
6431 /* Returns true iff unqualified CAND and BASE are equivalent. */
6432
6433 bool
6434 check_base_type (const_tree cand, const_tree base)
6435 {
6436 if (TYPE_NAME (cand) != TYPE_NAME (base)
6437 /* Apparently this is needed for Objective-C. */
6438 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6439 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6440 TYPE_ATTRIBUTES (base)))
6441 return false;
6442 /* Check alignment. */
6443 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6444 return true;
6445 /* Atomic types increase minimal alignment. We must to do so as well
6446 or we get duplicated canonical types. See PR88686. */
6447 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6448 {
6449 /* See if this object can map to a basic atomic type. */
6450 tree atomic_type = find_atomic_core_type (cand);
6451 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6452 return true;
6453 }
6454 return false;
6455 }
6456
6457 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6458
6459 bool
6460 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6461 {
6462 return (TYPE_QUALS (cand) == type_quals
6463 && check_base_type (cand, base)
6464 && check_lang_type (cand, base));
6465 }
6466
6467 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6468
6469 static bool
6470 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6471 {
6472 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6473 && TYPE_NAME (cand) == TYPE_NAME (base)
6474 /* Apparently this is needed for Objective-C. */
6475 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6476 /* Check alignment. */
6477 && TYPE_ALIGN (cand) == align
6478 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6479 TYPE_ATTRIBUTES (base))
6480 && check_lang_type (cand, base));
6481 }
6482
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  /* Fast path: TYPE itself already carries exactly these qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  /* Next try the main variant, which is the head of the variant list.  */
  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	tree t = *tp;
	/* Unlink T from its current position in the chain ...  */
	*tp = TYPE_NEXT_VARIANT (t);
	/* ... and splice it back in directly after the main variant.  */
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
6515
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	{
	  /* Recurse so the qualified variant shares the canonical
	     type's own qualified variant.  */
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
6564
6565 /* Create a variant of type T with alignment ALIGN. */
6566
6567 tree
6568 build_aligned_type (tree type, unsigned int align)
6569 {
6570 tree t;
6571
6572 if (TYPE_PACKED (type)
6573 || TYPE_ALIGN (type) == align)
6574 return type;
6575
6576 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6577 if (check_aligned_type (t, type, align))
6578 return t;
6579
6580 t = build_variant_type_copy (type);
6581 SET_TYPE_ALIGN (t, align);
6582 TYPE_USER_ALIGN (t) = 1;
6583
6584 return t;
6585 }
6586
6587 /* Create a new distinct copy of TYPE. The new type is made its own
6588 MAIN_VARIANT. If TYPE requires structural equality checks, the
6589 resulting type requires structural equality checks; otherwise, its
6590 TYPE_CANONICAL points to itself. */
6591
6592 tree
6593 build_distinct_type_copy (tree type MEM_STAT_DECL)
6594 {
6595 tree t = copy_node (type PASS_MEM_STAT);
6596
6597 TYPE_POINTER_TO (t) = 0;
6598 TYPE_REFERENCE_TO (t) = 0;
6599
6600 /* Set the canonical type either to a new equivalence class, or
6601 propagate the need for structural equality checks. */
6602 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6603 SET_TYPE_STRUCTURAL_EQUALITY (t);
6604 else
6605 TYPE_CANONICAL (t) = t;
6606
6607 /* Make it its own variant. */
6608 TYPE_MAIN_VARIANT (t) = t;
6609 TYPE_NEXT_VARIANT (t) = 0;
6610
6611 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6612 whose TREE_TYPE is not t. This can also happen in the Ada
6613 frontend when using subtypes. */
6614
6615 return t;
6616 }
6617
6618 /* Create a new variant of TYPE, equivalent but distinct. This is so
6619 the caller can modify it. TYPE_CANONICAL for the return type will
6620 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6621 are considered equal by the language itself (or that both types
6622 require structural equality checks). */
6623
6624 tree
6625 build_variant_type_copy (tree type MEM_STAT_DECL)
6626 {
6627 tree t, m = TYPE_MAIN_VARIANT (type);
6628
6629 t = build_distinct_type_copy (type PASS_MEM_STAT);
6630
6631 /* Since we're building a variant, assume that it is a non-semantic
6632 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6633 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6634 /* Type variants have no alias set defined. */
6635 TYPE_ALIAS_SET (t) = -1;
6636
6637 /* Add the new type to the chain of variants of TYPE. */
6638 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6639 TYPE_NEXT_VARIANT (m) = t;
6640 TYPE_MAIN_VARIANT (t) = m;
6641
6642 return t;
6643 }
6644 \f
6645 /* Return true if the from tree in both tree maps are equal. */
6646
6647 int
6648 tree_map_base_eq (const void *va, const void *vb)
6649 {
6650 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6651 *const b = (const struct tree_map_base *) vb;
6652 return (a->from == b->from);
6653 }
6654
6655 /* Hash a from tree in a tree_base_map. */
6656
6657 unsigned int
6658 tree_map_base_hash (const void *item)
6659 {
6660 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6661 }
6662
6663 /* Return true if this tree map structure is marked for garbage collection
6664 purposes. We simply return true if the from tree is marked, so that this
6665 structure goes away when the from tree goes away. */
6666
6667 int
6668 tree_map_base_marked_p (const void *p)
6669 {
6670 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6671 }
6672
6673 /* Hash a from tree in a tree_map. */
6674
6675 unsigned int
6676 tree_map_hash (const void *item)
6677 {
6678 return (((const struct tree_map *) item)->hash);
6679 }
6680
6681 /* Hash a from tree in a tree_decl_map. */
6682
6683 unsigned int
6684 tree_decl_map_hash (const void *item)
6685 {
6686 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6687 }
6688
6689 /* Return the initialization priority for DECL. */
6690
6691 priority_type
6692 decl_init_priority_lookup (tree decl)
6693 {
6694 symtab_node *snode = symtab_node::get (decl);
6695
6696 if (!snode)
6697 return DEFAULT_INIT_PRIORITY;
6698 return
6699 snode->get_init_priority ();
6700 }
6701
6702 /* Return the finalization priority for DECL. */
6703
6704 priority_type
6705 decl_fini_priority_lookup (tree decl)
6706 {
6707 cgraph_node *node = cgraph_node::get (decl);
6708
6709 if (!node)
6710 return DEFAULT_INIT_PRIORITY;
6711 return
6712 node->get_fini_priority ();
6713 }
6714
6715 /* Set the initialization priority for DECL to PRIORITY. */
6716
6717 void
6718 decl_init_priority_insert (tree decl, priority_type priority)
6719 {
6720 struct symtab_node *snode;
6721
6722 if (priority == DEFAULT_INIT_PRIORITY)
6723 {
6724 snode = symtab_node::get (decl);
6725 if (!snode)
6726 return;
6727 }
6728 else if (VAR_P (decl))
6729 snode = varpool_node::get_create (decl);
6730 else
6731 snode = cgraph_node::get_create (decl);
6732 snode->set_init_priority (priority);
6733 }
6734
6735 /* Set the finalization priority for DECL to PRIORITY. */
6736
6737 void
6738 decl_fini_priority_insert (tree decl, priority_type priority)
6739 {
6740 struct cgraph_node *node;
6741
6742 if (priority == DEFAULT_INIT_PRIORITY)
6743 {
6744 node = cgraph_node::get (decl);
6745 if (!node)
6746 return;
6747 }
6748 else
6749 node = cgraph_node::get_create (decl);
6750 node->set_fini_priority (priority);
6751 }
6752
6753 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6754
6755 static void
6756 print_debug_expr_statistics (void)
6757 {
6758 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6759 (long) debug_expr_for_decl->size (),
6760 (long) debug_expr_for_decl->elements (),
6761 debug_expr_for_decl->collisions ());
6762 }
6763
6764 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6765
6766 static void
6767 print_value_expr_statistics (void)
6768 {
6769 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6770 (long) value_expr_for_decl->size (),
6771 (long) value_expr_for_decl->elements (),
6772 value_expr_for_decl->collisions ());
6773 }
6774
6775 /* Lookup a debug expression for FROM, and return it if we find one. */
6776
6777 tree
6778 decl_debug_expr_lookup (tree from)
6779 {
6780 struct tree_decl_map *h, in;
6781 in.base.from = from;
6782
6783 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6784 if (h)
6785 return h->to;
6786 return NULL_TREE;
6787 }
6788
6789 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6790
6791 void
6792 decl_debug_expr_insert (tree from, tree to)
6793 {
6794 struct tree_decl_map *h;
6795
6796 h = ggc_alloc<tree_decl_map> ();
6797 h->base.from = from;
6798 h->to = to;
6799 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6800 }
6801
6802 /* Lookup a value expression for FROM, and return it if we find one. */
6803
6804 tree
6805 decl_value_expr_lookup (tree from)
6806 {
6807 struct tree_decl_map *h, in;
6808 in.base.from = from;
6809
6810 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6811 if (h)
6812 return h->to;
6813 return NULL_TREE;
6814 }
6815
6816 /* Insert a mapping FROM->TO in the value expression hashtable. */
6817
6818 void
6819 decl_value_expr_insert (tree from, tree to)
6820 {
6821 struct tree_decl_map *h;
6822
6823 h = ggc_alloc<tree_decl_map> ();
6824 h->base.from = from;
6825 h->to = to;
6826 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6827 }
6828
6829 /* Lookup a vector of debug arguments for FROM, and return it if we
6830 find one. */
6831
6832 vec<tree, va_gc> **
6833 decl_debug_args_lookup (tree from)
6834 {
6835 struct tree_vec_map *h, in;
6836
6837 if (!DECL_HAS_DEBUG_ARGS_P (from))
6838 return NULL;
6839 gcc_checking_assert (debug_args_for_decl != NULL);
6840 in.base.from = from;
6841 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6842 if (h)
6843 return &h->to;
6844 return NULL;
6845 }
6846
/* Insert a mapping FROM->empty vector of debug arguments in the value
   expression hashtable.  */

vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
  struct tree_vec_map *h;
  tree_vec_map **loc;

  /* If FROM already has debug args, just hand back the existing slot.  */
  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  /* The table is created lazily on first insertion.  */
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  h->to = NULL;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  *loc = h;
  /* Set the flag only after the entry exists, so lookups that trust the
     flag never miss.  */
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
  return &h->to;
}
6868
6869 /* Hashing of types so that we don't make duplicates.
6870 The entry point is `type_hash_canon'. */
6871
/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  /* Every type contributes its tree code ...  */
  hstate.add_int (TREE_CODE (type));

  /* ... and the hash of the type it is derived from, if any.  */
  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  /* Mix in code-specific distinguishing data.  The contributions here
     must stay in sync with the comparisons in type_cache_hasher::equal.  */
  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Hash each argument type, skipping error marks.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE is only hashed for non-aggregate element
	   types; see the matching comment in type_cache_hasher::equal.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bounds; the max value is preferred, the min value is
	   the fallback when no max is recorded.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
6944
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Code-specific comparisons; these must mirror the contributions made
     in type_hash_canon_hash.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* Equal either by pointer identity of the value lists or by
	 element-wise comparison of two TREE_LISTs.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      /* Bounds must match either by pointer identity or by value.  */
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* FUNCTION_TYPE and METHOD_TYPE break out of the switch so the
     frontend gets the final say via its hash-equality hook.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
7067
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type already exists; free the freshly-built
	 duplicate and hand back the canonical one.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* Reclaim the UID if TYPE was the most recently created type.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  /* Only free bounds whose TREE_TYPE is the dying type itself;
	     shared bounds belong to someone else.  */
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* No equivalent type existed: record TYPE as the canonical one.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
7139
7140 static void
7141 print_type_hash_statistics (void)
7142 {
7143 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7144 (long) type_hash_table->size (),
7145 (long) type_hash_table->elements (),
7146 type_hash_table->collisions ());
7147 }
7148
7149 /* Given two lists of types
7150 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7151 return 1 if the lists contain the same types in the same order.
7152 Also, the TREE_PURPOSEs must match. */
7153
7154 bool
7155 type_list_equal (const_tree l1, const_tree l2)
7156 {
7157 const_tree t1, t2;
7158
7159 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7160 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7161 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7162 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7163 && (TREE_TYPE (TREE_PURPOSE (t1))
7164 == TREE_TYPE (TREE_PURPOSE (t2))))))
7165 return false;
7166
7167 return t1 == t2;
7168 }
7169
7170 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7171 given by TYPE. If the argument list accepts variable arguments,
7172 then this function counts only the ordinary arguments. */
7173
7174 int
7175 type_num_arguments (const_tree fntype)
7176 {
7177 int i = 0;
7178
7179 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7180 /* If the function does not take a variable number of arguments,
7181 the last element in the list will have type `void'. */
7182 if (VOID_TYPE_P (TREE_VALUE (t)))
7183 break;
7184 else
7185 ++i;
7186
7187 return i;
7188 }
7189
7190 /* Return the type of the function TYPE's argument ARGNO if known.
7191 For vararg function's where ARGNO refers to one of the variadic
7192 arguments return null. Otherwise, return a void_type_node for
7193 out-of-bounds ARGNO. */
7194
7195 tree
7196 type_argument_type (const_tree fntype, unsigned argno)
7197 {
7198 /* Treat zero the same as an out-of-bounds argument number. */
7199 if (!argno)
7200 return void_type_node;
7201
7202 function_args_iterator iter;
7203
7204 tree argtype;
7205 unsigned i = 1;
7206 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7207 {
7208 /* A vararg function's argument list ends in a null. Otherwise,
7209 an ordinary function's argument list ends with void. Return
7210 null if ARGNO refers to a vararg argument, void_type_node if
7211 it's out of bounds, and the formal argument type otherwise. */
7212 if (!argtype)
7213 break;
7214
7215 if (i == argno || VOID_TYPE_P (argtype))
7216 return argtype;
7217
7218 ++i;
7219 }
7220
7221 return NULL_TREE;
7222 }
7223
7224 /* Nonzero if integer constants T1 and T2
7225 represent the same constant value. */
7226
7227 int
7228 tree_int_cst_equal (const_tree t1, const_tree t2)
7229 {
7230 if (t1 == t2)
7231 return 1;
7232
7233 if (t1 == 0 || t2 == 0)
7234 return 0;
7235
7236 STRIP_ANY_LOCATION_WRAPPER (t1);
7237 STRIP_ANY_LOCATION_WRAPPER (t2);
7238
7239 if (TREE_CODE (t1) == INTEGER_CST
7240 && TREE_CODE (t2) == INTEGER_CST
7241 && wi::to_widest (t1) == wi::to_widest (t2))
7242 return 1;
7243
7244 return 0;
7245 }
7246
7247 /* Return true if T is an INTEGER_CST whose numerical value (extended
7248 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7249
7250 bool
7251 tree_fits_shwi_p (const_tree t)
7252 {
7253 return (t != NULL_TREE
7254 && TREE_CODE (t) == INTEGER_CST
7255 && wi::fits_shwi_p (wi::to_widest (t)));
7256 }
7257
7258 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7259 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7260
7261 bool
7262 tree_fits_poly_int64_p (const_tree t)
7263 {
7264 if (t == NULL_TREE)
7265 return false;
7266 if (POLY_INT_CST_P (t))
7267 {
7268 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7269 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7270 return false;
7271 return true;
7272 }
7273 return (TREE_CODE (t) == INTEGER_CST
7274 && wi::fits_shwi_p (wi::to_widest (t)));
7275 }
7276
7277 /* Return true if T is an INTEGER_CST whose numerical value (extended
7278 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7279
7280 bool
7281 tree_fits_uhwi_p (const_tree t)
7282 {
7283 return (t != NULL_TREE
7284 && TREE_CODE (t) == INTEGER_CST
7285 && wi::fits_uhwi_p (wi::to_widest (t)));
7286 }
7287
7288 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7289 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7290
7291 bool
7292 tree_fits_poly_uint64_p (const_tree t)
7293 {
7294 if (t == NULL_TREE)
7295 return false;
7296 if (POLY_INT_CST_P (t))
7297 {
7298 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7299 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7300 return false;
7301 return true;
7302 }
7303 return (TREE_CODE (t) == INTEGER_CST
7304 && wi::fits_uhwi_p (wi::to_widest (t)));
7305 }
7306
7307 /* T is an INTEGER_CST whose numerical value (extended according to
7308 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7309 HOST_WIDE_INT. */
7310
7311 HOST_WIDE_INT
7312 tree_to_shwi (const_tree t)
7313 {
7314 gcc_assert (tree_fits_shwi_p (t));
7315 return TREE_INT_CST_LOW (t);
7316 }
7317
7318 /* T is an INTEGER_CST whose numerical value (extended according to
7319 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7320 HOST_WIDE_INT. */
7321
7322 unsigned HOST_WIDE_INT
7323 tree_to_uhwi (const_tree t)
7324 {
7325 gcc_assert (tree_fits_uhwi_p (t));
7326 return TREE_INT_CST_LOW (t);
7327 }
7328
7329 /* Return the most significant (sign) bit of T. */
7330
7331 int
7332 tree_int_cst_sign_bit (const_tree t)
7333 {
7334 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7335
7336 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7337 }
7338
7339 /* Return an indication of the sign of the integer constant T.
7340 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7341 Note that -1 will never be returned if T's type is unsigned. */
7342
7343 int
7344 tree_int_cst_sgn (const_tree t)
7345 {
7346 if (wi::to_wide (t) == 0)
7347 return 0;
7348 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7349 return 1;
7350 else if (wi::neg_p (wi::to_wide (t)))
7351 return -1;
7352 else
7353 return 1;
7354 }
7355
7356 /* Return the minimum number of bits needed to represent VALUE in a
7357 signed or unsigned type, UNSIGNEDP says which. */
7358
7359 unsigned int
7360 tree_int_cst_min_precision (tree value, signop sgn)
7361 {
7362 /* If the value is negative, compute its negative minus 1. The latter
7363 adjustment is because the absolute value of the largest negative value
7364 is one larger than the largest positive value. This is equivalent to
7365 a bit-wise negation, so use that operation instead. */
7366
7367 if (tree_int_cst_sgn (value) < 0)
7368 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7369
7370 /* Return the number of bits needed, taking into account the fact
7371 that we need one more bit for a signed than unsigned type.
7372 If value is 0 or -1, the minimum precision is 1 no matter
7373 whether unsignedp is true or false. */
7374
7375 if (integer_zerop (value))
7376 return 1;
7377 else
7378 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7379 }
7380
7381 /* Return truthvalue of whether T1 is the same tree structure as T2.
7382 Return 1 if they are the same.
7383 Return 0 if they are understandably different.
7384 Return -1 if either contains tree structure not understood by
7385 this function. */
7386
7387 int
7388 simple_cst_equal (const_tree t1, const_tree t2)
7389 {
7390 enum tree_code code1, code2;
7391 int cmp;
7392 int i;
7393
7394 if (t1 == t2)
7395 return 1;
7396 if (t1 == 0 || t2 == 0)
7397 return 0;
7398
7399 /* For location wrappers to be the same, they must be at the same
7400 source location (and wrap the same thing). */
7401 if (location_wrapper_p (t1) && location_wrapper_p (t2))
7402 {
7403 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7404 return 0;
7405 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7406 }
7407
7408 code1 = TREE_CODE (t1);
7409 code2 = TREE_CODE (t2);
7410
7411 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7412 {
7413 if (CONVERT_EXPR_CODE_P (code2)
7414 || code2 == NON_LVALUE_EXPR)
7415 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7416 else
7417 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7418 }
7419
7420 else if (CONVERT_EXPR_CODE_P (code2)
7421 || code2 == NON_LVALUE_EXPR)
7422 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7423
7424 if (code1 != code2)
7425 return 0;
7426
7427 switch (code1)
7428 {
7429 case INTEGER_CST:
7430 return wi::to_widest (t1) == wi::to_widest (t2);
7431
7432 case REAL_CST:
7433 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7434
7435 case FIXED_CST:
7436 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7437
7438 case STRING_CST:
7439 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7440 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7441 TREE_STRING_LENGTH (t1)));
7442
7443 case CONSTRUCTOR:
7444 {
7445 unsigned HOST_WIDE_INT idx;
7446 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7447 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7448
7449 if (vec_safe_length (v1) != vec_safe_length (v2))
7450 return false;
7451
7452 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7453 /* ??? Should we handle also fields here? */
7454 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7455 return false;
7456 return true;
7457 }
7458
7459 case SAVE_EXPR:
7460 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7461
7462 case CALL_EXPR:
7463 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7464 if (cmp <= 0)
7465 return cmp;
7466 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7467 return 0;
7468 {
7469 const_tree arg1, arg2;
7470 const_call_expr_arg_iterator iter1, iter2;
7471 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7472 arg2 = first_const_call_expr_arg (t2, &iter2);
7473 arg1 && arg2;
7474 arg1 = next_const_call_expr_arg (&iter1),
7475 arg2 = next_const_call_expr_arg (&iter2))
7476 {
7477 cmp = simple_cst_equal (arg1, arg2);
7478 if (cmp <= 0)
7479 return cmp;
7480 }
7481 return arg1 == arg2;
7482 }
7483
7484 case TARGET_EXPR:
7485 /* Special case: if either target is an unallocated VAR_DECL,
7486 it means that it's going to be unified with whatever the
7487 TARGET_EXPR is really supposed to initialize, so treat it
7488 as being equivalent to anything. */
7489 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7490 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7491 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7492 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7493 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7494 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7495 cmp = 1;
7496 else
7497 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7498
7499 if (cmp <= 0)
7500 return cmp;
7501
7502 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7503
7504 case WITH_CLEANUP_EXPR:
7505 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7506 if (cmp <= 0)
7507 return cmp;
7508
7509 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7510
7511 case COMPONENT_REF:
7512 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7513 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7514
7515 return 0;
7516
7517 case VAR_DECL:
7518 case PARM_DECL:
7519 case CONST_DECL:
7520 case FUNCTION_DECL:
7521 return 0;
7522
7523 default:
7524 if (POLY_INT_CST_P (t1))
7525 /* A false return means maybe_ne rather than known_ne. */
7526 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7527 TYPE_SIGN (TREE_TYPE (t1))),
7528 poly_widest_int::from (poly_int_cst_value (t2),
7529 TYPE_SIGN (TREE_TYPE (t2))));
7530 break;
7531 }
7532
7533 /* This general rule works for most tree codes. All exceptions should be
7534 handled above. If this is a language-specific tree code, we can't
7535 trust what might be in the operand, so say we don't know
7536 the situation. */
7537 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7538 return -1;
7539
7540 switch (TREE_CODE_CLASS (code1))
7541 {
7542 case tcc_unary:
7543 case tcc_binary:
7544 case tcc_comparison:
7545 case tcc_expression:
7546 case tcc_reference:
7547 case tcc_statement:
7548 cmp = 1;
7549 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7550 {
7551 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7552 if (cmp <= 0)
7553 return cmp;
7554 }
7555
7556 return cmp;
7557
7558 default:
7559 return -1;
7560 }
7561 }
7562
7563 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7564 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7565 than U, respectively. */
7566
7567 int
7568 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7569 {
7570 if (tree_int_cst_sgn (t) < 0)
7571 return -1;
7572 else if (!tree_fits_uhwi_p (t))
7573 return 1;
7574 else if (TREE_INT_CST_LOW (t) == u)
7575 return 0;
7576 else if (TREE_INT_CST_LOW (t) < u)
7577 return -1;
7578 else
7579 return 1;
7580 }
7581
7582 /* Return true if SIZE represents a constant size that is in bounds of
7583 what the middle-end and the backend accepts (covering not more than
7584 half of the address-space).
7585 When PERR is non-null, set *PERR on failure to the description of
7586 why SIZE is not valid. */
7587
7588 bool
7589 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7590 {
7591 if (POLY_INT_CST_P (size))
7592 {
7593 if (TREE_OVERFLOW (size))
7594 return false;
7595 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7596 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7597 return false;
7598 return true;
7599 }
7600
7601 cst_size_error error;
7602 if (!perr)
7603 perr = &error;
7604
7605 if (TREE_CODE (size) != INTEGER_CST)
7606 {
7607 *perr = cst_size_not_constant;
7608 return false;
7609 }
7610
7611 if (TREE_OVERFLOW_P (size))
7612 {
7613 *perr = cst_size_overflow;
7614 return false;
7615 }
7616
7617 if (tree_int_cst_sgn (size) < 0)
7618 {
7619 *perr = cst_size_negative;
7620 return false;
7621 }
7622 if (!tree_fits_uhwi_p (size)
7623 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7624 < wi::to_widest (size) * 2))
7625 {
7626 *perr = cst_size_too_big;
7627 return false;
7628 }
7629
7630 return true;
7631 }
7632
7633 /* Return the precision of the type, or for a complex or vector type the
7634 precision of the type of its elements. */
7635
7636 unsigned int
7637 element_precision (const_tree type)
7638 {
7639 if (!TYPE_P (type))
7640 type = TREE_TYPE (type);
7641 enum tree_code code = TREE_CODE (type);
7642 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7643 type = TREE_TYPE (type);
7644
7645 return TYPE_PRECISION (type);
7646 }
7647
7648 /* Return true if CODE represents an associative tree code. Otherwise
7649 return false. */
7650 bool
7651 associative_tree_code (enum tree_code code)
7652 {
7653 switch (code)
7654 {
7655 case BIT_IOR_EXPR:
7656 case BIT_AND_EXPR:
7657 case BIT_XOR_EXPR:
7658 case PLUS_EXPR:
7659 case MULT_EXPR:
7660 case MIN_EXPR:
7661 case MAX_EXPR:
7662 return true;
7663
7664 default:
7665 break;
7666 }
7667 return false;
7668 }
7669
7670 /* Return true if CODE represents a commutative tree code. Otherwise
7671 return false. */
7672 bool
7673 commutative_tree_code (enum tree_code code)
7674 {
7675 switch (code)
7676 {
7677 case PLUS_EXPR:
7678 case MULT_EXPR:
7679 case MULT_HIGHPART_EXPR:
7680 case MIN_EXPR:
7681 case MAX_EXPR:
7682 case BIT_IOR_EXPR:
7683 case BIT_XOR_EXPR:
7684 case BIT_AND_EXPR:
7685 case NE_EXPR:
7686 case EQ_EXPR:
7687 case UNORDERED_EXPR:
7688 case ORDERED_EXPR:
7689 case UNEQ_EXPR:
7690 case LTGT_EXPR:
7691 case TRUTH_AND_EXPR:
7692 case TRUTH_XOR_EXPR:
7693 case TRUTH_OR_EXPR:
7694 case WIDEN_MULT_EXPR:
7695 case VEC_WIDEN_MULT_HI_EXPR:
7696 case VEC_WIDEN_MULT_LO_EXPR:
7697 case VEC_WIDEN_MULT_EVEN_EXPR:
7698 case VEC_WIDEN_MULT_ODD_EXPR:
7699 return true;
7700
7701 default:
7702 break;
7703 }
7704 return false;
7705 }
7706
7707 /* Return true if CODE represents a ternary tree code for which the
7708 first two operands are commutative. Otherwise return false. */
7709 bool
7710 commutative_ternary_tree_code (enum tree_code code)
7711 {
7712 switch (code)
7713 {
7714 case WIDEN_MULT_PLUS_EXPR:
7715 case WIDEN_MULT_MINUS_EXPR:
7716 case DOT_PROD_EXPR:
7717 return true;
7718
7719 default:
7720 break;
7721 }
7722 return false;
7723 }
7724
7725 /* Returns true if CODE can overflow. */
7726
7727 bool
7728 operation_can_overflow (enum tree_code code)
7729 {
7730 switch (code)
7731 {
7732 case PLUS_EXPR:
7733 case MINUS_EXPR:
7734 case MULT_EXPR:
7735 case LSHIFT_EXPR:
7736 /* Can overflow in various ways. */
7737 return true;
7738 case TRUNC_DIV_EXPR:
7739 case EXACT_DIV_EXPR:
7740 case FLOOR_DIV_EXPR:
7741 case CEIL_DIV_EXPR:
7742 /* For INT_MIN / -1. */
7743 return true;
7744 case NEGATE_EXPR:
7745 case ABS_EXPR:
7746 /* For -INT_MIN. */
7747 return true;
7748 default:
7749 /* These operators cannot overflow. */
7750 return false;
7751 }
7752 }
7753
7754 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7755 ftrapv doesn't generate trapping insns for CODE. */
7756
7757 bool
7758 operation_no_trapping_overflow (tree type, enum tree_code code)
7759 {
7760 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7761
7762 /* We don't generate instructions that trap on overflow for complex or vector
7763 types. */
7764 if (!INTEGRAL_TYPE_P (type))
7765 return true;
7766
7767 if (!TYPE_OVERFLOW_TRAPS (type))
7768 return true;
7769
7770 switch (code)
7771 {
7772 case PLUS_EXPR:
7773 case MINUS_EXPR:
7774 case MULT_EXPR:
7775 case NEGATE_EXPR:
7776 case ABS_EXPR:
7777 /* These operators can overflow, and -ftrapv generates trapping code for
7778 these. */
7779 return false;
7780 case TRUNC_DIV_EXPR:
7781 case EXACT_DIV_EXPR:
7782 case FLOOR_DIV_EXPR:
7783 case CEIL_DIV_EXPR:
7784 case LSHIFT_EXPR:
7785 /* These operators can overflow, but -ftrapv does not generate trapping
7786 code for these. */
7787 return true;
7788 default:
7789 /* These operators cannot overflow. */
7790 return true;
7791 }
7792 }
7793
7794 /* Constructors for pointer, array and function types.
7795 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7796 constructed by language-dependent code, not here.) */
7797
/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
   reference all of memory.  If such a type has already been
   constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request: the canonical-type decision
     below is keyed off it, not off the value may_alias may force on.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Walk the TYPE_NEXT_PTR_TO chain of
     previously built pointer types.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type into TO_TYPE's chain of pointer types.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  /* The canonical pointer is the pointer to the canonical pointed-to
     type with CAN_ALIAS_ALL false (see COULD_ALIAS above).  */
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
7858
7859 /* By default build pointers in ptr_mode. */
7860
7861 tree
7862 build_pointer_type (tree to_type)
7863 {
7864 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7865 : TYPE_ADDR_SPACE (to_type);
7866 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7867 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7868 }
7869
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request: the canonical-type decision
     below is keyed off it, not off the value may_alias may force on.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  Walk the TYPE_NEXT_REF_TO chain of
     previously built reference types.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Link the new type into TO_TYPE's chain of reference types.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  /* The canonical reference is the reference to the canonical pointed-to
     type with CAN_ALIAS_ALL false (see COULD_ALIAS above).  */
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
7925
7926
7927 /* Build the node for the type of references-to-TO_TYPE by default
7928 in ptr_mode. */
7929
7930 tree
7931 build_reference_type (tree to_type)
7932 {
7933 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7934 : TYPE_ADDR_SPACE (to_type);
7935 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7936 return build_reference_type_for_mode (to_type, pointer_mode, false);
7937 }
7938
7939 #define MAX_INT_CACHED_PREC \
7940 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7941 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7942
7943 /* Builds a signed or unsigned integer type of precision PRECISION.
7944 Used for C bitfields whose precision does not match that of
7945 built-in target types. */
7946 tree
7947 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7948 int unsignedp)
7949 {
7950 tree itype, ret;
7951
7952 if (unsignedp)
7953 unsignedp = MAX_INT_CACHED_PREC + 1;
7954
7955 if (precision <= MAX_INT_CACHED_PREC)
7956 {
7957 itype = nonstandard_integer_type_cache[precision + unsignedp];
7958 if (itype)
7959 return itype;
7960 }
7961
7962 itype = make_node (INTEGER_TYPE);
7963 TYPE_PRECISION (itype) = precision;
7964
7965 if (unsignedp)
7966 fixup_unsigned_type (itype);
7967 else
7968 fixup_signed_type (itype);
7969
7970 inchash::hash hstate;
7971 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7972 ret = type_hash_canon (hstate.end (), itype);
7973 if (precision <= MAX_INT_CACHED_PREC)
7974 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7975
7976 return ret;
7977 }
7978
7979 #define MAX_BOOL_CACHED_PREC \
7980 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7981 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7982
7983 /* Builds a boolean type of precision PRECISION.
7984 Used for boolean vectors to choose proper vector element size. */
7985 tree
7986 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7987 {
7988 tree type;
7989
7990 if (precision <= MAX_BOOL_CACHED_PREC)
7991 {
7992 type = nonstandard_boolean_type_cache[precision];
7993 if (type)
7994 return type;
7995 }
7996
7997 type = make_node (BOOLEAN_TYPE);
7998 TYPE_PRECISION (type) = precision;
7999 fixup_signed_type (type);
8000
8001 if (precision <= MAX_INT_CACHED_PREC)
8002 nonstandard_boolean_type_cache[precision] = type;
8003
8004 return type;
8005 }
8006
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  /* HIGHVAL may be NULL for a range with no upper bound.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type inherits the layout properties of its base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
  SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));

  if (!shared)
    return itype;

  /* Non-INTEGER_CST bounds cannot be hashed reliably.  */
  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Share identical range types through the type hash table.  */
  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}
8048
8049 /* Wrapper around build_range_type_1 with SHARED set to true. */
8050
8051 tree
8052 build_range_type (tree type, tree lowval, tree highval)
8053 {
8054 return build_range_type_1 (type, lowval, highval, true);
8055 }
8056
8057 /* Wrapper around build_range_type_1 with SHARED set to false. */
8058
8059 tree
8060 build_nonshared_range_type (tree type, tree lowval, tree highval)
8061 {
8062 return build_range_type_1 (type, lowval, highval, false);
8063 }
8064
8065 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8066 MAXVAL should be the maximum value in the domain
8067 (one less than the length of the array).
8068
8069 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8070 We don't enforce this limit, that is up to caller (e.g. language front end).
8071 The limit exists because the result is a signed type and we don't handle
8072 sizes that use more than one HOST_WIDE_INT. */
8073
8074 tree
8075 build_index_type (tree maxval)
8076 {
8077 return build_range_type (sizetype, size_zero_node, maxval);
8078 }
8079
8080 /* Return true if the debug information for TYPE, a subtype, should be emitted
8081 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8082 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8083 debug info and doesn't reflect the source code. */
8084
8085 bool
8086 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8087 {
8088 tree base_type = TREE_TYPE (type), low, high;
8089
8090 /* Subrange types have a base type which is an integral type. */
8091 if (!INTEGRAL_TYPE_P (base_type))
8092 return false;
8093
8094 /* Get the real bounds of the subtype. */
8095 if (lang_hooks.types.get_subrange_bounds)
8096 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8097 else
8098 {
8099 low = TYPE_MIN_VALUE (type);
8100 high = TYPE_MAX_VALUE (type);
8101 }
8102
8103 /* If the type and its base type have the same representation and the same
8104 name, then the type is not a subrange but a copy of the base type. */
8105 if ((TREE_CODE (base_type) == INTEGER_TYPE
8106 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8107 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8108 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8109 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8110 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8111 return false;
8112
8113 if (lowval)
8114 *lowval = low;
8115 if (highval)
8116 *highval = high;
8117 return true;
8118 }
8119
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.  */

static tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared)
{
  tree t;

  /* Arrays of functions are invalid; recover by substituting int
     elements after diagnosing.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  /* If the element type is incomplete at this point we get marked for
     structural equality.  Do not record these types in the canonical
     type hashtable.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    return t;

  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Only set up a canonical type if the hash table didn't hand back a
     pre-existing type (whose canonical type is already correct).  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* The canonical array is built from the canonical element and
	   index types.  INDEX_TYPE may be NULL for an incomplete array.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared);
    }

  return t;
}
8173
8174 /* Wrapper around build_array_type_1 with SHARED set to true. */
8175
8176 tree
8177 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8178 {
8179 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
8180 }
8181
8182 /* Wrapper around build_array_type_1 with SHARED set to false. */
8183
8184 tree
8185 build_nonshared_array_type (tree elt_type, tree index_type)
8186 {
8187 return build_array_type_1 (elt_type, index_type, false, false);
8188 }
8189
8190 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8191 sizetype. */
8192
8193 tree
8194 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8195 {
8196 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8197 }
8198
8199 /* Recursively examines the array elements of TYPE, until a non-array
8200 element type is found. */
8201
8202 tree
8203 strip_array_types (tree type)
8204 {
8205 while (TREE_CODE (type) == ARRAY_TYPE)
8206 type = TREE_TYPE (type);
8207
8208 return type;
8209 }
8210
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the arguments.  Stop early once a structural
     type is seen, since then the whole list is treated as structural.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  /* Structural lists don't need a canonical copy.  */
  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  /* The void_list_node terminator is shared, not copied.  */
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
8282
/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types)
{
  tree t;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  gcc_assert (arg_types != error_mark_node);

  /* Functions cannot return functions; recover by substituting int
     after diagnosing.  */
  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  It is built from the canonical return
     type and the canonicalized argument list, unless anything involved
     requires structural equality.  */
  any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
					      canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}
8331
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the argument types into a TREE_LIST, in reverse order.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* Varargs: leave the list unterminated (no trailing
	 void_list_node).  LAST is the head of the still-reversed list,
	 i.e. the cons holding the final supplied argument; assert the
	 caller did not explicitly terminate it with void.  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    /* No named arguments at all: "(void)".  */
    args = void_list_node;
  else
    {
      /* Terminate the reversed list with void_list_node; LAST ends up
	 as the final named argument after nreverse.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
8364
8365 /* Build a function type. The RETURN_TYPE is the type returned by the
8366 function. If additional arguments are provided, they are
8367 additional argument types. The list of argument types must always
8368 be terminated by NULL_TREE. */
8369
8370 tree
8371 build_function_type_list (tree return_type, ...)
8372 {
8373 tree args;
8374 va_list p;
8375
8376 va_start (p, return_type);
8377 args = build_function_type_list_1 (false, return_type, p);
8378 va_end (p);
8379 return args;
8380 }
8381
8382 /* Build a variable argument function type. The RETURN_TYPE is the
8383 type returned by the function. If additional arguments are provided,
8384 they are additional argument types. The list of argument types must
8385 always be terminated by NULL_TREE. */
8386
8387 tree
8388 build_varargs_function_type_list (tree return_type, ...)
8389 {
8390 tree args;
8391 va_list p;
8392
8393 va_start (p, return_type);
8394 args = build_function_type_list_1 (true, return_type, p);
8395 va_end (p);
8396
8397 return args;
8398 }
8399
8400 /* Build a function type. RETURN_TYPE is the type returned by the
8401 function; VAARGS indicates whether the function takes varargs. The
8402 function takes N named arguments, the types of which are provided in
8403 ARG_TYPES. */
8404
8405 static tree
8406 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8407 tree *arg_types)
8408 {
8409 int i;
8410 tree t = vaargs ? NULL_TREE : void_list_node;
8411
8412 for (i = n - 1; i >= 0; i--)
8413 t = tree_cons (NULL_TREE, arg_types[i], t);
8414
8415 return build_function_type (return_type, t);
8416 }
8417
8418 /* Build a function type. RETURN_TYPE is the type returned by the
8419 function. The function takes N named arguments, the types of which
8420 are provided in ARG_TYPES. */
8421
8422 tree
8423 build_function_type_array (tree return_type, int n, tree *arg_types)
8424 {
8425 return build_function_type_array_1 (false, return_type, n, arg_types);
8426 }
8427
8428 /* Build a variable argument function type. RETURN_TYPE is the type
8429 returned by the function. The function takes N named arguments, the
8430 types of which are provided in ARG_TYPES. */
8431
8432 tree
8433 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8434 {
8435 return build_function_type_array_1 (true, return_type, n, arg_types);
8436 }
8437
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  It is built from the canonical base,
     return and argument types; note that the "this" argument prepended
     above is skipped (TREE_CHAIN) since the canonical recursion re-adds
     its own.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
8491
8492 /* Construct, lay out and return the type of methods belonging to class
8493 BASETYPE and whose arguments and values are described by TYPE.
8494 If that type exists already, reuse it.
8495 TYPE must be a FUNCTION_TYPE node. */
8496
8497 tree
8498 build_method_type (tree basetype, tree type)
8499 {
8500 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8501
8502 return build_method_type_directly (basetype,
8503 TREE_TYPE (type),
8504 TYPE_ARG_TYPES (type));
8505 }
8506
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* Only set up a canonical type if the hash table didn't hand back a
     pre-existing type (whose canonical type is already correct).  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	/* The canonical offset type is built from the canonical main
	   variant of the base type and the canonical value type.  */
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
8543
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  /* Only integral, real and fixed-point scalars can be complexed.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  /* Key the probe on the main variant; COMPONENT_TYPE's qualifiers are
     reapplied at the end via build_qualified_type.  */
  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      /* The canonical complex type follows from the canonical component.  */
      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  /* Non-standard component types (e.g. float variants) get no
	     name here; NAME stays NULL for them.  */
	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  return build_qualified_type (t, TYPE_QUALS (component_type));
}
8618
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : EXCESS_PRECISION_TYPE_STANDARD);

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* float16_type_node may not exist on every target, hence the guard.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  /* Match TYPE's mode against the modes the evaluation method promotes,
     returning the corresponding wider (possibly complex) type.  */
  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	/* Only complex-of-real participates in excess precision.  */
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
8722 \f
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  /* WIN tracks the innermost expression found so far that is still a
     safe replacement for OP.  */
  tree win = op;

  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive when this conversion widened its operand.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  /* Ask the frontend for an integer type of PREC bits; it may
	     legitimately return NULL, in which case keep WIN as-is.  */
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
8819 \f
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  /* FIRST is nonzero until the first extension (or no-op conversion)
     has been stripped; that first one decides the extension's sign.  */
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  while (TREE_CODE (op) == NOP_EXPR)
    {
      /* Positive when this conversion widened its operand.  */
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  /* A COMPONENT_REF of a narrow field can itself act as the narrower
     value, read directly in an integer type of the field's size.  */
  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
8913 \f
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

  /* Jumped back to from below with TYPE replaced by its base type when
     the bounds alone could not decide.  */
retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types, */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there nothing more we
     can check ourselves here.  Look at the base type if we have one and it
     has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
9010
/* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
   bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
   represented (assuming two's-complement arithmetic) within the bit
   precision of the type are returned instead.

   NOTE(review): MIN and MAX appear to be expected already-initialized
   mpz_t objects (they are only assigned to here) — confirm with callers.  */

void
get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
{
  /* Use the type's declared minimum when it is an integer constant;
     otherwise fall back to the smallest value the precision allows.  */
  if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
      && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
    wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
  else
    {
      if (TYPE_UNSIGNED (type))
	mpz_set_ui (min, 0);
      else
	{
	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
	  wi::to_mpz (mn, min, SIGNED);
	}
    }

  /* Likewise for the maximum.  */
  if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
      && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
    wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
  else
    {
      wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
      wi::to_mpz (mn, max, TYPE_SIGN (type));
    }
}
9042
9043 /* Return true if VAR is an automatic variable. */
9044
9045 bool
9046 auto_var_p (const_tree var)
9047 {
9048 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9049 || TREE_CODE (var) == PARM_DECL)
9050 && ! TREE_STATIC (var))
9051 || TREE_CODE (var) == RESULT_DECL);
9052 }
9053
9054 /* Return true if VAR is an automatic variable defined in function FN. */
9055
9056 bool
9057 auto_var_in_fn_p (const_tree var, const_tree fn)
9058 {
9059 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9060 && (auto_var_p (var)
9061 || TREE_CODE (var) == LABEL_DECL));
9062 }
9063
9064 /* Subprogram of following function. Called by walk_tree.
9065
9066 Return *TP if it is an automatic variable or parameter of the
9067 function passed in as DATA. */
9068
9069 static tree
9070 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9071 {
9072 tree fn = (tree) data;
9073
9074 if (TYPE_P (*tp))
9075 *walk_subtrees = 0;
9076
9077 else if (DECL_P (*tp)
9078 && auto_var_in_fn_p (*tp, fn))
9079 return *tp;
9080
9081 return NULL_TREE;
9082 }
9083
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

  /* Test if T is either variable (if FN is zero) or an expression containing
     a variable in FN.  If TYPE isn't gimplified, return true also if
     gimplify_one_sizepos would gimplify the expression into a local
     variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types refering to themselves indirectly.
	 TREE_VISITED serves as the cycle guard for the recursion.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* QUAL_UNION members are selected by a qualifier expression,
	       which may itself reference a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
9199
9200 /* Given a DECL or TYPE, return the scope in which it was declared, or
9201 NULL_TREE if there is no containing scope. */
9202
9203 tree
9204 get_containing_scope (const_tree t)
9205 {
9206 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9207 }
9208
9209 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9210
9211 const_tree
9212 get_ultimate_context (const_tree decl)
9213 {
9214 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9215 {
9216 if (TREE_CODE (decl) == BLOCK)
9217 decl = BLOCK_SUPERCONTEXT (decl);
9218 else
9219 decl = get_containing_scope (decl);
9220 }
9221 return decl;
9222 }
9223
9224 /* Return the innermost context enclosing DECL that is
9225 a FUNCTION_DECL, or zero if none. */
9226
9227 tree
9228 decl_function_context (const_tree decl)
9229 {
9230 tree context;
9231
9232 if (TREE_CODE (decl) == ERROR_MARK)
9233 return 0;
9234
9235 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9236 where we look up the function at runtime. Such functions always take
9237 a first argument of type 'pointer to real context'.
9238
9239 C++ should really be fixed to use DECL_CONTEXT for the real context,
9240 and use something else for the "virtual context". */
9241 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9242 context
9243 = TYPE_MAIN_VARIANT
9244 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9245 else
9246 context = DECL_CONTEXT (decl);
9247
9248 while (context && TREE_CODE (context) != FUNCTION_DECL)
9249 {
9250 if (TREE_CODE (context) == BLOCK)
9251 context = BLOCK_SUPERCONTEXT (context);
9252 else
9253 context = get_containing_scope (context);
9254 }
9255
9256 return context;
9257 }
9258
9259 /* Return the innermost context enclosing DECL that is
9260 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9261 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9262
9263 tree
9264 decl_type_context (const_tree decl)
9265 {
9266 tree context = DECL_CONTEXT (decl);
9267
9268 while (context)
9269 switch (TREE_CODE (context))
9270 {
9271 case NAMESPACE_DECL:
9272 case TRANSLATION_UNIT_DECL:
9273 return NULL_TREE;
9274
9275 case RECORD_TYPE:
9276 case UNION_TYPE:
9277 case QUAL_UNION_TYPE:
9278 return context;
9279
9280 case TYPE_DECL:
9281 case FUNCTION_DECL:
9282 context = DECL_CONTEXT (context);
9283 break;
9284
9285 case BLOCK:
9286 context = BLOCK_SUPERCONTEXT (context);
9287 break;
9288
9289 default:
9290 gcc_unreachable ();
9291 }
9292
9293 return NULL_TREE;
9294 }
9295
/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  /* Propagate errors unchanged rather than asserting on them.  */
  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  /* Look through no-op conversions around the callee address.  */
  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
9337
9338 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9339 return the associated function code, otherwise return CFN_LAST. */
9340
9341 combined_fn
9342 get_call_combined_fn (const_tree call)
9343 {
9344 /* It's invalid to call this function with anything but a CALL_EXPR. */
9345 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9346
9347 if (!CALL_EXPR_FN (call))
9348 return as_combined_fn (CALL_EXPR_IFN (call));
9349
9350 tree fndecl = get_callee_fndecl (call);
9351 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9352 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9353
9354 return CFN_LAST;
9355 }
9356
9357 /* Comparator of indices based on tree_node_counts. */
9358
9359 static int
9360 tree_nodes_cmp (const void *p1, const void *p2)
9361 {
9362 const unsigned *n1 = (const unsigned *)p1;
9363 const unsigned *n2 = (const unsigned *)p2;
9364
9365 return tree_node_counts[*n1] - tree_node_counts[*n2];
9366 }
9367
9368 /* Comparator of indices based on tree_code_counts. */
9369
9370 static int
9371 tree_codes_cmp (const void *p1, const void *p2)
9372 {
9373 const unsigned *n1 = (const unsigned *)p1;
9374 const unsigned *n2 = (const unsigned *)p2;
9375
9376 return tree_code_counts[*n1] - tree_code_counts[*n2];
9377 }
9378
9379 #define TREE_MEM_USAGE_SPACES 40
9380
9381 /* Print debugging information about tree nodes generated during the compile,
9382 and any language-specific information. */
9383
9384 void
9385 dump_tree_statistics (void)
9386 {
9387 if (GATHER_STATISTICS)
9388 {
9389 uint64_t total_nodes, total_bytes;
9390 fprintf (stderr, "\nKind Nodes Bytes\n");
9391 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9392 total_nodes = total_bytes = 0;
9393
9394 {
9395 auto_vec<unsigned> indices (all_kinds);
9396 for (unsigned i = 0; i < all_kinds; i++)
9397 indices.quick_push (i);
9398 indices.qsort (tree_nodes_cmp);
9399
9400 for (unsigned i = 0; i < (int) all_kinds; i++)
9401 {
9402 unsigned j = indices[i];
9403 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9404 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9405 SIZE_AMOUNT (tree_node_sizes[j]));
9406 total_nodes += tree_node_counts[j];
9407 total_bytes += tree_node_sizes[j];
9408 }
9409 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9410 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9411 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9412 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9413 }
9414
9415 {
9416 fprintf (stderr, "Code Nodes\n");
9417 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9418
9419 auto_vec<unsigned> indices (MAX_TREE_CODES);
9420 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9421 indices.quick_push (i);
9422 indices.qsort (tree_codes_cmp);
9423
9424 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9425 {
9426 unsigned j = indices[i];
9427 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9428 get_tree_code_name ((enum tree_code) j),
9429 SIZE_AMOUNT (tree_code_counts[j]));
9430 }
9431 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9432 fprintf (stderr, "\n");
9433 ssanames_print_statistics ();
9434 fprintf (stderr, "\n");
9435 phinodes_print_statistics ();
9436 fprintf (stderr, "\n");
9437 }
9438 }
9439 else
9440 fprintf (stderr, "(No per-node statistics)\n");
9441
9442 print_type_hash_statistics ();
9443 print_debug_expr_statistics ();
9444 print_value_expr_statistics ();
9445 lang_hooks.print_statistics ();
9446 }
9447 \f
9448 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9449
/* Generate a crc32 of the low BYTES bytes of VALUE, using the
   polynomial 0x04c11db7 and folding in one nibble per step.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the significant bytes so the most significant data
     nibble occupies the top nibble of VALUE.  */
  value <<= (32 - bytes * 8);

  /* Two nibbles per byte, most significant nibble first.  */
  for (unsigned ix = 0; ix < bytes * 2; ix++)
    {
      unsigned nibble = ((value ^ chksum) >> 28) & 0xf;

      chksum = (chksum << 4) ^ syndromes[nibble];
      value <<= 4;
    }

  return chksum;
}
9479
9480 /* Generate a crc32 of a string. */
9481
9482 unsigned
9483 crc32_string (unsigned chksum, const char *string)
9484 {
9485 do
9486 chksum = crc32_byte (chksum, *string);
9487 while (*string++);
9488 return chksum;
9489 }
9490
9491 /* P is a string that will be used in a symbol. Mask out any characters
9492 that are not valid in that context. */
9493
9494 void
9495 clean_symbol_name (char *p)
9496 {
9497 for (; *p; p++)
9498 if (! (ISALNUM (*p)
9499 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9500 || *p == '$'
9501 #endif
9502 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9503 || *p == '.'
9504 #endif
9505 ))
9506 *p = '_';
9507 }
9508
/* Monotonic counter used by make_anon_name to keep anonymous
   identifiers unique; GTY-marked so its value survives PCH.  */
static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */
9510
9511 /* Create a unique anonymous identifier. The identifier is still a
9512 valid assembly label. */
9513
9514 tree
9515 make_anon_name ()
9516 {
9517 const char *fmt =
9518 #if !defined (NO_DOT_IN_LABEL)
9519 "."
9520 #elif !defined (NO_DOLLAR_IN_LABEL)
9521 "$"
9522 #else
9523 "_"
9524 #endif
9525 "_anon_%d";
9526
9527 char buf[24];
9528 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9529 gcc_checking_assert (len < int (sizeof (buf)));
9530
9531 tree id = get_identifier_with_length (buf, len);
9532 IDENTIFIER_ANON_P (id) = true;
9533
9534 return id;
9535 }
9536
9537 /* Generate a name for a special-purpose function.
9538 The generated name may need to be unique across the whole link.
9539 Changes to this function may also require corresponding changes to
9540 xstrdup_mask_random.
9541 TYPE is some string to identify the purpose of this function to the
9542 linker or collect2; it must start with an uppercase letter,
9543 one of:
9544 I - for constructors
9545 D - for destructors
9546 N - for C++ anonymous namespaces
9547 F - for DWARF unwind frame information. */
9548
tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (strncmp (type, "sub_", 4) == 0
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* Room for the file name, the "_%08X_" CRC and the printed
	 random seed, plus the trailing NUL.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace characters not valid in a symbol with '_'.  */
  clean_symbol_name (q);

  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
9611 \f
9612 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9613
9614 /* Complain that the tree code of NODE does not match the expected 0
9615 terminated list of trailing codes. The trailing code list can be
9616 empty, for a more vague error message. FILE, LINE, and FUNCTION
9617 are of the caller. */
9618
void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: measure the space the "expected A
     or B ..." message needs.  The 4 covers a " or " separator (and
     more than covers the final NUL).  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: format the message into a stack buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* No expected codes were supplied; emit the vaguer message.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9657
9658 /* Complain that the tree code of NODE does match the expected 0
9659 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9660 the caller. */
9661
void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: measure the space the message needs.
     The 4 covers a " or " separator (and the final NUL).  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build the "A or B ..." list.  NOTE(review): unlike
     tree_check_failed, this assumes at least one code was passed; with
     none, BUFFER would be used uninitialized below — verify callers.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9694
9695 /* Similar to tree_check_failed, except that we check for a class of tree
9696 code, given in CL. */
9697
void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  /* Report the expected class CL against NODE's actual class and code
     via internal_error.  */
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9708
9709 /* Similar to tree_check_failed, except that instead of specifying a
9710 dozen codes, use the knowledge that they're all sequential. */
9711
9712 void
9713 tree_range_check_failed (const_tree node, const char *file, int line,
9714 const char *function, enum tree_code c1,
9715 enum tree_code c2)
9716 {
9717 char *buffer;
9718 unsigned length = 0;
9719 unsigned int c;
9720
9721 for (c = c1; c <= c2; ++c)
9722 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9723
9724 length += strlen ("expected ");
9725 buffer = (char *) alloca (length);
9726 length = 0;
9727
9728 for (c = c1; c <= c2; ++c)
9729 {
9730 const char *prefix = length ? " or " : "expected ";
9731
9732 strcpy (buffer + length, prefix);
9733 length += strlen (prefix);
9734 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9735 length += strlen (get_tree_code_name ((enum tree_code) c));
9736 }
9737
9738 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9739 buffer, get_tree_code_name (TREE_CODE (node)),
9740 function, trim_filename (file), line);
9741 }
9742
9743
9744 /* Similar to tree_check_failed, except that we check that a tree does
9745 not have the specified code, given in CL. */
9746
void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
			     const char *file, int line, const char *function)
{
  /* Report that NODE's class is CL, which the caller forbade, via
     internal_error.  */
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9757
9758
9759 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9760
void
omp_clause_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum omp_clause_code code)
{
  /* Report the clause CODE that was expected against the tree code
     NODE actually has.  */
  internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
		  "in %s, at %s:%d",
		  omp_clause_code_name[code],
		  get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9771
9772
9773 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9774
9775 void
9776 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9777 const char *function, enum omp_clause_code c1,
9778 enum omp_clause_code c2)
9779 {
9780 char *buffer;
9781 unsigned length = 0;
9782 unsigned int c;
9783
9784 for (c = c1; c <= c2; ++c)
9785 length += 4 + strlen (omp_clause_code_name[c]);
9786
9787 length += strlen ("expected ");
9788 buffer = (char *) alloca (length);
9789 length = 0;
9790
9791 for (c = c1; c <= c2; ++c)
9792 {
9793 const char *prefix = length ? " or " : "expected ";
9794
9795 strcpy (buffer + length, prefix);
9796 length += strlen (prefix);
9797 strcpy (buffer + length, omp_clause_code_name[c]);
9798 length += strlen (omp_clause_code_name[c]);
9799 }
9800
9801 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9802 buffer, omp_clause_code_name[TREE_CODE (node)],
9803 function, trim_filename (file), line);
9804 }
9805
9806
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

/* Printable names for the tree_node_structure_enum values, in the
   order treestruct.def defines them.  */
static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9816
9817 /* Similar to tree_class_check_failed, except that we check for
9818 whether CODE contains the tree structure identified by EN. */
9819
void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  /* Report the structure EN that was expected against NODE's actual
     tree code.  */
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9831
9832
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) vector of elements.  */
9835
void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  /* IDX is zero-based; it is reported one-based alongside the actual
     element count LEN.  */
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9845
9846 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9847 (dynamically sized) vector. */
9848
void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  /* IDX is zero-based; it is reported one-based alongside the actual
     element count LEN.  */
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9857
9858 /* Similar to above, except that the check is for the bounds of the operand
9859 vector of an expression node EXP. */
9860
void
tree_operand_check_failed (int idx, const_tree exp, const char *file,
			   int line, const char *function)
{
  enum tree_code code = TREE_CODE (exp);
  /* IDX is zero-based; it is reported one-based alongside EXP's actual
     operand count.  */
  internal_error
    ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
     idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
     function, trim_filename (file), line);
}
9871
9872 /* Similar to above, except that the check is for the number of
9873 operands of an OMP_CLAUSE node. */
9874
void
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
				 int line, const char *function)
{
  /* IDX is zero-based; it is reported one-based alongside the clause's
     actual operand count from omp_clause_num_ops.  */
  internal_error
    ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
     "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
     omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
     trim_filename (file), line);
}
9885 #endif /* ENABLE_TREE_CHECKING */
9886 \f
9887 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9888 and mapped to the machine mode MODE. Initialize its fields and build
9889 the information necessary for debugging output. */
9890
static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Base the new type on the main variant of the element type.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  /* Use structural equality when the element type does (or in LTO);
     otherwise give the type a canonical variant when one is needed.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* The canonical variant is built from the canonical element type
       with VOIDmode.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Share identical vector types through the type hash table.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
9925
/* Return an integer type with SIZE bits of precision and signedness
   given by UNSIGNEDP, reusing a standard C type node when the size
   matches one; otherwise create a fresh type.  */

static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
            : long_long_integer_type_node);

  /* Also check the target's enabled __intN types.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (size == int_n_data[i].bitsize
	&& int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
	      : int_n_trees[i].signed_type);

  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}
9954
9955 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9956
static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      /* Saturating fract types.  */
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  else
    {
      /* Non-saturating fract types.  */
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  /* No standard node of this size; create one.  */
  return make_fract_type (size, unsignedp, satp);
}
9991
9992 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9993
static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      /* Saturating accum types.  */
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  else
    {
      /* Non-saturating accum types.  */
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  /* No standard node of this size; create one.  */
  return make_accum_type (size, unsignedp, satp);
}
10028
10029
10030 /* Create an atomic variant node for TYPE. This routine is called
10031 during initialization of data types to create the 5 basic atomic
10032 types. The generic build_variant_type function requires these to
10033 already be set up in order to function properly, so cannot be
10034 called from there. If ALIGN is non-zero, then ensure alignment is
10035 overridden to this value. */
10036
static tree
build_atomic_base (tree type, unsigned int align)
{
  tree t;

  /* Make sure it's not already registered.  */
  if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
    return t;

  t = build_variant_type_copy (type);
  set_type_quals (t, TYPE_QUAL_ATOMIC);

  /* A zero ALIGN means keep the base type's alignment.  */
  if (align)
    SET_TYPE_ALIGN (t, align);

  return t;
}
10054
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry
   gives N and whether the type is an extended (_FloatNx) variant.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
  {
    { 16, false },
    { 32, false },
    { 64, false },
    { 128, false },
    { 32, true },
    { 64, true },
    { 128, true },
  };
10067
10068
10069 /* Create nodes for all integer types (and error_mark_node) using the sizes
10070 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10071
void
build_common_tree_nodes (bool signed_char)
{
  int i;

  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Create the __intN types and register the enabled ones in
     integer_types[].  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);

      if (int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      int i;

      /* SIZE_TYPE may also name one of the __intN types, spelled either
	 "__intN unsigned" or "__intN__ unsigned".  */
      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0
		|| strcmp (altname, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Define what type to use for ptrdiff_t.  */
  if (strcmp (PTRDIFF_TYPE, "int") == 0)
    ptrdiff_type_node = integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
    ptrdiff_type_node = long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
    ptrdiff_type_node = long_long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
    ptrdiff_type_node = short_integer_type_node;
  else
    {
      /* PTRDIFF_TYPE may also name one of the __intN types.  */
      ptrdiff_type_node = NULL_TREE;
      for (int i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50], altname[50];
	    sprintf (name, "__int%d", int_n_data[i].bitsize);
	    sprintf (altname, "__int%d__", int_n_data[i].bitsize);

	    if (strcmp (name, PTRDIFF_TYPE) == 0
		|| strcmp (altname, PTRDIFF_TYPE) == 0)
	      ptrdiff_type_node = int_n_trees[i].signed_type;
	  }
      if (ptrdiff_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  /* Floating-point type nodes.  */
  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  /* _FloatN and _FloatNx types that the target supports.  */
  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      int n = floatn_nx_types[i].n;
      bool extended = floatn_nx_types[i].extended;
      scalar_float_mode mode;
      if (!targetm.floatn_mode (n, extended).exists (&mode))
	continue;
      int precision = GET_MODE_PRECISION (mode);
      /* Work around the rs6000 KFmode having precision 113 not
	 128.  */
      const struct real_format *fmt = REAL_MODE_FORMAT (mode);
      gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
      int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
      if (!extended)
	gcc_assert (min_precision == n);
      if (precision < min_precision)
	precision = min_precision;
      FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
      TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
      layout_type (FLOATN_NX_TYPE_NODE (i));
      SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
    }

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);

  /* Decimal float types.  */
  dfloat32_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
  SET_TYPE_MODE (dfloat32_type_node, SDmode);
  layout_type (dfloat32_type_node);
  dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);

  dfloat64_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
  SET_TYPE_MODE (dfloat64_type_node, DDmode);
  layout_type (dfloat64_type_node);
  dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);

  dfloat128_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
  SET_TYPE_MODE (dfloat128_type_node, TDmode);
  layout_type (dfloat128_type_node);
  dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);

  /* Complex type nodes.  */
  complex_integer_type_node = build_complex_type (integer_type_node, true);
  complex_float_type_node = build_complex_type (float_type_node, true);
  complex_double_type_node = build_complex_type (double_type_node, true);
  complex_long_double_type_node = build_complex_type (long_double_type_node,
						      true);

  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
	COMPLEX_FLOATN_NX_TYPE_NODE (i)
	  = build_complex_type (FLOATN_NX_TYPE_NODE (i));
    }

  /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
		(GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }

  /* SCEV analyzer global shared trees.  */
  chrec_dont_know = make_node (SCEV_NOT_KNOWN);
  TREE_TYPE (chrec_dont_know) = void_type_node;
  chrec_known = make_node (SCEV_KNOWN);
  TREE_TYPE (chrec_known) = void_type_node;
}
10406
10407 /* Modify DECL for given flags.
10408 TM_PURE attribute is set only on types, so the function will modify
10409 DECL's type when ECF_TM_PURE is used. */
10410
void
set_call_expr_flags (tree decl, int flags)
{
  /* Translate the ECF_* bits in FLAGS into the corresponding flag bits
     and attributes on DECL (and, for ECF_TM_PURE, on DECL's type).  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* The remaining flags have no dedicated decl bit; they are recorded
     as attributes prepended to DECL_ATTRIBUTES.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (1, "1")),
		   DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
10448
10449
10450 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10451
10452 static void
10453 local_define_builtin (const char *name, tree type, enum built_in_function code,
10454 const char *library_name, int ecf_flags)
10455 {
10456 tree decl;
10457
10458 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10459 library_name, NULL_TREE);
10460 set_call_expr_flags (decl, ecf_flags);
10461
10462 set_builtin_decl (code, decl, true);
10463 }
10464
10465 /* Call this function after instantiating all builtins that the language
10466 front end cares about. This will build the rest of the builtins
10467 and internal functions that are relied upon by the tree optimizers and
10468 the middle-end. */
10469
void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* Only define each builtin if the front end has not already done so;
     a front-end declaration may carry language-specific semantics.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  /* The aligned variants are internal-only; no front end declares them.  */
  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
    alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The assembler name depends on whether the target uses SJLJ or
     table-driven exception handling.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name to form e.g. "__mulsc3".  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
10740
10741 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10742 better way.
10743
10744 If we requested a pointer to a vector, build up the pointers that
10745 we stripped off while looking for the inner type. Similarly for
10746 return values from functions.
10747
10748 The argument TYPE is the top of the chain, and BOTTOM is the
10749 new type which we will point to. */
10750
10751 tree
10752 reconstruct_complex_type (tree type, tree bottom)
10753 {
10754 tree inner, outer;
10755
10756 if (TREE_CODE (type) == POINTER_TYPE)
10757 {
10758 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10759 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10760 TYPE_REF_CAN_ALIAS_ALL (type));
10761 }
10762 else if (TREE_CODE (type) == REFERENCE_TYPE)
10763 {
10764 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10765 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10766 TYPE_REF_CAN_ALIAS_ALL (type));
10767 }
10768 else if (TREE_CODE (type) == ARRAY_TYPE)
10769 {
10770 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10771 outer = build_array_type (inner, TYPE_DOMAIN (type));
10772 }
10773 else if (TREE_CODE (type) == FUNCTION_TYPE)
10774 {
10775 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10776 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10777 }
10778 else if (TREE_CODE (type) == METHOD_TYPE)
10779 {
10780 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10781 /* The build_method_type_directly() routine prepends 'this' to argument list,
10782 so we must compensate by getting rid of it. */
10783 outer
10784 = build_method_type_directly
10785 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10786 inner,
10787 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10788 }
10789 else if (TREE_CODE (type) == OFFSET_TYPE)
10790 {
10791 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10792 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10793 }
10794 else
10795 return bottom;
10796
10797 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10798 TYPE_QUALS (type));
10799 }
10800
10801 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10802 the inner type. */
10803 tree
10804 build_vector_type_for_mode (tree innertype, machine_mode mode)
10805 {
10806 poly_int64 nunits;
10807 unsigned int bitsize;
10808
10809 switch (GET_MODE_CLASS (mode))
10810 {
10811 case MODE_VECTOR_BOOL:
10812 case MODE_VECTOR_INT:
10813 case MODE_VECTOR_FLOAT:
10814 case MODE_VECTOR_FRACT:
10815 case MODE_VECTOR_UFRACT:
10816 case MODE_VECTOR_ACCUM:
10817 case MODE_VECTOR_UACCUM:
10818 nunits = GET_MODE_NUNITS (mode);
10819 break;
10820
10821 case MODE_INT:
10822 /* Check that there are no leftover bits. */
10823 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10824 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10825 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10826 break;
10827
10828 default:
10829 gcc_unreachable ();
10830 }
10831
10832 return make_vector_type (innertype, nunits, mode);
10833 }
10834
10835 /* Similarly, but takes the inner type and number of units, which must be
10836 a power of two. */
10837
10838 tree
10839 build_vector_type (tree innertype, poly_int64 nunits)
10840 {
10841 return make_vector_type (innertype, nunits, VOIDmode);
10842 }
10843
10844 /* Build truth vector with specified length and number of units. */
10845
10846 tree
10847 build_truth_vector_type (poly_uint64 nunits, poly_uint64 vector_size)
10848 {
10849 machine_mode mask_mode
10850 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
10851
10852 poly_uint64 vsize;
10853 if (mask_mode == BLKmode)
10854 vsize = vector_size * BITS_PER_UNIT;
10855 else
10856 vsize = GET_MODE_BITSIZE (mask_mode);
10857
10858 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10859
10860 tree bool_type = build_nonstandard_boolean_type (esize);
10861
10862 return make_vector_type (bool_type, nunits, mask_mode);
10863 }
10864
10865 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10866
10867 tree
10868 build_same_sized_truth_vector_type (tree vectype)
10869 {
10870 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10871 return vectype;
10872
10873 poly_uint64 size = GET_MODE_SIZE (TYPE_MODE (vectype));
10874
10875 if (known_eq (size, 0U))
10876 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10877
10878 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10879 }
10880
10881 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10882
tree
build_opaque_vector_type (tree innertype, poly_int64 nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Othewise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  /* Share the canonical type so the two variants compare equal for
     type-based alias analysis, and splice CAND into T's variant list
     directly after T.  */
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}
10905
10906 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10907
wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  /* COUNT is I's position within its pattern (>= the number of encoded
     elements per pattern, since I is not directly encoded).  */
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements.
     The last encoded element is at position 2 within its pattern, so
     element I is (COUNT - 2) steps beyond it.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
  return wi::to_wide (v2) + (count - 2) * diff;
}
10933
10934 /* Return the value of element I of VECTOR_CST T. */
10935
10936 tree
10937 vector_cst_elt (const_tree t, unsigned int i)
10938 {
10939 /* First handle elements that are directly encoded. */
10940 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10941 if (i < encoded_nelts)
10942 return VECTOR_CST_ENCODED_ELT (t, i);
10943
10944 /* If there are no steps, the final encoded value is the right one. */
10945 if (!VECTOR_CST_STEPPED_P (t))
10946 {
10947 /* Identify the pattern that contains element I and work out the index of
10948 the last encoded element for that pattern. */
10949 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10950 unsigned int pattern = i % npatterns;
10951 unsigned int final_i = encoded_nelts - npatterns + pattern;
10952 return VECTOR_CST_ENCODED_ELT (t, final_i);
10953 }
10954
10955 /* Otherwise work out the value from the last two encoded elements. */
10956 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10957 vector_cst_int_elt (t, i));
10958 }
10959
10960 /* Given an initializer INIT, return TRUE if INIT is zero or some
10961 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10962 null, set *NONZERO if and only if INIT is known not to be all
10963 zeros. The combination of return value of false and *NONZERO
10964 false implies that INIT may but need not be all zeros. Other
10965 combinations indicate definitive answers. */
10966
bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  /* Use a local so the rest of the function can store through NONZERO
     unconditionally.  */
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* -0.0 in either part is rejected: its bit pattern is not all
	 zeros.  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* A duplicated single-pattern vector is zero iff the repeated
	 element is zero.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber carries no value information.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* NONZERO is threaded through the recursive calls so any element
	   known nonzero is reported.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Handle loads of the form MEM_REF[&"string", CST]: peel off the
	   address and offset and check the string's tail below.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
11087
11088 /* Return true if EXPR is an initializer expression in which every element
11089 is a constant that is numerically equal to 0 or 1. The elements do not
11090 need to be equal to each other. */
11091
11092 bool
11093 initializer_each_zero_or_onep (const_tree expr)
11094 {
11095 STRIP_ANY_LOCATION_WRAPPER (expr);
11096
11097 switch (TREE_CODE (expr))
11098 {
11099 case INTEGER_CST:
11100 return integer_zerop (expr) || integer_onep (expr);
11101
11102 case REAL_CST:
11103 return real_zerop (expr) || real_onep (expr);
11104
11105 case VECTOR_CST:
11106 {
11107 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11108 if (VECTOR_CST_STEPPED_P (expr)
11109 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11110 return false;
11111
11112 for (unsigned int i = 0; i < nelts; ++i)
11113 {
11114 tree elt = vector_cst_elt (expr, i);
11115 if (!initializer_each_zero_or_onep (elt))
11116 return false;
11117 }
11118
11119 return true;
11120 }
11121
11122 default:
11123 return false;
11124 }
11125 }
11126
11127 /* Check if vector VEC consists of all the equal elements and
11128 that the number of elements corresponds to the type of VEC.
11129 The function returns first element of the vector
11130 or NULL_TREE if the vector is not uniform. */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* A single duplicated pattern encodes a splat.  */
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      /* Compare every element against the first one.  */
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
	{
	  if (i == 0)
	    {
	      first = t;
	      continue;
	    }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
	}
      /* A CONSTRUCTOR with fewer elements than the vector type has
	 implicit trailing zeros, so it is not uniform.  */
      if (i != nelts)
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
11175
11176 /* If the argument is INTEGER_CST, return it. If the argument is vector
11177 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11178 return NULL_TREE.
11179 Look through location wrappers. */
11180
11181 tree
11182 uniform_integer_cst_p (tree t)
11183 {
11184 STRIP_ANY_LOCATION_WRAPPER (t);
11185
11186 if (TREE_CODE (t) == INTEGER_CST)
11187 return t;
11188
11189 if (VECTOR_TYPE_P (TREE_TYPE (t)))
11190 {
11191 t = uniform_vector_p (t);
11192 if (t && TREE_CODE (t) == INTEGER_CST)
11193 return t;
11194 }
11195
11196 return NULL_TREE;
11197 }
11198
11199 /* If VECTOR_CST T has a single nonzero element, return the index of that
11200 element, otherwise return -1. */
11201
11202 int
11203 single_nonzero_element (const_tree t)
11204 {
11205 unsigned HOST_WIDE_INT nelts;
11206 unsigned int repeat_nelts;
11207 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11208 repeat_nelts = nelts;
11209 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11210 {
11211 nelts = vector_cst_encoded_nelts (t);
11212 repeat_nelts = VECTOR_CST_NPATTERNS (t);
11213 }
11214 else
11215 return -1;
11216
11217 int res = -1;
11218 for (unsigned int i = 0; i < nelts; ++i)
11219 {
11220 tree elt = vector_cst_elt (t, i);
11221 if (!integer_zerop (elt) && !real_zerop (elt))
11222 {
11223 if (res >= 0 || i >= repeat_nelts)
11224 return -1;
11225 res = i;
11226 }
11227 }
11228 return res;
11229 }
11230
11231 /* Build an empty statement at location LOC. */
11232
11233 tree
11234 build_empty_stmt (location_t loc)
11235 {
11236 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11237 SET_EXPR_LOCATION (t, loc);
11238 return t;
11239 }
11240
11241
11242 /* Build an OpenMP clause with code CODE. LOC is the location of the
11243 clause. */
11244
11245 tree
11246 build_omp_clause (location_t loc, enum omp_clause_code code)
11247 {
11248 tree t;
11249 int size, length;
11250
11251 length = omp_clause_num_ops[code];
11252 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11253
11254 record_node_allocation_statistics (OMP_CLAUSE, size);
11255
11256 t = (tree) ggc_internal_alloc (size);
11257 memset (t, 0, size);
11258 TREE_SET_CODE (t, OMP_CLAUSE);
11259 OMP_CLAUSE_SET_CODE (t, code);
11260 OMP_CLAUSE_LOCATION (t) = loc;
11261
11262 return t;
11263 }
11264
11265 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11266 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11267 Except for the CODE and operand count field, other storage for the
11268 object is initialized to zeros. */
11269
11270 tree
11271 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11272 {
11273 tree t;
11274 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11275
11276 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11277 gcc_assert (len >= 1);
11278
11279 record_node_allocation_statistics (code, length);
11280
11281 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11282
11283 TREE_SET_CODE (t, code);
11284
11285 /* Can't use TREE_OPERAND to store the length because if checking is
11286 enabled, it will try to check the length before we store it. :-P */
11287 t->exp.operands[0] = build_int_cst (sizetype, len);
11288
11289 return t;
11290 }
11291
11292 /* Helper function for build_call_* functions; build a CALL_EXPR with
11293 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11294 the argument slots. */
11295
11296 static tree
11297 build_call_1 (tree return_type, tree fn, int nargs)
11298 {
11299 tree t;
11300
11301 t = build_vl_exp (CALL_EXPR, nargs + 3);
11302 TREE_TYPE (t) = return_type;
11303 CALL_EXPR_FN (t) = fn;
11304 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11305
11306 return t;
11307 }
11308
11309 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11310 FN and a null static chain slot. NARGS is the number of call arguments
11311 which are specified as "..." arguments. */
11312
11313 tree
11314 build_call_nary (tree return_type, tree fn, int nargs, ...)
11315 {
11316 tree ret;
11317 va_list args;
11318 va_start (args, nargs);
11319 ret = build_call_valist (return_type, fn, nargs, args);
11320 va_end (args);
11321 return ret;
11322 }
11323
11324 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11325 FN and a null static chain slot. NARGS is the number of call arguments
11326 which are specified as a va_list ARGS. */
11327
11328 tree
11329 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11330 {
11331 tree t;
11332 int i;
11333
11334 t = build_call_1 (return_type, fn, nargs);
11335 for (i = 0; i < nargs; i++)
11336 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11337 process_call_operands (t);
11338 return t;
11339 }
11340
11341 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11342 FN and a null static chain slot. NARGS is the number of call arguments
11343 which are specified as a tree array ARGS. */
11344
11345 tree
11346 build_call_array_loc (location_t loc, tree return_type, tree fn,
11347 int nargs, const tree *args)
11348 {
11349 tree t;
11350 int i;
11351
11352 t = build_call_1 (return_type, fn, nargs);
11353 for (i = 0; i < nargs; i++)
11354 CALL_EXPR_ARG (t, i) = args[i];
11355 process_call_operands (t);
11356 SET_EXPR_LOCATION (t, loc);
11357 return t;
11358 }
11359
11360 /* Like build_call_array, but takes a vec. */
11361
11362 tree
11363 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11364 {
11365 tree ret, t;
11366 unsigned int ix;
11367
11368 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11369 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11370 CALL_EXPR_ARG (ret, ix) = t;
11371 process_call_operands (ret);
11372 return ret;
11373 }
11374
11375 /* Conveniently construct a function call expression. FNDECL names the
11376 function to be called and N arguments are passed in the array
11377 ARGARRAY. */
11378
11379 tree
11380 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11381 {
11382 tree fntype = TREE_TYPE (fndecl);
11383 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11384
11385 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11386 }
11387
11388 /* Conveniently construct a function call expression. FNDECL names the
11389 function to be called and the arguments are passed in the vector
11390 VEC. */
11391
11392 tree
11393 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11394 {
11395 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11396 vec_safe_address (vec));
11397 }
11398
11399
11400 /* Conveniently construct a function call expression. FNDECL names the
11401 function to be called, N is the number of arguments, and the "..."
11402 parameters are the argument expressions. */
11403
11404 tree
11405 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11406 {
11407 va_list ap;
11408 tree *argarray = XALLOCAVEC (tree, n);
11409 int i;
11410
11411 va_start (ap, n);
11412 for (i = 0; i < n; i++)
11413 argarray[i] = va_arg (ap, tree);
11414 va_end (ap);
11415 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11416 }
11417
11418 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11419 varargs macros aren't supported by all bootstrap compilers. */
11420
11421 tree
11422 build_call_expr (tree fndecl, int n, ...)
11423 {
11424 va_list ap;
11425 tree *argarray = XALLOCAVEC (tree, n);
11426 int i;
11427
11428 va_start (ap, n);
11429 for (i = 0; i < n; i++)
11430 argarray[i] = va_arg (ap, tree);
11431 va_end (ap);
11432 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11433 }
11434
11435 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11436 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11437 It will get gimplified later into an ordinary internal function. */
11438
11439 tree
11440 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11441 tree type, int n, const tree *args)
11442 {
11443 tree t = build_call_1 (type, NULL_TREE, n);
11444 for (int i = 0; i < n; ++i)
11445 CALL_EXPR_ARG (t, i) = args[i];
11446 SET_EXPR_LOCATION (t, loc);
11447 CALL_EXPR_IFN (t) = ifn;
11448 return t;
11449 }
11450
11451 /* Build internal call expression. This is just like CALL_EXPR, except
11452 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11453 internal function. */
11454
11455 tree
11456 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11457 tree type, int n, ...)
11458 {
11459 va_list ap;
11460 tree *argarray = XALLOCAVEC (tree, n);
11461 int i;
11462
11463 va_start (ap, n);
11464 for (i = 0; i < n; i++)
11465 argarray[i] = va_arg (ap, tree);
11466 va_end (ap);
11467 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11468 }
11469
/* Return a function call to FN, if the target is guaranteed to support it,
   or null otherwise.

   N is the number of arguments, passed in the "...", and TYPE is the
   type of the return value.  */

tree
maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
			   int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  /* Collect the variadic arguments into ARGARRAY.  */
  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  if (internal_fn_p (fn))
    {
      internal_fn ifn = as_internal_fn (fn);
      if (direct_internal_fn_p (ifn))
	{
	  /* Directly-mapped internal functions are only usable when the
	     target supports them for these argument/return types.  */
	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
	  if (!direct_internal_fn_supported_p (ifn, types,
					       OPTIMIZE_FOR_BOTH))
	    return NULL_TREE;
	}
      return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
    }
  else
    {
      /* For a built-in, require an implicitly-available decl.  */
      tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
      if (!fndecl)
	return NULL_TREE;
      return build_call_expr_loc_array (loc, fndecl, n, argarray);
    }
}
11508
11509 /* Return a function call to the appropriate builtin alloca variant.
11510
11511 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11512 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11513 bound for SIZE in case it is not a fixed value. */
11514
11515 tree
11516 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11517 {
11518 if (max_size >= 0)
11519 {
11520 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11521 return
11522 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11523 }
11524 else if (align > 0)
11525 {
11526 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11527 return build_call_expr (t, 2, size, size_int (align));
11528 }
11529 else
11530 {
11531 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11532 return build_call_expr (t, 1, size);
11533 }
11534 }
11535
/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  The STRING_CST value
   is the LEN bytes at STR (the representation of the string, which may
   be wide).  */

tree
build_string_literal (int len, const char *str,
		      tree eltype /* = char_type_node */,
		      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  /* Give the STRING_CST a const ELTYPE[0..MAXIDX] array type.  */
  tree index = build_index_type (size_int (maxidx));
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &string[0], i.e. the address of the first element.  */
  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, eltype,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
11566
11567
11568
11569 /* Return true if T (assumed to be a DECL) must be assigned a memory
11570 location. */
11571
11572 bool
11573 needs_to_live_in_memory (const_tree t)
11574 {
11575 return (TREE_ADDRESSABLE (t)
11576 || is_global_var (t)
11577 || (TREE_CODE (t) == RESULT_DECL
11578 && !DECL_BY_REFERENCE (t)
11579 && aggregate_value_p (t, current_function_decl)));
11580 }
11581
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* The sign bit of an X of precision BITS is bit BITS-1.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	/* Sign-extend: set all bits above the precision.  The double
	   shift avoids undefined behavior when BITS == 1 (shifting by
	   BITS would be a full-width shift).  */
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	/* Zero-extend: clear all bits above the precision.  */
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
11604
11605 /* If TYPE is an integral or pointer type, return an integer type with
11606 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11607 if TYPE is already an integer type of signedness UNSIGNEDP.
11608 If TYPE is a floating-point type, return an integer type with the same
11609 bitsize and with the signedness given by UNSIGNEDP; this is useful
11610 when doing bit-level operations on a floating-point value. */
11611
11612 tree
11613 signed_or_unsigned_type_for (int unsignedp, tree type)
11614 {
11615 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11616 return type;
11617
11618 if (TREE_CODE (type) == VECTOR_TYPE)
11619 {
11620 tree inner = TREE_TYPE (type);
11621 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11622 if (!inner2)
11623 return NULL_TREE;
11624 if (inner == inner2)
11625 return type;
11626 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11627 }
11628
11629 if (TREE_CODE (type) == COMPLEX_TYPE)
11630 {
11631 tree inner = TREE_TYPE (type);
11632 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11633 if (!inner2)
11634 return NULL_TREE;
11635 if (inner == inner2)
11636 return type;
11637 return build_complex_type (inner2);
11638 }
11639
11640 unsigned int bits;
11641 if (INTEGRAL_TYPE_P (type)
11642 || POINTER_TYPE_P (type)
11643 || TREE_CODE (type) == OFFSET_TYPE)
11644 bits = TYPE_PRECISION (type);
11645 else if (TREE_CODE (type) == REAL_TYPE)
11646 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11647 else
11648 return NULL_TREE;
11649
11650 return build_nonstandard_integer_type (bits, unsignedp);
11651 }
11652
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned, or itself if TYPE is already an
   unsigned integer type.  If TYPE is a floating-point type, return an
   unsigned integer type with the same bitsize as TYPE.  */

tree
unsigned_type_for (tree type)
{
  /* Thin wrapper: UNSIGNEDP == 1 selects the unsigned variant.  */
  return signed_or_unsigned_type_for (1, type);
}
11663
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is signed, or itself if TYPE is already a
   signed integer type.  If TYPE is a floating-point type, return a
   signed integer type with the same bitsize as TYPE.  */

tree
signed_type_for (tree type)
{
  /* Thin wrapper: UNSIGNEDP == 0 selects the signed variant.  */
  return signed_or_unsigned_type_for (0, type);
}
11674
/* If TYPE is a vector type, return a signed integer vector type with the
   same width and number of subparts.  Otherwise return boolean_type_node.  */

tree
truth_type_for (tree type)
{
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      /* A boolean vector is already a truth type.  */
      if (VECTOR_BOOLEAN_TYPE_P (type))
	return type;
      /* Otherwise build a boolean vector with the same element count
	 and the same overall mode size as TYPE.  */
      return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
				      GET_MODE_SIZE (TYPE_MODE (type)));
    }
  else
    return boolean_type_node;
}
11691
/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination:
     bit 2 = OUTER wider than INNER, bit 1 = OUTER unsigned,
     bit 0 = INNER unsigned.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.
	 (Negative INNER values wrap to the top of OUTER's range,
	 so the whole of OUTER is reachable.)  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* The bound is 2^PREC - 1: a mask of PREC low bits, represented in
     OUTER's precision.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
11744
/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
	 contains all values of INNER type.  In particular, both INNER
	 and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
	 want to obtain -2^^(iprec-1).  If we are keeping the
	 precision or narrowing to a signed type, we want to obtain
	 -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      /* wi::mask with NEGATE_P set yields the all-ones-above pattern,
	 i.e. the two's-complement value -2^(PREC-1) in OUTER.  */
      return wide_int_to_tree (outer,
			       wi::mask (prec - 1, true,
					 TYPE_PRECISION (outer)));
    }
}
11774
11775 /* Return nonzero if two operands that are suitable for PHI nodes are
11776 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11777 SSA_NAME or invariant. Note that this is strictly an optimization.
11778 That is, callers of this function can directly call operand_equal_p
11779 and get the same result, only slower. */
11780
11781 int
11782 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11783 {
11784 if (arg0 == arg1)
11785 return 1;
11786 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11787 return 0;
11788 return operand_equal_p (arg0, arg1, 0);
11789 }
11790
/* Returns number of zeros at the end of binary representation of X.  */

tree
num_ending_zeros (const_tree x)
{
  /* wi::ctz counts trailing zero bits; return the count as an
     INTEGER_CST of X's own type.  */
  return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
}
11798
11799
/* Walk NODE with walk_tree_1 and return from the enclosing function if
   the callback produced a result.  Relies on locals RESULT, FUNC, DATA,
   PSET and LH being in scope at the expansion site.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)

/* This is a subroutine of walk_tree that walks fields of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
11888
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  LH, if non-NULL, is a
   language hook that may handle or redirect the walk of a node.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

/* Continue the walk at NODE without growing the C stack: reset TP and
   jump back to the top of the function.  Note that this is a goto, so
   case arms ending in WALK_SUBTREE_TAIL never fall through and need no
   break.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the language hook a chance to handle this node itself.  */
  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case VECTOR_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	/* Walk each element's value; the indexes are not walked.  */
	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      /* Dispatch on the clause kind; each group walks the operands the
	 clause actually uses, then tail-walks the rest of the chain.  */
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE__GRIDDIM_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_DEVICE_TYPE:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 5; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
#undef WALK_SUBTREE
12269
12270 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12271
12272 tree
12273 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12274 walk_tree_lh lh)
12275 {
12276 tree result;
12277
12278 hash_set<tree> pset;
12279 result = walk_tree_1 (tp, func, data, &pset, lh);
12280 return result;
12281 }
12282
12283
/* Return the BLOCK recorded in expression T's location.  T must be an
   expression node; other tree codes carry no location.  */

tree
tree_block (tree t)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    return LOCATION_BLOCK (t->exp.locus);
  gcc_unreachable ();
  /* Not reached; pacifies -Wreturn-type style diagnostics.  */
  return NULL;
}
12294
/* Record block B in expression T's location.  T must be an expression
   node; other tree codes carry no location.  */

void
tree_set_block (tree t, tree b)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    {
      /* set_block re-encodes the same locus with block B attached.  */
      t->exp.locus = set_block (t->exp.locus, b);
    }
  else
    gcc_unreachable ();
}
12307
12308 /* Create a nameless artificial label and put it in the current
12309 function context. The label has a location of LOC. Returns the
12310 newly created label. */
12311
12312 tree
12313 create_artificial_label (location_t loc)
12314 {
12315 tree lab = build_decl (loc,
12316 LABEL_DECL, NULL_TREE, void_type_node);
12317
12318 DECL_ARTIFICIAL (lab) = 1;
12319 DECL_IGNORED_P (lab) = 1;
12320 DECL_CONTEXT (lab) = current_function_decl;
12321 return lab;
12322 }
12323
12324 /* Given a tree, try to return a useful variable name that we can use
12325 to prefix a temporary that is being assigned the value of the tree.
12326 I.E. given <temp> = &A, return A. */
12327
12328 const char *
12329 get_name (tree t)
12330 {
12331 tree stripped_decl;
12332
12333 stripped_decl = t;
12334 STRIP_NOPS (stripped_decl);
12335 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12336 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12337 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12338 {
12339 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12340 if (!name)
12341 return NULL;
12342 return IDENTIFIER_POINTER (name);
12343 }
12344 else
12345 {
12346 switch (TREE_CODE (stripped_decl))
12347 {
12348 case ADDR_EXPR:
12349 return get_name (TREE_OPERAND (stripped_decl, 0));
12350 default:
12351 return NULL;
12352 }
12353 }
12354 }
12355
/* Return true if TYPE has a variable argument list.  */

bool
stdarg_p (const_tree fntype)
{
  function_args_iterator args_iter;
  tree n = NULL_TREE, t;

  if (!fntype)
    return false;

  /* Walk to the last entry of the argument-type list.  Per the test
     below, a list ending in void_type_node (or an empty list) is not
     considered variadic.  */
  FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
    {
      n = t;
    }

  return n != NULL_TREE && n != void_type_node;
}
12374
12375 /* Return true if TYPE has a prototype. */
12376
12377 bool
12378 prototype_p (const_tree fntype)
12379 {
12380 tree t;
12381
12382 gcc_assert (fntype != NULL_TREE);
12383
12384 t = TYPE_ARG_TYPES (fntype);
12385 return (t != NULL_TREE);
12386 }
12387
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called.  Returns NULL if BLOCK is not such an inline scope.  */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk outward through the chain of scopes that carry an abstract
     origin, i.e. scopes produced by inlining.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      /* Keep walking through intermediate BLOCKs; stop at anything
	 else.  */
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
12419
12420
12421 /* If EXP is inlined from an __attribute__((__artificial__))
12422 function, return the location of the original call expression. */
12423
12424 location_t
12425 tree_nonartificial_location (tree exp)
12426 {
12427 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12428
12429 if (loc)
12430 return *loc;
12431 else
12432 return EXPR_LOCATION (exp);
12433 }
12434
12435
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */

/* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */

hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      /* Hash the raw bytes of the embedded cl_optimization struct.  */
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else.  Skip zero bytes so runs of defaults don't dilute
     the hash.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}
12470
/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that given by *Y, which is
   a node of the same kind.  */

bool
cl_option_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  /* Nodes of different kinds never compare equal.  */
  if (TREE_CODE (xt) != TREE_CODE (yt))
    return 0;

  if (TREE_CODE (xt) == OPTIMIZATION_NODE)
    return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
				      TREE_OPTIMIZATION (yt));
  else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
    return cl_target_option_eq (TREE_TARGET_OPTION (xt),
				TREE_TARGET_OPTION (yt));
  else
    gcc_unreachable ();
}
12493
/* Build an OPTIMIZATION_NODE based on the options in OPTS.  Identical
   option sets share a single node via the cl_option hash table.  */

tree
build_optimization_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* cl_optimization_node is a pre-allocated scratch node; fill it with
     the current option values so it can serve as the lookup key.  */
  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}
12520
/* Build a TARGET_OPTION_NODE based on the options in OPTS.  Identical
   option sets share a single node via the cl_option hash table.  */

tree
build_target_option_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* cl_target_option_node is a pre-allocated scratch node; fill it
     with the current option values so it can serve as the lookup
     key.  */
  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}
12547
12548 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12549 so that they aren't saved during PCH writing. */
12550
12551 void
12552 prepare_target_option_nodes_for_pch (void)
12553 {
12554 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12555 for (; iter != cl_option_hash_table->end (); ++iter)
12556 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12557 TREE_TARGET_GLOBALS (*iter) = NULL;
12558 }
12559
12560 /* Determine the "ultimate origin" of a block. */
12561
12562 tree
12563 block_ultimate_origin (const_tree block)
12564 {
12565 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12566
12567 if (origin == NULL_TREE)
12568 return NULL_TREE;
12569 else
12570 {
12571 gcc_checking_assert ((DECL_P (origin)
12572 && DECL_ORIGIN (origin) == origin)
12573 || BLOCK_ORIGIN (origin) == origin);
12574 return origin;
12575 }
12576 }
12577
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
12615
12616 /* Return true iff conversion in EXP generates no instruction. Mark
12617 it inline so that we fully inline into the stripping functions even
12618 though we have two uses of this function. */
12619
12620 static inline bool
12621 tree_nop_conversion (const_tree exp)
12622 {
12623 tree outer_type, inner_type;
12624
12625 if (location_wrapper_p (exp))
12626 return true;
12627 if (!CONVERT_EXPR_P (exp)
12628 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12629 return false;
12630
12631 outer_type = TREE_TYPE (exp);
12632 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12633 if (!inner_type || inner_type == error_mark_node)
12634 return false;
12635
12636 return tree_nop_conversion_p (outer_type, inner_type);
12637 }
12638
12639 /* Return true iff conversion in EXP generates no instruction. Don't
12640 consider conversions changing the signedness. */
12641
12642 static bool
12643 tree_sign_nop_conversion (const_tree exp)
12644 {
12645 tree outer_type, inner_type;
12646
12647 if (!tree_nop_conversion (exp))
12648 return false;
12649
12650 outer_type = TREE_TYPE (exp);
12651 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12652
12653 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12654 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12655 }
12656
12657 /* Strip conversions from EXP according to tree_nop_conversion and
12658 return the resulting expression. */
12659
12660 tree
12661 tree_strip_nop_conversions (tree exp)
12662 {
12663 while (tree_nop_conversion (exp))
12664 exp = TREE_OPERAND (exp, 0);
12665 return exp;
12666 }
12667
12668 /* Strip conversions from EXP according to tree_sign_nop_conversion
12669 and return the resulting expression. */
12670
12671 tree
12672 tree_strip_sign_nop_conversions (tree exp)
12673 {
12674 while (tree_sign_nop_conversion (exp))
12675 exp = TREE_OPERAND (exp, 0);
12676 return exp;
12677 }
12678
/* Avoid any floating point extensions from EXP.  Returns EXP with
   widening conversions (and widened constants) peeled off.  */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double; take the narrowest type that
	 represents the value exactly.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* Never mix binary and decimal floating point representations.  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* Only look through widening conversions.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* Recurse: the operand may itself be an extension.  */
  return strip_float_extensions (sub);
}
12724
12725 /* Strip out all handled components that produce invariant
12726 offsets. */
12727
12728 const_tree
12729 strip_invariant_refs (const_tree op)
12730 {
12731 while (handled_component_p (op))
12732 {
12733 switch (TREE_CODE (op))
12734 {
12735 case ARRAY_REF:
12736 case ARRAY_RANGE_REF:
12737 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12738 || TREE_OPERAND (op, 2) != NULL_TREE
12739 || TREE_OPERAND (op, 3) != NULL_TREE)
12740 return NULL;
12741 break;
12742
12743 case COMPONENT_REF:
12744 if (TREE_OPERAND (op, 2) != NULL_TREE)
12745 return NULL;
12746 break;
12747
12748 default:;
12749 }
12750 op = TREE_OPERAND (op, 0);
12751 }
12752
12753 return op;
12754 }
12755
/* Cached personality decl; a GC root so it survives collections.  */
static GTY(()) tree gcc_eh_personality_decl;

/* Return the GCC personality function decl.  Built lazily on first
   use and cached thereafter.  */

tree
lhd_gcc_personality (void)
{
  if (!gcc_eh_personality_decl)
    gcc_eh_personality_decl = build_personality_function ("gcc");
  return gcc_eh_personality_decl;
}
12767
/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   OBJ_TYPE_REF representing an virtual call of C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  */

bool
virtual_method_call_p (const_tree target)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  /* TARGET's type is a pointer to either a FUNCTION_TYPE (objc-style
     call through a cast) or a METHOD_TYPE (C++ virtual call).  */
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target)))
    return false;
  return true;
}
12793
/* Lookup sub-BINFO of BINFO of TYPE at offset POS.  Searches direct
   bases first, then recurses into each base; returns NULL when no
   matching base is found.  */

static tree
lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
{
  unsigned int i;
  tree base_binfo, b;

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
      return base_binfo;
    else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
      return b;
  return NULL;
}
12810
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return, otherwise return NULL_TREE.  OFFSET is in bits.  */

tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial field (i.e. base sub-object) containing
	 OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  /* No direct base matched; search recursively.  */
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the field and continue with the remaining
	 offset.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
12872
12873 /* Returns true if X is a typedef decl. */
12874
12875 bool
12876 is_typedef_decl (const_tree x)
12877 {
12878 return (x && TREE_CODE (x) == TYPE_DECL
12879 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12880 }
12881
12882 /* Returns true iff TYPE is a type variant created for a typedef. */
12883
12884 bool
12885 typedef_variant_p (const_tree type)
12886 {
12887 return is_typedef_decl (TYPE_NAME (type));
12888 }
12889
/* PR 84195: Replace control characters in "unescaped" with their
   escaped equivalents.  Allow newlines if -fmessage-length has
   been set to a non-zero value.  This is done here, rather than
   where the attribute is recorded as the message length can
   change between these two locations.  */

void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any buffer a previous call made us own.  */
  if (m_owned)
    free (m_str);

  /* Start out aliasing the input; only allocate if a control
     character actually needs escaping.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      if (!ISCNTRL (c))
	{
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      /* Newlines are kept as-is when the pretty-printer wraps lines.  */
      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Each byte expands to at most two
		 output bytes, so LEN * 2 + 1 always suffices.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default:   escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  /* If a copy was made, terminate it and take ownership.  */
  if (escaped)
    {
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
12963
/* Warn about a use of an identifier which was marked deprecated.  Returns
   whether a warning was given.  NODE is the decl or type being used; ATTR
   is its attribute list, or NULL_TREE to have it located here.  */

bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* Locate the attribute list when the caller did not supply one.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* The attribute's optional argument is the user-supplied message;
     escape any control characters before printing it.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      /* Try to find a name to refer to the type by.  */
      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
13047
13048 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13049 somewhere in it. */
13050
13051 bool
13052 contains_bitfld_component_ref_p (const_tree ref)
13053 {
13054 while (handled_component_p (ref))
13055 {
13056 if (TREE_CODE (ref) == COMPONENT_REF
13057 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13058 return true;
13059 ref = TREE_OPERAND (ref, 0);
13060 }
13061
13062 return false;
13063 }
13064
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  STMT's operand 0 is the
   try block and operand 1 the handler sequence.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Inspect the first handler to classify the handler sequence.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
13112
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* Only the last statement of the block can prevent fallthrough.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* Falls through if either arm does.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* An assignment whose RHS is a call falls through iff the call
	 does; any other assignment always falls through.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      /* Be conservative about erroneous (or empty) blocks.  */
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
13191
/* True if we are using EH to handle cleanups.  Set once by a front
   end, then only read.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */
bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
13209
13210 /* Wrapper for tree_code_name to ensure that tree code is valid */
13211 const char *
13212 get_tree_code_name (enum tree_code code)
13213 {
13214 const char *invalid = "<invalid tree code>";
13215
13216 if (code >= MAX_TREE_CODES)
13217 {
13218 if (code == 0xa5a5)
13219 return "ggc_freed";
13220 return invalid;
13221 }
13222
13223 return tree_code_name[code];
13224 }
13225
/* Drops the TREE_OVERFLOW flag from T.  T must have the flag set;
   returns an equivalent constant without it (possibly a fresh or
   shared node).  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
13271
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  /* Peel component references (array indexing, field access, etc.).  */
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  /* Look through a (TARGET_)MEM_REF whose address is a constant
     &object, yielding the object itself.  */
  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  /* ??? Either the alias oracle or all callers need to properly deal
     with WITH_SIZE_EXPRs before we can look through those.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    return NULL_TREE;

  return t;
}
13299
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  Operand 3, when present,
   overrides the element type's own size.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
13327
13328 /* Return a tree representing the lower bound of the array mentioned in
13329 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13330
13331 tree
13332 array_ref_low_bound (tree exp)
13333 {
13334 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13335
13336 /* If a lower bound is specified in EXP, use it. */
13337 if (TREE_OPERAND (exp, 2))
13338 return TREE_OPERAND (exp, 2);
13339
13340 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13341 substituting for a PLACEHOLDER_EXPR as needed. */
13342 if (domain_type && TYPE_MIN_VALUE (domain_type))
13343 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13344
13345 /* Otherwise, return a zero of the appropriate type. */
13346 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13347 }
13348
13349 /* Return a tree representing the upper bound of the array mentioned in
13350 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13351
13352 tree
13353 array_ref_up_bound (tree exp)
13354 {
13355 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13356
13357 /* If there is a domain type and it has an upper bound, use it, substituting
13358 for a PLACEHOLDER_EXPR as needed. */
13359 if (domain_type && TYPE_MAX_VALUE (domain_type))
13360 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13361
13362 /* Otherwise fail. */
13363 return NULL_TREE;
13364 }
13365
/* Returns true if REF is an array reference or a component reference
   to an array at the end of a structure.
   If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  tree atype;

  /* Only array references, or COMPONENT_REFs whose member has array type,
     can possibly denote a trailing array.  Record the array type and step
     to the containing object.  */
  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else
    return false;

  /* A string literal has a known, fixed extent.  */
  if (TREE_CODE (ref) == STRING_CST)
    return false;

  /* Walk up the reference chain checking that the array stays at the end
     of every enclosing aggregate.  */
  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
         non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Skip non-FIELD_DECL chain entries to find a following
		 field, if any.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
	 a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
	 gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* Look through a MEM_REF wrapping the address of a decl.  */
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  if (DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
         padding.  */
      poly_int64 offset;
      /* Non-constant element size or bounds: conservatively assume the
	 array may extend into padding.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
13470
13471 /* Return a tree representing the offset, in bytes, of the field referenced
13472 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13473
13474 tree
13475 component_ref_field_offset (tree exp)
13476 {
13477 tree aligned_offset = TREE_OPERAND (exp, 2);
13478 tree field = TREE_OPERAND (exp, 1);
13479 location_t loc = EXPR_LOCATION (exp);
13480
13481 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13482 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13483 value. */
13484 if (aligned_offset)
13485 {
13486 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13487 sizetype from another type of the same width and signedness. */
13488 if (TREE_TYPE (aligned_offset) != sizetype)
13489 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13490 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13491 size_int (DECL_OFFSET_ALIGN (field)
13492 / BITS_PER_UNIT));
13493 }
13494
13495 /* Otherwise, take the offset from that of the field. Substitute
13496 any PLACEHOLDER_EXPR that we have. */
13497 else
13498 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13499 }
13500
/* Determines the size of the member referenced by the COMPONENT_REF
   REF, using its initializer expression if necessary in order to
   determine the size of an initialized flexible array member.
   Returns the size (which might be zero for an object with
   an uninitialized flexible array member) or null if the size
   cannot be determined.  */

tree
component_ref_size (tree ref)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  /* The FIELD_DECL being accessed.  */
  tree member = TREE_OPERAND (ref, 1);

  /* If the member is not an array, or is not last, or is an array with
     more than one element, return its size.  Otherwise it's either
     a bona fide flexible array member, or a zero-length array member,
     or an array of length one treated as such.  */
  tree size = DECL_SIZE_UNIT (member);
  if (size)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE
	  || !array_at_struct_end_p (ref))
	return size;

      /* A trailing array with a constant domain spanning more than one
	 element is an ordinary array, not a flexible-array surrogate.  */
      if (!integer_zerop (size))
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  if (maxidx - minidx > 1)
		    return size;
		}
    }

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 off = 0;
  tree base = get_addr_base_and_unit_offset (ref, &off);
  if (!base || !VAR_P (base))
    return NULL_TREE;

  /* The size of any member of a declared object other than a flexible
     array member is that obtained above.  */
  if (size)
    return size;

  if (tree init = DECL_INITIAL (base))
    if (TREE_CODE (init) == CONSTRUCTOR)
      {
	/* fold_ctor_reference works with bit offsets.  */
	off <<= LOG2_BITS_PER_UNIT;
	init = fold_ctor_reference (NULL_TREE, init, off, 0, base);
	if (init)
	  return TYPE_SIZE_UNIT (TREE_TYPE (init));
      }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_EXTERNAL (base) && TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	  ? NULL_TREE : integer_zero_node);
}
13569
13570 /* Return the machine mode of T. For vectors, returns the mode of the
13571 inner type. The main use case is to feed the result to HONOR_NANS,
13572 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13573
13574 machine_mode
13575 element_mode (const_tree t)
13576 {
13577 if (!TYPE_P (t))
13578 t = TREE_TYPE (t);
13579 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13580 t = TREE_TYPE (t);
13581 return TYPE_MODE (t);
13582 }
13583
13584 /* Vector types need to re-check the target flags each time we report
13585 the machine mode. We need to do this because attribute target can
13586 change the result of vector_mode_supported_p and have_regs_of_mode
13587 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13588 change on a per-function basis. */
13589 /* ??? Possibly a better solution is to run through all the types
13590 referenced by a function and re-compute the TYPE_MODE once, rather
13591 than make the TYPE_MODE macro call a function. */
13592
13593 machine_mode
13594 vector_type_mode (const_tree t)
13595 {
13596 machine_mode mode;
13597
13598 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13599
13600 mode = t->type_common.mode;
13601 if (VECTOR_MODE_P (mode)
13602 && (!targetm.vector_mode_supported_p (mode)
13603 || !have_regs_of_mode[mode]))
13604 {
13605 scalar_int_mode innermode;
13606
13607 /* For integers, try mapping it to a same-sized scalar mode. */
13608 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13609 {
13610 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13611 * GET_MODE_BITSIZE (innermode));
13612 scalar_int_mode mode;
13613 if (int_mode_for_size (size, 0).exists (&mode)
13614 && have_regs_of_mode[mode])
13615 return mode;
13616 }
13617
13618 return BLKmode;
13619 }
13620
13621 return mode;
13622 }
13623
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).
   Returns false (after emitting diagnostics) on a mismatch.  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE. 
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
  if (TYPE_ARTIFICIAL (tv) && 0)
    verify_variant_match (TYPE_ARTIFICIAL);
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.  */
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
    verify_variant_match (TYPE_FINAL_P);

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* PLACEHOLDER_EXPR sizes cannot be compared by identity.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
    }
  verify_variant_match (TYPE_PRECISION);
  verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.   */
  if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
    verify_variant_match (TYPE_CONTEXT);
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
       || TREE_CODE (t) == INTEGER_TYPE
       || TREE_CODE (t) == BOOLEAN_TYPE
       || TREE_CODE (t) == REAL_TYPE
       || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replaces it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dumped TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  /* Finally the variant must be structurally equivalent to its main
     variant for TBAA purposes.  */
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
13858
13859
13860 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13861 the middle-end types_compatible_p function. It needs to avoid
13862 claiming types are different for types that should be treated
13863 the same with respect to TBAA. Canonical types are also used
13864 for IL consistency checks via the useless_type_conversion_p
13865 predicate which does not handle all type kinds itself but falls
13866 back to pointer-comparison of TYPE_CANONICAL for aggregates
13867 for example. */
13868
13869 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13870 type calculation because we need to allow inter-operability between signed
13871 and unsigned variants. */
13872
13873 bool
13874 type_with_interoperable_signedness (const_tree type)
13875 {
13876 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13877 signed char and unsigned char. Similarly fortran FE builds
13878 C_SIZE_T as signed type, while C defines it unsigned. */
13879
13880 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13881 == INTEGER_TYPE
13882 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13883 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13884 }
13885
/* Return true iff T1 and T2 are structurally identical as far as
   TBAA is concerned.
   This function is used both by lto.c canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
   that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
   only for LTO because only in these cases TYPE_CANONICAL equivalence
   corresponds to the one defined by gimple_canonical_types_compatible_p.  */
13893
bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
				     bool trust_type_canonical)
{
  /* Type variants should be same as the main variant.  When not doing sanity
     checking to verify this fact, go to main variants and save some work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete type.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
	1) mode assuming that types are complete matching their structure
	2) mode allowing incomplete types but producing equivalence classes
	   and thus ignoring all info from complete types
	3) mode allowing incomplete types to match complete but checking
	   compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used i.e. for warnings during
     declaration merging.  */

  gcc_assert (!trust_type_canonical
	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));

  /* If the types have been previously registered and found equal
     they still are.  */

  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure to not return
	 false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
			   && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* For types where we do ODR based TBAA the canonical type is always
     set correctly, so we know that types are different if their
     canonical types do not match.  */
  if (trust_type_canonical
      && (odr_type_p (t1) && odr_based_tbaa_p (t1))
	 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different mode.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	return false;

      /* In some cases the signed and unsigned types are required to be
	 inter-operable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
	  && !type_with_interoperable_signedness (t1))
	return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
	 interoperable with "signed char".  Unless all frontends are revisited
	 to agree on these types, we must ignore the flag completely.  */

      /* The Fortran standard defines the C_PTR type as compatible with every
	 C pointer.  For this reason we need to glob all pointers into one.
	 Still pointers in different address spaces are not compatible.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2),
						    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  /* Walk both parameter lists in lock step.  */
	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      trust_type_canonical))
		return false;
	    }

	  /* Different parameter counts mean different types.  */
	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* Don't try to compare variants of an incomplete type, before
	   TYPE_FIELDS has been copied around.  */
	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
	  return true;


	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
	  return false;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields and zero-sized fields.  */
	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
			  || (DECL_SIZE (f1)
			      && integer_zerop (DECL_SIZE (f1)))))
	      f1 = TREE_CHAIN (f1);
	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
			  || (DECL_SIZE (f2)
			      && integer_zerop (DECL_SIZE (f2)))))
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2),
		       trust_type_canonical))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
	 compatible.  This is executed only from verify_type and false
	 positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}
14149
14150 /* Verify type T. */
14151
14152 void
14153 verify_type (const_tree t)
14154 {
14155 bool error_found = false;
14156 tree mv = TYPE_MAIN_VARIANT (t);
14157 if (!mv)
14158 {
14159 error ("main variant is not defined");
14160 error_found = true;
14161 }
14162 else if (mv != TYPE_MAIN_VARIANT (mv))
14163 {
14164 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14165 debug_tree (mv);
14166 error_found = true;
14167 }
14168 else if (t != mv && !verify_type_variant (t, mv))
14169 error_found = true;
14170
14171 tree ct = TYPE_CANONICAL (t);
14172 if (!ct)
14173 ;
14174 else if (TYPE_CANONICAL (t) != ct)
14175 {
14176 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14177 debug_tree (ct);
14178 error_found = true;
14179 }
14180 /* Method and function types cannot be used to address memory and thus
14181 TYPE_CANONICAL really matters only for determining useless conversions.
14182
14183 FIXME: C++ FE produce declarations of builtin functions that are not
14184 compatible with main variants. */
14185 else if (TREE_CODE (t) == FUNCTION_TYPE)
14186 ;
14187 else if (t != ct
14188 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14189 with variably sized arrays because their sizes possibly
14190 gimplified to different variables. */
14191 && !variably_modified_type_p (ct, NULL)
14192 && !gimple_canonical_types_compatible_p (t, ct, false)
14193 && COMPLETE_TYPE_P (t))
14194 {
14195 error ("%<TYPE_CANONICAL%> is not compatible");
14196 debug_tree (ct);
14197 error_found = true;
14198 }
14199
14200 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14201 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14202 {
14203 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14204 debug_tree (ct);
14205 error_found = true;
14206 }
14207 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14208 {
14209 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14210 debug_tree (ct);
14211 debug_tree (TYPE_MAIN_VARIANT (ct));
14212 error_found = true;
14213 }
14214
14215
14216 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14217 if (RECORD_OR_UNION_TYPE_P (t))
14218 {
14219 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14220 and danagle the pointer from time to time. */
14221 if (TYPE_VFIELD (t)
14222 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14223 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14224 {
14225 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14226 debug_tree (TYPE_VFIELD (t));
14227 error_found = true;
14228 }
14229 }
14230 else if (TREE_CODE (t) == POINTER_TYPE)
14231 {
14232 if (TYPE_NEXT_PTR_TO (t)
14233 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14234 {
14235 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14236 debug_tree (TYPE_NEXT_PTR_TO (t));
14237 error_found = true;
14238 }
14239 }
14240 else if (TREE_CODE (t) == REFERENCE_TYPE)
14241 {
14242 if (TYPE_NEXT_REF_TO (t)
14243 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14244 {
14245 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14246 debug_tree (TYPE_NEXT_REF_TO (t));
14247 error_found = true;
14248 }
14249 }
14250 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14251 || TREE_CODE (t) == FIXED_POINT_TYPE)
14252 {
14253 /* FIXME: The following check should pass:
14254 useless_type_conversion_p (const_cast <tree> (t),
14255 TREE_TYPE (TYPE_MIN_VALUE (t))
14256 but does not for C sizetypes in LTO. */
14257 }
14258
14259 /* Check various uses of TYPE_MAXVAL_RAW. */
14260 if (RECORD_OR_UNION_TYPE_P (t))
14261 {
14262 if (!TYPE_BINFO (t))
14263 ;
14264 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14265 {
14266 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14267 debug_tree (TYPE_BINFO (t));
14268 error_found = true;
14269 }
14270 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14271 {
14272 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14273 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14274 error_found = true;
14275 }
14276 }
14277 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14278 {
14279 if (TYPE_METHOD_BASETYPE (t)
14280 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14281 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14282 {
14283 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14284 debug_tree (TYPE_METHOD_BASETYPE (t));
14285 error_found = true;
14286 }
14287 }
14288 else if (TREE_CODE (t) == OFFSET_TYPE)
14289 {
14290 if (TYPE_OFFSET_BASETYPE (t)
14291 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14292 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14293 {
14294 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14295 debug_tree (TYPE_OFFSET_BASETYPE (t));
14296 error_found = true;
14297 }
14298 }
14299 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14300 || TREE_CODE (t) == FIXED_POINT_TYPE)
14301 {
14302 /* FIXME: The following check should pass:
14303 useless_type_conversion_p (const_cast <tree> (t),
14304 TREE_TYPE (TYPE_MAX_VALUE (t))
14305 but does not for C sizetypes in LTO. */
14306 }
14307 else if (TREE_CODE (t) == ARRAY_TYPE)
14308 {
14309 if (TYPE_ARRAY_MAX_SIZE (t)
14310 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14311 {
14312 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14313 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14314 error_found = true;
14315 }
14316 }
14317 else if (TYPE_MAX_VALUE_RAW (t))
14318 {
14319 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14320 debug_tree (TYPE_MAX_VALUE_RAW (t));
14321 error_found = true;
14322 }
14323
14324 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14325 {
14326 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14327 debug_tree (TYPE_LANG_SLOT_1 (t));
14328 error_found = true;
14329 }
14330
14331 /* Check various uses of TYPE_VALUES_RAW. */
14332 if (TREE_CODE (t) == ENUMERAL_TYPE)
14333 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14334 {
14335 tree value = TREE_VALUE (l);
14336 tree name = TREE_PURPOSE (l);
14337
	  /* C FE produces INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14339 CONST_DECL of ENUMERAL TYPE. */
14340 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14341 {
14342 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14343 debug_tree (value);
14344 debug_tree (name);
14345 error_found = true;
14346 }
14347 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14348 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14349 {
14350 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14351 "to the enum");
14352 debug_tree (value);
14353 debug_tree (name);
14354 error_found = true;
14355 }
14356 if (TREE_CODE (name) != IDENTIFIER_NODE)
14357 {
14358 error ("enum value name is not %<IDENTIFIER_NODE%>");
14359 debug_tree (value);
14360 debug_tree (name);
14361 error_found = true;
14362 }
14363 }
14364 else if (TREE_CODE (t) == ARRAY_TYPE)
14365 {
14366 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14367 {
14368 error ("array %<TYPE_DOMAIN%> is not integer type");
14369 debug_tree (TYPE_DOMAIN (t));
14370 error_found = true;
14371 }
14372 }
14373 else if (RECORD_OR_UNION_TYPE_P (t))
14374 {
14375 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14376 {
14377 error ("%<TYPE_FIELDS%> defined in incomplete type");
14378 error_found = true;
14379 }
14380 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14381 {
14382 /* TODO: verify properties of decls. */
14383 if (TREE_CODE (fld) == FIELD_DECL)
14384 ;
14385 else if (TREE_CODE (fld) == TYPE_DECL)
14386 ;
14387 else if (TREE_CODE (fld) == CONST_DECL)
14388 ;
14389 else if (VAR_P (fld))
14390 ;
14391 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14392 ;
14393 else if (TREE_CODE (fld) == USING_DECL)
14394 ;
14395 else if (TREE_CODE (fld) == FUNCTION_DECL)
14396 ;
14397 else
14398 {
14399 error ("wrong tree in %<TYPE_FIELDS%> list");
14400 debug_tree (fld);
14401 error_found = true;
14402 }
14403 }
14404 }
14405 else if (TREE_CODE (t) == INTEGER_TYPE
14406 || TREE_CODE (t) == BOOLEAN_TYPE
14407 || TREE_CODE (t) == OFFSET_TYPE
14408 || TREE_CODE (t) == REFERENCE_TYPE
14409 || TREE_CODE (t) == NULLPTR_TYPE
14410 || TREE_CODE (t) == POINTER_TYPE)
14411 {
14412 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14413 {
14414 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14415 "is %p",
14416 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14417 error_found = true;
14418 }
14419 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14420 {
14421 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14422 debug_tree (TYPE_CACHED_VALUES (t));
14423 error_found = true;
14424 }
14425 /* Verify just enough of cache to ensure that no one copied it to new type.
14426 All copying should go by copy_node that should clear it. */
14427 else if (TYPE_CACHED_VALUES_P (t))
14428 {
14429 int i;
14430 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14431 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14432 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14433 {
14434 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14435 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14436 error_found = true;
14437 break;
14438 }
14439 }
14440 }
14441 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14442 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14443 {
14444 /* C++ FE uses TREE_PURPOSE to store initial values. */
14445 if (TREE_PURPOSE (l) && in_lto_p)
14446 {
14447 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14448 debug_tree (l);
14449 error_found = true;
14450 }
14451 if (!TYPE_P (TREE_VALUE (l)))
14452 {
14453 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14454 debug_tree (l);
14455 error_found = true;
14456 }
14457 }
14458 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14459 {
14460 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14461 debug_tree (TYPE_VALUES_RAW (t));
14462 error_found = true;
14463 }
14464 if (TREE_CODE (t) != INTEGER_TYPE
14465 && TREE_CODE (t) != BOOLEAN_TYPE
14466 && TREE_CODE (t) != OFFSET_TYPE
14467 && TREE_CODE (t) != REFERENCE_TYPE
14468 && TREE_CODE (t) != NULLPTR_TYPE
14469 && TREE_CODE (t) != POINTER_TYPE
14470 && TYPE_CACHED_VALUES_P (t))
14471 {
14472 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14473 error_found = true;
14474 }
14475
14476 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
     TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14478 of a type. */
14479 if (TREE_CODE (t) == METHOD_TYPE
14480 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14481 {
14482 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14483 error_found = true;
14484 }
14485
14486 if (error_found)
14487 {
14488 debug_tree (const_cast <tree> (t));
14489 internal_error ("%qs failed", __func__);
14490 }
14491 }
14492
14493
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  /* CNT bounds the two walks below so pathological conversion chains
     cannot make this quadratic for callers in a loop.  */
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* Sign-extend the constant to PREC bits and test its sign bit.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Look through a chain of integral conversions on the tree level, as
     long as the inner type is not wider than the current precision.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* For SSA names, consult recorded range info; if none is available,
     look through conversions in the defining statement, mirroring the
     tree-level walk above.  */
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      /* Zero-extension from a narrower type: always positive.  */
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}
14572
14573
14574
14575
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  /* Only pointer-like PARM_DECLs are meaningful here.  */
  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  /* Walk every "nonnull" attribute on the function type; there may be
     several, each covering a different set of argument positions.
     Note the loop variable ATTRS is re-pointed by lookup_attribute so
     TREE_CHAIN advances from the last match.  */
  fntype = TREE_TYPE (cfun->decl);
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature
	 (nonnull attribute arguments are 1-based).  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
14638
14639 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14640 information. */
14641
14642 location_t
14643 set_block (location_t loc, tree block)
14644 {
14645 location_t pure_loc = get_pure_location (loc);
14646 source_range src_range = get_range_from_loc (line_table, loc);
14647 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14648 }
14649
14650 location_t
14651 set_source_range (tree expr, location_t start, location_t finish)
14652 {
14653 source_range src_range;
14654 src_range.m_start = start;
14655 src_range.m_finish = finish;
14656 return set_source_range (expr, src_range);
14657 }
14658
14659 location_t
14660 set_source_range (tree expr, source_range src_range)
14661 {
14662 if (!EXPR_P (expr))
14663 return UNKNOWN_LOCATION;
14664
14665 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14666 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14667 pure_loc,
14668 src_range,
14669 NULL);
14670 SET_EXPR_LOCATION (expr, adhoc);
14671 return adhoc;
14672 }
14673
14674 /* Return EXPR, potentially wrapped with a node expression LOC,
14675 if !CAN_HAVE_LOCATION_P (expr).
14676
14677 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14678 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14679
14680 Wrapper nodes can be identified using location_wrapper_p. */
14681
14682 tree
14683 maybe_wrap_with_location (tree expr, location_t loc)
14684 {
14685 if (expr == NULL)
14686 return NULL;
14687 if (loc == UNKNOWN_LOCATION)
14688 return expr;
14689 if (CAN_HAVE_LOCATION_P (expr))
14690 return expr;
14691 /* We should only be adding wrappers for constants and for decls,
14692 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14693 gcc_assert (CONSTANT_CLASS_P (expr)
14694 || DECL_P (expr)
14695 || EXCEPTIONAL_CLASS_P (expr));
14696
14697 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14698 any impact of the wrapper nodes. */
14699 if (EXCEPTIONAL_CLASS_P (expr))
14700 return expr;
14701
14702 /* If any auto_suppress_location_wrappers are active, don't create
14703 wrappers. */
14704 if (suppress_location_wrappers > 0)
14705 return expr;
14706
14707 tree_code code
14708 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14709 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14710 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14711 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14712 /* Mark this node as being a wrapper. */
14713 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14714 return wrapper;
14715 }
14716
/* When positive, maybe_wrap_with_location returns its argument
   unwrapped; presumably maintained by auto_suppress_location_wrappers
   (see the check in maybe_wrap_with_location) — TODO confirm against
   tree.h.  */
int suppress_location_wrappers;
14718
14719 /* Return the name of combined function FN, for debugging purposes. */
14720
14721 const char *
14722 combined_fn_name (combined_fn fn)
14723 {
14724 if (builtin_fn_p (fn))
14725 {
14726 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14727 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14728 }
14729 else
14730 return internal_fn_name (as_internal_fn (fn));
14731 }
14732
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's argument are nonnull.  The caller
   must free the bitmap.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return NULL;

  bitmap argmap = NULL;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      /* Allocate lazily, only once a nonnull attribute is found.  */
      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  /* Attribute positions are 1-based; the bitmap is 0-based.  */
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
14782
14783 /* Returns true if TYPE is a type where it and all of its subobjects
14784 (recursively) are of structure, union, or array type. */
14785
14786 static bool
14787 default_is_empty_type (tree type)
14788 {
14789 if (RECORD_OR_UNION_TYPE_P (type))
14790 {
14791 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14792 if (TREE_CODE (field) == FIELD_DECL
14793 && !DECL_PADDING_P (field)
14794 && !default_is_empty_type (TREE_TYPE (field)))
14795 return false;
14796 return true;
14797 }
14798 else if (TREE_CODE (type) == ARRAY_TYPE)
14799 return (integer_minus_onep (array_type_nelts (type))
14800 || TYPE_DOMAIN (type) == NULL_TREE
14801 || default_is_empty_type (TREE_TYPE (type)));
14802 return false;
14803 }
14804
14805 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14806 that shouldn't be passed via stack. */
14807
14808 bool
14809 default_is_empty_record (const_tree type)
14810 {
14811 if (!abi_version_at_least (12))
14812 return false;
14813
14814 if (type == error_mark_node)
14815 return false;
14816
14817 if (TREE_ADDRESSABLE (type))
14818 return false;
14819
14820 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
14821 }
14822
14823 /* Like int_size_in_bytes, but handle empty records specially. */
14824
14825 HOST_WIDE_INT
14826 arg_int_size_in_bytes (const_tree type)
14827 {
14828 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14829 }
14830
14831 /* Like size_in_bytes, but handle empty records specially. */
14832
14833 tree
14834 arg_size_in_bytes (const_tree type)
14835 {
14836 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14837 }
14838
/* Return true if an expression with CODE has to have the same result type as
   its first operand.  */

bool
expr_type_first_operand_type_p (tree_code code)
{
  switch (code)
    {
    /* Unary operations.  */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:

    /* Binary arithmetic and bitwise operations.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:

    /* Shifts and rotates (the second operand may have another type).  */
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      return false;
    }
}
14882
14883 /* Return a typenode for the "standard" C type with a given name. */
14884 tree
14885 get_typenode_from_name (const char *name)
14886 {
14887 if (name == NULL || *name == '\0')
14888 return NULL_TREE;
14889
14890 if (strcmp (name, "char") == 0)
14891 return char_type_node;
14892 if (strcmp (name, "unsigned char") == 0)
14893 return unsigned_char_type_node;
14894 if (strcmp (name, "signed char") == 0)
14895 return signed_char_type_node;
14896
14897 if (strcmp (name, "short int") == 0)
14898 return short_integer_type_node;
14899 if (strcmp (name, "short unsigned int") == 0)
14900 return short_unsigned_type_node;
14901
14902 if (strcmp (name, "int") == 0)
14903 return integer_type_node;
14904 if (strcmp (name, "unsigned int") == 0)
14905 return unsigned_type_node;
14906
14907 if (strcmp (name, "long int") == 0)
14908 return long_integer_type_node;
14909 if (strcmp (name, "long unsigned int") == 0)
14910 return long_unsigned_type_node;
14911
14912 if (strcmp (name, "long long int") == 0)
14913 return long_long_integer_type_node;
14914 if (strcmp (name, "long long unsigned int") == 0)
14915 return long_long_unsigned_type_node;
14916
14917 gcc_unreachable ();
14918 }
14919
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Keep the size up to date in tree.h !

   Each entry pairs the lazily-upgraded pointer-type node with the generic
   pointer node used before the struct is known, plus the struct tag name
   — presumably matching the builtin_structptr_type field layout in
   tree.h; verify there.  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
14933
14934 /* Return the maximum object size. */
14935
14936 tree
14937 max_object_size (void)
14938 {
14939 /* To do: Make this a configurable parameter. */
14940 return TYPE_MAX_VALUE (ptrdiff_type_node);
14941 }
14942
14943 #if CHECKING_P
14944
14945 namespace selftest {
14946
14947 /* Selftests for tree. */
14948
14949 /* Verify that integer constants are sane. */
14950
14951 static void
14952 test_integer_constants ()
14953 {
14954 ASSERT_TRUE (integer_type_node != NULL);
14955 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14956
14957 tree type = integer_type_node;
14958
14959 tree zero = build_zero_cst (type);
14960 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14961 ASSERT_EQ (type, TREE_TYPE (zero));
14962
14963 tree one = build_int_cst (type, 1);
14964 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14965 ASSERT_EQ (type, TREE_TYPE (zero));
14966 }
14967
14968 /* Verify identifiers. */
14969
14970 static void
14971 test_identifiers ()
14972 {
14973 tree identifier = get_identifier ("foo");
14974 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14975 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14976 }
14977
14978 /* Verify LABEL_DECL. */
14979
14980 static void
14981 test_labels ()
14982 {
14983 tree identifier = get_identifier ("err");
14984 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14985 identifier, void_type_node);
14986 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14987 ASSERT_FALSE (FORCED_LABEL (label_decl));
14988 }
14989
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are given by VALS.  */

static tree
build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
{
  /* VALS must supply exactly one element per vector lane.  */
  gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
  /* Start from the fully-general encoding (one pattern per element,
     one element per pattern); the builder compresses as appropriate.  */
  tree_vector_builder builder (type, vals.length (), 1);
  builder.splice (vals);
  return builder.build ();
}
15001
15002 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15003
15004 static void
15005 check_vector_cst (vec<tree> expected, tree actual)
15006 {
15007 ASSERT_KNOWN_EQ (expected.length (),
15008 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15009 for (unsigned int i = 0; i < expected.length (); ++i)
15010 ASSERT_EQ (wi::to_wide (expected[i]),
15011 wi::to_wide (vector_cst_elt (actual, i)));
15012 }
15013
/* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_duplicate (vec<tree> expected, tree actual,
			    unsigned int npatterns)
{
  /* A duplicate encoding stores exactly one element per pattern.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
15028
/* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
   and NPATTERNS background elements, and that its elements match
   EXPECTED.  */

static void
check_vector_cst_fill (vec<tree> expected, tree actual,
		       unsigned int npatterns)
{
  /* A fill encoding stores two elements per pattern
     (foreground + background), with no step.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
15044
/* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_stepped (vec<tree> expected, tree actual,
			  unsigned int npatterns)
{
  /* A stepped encoding stores three elements per pattern.  */
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
15059
/* Test the creation of VECTOR_CSTs.  Each step mutates ELEMENTS in
   place (so the order of the sub-tests matters) and checks which
   VECTOR_CST encoding the builder chooses.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  /* 8 lanes of 16-bit unsigned integers.  */
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 75, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
15143
15144 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15145 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15146 modifying its argument in-place. */
15147
15148 static void
15149 check_strip_nops (tree node, tree expected)
15150 {
15151 STRIP_NOPS (node);
15152 ASSERT_EQ (expected, node);
15153 }
15154
/* Verify location wrappers.  Exercises maybe_wrap_with_location on
   constants, strings, and decls, plus the cases where no wrapper
   should be created.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* NULL in, NULL out.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST; strings get VIEW_CONVERT_EXPR wrappers
     rather than NON_LVALUE_EXPR ones.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
15217
15218 /* Test various tree predicates. Verify that location wrappers don't
15219 affect the results. */
15220
15221 static void
15222 test_predicates ()
15223 {
15224 /* Build various constants and wrappers around them. */
15225
15226 location_t loc = BUILTINS_LOCATION;
15227
15228 tree i_0 = build_int_cst (integer_type_node, 0);
15229 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15230
15231 tree i_1 = build_int_cst (integer_type_node, 1);
15232 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15233
15234 tree i_m1 = build_int_cst (integer_type_node, -1);
15235 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15236
15237 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15238 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15239 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15240 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15241 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15242 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15243
15244 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15245 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15246 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15247
15248 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15249 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15250 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15251
15252 /* TODO: vector constants. */
15253
15254 /* Test integer_onep. */
15255 ASSERT_FALSE (integer_onep (i_0));
15256 ASSERT_FALSE (integer_onep (wr_i_0));
15257 ASSERT_TRUE (integer_onep (i_1));
15258 ASSERT_TRUE (integer_onep (wr_i_1));
15259 ASSERT_FALSE (integer_onep (i_m1));
15260 ASSERT_FALSE (integer_onep (wr_i_m1));
15261 ASSERT_FALSE (integer_onep (f_0));
15262 ASSERT_FALSE (integer_onep (wr_f_0));
15263 ASSERT_FALSE (integer_onep (f_1));
15264 ASSERT_FALSE (integer_onep (wr_f_1));
15265 ASSERT_FALSE (integer_onep (f_m1));
15266 ASSERT_FALSE (integer_onep (wr_f_m1));
15267 ASSERT_FALSE (integer_onep (c_i_0));
15268 ASSERT_TRUE (integer_onep (c_i_1));
15269 ASSERT_FALSE (integer_onep (c_i_m1));
15270 ASSERT_FALSE (integer_onep (c_f_0));
15271 ASSERT_FALSE (integer_onep (c_f_1));
15272 ASSERT_FALSE (integer_onep (c_f_m1));
15273
15274 /* Test integer_zerop. */
15275 ASSERT_TRUE (integer_zerop (i_0));
15276 ASSERT_TRUE (integer_zerop (wr_i_0));
15277 ASSERT_FALSE (integer_zerop (i_1));
15278 ASSERT_FALSE (integer_zerop (wr_i_1));
15279 ASSERT_FALSE (integer_zerop (i_m1));
15280 ASSERT_FALSE (integer_zerop (wr_i_m1));
15281 ASSERT_FALSE (integer_zerop (f_0));
15282 ASSERT_FALSE (integer_zerop (wr_f_0));
15283 ASSERT_FALSE (integer_zerop (f_1));
15284 ASSERT_FALSE (integer_zerop (wr_f_1));
15285 ASSERT_FALSE (integer_zerop (f_m1));
15286 ASSERT_FALSE (integer_zerop (wr_f_m1));
15287 ASSERT_TRUE (integer_zerop (c_i_0));
15288 ASSERT_FALSE (integer_zerop (c_i_1));
15289 ASSERT_FALSE (integer_zerop (c_i_m1));
15290 ASSERT_FALSE (integer_zerop (c_f_0));
15291 ASSERT_FALSE (integer_zerop (c_f_1));
15292 ASSERT_FALSE (integer_zerop (c_f_m1));
15293
15294 /* Test integer_all_onesp. */
15295 ASSERT_FALSE (integer_all_onesp (i_0));
15296 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15297 ASSERT_FALSE (integer_all_onesp (i_1));
15298 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15299 ASSERT_TRUE (integer_all_onesp (i_m1));
15300 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15301 ASSERT_FALSE (integer_all_onesp (f_0));
15302 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15303 ASSERT_FALSE (integer_all_onesp (f_1));
15304 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15305 ASSERT_FALSE (integer_all_onesp (f_m1));
15306 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15307 ASSERT_FALSE (integer_all_onesp (c_i_0));
15308 ASSERT_FALSE (integer_all_onesp (c_i_1));
15309 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15310 ASSERT_FALSE (integer_all_onesp (c_f_0));
15311 ASSERT_FALSE (integer_all_onesp (c_f_1));
15312 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15313
15314 /* Test integer_minus_onep. */
15315 ASSERT_FALSE (integer_minus_onep (i_0));
15316 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15317 ASSERT_FALSE (integer_minus_onep (i_1));
15318 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15319 ASSERT_TRUE (integer_minus_onep (i_m1));
15320 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15321 ASSERT_FALSE (integer_minus_onep (f_0));
15322 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15323 ASSERT_FALSE (integer_minus_onep (f_1));
15324 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15325 ASSERT_FALSE (integer_minus_onep (f_m1));
15326 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15327 ASSERT_FALSE (integer_minus_onep (c_i_0));
15328 ASSERT_FALSE (integer_minus_onep (c_i_1));
15329 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15330 ASSERT_FALSE (integer_minus_onep (c_f_0));
15331 ASSERT_FALSE (integer_minus_onep (c_f_1));
15332 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15333
15334 /* Test integer_each_onep. */
15335 ASSERT_FALSE (integer_each_onep (i_0));
15336 ASSERT_FALSE (integer_each_onep (wr_i_0));
15337 ASSERT_TRUE (integer_each_onep (i_1));
15338 ASSERT_TRUE (integer_each_onep (wr_i_1));
15339 ASSERT_FALSE (integer_each_onep (i_m1));
15340 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15341 ASSERT_FALSE (integer_each_onep (f_0));
15342 ASSERT_FALSE (integer_each_onep (wr_f_0));
15343 ASSERT_FALSE (integer_each_onep (f_1));
15344 ASSERT_FALSE (integer_each_onep (wr_f_1));
15345 ASSERT_FALSE (integer_each_onep (f_m1));
15346 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15347 ASSERT_FALSE (integer_each_onep (c_i_0));
15348 ASSERT_FALSE (integer_each_onep (c_i_1));
15349 ASSERT_FALSE (integer_each_onep (c_i_m1));
15350 ASSERT_FALSE (integer_each_onep (c_f_0));
15351 ASSERT_FALSE (integer_each_onep (c_f_1));
15352 ASSERT_FALSE (integer_each_onep (c_f_m1));
15353
15354 /* Test integer_truep. */
15355 ASSERT_FALSE (integer_truep (i_0));
15356 ASSERT_FALSE (integer_truep (wr_i_0));
15357 ASSERT_TRUE (integer_truep (i_1));
15358 ASSERT_TRUE (integer_truep (wr_i_1));
15359 ASSERT_FALSE (integer_truep (i_m1));
15360 ASSERT_FALSE (integer_truep (wr_i_m1));
15361 ASSERT_FALSE (integer_truep (f_0));
15362 ASSERT_FALSE (integer_truep (wr_f_0));
15363 ASSERT_FALSE (integer_truep (f_1));
15364 ASSERT_FALSE (integer_truep (wr_f_1));
15365 ASSERT_FALSE (integer_truep (f_m1));
15366 ASSERT_FALSE (integer_truep (wr_f_m1));
15367 ASSERT_FALSE (integer_truep (c_i_0));
15368 ASSERT_TRUE (integer_truep (c_i_1));
15369 ASSERT_FALSE (integer_truep (c_i_m1));
15370 ASSERT_FALSE (integer_truep (c_f_0));
15371 ASSERT_FALSE (integer_truep (c_f_1));
15372 ASSERT_FALSE (integer_truep (c_f_m1));
15373
15374 /* Test integer_nonzerop. */
15375 ASSERT_FALSE (integer_nonzerop (i_0));
15376 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15377 ASSERT_TRUE (integer_nonzerop (i_1));
15378 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15379 ASSERT_TRUE (integer_nonzerop (i_m1));
15380 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15381 ASSERT_FALSE (integer_nonzerop (f_0));
15382 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15383 ASSERT_FALSE (integer_nonzerop (f_1));
15384 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15385 ASSERT_FALSE (integer_nonzerop (f_m1));
15386 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15387 ASSERT_FALSE (integer_nonzerop (c_i_0));
15388 ASSERT_TRUE (integer_nonzerop (c_i_1));
15389 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15390 ASSERT_FALSE (integer_nonzerop (c_f_0));
15391 ASSERT_FALSE (integer_nonzerop (c_f_1));
15392 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15393
15394 /* Test real_zerop. */
15395 ASSERT_FALSE (real_zerop (i_0));
15396 ASSERT_FALSE (real_zerop (wr_i_0));
15397 ASSERT_FALSE (real_zerop (i_1));
15398 ASSERT_FALSE (real_zerop (wr_i_1));
15399 ASSERT_FALSE (real_zerop (i_m1));
15400 ASSERT_FALSE (real_zerop (wr_i_m1));
15401 ASSERT_TRUE (real_zerop (f_0));
15402 ASSERT_TRUE (real_zerop (wr_f_0));
15403 ASSERT_FALSE (real_zerop (f_1));
15404 ASSERT_FALSE (real_zerop (wr_f_1));
15405 ASSERT_FALSE (real_zerop (f_m1));
15406 ASSERT_FALSE (real_zerop (wr_f_m1));
15407 ASSERT_FALSE (real_zerop (c_i_0));
15408 ASSERT_FALSE (real_zerop (c_i_1));
15409 ASSERT_FALSE (real_zerop (c_i_m1));
15410 ASSERT_TRUE (real_zerop (c_f_0));
15411 ASSERT_FALSE (real_zerop (c_f_1));
15412 ASSERT_FALSE (real_zerop (c_f_m1));
15413
15414 /* Test real_onep. */
15415 ASSERT_FALSE (real_onep (i_0));
15416 ASSERT_FALSE (real_onep (wr_i_0));
15417 ASSERT_FALSE (real_onep (i_1));
15418 ASSERT_FALSE (real_onep (wr_i_1));
15419 ASSERT_FALSE (real_onep (i_m1));
15420 ASSERT_FALSE (real_onep (wr_i_m1));
15421 ASSERT_FALSE (real_onep (f_0));
15422 ASSERT_FALSE (real_onep (wr_f_0));
15423 ASSERT_TRUE (real_onep (f_1));
15424 ASSERT_TRUE (real_onep (wr_f_1));
15425 ASSERT_FALSE (real_onep (f_m1));
15426 ASSERT_FALSE (real_onep (wr_f_m1));
15427 ASSERT_FALSE (real_onep (c_i_0));
15428 ASSERT_FALSE (real_onep (c_i_1));
15429 ASSERT_FALSE (real_onep (c_i_m1));
15430 ASSERT_FALSE (real_onep (c_f_0));
15431 ASSERT_TRUE (real_onep (c_f_1));
15432 ASSERT_FALSE (real_onep (c_f_m1));
15433
15434 /* Test real_minus_onep. */
15435 ASSERT_FALSE (real_minus_onep (i_0));
15436 ASSERT_FALSE (real_minus_onep (wr_i_0));
15437 ASSERT_FALSE (real_minus_onep (i_1));
15438 ASSERT_FALSE (real_minus_onep (wr_i_1));
15439 ASSERT_FALSE (real_minus_onep (i_m1));
15440 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15441 ASSERT_FALSE (real_minus_onep (f_0));
15442 ASSERT_FALSE (real_minus_onep (wr_f_0));
15443 ASSERT_FALSE (real_minus_onep (f_1));
15444 ASSERT_FALSE (real_minus_onep (wr_f_1));
15445 ASSERT_TRUE (real_minus_onep (f_m1));
15446 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15447 ASSERT_FALSE (real_minus_onep (c_i_0));
15448 ASSERT_FALSE (real_minus_onep (c_i_1));
15449 ASSERT_FALSE (real_minus_onep (c_i_m1));
15450 ASSERT_FALSE (real_minus_onep (c_f_0));
15451 ASSERT_FALSE (real_minus_onep (c_f_1));
15452 ASSERT_TRUE (real_minus_onep (c_f_m1));
15453
15454 /* Test zerop. */
15455 ASSERT_TRUE (zerop (i_0));
15456 ASSERT_TRUE (zerop (wr_i_0));
15457 ASSERT_FALSE (zerop (i_1));
15458 ASSERT_FALSE (zerop (wr_i_1));
15459 ASSERT_FALSE (zerop (i_m1));
15460 ASSERT_FALSE (zerop (wr_i_m1));
15461 ASSERT_TRUE (zerop (f_0));
15462 ASSERT_TRUE (zerop (wr_f_0));
15463 ASSERT_FALSE (zerop (f_1));
15464 ASSERT_FALSE (zerop (wr_f_1));
15465 ASSERT_FALSE (zerop (f_m1));
15466 ASSERT_FALSE (zerop (wr_f_m1));
15467 ASSERT_TRUE (zerop (c_i_0));
15468 ASSERT_FALSE (zerop (c_i_1));
15469 ASSERT_FALSE (zerop (c_i_m1));
15470 ASSERT_TRUE (zerop (c_f_0));
15471 ASSERT_FALSE (zerop (c_f_1));
15472 ASSERT_FALSE (zerop (c_f_m1));
15473
15474 /* Test tree_expr_nonnegative_p. */
15475 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15476 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15477 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15478 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15479 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15480 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15481 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15482 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15483 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15484 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15485 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15486 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15487 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15488 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15489 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15490 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15491 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15492 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15493
15494 /* Test tree_expr_nonzero_p. */
15495 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15496 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15497 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15498 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15499 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15500 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15501
15502 /* Test integer_valued_real_p. */
15503 ASSERT_FALSE (integer_valued_real_p (i_0));
15504 ASSERT_TRUE (integer_valued_real_p (f_0));
15505 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15506 ASSERT_TRUE (integer_valued_real_p (f_1));
15507 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15508
15509 /* Test integer_pow2p. */
15510 ASSERT_FALSE (integer_pow2p (i_0));
15511 ASSERT_TRUE (integer_pow2p (i_1));
15512 ASSERT_TRUE (integer_pow2p (wr_i_1));
15513
15514 /* Test uniform_integer_cst_p. */
15515 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15516 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15517 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15518 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15519 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15520 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15521 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15522 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15523 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15524 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15525 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15526 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15527 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15528 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15529 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15530 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15531 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15532 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15533 }
15534
15535 /* Check that string escaping works correctly. */
15536
15537 static void
15538 test_escaped_strings (void)
15539 {
15540 int saved_cutoff;
15541 escaped_string msg;
15542
15543 msg.escape (NULL);
15544 /* ASSERT_STREQ does not accept NULL as a valid test
15545 result, so we have to use ASSERT_EQ instead. */
15546 ASSERT_EQ (NULL, (const char *) msg);
15547
15548 msg.escape ("");
15549 ASSERT_STREQ ("", (const char *) msg);
15550
15551 msg.escape ("foobar");
15552 ASSERT_STREQ ("foobar", (const char *) msg);
15553
15554 /* Ensure that we have -fmessage-length set to 0. */
15555 saved_cutoff = pp_line_cutoff (global_dc->printer);
15556 pp_line_cutoff (global_dc->printer) = 0;
15557
15558 msg.escape ("foo\nbar");
15559 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15560
15561 msg.escape ("\a\b\f\n\r\t\v");
15562 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15563
15564 /* Now repeat the tests with -fmessage-length set to 5. */
15565 pp_line_cutoff (global_dc->printer) = 5;
15566
15567 /* Note that the newline is not translated into an escape. */
15568 msg.escape ("foo\nbar");
15569 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15570
15571 msg.escape ("\a\b\f\n\r\t\v");
15572 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15573
15574 /* Restore the original message length setting. */
15575 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15576 }
15577
15578 /* Run all of the selftests within this file. */
15579
void
tree_c_tests ()
{
  /* Each helper below uses the selftest ASSERT_* macros, which abort
     on failure, so simply calling them in sequence runs the suite.  */
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15591
15592 } // namespace selftest
15593
15594 #endif /* CHECKING_P */
15595
15596 #include "gt-tree.h"