compile, runtime: permit anonymous and empty fields in C header
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
 28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
70
/* Tree code classes.  */

/* Expand each DEFTREECODE entry in all-tree.def into its class tag,
   producing a table indexed by tree code.  END_OF_BASE_TREE_CODES marks
   the boundary between the base codes and any language-specific ones.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
82
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

/* Reuse all-tree.def again, this time extracting the LENGTH field.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
96
/* Names of tree components.
   Used for printing out the tree and error messages.  */

/* Third expansion of all-tree.def: the human-readable NAME field.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
108
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries (same order as
   the enum in tree.h).  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
126
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  Only maintained when GATHER_STATISTICS
   is enabled; see record_node_allocation_statistics.  */

static uint64_t tree_code_counts[MAX_TREE_CODES];  /* live nodes per code */
uint64_t tree_node_counts[(int) all_kinds];        /* live nodes per kind */
uint64_t tree_node_sizes[(int) all_kinds];         /* bytes in use per kind */
135
/* Human-readable labels for the statistics report.
   Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
155
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  Starts at 1 so 0 is never a valid
   TYPE_UID.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;	/* precomputed hash of TYPE */
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000
174
/* Hasher for the type-sharing cache.  Entries are dropped at GC time
   when the cached type itself is no longer marked live.  */
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  /* Return the hash stored at entry creation (types cannot be rehashed).  */
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Keep the cache entry only while the underlying type survives GC.  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
186
/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.
   int_cst_node is a scratch INTEGER_CST reused for lookups.  */
static GTY (()) tree int_cst_node;
198
/* Hasher used to share INTEGER_CST nodes with identical type/value.  */
struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
206
/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  Lookup compares against a (type, value) pair
   rather than a full tree, hence the pair compare_type.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
217
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatably.  */
static GTY (()) tree cl_optimization_node;	/* scratch OPTIMIZATION_NODE */
static GTY (()) tree cl_target_option_node;	/* scratch TARGET_OPTION_NODE */

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
233
/* General tree->tree mapping structure for use in hash tables.  */

/* Maps a decl to its DECL_DEBUG_EXPR.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

/* Maps a decl to its DECL_VALUE_EXPR.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
242
/* Hasher for decl -> tree vector maps, keyed by the source decl's UID.
   Entries vanish at GC time once the key decl is dead.  */
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  /* Keep the entry only while the key decl survives GC.  */
  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

/* Maps a decl to its vector of debug arguments.  */
static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
262
/* Forward declarations for helpers defined later in this file.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

static tree build_array_type_1 (tree, tree, bool, bool);

/* Well-known trees (see enum tree_index) and the standard C integer
   types (see enum integer_type_kind).  */
tree global_trees[TI_MAX];
tree integer_types[itk_none];

/* Per-entry support for the target's __intN types.  */
bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* tree_contains_struct[CODE][TS] is nonzero when nodes of code CODE
   contain the structure TS; filled in by
   initialize_tree_contains_struct.  */
bool tree_contains_struct[MAX_TREE_CODES][64];
277
/* Number of operands for each OpenMP clause.
   Must be kept in sync with enum omp_clause_code in tree-core.h;
   tree_size relies on these values for OMP_CLAUSE nodes.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  2, /* OMP_CLAUSE__GRIDDIM_  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
};
363
/* Printable name of each OpenMP clause, indexed by omp_clause_code.
   Must be kept in sync with enum omp_clause_code in tree-core.h.
   Note both OMP_CLAUSE_TO_DECLARE and OMP_CLAUSE_TO print as "to".  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "task_reduction",
  "in_reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "nontemporal",
  "uniform",
  "to",
  "link",
  "from",
  "to",
  "map",
  "use_device_ptr",
  "use_device_addr",
  "is_device_ptr",
  "inclusive",
  "exclusive",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "_reductemp_",
  "_condtemp_",
  "_scantemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "device_type",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "order",
  "bind",
  "_simduid_",
  "_simt_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "_griddim_",
  "if_present",
  "finalize",
};
448
449
/* Return the tree node structure used by tree code CODE.  Dispatches
   first on the code's class, then on the individual code for the
   classes whose members do not all share one structure.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
	switch (code)
	  {
	  case FIELD_DECL:
	    return TS_FIELD_DECL;
	  case PARM_DECL:
	    return TS_PARM_DECL;
	  case VAR_DECL:
	    return TS_VAR_DECL;
	  case LABEL_DECL:
	    return TS_LABEL_DECL;
	  case RESULT_DECL:
	    return TS_RESULT_DECL;
	  case DEBUG_EXPR_DECL:
	    return TS_DECL_WRTL;
	  case CONST_DECL:
	    return TS_CONST_DECL;
	  case TYPE_DECL:
	    return TS_TYPE_DECL;
	  case FUNCTION_DECL:
	    return TS_FUNCTION_DECL;
	  case TRANSLATION_UNIT_DECL:
	    return TS_TRANSLATION_UNIT_DECL;
	  default:
	    /* Any other decl code (e.g. NAMESPACE_DECL, IMPORTED_DECL).  */
	    return TS_DECL_NON_COMMON;
	  }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      /* All expression-like classes share the tree_exp structure.  */
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case VOID_CST:		return TS_TYPED;
    case INTEGER_CST:		return TS_INT_CST;
    case POLY_INT_CST:		return TS_POLY_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case FIXED_CST:		return TS_FIXED_CST;
    case COMPLEX_CST:		return TS_COMPLEX;
    case VECTOR_CST:		return TS_VECTOR;
    case STRING_CST:		return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;
    case SSA_NAME:		return TS_SSA_NAME;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case TREE_BINFO:		return TS_BINFO;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;

    default:
      gcc_unreachable ();
    }
}
528
529
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  For every code we mark its own TS structure and, via the
   MARK_TS_* macros, every base structure it is derived from, so that
   CODE_CONTAINS_STRUCT queries are a simple table lookup.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  Each MARK_TS_*
	 macro marks the named structure and everything it in turn
	 derives from, so one call per case is enough.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
676
677
/* Init tree.c.  Allocate the sharing caches and scratch nodes, then
   fill in the structure-containment table and let the front end add
   its own entries.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  /* Per-decl DEBUG_EXPR and VALUE_EXPR maps.  */
  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  /* Scratch INTEGER_CST used for lookups in int_cst_hash_table.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  /* Scratch nodes for building option nodes before hashing them.  */
  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
708
709 \f
710 /* The name of the object as the assembler will see it (but before any
711 translations made by ASM_OUTPUT_LABELREF). Often this is the same
712 as DECL_NAME. It is an IDENTIFIER_NODE. */
713 tree
714 decl_assembler_name (tree decl)
715 {
716 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
717 lang_hooks.set_decl_assembler_name (decl);
718 return DECL_ASSEMBLER_NAME_RAW (decl);
719 }
720
721 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
722 (either of which may be NULL). Inform the FE, if this changes the
723 name. */
724
725 void
726 overwrite_decl_assembler_name (tree decl, tree name)
727 {
728 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
729 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 }
731
732 /* When the target supports COMDAT groups, this indicates which group the
733 DECL is associated with. This can be either an IDENTIFIER_NODE or a
734 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
735 tree
736 decl_comdat_group (const_tree node)
737 {
738 struct symtab_node *snode = symtab_node::get (node);
739 if (!snode)
740 return NULL;
741 return snode->get_comdat_group ();
742 }
743
744 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
745 tree
746 decl_comdat_group_id (const_tree node)
747 {
748 struct symtab_node *snode = symtab_node::get (node);
749 if (!snode)
750 return NULL;
751 return snode->get_comdat_group_id ();
752 }
753
/* When the target supports named sections, return the name of the
   section NODE has been placed in, or NULL if it is in no section.
   Note: despite older comments, the result is a plain C string taken
   from the symbol table, not an IDENTIFIER_NODE.  */
const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}
764
765 /* Set section name of NODE to VALUE (that is expected to be
766 identifier node) */
767 void
768 set_decl_section_name (tree node, const char *value)
769 {
770 struct symtab_node *snode;
771
772 if (value == NULL)
773 {
774 snode = symtab_node::get (node);
775 if (!snode)
776 return;
777 }
778 else if (VAR_P (node))
779 snode = varpool_node::get_create (node);
780 else
781 snode = cgraph_node::get_create (node);
782 snode->set_section (value);
783 }
784
785 /* Return TLS model of a variable NODE. */
786 enum tls_model
787 decl_tls_model (const_tree node)
788 {
789 struct varpool_node *snode = varpool_node::get (node);
790 if (!snode)
791 return TLS_MODEL_NONE;
792 return snode->tls_model;
793 }
794
795 /* Set TLS model of variable NODE to MODEL. */
796 void
797 set_decl_tls_model (tree node, enum tls_model model)
798 {
799 struct varpool_node *vnode;
800
801 if (model == TLS_MODEL_NONE)
802 {
803 vnode = varpool_node::get (node);
804 if (!vnode)
805 return;
806 }
807 else
808 vnode = varpool_node::get_create (node);
809 vnode->tls_model = model;
810 }
811
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR (those
   are handled by tree_size, which needs the node itself).  Codes
   beyond NUM_TREE_CODES are language-specific and deferred to the
   lang hook.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMESPACE_DECL:
	case IMPORTED_DECL:
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp embeds one operand; add room for the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	/* Variable-sized; must go through tree_size.  */
	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE:	return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
922
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes,
   where the trailing array length is read from the node itself.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* tree_int_cst embeds one HOST_WIDE_INT element.  */
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      /* base_binfos is an embedded vec; ask it for its own size.  */
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));

    case STRING_CST:
      /* +1 for the NUL terminator stored past TREE_STRING_LENGTH.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
	        * sizeof (tree));

    default:
      /* CALL_EXPR and friends record their operand count in the node.  */
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
964
/* Return the statistics bucket (tree_node_kind) used for tree CODE.
   Most classes map directly to one kind; tcc_exceptional codes are
   distinguished individually, with x_kind as the catch-all.  */

static tree_node_kind
get_stats_node_kind (enum tree_code code)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      return d_kind;
    case tcc_type:  /* a type node */
      return t_kind;
    case tcc_statement:  /* an expression with side effects */
      return s_kind;
    case tcc_reference:  /* a reference */
      return r_kind;
    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:  /* a unary arithmetic expression */
    case tcc_binary:  /* a binary arithmetic expression */
      return e_kind;
    case tcc_constant:  /* a constant */
      return c_kind;
    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  return id_kind;
	case TREE_VEC:
	  return vec_kind;
	case TREE_BINFO:
	  return binfo_kind;
	case SSA_NAME:
	  return ssa_name_kind;
	case BLOCK:
	  return b_kind;
	case CONSTRUCTOR:
	  return constr_kind;
	case OMP_CLAUSE:
	  return omp_clause_kind;
	default:
	  return x_kind;
	}
      break;
    case tcc_vl_exp:
      return e_kind;
    default:
      gcc_unreachable ();
    }
}
1016
1017 /* Record interesting allocation statistics for a tree node with CODE
1018 and LENGTH. */
1019
1020 static void
1021 record_node_allocation_statistics (enum tree_code code, size_t length)
1022 {
1023 if (!GATHER_STATISTICS)
1024 return;
1025
1026 tree_node_kind kind = get_stats_node_kind (code);
1027
1028 tree_code_counts[(int) code]++;
1029 tree_node_counts[(int) kind]++;
1030 tree_node_sizes[(int) kind] += length;
1031 }
1032
1033 /* Allocate and return a new UID from the DECL_UID namespace. */
1034
1035 int
1036 allocate_decl_uid (void)
1037 {
1038 return next_decl_uid++;
1039 }
1040
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* GC allocation is zero-initialized; only non-zero defaults need
     setting below.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      /* Debug decls draw from a separate, negative UID space so that
	 accidental uses stand out.  */
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  /* Option nodes own a separately allocated options struct.  */
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1156
1157 /* Free tree node. */
1158
1159 void
1160 free_node (tree node)
1161 {
1162 enum tree_code code = TREE_CODE (node);
1163 if (GATHER_STATISTICS)
1164 {
1165 enum tree_node_kind kind = get_stats_node_kind (code);
1166
1167 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1168 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1169 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1170
1171 tree_code_counts[(int) TREE_CODE (node)]--;
1172 tree_node_counts[(int) kind]--;
1173 tree_node_sizes[(int) kind] -= tree_size (node);
1174 }
1175 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1176 vec_free (CONSTRUCTOR_ELTS (node));
1177 else if (code == BLOCK)
1178 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1179 else if (code == TREE_BINFO)
1180 vec_free (BINFO_BASE_ACCESSES (node));
1181 ggc_free (node);
1182 }
1183 \f
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* Statement lists have their own copy machinery; see tree-iterator.  */
  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  /* The copy is a fresh node: not chained anywhere, not yet written to
     the assembler file, not yet visited by any walker.  */
  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    /* Share the original's points-to UID when it has one.  */
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* The value-expr lives in a side table keyed by the decl, so
	     it must be re-registered for the copy.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy is not associated with the original's body or
	     symbol-table entry.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Deep-copy the out-of-line option blob so the copy can be
	 modified independently.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1272
1273 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1274 For example, this can copy a list made of TREE_LIST nodes. */
1275
1276 tree
1277 copy_list (tree list)
1278 {
1279 tree head;
1280 tree prev, next;
1281
1282 if (list == 0)
1283 return 0;
1284
1285 head = prev = copy_node (list);
1286 next = TREE_CHAIN (list);
1287 while (next)
1288 {
1289 TREE_CHAIN (prev) = copy_node (next);
1290 prev = TREE_CHAIN (prev);
1291 next = TREE_CHAIN (next);
1292 }
1293 return head;
1294 }
1295
1296 \f
1297 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1298 INTEGER_CST with value CST and type TYPE. */
1299
1300 static unsigned int
1301 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1302 {
1303 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1304 /* We need extra HWIs if CST is an unsigned integer with its
1305 upper bit set. */
1306 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1307 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1308 return cst.get_len ();
1309 }
1310
/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Unsigned value with its top bit set: the topmost extended
	 element is all-ones zero-extended from the type's precision,
	 and any elements between the value and it are all-ones.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Unsigned type whose precision is not a whole number of HWIs:
	 zero-extend the partial most-significant element.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the remaining (low) elements verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1342
1343 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1344
1345 static tree
1346 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1347 CXX_MEM_STAT_INFO)
1348 {
1349 size_t length = sizeof (struct tree_poly_int_cst);
1350 record_node_allocation_statistics (POLY_INT_CST, length);
1351
1352 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1353
1354 TREE_SET_CODE (t, POLY_INT_CST);
1355 TREE_CONSTANT (t) = 1;
1356 TREE_TYPE (t) = type;
1357 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1358 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1359 return t;
1360 }
1361
1362 /* Create a constant tree that contains CST sign-extended to TYPE. */
1363
1364 tree
1365 build_int_cst (tree type, poly_int64 cst)
1366 {
1367 /* Support legacy code. */
1368 if (!type)
1369 type = integer_type_node;
1370
1371 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1372 }
1373
1374 /* Create a constant tree that contains CST zero-extended to TYPE. */
1375
1376 tree
1377 build_int_cstu (tree type, poly_uint64 cst)
1378 {
1379 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1380 }
1381
1382 /* Create a constant tree that contains CST sign-extended to TYPE. */
1383
1384 tree
1385 build_int_cst_type (tree type, poly_int64 cst)
1386 {
1387 gcc_assert (type);
1388 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1389 }
1390
1391 /* Constructs tree in type TYPE from with value given by CST. Signedness
1392 of CST is assumed to be the same as the signedness of TYPE. */
1393
1394 tree
1395 double_int_to_tree (tree type, double_int cst)
1396 {
1397 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1398 }
1399
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOWED if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */


tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  /* Truncate/extend CST to the type's precision, then build an
	     unshared constant so setting TREE_OVERFLOW cannot corrupt
	     a cached shared node.  */
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      /* Polynomial constant: mark each coefficient overflowed
		 as well as the POLY_INT_CST itself.  */
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
1451
1452 /* These are the hash table functions for the hash table of INTEGER_CST
1453 nodes of a sizetype. */
1454
1455 /* Return the hash code X, an INTEGER_CST. */
1456
1457 hashval_t
1458 int_cst_hasher::hash (tree x)
1459 {
1460 const_tree const t = x;
1461 hashval_t code = TYPE_UID (TREE_TYPE (t));
1462 int i;
1463
1464 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1465 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1466
1467 return code;
1468 }
1469
1470 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1471 is the same as that given by *Y, which is the same. */
1472
1473 bool
1474 int_cst_hasher::equal (tree x, tree y)
1475 {
1476 const_tree const xt = x;
1477 const_tree const yt = y;
1478
1479 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1480 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1481 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1482 return false;
1483
1484 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1485 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1486 return false;
1487
1488 return true;
1489 }
1490
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether HWI is one of the small constants cached in a
	 per-type vector: IX becomes the cache index (or stays -1) and
	 LIMIT the vector size.  */
      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = INTEGER_SHARE_LIMIT;
	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N); index 0 holds -1.  */
	      limit = INTEGER_SHARE_LIMIT + 1;
	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	/* An equal constant was already cached; release the new one.  */
	ggc_free (nt);
    }

  return t;
}
1645
1646 hashval_t
1647 poly_int_cst_hasher::hash (tree t)
1648 {
1649 inchash::hash hstate;
1650
1651 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1652 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1653 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1654
1655 return hstate.end ();
1656 }
1657
1658 bool
1659 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1660 {
1661 if (TREE_TYPE (x) != y.first)
1662 return false;
1663 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1664 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1665 return false;
1666 return true;
1667 }
1668
/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
   The elements must also have type TYPE.  */

tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  /* Hash the type together with every coefficient so that equal
     polynomial constants share a single node.  */
  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							     INSERT);
  if (*slot == NULL_TREE)
    {
      /* Not cached yet: build shared INTEGER_CSTs for the coefficients
	 and wrap them in a new POLY_INT_CST.  */
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
1695
1696 /* Create a constant tree with value VALUE in type TYPE. */
1697
1698 tree
1699 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1700 {
1701 if (value.is_constant ())
1702 return wide_int_to_tree_1 (type, value.coeffs[0]);
1703 return build_poly_int_cst (type, value);
1704 }
1705
/* Register the existing INTEGER_CST T in the constant caches (the
   per-type small-value vector or the global hash table), mirroring the
   cache policy of wide_int_to_tree_1.  T must not have TREE_OVERFLOW
   set, since overflowed constants are never shared.  */

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* Decide whether T is one of the small constants that live in the
     per-type vector cache; IX becomes the index, LIMIT the size.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = INTEGER_SHARE_LIMIT;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N; index 0 holds -1.  */
	  limit = INTEGER_SHARE_LIMIT + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      /* Non-negative value: ltu_p is safe here because the
		 sign has already been checked.  */
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      /* The slot must not already be occupied by a different node.  */
      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}
1809
1810
1811 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1812 and the rest are zeros. */
1813
1814 tree
1815 build_low_bits_mask (tree type, unsigned bits)
1816 {
1817 gcc_assert (bits <= TYPE_PRECISION (type));
1818
1819 return wide_int_to_tree (type, wi::mask (bits, false,
1820 TYPE_PRECISION (type)));
1821 }
1822
1823 /* Checks that X is integer constant that can be expressed in (unsigned)
1824 HOST_WIDE_INT without loss of precision. */
1825
1826 bool
1827 cst_and_fits_in_hwi (const_tree x)
1828 {
1829 return (TREE_CODE (x) == INTEGER_CST
1830 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1831 }
1832
1833 /* Build a newly constructed VECTOR_CST with the given values of
1834 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1835
1836 tree
1837 make_vector (unsigned log2_npatterns,
1838 unsigned int nelts_per_pattern MEM_STAT_DECL)
1839 {
1840 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1841 tree t;
1842 unsigned npatterns = 1 << log2_npatterns;
1843 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1844 unsigned length = (sizeof (struct tree_vector)
1845 + (encoded_nelts - 1) * sizeof (tree));
1846
1847 record_node_allocation_statistics (VECTOR_CST, length);
1848
1849 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1850
1851 TREE_SET_CODE (t, VECTOR_CST);
1852 TREE_CONSTANT (t) = 1;
1853 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1854 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1855
1856 return t;
1857 }
1858
1859 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1860 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1861
1862 tree
1863 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1864 {
1865 unsigned HOST_WIDE_INT idx, nelts;
1866 tree value;
1867
1868 /* We can't construct a VECTOR_CST for a variable number of elements. */
1869 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1870 tree_vector_builder vec (type, nelts, 1);
1871 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1872 {
1873 if (TREE_CODE (value) == VECTOR_CST)
1874 {
1875 /* If NELTS is constant then this must be too. */
1876 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1877 for (unsigned i = 0; i < sub_nelts; ++i)
1878 vec.quick_push (VECTOR_CST_ELT (value, i));
1879 }
1880 else
1881 vec.quick_push (value);
1882 }
1883 while (vec.length () < nelts)
1884 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1885
1886 return vec.build ();
1887 }
1888
1889 /* Build a vector of type VECTYPE where all the elements are SCs. */
1890 tree
1891 build_vector_from_val (tree vectype, tree sc)
1892 {
1893 unsigned HOST_WIDE_INT i, nunits;
1894
1895 if (sc == error_mark_node)
1896 return sc;
1897
1898 /* Verify that the vector type is suitable for SC. Note that there
1899 is some inconsistency in the type-system with respect to restrict
1900 qualifications of pointers. Vector types always have a main-variant
1901 element type and the qualification is applied to the vector-type.
1902 So TREE_TYPE (vector-type) does not return a properly qualified
1903 vector element-type. */
1904 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1905 TREE_TYPE (vectype)));
1906
1907 if (CONSTANT_CLASS_P (sc))
1908 {
1909 tree_vector_builder v (vectype, 1, 1);
1910 v.quick_push (sc);
1911 return v.build ();
1912 }
1913 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1914 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1915 else
1916 {
1917 vec<constructor_elt, va_gc> *v;
1918 vec_alloc (v, nunits);
1919 for (i = 0; i < nunits; ++i)
1920 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1921 return build_constructor (vectype, v);
1922 }
1923 }
1924
1925 /* If TYPE is not a vector type, just return SC, otherwise return
1926 build_vector_from_val (TYPE, SC). */
1927
1928 tree
1929 build_uniform_cst (tree type, tree sc)
1930 {
1931 if (!VECTOR_TYPE_P (type))
1932 return sc;
1933
1934 return build_vector_from_val (type, sc);
1935 }
1936
1937 /* Build a vector series of type TYPE in which element I has the value
1938 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1939 and a VEC_SERIES_EXPR otherwise. */
1940
1941 tree
1942 build_vec_series (tree type, tree base, tree step)
1943 {
1944 if (integer_zerop (step))
1945 return build_vector_from_val (type, base);
1946 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1947 {
1948 tree_vector_builder builder (type, 1, 3);
1949 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1950 wi::to_wide (base) + wi::to_wide (step));
1951 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1952 wi::to_wide (elt1) + wi::to_wide (step));
1953 builder.quick_push (base);
1954 builder.quick_push (elt1);
1955 builder.quick_push (elt2);
1956 return builder.build ();
1957 }
1958 return build2 (VEC_SERIES_EXPR, type, base, step);
1959 }
1960
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      /* The element type is not an unsigned integer: substitute an
	 unsigned integer type of the same bit size, keeping the same
	 unit count, so the series can be represented.  */
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  /* Encode the series as one pattern of three elements; presumably the
     builder extrapolates the rest from the constant step (see
     tree_vector_builder).  */
  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
1983
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case: with an even, constant element count
     the encoding only needs half as many patterns of two elements.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  tree_vector_builder builder (vec_type, count, 2);
  /* Push COUNT patterns of two elements each, A while within the first
     NUM_A lanes and B afterwards.  */
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
2000
2001 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2002 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2003
2004 void
2005 recompute_constructor_flags (tree c)
2006 {
2007 unsigned int i;
2008 tree val;
2009 bool constant_p = true;
2010 bool side_effects_p = false;
2011 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2012
2013 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2014 {
2015 /* Mostly ctors will have elts that don't have side-effects, so
2016 the usual case is to scan all the elements. Hence a single
2017 loop for both const and side effects, rather than one loop
2018 each (with early outs). */
2019 if (!TREE_CONSTANT (val))
2020 constant_p = false;
2021 if (TREE_SIDE_EFFECTS (val))
2022 side_effects_p = true;
2023 }
2024
2025 TREE_SIDE_EFFECTS (c) = side_effects_p;
2026 TREE_CONSTANT (c) = constant_p;
2027 }
2028
2029 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2030 CONSTRUCTOR C. */
2031
2032 void
2033 verify_constructor_flags (tree c)
2034 {
2035 unsigned int i;
2036 tree val;
2037 bool constant_p = TREE_CONSTANT (c);
2038 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2039 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2040
2041 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2042 {
2043 if (constant_p && !TREE_CONSTANT (val))
2044 internal_error ("non-constant element in constant CONSTRUCTOR");
2045 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2046 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2047 }
2048 }
2049
2050 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2051 are in the vec pointed to by VALS. */
2052 tree
2053 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2054 {
2055 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2056
2057 TREE_TYPE (c) = type;
2058 CONSTRUCTOR_ELTS (c) = vals;
2059
2060 recompute_constructor_flags (c);
2061
2062 return c;
2063 }
2064
2065 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2066 INDEX and VALUE. */
2067 tree
2068 build_constructor_single (tree type, tree index, tree value)
2069 {
2070 vec<constructor_elt, va_gc> *v;
2071 constructor_elt elt = {index, value};
2072
2073 vec_alloc (v, 1);
2074 v->quick_push (elt);
2075
2076 return build_constructor (type, v);
2077 }
2078
2079
2080 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2081 are in a list pointed to by VALS. */
2082 tree
2083 build_constructor_from_list (tree type, tree vals)
2084 {
2085 tree t;
2086 vec<constructor_elt, va_gc> *v = NULL;
2087
2088 if (vals)
2089 {
2090 vec_alloc (v, list_length (vals));
2091 for (t = vals; t; t = TREE_CHAIN (t))
2092 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2093 }
2094
2095 return build_constructor (type, v);
2096 }
2097
2098 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2099 of elements, provided as index/value pairs. */
2100
2101 tree
2102 build_constructor_va (tree type, int nelts, ...)
2103 {
2104 vec<constructor_elt, va_gc> *v = NULL;
2105 va_list p;
2106
2107 va_start (p, nelts);
2108 vec_alloc (v, nelts);
2109 while (nelts--)
2110 {
2111 tree index = va_arg (p, tree);
2112 tree value = va_arg (p, tree);
2113 CONSTRUCTOR_APPEND_ELT (v, index, value);
2114 }
2115 va_end (p);
2116 return build_constructor (type, v);
2117 }
2118
2119 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2120
2121 tree
2122 build_clobber (tree type)
2123 {
2124 tree clobber = build_constructor (type, NULL);
2125 TREE_THIS_VOLATILE (clobber) = true;
2126 return clobber;
2127 }
2128
2129 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2130
2131 tree
2132 build_fixed (tree type, FIXED_VALUE_TYPE f)
2133 {
2134 tree v;
2135 FIXED_VALUE_TYPE *fp;
2136
2137 v = make_node (FIXED_CST);
2138 fp = ggc_alloc<fixed_value> ();
2139 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2140
2141 TREE_TYPE (v) = type;
2142 TREE_FIXED_CST_PTR (v) = fp;
2143 return v;
2144 }
2145
/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  int overflow = 0;

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  /* The REAL_VALUE_TYPE payload is stored out of line in GC memory.  */
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  /* OVERFLOW is currently always zero; it is kept so the ??? above can
     be resolved without restructuring this function.  */
  TREE_OVERFLOW (v) = overflow;
  return v;
}
2167
2168 /* Like build_real, but first truncate D to the type. */
2169
2170 tree
2171 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2172 {
2173 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2174 }
2175
2176 /* Return a new REAL_CST node whose type is TYPE
2177 and whose value is the integer value of the INTEGER_CST node I. */
2178
2179 REAL_VALUE_TYPE
2180 real_value_from_int_cst (const_tree type, const_tree i)
2181 {
2182 REAL_VALUE_TYPE d;
2183
2184 /* Clear all bits of the real value type so that we can later do
2185 bitwise comparisons to see if two values are the same. */
2186 memset (&d, 0, sizeof d);
2187
2188 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2189 TYPE_SIGN (TREE_TYPE (i)));
2190 return d;
2191 }
2192
2193 /* Given a tree representing an integer constant I, return a tree
2194 representing the same value as a floating-point constant of type TYPE. */
2195
2196 tree
2197 build_real_from_int_cst (tree type, const_tree i)
2198 {
2199 tree v;
2200 int overflow = TREE_OVERFLOW (i);
2201
2202 v = build_real (type, real_value_from_int_cst (type, i));
2203
2204 TREE_OVERFLOW (v) |= overflow;
2205 return v;
2206 }
2207
/* Return a newly constructed STRING_CST node whose value is
   the LEN characters at STR.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.  */
  /* +1 leaves room for the NUL terminator appended below.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  /* STRING_CST nodes are variable-sized, so they are allocated raw
     rather than via make_node.  */
  s = (tree) ggc_internal_alloc (length);

  /* Only the common header is cleared; the string payload is fully
     overwritten by the memcpy below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  /* Always NUL-terminate, even if STR itself was not.  */
  s->string.str[len] = '\0';

  return s;
}
2235
/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  /* Both parts must be constants; expressions are not allowed here.  */
  gcc_assert (CONSTANT_CLASS_P (real));
  gcc_assert (CONSTANT_CLASS_P (imag));

  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  /* With no TYPE, derive the complex type from the real part's type.  */
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  /* The whole constant overflowed if either part did.  */
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}
2255
2256 /* Build a complex (inf +- 0i), such as for the result of cproj.
2257 TYPE is the complex tree type of the result. If NEG is true, the
2258 imaginary zero is negative. */
2259
2260 tree
2261 build_complex_inf (tree type, bool neg)
2262 {
2263 REAL_VALUE_TYPE rinf, rzero = dconst0;
2264
2265 real_inf (&rinf);
2266 rzero.sign = neg;
2267 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2268 build_real (TREE_TYPE (type), rzero));
2269 }
2270
2271 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2272 element is set to 1. In particular, this is 1 + i for complex types. */
2273
2274 tree
2275 build_each_one_cst (tree type)
2276 {
2277 if (TREE_CODE (type) == COMPLEX_TYPE)
2278 {
2279 tree scalar = build_one_cst (TREE_TYPE (type));
2280 return build_complex (type, scalar, scalar);
2281 }
2282 else
2283 return build_one_cst (type);
2284 }
2285
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Every element of the vector is 1.  */
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* The complex multiplicative identity is 1 + 0i, not 1 + 1i
	 (contrast build_each_one_cst).  */
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2323
2324 /* Return an integer of type TYPE containing all 1's in as much precision as
2325 it contains, or a complex or vector whose subparts are such integers. */
2326
2327 tree
2328 build_all_ones_cst (tree type)
2329 {
2330 if (TREE_CODE (type) == COMPLEX_TYPE)
2331 {
2332 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2333 return build_complex (type, scalar, scalar);
2334 }
2335 else
2336 return build_minus_one_cst (type);
2337 }
2338
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Every element of the vector is -1.  */
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      /* Complex -1 is -1 + 0i.  */
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
2378
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	/* Every element of the vector is zero.  */
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	/* Complex zero is 0 + 0i.  */
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* Aggregates are zeroed with an empty CONSTRUCTOR; anything else
	 scalar falls back to converting integer zero.  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
2419
2420
/* Build a BINFO with BASE_BINFOS slots for base binfos.  The embedded
   base-binfo vector is allocated in the same GC block as the node, so
   the whole object is variable-sized.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* Room for the fixed part of the node plus an embedded vec<tree>
     with capacity BASE_BINFOS.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Clear only the fixed part; the vector is initialized below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  /* Set up the embedded vector's header (capacity, zero length).  */
  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2442
/* Create a CASE_LABEL_EXPR tree node and return it.  LOW_VALUE and
   HIGH_VALUE delimit the case range (HIGH_VALUE may be NULL_TREE for a
   single value; both NULL means the default label).  LABEL_DECL is the
   LABEL_DECL the case jumps to and also provides the source location.  */

tree
build_case_label (tree low_value, tree high_value, tree label_decl)
{
  tree t = make_node (CASE_LABEL_EXPR);

  TREE_TYPE (t) = void_type_node;
  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));

  CASE_LOW (t) = low_value;
  CASE_HIGH (t) = high_value;
  CASE_LABEL (t) = label_decl;
  /* The label is not linked into any switch yet.  */
  CASE_CHAIN (t) = NULL_TREE;

  return t;
}
2460
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* tree_int_cst already contains one HOST_WIDE_INT element, hence
     EXT_LEN - 1 extra elements.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2492
/* Build a newly constructed TREE_VEC node of length LEN.  The elements
   are zero-initialized (cleared allocation).  */

tree
make_tree_vec (int len MEM_STAT_DECL)
{
  tree t;
  /* tree_vec already contains one tree slot, hence LEN - 1 extras.  */
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, TREE_VEC);
  TREE_VEC_LENGTH (t) = len;

  return t;
}
2510
/* Grow a TREE_VEC node V to new length LEN and return the (possibly
   moved) node.  LEN must be strictly greater than the current length;
   callers must use the returned pointer since ggc_realloc may move the
   node.  */

tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  gcc_assert (len > oldlen);

  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Only account for the growth, not the full new size.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
2532 \f
2533 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2534 fixed, and scalar, complex or vector. */
2535
2536 bool
2537 zerop (const_tree expr)
2538 {
2539 return (integer_zerop (expr)
2540 || real_zerop (expr)
2541 || fixed_zerop (expr));
2542 }
2543
/* Return 1 if EXPR is the integer constant zero or a complex constant
   of zero, or a location wrapper for such a constant.  */

bool
integer_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::to_wide (expr) == 0;
    case COMPLEX_CST:
      return (integer_zerop (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      /* A zero vector is encoded as a single repeated zero element.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2567
/* Return 1 if EXPR is the integer constant one or the corresponding
   complex constant (1 + 0i), or a location wrapper for such a
   constant.  */

bool
integer_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      /* Compare in infinite precision so e.g. an all-ones 1-bit
	 unsigned value also counts as one.  */
      return wi::eq_p (wi::to_widest (expr), 1);
    case COMPLEX_CST:
      return (integer_onep (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      /* An all-ones-valued vector is encoded as one repeated element.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2591
2592 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2593 return 1 if every piece is the integer constant one.
2594 Also return 1 for location wrappers for such a constant. */
2595
2596 bool
2597 integer_each_onep (const_tree expr)
2598 {
2599 STRIP_ANY_LOCATION_WRAPPER (expr);
2600
2601 if (TREE_CODE (expr) == COMPLEX_CST)
2602 return (integer_onep (TREE_REALPART (expr))
2603 && integer_onep (TREE_IMAGPART (expr)));
2604 else
2605 return integer_onep (expr);
2606 }
2607
/* Return 1 if EXPR is an integer containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers,
   or a location wrapper for such a constant.  */

bool
integer_all_onesp (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_all_onesp (TREE_REALPART (expr))
      && integer_all_onesp (TREE_IMAGPART (expr)))
    return true;

  /* An all-ones vector is encoded as one repeated all-ones element.  */
  else if (TREE_CODE (expr) == VECTOR_CST)
    return (VECTOR_CST_NPATTERNS (expr) == 1
	    && VECTOR_CST_DUPLICATE_P (expr)
	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));

  else if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  /* All-ones is the unsigned maximum of the type's precision.  */
  return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
	  == wi::to_wide (expr));
}
2633
2634 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2635 for such a constant. */
2636
2637 bool
2638 integer_minus_onep (const_tree expr)
2639 {
2640 STRIP_ANY_LOCATION_WRAPPER (expr);
2641
2642 if (TREE_CODE (expr) == COMPLEX_CST)
2643 return (integer_all_onesp (TREE_REALPART (expr))
2644 && integer_zerop (TREE_IMAGPART (expr)));
2645 else
2646 return integer_all_onesp (expr);
2647 }
2648
2649 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2650 one bit on), or a location wrapper for such a constant. */
2651
2652 bool
2653 integer_pow2p (const_tree expr)
2654 {
2655 STRIP_ANY_LOCATION_WRAPPER (expr);
2656
2657 if (TREE_CODE (expr) == COMPLEX_CST
2658 && integer_pow2p (TREE_REALPART (expr))
2659 && integer_zerop (TREE_IMAGPART (expr)))
2660 return true;
2661
2662 if (TREE_CODE (expr) != INTEGER_CST)
2663 return false;
2664
2665 return wi::popcount (wi::to_wide (expr)) == 1;
2666 }
2667
2668 /* Return 1 if EXPR is an integer constant other than zero or a
2669 complex constant other than zero, or a location wrapper for such a
2670 constant. */
2671
2672 bool
2673 integer_nonzerop (const_tree expr)
2674 {
2675 STRIP_ANY_LOCATION_WRAPPER (expr);
2676
2677 return ((TREE_CODE (expr) == INTEGER_CST
2678 && wi::to_wide (expr) != 0)
2679 || (TREE_CODE (expr) == COMPLEX_CST
2680 && (integer_nonzerop (TREE_REALPART (expr))
2681 || integer_nonzerop (TREE_IMAGPART (expr)))));
2682 }
2683
2684 /* Return 1 if EXPR is the integer constant one. For vector,
2685 return 1 if every piece is the integer constant minus one
2686 (representing the value TRUE).
2687 Also return 1 for location wrappers for such a constant. */
2688
2689 bool
2690 integer_truep (const_tree expr)
2691 {
2692 STRIP_ANY_LOCATION_WRAPPER (expr);
2693
2694 if (TREE_CODE (expr) == VECTOR_CST)
2695 return integer_all_onesp (expr);
2696 return integer_onep (expr);
2697 }
2698
2699 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2700 for such a constant. */
2701
2702 bool
2703 fixed_zerop (const_tree expr)
2704 {
2705 STRIP_ANY_LOCATION_WRAPPER (expr);
2706
2707 return (TREE_CODE (expr) == FIXED_CST
2708 && TREE_FIXED_CST (expr).data.is_zero ());
2709 }
2710
2711 /* Return the power of two represented by a tree node known to be a
2712 power of two. */
2713
2714 int
2715 tree_log2 (const_tree expr)
2716 {
2717 if (TREE_CODE (expr) == COMPLEX_CST)
2718 return tree_log2 (TREE_REALPART (expr));
2719
2720 return wi::exact_log2 (wi::to_wide (expr));
2721 }
2722
2723 /* Similar, but return the largest integer Y such that 2 ** Y is less
2724 than or equal to EXPR. */
2725
2726 int
2727 tree_floor_log2 (const_tree expr)
2728 {
2729 if (TREE_CODE (expr) == COMPLEX_CST)
2730 return tree_log2 (TREE_REALPART (expr));
2731
2732 return wi::floor_log2 (wi::to_wide (expr));
2733 }
2734
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of it's type.  The result is a
   conservative lower bound: the true number of trailing zeros at runtime
   is at least the value returned.  */

unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have meaningful trailing zeros.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (wi::to_wide (expr));
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use range information recorded on the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve at least as many trailing zeros as the operand
	 with the fewest.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so take the better of the two.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros of a product add up.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* Only a constant in-range shift count adds to the bound.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      /* A right shift removes trailing zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two acts like a right shift.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If the operand is known to be zero (all its bits are trailing
	 zeros), the conversion result is zero too.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Both arms must provide the guarantee.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
2845
/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	/* Don't simply check for a duplicate because the predicate
	   accepts both +0.0 and -0.0.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}
2877
/* Return 1 if EXPR is the real constant one in real or complex form
   (complex means 1.0 + 0.0i).
   Trailing zeroes matter for decimal float constants, so don't return
   1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      /* A splat of 1.0 is encoded as a single repeated element.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2904
/* Return 1 if EXPR is the real constant minus one (for complex,
   -1.0 + 0.0i).  Trailing zeroes matter for decimal float constants,
   so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_minus_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconstm1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_minus_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      /* A splat of -1.0 is encoded as a single repeated element.  */
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
2930
2931 /* Nonzero if EXP is a constant or a cast of a constant. */
2932
2933 bool
2934 really_constant_p (const_tree exp)
2935 {
2936 /* This is not quite the same as STRIP_NOPS. It does more. */
2937 while (CONVERT_EXPR_P (exp)
2938 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2939 exp = TREE_OPERAND (exp, 0);
2940 return TREE_CONSTANT (exp);
2941 }
2942
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      /* Check all coefficients before writing any of them so *VALUE is
	 not partially modified on failure.  */
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
2973
/* Return T's value as a signed poly_int64.  T must satisfy
   tree_fits_poly_int64_p; this is checked.  */

poly_int64
tree_to_poly_int64 (const_tree t)
{
  gcc_assert (tree_fits_poly_int64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_shwi ();
  return TREE_INT_CST_LOW (t);
}
2982
/* Return T's value as an unsigned poly_uint64.  T must satisfy
   tree_fits_poly_uint64_p; this is checked.  */

poly_uint64
tree_to_poly_uint64 (const_tree t)
{
  gcc_assert (tree_fits_poly_uint64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_uhwi ();
  return TREE_INT_CST_LOW (t);
}
2991 \f
2992 /* Return first list element whose TREE_VALUE is ELEM.
2993 Return 0 if ELEM is not in LIST. */
2994
2995 tree
2996 value_member (tree elem, tree list)
2997 {
2998 while (list)
2999 {
3000 if (elem == TREE_VALUE (list))
3001 return list;
3002 list = TREE_CHAIN (list);
3003 }
3004 return NULL_TREE;
3005 }
3006
3007 /* Return first list element whose TREE_PURPOSE is ELEM.
3008 Return 0 if ELEM is not in LIST. */
3009
3010 tree
3011 purpose_member (const_tree elem, tree list)
3012 {
3013 while (list)
3014 {
3015 if (elem == TREE_PURPOSE (list))
3016 return list;
3017 list = TREE_CHAIN (list);
3018 }
3019 return NULL_TREE;
3020 }
3021
3022 /* Return true if ELEM is in V. */
3023
3024 bool
3025 vec_member (const_tree elem, vec<tree, va_gc> *v)
3026 {
3027 unsigned ix;
3028 tree t;
3029 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3030 if (elem == t)
3031 return true;
3032 return false;
3033 }
3034
3035 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3036 NULL_TREE. */
3037
3038 tree
3039 chain_index (int idx, tree chain)
3040 {
3041 for (; chain && idx > 0; --idx)
3042 chain = TREE_CHAIN (chain);
3043 return chain;
3044 }
3045
3046 /* Return nonzero if ELEM is part of the chain CHAIN. */
3047
3048 bool
3049 chain_member (const_tree elem, const_tree chain)
3050 {
3051 while (chain)
3052 {
3053 if (elem == chain)
3054 return true;
3055 chain = DECL_CHAIN (chain);
3056 }
3057
3058 return false;
3059 }
3060
/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q advances at half P's speed; if the chain is circular the two
     pointers eventually meet and the assert below fires (classic
     tortoise-and-hare cycle detection).  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
3087
3088 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3089 UNION_TYPE TYPE, or NULL_TREE if none. */
3090
3091 tree
3092 first_field (const_tree type)
3093 {
3094 tree t = TYPE_FIELDS (type);
3095 while (t && TREE_CODE (t) != FIELD_DECL)
3096 t = TREE_CHAIN (t);
3097 return t;
3098 }
3099
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  Destructive: OP1's last node
   is modified in place.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Walk to the last node of OP1 and splice OP2 onto it.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Verify the splice did not create a cycle (i.e. OP2 did not
       already contain OP1's tail).  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
3128
3129 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3130
3131 tree
3132 tree_last (tree chain)
3133 {
3134 tree next;
3135 if (chain)
3136 while ((next = TREE_CHAIN (chain)))
3137 chain = next;
3138 return chain;
3139 }
3140
3141 /* Reverse the order of elements in the chain T,
3142 and return the new head of the chain (old last element). */
3143
3144 tree
3145 nreverse (tree t)
3146 {
3147 tree prev = 0, decl, next;
3148 for (decl = t; decl; decl = next)
3149 {
3150 /* We shouldn't be using this function to reverse BLOCK chains; we
3151 have blocks_nreverse for that. */
3152 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3153 next = TREE_CHAIN (decl);
3154 TREE_CHAIN (decl) = prev;
3155 prev = decl;
3156 }
3157 return prev;
3158 }
3159 \f
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PARM and VALUE.
   The TREE_CHAIN of the new node is NULL (set by make_node).  */

tree
build_tree_list (tree parm, tree value MEM_STAT_DECL)
{
  tree t = make_node (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}
3171
/* Build a chain of TREE_LIST nodes from a vector, preserving the
   vector's order.  Each element becomes the TREE_VALUE of a node whose
   TREE_PURPOSE is NULL.  */

tree
build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  /* PP always points at the chain slot where the next node goes, so
     the list is built head-to-tail in a single pass.  */
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}
3188
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing; the list-specific fields
     are all assigned below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
3209
3210 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3211 trees. */
3212
3213 vec<tree, va_gc> *
3214 ctor_to_vec (tree ctor)
3215 {
3216 vec<tree, va_gc> *vec;
3217 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3218 unsigned int ix;
3219 tree val;
3220
3221 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3222 vec->quick_push (val);
3223
3224 return vec;
3225 }
3226 \f
/* Return the size nominally occupied by an object of type TYPE
   when it resides in memory.  The value is measured in units of bytes,
   and its data type is that normally used for type sizes
   (which is the first type created by make_signed_type or
   make_unsigned_type).  LOC is used for the diagnostic emitted when
   TYPE is incomplete; in that case size_zero_node is returned.  */

tree
size_in_bytes_loc (location_t loc, const_tree type)
{
  tree t;

  if (type == error_mark_node)
    return integer_zero_node;

  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t == 0)
    {
      /* Incomplete type: let the front end report it.  */
      lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
      return size_zero_node;
    }

  return t;
}
3252
3253 /* Return the size of TYPE (in bytes) as a wide integer
3254 or return -1 if the size can vary or is larger than an integer. */
3255
3256 HOST_WIDE_INT
3257 int_size_in_bytes (const_tree type)
3258 {
3259 tree t;
3260
3261 if (type == error_mark_node)
3262 return 0;
3263
3264 type = TYPE_MAIN_VARIANT (type);
3265 t = TYPE_SIZE_UNIT (type);
3266
3267 if (t && tree_fits_uhwi_p (t))
3268 return TREE_INT_CST_LOW (t);
3269 else
3270 return -1;
3271 }
3272
/* Return the maximum size of TYPE (in bytes) as a wide integer
   or return -1 if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
max_int_size_in_bytes (const_tree type)
{
  HOST_WIDE_INT size = -1;
  tree size_tree;

  /* If this is an array type, check for a possible MAX_SIZE attached.  */

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      size_tree = TYPE_ARRAY_MAX_SIZE (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  /* If we still haven't been able to get a size, see if the language
     can compute a maximum size.  */

  if (size == -1)
    {
      size_tree = lang_hooks.types.max_size (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  return size;
}
3305 \f
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  Combines the byte-aligned offset
   with the bit offset within that unit.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}
3315 \f
/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  Combines the byte-aligned offset
   with the bit offset within that unit.  */

tree
byte_position (const_tree field)
{
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}
3325
/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_byte can.  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
3335 \f
/* Return the strictest alignment, in bits, that T is known to have.
   The result is a conservative guarantee: the object's actual
   alignment is at least the value returned.  */

unsigned int
expr_align (const_tree t)
{
  unsigned int align0, align1;

  switch (TREE_CODE (t))
    {
    CASE_CONVERT:  case NON_LVALUE_EXPR:
      /* If we have conversions, we know that the alignment of the
	 object must meet each of the alignments of the types.  */
      align0 = expr_align (TREE_OPERAND (t, 0));
      align1 = TYPE_ALIGN (TREE_TYPE (t));
      return MAX (align0, align1);

    case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
    case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      /* These don't change the alignment of an object.  */
      return expr_align (TREE_OPERAND (t, 0));

    case COND_EXPR:
      /* The best we can do is say that the alignment is the least aligned
	 of the two arms.  */
      align0 = expr_align (TREE_OPERAND (t, 1));
      align1 = expr_align (TREE_OPERAND (t, 2));
      return MIN (align0, align1);

      /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
	 meaningfully, it's always 1.  */
    case LABEL_DECL:     case CONST_DECL:
    case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
    case FUNCTION_DECL:
      gcc_assert (DECL_ALIGN (t) != 0);
      return DECL_ALIGN (t);

    default:
      break;
    }

  /* Otherwise take the alignment from that of the type.  */
  return TYPE_ALIGN (TREE_TYPE (t));
}
3380 \f
3381 /* Return, as a tree node, the number of elements for TYPE (which is an
3382 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3383
3384 tree
3385 array_type_nelts (const_tree type)
3386 {
3387 tree index_type, min, max;
3388
3389 /* If they did it with unspecified bounds, then we should have already
3390 given an error about it before we got here. */
3391 if (! TYPE_DOMAIN (type))
3392 return error_mark_node;
3393
3394 index_type = TYPE_DOMAIN (type);
3395 min = TYPE_MIN_VALUE (index_type);
3396 max = TYPE_MAX_VALUE (index_type);
3397
3398 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3399 if (!max)
3400 return error_mark_node;
3401
3402 return (integer_zerop (min)
3403 ? max
3404 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3405 }
3406 \f
3407 /* If arg is static -- a reference to an object in static storage -- then
3408 return the object. This is not the same as the C meaning of `static'.
3409 If arg isn't static, return NULL. */
3410
tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* A variable is static if it lives in static storage and is neither
	 thread-local nor dllimported; the latter two need runtime work to
	 locate, so their address is not a compile-time constant.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      /* Otherwise the reference is static iff its base object is.  */
      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Static only when the element size and the index are both
	 constants and the base is itself static.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3472
3473 \f
3474
3475
3476 /* Return whether OP is a DECL whose address is function-invariant. */
3477
bool
decl_address_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
      return true;

    case VAR_DECL:
      /* Static storage, thread-local storage, and locals of the current
	 function all keep a fixed address for the duration of a call.  */
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || DECL_THREAD_LOCAL_P (op)
	  || DECL_CONTEXT (op) == current_function_decl
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    default:
      break;
    }

  return false;
}
3512
3513 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3514
bool
decl_address_ip_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case LABEL_DECL:
    case FUNCTION_DECL:
    case STRING_CST:
      return true;

    case VAR_DECL:
      /* Unlike decl_address_invariant_p, function-local variables do not
	 qualify: their address differs across invocations.  Dllimported
	 variables are excluded since their address needs runtime work.  */
      if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
	   && !DECL_DLLIMPORT_P (op))
	  || DECL_THREAD_LOCAL_P (op))
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
	return true;
      break;

    default:
      break;
    }

  return false;
}
3546
3547
3548 /* Return true if T is function-invariant (internal function, does
3549 not handle arithmetic; that's handled in skip_simple_arithmetic and
3550 tree_invariant_p). */
3551
static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants, and read-only nodes without side effects, are trivially
     invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      /* The address is invariant if every step of the component-reference
	 chain is invariant and the base is a constant or a decl with an
	 invariant address.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      /* NOTE(review): non-NULL operands 2/3 presumably carry a
		 non-default element size/offset — treated conservatively
		 as non-invariant.  */
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3598
3599 /* Return true if T is function-invariant. */
3600
3601 bool
3602 tree_invariant_p (tree t)
3603 {
3604 tree inner = skip_simple_arithmetic (t);
3605 return tree_invariant_p_1 (inner);
3606 }
3607
3608 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3609 Do this to any expression which may be used in more than one place,
3610 but must be evaluated only once.
3611
3612 Normally, expand_expr would reevaluate the expression each time.
3613 Calling save_expr produces something that is evaluated and recorded
3614 the first time expand_expr is called on it. Subsequent calls to
3615 expand_expr just reuse the recorded value.
3616
3617 The call to expand_expr that generates code that actually computes
3618 the value is the first call *at compile time*. Subsequent calls
3619 *at compile time* generate code to use the saved value.
3620 This produces correct result provided that *at run time* control
3621 always flows through the insns made by the first expand_expr
3622 before reaching the other places where the save_expr was evaluated.
3623 You, the caller of save_expr, must make sure this is so.
3624
3625 Constants, and certain read-only nodes, are returned with no
3626 SAVE_EXPR because that is safe. Expressions containing placeholders
3627 are not touched; see tree.def for an explanation of what these
3628 are used for. */
3629
tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  /* Wrap the whole expression (not just INNER), preserving its location.  */
  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
3667
3668 /* Look inside EXPR into any simple arithmetic operations. Return the
3669 outermost non-arithmetic or non-invariant node. */
3670
3671 tree
3672 skip_simple_arithmetic (tree expr)
3673 {
3674 /* We don't care about whether this can be used as an lvalue in this
3675 context. */
3676 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3677 expr = TREE_OPERAND (expr, 0);
3678
3679 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3680 a constant, it will be more efficient to not make another SAVE_EXPR since
3681 it will allow better simplification and GCSE will be able to merge the
3682 computations if they actually occur. */
3683 while (true)
3684 {
3685 if (UNARY_CLASS_P (expr))
3686 expr = TREE_OPERAND (expr, 0);
3687 else if (BINARY_CLASS_P (expr))
3688 {
3689 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3690 expr = TREE_OPERAND (expr, 0);
3691 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3692 expr = TREE_OPERAND (expr, 1);
3693 else
3694 break;
3695 }
3696 else
3697 break;
3698 }
3699
3700 return expr;
3701 }
3702
3703 /* Look inside EXPR into simple arithmetic operations involving constants.
3704 Return the outermost non-arithmetic or non-constant node. */
3705
3706 tree
3707 skip_simple_constant_arithmetic (tree expr)
3708 {
3709 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3710 expr = TREE_OPERAND (expr, 0);
3711
3712 while (true)
3713 {
3714 if (UNARY_CLASS_P (expr))
3715 expr = TREE_OPERAND (expr, 0);
3716 else if (BINARY_CLASS_P (expr))
3717 {
3718 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3719 expr = TREE_OPERAND (expr, 0);
3720 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3721 expr = TREE_OPERAND (expr, 1);
3722 else
3723 break;
3724 }
3725 else
3726 break;
3727 }
3728
3729 return expr;
3730 }
3731
3732 /* Return which tree structure is used by T. */
3733
3734 enum tree_node_structure_enum
3735 tree_node_structure (const_tree t)
3736 {
3737 const enum tree_code code = TREE_CODE (t);
3738 return tree_node_structure_for_code (code);
3739 }
3740
3741 /* Set various status flags when building a CALL_EXPR object T. */
3742
static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  /* Unless the flags alone already settle both bits, inspect the
     operands.  Note I is reused here as the operand index; the scan
     starts at 1, skipping operand 0, which is not a regular operand of
     vl_exp nodes.  */
  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
3770 \f
3771 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3772 size or offset that depends on a field within a record. */
3773
bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which will assume
	 here will be valid.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      /* A few expression codes need special handling before falling back
	 on the generic per-operand scan below.  */
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR. */
	  return 0;

	default:
	  break;
	}

      /* Generic scan driven by the operand count of the code.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* Check every actual argument of the call.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
3857
3858 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3859 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3860 field positions. */
3861
static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check the offset, qualifier (QUAL_UNION only) and type of each
	   field.  Non-FIELD_DECL entries on the chain are skipped.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
3925
3926 /* Wrapper around above function used to cache its result. */
3927
bool
type_contains_placeholder_p (tree type)
{
  bool result;

  /* If the contains_placeholder_bits field has been initialized,
     then we know the answer.  The internal field stores the answer
     biased by one: 0 = not yet computed, 1 = false, 2 = true.  */
  if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
    return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;

  /* Indicate that we've seen this type node, and the answer is false.
     This is what we want to return if we run into recursion via fields.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;

  /* Compute the real value.  */
  result = type_contains_placeholder_1 (type);

  /* Store the real value.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;

  return result;
}
3950 \f
3951 /* Push tree EXP onto vector QUEUE if it is not already present. */
3952
static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  /* Linear scan for a structurally equal element.  NOTE: the test below
     relies on FOR_EACH_VEC_ELT leaving ITER null when the scan runs off
     the end of the vector without breaking.  */
  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  if (!iter)
    queue->safe_push (exp);
}
3966
3967 /* Given a tree EXP, find all occurrences of references to fields
3968 in a PLACEHOLDER_EXPR and place them in vector REFS without
3969 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3970 we assume here that EXP contains only arithmetic expressions
3971 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3972 argument list. */
3973
void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk down to the base of the chain of references.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* Record the whole reference when it is rooted at a
	 PLACEHOLDER_EXPR; otherwise keep looking inside its base.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Start at 1: operand 0 is not a regular operand of vl_exp
	   nodes.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
4040
4041 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4042 return a tree with all occurrences of references to F in a
4043 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4044 CONST_DECLs. Note that we assume here that EXP contains only
4045 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4046 occurring only in their argument list. */
4047
tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Substitute in each operand; return EXP unchanged if nothing
	   was replaced, to preserve sharing.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		/* NOTE(review): the scan starts at operand 3, presumably
		   where the actual call arguments begin — confirm against
		   the CALL_EXPR operand layout.  */
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Substitute in every operand, copying the node lazily the
	     first time an operand actually changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* Preserve the no-trap flag on memory references.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4236
4237 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4238 for it within OBJ, a tree that is an object or a chain of references. */
4239
tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: look for an object of exactly the needed type.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: look for a pointer to the needed type, to be
	 dereferenced.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Substitute in each operand; return EXP unchanged if nothing
	   was replaced, to preserve sharing.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Substitute in every operand (starting at 1: operand 0 is not
	     a regular operand of vl_exp nodes), copying the node lazily
	     the first time an operand actually changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* Preserve the no-trap flag on memory references.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4407 \f
4408
4409 /* Subroutine of stabilize_reference; this is called for subtrees of
4410 references. Any expression with side-effects must be put in a SAVE_EXPR
4411 to ensure that it is only evaluated once.
4412
4413 We don't put SAVE_EXPR nodes around everything, because assigning very
4414 simple expressions to temporaries causes us to miss good opportunities
4415 for optimizations. Among other things, the opportunity to fold in the
4416 addition of a constant into an addressing mode often gets lost, e.g.
4417 "y[i+1] += x;". In general, we take the approach that we should not make
4418 an assignment unless we are forced into it - i.e., that any non-side effect
4419 operator should be allowed, and that cse should take care of coalescing
4420 multiple utterances of the same expression should that prove fruitful. */
4421
static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* The rebuilt node must carry over the type and flags of the
     original expression.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4494
4495 /* Stabilize a reference so that we can use it any number of times
4496 without causing its operands to be evaluated more than once.
4497 Returns the stabilized reference. This works by means of save_expr,
4498 so see the caveats in the comments about save_expr.
4499
4500 Also allows conversion expressions whose operands are references.
4501 Any other kind of expression is returned unchanged. */
4502
tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the reference underneath.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      /* The pointer operand is a full expression; wrap it in a SAVE_EXPR
	 if it has side effects.  */
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      /* Operand 1 is the FIELD_DECL; only the base needs stabilizing.  */
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* Stabilize the base as a reference and the index as a plain
	 expression; operands 2 and 3 (element size/alignment hints) are
	 copied unchanged.  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* build_nt leaves type and flags unset; copy them from the original.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
4577 \f
4578 /* Low-level constructors for expressions. */
4579
4580 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4581 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4582
void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

  /* Clear TC if NODE isn't constant, set SE if it has side effects.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  /* Operand 1 is the index; 2 and 3 are the optional lower bound
	     and element size.  */
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  /* Operand 2 is the optional field offset.  */
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Let the front end map language-specific wrappers back to a decl.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4656
4657 /* Build an expression of code CODE, data type TYPE, and operands as
4658 specified. Expressions and reference nodes can be created this way.
4659 Constants, decls, types and misc nodes cannot be.
4660
4661 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4662 enough for all extant tree codes. */
4663
4664 tree
4665 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4666 {
4667 tree t;
4668
4669 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4670
4671 t = make_node (code PASS_MEM_STAT);
4672 TREE_TYPE (t) = tt;
4673
4674 return t;
4675 }
4676
tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  /* A one-operand expression always fits in a tree_exp.  */
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common part needs clearing; everything else is
     initialized explicitly below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      /* Propagate side effects and read-only-ness from the operand.  */
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      /* Statements always have side effects, except the marker
	 DEBUG_BEGIN_STMT.  */
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4740
/* Helper used by build2 through build5: store operand N of T and fold
   the operand's flags into the local SIDE_EFFECTS, READ_ONLY and
   CONSTANT accumulators.  Operands that are type nodes are skipped
   because their flag bits mean something different.  The (void) casts
   silence unused-result warnings in callers that ignore some
   accumulators.  */
#define PROCESS_ARG(N)			\
  do {					\
    TREE_OPERAND (t, N) = arg##N;	\
    if (arg##N &&!TYPE_P (arg##N))	\
      {					\
	if (TREE_SIDE_EFFECTS (arg##N))	\
	  side_effects = 1;		\
	if (!TREE_READONLY (arg##N)	\
	    && !CONSTANT_CLASS_P (arg##N)) \
	  (void) (read_only = 0);	\
	if (!TREE_CONSTANT (arg##N))	\
	  (void) (constant = 0);	\
      }					\
  } while (0)
4755
tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  /* Sanity check: pointer-typed arithmetic should be expressed with
     POINTER_PLUS_EXPR, not PLUS/MINUS/MULT, except on constants.  */
  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  /* POINTER_PLUS_EXPR must be pointer + sizetype offset.  */
  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  /* Detect divisions and modulos by literal zero so the result is
     never marked TREE_CONSTANT below.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &OBJ takes its read-only/volatile flags from OBJ
	 itself, not from the address operands.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
4834
4835
tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  /* CONSTANT is written by PROCESS_ARG but not consulted here;
     three-operand codes are never marked TREE_CONSTANT this way.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  /* Only COND_EXPR propagates read-only-ness from its operands.  */
  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4876
tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  /* CONSTANT and READ_ONLY are written by PROCESS_ARG but not consulted
     here; only side effects and volatility are propagated.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4903
tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  /* CONSTANT and READ_ONLY are written by PROCESS_ARG but not consulted
     here; TARGET_MEM_REF handles its flags specially below.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* A TARGET_MEM_REF of &OBJ takes its read-only/volatile flags
	 from OBJ itself, like MEM_REF in build2.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4941
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */
4944
tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  /* Fold the inner MEM_REF's offset into ours and use its
	     base pointer directly.  */
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  /* Note the offset is encoded as operand 1, an INTEGER_CST of the
     original pointer type.  */
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
4973
4974 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4975
poly_offset_int
mem_ref_offset (const_tree t)
{
  /* Operand 1 of a (TARGET_)MEM_REF holds the constant byte offset;
     interpret it as signed.  */
  return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
				SIGNED);
}
4982
4983 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4984 offsetted by OFFSET units. */
4985
4986 tree
4987 build_invariant_address (tree type, tree base, poly_int64 offset)
4988 {
4989 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4990 build_fold_addr_expr (base),
4991 build_int_cst (ptr_type_node, offset));
4992 tree addr = build1 (ADDR_EXPR, type, ref);
4993 recompute_tree_invariant_for_addr_expr (addr);
4994 return addr;
4995 }
4996
4997 /* Similar except don't specify the TREE_TYPE
4998 and leave the TREE_SIDE_EFFECTS as 0.
4999 It is permissible for arguments to be null,
5000 or even garbage if their values do not matter. */
5001
5002 tree
5003 build_nt (enum tree_code code, ...)
5004 {
5005 tree t;
5006 int length;
5007 int i;
5008 va_list p;
5009
5010 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5011
5012 va_start (p, code);
5013
5014 t = make_node (code);
5015 length = TREE_CODE_LENGTH (code);
5016
5017 for (i = 0; i < length; i++)
5018 TREE_OPERAND (t, i) = va_arg (p, tree);
5019
5020 va_end (p);
5021 return t;
5022 }
5023
5024 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5025 tree vec. */
5026
tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  /* +3 covers the length slot, the callee and the static chain that
     precede the actual arguments in a CALL_EXPR.  */
  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
5040 \f
5041 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5042 and data type TYPE.
5043 We do NOT enter this node in any sort of symbol table.
5044
5045 LOC is the location of the decl.
5046
5047 layout_decl is used to set up the decl's storage layout.
5048 Other slots are initialized to 0 or null pointers. */
5049
5050 tree
5051 build_decl (location_t loc, enum tree_code code, tree name,
5052 tree type MEM_STAT_DECL)
5053 {
5054 tree t;
5055
5056 t = make_node (code PASS_MEM_STAT);
5057 DECL_SOURCE_LOCATION (t) = loc;
5058
5059 /* if (type == error_mark_node)
5060 type = integer_type_node; */
5061 /* That is not done, deliberately, so that having error_mark_node
5062 as the type can suppress useless errors in the use of this variable. */
5063
5064 DECL_NAME (t) = name;
5065 TREE_TYPE (t) = type;
5066
5067 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5068 layout_decl (t, 0);
5069
5070 return t;
5071 }
5072
5073 /* Builds and returns function declaration with NAME and TYPE. */
5074
5075 tree
5076 build_fn_decl (const char *name, tree type)
5077 {
5078 tree id = get_identifier (name);
5079 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5080
5081 DECL_EXTERNAL (decl) = 1;
5082 TREE_PUBLIC (decl) = 1;
5083 DECL_ARTIFICIAL (decl) = 1;
5084 TREE_NOTHROW (decl) = 1;
5085
5086 return decl;
5087 }
5088
5089 vec<tree, va_gc> *all_translation_units;
5090
5091 /* Builds a new translation-unit decl with name NAME, queues it in the
5092 global list of translation-unit decls and returns it. */
5093
5094 tree
5095 build_translation_unit_decl (tree name)
5096 {
5097 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5098 name, NULL_TREE);
5099 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5100 vec_safe_push (all_translation_units, tu);
5101 return tu;
5102 }
5103
5104 \f
5105 /* BLOCK nodes are used to represent the structure of binding contours
5106 and declarations, once those contours have been exited and their contents
5107 compiled. This information is used for outputting debugging info. */
5108
5109 tree
5110 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5111 {
5112 tree block = make_node (BLOCK);
5113
5114 BLOCK_VARS (block) = vars;
5115 BLOCK_SUBBLOCKS (block) = subblocks;
5116 BLOCK_SUPERCONTEXT (block) = supercontext;
5117 BLOCK_CHAIN (block) = chain;
5118 return block;
5119 }
5120
5121 \f
5122 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5123
5124 LOC is the location to use in tree T. */
5125
5126 void
5127 protected_set_expr_location (tree t, location_t loc)
5128 {
5129 if (CAN_HAVE_LOCATION_P (t))
5130 SET_EXPR_LOCATION (t, loc);
5131 }
5132
5133 /* Data used when collecting DECLs and TYPEs for language data removal. */
5134
class free_lang_data_d
{
public:
  /* Reserve some room up front; 100 is an arbitrary starting size.  */
  free_lang_data_d () : decls (100), types (100) {}

  /* Worklist to avoid excessive recursion.  */
  auto_vec<tree> worklist;

  /* Set of traversed objects.  Used to avoid duplicate visits.  */
  hash_set<tree> pset;

  /* Array of symbols to process with free_lang_data_in_decl.  */
  auto_vec<tree> decls;

  /* Array of types to process with free_lang_data_in_type.  */
  auto_vec<tree> types;
};
5152
5153
5154 /* Add type or decl T to one of the list of tree nodes that need their
5155 language data removed. The lists are held inside FLD. */
5156
5157 static void
5158 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5159 {
5160 if (DECL_P (t))
5161 fld->decls.safe_push (t);
5162 else if (TYPE_P (t))
5163 fld->types.safe_push (t);
5164 else
5165 gcc_unreachable ();
5166 }
5167
5168 /* Push tree node T into FLD->WORKLIST. */
5169
5170 static inline void
5171 fld_worklist_push (tree t, class free_lang_data_d *fld)
5172 {
5173 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5174 fld->worklist.safe_push ((t));
5175 }
5176
5177
5178 \f
5179 /* Return simplified TYPE_NAME of TYPE. */
5180
static tree
fld_simplified_type_name (tree type)
{
  if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
    return TYPE_NAME (type);
  /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
     TYPE_DECL if the type doesn't have linkage.
     NOTE(review): the original comment is truncated here ("this must
     match fld_"); presumably this predicate must stay in sync with the
     other fld_* helpers below -- confirm before changing it.  */
  if (type != TYPE_MAIN_VARIANT (type)
      || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
	  && (TREE_CODE (type) != RECORD_TYPE
	      || !TYPE_BINFO (type)
	      || !BINFO_VTABLE (TYPE_BINFO (type)))))
    return DECL_NAME (TYPE_NAME (type));
  return TYPE_NAME (type);
}
5197
/* Do the same comparison as check_qualified_type, skipping the lang part
   of the type, and be more permissive about type names: we only care that
   the names are the same (for diagnostics) and that the ODR names are the
   same.  If INNER_TYPE is non-NULL, be sure that TREE_TYPE matches it.  */
5202
static bool
fld_type_variant_equal_p (tree t, tree v, tree inner_type)
{
  if (TYPE_QUALS (t) != TYPE_QUALS (v)
      /* We want to match incomplete variants with complete types.
	 In this case we need to ignore alignment.  */
      || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
	  && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
	      || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
      /* Names only need to compare equal, not be identical TYPE_DECLs.  */
      || fld_simplified_type_name (t) != fld_simplified_type_name (v)
      || !attribute_list_equal (TYPE_ATTRIBUTES (t),
			        TYPE_ATTRIBUTES (v))
      || (inner_type && TREE_TYPE (v) != inner_type))
    return false;

  return true;
}
5220
5221 /* Find variant of FIRST that match T and create new one if necessary.
5222 Set TREE_TYPE to INNER_TYPE if non-NULL. */
5223
static tree
fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
		  tree inner_type = NULL)
{
  /* If T already lives on FIRST's variant chain, nothing to do.  */
  if (first == TYPE_MAIN_VARIANT (t))
    return t;
  /* Reuse an existing equivalent variant when possible.  */
  for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
    if (fld_type_variant_equal_p (t, v, inner_type))
      return v;
  /* Otherwise build a fresh variant and copy the qualifier and naming
     fields over from T.  */
  tree v = build_variant_type_copy (first);
  TYPE_READONLY (v) = TYPE_READONLY (t);
  TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
  TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
  TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
  TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
  TYPE_NAME (v) = TYPE_NAME (t);
  TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
  TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
  /* Variants of incomplete types should have alignment
     set to BITS_PER_UNIT.  Do not copy the actual alignment.  */
  if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
    {
      SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
      TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
    }
  if (inner_type)
    TREE_TYPE (v) = inner_type;
  gcc_checking_assert (fld_type_variant_equal_p (t,v, inner_type));
  /* The new variant must itself get its lang data freed.  */
  if (!fld->pset.add (v))
    add_tree_to_fld_list (v, fld);
  return v;
}
5256
/* Map complete types to incomplete types; cache used by
   fld_incomplete_type_of.  */

static hash_map<tree, tree> *fld_incomplete_types;

/* Map types to simplified types; cache used by fld_simplified_type.  */

static hash_map<tree, tree> *fld_simplified_types;
5264
5265 /* Produce variant of T whose TREE_TYPE is T2. If it is main variant,
5266 use MAP to prevent duplicates. */
5267
static tree
fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
			class free_lang_data_d *fld)
{
  /* Element type already matches: nothing to rebuild.  */
  if (TREE_TYPE (t) == t2)
    return t;

  /* For a non-main variant, first process the main variant, then find
     or create the matching variant of the result.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    {
      return fld_type_variant
	      (fld_process_array_type (TYPE_MAIN_VARIANT (t),
				       TYPE_MAIN_VARIANT (t2), map, fld),
	       t, fld, t2);
    }

  /* Main variant: build (once, via MAP) an array of T2 with T's domain.  */
  bool existed;
  tree &array
     = map->get_or_insert (t, &existed);
  if (!existed)
    {
      array = build_array_type_1 (t2, TYPE_DOMAIN (t),
				  TYPE_TYPELESS_STORAGE (t), false);
      TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
      if (!fld->pset.add (array))
	add_tree_to_fld_list (array, fld);
    }
  return array;
}
5296
/* Return CTX after removal of contexts that are not relevant.  */
5298
5299 static tree
5300 fld_decl_context (tree ctx)
5301 {
5302 /* Variably modified types are needed for tree_is_indexable to decide
5303 whether the type needs to go to local or global section.
5304 This code is semi-broken but for now it is easiest to keep contexts
5305 as expected. */
5306 if (ctx && TYPE_P (ctx)
5307 && !variably_modified_type_p (ctx, NULL_TREE))
5308 {
5309 while (ctx && TYPE_P (ctx))
5310 ctx = TYPE_CONTEXT (ctx);
5311 }
5312 return ctx;
5313 }
5314
/* For T being an aggregate type, try to turn it into an incomplete
   variant.  Return T if no simplification is possible.  */
5317
static tree
fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
{
  if (!t)
    return NULL;
  if (POINTER_TYPE_P (t))
    {
      /* Recurse on the pointed-to type; if it simplified, rebuild the
	 pointer/reference around the simplified type.  */
      tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
      if (t2 != TREE_TYPE (t))
	{
	  tree first;
	  if (TREE_CODE (t) == POINTER_TYPE)
	    first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
						 TYPE_REF_CAN_ALIAS_ALL (t));
	  else
	    first = build_reference_type_for_mode (t2, TYPE_MODE (t),
						   TYPE_REF_CAN_ALIAS_ALL (t));
	  gcc_assert (TYPE_CANONICAL (t2) != t2
		      && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
	  if (!fld->pset.add (first))
	    add_tree_to_fld_list (first, fld);
	  return fld_type_variant (first, t, fld);
	}
      return t;
    }
  if (TREE_CODE (t) == ARRAY_TYPE)
    return fld_process_array_type (t,
				   fld_incomplete_type_of (TREE_TYPE (t), fld),
				   fld_incomplete_types, fld);
  /* Only complete record/union/enum types can be made incomplete.  */
  if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
      || !COMPLETE_TYPE_P (t))
    return t;
  if (TYPE_MAIN_VARIANT (t) == t)
    {
      bool existed;
      tree &copy
	 = fld_incomplete_types->get_or_insert (t, &existed);

      if (!existed)
	{
	  copy = build_distinct_type_copy (t);

	  /* It is possible that type was not seen by free_lang_data yet.  */
	  if (!fld->pset.add (copy))
	    add_tree_to_fld_list (copy, fld);
	  /* Wipe the fields that make the copy look complete.  */
	  TYPE_SIZE (copy) = NULL;
	  TYPE_USER_ALIGN (copy) = 0;
	  TYPE_SIZE_UNIT (copy) = NULL;
	  TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
	  TREE_ADDRESSABLE (copy) = 0;
	  if (AGGREGATE_TYPE_P (t))
	    {
	      SET_TYPE_MODE (copy, VOIDmode);
	      SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
	      TYPE_TYPELESS_STORAGE (copy) = 0;
	      TYPE_FIELDS (copy) = NULL;
	      TYPE_BINFO (copy) = NULL;
	    }
	  else
	    TYPE_VALUES (copy) = NULL;

	  /* Build copy of TYPE_DECL in TYPE_NAME if necessary.
	     This is needed for ODR violation warnings to come out right (we
	     want duplicate TYPE_DECLs whenever the type is duplicated because
	     of ODR violation.  Because lang data in the TYPE_DECL may not
	     have been freed yet, rebuild it from scratch and copy relevant
	     fields.  */
	  TYPE_NAME (copy) = fld_simplified_type_name (copy);
	  tree name = TYPE_NAME (copy);

	  if (name && TREE_CODE (name) == TYPE_DECL)
	    {
	      gcc_checking_assert (TREE_TYPE (name) == t);
	      tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
				       DECL_NAME (name), copy);
	      if (DECL_ASSEMBLER_NAME_SET_P (name))
		SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
	      SET_DECL_ALIGN (name2, 0);
	      DECL_CONTEXT (name2) = fld_decl_context
					 (DECL_CONTEXT (name));
	      TYPE_NAME (copy) = name2;
	    }
	}
      return copy;
    }
  /* Non-main variants map to variants of the simplified main variant.  */
  return (fld_type_variant
	    (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
}
5406
5407 /* Simplify type T for scenarios where we do not need complete pointer
5408 types. */
5409
static tree
fld_simplified_type (tree t, class free_lang_data_d *fld)
{
  if (!t)
    return t;
  if (POINTER_TYPE_P (t))
    return fld_incomplete_type_of (t, fld);
  /* FIXME: This triggers verification error, see PR88140.
     The "&& 0" deliberately disables the array path until fixed.  */
  if (TREE_CODE (t) == ARRAY_TYPE && 0)
    return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
				   fld_simplified_types, fld);
  return t;
}
5423
5424 /* Reset the expression *EXPR_P, a size or position.
5425
5426 ??? We could reset all non-constant sizes or positions. But it's cheap
5427 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5428
5429 We need to reset self-referential sizes or positions because they cannot
5430 be gimplified and thus can contain a CALL_EXPR after the gimplification
5431 is finished, which will run afoul of LTO streaming. And they need to be
5432 reset to something essentially dummy but not constant, so as to preserve
5433 the properties of the object they are attached to. */
5434
static inline void
free_lang_data_in_one_sizepos (tree *expr_p)
{
  tree expr = *expr_p;
  /* Self-referential sizes/positions cannot survive LTO streaming;
     replace them with a bare PLACEHOLDER_EXPR of the same type.  */
  if (CONTAINS_PLACEHOLDER_P (expr))
    *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
}
5442
5443
5444 /* Reset all the fields in a binfo node BINFO. We only keep
5445 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5446
static void
free_lang_data_in_binfo (tree binfo)
{
  unsigned i;
  tree t;

  gcc_assert (TREE_CODE (binfo) == TREE_BINFO);

  /* Clear everything except BINFO_VTABLE and the base binfos, which are
     recursed into below.  */
  BINFO_VIRTUALS (binfo) = NULL_TREE;
  BINFO_BASE_ACCESSES (binfo) = NULL;
  BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
  BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
  BINFO_VPTR_FIELD (binfo) = NULL_TREE;

  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
    free_lang_data_in_binfo (t);
}
5464
5465
/* Reset all language specific information still present in TYPE.
   FLD carries the pass-wide state (visited set, worklist, caches).  */

static void
free_lang_data_in_type (tree type, class free_lang_data_d *fld)
{
  gcc_assert (TYPE_P (type));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (type);

  TREE_LANG_FLAG_0 (type) = 0;
  TREE_LANG_FLAG_1 (type) = 0;
  TREE_LANG_FLAG_2 (type) = 0;
  TREE_LANG_FLAG_3 (type) = 0;
  TREE_LANG_FLAG_4 (type) = 0;
  TREE_LANG_FLAG_5 (type) = 0;
  TREE_LANG_FLAG_6 (type) = 0;

  TYPE_NEEDS_CONSTRUCTING (type) = 0;

  /* Purge non-marked variants from the variants chain, so that they
     don't reappear in the IL after free_lang_data.  A variant is kept
     only if it was reached by the earlier find_decls_types walk (and is
     therefore in FLD->pset).  */
  while (TYPE_NEXT_VARIANT (type)
	 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
    {
      tree t = TYPE_NEXT_VARIANT (type);
      TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
      /* Turn the removed types into distinct types.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_NEXT_VARIANT (t) = NULL_TREE;
    }

  if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
      /* Remove the const and volatile qualifiers from arguments.  The
	 C++ front end removes them, but the C front end does not,
	 leading to false ODR violation errors when merging two
	 instances of the same function signature compiled by
	 different front ends.  */
      for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
	  tree arg_type = TREE_VALUE (p);

	  if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
	    {
	      int quals = TYPE_QUALS (arg_type)
			  & ~TYPE_QUAL_CONST
			  & ~TYPE_QUAL_VOLATILE;
	      TREE_VALUE (p) = build_qualified_type (arg_type, quals);
	      /* The newly built qualified variant has not been visited
		 yet; process it unless it is already in the set.  */
	      if (!fld->pset.add (TREE_VALUE (p)))
		free_lang_data_in_type (TREE_VALUE (p), fld);
	    }
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_PURPOSE (p) = NULL;
	}
    }
  else if (TREE_CODE (type) == METHOD_TYPE)
    {
      TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
      for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
	{
	  /* C++ FE uses TREE_PURPOSE to store initial values.  */
	  TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
	  TREE_PURPOSE (p) = NULL;
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Remove members that are not FIELD_DECLs from the field list
	 of an aggregate.  These occur in C++.  */
      for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
	if (TREE_CODE (member) == FIELD_DECL)
	  prev = &DECL_CHAIN (member);
	else
	  *prev = DECL_CHAIN (member);

      TYPE_VFIELD (type) = NULL_TREE;

      if (TYPE_BINFO (type))
	{
	  free_lang_data_in_binfo (TYPE_BINFO (type));
	  /* We need to preserve link to bases and virtual table for all
	     polymorphic types to make devirtualization machinery working.  */
	  if (!BINFO_VTABLE (TYPE_BINFO (type)))
	    TYPE_BINFO (type) = NULL;
	}
    }
  else if (INTEGRAL_TYPE_P (type)
	   || SCALAR_FLOAT_TYPE_P (type)
	   || FIXED_POINT_TYPE_P (type))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE)
	{
	  /* Type values are used only for C++ ODR checking.  Drop them
	     for all type variants and non-ODR types.
	     For ODR types the data is freed in free_odr_warning_data.  */
	  if (TYPE_MAIN_VARIANT (type) != type
	      || !type_with_linkage_p (type))
	    TYPE_VALUES (type) = NULL;
	  else
	    /* Simplify representation by recording only values rather
	       than const decls.  */
	    for (tree e = TYPE_VALUES (type); e; e = TREE_CHAIN (e))
	      if (TREE_CODE (TREE_VALUE (e)) == CONST_DECL)
		TREE_VALUE (e) = DECL_INITIAL (TREE_VALUE (e));
	}
      free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
      free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
    }

  TYPE_LANG_SLOT_1 (type) = NULL_TREE;

  free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
  free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));

  /* BLOCK contexts are meaningless after this pass; hoist the type's
     context to the innermost enclosing non-BLOCK scope.  */
  if (TYPE_CONTEXT (type)
      && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
    {
      tree ctx = TYPE_CONTEXT (type);
      do
	{
	  ctx = BLOCK_SUPERCONTEXT (ctx);
	}
      while (ctx && TREE_CODE (ctx) == BLOCK);
      TYPE_CONTEXT (type) = ctx;
    }

  TYPE_STUB_DECL (type) = NULL;
  TYPE_NAME (type) = fld_simplified_type_name (type);
}
5598
5599
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p to return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation unit.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration type have linkage that allows us
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling do make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only the canonical TYPE_DECL of a named, non-artificial main
	 variant may need a mangled name, and RECORD/UNION types only
	 when they participate in C++ ODR checking.  */
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
5678
5679
/* Reset all language specific information still present in symbol
   DECL.  FLD carries the pass-wide state (visited set, type caches).  */

static void
free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
{
  gcc_assert (DECL_P (decl));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (decl);

  TREE_LANG_FLAG_0 (decl) = 0;
  TREE_LANG_FLAG_1 (decl) = 0;
  TREE_LANG_FLAG_2 (decl) = 0;
  TREE_LANG_FLAG_3 (decl) = 0;
  TREE_LANG_FLAG_4 (decl) = 0;
  TREE_LANG_FLAG_5 (decl) = 0;
  TREE_LANG_FLAG_6 (decl) = 0;

  free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
  free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      DECL_FCONTEXT (decl) = NULL;
      free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
      if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (decl) = NULL_TREE;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      struct cgraph_node *node;
      /* Frontends do not set TREE_ADDRESSABLE on public variables even though
	 the address may be taken in other unit, so this flag has no practical
	 use for middle-end.

	 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
	 for public objects that indeed cannot be addressed, but it is not
	 the case.  Set the flag to true so we do not get merge failures for
	 i.e. virtual tables between units that take address of it and
	 units that don't.  */
      if (TREE_PUBLIC (decl))
	TREE_ADDRESSABLE (decl) = true;
      TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
      /* Functions with no definition and no clones will never be
	 emitted; drop their bodies and argument/result decls.  */
      if (!(node = cgraph_node::get (decl))
	  || (!node->definition && !node->clones))
	{
	  if (node)
	    node->release_body ();
	  else
	    {
	      release_function_body (decl);
	      DECL_ARGUMENTS (decl) = NULL;
	      DECL_RESULT (decl) = NULL;
	      DECL_INITIAL (decl) = error_mark_node;
	    }
	}
      if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
	{
	  tree t;

	  /* If DECL has a gimple body, then the context for its
	     arguments must be DECL.  Otherwise, it doesn't really
	     matter, as we will not be emitting any code for DECL.  In
	     general, there may be other instances of DECL created by
	     the front end and since PARM_DECLs are generally shared,
	     their DECL_CONTEXT changes as the replicas of DECL are
	     created.  The only time where DECL_CONTEXT is important
	     is for the FUNCTION_DECLs that have a gimple body (since
	     the PARM_DECL will be used in the function's body).  */
	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
	    DECL_CONTEXT (t) = decl;
	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
	      = target_option_default_node;
	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
	      = optimization_default_node;
	}

      /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
	 At this point, it is not needed anymore.  */
      DECL_SAVED_TREE (decl) = NULL_TREE;

      /* Clear the abstract origin if it refers to a method.
	 Otherwise dwarf2out.c will ICE as we splice functions out of
	 TYPE_FIELDS and thus the origin will not be output
	 correctly.  */
      if (DECL_ABSTRACT_ORIGIN (decl)
	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
	  && RECORD_OR_UNION_TYPE_P
	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;

      DECL_VINDEX (decl) = NULL_TREE;
    }
  else if (VAR_P (decl))
    {
      /* See comment above why we set the flag for functions.  */
      if (TREE_PUBLIC (decl))
	TREE_ADDRESSABLE (decl) = true;
      /* Drop initializers that will never be emitted: external
	 non-readonly declarations and function-local non-statics.  */
      if ((DECL_EXTERNAL (decl)
	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
	DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (decl) = 0;
      TREE_PUBLIC (decl) = 0;
      TREE_PRIVATE (decl) = 0;
      DECL_ARTIFICIAL (decl) = 0;
      TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
      DECL_INITIAL (decl) = NULL_TREE;
      DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
      DECL_MODE (decl) = VOIDmode;
      SET_DECL_ALIGN (decl, 0);
      /* TREE_TYPE is cleared at WPA time in free_odr_warning_data.  */
    }
  else if (TREE_CODE (decl) == FIELD_DECL)
    {
      TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
      DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
	   && DECL_INITIAL (decl)
	   && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
    {
      /* Strip builtins from the translation-unit BLOCK.  We still have targets
	 without builtin_decl_explicit support and also builtins are shared
	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
      tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
      while (*nextp)
	{
	  tree var = *nextp;
	  if (fndecl_built_in_p (var))
	    *nextp = TREE_CHAIN (var);
	  else
	    nextp = &TREE_CHAIN (var);
	}
    }
  /* We need to keep field decls associated with their trees.  Otherwise tree
     merging may merge some fields and keep others disjoint which in turn will
     not do well with TREE_CHAIN pointers linking them.

     Also do not drop containing types for virtual methods and tables because
     these are needed by devirtualization.
     C++ destructors are special because C++ frontends sometimes produce
     virtual destructor as an alias of non-virtual destructor.  In
     devirtualization code we always walk through aliases and we need
     context to be preserved too.  See PR89335  */
  if (TREE_CODE (decl) != FIELD_DECL
      && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
	  || (!DECL_VIRTUAL_P (decl)
	      && (TREE_CODE (decl) != FUNCTION_DECL
		  || !DECL_CXX_DESTRUCTOR_P (decl)))))
    DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
}
5839
5840
/* Operand callback helper for free_lang_data_in_node.  *TP is the
   subtree operand being considered.  WS is the walk_tree "walk
   subtrees" flag; DATA is the free_lang_data_d state.  Collects every
   decl and type reachable from *TP into the FLD worklist/lists.  */

static tree
find_decls_types_r (tree *tp, int *ws, void *data)
{
  tree t = *tp;
  class free_lang_data_d *fld = (class free_lang_data_d *) data;

  if (TREE_CODE (t) == TREE_LIST)
    return NULL_TREE;

  /* Language specific nodes will be removed, so there is no need
     to gather anything under them.  */
  if (is_lang_specific (t))
    {
      *ws = 0;
      return NULL_TREE;
    }

  if (DECL_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 decls, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      fld_worklist_push (DECL_NAME (t), fld);
      fld_worklist_push (DECL_CONTEXT (t), fld);
      fld_worklist_push (DECL_SIZE (t), fld);
      fld_worklist_push (DECL_SIZE_UNIT (t), fld);

      /* We are going to remove everything under DECL_INITIAL for
	 TYPE_DECLs.  No point walking them.  */
      if (TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (DECL_INITIAL (t), fld);

      fld_worklist_push (DECL_ATTRIBUTES (t), fld);
      fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);

      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
	  fld_worklist_push (DECL_RESULT (t), fld);
	}
      else if (TREE_CODE (t) == FIELD_DECL)
	{
	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
	  fld_worklist_push (DECL_FCONTEXT (t), fld);
	}

      if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	fld_worklist_push (DECL_VALUE_EXPR (t), fld);

      /* FIELD_DECL chains are walked via TYPE_FIELDS and TYPE_DECL
	 chains are not interesting, so do not follow them here.  */
      if (TREE_CODE (t) != FIELD_DECL
	  && TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (TREE_CHAIN (t), fld);
      *ws = 0;
    }
  else if (TYPE_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 types, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
      fld_worklist_push (TYPE_SIZE (t), fld);
      fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
      fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
      fld_worklist_push (TYPE_POINTER_TO (t), fld);
      fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
      fld_worklist_push (TYPE_NAME (t), fld);
      /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
	 lists, we may look types up in these lists and use them while
	 optimizing the function body.  Thus we need to free lang data
	 in them.  */
      if (TREE_CODE (t) == POINTER_TYPE)
	fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
      if (TREE_CODE (t) == REFERENCE_TYPE)
	fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
      if (!POINTER_TYPE_P (t))
	fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
      /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types.  */
      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
      fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
	 do not and want not to reach unused variants this way.  */
      if (TYPE_CONTEXT (t))
	{
	  tree ctx = TYPE_CONTEXT (t);
	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
	     So push that instead.  */
	  while (ctx && TREE_CODE (ctx) == BLOCK)
	    ctx = BLOCK_SUPERCONTEXT (ctx);
	  fld_worklist_push (ctx, fld);
	}
      fld_worklist_push (TYPE_CANONICAL (t), fld);

      if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
	{
	  unsigned i;
	  tree tem;
	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
	    fld_worklist_push (TREE_TYPE (tem), fld);
	  fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
	  fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
	}
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  tree tem;
	  /* Push all TYPE_FIELDS - there can be interleaving interesting
	     and non-interesting things.  */
	  tem = TYPE_FIELDS (t);
	  while (tem)
	    {
	      if (TREE_CODE (tem) == FIELD_DECL)
		fld_worklist_push (tem, fld);
	      tem = TREE_CHAIN (tem);
	    }
	}
      if (FUNC_OR_METHOD_TYPE_P (t))
	fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);

      fld_worklist_push (TYPE_STUB_DECL (t), fld);
      *ws = 0;
    }
  else if (TREE_CODE (t) == BLOCK)
    {
      /* Prune BLOCK_VARS down to automatic variables of the containing
	 function; everything else is spliced out of the chain.  */
      for (tree *tem = &BLOCK_VARS (t); *tem; )
	{
	  if (TREE_CODE (*tem) != VAR_DECL
	      || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem)))
	    {
	      gcc_assert (TREE_CODE (*tem) != RESULT_DECL
			  && TREE_CODE (*tem) != PARM_DECL);
	      *tem = TREE_CHAIN (*tem);
	    }
	  else
	    {
	      fld_worklist_push (*tem, fld);
	      tem = &TREE_CHAIN (*tem);
	    }
	}
      for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
	fld_worklist_push (tem, fld);
      fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
    }

  if (TREE_CODE (t) != IDENTIFIER_NODE
      && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
    fld_worklist_push (TREE_TYPE (t), fld);

  return NULL_TREE;
}
5999
6000
6001 /* Find decls and types in T. */
6002
6003 static void
6004 find_decls_types (tree t, class free_lang_data_d *fld)
6005 {
6006 while (1)
6007 {
6008 if (!fld->pset.contains (t))
6009 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6010 if (fld->worklist.is_empty ())
6011 break;
6012 t = fld->worklist.pop ();
6013 }
6014 }
6015
6016 /* Translate all the types in LIST with the corresponding runtime
6017 types. */
6018
6019 static tree
6020 get_eh_types_for_runtime (tree list)
6021 {
6022 tree head, prev;
6023
6024 if (list == NULL_TREE)
6025 return NULL_TREE;
6026
6027 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6028 prev = head;
6029 list = TREE_CHAIN (list);
6030 while (list)
6031 {
6032 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6033 TREE_CHAIN (prev) = n;
6034 prev = TREE_CHAIN (prev);
6035 list = TREE_CHAIN (list);
6036 }
6037
6038 return head;
6039 }
6040
6041
/* Find decls and types referenced in EH region R and store them in
   FLD->DECLS and FLD->TYPES.  */

static void
find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
{
  /* No default case: each ERT_* kind is handled explicitly.  */
  switch (r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch c;

	/* The types referenced in each catch must first be changed to the
	   EH types used at runtime.  This removes references to FE types
	   in the region.  */
	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	  {
	    c->type_list = get_eh_types_for_runtime (c->type_list);
	    walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      r->u.allowed.type_list
	= get_eh_types_for_runtime (r->u.allowed.type_list);
      walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
      break;

    case ERT_MUST_NOT_THROW:
      walk_tree (&r->u.must_not_throw.failure_decl,
		 find_decls_types_r, fld, &fld->pset);
      break;
    }
}
6080
6081
/* Find decls and types referenced in cgraph node N and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from N,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
{
  basic_block bb;
  struct function *fn;
  unsigned ix;
  tree t;

  find_decls_types (n->decl, fld);

  if (!gimple_has_body_p (n->decl))
    return;

  /* This pass runs outside any function context.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  fn = DECL_STRUCT_FUNCTION (n->decl);

  /* Traverse locals.  */
  FOR_EACH_LOCAL_DECL (fn, ix, t)
    find_decls_types (t, fld);

  /* Traverse EH regions in FN.  */
  {
    eh_region r;
    FOR_ALL_EH_REGION_FN (r, fn)
      find_decls_types_in_eh_region (r, fld);
  }

  /* Traverse every statement in FN.  */
  FOR_EACH_BB_FN (bb, fn)
    {
      gphi_iterator psi;
      gimple_stmt_iterator si;
      unsigned i;

      /* PHI nodes: visit every argument.  */
      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
	{
	  gphi *phi = psi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
	      find_decls_types (*arg_p, fld);
	    }
	}

      /* Regular statements: visit the call fntype and all operands.  */
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);

	  if (is_gimple_call (stmt))
	    find_decls_types (gimple_call_fntype (stmt), fld);

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree arg = gimple_op (stmt, i);
	      find_decls_types (arg, fld);
	    }
	}
    }
}
6149
6150
/* Find decls and types referenced in varpool node V and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from V,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
{
  find_decls_types (v->decl, fld);
}
6162
6163 /* If T needs an assembler name, have one created for it. */
6164
6165 void
6166 assign_assembler_name_if_needed (tree t)
6167 {
6168 if (need_assembler_name_p (t))
6169 {
6170 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6171 diagnostics that use input_location to show locus
6172 information. The problem here is that, at this point,
6173 input_location is generally anchored to the end of the file
6174 (since the parser is long gone), so we don't have a good
6175 position to pin it to.
6176
6177 To alleviate this problem, this uses the location of T's
6178 declaration. Examples of this are
6179 testsuite/g++.dg/template/cond2.C and
6180 testsuite/g++.dg/template/pr35240.C. */
6181 location_t saved_location = input_location;
6182 input_location = DECL_SOURCE_LOCATION (t);
6183
6184 decl_assembler_name (t);
6185
6186 input_location = saved_location;
6187 }
6188 }
6189
6190
/* Free language specific information for every operand and expression
   in every node of the call graph.  This process operates in three stages:

   1- Every callgraph node and varpool node is traversed looking for
      decls and types embedded in them.  This is a more exhaustive
      search than that done by find_referenced_vars, because it will
      also collect individual fields, decls embedded in types, etc.

   2- All the decls found are sent to free_lang_data_in_decl.

   3- All the types found are sent to free_lang_data_in_type.

   The ordering between decls and types is important because
   free_lang_data_in_decl sets assembler names, which includes
   mangling.  So types cannot be freed up until assembler names have
   been set up.  */

static void
free_lang_data_in_cgraph (class free_lang_data_d *fld)
{
  struct cgraph_node *n;
  varpool_node *v;
  tree t;
  unsigned i;
  alias_pair *p;

  /* Find decls and types in the body of every function in the callgraph.  */
  FOR_EACH_FUNCTION (n)
    find_decls_types_in_node (n, fld);

  /* Alias targets must be visited too.  */
  FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
    find_decls_types (p->decl, fld);

  /* Find decls and types in every varpool symbol.  */
  FOR_EACH_VARIABLE (v)
    find_decls_types_in_var (v, fld);

  /* Set the assembler name on every decl found.  We need to do this
     now because free_lang_data_in_decl will invalidate data needed
     for mangling.  This breaks mangling on interdependent decls.  */
  FOR_EACH_VEC_ELT (fld->decls, i, t)
    assign_assembler_name_if_needed (t);

  /* Traverse every decl found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld->decls, i, t)
    free_lang_data_in_decl (t, fld);

  /* Traverse every type found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld->types, i, t)
    free_lang_data_in_type (t, fld);
}
6242
6243
/* Free resources that are used by FE but are not needed once they are done.
   Returns a TODO flags value (always 0).  */

static unsigned
free_lang_data (void)
{
  unsigned i;
  class free_lang_data_d fld;

  /* If we are the LTO frontend we have freed lang-specific data already.  */
  if (in_lto_p
      || (!flag_generate_lto && !flag_generate_offload))
    {
      /* Rebuild type inheritance graph even when not doing LTO to get
	 consistent profile data.  */
      rebuild_type_inheritance_graph ();
      return 0;
    }

  /* Caches used by fld_incomplete_type_of / fld_simplified_type for the
     duration of this pass.  */
  fld_incomplete_types = new hash_map<tree, tree>;
  fld_simplified_types = new hash_map<tree, tree>;

  /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one.  */
  if (vec_safe_is_empty (all_translation_units))
    build_translation_unit_decl (NULL_TREE);

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph (&fld);

  /* Create gimple variants for common types.  */
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  lang_hooks.gimplify_expr = lhd_gimplify_expr;
  lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
  lang_hooks.print_xnode = lhd_print_tree_nothing;
  lang_hooks.print_decl = lhd_print_tree_nothing;
  lang_hooks.print_type = lhd_print_tree_nothing;
  lang_hooks.print_identifier = lhd_print_tree_nothing;

  lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;

  /* With checking enabled, verify every type the pass touched.  */
  if (flag_checking)
    {
      int i;
      tree t;

      FOR_EACH_VEC_ELT (fld.types, i, t)
	verify_type (t);
    }

  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  tree_diagnostics_defaults (global_dc);

  rebuild_type_inheritance_graph ();

  delete fld_incomplete_types;
  delete fld_simplified_types;

  return 0;
}
6324
6325
namespace {

/* Pass metadata for the simple IPA pass that frees FE language data.  */
const pass_data pass_data_ipa_free_lang_data =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_lang_data", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_LANG_DATA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; all the work happens in free_lang_data.  */
class pass_ipa_free_lang_data : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_lang_data (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return free_lang_data (); }

}; // class pass_ipa_free_lang_data

} // anon namespace
6354
/* Create an instance of the free-lang-data IPA pass for the pass
   manager; the caller owns the returned object.  */

simple_ipa_opt_pass *
make_pass_ipa_free_lang_data (gcc::context *ctxt)
{
  return new pass_ipa_free_lang_data (ctxt);
}
6360 \f
6361 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6362 of the various TYPE_QUAL values. */
6363
6364 static void
6365 set_type_quals (tree type, int type_quals)
6366 {
6367 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6368 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6369 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6370 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6371 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6372 }
6373
6374 /* Returns true iff CAND and BASE have equivalent language-specific
6375 qualifiers. */
6376
6377 bool
6378 check_lang_type (const_tree cand, const_tree base)
6379 {
6380 if (lang_hooks.types.type_hash_eq == NULL)
6381 return true;
6382 /* type_hash_eq currently only applies to these types. */
6383 if (TREE_CODE (cand) != FUNCTION_TYPE
6384 && TREE_CODE (cand) != METHOD_TYPE)
6385 return true;
6386 return lang_hooks.types.type_hash_eq (cand, base);
6387 }
6388
6389 /* This function checks to see if TYPE matches the size one of the built-in
6390 atomic types, and returns that core atomic type. */
6391
6392 static tree
6393 find_atomic_core_type (const_tree type)
6394 {
6395 tree base_atomic_type;
6396
6397 /* Only handle complete types. */
6398 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6399 return NULL_TREE;
6400
6401 switch (tree_to_uhwi (TYPE_SIZE (type)))
6402 {
6403 case 8:
6404 base_atomic_type = atomicQI_type_node;
6405 break;
6406
6407 case 16:
6408 base_atomic_type = atomicHI_type_node;
6409 break;
6410
6411 case 32:
6412 base_atomic_type = atomicSI_type_node;
6413 break;
6414
6415 case 64:
6416 base_atomic_type = atomicDI_type_node;
6417 break;
6418
6419 case 128:
6420 base_atomic_type = atomicTI_type_node;
6421 break;
6422
6423 default:
6424 base_atomic_type = NULL_TREE;
6425 }
6426
6427 return base_atomic_type;
6428 }
6429
6430 /* Returns true iff unqualified CAND and BASE are equivalent. */
6431
bool
check_base_type (const_tree cand, const_tree base)
{
  /* Name, context and attributes must all agree before anything else.  */
  if (TYPE_NAME (cand) != TYPE_NAME (base)
      /* Apparently this is needed for Objective-C.  */
      || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
      || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
			        TYPE_ATTRIBUTES (base)))
    return false;
  /* Check alignment.  */
  if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
    return true;
  /* Atomic types increase minimal alignment.  We must do so as well
     or we get duplicated canonical types.  See PR88686.  */
  if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
    {
      /* See if this object can map to a basic atomic type.  */
      tree atomic_type = find_atomic_core_type (cand);
      if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
	return true;
    }
  return false;
}
6455
6456 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6457
6458 bool
6459 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6460 {
6461 return (TYPE_QUALS (cand) == type_quals
6462 && check_base_type (cand, base)
6463 && check_lang_type (cand, base));
6464 }
6465
6466 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6467
6468 static bool
6469 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6470 {
6471 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6472 && TYPE_NAME (cand) == TYPE_NAME (base)
6473 /* Apparently this is needed for Objective-C. */
6474 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6475 /* Check alignment. */
6476 && TYPE_ALIGN (cand) == align
6477 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6478 TYPE_ATTRIBUTES (base))
6479 && check_lang_type (cand, base));
6480 }
6481
6482 /* Return a version of the TYPE, qualified as indicated by the
6483 TYPE_QUALS, if one exists. If no qualified version exists yet,
6484 return NULL_TREE. */
6485
tree
get_qualified_type (tree type, int type_quals)
{
  /* TYPE itself may already carry exactly the requested qualifiers.  */
  if (TYPE_QUALS (type) == type_quals)
    return type;

  /* Check the main variant first; it heads the variant list.  */
  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  Note TP points at the link
	   preceding T, so unlinking T is a single store.  */
	tree t = *tp;
	*tp = TYPE_NEXT_VARIANT (t);
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
6514
6515 /* Like get_qualified_type, but creates the type if it does not
6516 exist. This function never returns NULL_TREE. */
6517
tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE.  Recursing mirrors the same qualifiers onto the
	   canonical type so the two stay in sync.  */
	{
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
6563
6564 /* Create a variant of type T with alignment ALIGN. */
6565
6566 tree
6567 build_aligned_type (tree type, unsigned int align)
6568 {
6569 tree t;
6570
6571 if (TYPE_PACKED (type)
6572 || TYPE_ALIGN (type) == align)
6573 return type;
6574
6575 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6576 if (check_aligned_type (t, type, align))
6577 return t;
6578
6579 t = build_variant_type_copy (type);
6580 SET_TYPE_ALIGN (t, align);
6581 TYPE_USER_ALIGN (t) = 1;
6582
6583 return t;
6584 }
6585
6586 /* Create a new distinct copy of TYPE. The new type is made its own
6587 MAIN_VARIANT. If TYPE requires structural equality checks, the
6588 resulting type requires structural equality checks; otherwise, its
6589 TYPE_CANONICAL points to itself. */
6590
tree
build_distinct_type_copy (tree type MEM_STAT_DECL)
{
  tree t = copy_node (type PASS_MEM_STAT);

  /* The copy must not share the pointer/reference caches of the
     original.  */
  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}
6616
6617 /* Create a new variant of TYPE, equivalent but distinct. This is so
6618 the caller can modify it. TYPE_CANONICAL for the return type will
6619 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6620 are considered equal by the language itself (or that both types
6621 require structural equality checks). */
6622
tree
build_variant_type_copy (tree type MEM_STAT_DECL)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type PASS_MEM_STAT);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P.  */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE, directly after
     the main variant M.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
6643 \f
6644 /* Return true if the from tree in both tree maps are equal. */
6645
6646 int
6647 tree_map_base_eq (const void *va, const void *vb)
6648 {
6649 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6650 *const b = (const struct tree_map_base *) vb;
6651 return (a->from == b->from);
6652 }
6653
6654 /* Hash a from tree in a tree_base_map. */
6655
6656 unsigned int
6657 tree_map_base_hash (const void *item)
6658 {
6659 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6660 }
6661
6662 /* Return true if this tree map structure is marked for garbage collection
6663 purposes. We simply return true if the from tree is marked, so that this
6664 structure goes away when the from tree goes away. */
6665
6666 int
6667 tree_map_base_marked_p (const void *p)
6668 {
6669 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6670 }
6671
6672 /* Hash a from tree in a tree_map. */
6673
6674 unsigned int
6675 tree_map_hash (const void *item)
6676 {
6677 return (((const struct tree_map *) item)->hash);
6678 }
6679
6680 /* Hash a from tree in a tree_decl_map. */
6681
6682 unsigned int
6683 tree_decl_map_hash (const void *item)
6684 {
6685 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6686 }
6687
6688 /* Return the initialization priority for DECL. */
6689
6690 priority_type
6691 decl_init_priority_lookup (tree decl)
6692 {
6693 symtab_node *snode = symtab_node::get (decl);
6694
6695 if (!snode)
6696 return DEFAULT_INIT_PRIORITY;
6697 return
6698 snode->get_init_priority ();
6699 }
6700
6701 /* Return the finalization priority for DECL. */
6702
6703 priority_type
6704 decl_fini_priority_lookup (tree decl)
6705 {
6706 cgraph_node *node = cgraph_node::get (decl);
6707
6708 if (!node)
6709 return DEFAULT_INIT_PRIORITY;
6710 return
6711 node->get_fini_priority ();
6712 }
6713
6714 /* Set the initialization priority for DECL to PRIORITY. */
6715
6716 void
6717 decl_init_priority_insert (tree decl, priority_type priority)
6718 {
6719 struct symtab_node *snode;
6720
6721 if (priority == DEFAULT_INIT_PRIORITY)
6722 {
6723 snode = symtab_node::get (decl);
6724 if (!snode)
6725 return;
6726 }
6727 else if (VAR_P (decl))
6728 snode = varpool_node::get_create (decl);
6729 else
6730 snode = cgraph_node::get_create (decl);
6731 snode->set_init_priority (priority);
6732 }
6733
6734 /* Set the finalization priority for DECL to PRIORITY. */
6735
6736 void
6737 decl_fini_priority_insert (tree decl, priority_type priority)
6738 {
6739 struct cgraph_node *node;
6740
6741 if (priority == DEFAULT_INIT_PRIORITY)
6742 {
6743 node = cgraph_node::get (decl);
6744 if (!node)
6745 return;
6746 }
6747 else
6748 node = cgraph_node::get_create (decl);
6749 node->set_fini_priority (priority);
6750 }
6751
6752 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6753
static void
print_debug_expr_statistics (void)
{
  /* Size/element counts come straight from the hash_table; collisions
     is a float ratio, hence the %f.  */
  fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) debug_expr_for_decl->size (),
	   (long) debug_expr_for_decl->elements (),
	   debug_expr_for_decl->collisions ());
}
6762
6763 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6764
static void
print_value_expr_statistics (void)
{
  /* Size/element counts come straight from the hash_table; collisions
     is a float ratio, hence the %f.  */
  fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) value_expr_for_decl->size (),
	   (long) value_expr_for_decl->elements (),
	   value_expr_for_decl->collisions ());
}
6773
6774 /* Lookup a debug expression for FROM, and return it if we find one. */
6775
6776 tree
6777 decl_debug_expr_lookup (tree from)
6778 {
6779 struct tree_decl_map *h, in;
6780 in.base.from = from;
6781
6782 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6783 if (h)
6784 return h->to;
6785 return NULL_TREE;
6786 }
6787
6788 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6789
6790 void
6791 decl_debug_expr_insert (tree from, tree to)
6792 {
6793 struct tree_decl_map *h;
6794
6795 h = ggc_alloc<tree_decl_map> ();
6796 h->base.from = from;
6797 h->to = to;
6798 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6799 }
6800
6801 /* Lookup a value expression for FROM, and return it if we find one. */
6802
6803 tree
6804 decl_value_expr_lookup (tree from)
6805 {
6806 struct tree_decl_map *h, in;
6807 in.base.from = from;
6808
6809 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6810 if (h)
6811 return h->to;
6812 return NULL_TREE;
6813 }
6814
6815 /* Insert a mapping FROM->TO in the value expression hashtable. */
6816
6817 void
6818 decl_value_expr_insert (tree from, tree to)
6819 {
6820 struct tree_decl_map *h;
6821
6822 h = ggc_alloc<tree_decl_map> ();
6823 h->base.from = from;
6824 h->to = to;
6825 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6826 }
6827
6828 /* Lookup a vector of debug arguments for FROM, and return it if we
6829 find one. */
6830
6831 vec<tree, va_gc> **
6832 decl_debug_args_lookup (tree from)
6833 {
6834 struct tree_vec_map *h, in;
6835
6836 if (!DECL_HAS_DEBUG_ARGS_P (from))
6837 return NULL;
6838 gcc_checking_assert (debug_args_for_decl != NULL);
6839 in.base.from = from;
6840 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6841 if (h)
6842 return &h->to;
6843 return NULL;
6844 }
6845
6846 /* Insert a mapping FROM->empty vector of debug arguments in the value
6847 expression hashtable. */
6848
6849 vec<tree, va_gc> **
6850 decl_debug_args_insert (tree from)
6851 {
6852 struct tree_vec_map *h;
6853 tree_vec_map **loc;
6854
6855 if (DECL_HAS_DEBUG_ARGS_P (from))
6856 return decl_debug_args_lookup (from);
6857 if (debug_args_for_decl == NULL)
6858 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6859 h = ggc_alloc<tree_vec_map> ();
6860 h->base.from = from;
6861 h->to = NULL;
6862 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6863 *loc = h;
6864 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6865 return &h->to;
6866 }
6867
6868 /* Hashing of types so that we don't make duplicates.
6869 The entry point is `type_hash_canon'. */
6870
6871 /* Generate the default hash code for TYPE. This is designed for
6872 speed, rather than maximum entropy. */
6873
hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  /* The tree code and (when present) the element/return type always
     participate in the hash.  */
  hstate.add_int (TREE_CODE (type));

  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  /* Mix in the per-code distinguishing fields.  These must stay in
     sync with what type_cache_hasher::equal compares.  */
  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    /* The typeless-storage flag distinguishes only arrays of
	       non-aggregates; for aggregates it is inherited and may
	       change later, so it must stay out of the hash.  */
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash whichever bound exists; self-referential bounds are
	   covered by hashing the constant's elements directly.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
6943
6944 /* These are the Hashtable callback functions. */
6945
6946 /* Returns true iff the types are equivalent. */
6947
bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				 TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
          && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Per-code comparison of the fields type_hash_canon_hash mixed in.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      /* Fully determined by the common fields already compared.  */
      return 1;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      /* Values may be shared or structurally equal TREE_LISTs.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      /* Basetype must match; arg lists may be shared or structurally
	 equal.  A break here falls through to the language hook.  */
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      /* A break here falls through to the language hook below.  */
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* Function/method types that matched structurally still get a final
     say from the front end.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
7066
7067 /* Given TYPE, and HASHCODE its hash code, return the canonical
7068 object for an identical type if one already exists.
7069 Otherwise, return TYPE, and record it as the canonical object.
7070
7071 To use this function, first create a type of the sort you want.
7072 Then compute its hash code from the fields of the type that
7073 make it different from other similar types.
7074 Then call this function and use the value. */
7075
tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An equivalent type already exists: discard TYPE and hand the
	 caller the canonical one.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      /* Recycle the UID when TYPE was the most recently created type,
	 to keep UIDs dense.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* No equivalent type yet: record TYPE as the canonical object.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
7138
/* Print hash-table occupancy statistics for the type hash to stderr.  */

static void
print_type_hash_statistics (void)
{
  fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
	   (long) type_hash_table->size (),
	   (long) type_hash_table->elements (),
	   type_hash_table->collisions ());
}
7147
7148 /* Given two lists of types
7149 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7150 return 1 if the lists contain the same types in the same order.
7151 Also, the TREE_PURPOSEs must match. */
7152
bool
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    /* Values must be pointer-identical; purposes may instead be
       structurally equal constants of the same type.  */
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
      return false;

  /* Equal only if both lists ended together (both pointers NULL).  */
  return t1 == t2;
}
7168
7169 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7170 given by TYPE. If the argument list accepts variable arguments,
7171 then this function counts only the ordinary arguments. */
7172
7173 int
7174 type_num_arguments (const_tree fntype)
7175 {
7176 int i = 0;
7177
7178 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7179 /* If the function does not take a variable number of arguments,
7180 the last element in the list will have type `void'. */
7181 if (VOID_TYPE_P (TREE_VALUE (t)))
7182 break;
7183 else
7184 ++i;
7185
7186 return i;
7187 }
7188
7189 /* Return the type of the function TYPE's argument ARGNO if known.
7190 For vararg function's where ARGNO refers to one of the variadic
7191 arguments return null. Otherwise, return a void_type_node for
7192 out-of-bounds ARGNO. */
7193
tree
type_argument_type (const_tree fntype, unsigned argno)
{
  /* Treat zero the same as an out-of-bounds argument number.
     (ARGNO is 1-based.)  */
  if (!argno)
    return void_type_node;

  function_args_iterator iter;

  tree argtype;
  unsigned i = 1;
  FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
    {
      /* A vararg function's argument list ends in a null.  Otherwise,
	 an ordinary function's argument list ends with void.  Return
	 null if ARGNO refers to a vararg argument, void_type_node if
	 it's out of bounds, and the formal argument type otherwise.  */
      if (!argtype)
	break;

      if (i == argno || VOID_TYPE_P (argtype))
	return argtype;

      ++i;
    }

  /* Reached only for vararg lists: ARGNO named a variadic argument.  */
  return NULL_TREE;
}
7222
7223 /* Nonzero if integer constants T1 and T2
7224 represent the same constant value. */
7225
int
tree_int_cst_equal (const_tree t1, const_tree t2)
{
  /* Identical nodes are trivially equal.  */
  if (t1 == t2)
    return 1;

  if (t1 == 0 || t2 == 0)
    return 0;

  /* These macros rebind the local T1/T2 to the wrapped operand when
     the argument is a location wrapper node.  */
  STRIP_ANY_LOCATION_WRAPPER (t1);
  STRIP_ANY_LOCATION_WRAPPER (t2);

  /* Compare in the widest precision so constants of different types
     with the same numeric value compare equal.  */
  if (TREE_CODE (t1) == INTEGER_CST
      && TREE_CODE (t2) == INTEGER_CST
      && wi::to_widest (t1) == wi::to_widest (t2))
    return 1;

  return 0;
}
7245
7246 /* Return true if T is an INTEGER_CST whose numerical value (extended
7247 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7248
7249 bool
7250 tree_fits_shwi_p (const_tree t)
7251 {
7252 return (t != NULL_TREE
7253 && TREE_CODE (t) == INTEGER_CST
7254 && wi::fits_shwi_p (wi::to_widest (t)));
7255 }
7256
7257 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7258 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7259
7260 bool
7261 tree_fits_poly_int64_p (const_tree t)
7262 {
7263 if (t == NULL_TREE)
7264 return false;
7265 if (POLY_INT_CST_P (t))
7266 {
7267 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7268 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7269 return false;
7270 return true;
7271 }
7272 return (TREE_CODE (t) == INTEGER_CST
7273 && wi::fits_shwi_p (wi::to_widest (t)));
7274 }
7275
7276 /* Return true if T is an INTEGER_CST whose numerical value (extended
7277 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7278
7279 bool
7280 tree_fits_uhwi_p (const_tree t)
7281 {
7282 return (t != NULL_TREE
7283 && TREE_CODE (t) == INTEGER_CST
7284 && wi::fits_uhwi_p (wi::to_widest (t)));
7285 }
7286
7287 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7288 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7289
7290 bool
7291 tree_fits_poly_uint64_p (const_tree t)
7292 {
7293 if (t == NULL_TREE)
7294 return false;
7295 if (POLY_INT_CST_P (t))
7296 {
7297 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7298 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7299 return false;
7300 return true;
7301 }
7302 return (TREE_CODE (t) == INTEGER_CST
7303 && wi::fits_uhwi_p (wi::to_widest (t)));
7304 }
7305
7306 /* T is an INTEGER_CST whose numerical value (extended according to
7307 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7308 HOST_WIDE_INT. */
7309
HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  /* The caller must have validated T with tree_fits_shwi_p.  */
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
}
7316
7317 /* T is an INTEGER_CST whose numerical value (extended according to
7318 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7319 HOST_WIDE_INT. */
7320
unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  /* The caller must have validated T with tree_fits_uhwi_p.  */
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
}
7327
7328 /* Return the most significant (sign) bit of T. */
7329
int
tree_int_cst_sign_bit (const_tree t)
{
  /* The sign bit is the highest bit within T's type precision.  */
  unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;

  return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
}
7337
7338 /* Return an indication of the sign of the integer constant T.
7339 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7340 Note that -1 will never be returned if T's type is unsigned. */
7341
7342 int
7343 tree_int_cst_sgn (const_tree t)
7344 {
7345 if (wi::to_wide (t) == 0)
7346 return 0;
7347 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7348 return 1;
7349 else if (wi::neg_p (wi::to_wide (t)))
7350 return -1;
7351 else
7352 return 1;
7353 }
7354
7355 /* Return the minimum number of bits needed to represent VALUE in a
7356 signed or unsigned type, UNSIGNEDP says which. */
7357
unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
{
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 no matter
     whether unsignedp is true or false.  */

  if (integer_zerop (value))
    return 1;
  else
    return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
}
7379
7380 /* Return truthvalue of whether T1 is the same tree structure as T2.
7381 Return 1 if they are the same.
7382 Return 0 if they are understandably different.
7383 Return -1 if either contains tree structure not understood by
7384 this function. */
7385
7386 int
7387 simple_cst_equal (const_tree t1, const_tree t2)
7388 {
7389 enum tree_code code1, code2;
7390 int cmp;
7391 int i;
7392
7393 if (t1 == t2)
7394 return 1;
7395 if (t1 == 0 || t2 == 0)
7396 return 0;
7397
7398 /* For location wrappers to be the same, they must be at the same
7399 source location (and wrap the same thing). */
7400 if (location_wrapper_p (t1) && location_wrapper_p (t2))
7401 {
7402 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7403 return 0;
7404 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7405 }
7406
7407 code1 = TREE_CODE (t1);
7408 code2 = TREE_CODE (t2);
7409
7410 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7411 {
7412 if (CONVERT_EXPR_CODE_P (code2)
7413 || code2 == NON_LVALUE_EXPR)
7414 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7415 else
7416 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7417 }
7418
7419 else if (CONVERT_EXPR_CODE_P (code2)
7420 || code2 == NON_LVALUE_EXPR)
7421 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7422
7423 if (code1 != code2)
7424 return 0;
7425
7426 switch (code1)
7427 {
7428 case INTEGER_CST:
7429 return wi::to_widest (t1) == wi::to_widest (t2);
7430
7431 case REAL_CST:
7432 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7433
7434 case FIXED_CST:
7435 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7436
7437 case STRING_CST:
7438 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7439 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7440 TREE_STRING_LENGTH (t1)));
7441
7442 case CONSTRUCTOR:
7443 {
7444 unsigned HOST_WIDE_INT idx;
7445 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7446 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7447
7448 if (vec_safe_length (v1) != vec_safe_length (v2))
7449 return false;
7450
7451 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7452 /* ??? Should we handle also fields here? */
7453 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7454 return false;
7455 return true;
7456 }
7457
7458 case SAVE_EXPR:
7459 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7460
7461 case CALL_EXPR:
7462 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7463 if (cmp <= 0)
7464 return cmp;
7465 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7466 return 0;
7467 {
7468 const_tree arg1, arg2;
7469 const_call_expr_arg_iterator iter1, iter2;
7470 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7471 arg2 = first_const_call_expr_arg (t2, &iter2);
7472 arg1 && arg2;
7473 arg1 = next_const_call_expr_arg (&iter1),
7474 arg2 = next_const_call_expr_arg (&iter2))
7475 {
7476 cmp = simple_cst_equal (arg1, arg2);
7477 if (cmp <= 0)
7478 return cmp;
7479 }
7480 return arg1 == arg2;
7481 }
7482
7483 case TARGET_EXPR:
7484 /* Special case: if either target is an unallocated VAR_DECL,
7485 it means that it's going to be unified with whatever the
7486 TARGET_EXPR is really supposed to initialize, so treat it
7487 as being equivalent to anything. */
7488 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7489 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7490 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7491 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7492 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7493 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7494 cmp = 1;
7495 else
7496 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7497
7498 if (cmp <= 0)
7499 return cmp;
7500
7501 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7502
7503 case WITH_CLEANUP_EXPR:
7504 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7505 if (cmp <= 0)
7506 return cmp;
7507
7508 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7509
7510 case COMPONENT_REF:
7511 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7512 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7513
7514 return 0;
7515
7516 case VAR_DECL:
7517 case PARM_DECL:
7518 case CONST_DECL:
7519 case FUNCTION_DECL:
7520 return 0;
7521
7522 default:
7523 if (POLY_INT_CST_P (t1))
7524 /* A false return means maybe_ne rather than known_ne. */
7525 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7526 TYPE_SIGN (TREE_TYPE (t1))),
7527 poly_widest_int::from (poly_int_cst_value (t2),
7528 TYPE_SIGN (TREE_TYPE (t2))));
7529 break;
7530 }
7531
7532 /* This general rule works for most tree codes. All exceptions should be
7533 handled above. If this is a language-specific tree code, we can't
7534 trust what might be in the operand, so say we don't know
7535 the situation. */
7536 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7537 return -1;
7538
7539 switch (TREE_CODE_CLASS (code1))
7540 {
7541 case tcc_unary:
7542 case tcc_binary:
7543 case tcc_comparison:
7544 case tcc_expression:
7545 case tcc_reference:
7546 case tcc_statement:
7547 cmp = 1;
7548 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7549 {
7550 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7551 if (cmp <= 0)
7552 return cmp;
7553 }
7554
7555 return cmp;
7556
7557 default:
7558 return -1;
7559 }
7560 }
7561
7562 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7563 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7564 than U, respectively. */
7565
7566 int
7567 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7568 {
7569 if (tree_int_cst_sgn (t) < 0)
7570 return -1;
7571 else if (!tree_fits_uhwi_p (t))
7572 return 1;
7573 else if (TREE_INT_CST_LOW (t) == u)
7574 return 0;
7575 else if (TREE_INT_CST_LOW (t) < u)
7576 return -1;
7577 else
7578 return 1;
7579 }
7580
7581 /* Return true if SIZE represents a constant size that is in bounds of
7582 what the middle-end and the backend accepts (covering not more than
7583 half of the address-space).
7584 When PERR is non-null, set *PERR on failure to the description of
7585 why SIZE is not valid. */
7586
bool
valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
{
  /* A POLY_INT_CST is valid if each of its coefficients is.  Note the
     recursive calls pass a null PERR, so no error classification is
     recorded for an invalid coefficient.  */
  if (POLY_INT_CST_P (size))
    {
      if (TREE_OVERFLOW (size))
	return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
	  return false;
      return true;
    }

  /* Point PERR at a local dummy when the caller passed none, so the
     checks below can store through it unconditionally.  */
  cst_size_error error;
  if (!perr)
    perr = &error;

  if (TREE_CODE (size) != INTEGER_CST)
    {
      *perr = cst_size_not_constant;
      return false;
    }

  if (TREE_OVERFLOW_P (size))
    {
      *perr = cst_size_overflow;
      return false;
    }

  if (tree_int_cst_sgn (size) < 0)
    {
      *perr = cst_size_negative;
      return false;
    }
  /* Reject sizes that do not fit in an unsigned HOST_WIDE_INT or that
     cover more than half of the sizetype range (2 * SIZE would
     overflow).  */
  if (!tree_fits_uhwi_p (size)
      || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
	  < wi::to_widest (size) * 2))
    {
      *perr = cst_size_too_big;
      return false;
    }

  return true;
}
7631
7632 /* Return the precision of the type, or for a complex or vector type the
7633 precision of the type of its elements. */
7634
7635 unsigned int
7636 element_precision (const_tree type)
7637 {
7638 if (!TYPE_P (type))
7639 type = TREE_TYPE (type);
7640 enum tree_code code = TREE_CODE (type);
7641 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7642 type = TREE_TYPE (type);
7643
7644 return TYPE_PRECISION (type);
7645 }
7646
7647 /* Return true if CODE represents an associative tree code. Otherwise
7648 return false. */
7649 bool
7650 associative_tree_code (enum tree_code code)
7651 {
7652 switch (code)
7653 {
7654 case BIT_IOR_EXPR:
7655 case BIT_AND_EXPR:
7656 case BIT_XOR_EXPR:
7657 case PLUS_EXPR:
7658 case MULT_EXPR:
7659 case MIN_EXPR:
7660 case MAX_EXPR:
7661 return true;
7662
7663 default:
7664 break;
7665 }
7666 return false;
7667 }
7668
7669 /* Return true if CODE represents a commutative tree code. Otherwise
7670 return false. */
7671 bool
7672 commutative_tree_code (enum tree_code code)
7673 {
7674 switch (code)
7675 {
7676 case PLUS_EXPR:
7677 case MULT_EXPR:
7678 case MULT_HIGHPART_EXPR:
7679 case MIN_EXPR:
7680 case MAX_EXPR:
7681 case BIT_IOR_EXPR:
7682 case BIT_XOR_EXPR:
7683 case BIT_AND_EXPR:
7684 case NE_EXPR:
7685 case EQ_EXPR:
7686 case UNORDERED_EXPR:
7687 case ORDERED_EXPR:
7688 case UNEQ_EXPR:
7689 case LTGT_EXPR:
7690 case TRUTH_AND_EXPR:
7691 case TRUTH_XOR_EXPR:
7692 case TRUTH_OR_EXPR:
7693 case WIDEN_MULT_EXPR:
7694 case VEC_WIDEN_MULT_HI_EXPR:
7695 case VEC_WIDEN_MULT_LO_EXPR:
7696 case VEC_WIDEN_MULT_EVEN_EXPR:
7697 case VEC_WIDEN_MULT_ODD_EXPR:
7698 return true;
7699
7700 default:
7701 break;
7702 }
7703 return false;
7704 }
7705
7706 /* Return true if CODE represents a ternary tree code for which the
7707 first two operands are commutative. Otherwise return false. */
7708 bool
7709 commutative_ternary_tree_code (enum tree_code code)
7710 {
7711 switch (code)
7712 {
7713 case WIDEN_MULT_PLUS_EXPR:
7714 case WIDEN_MULT_MINUS_EXPR:
7715 case DOT_PROD_EXPR:
7716 return true;
7717
7718 default:
7719 break;
7720 }
7721 return false;
7722 }
7723
7724 /* Returns true if CODE can overflow. */
7725
7726 bool
7727 operation_can_overflow (enum tree_code code)
7728 {
7729 switch (code)
7730 {
7731 case PLUS_EXPR:
7732 case MINUS_EXPR:
7733 case MULT_EXPR:
7734 case LSHIFT_EXPR:
7735 /* Can overflow in various ways. */
7736 return true;
7737 case TRUNC_DIV_EXPR:
7738 case EXACT_DIV_EXPR:
7739 case FLOOR_DIV_EXPR:
7740 case CEIL_DIV_EXPR:
7741 /* For INT_MIN / -1. */
7742 return true;
7743 case NEGATE_EXPR:
7744 case ABS_EXPR:
7745 /* For -INT_MIN. */
7746 return true;
7747 default:
7748 /* These operators cannot overflow. */
7749 return false;
7750 }
7751 }
7752
7753 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7754 ftrapv doesn't generate trapping insns for CODE. */
7755
7756 bool
7757 operation_no_trapping_overflow (tree type, enum tree_code code)
7758 {
7759 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7760
7761 /* We don't generate instructions that trap on overflow for complex or vector
7762 types. */
7763 if (!INTEGRAL_TYPE_P (type))
7764 return true;
7765
7766 if (!TYPE_OVERFLOW_TRAPS (type))
7767 return true;
7768
7769 switch (code)
7770 {
7771 case PLUS_EXPR:
7772 case MINUS_EXPR:
7773 case MULT_EXPR:
7774 case NEGATE_EXPR:
7775 case ABS_EXPR:
7776 /* These operators can overflow, and -ftrapv generates trapping code for
7777 these. */
7778 return false;
7779 case TRUNC_DIV_EXPR:
7780 case EXACT_DIV_EXPR:
7781 case FLOOR_DIV_EXPR:
7782 case CEIL_DIV_EXPR:
7783 case LSHIFT_EXPR:
7784 /* These operators can overflow, but -ftrapv does not generate trapping
7785 code for these. */
7786 return true;
7787 default:
7788 /* These operators cannot overflow. */
7789 return true;
7790 }
7791 }
7792
7793 namespace inchash
7794 {
7795
7796 /* Generate a hash value for an expression. This can be used iteratively
7797 by passing a previous result as the HSTATE argument.
7798
7799 This function is intended to produce the same hash for expressions which
7800 would compare equal using operand_equal_p. */
7801 void
7802 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7803 {
7804 int i;
7805 enum tree_code code;
7806 enum tree_code_class tclass;
7807
7808 if (t == NULL_TREE || t == error_mark_node)
7809 {
7810 hstate.merge_hash (0);
7811 return;
7812 }
7813
7814 STRIP_ANY_LOCATION_WRAPPER (t);
7815
7816 if (!(flags & OEP_ADDRESS_OF))
7817 STRIP_NOPS (t);
7818
7819 code = TREE_CODE (t);
7820
7821 switch (code)
7822 {
7823 /* Alas, constants aren't shared, so we can't rely on pointer
7824 identity. */
7825 case VOID_CST:
7826 hstate.merge_hash (0);
7827 return;
7828 case INTEGER_CST:
7829 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7830 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7831 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7832 return;
7833 case REAL_CST:
7834 {
7835 unsigned int val2;
7836 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7837 val2 = rvc_zero;
7838 else
7839 val2 = real_hash (TREE_REAL_CST_PTR (t));
7840 hstate.merge_hash (val2);
7841 return;
7842 }
7843 case FIXED_CST:
7844 {
7845 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7846 hstate.merge_hash (val2);
7847 return;
7848 }
7849 case STRING_CST:
7850 hstate.add ((const void *) TREE_STRING_POINTER (t),
7851 TREE_STRING_LENGTH (t));
7852 return;
7853 case COMPLEX_CST:
7854 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7855 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7856 return;
7857 case VECTOR_CST:
7858 {
7859 hstate.add_int (VECTOR_CST_NPATTERNS (t));
7860 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
7861 unsigned int count = vector_cst_encoded_nelts (t);
7862 for (unsigned int i = 0; i < count; ++i)
7863 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
7864 return;
7865 }
7866 case SSA_NAME:
7867 /* We can just compare by pointer. */
7868 hstate.add_hwi (SSA_NAME_VERSION (t));
7869 return;
7870 case PLACEHOLDER_EXPR:
7871 /* The node itself doesn't matter. */
7872 return;
7873 case BLOCK:
7874 case OMP_CLAUSE:
7875 /* Ignore. */
7876 return;
7877 case TREE_LIST:
7878 /* A list of expressions, for a CALL_EXPR or as the elements of a
7879 VECTOR_CST. */
7880 for (; t; t = TREE_CHAIN (t))
7881 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7882 return;
7883 case CONSTRUCTOR:
7884 {
7885 unsigned HOST_WIDE_INT idx;
7886 tree field, value;
7887 flags &= ~OEP_ADDRESS_OF;
7888 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7889 {
7890 inchash::add_expr (field, hstate, flags);
7891 inchash::add_expr (value, hstate, flags);
7892 }
7893 return;
7894 }
7895 case STATEMENT_LIST:
7896 {
7897 tree_stmt_iterator i;
7898 for (i = tsi_start (CONST_CAST_TREE (t));
7899 !tsi_end_p (i); tsi_next (&i))
7900 inchash::add_expr (tsi_stmt (i), hstate, flags);
7901 return;
7902 }
7903 case TREE_VEC:
7904 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7905 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7906 return;
7907 case IDENTIFIER_NODE:
7908 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
7909 return;
7910 case FUNCTION_DECL:
7911 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7912 Otherwise nodes that compare equal according to operand_equal_p might
7913 get different hash codes. However, don't do this for machine specific
7914 or front end builtins, since the function code is overloaded in those
7915 cases. */
7916 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7917 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7918 {
7919 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7920 code = TREE_CODE (t);
7921 }
7922 /* FALL THROUGH */
7923 default:
7924 if (POLY_INT_CST_P (t))
7925 {
7926 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7927 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
7928 return;
7929 }
7930 tclass = TREE_CODE_CLASS (code);
7931
7932 if (tclass == tcc_declaration)
7933 {
7934 /* DECL's have a unique ID */
7935 hstate.add_hwi (DECL_UID (t));
7936 }
7937 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7938 {
7939 /* For comparisons that can be swapped, use the lower
7940 tree code. */
7941 enum tree_code ccode = swap_tree_comparison (code);
7942 if (code < ccode)
7943 ccode = code;
7944 hstate.add_object (ccode);
7945 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7946 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7947 }
7948 else if (CONVERT_EXPR_CODE_P (code))
7949 {
7950 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7951 operand_equal_p. */
7952 enum tree_code ccode = NOP_EXPR;
7953 hstate.add_object (ccode);
7954
7955 /* Don't hash the type, that can lead to having nodes which
7956 compare equal according to operand_equal_p, but which
7957 have different hash codes. Make sure to include signedness
7958 in the hash computation. */
7959 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7960 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7961 }
7962 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7963 else if (code == MEM_REF
7964 && (flags & OEP_ADDRESS_OF) != 0
7965 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7966 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7967 && integer_zerop (TREE_OPERAND (t, 1)))
7968 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7969 hstate, flags);
7970 /* Don't ICE on FE specific trees, or their arguments etc.
7971 during operand_equal_p hash verification. */
7972 else if (!IS_EXPR_CODE_CLASS (tclass))
7973 gcc_assert (flags & OEP_HASH_CHECK);
7974 else
7975 {
7976 unsigned int sflags = flags;
7977
7978 hstate.add_object (code);
7979
7980 switch (code)
7981 {
7982 case ADDR_EXPR:
7983 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7984 flags |= OEP_ADDRESS_OF;
7985 sflags = flags;
7986 break;
7987
7988 case INDIRECT_REF:
7989 case MEM_REF:
7990 case TARGET_MEM_REF:
7991 flags &= ~OEP_ADDRESS_OF;
7992 sflags = flags;
7993 break;
7994
7995 case ARRAY_REF:
7996 case ARRAY_RANGE_REF:
7997 case COMPONENT_REF:
7998 case BIT_FIELD_REF:
7999 sflags &= ~OEP_ADDRESS_OF;
8000 break;
8001
8002 case COND_EXPR:
8003 flags &= ~OEP_ADDRESS_OF;
8004 break;
8005
8006 case WIDEN_MULT_PLUS_EXPR:
8007 case WIDEN_MULT_MINUS_EXPR:
8008 {
8009 /* The multiplication operands are commutative. */
8010 inchash::hash one, two;
8011 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
8012 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
8013 hstate.add_commutative (one, two);
8014 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
8015 return;
8016 }
8017
8018 case CALL_EXPR:
8019 if (CALL_EXPR_FN (t) == NULL_TREE)
8020 hstate.add_int (CALL_EXPR_IFN (t));
8021 break;
8022
8023 case TARGET_EXPR:
8024 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
8025 Usually different TARGET_EXPRs just should use
8026 different temporaries in their slots. */
8027 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
8028 return;
8029
8030 default:
8031 break;
8032 }
8033
8034 /* Don't hash the type, that can lead to having nodes which
8035 compare equal according to operand_equal_p, but which
8036 have different hash codes. */
8037 if (code == NON_LVALUE_EXPR)
8038 {
8039 /* Make sure to include signness in the hash computation. */
8040 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
8041 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
8042 }
8043
8044 else if (commutative_tree_code (code))
8045 {
8046 /* It's a commutative expression. We want to hash it the same
8047 however it appears. We do this by first hashing both operands
8048 and then rehashing based on the order of their independent
8049 hashes. */
8050 inchash::hash one, two;
8051 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
8052 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
8053 hstate.add_commutative (one, two);
8054 }
8055 else
8056 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
8057 inchash::add_expr (TREE_OPERAND (t, i), hstate,
8058 i == 0 ? flags : sflags);
8059 }
8060 return;
8061 }
8062 }
8063
8064 }
8065
8066 /* Constructors for pointer, array and function types.
8067 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8068 constructed by language-dependent code, not here.) */
8069
8070 /* Construct, lay out and return the type of pointers to TO_TYPE with
8071 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
8072 reference all of memory. If such a type has already been
8073 constructed, reuse it. */
8074
tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request: the may_alias override
     below must not influence the canonical type computed at the end.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  /* Otherwise build a new one and link it at the head of TO_TYPE's
     list of pointer types.  */
  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
8130
8131 /* By default build pointers in ptr_mode. */
8132
8133 tree
8134 build_pointer_type (tree to_type)
8135 {
8136 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8137 : TYPE_ADDR_SPACE (to_type);
8138 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8139 return build_pointer_type_for_mode (to_type, pointer_mode, false);
8140 }
8141
8142 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
8143
tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's original request: the may_alias override
     below must not influence the canonical type computed at the end.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  /* Otherwise build a new one and link it at the head of TO_TYPE's
     list of reference types.  */
  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
8197
8198
8199 /* Build the node for the type of references-to-TO_TYPE by default
8200 in ptr_mode. */
8201
8202 tree
8203 build_reference_type (tree to_type)
8204 {
8205 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8206 : TYPE_ADDR_SPACE (to_type);
8207 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8208 return build_reference_type_for_mode (to_type, pointer_mode, false);
8209 }
8210
/* Cache of previously built nonstandard integer types, indexed by
   precision.  Slots [0, MAX_INT_CACHED_PREC] hold signed types; the
   slots above that hold the unsigned variants.  */
#define MAX_INT_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8214
8215 /* Builds a signed or unsigned integer type of precision PRECISION.
8216 Used for C bitfields whose precision does not match that of
8217 built-in target types. */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* Reuse UNSIGNEDP as the cache-index offset: unsigned types live in
     the upper half of nonstandard_integer_type_cache.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Set the min/max values, mode, size and alignment for the chosen
     signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  /* Merge with an existing identical type via the type hash table.  */
  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
8250
/* Cache of previously built nonstandard boolean types, indexed by
   precision.  */
#define MAX_BOOL_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8254
8255 /* Builds a boolean type of precision PRECISION.
8256 Used for boolean vectors to choose proper vector element size. */
8257 tree
8258 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8259 {
8260 tree type;
8261
8262 if (precision <= MAX_BOOL_CACHED_PREC)
8263 {
8264 type = nonstandard_boolean_type_cache[precision];
8265 if (type)
8266 return type;
8267 }
8268
8269 type = make_node (BOOLEAN_TYPE);
8270 TYPE_PRECISION (type) = precision;
8271 fixup_signed_type (type);
8272
8273 if (precision <= MAX_INT_CACHED_PREC)
8274 nonstandard_boolean_type_cache[precision] = type;
8275
8276 return type;
8277 }
8278
8279 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8280 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8281 is true, reuse such a type that has already been constructed. */
8282
static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  /* Convert the bounds to the base type.  HIGHVAL may be null,
     indicating a range with no upper bound.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The range type inherits its representation (precision, mode, size,
     alignment) from the base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
  SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));

  if (!shared)
    return itype;

  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Both bounds are constant, so the type can be merged with an
     existing identical one through the type hash table.  */
  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}
8320
8321 /* Wrapper around build_range_type_1 with SHARED set to true. */
8322
8323 tree
8324 build_range_type (tree type, tree lowval, tree highval)
8325 {
8326 return build_range_type_1 (type, lowval, highval, true);
8327 }
8328
8329 /* Wrapper around build_range_type_1 with SHARED set to false. */
8330
8331 tree
8332 build_nonshared_range_type (tree type, tree lowval, tree highval)
8333 {
8334 return build_range_type_1 (type, lowval, highval, false);
8335 }
8336
8337 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8338 MAXVAL should be the maximum value in the domain
8339 (one less than the length of the array).
8340
8341 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8342 We don't enforce this limit, that is up to caller (e.g. language front end).
8343 The limit exists because the result is a signed type and we don't handle
8344 sizes that use more than one HOST_WIDE_INT. */
8345
tree
build_index_type (tree maxval)
{
  /* An index domain is a shared sizetype range [0, MAXVAL].  */
  return build_range_type (sizetype, size_zero_node, maxval);
}
8351
8352 /* Return true if the debug information for TYPE, a subtype, should be emitted
8353 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8354 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8355 debug info and doesn't reflect the source code. */
8356
bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
{
  tree base_type = TREE_TYPE (type), low, high;

  /* Subrange types have a base type which is an integral type.  */
  if (!INTEGRAL_TYPE_P (base_type))
    return false;

  /* Get the real bounds of the subtype.  Prefer the language hook,
     which may know more than TYPE_MIN/MAX_VALUE.  */
  if (lang_hooks.types.get_subrange_bounds)
    lang_hooks.types.get_subrange_bounds (type, &low, &high);
  else
    {
      low = TYPE_MIN_VALUE (type);
      high = TYPE_MAX_VALUE (type);
    }

  /* If the type and its base type have the same representation and the same
     name, then the type is not a subrange but a copy of the base type.  */
  if ((TREE_CODE (base_type) == INTEGER_TYPE
       || TREE_CODE (base_type) == BOOLEAN_TYPE)
      && int_size_in_bytes (type) == int_size_in_bytes (base_type)
      && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
      && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
      && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
    return false;

  /* It is a genuine subrange; report the bounds if requested.  */
  if (lowval)
    *lowval = low;
  if (highval)
    *highval = high;
  return true;
}
8391
8392 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8393 and number of elements specified by the range of values of INDEX_TYPE.
8394 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8395 If SHARED is true, reuse such a type that has already been constructed. */
8396
static tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared)
{
  tree t;

  /* Arrays of functions are invalid; recover by using int elements.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  /* If the element type is incomplete at this point we get marked for
     structural equality.  Do not record these types in the canonical
     type hashtable.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    return t;

  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Compute TYPE_CANONICAL only when the hash table did not hand back
     an already-canonicalized type.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared);
    }

  return t;
}
8445
8446 /* Wrapper around build_array_type_1 with SHARED set to true. */
8447
8448 tree
8449 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8450 {
8451 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
8452 }
8453
8454 /* Wrapper around build_array_type_1 with SHARED set to false. */
8455
8456 tree
8457 build_nonshared_array_type (tree elt_type, tree index_type)
8458 {
8459 return build_array_type_1 (elt_type, index_type, false, false);
8460 }
8461
8462 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8463 sizetype. */
8464
8465 tree
8466 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8467 {
8468 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8469 }
8470
8471 /* Recursively examines the array elements of TYPE, until a non-array
8472 element type is found. */
8473
8474 tree
8475 strip_array_types (tree type)
8476 {
8477 while (TREE_CODE (type) == ARRAY_TYPE)
8478 type = TREE_TYPE (type);
8479
8480 return type;
8481 }
8482
8483 /* Computes the canonical argument types from the argument type list
8484 ARGTYPES.
8485
8486 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8487 on entry to this function, or if any of the ARGTYPES are
8488 structural.
8489
8490 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8491 true on entry to this function, or if any of the ARGTYPES are
8492 non-canonical.
8493
8494 Returns a canonical argument list, which may be ARGTYPES when the
8495 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8496 true) or would not differ from ARGTYPES. */
8497
static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* First pass: classify the argument types.  Stop early once a
     structural type is found, since ARGTYPES is returned unchanged in
     that case.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  /* void_list_node terminates a non-varargs list; re-append the
	     shared node below instead of copying it.  */
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
8554
8555 /* Construct, lay out and return
8556 the type of functions returning type VALUE_TYPE
8557 given arguments of types ARG_TYPES.
8558 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8559 are data type nodes for the arguments of the function.
8560 If such a type has already been constructed, reuse it. */
8561
8562 tree
8563 build_function_type (tree value_type, tree arg_types)
8564 {
8565 tree t;
8566 inchash::hash hstate;
8567 bool any_structural_p, any_noncanonical_p;
8568 tree canon_argtypes;
8569
8570 gcc_assert (arg_types != error_mark_node);
8571
8572 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8573 {
8574 error ("function return type cannot be function");
8575 value_type = integer_type_node;
8576 }
8577
8578 /* Make a node of the sort we want. */
8579 t = make_node (FUNCTION_TYPE);
8580 TREE_TYPE (t) = value_type;
8581 TYPE_ARG_TYPES (t) = arg_types;
8582
8583 /* If we already have such a type, use the old one. */
8584 hashval_t hash = type_hash_canon_hash (t);
8585 t = type_hash_canon (hash, t);
8586
8587 /* Set up the canonical type. */
8588 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8589 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8590 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8591 &any_structural_p,
8592 &any_noncanonical_p);
8593 if (any_structural_p)
8594 SET_TYPE_STRUCTURAL_EQUALITY (t);
8595 else if (any_noncanonical_p)
8596 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8597 canon_argtypes);
8598
8599 if (!COMPLETE_TYPE_P (t))
8600 layout_type (t);
8601 return t;
8602 }
8603
8604 /* Build a function type. The RETURN_TYPE is the type returned by the
8605 function. If VAARGS is set, no void_type_node is appended to the
8606 list. ARGP must be always be terminated be a NULL_TREE. */
8607
8608 static tree
8609 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8610 {
8611 tree t, args, last;
8612
8613 t = va_arg (argp, tree);
8614 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8615 args = tree_cons (NULL_TREE, t, args);
8616
8617 if (vaargs)
8618 {
8619 last = args;
8620 if (args != NULL_TREE)
8621 args = nreverse (args);
8622 gcc_assert (last != void_list_node);
8623 }
8624 else if (args == NULL_TREE)
8625 args = void_list_node;
8626 else
8627 {
8628 last = args;
8629 args = nreverse (args);
8630 TREE_CHAIN (last) = void_list_node;
8631 }
8632 args = build_function_type (return_type, args);
8633
8634 return args;
8635 }
8636
8637 /* Build a function type. The RETURN_TYPE is the type returned by the
8638 function. If additional arguments are provided, they are
8639 additional argument types. The list of argument types must always
8640 be terminated by NULL_TREE. */
8641
8642 tree
8643 build_function_type_list (tree return_type, ...)
8644 {
8645 tree args;
8646 va_list p;
8647
8648 va_start (p, return_type);
8649 args = build_function_type_list_1 (false, return_type, p);
8650 va_end (p);
8651 return args;
8652 }
8653
8654 /* Build a variable argument function type. The RETURN_TYPE is the
8655 type returned by the function. If additional arguments are provided,
8656 they are additional argument types. The list of argument types must
8657 always be terminated by NULL_TREE. */
8658
8659 tree
8660 build_varargs_function_type_list (tree return_type, ...)
8661 {
8662 tree args;
8663 va_list p;
8664
8665 va_start (p, return_type);
8666 args = build_function_type_list_1 (true, return_type, p);
8667 va_end (p);
8668
8669 return args;
8670 }
8671
8672 /* Build a function type. RETURN_TYPE is the type returned by the
8673 function; VAARGS indicates whether the function takes varargs. The
8674 function takes N named arguments, the types of which are provided in
8675 ARG_TYPES. */
8676
8677 static tree
8678 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8679 tree *arg_types)
8680 {
8681 int i;
8682 tree t = vaargs ? NULL_TREE : void_list_node;
8683
8684 for (i = n - 1; i >= 0; i--)
8685 t = tree_cons (NULL_TREE, arg_types[i], t);
8686
8687 return build_function_type (return_type, t);
8688 }
8689
8690 /* Build a function type. RETURN_TYPE is the type returned by the
8691 function. The function takes N named arguments, the types of which
8692 are provided in ARG_TYPES. */
8693
8694 tree
8695 build_function_type_array (tree return_type, int n, tree *arg_types)
8696 {
8697 return build_function_type_array_1 (false, return_type, n, arg_types);
8698 }
8699
8700 /* Build a variable argument function type. RETURN_TYPE is the type
8701 returned by the function. The function takes N named arguments, the
8702 types of which are provided in ARG_TYPES. */
8703
8704 tree
8705 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8706 {
8707 return build_function_type_array_1 (true, return_type, n, arg_types);
8708 }
8709
8710 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8711 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8712 for the method. An implicit additional parameter (of type
8713 pointer-to-BASETYPE) is added to the ARGTYPES. */
8714
8715 tree
8716 build_method_type_directly (tree basetype,
8717 tree rettype,
8718 tree argtypes)
8719 {
8720 tree t;
8721 tree ptype;
8722 bool any_structural_p, any_noncanonical_p;
8723 tree canon_argtypes;
8724
8725 /* Make a node of the sort we want. */
8726 t = make_node (METHOD_TYPE);
8727
8728 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8729 TREE_TYPE (t) = rettype;
8730 ptype = build_pointer_type (basetype);
8731
8732 /* The actual arglist for this function includes a "hidden" argument
8733 which is "this". Put it into the list of argument types. */
8734 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8735 TYPE_ARG_TYPES (t) = argtypes;
8736
8737 /* If we already have such a type, use the old one. */
8738 hashval_t hash = type_hash_canon_hash (t);
8739 t = type_hash_canon (hash, t);
8740
8741 /* Set up the canonical type. */
8742 any_structural_p
8743 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8744 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8745 any_noncanonical_p
8746 = (TYPE_CANONICAL (basetype) != basetype
8747 || TYPE_CANONICAL (rettype) != rettype);
8748 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8749 &any_structural_p,
8750 &any_noncanonical_p);
8751 if (any_structural_p)
8752 SET_TYPE_STRUCTURAL_EQUALITY (t);
8753 else if (any_noncanonical_p)
8754 TYPE_CANONICAL (t)
8755 = build_method_type_directly (TYPE_CANONICAL (basetype),
8756 TYPE_CANONICAL (rettype),
8757 canon_argtypes);
8758 if (!COMPLETE_TYPE_P (t))
8759 layout_type (t);
8760
8761 return t;
8762 }
8763
8764 /* Construct, lay out and return the type of methods belonging to class
8765 BASETYPE and whose arguments and values are described by TYPE.
8766 If that type exists already, reuse it.
8767 TYPE must be a FUNCTION_TYPE node. */
8768
8769 tree
8770 build_method_type (tree basetype, tree type)
8771 {
8772 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8773
8774 return build_method_type_directly (basetype,
8775 TREE_TYPE (type),
8776 TYPE_ARG_TYPES (type));
8777 }
8778
8779 /* Construct, lay out and return the type of offsets to a value
8780 of type TYPE, within an object of type BASETYPE.
8781 If a suitable offset type exists already, reuse it. */
8782
8783 tree
8784 build_offset_type (tree basetype, tree type)
8785 {
8786 tree t;
8787
8788 /* Make a node of the sort we want. */
8789 t = make_node (OFFSET_TYPE);
8790
8791 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8792 TREE_TYPE (t) = type;
8793
8794 /* If we already have such a type, use the old one. */
8795 hashval_t hash = type_hash_canon_hash (t);
8796 t = type_hash_canon (hash, t);
8797
8798 if (!COMPLETE_TYPE_P (t))
8799 layout_type (t);
8800
8801 if (TYPE_CANONICAL (t) == t)
8802 {
8803 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8804 || TYPE_STRUCTURAL_EQUALITY_P (type))
8805 SET_TYPE_STRUCTURAL_EQUALITY (t);
8806 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8807 || TYPE_CANONICAL (type) != type)
8808 TYPE_CANONICAL (t)
8809 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8810 TYPE_CANONICAL (type));
8811 }
8812
8813 return t;
8814 }
8815
8816 /* Create a complex type whose components are COMPONENT_TYPE.
8817
8818 If NAMED is true, the type is given a TYPE_NAME. We do not always
8819 do so because this creates a DECL node and thus make the DECL_UIDs
8820 dependent on the type canonicalization hashtable, which is GC-ed,
8821 so the DECL_UIDs would not be stable wrt garbage collection. */
8822
8823 tree
8824 build_complex_type (tree component_type, bool named)
8825 {
8826 gcc_assert (INTEGRAL_TYPE_P (component_type)
8827 || SCALAR_FLOAT_TYPE_P (component_type)
8828 || FIXED_POINT_TYPE_P (component_type));
8829
8830 /* Make a node of the sort we want. */
8831 tree probe = make_node (COMPLEX_TYPE);
8832
8833 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8834
8835 /* If we already have such a type, use the old one. */
8836 hashval_t hash = type_hash_canon_hash (probe);
8837 tree t = type_hash_canon (hash, probe);
8838
8839 if (t == probe)
8840 {
8841 /* We created a new type. The hash insertion will have laid
8842 out the type. We need to check the canonicalization and
8843 maybe set the name. */
8844 gcc_checking_assert (COMPLETE_TYPE_P (t)
8845 && !TYPE_NAME (t)
8846 && TYPE_CANONICAL (t) == t);
8847
8848 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8849 SET_TYPE_STRUCTURAL_EQUALITY (t);
8850 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8851 TYPE_CANONICAL (t)
8852 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8853
8854 /* We need to create a name, since complex is a fundamental type. */
8855 if (named)
8856 {
8857 const char *name = NULL;
8858
8859 if (TREE_TYPE (t) == char_type_node)
8860 name = "complex char";
8861 else if (TREE_TYPE (t) == signed_char_type_node)
8862 name = "complex signed char";
8863 else if (TREE_TYPE (t) == unsigned_char_type_node)
8864 name = "complex unsigned char";
8865 else if (TREE_TYPE (t) == short_integer_type_node)
8866 name = "complex short int";
8867 else if (TREE_TYPE (t) == short_unsigned_type_node)
8868 name = "complex short unsigned int";
8869 else if (TREE_TYPE (t) == integer_type_node)
8870 name = "complex int";
8871 else if (TREE_TYPE (t) == unsigned_type_node)
8872 name = "complex unsigned int";
8873 else if (TREE_TYPE (t) == long_integer_type_node)
8874 name = "complex long int";
8875 else if (TREE_TYPE (t) == long_unsigned_type_node)
8876 name = "complex long unsigned int";
8877 else if (TREE_TYPE (t) == long_long_integer_type_node)
8878 name = "complex long long int";
8879 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8880 name = "complex long long unsigned int";
8881
8882 if (name != NULL)
8883 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8884 get_identifier (name), t);
8885 }
8886 }
8887
8888 return build_qualified_type (t, TYPE_QUALS (component_type));
8889 }
8890
8891 /* If TYPE is a real or complex floating-point type and the target
8892 does not directly support arithmetic on TYPE then return the wider
8893 type to be used for arithmetic on TYPE. Otherwise, return
8894 NULL_TREE. */
8895
8896 tree
8897 excess_precision_type (tree type)
8898 {
8899 /* The target can give two different responses to the question of
8900 which excess precision mode it would like depending on whether we
8901 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8902
8903 enum excess_precision_type requested_type
8904 = (flag_excess_precision == EXCESS_PRECISION_FAST
8905 ? EXCESS_PRECISION_TYPE_FAST
8906 : EXCESS_PRECISION_TYPE_STANDARD);
8907
8908 enum flt_eval_method target_flt_eval_method
8909 = targetm.c.excess_precision (requested_type);
8910
8911 /* The target should not ask for unpredictable float evaluation (though
8912 it might advertise that implicitly the evaluation is unpredictable,
8913 but we don't care about that here, it will have been reported
8914 elsewhere). If it does ask for unpredictable evaluation, we have
8915 nothing to do here. */
8916 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8917
8918 /* Nothing to do. The target has asked for all types we know about
8919 to be computed with their native precision and range. */
8920 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8921 return NULL_TREE;
8922
8923 /* The target will promote this type in a target-dependent way, so excess
8924 precision ought to leave it alone. */
8925 if (targetm.promoted_type (type) != NULL_TREE)
8926 return NULL_TREE;
8927
8928 machine_mode float16_type_mode = (float16_type_node
8929 ? TYPE_MODE (float16_type_node)
8930 : VOIDmode);
8931 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8932 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8933
8934 switch (TREE_CODE (type))
8935 {
8936 case REAL_TYPE:
8937 {
8938 machine_mode type_mode = TYPE_MODE (type);
8939 switch (target_flt_eval_method)
8940 {
8941 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8942 if (type_mode == float16_type_mode)
8943 return float_type_node;
8944 break;
8945 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8946 if (type_mode == float16_type_mode
8947 || type_mode == float_type_mode)
8948 return double_type_node;
8949 break;
8950 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8951 if (type_mode == float16_type_mode
8952 || type_mode == float_type_mode
8953 || type_mode == double_type_mode)
8954 return long_double_type_node;
8955 break;
8956 default:
8957 gcc_unreachable ();
8958 }
8959 break;
8960 }
8961 case COMPLEX_TYPE:
8962 {
8963 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8964 return NULL_TREE;
8965 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8966 switch (target_flt_eval_method)
8967 {
8968 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8969 if (type_mode == float16_type_mode)
8970 return complex_float_type_node;
8971 break;
8972 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8973 if (type_mode == float16_type_mode
8974 || type_mode == float_type_mode)
8975 return complex_double_type_node;
8976 break;
8977 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8978 if (type_mode == float16_type_mode
8979 || type_mode == float_type_mode
8980 || type_mode == double_type_mode)
8981 return complex_long_double_type_node;
8982 break;
8983 default:
8984 gcc_unreachable ();
8985 }
8986 break;
8987 }
8988 default:
8989 break;
8990 }
8991
8992 return NULL_TREE;
8993 }
8994 \f
8995 /* Return OP, stripped of any conversions to wider types as much as is safe.
8996 Converting the value back to OP's type makes a value equivalent to OP.
8997
8998 If FOR_TYPE is nonzero, we return a value which, if converted to
8999 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
9000
9001 OP must have integer, real or enumeral type. Pointers are not allowed!
9002
9003 There are some cases where the obvious value we could return
9004 would regenerate to OP if converted to OP's type,
9005 but would not extend like OP to wider types.
9006 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
9007 For example, if OP is (unsigned short)(signed char)-1,
9008 we avoid returning (signed char)-1 if FOR_TYPE is int,
9009 even though extending that to an unsigned short would regenerate OP,
9010 since the result of extending (signed char)-1 to (int)
9011 is different from (int) OP. */
9012
9013 tree
9014 get_unwidened (tree op, tree for_type)
9015 {
9016 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
9017 tree type = TREE_TYPE (op);
9018 unsigned final_prec
9019 = TYPE_PRECISION (for_type != 0 ? for_type : type);
9020 int uns
9021 = (for_type != 0 && for_type != type
9022 && final_prec > TYPE_PRECISION (type)
9023 && TYPE_UNSIGNED (type));
9024 tree win = op;
9025
9026 while (CONVERT_EXPR_P (op))
9027 {
9028 int bitschange;
9029
9030 /* TYPE_PRECISION on vector types has different meaning
9031 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
9032 so avoid them here. */
9033 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
9034 break;
9035
9036 bitschange = TYPE_PRECISION (TREE_TYPE (op))
9037 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
9038
9039 /* Truncations are many-one so cannot be removed.
9040 Unless we are later going to truncate down even farther. */
9041 if (bitschange < 0
9042 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
9043 break;
9044
9045 /* See what's inside this conversion. If we decide to strip it,
9046 we will set WIN. */
9047 op = TREE_OPERAND (op, 0);
9048
9049 /* If we have not stripped any zero-extensions (uns is 0),
9050 we can strip any kind of extension.
9051 If we have previously stripped a zero-extension,
9052 only zero-extensions can safely be stripped.
9053 Any extension can be stripped if the bits it would produce
9054 are all going to be discarded later by truncating to FOR_TYPE. */
9055
9056 if (bitschange > 0)
9057 {
9058 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
9059 win = op;
9060 /* TYPE_UNSIGNED says whether this is a zero-extension.
9061 Let's avoid computing it if it does not affect WIN
9062 and if UNS will not be needed again. */
9063 if ((uns
9064 || CONVERT_EXPR_P (op))
9065 && TYPE_UNSIGNED (TREE_TYPE (op)))
9066 {
9067 uns = 1;
9068 win = op;
9069 }
9070 }
9071 }
9072
9073 /* If we finally reach a constant see if it fits in sth smaller and
9074 in that case convert it. */
9075 if (TREE_CODE (win) == INTEGER_CST)
9076 {
9077 tree wtype = TREE_TYPE (win);
9078 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
9079 if (for_type)
9080 prec = MAX (prec, final_prec);
9081 if (prec < TYPE_PRECISION (wtype))
9082 {
9083 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
9084 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
9085 win = fold_convert (t, win);
9086 }
9087 }
9088
9089 return win;
9090 }
9091 \f
9092 /* Return OP or a simpler expression for a narrower value
9093 which can be sign-extended or zero-extended to give back OP.
9094 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
9095 or 0 if the value should be sign-extended. */
9096
9097 tree
9098 get_narrower (tree op, int *unsignedp_ptr)
9099 {
9100 int uns = 0;
9101 int first = 1;
9102 tree win = op;
9103 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
9104
9105 while (TREE_CODE (op) == NOP_EXPR)
9106 {
9107 int bitschange
9108 = (TYPE_PRECISION (TREE_TYPE (op))
9109 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
9110
9111 /* Truncations are many-one so cannot be removed. */
9112 if (bitschange < 0)
9113 break;
9114
9115 /* See what's inside this conversion. If we decide to strip it,
9116 we will set WIN. */
9117
9118 if (bitschange > 0)
9119 {
9120 op = TREE_OPERAND (op, 0);
9121 /* An extension: the outermost one can be stripped,
9122 but remember whether it is zero or sign extension. */
9123 if (first)
9124 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9125 /* Otherwise, if a sign extension has been stripped,
9126 only sign extensions can now be stripped;
9127 if a zero extension has been stripped, only zero-extensions. */
9128 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9129 break;
9130 first = 0;
9131 }
9132 else /* bitschange == 0 */
9133 {
9134 /* A change in nominal type can always be stripped, but we must
9135 preserve the unsignedness. */
9136 if (first)
9137 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9138 first = 0;
9139 op = TREE_OPERAND (op, 0);
9140 /* Keep trying to narrow, but don't assign op to win if it
9141 would turn an integral type into something else. */
9142 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
9143 continue;
9144 }
9145
9146 win = op;
9147 }
9148
9149 if (TREE_CODE (op) == COMPONENT_REF
9150 /* Since type_for_size always gives an integer type. */
9151 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9152 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9153 /* Ensure field is laid out already. */
9154 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9155 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9156 {
9157 unsigned HOST_WIDE_INT innerprec
9158 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9159 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9160 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9161 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9162
9163 /* We can get this structure field in a narrower type that fits it,
9164 but the resulting extension to its nominal type (a fullword type)
9165 must satisfy the same conditions as for other extensions.
9166
9167 Do this only for fields that are aligned (not bit-fields),
9168 because when bit-field insns will be used there is no
9169 advantage in doing this. */
9170
9171 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9172 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9173 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9174 && type != 0)
9175 {
9176 if (first)
9177 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9178 win = fold_convert (type, op);
9179 }
9180 }
9181
9182 *unsignedp_ptr = uns;
9183 return win;
9184 }
9185 \f
9186 /* Return true if integer constant C has a value that is permissible
9187 for TYPE, an integral type. */
9188
9189 bool
9190 int_fits_type_p (const_tree c, const_tree type)
9191 {
9192 tree type_low_bound, type_high_bound;
9193 bool ok_for_low_bound, ok_for_high_bound;
9194 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9195
9196 /* Non-standard boolean types can have arbitrary precision but various
9197 transformations assume that they can only take values 0 and +/-1. */
9198 if (TREE_CODE (type) == BOOLEAN_TYPE)
9199 return wi::fits_to_boolean_p (wi::to_wide (c), type);
9200
9201 retry:
9202 type_low_bound = TYPE_MIN_VALUE (type);
9203 type_high_bound = TYPE_MAX_VALUE (type);
9204
9205 /* If at least one bound of the type is a constant integer, we can check
9206 ourselves and maybe make a decision. If no such decision is possible, but
9207 this type is a subtype, try checking against that. Otherwise, use
9208 fits_to_tree_p, which checks against the precision.
9209
9210 Compute the status for each possibly constant bound, and return if we see
9211 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
9212 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
9213 for "constant known to fit". */
9214
9215 /* Check if c >= type_low_bound. */
9216 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9217 {
9218 if (tree_int_cst_lt (c, type_low_bound))
9219 return false;
9220 ok_for_low_bound = true;
9221 }
9222 else
9223 ok_for_low_bound = false;
9224
9225 /* Check if c <= type_high_bound. */
9226 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9227 {
9228 if (tree_int_cst_lt (type_high_bound, c))
9229 return false;
9230 ok_for_high_bound = true;
9231 }
9232 else
9233 ok_for_high_bound = false;
9234
9235 /* If the constant fits both bounds, the result is known. */
9236 if (ok_for_low_bound && ok_for_high_bound)
9237 return true;
9238
9239 /* Perform some generic filtering which may allow making a decision
9240 even if the bounds are not constant. First, negative integers
9241 never fit in unsigned types, */
9242 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
9243 return false;
9244
9245 /* Second, narrower types always fit in wider ones. */
9246 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9247 return true;
9248
9249 /* Third, unsigned integers with top bit set never fit signed types. */
9250 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9251 {
9252 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
9253 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9254 {
9255 /* When a tree_cst is converted to a wide-int, the precision
9256 is taken from the type. However, if the precision of the
9257 mode underneath the type is smaller than that, it is
9258 possible that the value will not fit. The test below
9259 fails if any bit is set between the sign bit of the
9260 underlying mode and the top bit of the type. */
9261 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
9262 return false;
9263 }
9264 else if (wi::neg_p (wi::to_wide (c)))
9265 return false;
9266 }
9267
9268 /* If we haven't been able to decide at this point, there nothing more we
9269 can check ourselves here. Look at the base type if we have one and it
9270 has the same precision. */
9271 if (TREE_CODE (type) == INTEGER_TYPE
9272 && TREE_TYPE (type) != 0
9273 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9274 {
9275 type = TREE_TYPE (type);
9276 goto retry;
9277 }
9278
9279 /* Or to fits_to_tree_p, if nothing else. */
9280 return wi::fits_to_tree_p (wi::to_wide (c), type);
9281 }
9282
9283 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9284 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9285 represented (assuming two's-complement arithmetic) within the bit
9286 precision of the type are returned instead. */
9287
9288 void
9289 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9290 {
9291 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9292 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9293 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9294 else
9295 {
9296 if (TYPE_UNSIGNED (type))
9297 mpz_set_ui (min, 0);
9298 else
9299 {
9300 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9301 wi::to_mpz (mn, min, SIGNED);
9302 }
9303 }
9304
9305 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9306 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9307 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9308 else
9309 {
9310 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9311 wi::to_mpz (mn, max, TYPE_SIGN (type));
9312 }
9313 }
9314
9315 /* Return true if VAR is an automatic variable. */
9316
9317 bool
9318 auto_var_p (const_tree var)
9319 {
9320 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9321 || TREE_CODE (var) == PARM_DECL)
9322 && ! TREE_STATIC (var))
9323 || TREE_CODE (var) == RESULT_DECL);
9324 }
9325
9326 /* Return true if VAR is an automatic variable defined in function FN. */
9327
9328 bool
9329 auto_var_in_fn_p (const_tree var, const_tree fn)
9330 {
9331 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9332 && (auto_var_p (var)
9333 || TREE_CODE (var) == LABEL_DECL));
9334 }
9335
9336 /* Subprogram of following function. Called by walk_tree.
9337
9338 Return *TP if it is an automatic variable or parameter of the
9339 function passed in as DATA. */
9340
9341 static tree
9342 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9343 {
9344 tree fn = (tree) data;
9345
9346 if (TYPE_P (*tp))
9347 *walk_subtrees = 0;
9348
9349 else if (DECL_P (*tp)
9350 && auto_var_in_fn_p (*tp, fn))
9351 return *tp;
9352
9353 return NULL_TREE;
9354 }
9355
9356 /* Returns true if T is, contains, or refers to a type with variable
9357 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9358 arguments, but not the return type. If FN is nonzero, only return
9359 true if a modifier of the type or position of FN is a variable or
9360 parameter inside FN.
9361
9362 This concept is more general than that of C99 'variably modified types':
9363 in C99, a struct type is never variably modified because a VLA may not
9364 appear as a structure member. However, in GNU C code like:
9365
9366 struct S { int i[f()]; };
9367
9368 is valid, and other languages may define similar constructs. */
9369
9370 bool
9371 variably_modified_type_p (tree type, tree fn)
9372 {
9373 tree t;
9374
9375 /* Test if T is either variable (if FN is zero) or an expression containing
9376 a variable in FN. If TYPE isn't gimplified, return true also if
9377 gimplify_one_sizepos would gimplify the expression into a local
9378 variable. */
9379 #define RETURN_TRUE_IF_VAR(T) \
9380 do { tree _t = (T); \
9381 if (_t != NULL_TREE \
9382 && _t != error_mark_node \
9383 && !CONSTANT_CLASS_P (_t) \
9384 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9385 && (!fn \
9386 || (!TYPE_SIZES_GIMPLIFIED (type) \
9387 && (TREE_CODE (_t) != VAR_DECL \
9388 && !CONTAINS_PLACEHOLDER_P (_t))) \
9389 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9390 return true; } while (0)
9391
9392 if (type == error_mark_node)
9393 return false;
9394
9395 /* If TYPE itself has variable size, it is variably modified. */
9396 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9397 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9398
9399 switch (TREE_CODE (type))
9400 {
9401 case POINTER_TYPE:
9402 case REFERENCE_TYPE:
9403 case VECTOR_TYPE:
9404 /* Ada can have pointer types refering to themselves indirectly. */
9405 if (TREE_VISITED (type))
9406 return false;
9407 TREE_VISITED (type) = true;
9408 if (variably_modified_type_p (TREE_TYPE (type), fn))
9409 {
9410 TREE_VISITED (type) = false;
9411 return true;
9412 }
9413 TREE_VISITED (type) = false;
9414 break;
9415
9416 case FUNCTION_TYPE:
9417 case METHOD_TYPE:
9418 /* If TYPE is a function type, it is variably modified if the
9419 return type is variably modified. */
9420 if (variably_modified_type_p (TREE_TYPE (type), fn))
9421 return true;
9422 break;
9423
9424 case INTEGER_TYPE:
9425 case REAL_TYPE:
9426 case FIXED_POINT_TYPE:
9427 case ENUMERAL_TYPE:
9428 case BOOLEAN_TYPE:
9429 /* Scalar types are variably modified if their end points
9430 aren't constant. */
9431 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9432 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9433 break;
9434
9435 case RECORD_TYPE:
9436 case UNION_TYPE:
9437 case QUAL_UNION_TYPE:
9438 /* We can't see if any of the fields are variably-modified by the
9439 definition we normally use, since that would produce infinite
9440 recursion via pointers. */
9441 /* This is variably modified if some field's type is. */
9442 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9443 if (TREE_CODE (t) == FIELD_DECL)
9444 {
9445 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9446 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9447 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9448
9449 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9450 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9451 }
9452 break;
9453
9454 case ARRAY_TYPE:
9455 /* Do not call ourselves to avoid infinite recursion. This is
9456 variably modified if the element type is. */
9457 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9458 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9459 break;
9460
9461 default:
9462 break;
9463 }
9464
9465 /* The current language may have other cases to check, but in general,
9466 all other types are not variably modified. */
9467 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9468
9469 #undef RETURN_TRUE_IF_VAR
9470 }
9471
9472 /* Given a DECL or TYPE, return the scope in which it was declared, or
9473 NULL_TREE if there is no containing scope. */
9474
9475 tree
9476 get_containing_scope (const_tree t)
9477 {
9478 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9479 }
9480
9481 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9482
9483 const_tree
9484 get_ultimate_context (const_tree decl)
9485 {
9486 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9487 {
9488 if (TREE_CODE (decl) == BLOCK)
9489 decl = BLOCK_SUPERCONTEXT (decl);
9490 else
9491 decl = get_containing_scope (decl);
9492 }
9493 return decl;
9494 }
9495
/* Return the innermost context enclosing DECL that is
   a FUNCTION_DECL, or zero if none.  */

tree
decl_function_context (const_tree decl)
{
  tree context;

  if (TREE_CODE (decl) == ERROR_MARK)
    return 0;

  /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
     where we look up the function at runtime.  Such functions always take
     a first argument of type 'pointer to real context'.

     C++ should really be fixed to use DECL_CONTEXT for the real context,
     and use something else for the "virtual context".  */
  else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
    context
      = TYPE_MAIN_VARIANT
	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
  else
    context = DECL_CONTEXT (decl);

  /* Walk outward through BLOCKs and other scopes until a FUNCTION_DECL
     (or NULL_TREE, meaning no enclosing function) is reached.  */
  while (context && TREE_CODE (context) != FUNCTION_DECL)
    {
      if (TREE_CODE (context) == BLOCK)
	context = BLOCK_SUPERCONTEXT (context);
      else
	context = get_containing_scope (context);
    }

  return context;
}
9530
9531 /* Return the innermost context enclosing DECL that is
9532 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9533 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9534
9535 tree
9536 decl_type_context (const_tree decl)
9537 {
9538 tree context = DECL_CONTEXT (decl);
9539
9540 while (context)
9541 switch (TREE_CODE (context))
9542 {
9543 case NAMESPACE_DECL:
9544 case TRANSLATION_UNIT_DECL:
9545 return NULL_TREE;
9546
9547 case RECORD_TYPE:
9548 case UNION_TYPE:
9549 case QUAL_UNION_TYPE:
9550 return context;
9551
9552 case TYPE_DECL:
9553 case FUNCTION_DECL:
9554 context = DECL_CONTEXT (context);
9555 break;
9556
9557 case BLOCK:
9558 context = BLOCK_SUPERCONTEXT (context);
9559 break;
9560
9561 default:
9562 gcc_unreachable ();
9563 }
9564
9565 return NULL_TREE;
9566 }
9567
9568 /* CALL is a CALL_EXPR. Return the declaration for the function
9569 called, or NULL_TREE if the called function cannot be
9570 determined. */
9571
9572 tree
9573 get_callee_fndecl (const_tree call)
9574 {
9575 tree addr;
9576
9577 if (call == error_mark_node)
9578 return error_mark_node;
9579
9580 /* It's invalid to call this function with anything but a
9581 CALL_EXPR. */
9582 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9583
9584 /* The first operand to the CALL is the address of the function
9585 called. */
9586 addr = CALL_EXPR_FN (call);
9587
9588 /* If there is no function, return early. */
9589 if (addr == NULL_TREE)
9590 return NULL_TREE;
9591
9592 STRIP_NOPS (addr);
9593
9594 /* If this is a readonly function pointer, extract its initial value. */
9595 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9596 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9597 && DECL_INITIAL (addr))
9598 addr = DECL_INITIAL (addr);
9599
9600 /* If the address is just `&f' for some function `f', then we know
9601 that `f' is being called. */
9602 if (TREE_CODE (addr) == ADDR_EXPR
9603 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9604 return TREE_OPERAND (addr, 0);
9605
9606 /* We couldn't figure out what was being called. */
9607 return NULL_TREE;
9608 }
9609
9610 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9611 return the associated function code, otherwise return CFN_LAST. */
9612
9613 combined_fn
9614 get_call_combined_fn (const_tree call)
9615 {
9616 /* It's invalid to call this function with anything but a CALL_EXPR. */
9617 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9618
9619 if (!CALL_EXPR_FN (call))
9620 return as_combined_fn (CALL_EXPR_IFN (call));
9621
9622 tree fndecl = get_callee_fndecl (call);
9623 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9624 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9625
9626 return CFN_LAST;
9627 }
9628
9629 /* Comparator of indices based on tree_node_counts. */
9630
9631 static int
9632 tree_nodes_cmp (const void *p1, const void *p2)
9633 {
9634 const unsigned *n1 = (const unsigned *)p1;
9635 const unsigned *n2 = (const unsigned *)p2;
9636
9637 return tree_node_counts[*n1] - tree_node_counts[*n2];
9638 }
9639
9640 /* Comparator of indices based on tree_code_counts. */
9641
9642 static int
9643 tree_codes_cmp (const void *p1, const void *p2)
9644 {
9645 const unsigned *n1 = (const unsigned *)p1;
9646 const unsigned *n2 = (const unsigned *)p2;
9647
9648 return tree_code_counts[*n1] - tree_code_counts[*n2];
9649 }
9650
9651 #define TREE_MEM_USAGE_SPACES 40
9652
9653 /* Print debugging information about tree nodes generated during the compile,
9654 and any language-specific information. */
9655
9656 void
9657 dump_tree_statistics (void)
9658 {
9659 if (GATHER_STATISTICS)
9660 {
9661 uint64_t total_nodes, total_bytes;
9662 fprintf (stderr, "\nKind Nodes Bytes\n");
9663 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9664 total_nodes = total_bytes = 0;
9665
9666 {
9667 auto_vec<unsigned> indices (all_kinds);
9668 for (unsigned i = 0; i < all_kinds; i++)
9669 indices.quick_push (i);
9670 indices.qsort (tree_nodes_cmp);
9671
9672 for (unsigned i = 0; i < (int) all_kinds; i++)
9673 {
9674 unsigned j = indices[i];
9675 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9676 tree_node_kind_names[i], SIZE_AMOUNT (tree_node_counts[j]),
9677 SIZE_AMOUNT (tree_node_sizes[j]));
9678 total_nodes += tree_node_counts[j];
9679 total_bytes += tree_node_sizes[j];
9680 }
9681 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9682 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9683 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9684 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9685 }
9686
9687 {
9688 fprintf (stderr, "Code Nodes\n");
9689 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9690
9691 auto_vec<unsigned> indices (MAX_TREE_CODES);
9692 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9693 indices.quick_push (i);
9694 indices.qsort (tree_codes_cmp);
9695
9696 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9697 {
9698 unsigned j = indices[i];
9699 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9700 get_tree_code_name ((enum tree_code) j),
9701 SIZE_AMOUNT (tree_code_counts[j]));
9702 }
9703 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9704 fprintf (stderr, "\n");
9705 ssanames_print_statistics ();
9706 fprintf (stderr, "\n");
9707 phinodes_print_statistics ();
9708 fprintf (stderr, "\n");
9709 }
9710 }
9711 else
9712 fprintf (stderr, "(No per-node statistics)\n");
9713
9714 print_type_hash_statistics ();
9715 print_debug_expr_statistics ();
9716 print_value_expr_statistics ();
9717 lang_hooks.print_statistics ();
9718 }
9719 \f
9720 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9721
/* Fold the low BYTES bytes of VALUE into the running crc32 CHKSUM and
   return the result.  Works a nibble at a time using a 16-entry
   syndrome table for the CRC-32 polynomial 0x04c11db7.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the significant bytes, then consume two nibbles per
     byte from the most significant end.  */
  value <<= 32 - bytes * 8;
  unsigned nibbles = bytes * 2;
  while (nibbles--)
    {
      chksum = (chksum << 4) ^ syndromes[((value ^ chksum) >> 28) & 0xf];
      value <<= 4;
    }

  return chksum;
}
9751
/* Generate a crc32 of a NUL-terminated string; the terminating NUL
   byte is folded in as well.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  for (;;)
    {
      chksum = crc32_byte (chksum, *string);
      if (*string++ == '\0')
	break;
    }
  return chksum;
}
9762
/* P is a string that will be used in a symbol.  Mask out any characters
   that are not valid in that context: every character that is not
   alphanumeric (nor, where the target allows them in labels, `$' or
   `.') is overwritten in place with `_'.  */

void
clean_symbol_name (char *p)
{
  for (; *p; p++)
    if (! (ISALNUM (*p)
#ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
	   || *p == '$'
#endif
#ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but... */
	   || *p == '.'
#endif
	   ))
      *p = '_';
}
9780
/* Counter feeding make_anon_name; saved in the PCH so anonymous names
   stay unique across a precompiled-header boundary.  */
static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */

/* Create a unique anonymous identifier.  The identifier is still a
   valid assembly label.  */

tree
make_anon_name ()
{
  /* Prefer a leading character that cannot appear in a user-written
     identifier (`.', then `$'), falling back to `_' when the target
     allows neither in labels.  */
  const char *fmt =
#if !defined (NO_DOT_IN_LABEL)
    "."
#elif !defined (NO_DOLLAR_IN_LABEL)
    "$"
#else
    "_"
#endif
    "_anon_%d";

  char buf[24];
  int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
  gcc_checking_assert (len < int (sizeof (buf)));

  tree id = get_identifier_with_length (buf, len);
  IDENTIFIER_ANON_P (id) = true;

  return id;
}
9808
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (strncmp (type, "sub_", 4) == 0
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* 9 + 19 covers "_%08X_" plus a 64-bit hex seed; +1 for the NUL.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace characters that are not valid in a symbol.  */
  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
9883 \f
9884 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9885
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  Does not return.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: measure the buffer needed for the
     "expected A or B ..." message.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build the message in a stack buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    /* Empty code list: fall back to a vague message.  */
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9929
/* Complain that the tree code of NODE does match the expected 0
   terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
   the caller.  Does not return.  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: measure the buffer for "A or B ...".  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build the message in a stack buffer.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9966
/* Similar to tree_check_failed, except that we check for a class of tree
   code, given in CL.  Does not return.  */

void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9980
9981 /* Similar to tree_check_failed, except that instead of specifying a
9982 dozen codes, use the knowledge that they're all sequential. */
9983
9984 void
9985 tree_range_check_failed (const_tree node, const char *file, int line,
9986 const char *function, enum tree_code c1,
9987 enum tree_code c2)
9988 {
9989 char *buffer;
9990 unsigned length = 0;
9991 unsigned int c;
9992
9993 for (c = c1; c <= c2; ++c)
9994 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9995
9996 length += strlen ("expected ");
9997 buffer = (char *) alloca (length);
9998 length = 0;
9999
10000 for (c = c1; c <= c2; ++c)
10001 {
10002 const char *prefix = length ? " or " : "expected ";
10003
10004 strcpy (buffer + length, prefix);
10005 length += strlen (prefix);
10006 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
10007 length += strlen (get_tree_code_name ((enum tree_code) c));
10008 }
10009
10010 internal_error ("tree check: %s, have %s in %s, at %s:%d",
10011 buffer, get_tree_code_name (TREE_CODE (node)),
10012 function, trim_filename (file), line);
10013 }
10014
10015
/* Similar to tree_check_failed, except that we check that a tree does
   not have the specified code class, given in CL.  Does not return.  */

void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
			     const char *file, int line, const char *function)
{
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
10029
10030
/* Similar to tree_check_failed but applied to OMP_CLAUSE codes.
   Does not return.  */

void
omp_clause_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum omp_clause_code code)
{
  internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
		  "in %s, at %s:%d",
		  omp_clause_code_name[code],
		  get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
10043
10044
/* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.
   Does not return.  */

void
omp_clause_range_check_failed (const_tree node, const char *file, int line,
			       const char *function, enum omp_clause_code c1,
			       enum omp_clause_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* First pass: size the message buffer.  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (omp_clause_code_name[c]);

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Second pass: build "expected A or B or ...".  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, omp_clause_code_name[c]);
      length += strlen (omp_clause_code_name[c]);
    }

  /* NOTE(review): indexing omp_clause_code_name by TREE_CODE (node)
     looks suspect -- OMP_CLAUSE_CODE (node) would name the clause,
     while TREE_CODE of an OMP_CLAUSE node is OMP_CLAUSE itself.
     Confirm against the OMP_CLAUSE_RANGE_CHECK callers before
     changing the diagnostic.  */
  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, omp_clause_code_name[TREE_CODE (node)],
		  function, trim_filename (file), line);
}
10077
10078
/* Printable names of the tree-structure enumerators, generated by
   expanding each DEFTREESTRUCT entry of treestruct.def to its NAME
   string.  */
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Map a tree_node_structure_enum value EN to its printable name.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
10088
/* Similar to tree_class_check_failed, except that we check for
   whether CODE contains the tree structure identified by EN.
   Does not return.  */

void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
10103
10104
/* Similar to above, except that the check is for the bounds of a
   TREE_INT_CST's (dynamically sized) vector of elements.  (The old
   comment said TREE_VEC -- a copy-paste from the function below.)
   Does not return.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
10117
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  Does not return.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
10129
/* Similar to above, except that the check is for the bounds of the operand
   vector of an expression node EXP.  Does not return.  */

void
tree_operand_check_failed (int idx, const_tree exp, const char *file,
			   int line, const char *function)
{
  enum tree_code code = TREE_CODE (exp);
  internal_error
    ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
     idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
     function, trim_filename (file), line);
}
10143
/* Similar to above, except that the check is for the number of
   operands of an OMP_CLAUSE node T.  Does not return.  */

void
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
				 int line, const char *function)
{
  internal_error
    ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
     "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
     omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
     trim_filename (file), line);
}
10157 #endif /* ENABLE_TREE_CHECKING */
10158 \f
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  /* Vector types are built on the main variant of the element type.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    /* Build the canonical type from the canonical element type with
       VOIDmode; the recursion terminates once the element is its own
       canonical type and the mode is VOIDmode.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Share identical vector types through the type hash table.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
10197
10198 static tree
10199 make_or_reuse_type (unsigned size, int unsignedp)
10200 {
10201 int i;
10202
10203 if (size == INT_TYPE_SIZE)
10204 return unsignedp ? unsigned_type_node : integer_type_node;
10205 if (size == CHAR_TYPE_SIZE)
10206 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10207 if (size == SHORT_TYPE_SIZE)
10208 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10209 if (size == LONG_TYPE_SIZE)
10210 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10211 if (size == LONG_LONG_TYPE_SIZE)
10212 return (unsignedp ? long_long_unsigned_type_node
10213 : long_long_integer_type_node);
10214
10215 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10216 if (size == int_n_data[i].bitsize
10217 && int_n_enabled_p[i])
10218 return (unsignedp ? int_n_trees[i].unsigned_type
10219 : int_n_trees[i].signed_type);
10220
10221 if (unsignedp)
10222 return make_unsigned_type (size);
10223 else
10224 return make_signed_type (size);
10225 }
10226
10227 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10228
10229 static tree
10230 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10231 {
10232 if (satp)
10233 {
10234 if (size == SHORT_FRACT_TYPE_SIZE)
10235 return unsignedp ? sat_unsigned_short_fract_type_node
10236 : sat_short_fract_type_node;
10237 if (size == FRACT_TYPE_SIZE)
10238 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10239 if (size == LONG_FRACT_TYPE_SIZE)
10240 return unsignedp ? sat_unsigned_long_fract_type_node
10241 : sat_long_fract_type_node;
10242 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10243 return unsignedp ? sat_unsigned_long_long_fract_type_node
10244 : sat_long_long_fract_type_node;
10245 }
10246 else
10247 {
10248 if (size == SHORT_FRACT_TYPE_SIZE)
10249 return unsignedp ? unsigned_short_fract_type_node
10250 : short_fract_type_node;
10251 if (size == FRACT_TYPE_SIZE)
10252 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10253 if (size == LONG_FRACT_TYPE_SIZE)
10254 return unsignedp ? unsigned_long_fract_type_node
10255 : long_fract_type_node;
10256 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10257 return unsignedp ? unsigned_long_long_fract_type_node
10258 : long_long_fract_type_node;
10259 }
10260
10261 return make_fract_type (size, unsignedp, satp);
10262 }
10263
10264 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10265
10266 static tree
10267 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10268 {
10269 if (satp)
10270 {
10271 if (size == SHORT_ACCUM_TYPE_SIZE)
10272 return unsignedp ? sat_unsigned_short_accum_type_node
10273 : sat_short_accum_type_node;
10274 if (size == ACCUM_TYPE_SIZE)
10275 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10276 if (size == LONG_ACCUM_TYPE_SIZE)
10277 return unsignedp ? sat_unsigned_long_accum_type_node
10278 : sat_long_accum_type_node;
10279 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10280 return unsignedp ? sat_unsigned_long_long_accum_type_node
10281 : sat_long_long_accum_type_node;
10282 }
10283 else
10284 {
10285 if (size == SHORT_ACCUM_TYPE_SIZE)
10286 return unsignedp ? unsigned_short_accum_type_node
10287 : short_accum_type_node;
10288 if (size == ACCUM_TYPE_SIZE)
10289 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10290 if (size == LONG_ACCUM_TYPE_SIZE)
10291 return unsignedp ? unsigned_long_accum_type_node
10292 : long_accum_type_node;
10293 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10294 return unsignedp ? unsigned_long_long_accum_type_node
10295 : long_long_accum_type_node;
10296 }
10297
10298 return make_accum_type (size, unsignedp, satp);
10299 }
10300
10301
10302 /* Create an atomic variant node for TYPE. This routine is called
10303 during initialization of data types to create the 5 basic atomic
10304 types. The generic build_variant_type function requires these to
10305 already be set up in order to function properly, so cannot be
10306 called from there. If ALIGN is non-zero, then ensure alignment is
10307 overridden to this value. */
10308
10309 static tree
10310 build_atomic_base (tree type, unsigned int align)
10311 {
10312 tree t;
10313
10314 /* Make sure its not already registered. */
10315 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10316 return t;
10317
10318 t = build_variant_type_copy (type);
10319 set_type_quals (t, TYPE_QUAL_ATOMIC);
10320
10321 if (align)
10322 SET_TYPE_ALIGN (t, align);
10323
10324 return t;
10325 }
10326
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry
   gives the bit size N and whether the entry is an extended
   (_FloatNx) type.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
  {
    { 16, false },
    { 32, false },
    { 64, false },
    { 128, false },
    { 32, true },
    { 64, true },
    { 128, true },
  };
10339
10340
10341 /* Create nodes for all integer types (and error_mark_node) using the sizes
10342 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10343
10344 void
10345 build_common_tree_nodes (bool signed_char)
10346 {
10347 int i;
10348
10349 error_mark_node = make_node (ERROR_MARK);
10350 TREE_TYPE (error_mark_node) = error_mark_node;
10351
10352 initialize_sizetypes ();
10353
10354 /* Define both `signed char' and `unsigned char'. */
10355 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10356 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10357 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10358 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10359
10360 /* Define `char', which is like either `signed char' or `unsigned char'
10361 but not the same as either. */
10362 char_type_node
10363 = (signed_char
10364 ? make_signed_type (CHAR_TYPE_SIZE)
10365 : make_unsigned_type (CHAR_TYPE_SIZE));
10366 TYPE_STRING_FLAG (char_type_node) = 1;
10367
10368 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10369 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10370 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10371 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10372 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10373 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10374 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10375 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10376
10377 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10378 {
10379 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10380 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10381
10382 if (int_n_enabled_p[i])
10383 {
10384 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10385 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10386 }
10387 }
10388
10389 /* Define a boolean type. This type only represents boolean values but
10390 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10391 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10392 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10393 TYPE_PRECISION (boolean_type_node) = 1;
10394 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10395
10396 /* Define what type to use for size_t. */
10397 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10398 size_type_node = unsigned_type_node;
10399 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10400 size_type_node = long_unsigned_type_node;
10401 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10402 size_type_node = long_long_unsigned_type_node;
10403 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10404 size_type_node = short_unsigned_type_node;
10405 else
10406 {
10407 int i;
10408
10409 size_type_node = NULL_TREE;
10410 for (i = 0; i < NUM_INT_N_ENTS; i++)
10411 if (int_n_enabled_p[i])
10412 {
10413 char name[50], altname[50];
10414 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10415 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10416
10417 if (strcmp (name, SIZE_TYPE) == 0
10418 || strcmp (altname, SIZE_TYPE) == 0)
10419 {
10420 size_type_node = int_n_trees[i].unsigned_type;
10421 }
10422 }
10423 if (size_type_node == NULL_TREE)
10424 gcc_unreachable ();
10425 }
10426
10427 /* Define what type to use for ptrdiff_t. */
10428 if (strcmp (PTRDIFF_TYPE, "int") == 0)
10429 ptrdiff_type_node = integer_type_node;
10430 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10431 ptrdiff_type_node = long_integer_type_node;
10432 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10433 ptrdiff_type_node = long_long_integer_type_node;
10434 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10435 ptrdiff_type_node = short_integer_type_node;
10436 else
10437 {
10438 ptrdiff_type_node = NULL_TREE;
10439 for (int i = 0; i < NUM_INT_N_ENTS; i++)
10440 if (int_n_enabled_p[i])
10441 {
10442 char name[50], altname[50];
10443 sprintf (name, "__int%d", int_n_data[i].bitsize);
10444 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10445
10446 if (strcmp (name, PTRDIFF_TYPE) == 0
10447 || strcmp (altname, PTRDIFF_TYPE) == 0)
10448 ptrdiff_type_node = int_n_trees[i].signed_type;
10449 }
10450 if (ptrdiff_type_node == NULL_TREE)
10451 gcc_unreachable ();
10452 }
10453
10454 /* Fill in the rest of the sized types. Reuse existing type nodes
10455 when possible. */
10456 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10457 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10458 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10459 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10460 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10461
10462 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10463 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10464 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10465 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10466 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10467
10468 /* Don't call build_qualified type for atomics. That routine does
10469 special processing for atomics, and until they are initialized
10470 it's better not to make that call.
10471
10472 Check to see if there is a target override for atomic types. */
10473
10474 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10475 targetm.atomic_align_for_mode (QImode));
10476 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10477 targetm.atomic_align_for_mode (HImode));
10478 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10479 targetm.atomic_align_for_mode (SImode));
10480 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10481 targetm.atomic_align_for_mode (DImode));
10482 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10483 targetm.atomic_align_for_mode (TImode));
10484
10485 access_public_node = get_identifier ("public");
10486 access_protected_node = get_identifier ("protected");
10487 access_private_node = get_identifier ("private");
10488
10489 /* Define these next since types below may used them. */
10490 integer_zero_node = build_int_cst (integer_type_node, 0);
10491 integer_one_node = build_int_cst (integer_type_node, 1);
10492 integer_three_node = build_int_cst (integer_type_node, 3);
10493 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10494
10495 size_zero_node = size_int (0);
10496 size_one_node = size_int (1);
10497 bitsize_zero_node = bitsize_int (0);
10498 bitsize_one_node = bitsize_int (1);
10499 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10500
10501 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10502 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10503
10504 void_type_node = make_node (VOID_TYPE);
10505 layout_type (void_type_node);
10506
10507 /* We are not going to have real types in C with less than byte alignment,
10508 so we might as well not have any types that claim to have it. */
10509 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10510 TYPE_USER_ALIGN (void_type_node) = 0;
10511
10512 void_node = make_node (VOID_CST);
10513 TREE_TYPE (void_node) = void_type_node;
10514
10515 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10516 layout_type (TREE_TYPE (null_pointer_node));
10517
10518 ptr_type_node = build_pointer_type (void_type_node);
10519 const_ptr_type_node
10520 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10521 for (unsigned i = 0;
10522 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10523 ++i)
10524 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10525
10526 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10527
10528 float_type_node = make_node (REAL_TYPE);
10529 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10530 layout_type (float_type_node);
10531
10532 double_type_node = make_node (REAL_TYPE);
10533 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10534 layout_type (double_type_node);
10535
10536 long_double_type_node = make_node (REAL_TYPE);
10537 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10538 layout_type (long_double_type_node);
10539
10540 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10541 {
10542 int n = floatn_nx_types[i].n;
10543 bool extended = floatn_nx_types[i].extended;
10544 scalar_float_mode mode;
10545 if (!targetm.floatn_mode (n, extended).exists (&mode))
10546 continue;
10547 int precision = GET_MODE_PRECISION (mode);
10548 /* Work around the rs6000 KFmode having precision 113 not
10549 128. */
10550 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10551 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10552 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10553 if (!extended)
10554 gcc_assert (min_precision == n);
10555 if (precision < min_precision)
10556 precision = min_precision;
10557 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10558 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10559 layout_type (FLOATN_NX_TYPE_NODE (i));
10560 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10561 }
10562
10563 float_ptr_type_node = build_pointer_type (float_type_node);
10564 double_ptr_type_node = build_pointer_type (double_type_node);
10565 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10566 integer_ptr_type_node = build_pointer_type (integer_type_node);
10567
10568 /* Fixed size integer types. */
10569 uint16_type_node = make_or_reuse_type (16, 1);
10570 uint32_type_node = make_or_reuse_type (32, 1);
10571 uint64_type_node = make_or_reuse_type (64, 1);
10572
10573 /* Decimal float types. */
10574 dfloat32_type_node = make_node (REAL_TYPE);
10575 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10576 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10577 layout_type (dfloat32_type_node);
10578 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10579
10580 dfloat64_type_node = make_node (REAL_TYPE);
10581 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10582 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10583 layout_type (dfloat64_type_node);
10584 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10585
10586 dfloat128_type_node = make_node (REAL_TYPE);
10587 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10588 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10589 layout_type (dfloat128_type_node);
10590 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10591
10592 complex_integer_type_node = build_complex_type (integer_type_node, true);
10593 complex_float_type_node = build_complex_type (float_type_node, true);
10594 complex_double_type_node = build_complex_type (double_type_node, true);
10595 complex_long_double_type_node = build_complex_type (long_double_type_node,
10596 true);
10597
10598 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10599 {
10600 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10601 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10602 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10603 }
10604
10605 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10606 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10607 sat_ ## KIND ## _type_node = \
10608 make_sat_signed_ ## KIND ## _type (SIZE); \
10609 sat_unsigned_ ## KIND ## _type_node = \
10610 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10611 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10612 unsigned_ ## KIND ## _type_node = \
10613 make_unsigned_ ## KIND ## _type (SIZE);
10614
10615 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10616 sat_ ## WIDTH ## KIND ## _type_node = \
10617 make_sat_signed_ ## KIND ## _type (SIZE); \
10618 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10619 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10620 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10621 unsigned_ ## WIDTH ## KIND ## _type_node = \
10622 make_unsigned_ ## KIND ## _type (SIZE);
10623
10624 /* Make fixed-point type nodes based on four different widths. */
10625 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10626 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10627 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10628 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10629 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10630
10631 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10632 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10633 NAME ## _type_node = \
10634 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10635 u ## NAME ## _type_node = \
10636 make_or_reuse_unsigned_ ## KIND ## _type \
10637 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10638 sat_ ## NAME ## _type_node = \
10639 make_or_reuse_sat_signed_ ## KIND ## _type \
10640 (GET_MODE_BITSIZE (MODE ## mode)); \
10641 sat_u ## NAME ## _type_node = \
10642 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10643 (GET_MODE_BITSIZE (U ## MODE ## mode));
10644
10645 /* Fixed-point type and mode nodes. */
10646 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10647 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10648 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10649 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10650 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10651 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10652 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10653 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10654 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10655 MAKE_FIXED_MODE_NODE (accum, da, DA)
10656 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10657
10658 {
10659 tree t = targetm.build_builtin_va_list ();
10660
10661 /* Many back-ends define record types without setting TYPE_NAME.
10662 If we copied the record type here, we'd keep the original
10663 record type without a name. This breaks name mangling. So,
10664 don't copy record types and let c_common_nodes_and_builtins()
10665 declare the type to be __builtin_va_list. */
10666 if (TREE_CODE (t) != RECORD_TYPE)
10667 t = build_variant_type_copy (t);
10668
10669 va_list_type_node = t;
10670 }
10671
10672 /* SCEV analyzer global shared trees. */
10673 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10674 TREE_TYPE (chrec_dont_know) = void_type_node;
10675 chrec_known = make_node (SCEV_KNOWN);
10676 TREE_TYPE (chrec_known) = void_type_node;
10677 }
10678
10679 /* Modify DECL for given flags.
10680 TM_PURE attribute is set only on types, so the function will modify
10681 DECL's type when ECF_TM_PURE is used. */
10682
10683 void
10684 set_call_expr_flags (tree decl, int flags)
10685 {
10686 if (flags & ECF_NOTHROW)
10687 TREE_NOTHROW (decl) = 1;
10688 if (flags & ECF_CONST)
10689 TREE_READONLY (decl) = 1;
10690 if (flags & ECF_PURE)
10691 DECL_PURE_P (decl) = 1;
10692 if (flags & ECF_LOOPING_CONST_OR_PURE)
10693 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10694 if (flags & ECF_NOVOPS)
10695 DECL_IS_NOVOPS (decl) = 1;
10696 if (flags & ECF_NORETURN)
10697 TREE_THIS_VOLATILE (decl) = 1;
10698 if (flags & ECF_MALLOC)
10699 DECL_IS_MALLOC (decl) = 1;
10700 if (flags & ECF_RETURNS_TWICE)
10701 DECL_IS_RETURNS_TWICE (decl) = 1;
10702 if (flags & ECF_LEAF)
10703 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10704 NULL, DECL_ATTRIBUTES (decl));
10705 if (flags & ECF_COLD)
10706 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10707 NULL, DECL_ATTRIBUTES (decl));
10708 if (flags & ECF_RET1)
10709 DECL_ATTRIBUTES (decl)
10710 = tree_cons (get_identifier ("fn spec"),
10711 build_tree_list (NULL_TREE, build_string (1, "1")),
10712 DECL_ATTRIBUTES (decl));
10713 if ((flags & ECF_TM_PURE) && flag_tm)
10714 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10715 /* Looping const or pure is implied by noreturn.
10716 There is currently no way to declare looping const or looping pure alone. */
10717 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10718 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10719 }
10720
10721
10722 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10723
10724 static void
10725 local_define_builtin (const char *name, tree type, enum built_in_function code,
10726 const char *library_name, int ecf_flags)
10727 {
10728 tree decl;
10729
10730 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10731 library_name, NULL_TREE);
10732 set_call_expr_flags (decl, ecf_flags);
10733
10734 set_builtin_decl (code, decl, true);
10735 }
10736
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.

   Each builtin is only created when the front end has not already
   declared it explicitly (builtin_decl_explicit_p), so language-specific
   definitions always win.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* Control-flow sinks: __builtin_unreachable and abort.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* Memory primitives; ECF_RET1 records that they return their first
     argument.  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
			"__builtin_alloca_with_align_and_max",
			alloca_flags);

  /* Trampoline / function-descriptor helpers; the init variants share
     one (void *, void *, void *) -> void signature.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  /* Nonlocal-goto and setjmp machinery.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* Equality-only comparison helpers; all three share the memcmp-style
     signature built here.  */
  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
			"__builtin_strncmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
			"__builtin_strcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
    alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The library entry point depends on whether SJLJ or DWARF-style
     unwinding is in use.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  /* -finstrument-functions profiling hooks.  */
  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    /* One mul/div builtin pair per complex float mode, named after the
       lowercased mode name (e.g. __mulsc3 / __divsc3).  */
    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	/* For -ftrapping-math these should throw from a former
	   -fnon-call-exception stmt.  */
	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
11012
11013 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
11014 better way.
11015
11016 If we requested a pointer to a vector, build up the pointers that
11017 we stripped off while looking for the inner type. Similarly for
11018 return values from functions.
11019
11020 The argument TYPE is the top of the chain, and BOTTOM is the
11021 new type which we will point to. */
11022
11023 tree
11024 reconstruct_complex_type (tree type, tree bottom)
11025 {
11026 tree inner, outer;
11027
11028 if (TREE_CODE (type) == POINTER_TYPE)
11029 {
11030 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11031 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
11032 TYPE_REF_CAN_ALIAS_ALL (type));
11033 }
11034 else if (TREE_CODE (type) == REFERENCE_TYPE)
11035 {
11036 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11037 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
11038 TYPE_REF_CAN_ALIAS_ALL (type));
11039 }
11040 else if (TREE_CODE (type) == ARRAY_TYPE)
11041 {
11042 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11043 outer = build_array_type (inner, TYPE_DOMAIN (type));
11044 }
11045 else if (TREE_CODE (type) == FUNCTION_TYPE)
11046 {
11047 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11048 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
11049 }
11050 else if (TREE_CODE (type) == METHOD_TYPE)
11051 {
11052 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11053 /* The build_method_type_directly() routine prepends 'this' to argument list,
11054 so we must compensate by getting rid of it. */
11055 outer
11056 = build_method_type_directly
11057 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
11058 inner,
11059 TREE_CHAIN (TYPE_ARG_TYPES (type)));
11060 }
11061 else if (TREE_CODE (type) == OFFSET_TYPE)
11062 {
11063 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
11064 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
11065 }
11066 else
11067 return bottom;
11068
11069 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
11070 TYPE_QUALS (type));
11071 }
11072
11073 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
11074 the inner type. */
11075 tree
11076 build_vector_type_for_mode (tree innertype, machine_mode mode)
11077 {
11078 poly_int64 nunits;
11079 unsigned int bitsize;
11080
11081 switch (GET_MODE_CLASS (mode))
11082 {
11083 case MODE_VECTOR_BOOL:
11084 case MODE_VECTOR_INT:
11085 case MODE_VECTOR_FLOAT:
11086 case MODE_VECTOR_FRACT:
11087 case MODE_VECTOR_UFRACT:
11088 case MODE_VECTOR_ACCUM:
11089 case MODE_VECTOR_UACCUM:
11090 nunits = GET_MODE_NUNITS (mode);
11091 break;
11092
11093 case MODE_INT:
11094 /* Check that there are no leftover bits. */
11095 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
11096 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
11097 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
11098 break;
11099
11100 default:
11101 gcc_unreachable ();
11102 }
11103
11104 return make_vector_type (innertype, nunits, mode);
11105 }
11106
/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  */

tree
build_vector_type (tree innertype, poly_int64 nunits)
{
  /* No particular machine mode is requested (VOIDmode); contrast with
     build_vector_type_for_mode, which pins the vector to a mode.  */
  return make_vector_type (innertype, nunits, VOIDmode);
}
11115
11116 /* Build truth vector with specified length and number of units. */
11117
11118 tree
11119 build_truth_vector_type (poly_uint64 nunits, poly_uint64 vector_size)
11120 {
11121 machine_mode mask_mode
11122 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
11123
11124 poly_uint64 vsize;
11125 if (mask_mode == BLKmode)
11126 vsize = vector_size * BITS_PER_UNIT;
11127 else
11128 vsize = GET_MODE_BITSIZE (mask_mode);
11129
11130 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
11131
11132 tree bool_type = build_nonstandard_boolean_type (esize);
11133
11134 return make_vector_type (bool_type, nunits, mask_mode);
11135 }
11136
11137 /* Returns a vector type corresponding to a comparison of VECTYPE. */
11138
11139 tree
11140 build_same_sized_truth_vector_type (tree vectype)
11141 {
11142 if (VECTOR_BOOLEAN_TYPE_P (vectype))
11143 return vectype;
11144
11145 poly_uint64 size = GET_MODE_SIZE (TYPE_MODE (vectype));
11146
11147 if (known_eq (size, 0U))
11148 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
11149
11150 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
11151 }
11152
11153 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
11154
11155 tree
11156 build_opaque_vector_type (tree innertype, poly_int64 nunits)
11157 {
11158 tree t = make_vector_type (innertype, nunits, VOIDmode);
11159 tree cand;
11160 /* We always build the non-opaque variant before the opaque one,
11161 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
11162 cand = TYPE_NEXT_VARIANT (t);
11163 if (cand
11164 && TYPE_VECTOR_OPAQUE (cand)
11165 && check_qualified_type (cand, t, TYPE_QUALS (t)))
11166 return cand;
11167 /* Othewise build a variant type and make sure to queue it after
11168 the non-opaque type. */
11169 cand = build_distinct_type_copy (t);
11170 TYPE_VECTOR_OPAQUE (cand) = true;
11171 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
11172 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
11173 TYPE_NEXT_VARIANT (t) = cand;
11174 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
11175 return cand;
11176 }
11177
11178 /* Return the value of element I of VECTOR_CST T as a wide_int. */
11179
11180 wide_int
11181 vector_cst_int_elt (const_tree t, unsigned int i)
11182 {
11183 /* First handle elements that are directly encoded. */
11184 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11185 if (i < encoded_nelts)
11186 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
11187
11188 /* Identify the pattern that contains element I and work out the index of
11189 the last encoded element for that pattern. */
11190 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11191 unsigned int pattern = i % npatterns;
11192 unsigned int count = i / npatterns;
11193 unsigned int final_i = encoded_nelts - npatterns + pattern;
11194
11195 /* If there are no steps, the final encoded value is the right one. */
11196 if (!VECTOR_CST_STEPPED_P (t))
11197 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
11198
11199 /* Otherwise work out the value from the last two encoded elements. */
11200 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
11201 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
11202 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
11203 return wi::to_wide (v2) + (count - 2) * diff;
11204 }
11205
11206 /* Return the value of element I of VECTOR_CST T. */
11207
11208 tree
11209 vector_cst_elt (const_tree t, unsigned int i)
11210 {
11211 /* First handle elements that are directly encoded. */
11212 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11213 if (i < encoded_nelts)
11214 return VECTOR_CST_ENCODED_ELT (t, i);
11215
11216 /* If there are no steps, the final encoded value is the right one. */
11217 if (!VECTOR_CST_STEPPED_P (t))
11218 {
11219 /* Identify the pattern that contains element I and work out the index of
11220 the last encoded element for that pattern. */
11221 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11222 unsigned int pattern = i % npatterns;
11223 unsigned int final_i = encoded_nelts - npatterns + pattern;
11224 return VECTOR_CST_ENCODED_ELT (t, final_i);
11225 }
11226
11227 /* Otherwise work out the value from the last two encoded elements. */
11228 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11229 vector_cst_int_elt (t, i));
11230 }
11231
/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  /* Let the rest of the function write through NONZERO unconditionally.  */
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  /* Byte offset into a string constant; set by the MEM_REF case before
     falling through to STRING_CST.  */
  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      if (real_zerop (init)
	  && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
	return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
	return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      /* Zero iff both the real and imaginary parts are (positive) zero.  */
      if (integer_zerop (init)
	  || (real_zerop (init)
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	      && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
	return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      /* Only a uniform (single-pattern duplicate) vector of zeros
	 counts; anything else is treated as definitely nonzero.  */
      if (VECTOR_CST_NPATTERNS (init) == 1
	  && VECTOR_CST_DUPLICATE_P (init)
	  && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
	return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
	/* A clobber gives no information about the value.  */
	if (TREE_CLOBBER_P (init))
	  return false;

	unsigned HOST_WIDE_INT idx;
	tree elt;

	/* All recorded elements must be zero; NONZERO propagates up
	   from the recursive calls.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt, nonzero))
	    return false;

	return true;
      }

    case MEM_REF:
      {
	/* Recognize a read from a string constant, &"..."[off], and
	   fall through to the STRING_CST case with OFF set.  */
	tree arg = TREE_OPERAND (init, 0);
	if (TREE_CODE (arg) != ADDR_EXPR)
	  return false;
	tree offset = TREE_OPERAND (init, 1);
	if (TREE_CODE (offset) != INTEGER_CST
	    || !tree_fits_uhwi_p (offset))
	  return false;
	off = tree_to_uhwi (offset);
	if (INT_MAX < off)
	  return false;
	arg = TREE_OPERAND (arg, 0);
	if (TREE_CODE (arg) != STRING_CST)
	  return false;
	init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
	gcc_assert (off <= INT_MAX);

	int i = off;
	int n = TREE_STRING_LENGTH (init);
	if (n <= i)
	  return false;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	/* NOTE(review): the loop below restarts at 0 rather than at OFF,
	   so for a MEM_REF at a nonzero offset, bytes before OFF also
	   affect the answer (and can set *NONZERO) -- confirm this
	   conservatism is intended.  */
	for (i = 0; i < n; ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    {
	      *nonzero = true;
	      return false;
	    }

	return true;
      }

    default:
      return false;
    }
}
11359
11360 /* Return true if EXPR is an initializer expression in which every element
11361 is a constant that is numerically equal to 0 or 1. The elements do not
11362 need to be equal to each other. */
11363
11364 bool
11365 initializer_each_zero_or_onep (const_tree expr)
11366 {
11367 STRIP_ANY_LOCATION_WRAPPER (expr);
11368
11369 switch (TREE_CODE (expr))
11370 {
11371 case INTEGER_CST:
11372 return integer_zerop (expr) || integer_onep (expr);
11373
11374 case REAL_CST:
11375 return real_zerop (expr) || real_onep (expr);
11376
11377 case VECTOR_CST:
11378 {
11379 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11380 if (VECTOR_CST_STEPPED_P (expr)
11381 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11382 return false;
11383
11384 for (unsigned int i = 0; i < nelts; ++i)
11385 {
11386 tree elt = vector_cst_elt (expr, i);
11387 if (!initializer_each_zero_or_onep (elt))
11388 return false;
11389 }
11390
11391 return true;
11392 }
11393
11394 default:
11395 return false;
11396 }
11397 }
11398
11399 /* Given an initializer INIT for a TYPE, return true if INIT is zero
11400 so that it can be replaced by value initialization. This function
11401 distinguishes betwen empty strings as initializers for arrays and
11402 for pointers (which make it return false). */
11403
11404 bool
11405 type_initializer_zero_p (tree type, tree init)
11406 {
11407 if (type == error_mark_node || init == error_mark_node)
11408 return false;
11409
11410 STRIP_NOPS (init);
11411
11412 if (POINTER_TYPE_P (type))
11413 return TREE_CODE (init) != STRING_CST && initializer_zerop (init);
11414
11415 if (TREE_CODE (init) != CONSTRUCTOR)
11416 return initializer_zerop (init);
11417
11418 if (TREE_CODE (type) == ARRAY_TYPE)
11419 {
11420 tree elt_type = TREE_TYPE (type);
11421 elt_type = TYPE_MAIN_VARIANT (elt_type);
11422 if (elt_type == char_type_node)
11423 return initializer_zerop (init);
11424
11425 tree elt_init;
11426 unsigned HOST_WIDE_INT i;
11427 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), i, elt_init)
11428 if (!type_initializer_zero_p (elt_type, elt_init))
11429 return false;
11430 return true;
11431 }
11432
11433 if (TREE_CODE (type) != RECORD_TYPE)
11434 return initializer_zerop (init);
11435
11436 tree fld = TYPE_FIELDS (type);
11437
11438 tree fld_init;
11439 unsigned HOST_WIDE_INT i;
11440 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), i, fld_init)
11441 {
11442 /* Advance to the next member, skipping over everything that
11443 canot be initialized (including unnamed bit-fields). */
11444 while (TREE_CODE (fld) != FIELD_DECL
11445 || DECL_ARTIFICIAL (fld)
11446 || (DECL_BIT_FIELD (fld) && !DECL_NAME (fld)))
11447 {
11448 fld = DECL_CHAIN (fld);
11449 if (!fld)
11450 return true;
11451 continue;
11452 }
11453
11454 tree fldtype = TREE_TYPE (fld);
11455 if (!type_initializer_zero_p (fldtype, fld_init))
11456 return false;
11457
11458 fld = DECL_CHAIN (fld);
11459 if (!fld)
11460 break;
11461 }
11462
11463 return true;
11464 }
11465
/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  /* A VEC_DUPLICATE_EXPR is uniform by construction.  */
  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* A VECTOR_CST is uniform iff its encoding is one duplicated
	 pattern.  */
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
	return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
	   && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      /* Compare every element against the first one.  */
      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
	{
	  if (i == 0)
	    {
	      first = t;
	      continue;
	    }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
	}
      /* After the loop I holds the element count; a CONSTRUCTOR with
	 fewer elements than the type does not cover the whole vector
	 and so is not treated as uniform.  */
      if (i != nelts)
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
11514
11515 /* If the argument is INTEGER_CST, return it. If the argument is vector
11516 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11517 return NULL_TREE.
11518 Look through location wrappers. */
11519
11520 tree
11521 uniform_integer_cst_p (tree t)
11522 {
11523 STRIP_ANY_LOCATION_WRAPPER (t);
11524
11525 if (TREE_CODE (t) == INTEGER_CST)
11526 return t;
11527
11528 if (VECTOR_TYPE_P (TREE_TYPE (t)))
11529 {
11530 t = uniform_vector_p (t);
11531 if (t && TREE_CODE (t) == INTEGER_CST)
11532 return t;
11533 }
11534
11535 return NULL_TREE;
11536 }
11537
/* If VECTOR_CST T has a single nonzero element, return the index of that
   element, otherwise return -1.  */

int
single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT nelts;
  unsigned int repeat_nelts;
  if (VECTOR_CST_NELTS (t).is_constant (&nelts))
    /* Constant element count: scan every element, any index is valid.  */
    repeat_nelts = nelts;
  else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
    {
      /* Variable-length vector whose encoding just repeats (is not
	 stepped): scanning the encoded elements is enough, but the
	 nonzero element must lie in the non-repeated leading part of
	 the encoding, i.e. below the number of patterns.  */
      nelts = vector_cst_encoded_nelts (t);
      repeat_nelts = VECTOR_CST_NPATTERNS (t);
    }
  else
    return -1;

  int res = -1;
  for (unsigned int i = 0; i < nelts; ++i)
    {
      tree elt = vector_cst_elt (t, i);
      if (!integer_zerop (elt) && !real_zerop (elt))
	{
	  /* Fail on a second nonzero element, or on one whose value
	     repeats later in the vector.  */
	  if (res >= 0 || i >= repeat_nelts)
	    return -1;
	  res = i;
	}
    }
  return res;
}
11569
11570 /* Build an empty statement at location LOC. */
11571
11572 tree
11573 build_empty_stmt (location_t loc)
11574 {
11575 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11576 SET_EXPR_LOCATION (t, loc);
11577 return t;
11578 }
11579
11580
11581 /* Build an OpenMP clause with code CODE. LOC is the location of the
11582 clause. */
11583
11584 tree
11585 build_omp_clause (location_t loc, enum omp_clause_code code)
11586 {
11587 tree t;
11588 int size, length;
11589
11590 length = omp_clause_num_ops[code];
11591 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11592
11593 record_node_allocation_statistics (OMP_CLAUSE, size);
11594
11595 t = (tree) ggc_internal_alloc (size);
11596 memset (t, 0, size);
11597 TREE_SET_CODE (t, OMP_CLAUSE);
11598 OMP_CLAUSE_SET_CODE (t, code);
11599 OMP_CLAUSE_LOCATION (t) = loc;
11600
11601 return t;
11602 }
11603
/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_exp already holds one operand; LEN - 1 more follow it.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
11630
11631 /* Helper function for build_call_* functions; build a CALL_EXPR with
11632 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11633 the argument slots. */
11634
11635 static tree
11636 build_call_1 (tree return_type, tree fn, int nargs)
11637 {
11638 tree t;
11639
11640 t = build_vl_exp (CALL_EXPR, nargs + 3);
11641 TREE_TYPE (t) = return_type;
11642 CALL_EXPR_FN (t) = fn;
11643 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11644
11645 return t;
11646 }
11647
11648 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11649 FN and a null static chain slot. NARGS is the number of call arguments
11650 which are specified as "..." arguments. */
11651
11652 tree
11653 build_call_nary (tree return_type, tree fn, int nargs, ...)
11654 {
11655 tree ret;
11656 va_list args;
11657 va_start (args, nargs);
11658 ret = build_call_valist (return_type, fn, nargs, args);
11659 va_end (args);
11660 return ret;
11661 }
11662
11663 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11664 FN and a null static chain slot. NARGS is the number of call arguments
11665 which are specified as a va_list ARGS. */
11666
11667 tree
11668 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11669 {
11670 tree t;
11671 int i;
11672
11673 t = build_call_1 (return_type, fn, nargs);
11674 for (i = 0; i < nargs; i++)
11675 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11676 process_call_operands (t);
11677 return t;
11678 }
11679
11680 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11681 FN and a null static chain slot. NARGS is the number of call arguments
11682 which are specified as a tree array ARGS. */
11683
11684 tree
11685 build_call_array_loc (location_t loc, tree return_type, tree fn,
11686 int nargs, const tree *args)
11687 {
11688 tree t;
11689 int i;
11690
11691 t = build_call_1 (return_type, fn, nargs);
11692 for (i = 0; i < nargs; i++)
11693 CALL_EXPR_ARG (t, i) = args[i];
11694 process_call_operands (t);
11695 SET_EXPR_LOCATION (t, loc);
11696 return t;
11697 }
11698
11699 /* Like build_call_array, but takes a vec. */
11700
11701 tree
11702 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11703 {
11704 tree ret, t;
11705 unsigned int ix;
11706
11707 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11708 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11709 CALL_EXPR_ARG (ret, ix) = t;
11710 process_call_operands (ret);
11711 return ret;
11712 }
11713
11714 /* Conveniently construct a function call expression. FNDECL names the
11715 function to be called and N arguments are passed in the array
11716 ARGARRAY. */
11717
11718 tree
11719 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11720 {
11721 tree fntype = TREE_TYPE (fndecl);
11722 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11723
11724 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11725 }
11726
11727 /* Conveniently construct a function call expression. FNDECL names the
11728 function to be called and the arguments are passed in the vector
11729 VEC. */
11730
11731 tree
11732 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11733 {
11734 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11735 vec_safe_address (vec));
11736 }
11737
11738
11739 /* Conveniently construct a function call expression. FNDECL names the
11740 function to be called, N is the number of arguments, and the "..."
11741 parameters are the argument expressions. */
11742
11743 tree
11744 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11745 {
11746 va_list ap;
11747 tree *argarray = XALLOCAVEC (tree, n);
11748 int i;
11749
11750 va_start (ap, n);
11751 for (i = 0; i < n; i++)
11752 argarray[i] = va_arg (ap, tree);
11753 va_end (ap);
11754 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11755 }
11756
11757 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11758 varargs macros aren't supported by all bootstrap compilers. */
11759
11760 tree
11761 build_call_expr (tree fndecl, int n, ...)
11762 {
11763 va_list ap;
11764 tree *argarray = XALLOCAVEC (tree, n);
11765 int i;
11766
11767 va_start (ap, n);
11768 for (i = 0; i < n; i++)
11769 argarray[i] = va_arg (ap, tree);
11770 va_end (ap);
11771 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11772 }
11773
11774 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11775 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11776 It will get gimplified later into an ordinary internal function. */
11777
11778 tree
11779 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11780 tree type, int n, const tree *args)
11781 {
11782 tree t = build_call_1 (type, NULL_TREE, n);
11783 for (int i = 0; i < n; ++i)
11784 CALL_EXPR_ARG (t, i) = args[i];
11785 SET_EXPR_LOCATION (t, loc);
11786 CALL_EXPR_IFN (t) = ifn;
11787 return t;
11788 }
11789
11790 /* Build internal call expression. This is just like CALL_EXPR, except
11791 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11792 internal function. */
11793
11794 tree
11795 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11796 tree type, int n, ...)
11797 {
11798 va_list ap;
11799 tree *argarray = XALLOCAVEC (tree, n);
11800 int i;
11801
11802 va_start (ap, n);
11803 for (i = 0; i < n; i++)
11804 argarray[i] = va_arg (ap, tree);
11805 va_end (ap);
11806 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11807 }
11808
11809 /* Return a function call to FN, if the target is guaranteed to support it,
11810 or null otherwise.
11811
11812 N is the number of arguments, passed in the "...", and TYPE is the
11813 type of the return value. */
11814
11815 tree
11816 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11817 int n, ...)
11818 {
11819 va_list ap;
11820 tree *argarray = XALLOCAVEC (tree, n);
11821 int i;
11822
11823 va_start (ap, n);
11824 for (i = 0; i < n; i++)
11825 argarray[i] = va_arg (ap, tree);
11826 va_end (ap);
11827 if (internal_fn_p (fn))
11828 {
11829 internal_fn ifn = as_internal_fn (fn);
11830 if (direct_internal_fn_p (ifn))
11831 {
11832 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11833 if (!direct_internal_fn_supported_p (ifn, types,
11834 OPTIMIZE_FOR_BOTH))
11835 return NULL_TREE;
11836 }
11837 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11838 }
11839 else
11840 {
11841 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11842 if (!fndecl)
11843 return NULL_TREE;
11844 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11845 }
11846 }
11847
11848 /* Return a function call to the appropriate builtin alloca variant.
11849
11850 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11851 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11852 bound for SIZE in case it is not a fixed value. */
11853
11854 tree
11855 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11856 {
11857 if (max_size >= 0)
11858 {
11859 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11860 return
11861 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11862 }
11863 else if (align > 0)
11864 {
11865 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11866 return build_call_expr (t, 2, size, size_int (align));
11867 }
11868 else
11869 {
11870 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11871 return build_call_expr (t, 1, size);
11872 }
11873 }
11874
/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  The STRING_CST value
   is the LEN bytes at STR (the representation of the string, which may
   be wide).  */

tree
build_string_literal (int len, const char *str,
		      tree eltype /* = char_type_node */,
		      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.
     NOTE(review): assumes LEN (or SIZE, when given) is nonzero --
     a zero value would wrap MAXIDX around; confirm against callers.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  tree index = build_index_type (size_int (maxidx));
  /* The literal's type is an array of const-qualified ELTYPE.  */
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return &literal[0] as a pointer to const ELTYPE.  */
  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, eltype,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
11905
11906
11907
11908 /* Return true if T (assumed to be a DECL) must be assigned a memory
11909 location. */
11910
11911 bool
11912 needs_to_live_in_memory (const_tree t)
11913 {
11914 return (TREE_ADDRESSABLE (t)
11915 || is_global_var (t)
11916 || (TREE_CODE (t) == RESULT_DECL
11917 && !DECL_BY_REFERENCE (t)
11918 && aggregate_value_p (t, current_function_decl)));
11919 }
11920
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Sign-extend from bit BITS-1: set every bit above it when the
	 value is negative, clear them otherwise.  The two-step shift
	 by (BITS - 1) then 1 is equivalent to a single shift by BITS
	 here, since BITS < HOST_BITS_PER_WIDE_INT.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  /* Implicit conversion back to HOST_WIDE_INT reinterprets the bits as
     a signed value.  */
  return val;
}
11943
11944 /* If TYPE is an integral or pointer type, return an integer type with
11945 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11946 if TYPE is already an integer type of signedness UNSIGNEDP.
11947 If TYPE is a floating-point type, return an integer type with the same
11948 bitsize and with the signedness given by UNSIGNEDP; this is useful
11949 when doing bit-level operations on a floating-point value. */
11950
11951 tree
11952 signed_or_unsigned_type_for (int unsignedp, tree type)
11953 {
11954 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11955 return type;
11956
11957 if (TREE_CODE (type) == VECTOR_TYPE)
11958 {
11959 tree inner = TREE_TYPE (type);
11960 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11961 if (!inner2)
11962 return NULL_TREE;
11963 if (inner == inner2)
11964 return type;
11965 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11966 }
11967
11968 if (TREE_CODE (type) == COMPLEX_TYPE)
11969 {
11970 tree inner = TREE_TYPE (type);
11971 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11972 if (!inner2)
11973 return NULL_TREE;
11974 if (inner == inner2)
11975 return type;
11976 return build_complex_type (inner2);
11977 }
11978
11979 unsigned int bits;
11980 if (INTEGRAL_TYPE_P (type)
11981 || POINTER_TYPE_P (type)
11982 || TREE_CODE (type) == OFFSET_TYPE)
11983 bits = TYPE_PRECISION (type);
11984 else if (TREE_CODE (type) == REAL_TYPE)
11985 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11986 else
11987 return NULL_TREE;
11988
11989 return build_nonstandard_integer_type (bits, unsignedp);
11990 }
11991
11992 /* If TYPE is an integral or pointer type, return an integer type with
11993 the same precision which is unsigned, or itself if TYPE is already an
11994 unsigned integer type. If TYPE is a floating-point type, return an
11995 unsigned integer type with the same bitsize as TYPE. */
11996
11997 tree
11998 unsigned_type_for (tree type)
11999 {
12000 return signed_or_unsigned_type_for (1, type);
12001 }
12002
12003 /* If TYPE is an integral or pointer type, return an integer type with
12004 the same precision which is signed, or itself if TYPE is already a
12005 signed integer type. If TYPE is a floating-point type, return a
12006 signed integer type with the same bitsize as TYPE. */
12007
12008 tree
12009 signed_type_for (tree type)
12010 {
12011 return signed_or_unsigned_type_for (0, type);
12012 }
12013
12014 /* If TYPE is a vector type, return a signed integer vector type with the
12015 same width and number of subparts. Otherwise return boolean_type_node. */
12016
12017 tree
12018 truth_type_for (tree type)
12019 {
12020 if (TREE_CODE (type) == VECTOR_TYPE)
12021 {
12022 if (VECTOR_BOOLEAN_TYPE_P (type))
12023 return type;
12024 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
12025 GET_MODE_SIZE (TYPE_MODE (type)));
12026 }
12027 else
12028 return boolean_type_node;
12029 }
12030
12031 /* Returns the largest value obtainable by casting something in INNER type to
12032 OUTER type. */
12033
12034 tree
12035 upper_bound_in_type (tree outer, tree inner)
12036 {
12037 unsigned int det = 0;
12038 unsigned oprec = TYPE_PRECISION (outer);
12039 unsigned iprec = TYPE_PRECISION (inner);
12040 unsigned prec;
12041
12042 /* Compute a unique number for every combination. */
12043 det |= (oprec > iprec) ? 4 : 0;
12044 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
12045 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
12046
12047 /* Determine the exponent to use. */
12048 switch (det)
12049 {
12050 case 0:
12051 case 1:
12052 /* oprec <= iprec, outer: signed, inner: don't care. */
12053 prec = oprec - 1;
12054 break;
12055 case 2:
12056 case 3:
12057 /* oprec <= iprec, outer: unsigned, inner: don't care. */
12058 prec = oprec;
12059 break;
12060 case 4:
12061 /* oprec > iprec, outer: signed, inner: signed. */
12062 prec = iprec - 1;
12063 break;
12064 case 5:
12065 /* oprec > iprec, outer: signed, inner: unsigned. */
12066 prec = iprec;
12067 break;
12068 case 6:
12069 /* oprec > iprec, outer: unsigned, inner: signed. */
12070 prec = oprec;
12071 break;
12072 case 7:
12073 /* oprec > iprec, outer: unsigned, inner: unsigned. */
12074 prec = iprec;
12075 break;
12076 default:
12077 gcc_unreachable ();
12078 }
12079
12080 return wide_int_to_tree (outer,
12081 wi::mask (prec, false, TYPE_PRECISION (outer)));
12082 }
12083
12084 /* Returns the smallest value obtainable by casting something in INNER type to
12085 OUTER type. */
12086
12087 tree
12088 lower_bound_in_type (tree outer, tree inner)
12089 {
12090 unsigned oprec = TYPE_PRECISION (outer);
12091 unsigned iprec = TYPE_PRECISION (inner);
12092
12093 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
12094 and obtain 0. */
12095 if (TYPE_UNSIGNED (outer)
12096 /* If we are widening something of an unsigned type, OUTER type
12097 contains all values of INNER type. In particular, both INNER
12098 and OUTER types have zero in common. */
12099 || (oprec > iprec && TYPE_UNSIGNED (inner)))
12100 return build_int_cst (outer, 0);
12101 else
12102 {
12103 /* If we are widening a signed type to another signed type, we
12104 want to obtain -2^^(iprec-1). If we are keeping the
12105 precision or narrowing to a signed type, we want to obtain
12106 -2^(oprec-1). */
12107 unsigned prec = oprec > iprec ? iprec : oprec;
12108 return wide_int_to_tree (outer,
12109 wi::mask (prec - 1, true,
12110 TYPE_PRECISION (outer)));
12111 }
12112 }
12113
12114 /* Return nonzero if two operands that are suitable for PHI nodes are
12115 necessarily equal. Specifically, both ARG0 and ARG1 must be either
12116 SSA_NAME or invariant. Note that this is strictly an optimization.
12117 That is, callers of this function can directly call operand_equal_p
12118 and get the same result, only slower. */
12119
12120 int
12121 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
12122 {
12123 if (arg0 == arg1)
12124 return 1;
12125 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
12126 return 0;
12127 return operand_equal_p (arg0, arg1, 0);
12128 }
12129
12130 /* Returns number of zeros at the end of binary representation of X. */
12131
12132 tree
12133 num_ending_zeros (const_tree x)
12134 {
12135 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
12136 }
12137
12138
/* Walk NODE with walk_tree_1, reusing the FUNC, DATA, PSET and LH
   locals of the enclosing function, and return from that function
   immediately if the walk yields a non-null result.  Used by
   walk_type_fields and walk_tree_1 below.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
12147
/* This is a subroutine of walk_tree that walks field of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      /* Walk the return type, then each argument type.  */
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
12227
12228 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
12229 called with the DATA and the address of each sub-tree. If FUNC returns a
12230 non-NULL value, the traversal is stopped, and the value returned by FUNC
12231 is returned. If PSET is non-NULL it is used to record the nodes visited,
12232 and to avoid visiting a node more than once. */
12233
12234 tree
12235 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
12236 hash_set<tree> *pset, walk_tree_lh lh)
12237 {
12238 enum tree_code code;
12239 int walk_subtrees;
12240 tree result;
12241
12242 #define WALK_SUBTREE_TAIL(NODE) \
12243 do \
12244 { \
12245 tp = & (NODE); \
12246 goto tail_recurse; \
12247 } \
12248 while (0)
12249
12250 tail_recurse:
12251 /* Skip empty subtrees. */
12252 if (!*tp)
12253 return NULL_TREE;
12254
12255 /* Don't walk the same tree twice, if the user has requested
12256 that we avoid doing so. */
12257 if (pset && pset->add (*tp))
12258 return NULL_TREE;
12259
12260 /* Call the function. */
12261 walk_subtrees = 1;
12262 result = (*func) (tp, &walk_subtrees, data);
12263
12264 /* If we found something, return it. */
12265 if (result)
12266 return result;
12267
12268 code = TREE_CODE (*tp);
12269
12270 /* Even if we didn't, FUNC may have decided that there was nothing
12271 interesting below this point in the tree. */
12272 if (!walk_subtrees)
12273 {
12274 /* But we still need to check our siblings. */
12275 if (code == TREE_LIST)
12276 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12277 else if (code == OMP_CLAUSE)
12278 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12279 else
12280 return NULL_TREE;
12281 }
12282
12283 if (lh)
12284 {
12285 result = (*lh) (tp, &walk_subtrees, func, data, pset);
12286 if (result || !walk_subtrees)
12287 return result;
12288 }
12289
12290 switch (code)
12291 {
12292 case ERROR_MARK:
12293 case IDENTIFIER_NODE:
12294 case INTEGER_CST:
12295 case REAL_CST:
12296 case FIXED_CST:
12297 case VECTOR_CST:
12298 case STRING_CST:
12299 case BLOCK:
12300 case PLACEHOLDER_EXPR:
12301 case SSA_NAME:
12302 case FIELD_DECL:
12303 case RESULT_DECL:
12304 /* None of these have subtrees other than those already walked
12305 above. */
12306 break;
12307
12308 case TREE_LIST:
12309 WALK_SUBTREE (TREE_VALUE (*tp));
12310 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12311 break;
12312
12313 case TREE_VEC:
12314 {
12315 int len = TREE_VEC_LENGTH (*tp);
12316
12317 if (len == 0)
12318 break;
12319
12320 /* Walk all elements but the first. */
12321 while (--len)
12322 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12323
12324 /* Now walk the first one as a tail call. */
12325 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12326 }
12327
12328 case COMPLEX_CST:
12329 WALK_SUBTREE (TREE_REALPART (*tp));
12330 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12331
12332 case CONSTRUCTOR:
12333 {
12334 unsigned HOST_WIDE_INT idx;
12335 constructor_elt *ce;
12336
12337 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12338 idx++)
12339 WALK_SUBTREE (ce->value);
12340 }
12341 break;
12342
12343 case SAVE_EXPR:
12344 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12345
12346 case BIND_EXPR:
12347 {
12348 tree decl;
12349 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12350 {
12351 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
12352 into declarations that are just mentioned, rather than
12353 declared; they don't really belong to this part of the tree.
12354 And, we can see cycles: the initializer for a declaration
12355 can refer to the declaration itself. */
12356 WALK_SUBTREE (DECL_INITIAL (decl));
12357 WALK_SUBTREE (DECL_SIZE (decl));
12358 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12359 }
12360 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12361 }
12362
12363 case STATEMENT_LIST:
12364 {
12365 tree_stmt_iterator i;
12366 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12367 WALK_SUBTREE (*tsi_stmt_ptr (i));
12368 }
12369 break;
12370
12371 case OMP_CLAUSE:
12372 switch (OMP_CLAUSE_CODE (*tp))
12373 {
12374 case OMP_CLAUSE_GANG:
12375 case OMP_CLAUSE__GRIDDIM_:
12376 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12377 /* FALLTHRU */
12378
12379 case OMP_CLAUSE_ASYNC:
12380 case OMP_CLAUSE_WAIT:
12381 case OMP_CLAUSE_WORKER:
12382 case OMP_CLAUSE_VECTOR:
12383 case OMP_CLAUSE_NUM_GANGS:
12384 case OMP_CLAUSE_NUM_WORKERS:
12385 case OMP_CLAUSE_VECTOR_LENGTH:
12386 case OMP_CLAUSE_PRIVATE:
12387 case OMP_CLAUSE_SHARED:
12388 case OMP_CLAUSE_FIRSTPRIVATE:
12389 case OMP_CLAUSE_COPYIN:
12390 case OMP_CLAUSE_COPYPRIVATE:
12391 case OMP_CLAUSE_FINAL:
12392 case OMP_CLAUSE_IF:
12393 case OMP_CLAUSE_NUM_THREADS:
12394 case OMP_CLAUSE_SCHEDULE:
12395 case OMP_CLAUSE_UNIFORM:
12396 case OMP_CLAUSE_DEPEND:
12397 case OMP_CLAUSE_NONTEMPORAL:
12398 case OMP_CLAUSE_NUM_TEAMS:
12399 case OMP_CLAUSE_THREAD_LIMIT:
12400 case OMP_CLAUSE_DEVICE:
12401 case OMP_CLAUSE_DIST_SCHEDULE:
12402 case OMP_CLAUSE_SAFELEN:
12403 case OMP_CLAUSE_SIMDLEN:
12404 case OMP_CLAUSE_ORDERED:
12405 case OMP_CLAUSE_PRIORITY:
12406 case OMP_CLAUSE_GRAINSIZE:
12407 case OMP_CLAUSE_NUM_TASKS:
12408 case OMP_CLAUSE_HINT:
12409 case OMP_CLAUSE_TO_DECLARE:
12410 case OMP_CLAUSE_LINK:
12411 case OMP_CLAUSE_USE_DEVICE_PTR:
12412 case OMP_CLAUSE_USE_DEVICE_ADDR:
12413 case OMP_CLAUSE_IS_DEVICE_PTR:
12414 case OMP_CLAUSE_INCLUSIVE:
12415 case OMP_CLAUSE_EXCLUSIVE:
12416 case OMP_CLAUSE__LOOPTEMP_:
12417 case OMP_CLAUSE__REDUCTEMP_:
12418 case OMP_CLAUSE__CONDTEMP_:
12419 case OMP_CLAUSE__SCANTEMP_:
12420 case OMP_CLAUSE__SIMDUID_:
12421 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12422 /* FALLTHRU */
12423
12424 case OMP_CLAUSE_INDEPENDENT:
12425 case OMP_CLAUSE_NOWAIT:
12426 case OMP_CLAUSE_DEFAULT:
12427 case OMP_CLAUSE_UNTIED:
12428 case OMP_CLAUSE_MERGEABLE:
12429 case OMP_CLAUSE_PROC_BIND:
12430 case OMP_CLAUSE_DEVICE_TYPE:
12431 case OMP_CLAUSE_INBRANCH:
12432 case OMP_CLAUSE_NOTINBRANCH:
12433 case OMP_CLAUSE_FOR:
12434 case OMP_CLAUSE_PARALLEL:
12435 case OMP_CLAUSE_SECTIONS:
12436 case OMP_CLAUSE_TASKGROUP:
12437 case OMP_CLAUSE_NOGROUP:
12438 case OMP_CLAUSE_THREADS:
12439 case OMP_CLAUSE_SIMD:
12440 case OMP_CLAUSE_DEFAULTMAP:
12441 case OMP_CLAUSE_ORDER:
12442 case OMP_CLAUSE_BIND:
12443 case OMP_CLAUSE_AUTO:
12444 case OMP_CLAUSE_SEQ:
12445 case OMP_CLAUSE_TILE:
12446 case OMP_CLAUSE__SIMT_:
12447 case OMP_CLAUSE_IF_PRESENT:
12448 case OMP_CLAUSE_FINALIZE:
12449 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12450
12451 case OMP_CLAUSE_LASTPRIVATE:
12452 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12453 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12454 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12455
12456 case OMP_CLAUSE_COLLAPSE:
12457 {
12458 int i;
12459 for (i = 0; i < 3; i++)
12460 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12461 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12462 }
12463
12464 case OMP_CLAUSE_LINEAR:
12465 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12466 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12467 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12468 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12469
12470 case OMP_CLAUSE_ALIGNED:
12471 case OMP_CLAUSE_FROM:
12472 case OMP_CLAUSE_TO:
12473 case OMP_CLAUSE_MAP:
12474 case OMP_CLAUSE__CACHE_:
12475 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12476 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12477 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12478
12479 case OMP_CLAUSE_REDUCTION:
12480 case OMP_CLAUSE_TASK_REDUCTION:
12481 case OMP_CLAUSE_IN_REDUCTION:
12482 {
12483 int i;
12484 for (i = 0; i < 5; i++)
12485 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12486 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12487 }
12488
12489 default:
12490 gcc_unreachable ();
12491 }
12492 break;
12493
12494 case TARGET_EXPR:
12495 {
12496 int i, len;
12497
12498 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12499 But, we only want to walk once. */
12500 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12501 for (i = 0; i < len; ++i)
12502 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12503 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12504 }
12505
12506 case DECL_EXPR:
12507 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12508 defining. We only want to walk into these fields of a type in this
12509 case and not in the general case of a mere reference to the type.
12510
12511 The criterion is as follows: if the field can be an expression, it
12512 must be walked only here. This should be in keeping with the fields
12513 that are directly gimplified in gimplify_type_sizes in order for the
12514 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12515 variable-sized types.
12516
12517 Note that DECLs get walked as part of processing the BIND_EXPR. */
12518 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12519 {
12520 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12521 if (TREE_CODE (*type_p) == ERROR_MARK)
12522 return NULL_TREE;
12523
12524 /* Call the function for the type. See if it returns anything or
12525 doesn't want us to continue. If we are to continue, walk both
12526 the normal fields and those for the declaration case. */
12527 result = (*func) (type_p, &walk_subtrees, data);
12528 if (result || !walk_subtrees)
12529 return result;
12530
12531 /* But do not walk a pointed-to type since it may itself need to
12532 be walked in the declaration case if it isn't anonymous. */
12533 if (!POINTER_TYPE_P (*type_p))
12534 {
12535 result = walk_type_fields (*type_p, func, data, pset, lh);
12536 if (result)
12537 return result;
12538 }
12539
12540 /* If this is a record type, also walk the fields. */
12541 if (RECORD_OR_UNION_TYPE_P (*type_p))
12542 {
12543 tree field;
12544
12545 for (field = TYPE_FIELDS (*type_p); field;
12546 field = DECL_CHAIN (field))
12547 {
12548 /* We'd like to look at the type of the field, but we can
12549 easily get infinite recursion. So assume it's pointed
12550 to elsewhere in the tree. Also, ignore things that
12551 aren't fields. */
12552 if (TREE_CODE (field) != FIELD_DECL)
12553 continue;
12554
12555 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12556 WALK_SUBTREE (DECL_SIZE (field));
12557 WALK_SUBTREE (DECL_SIZE_UNIT (field));
12558 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12559 WALK_SUBTREE (DECL_QUALIFIER (field));
12560 }
12561 }
12562
12563 /* Same for scalar types. */
12564 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12565 || TREE_CODE (*type_p) == ENUMERAL_TYPE
12566 || TREE_CODE (*type_p) == INTEGER_TYPE
12567 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12568 || TREE_CODE (*type_p) == REAL_TYPE)
12569 {
12570 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12571 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12572 }
12573
12574 WALK_SUBTREE (TYPE_SIZE (*type_p));
12575 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12576 }
12577 /* FALLTHRU */
12578
12579 default:
12580 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12581 {
12582 int i, len;
12583
12584 /* Walk over all the sub-trees of this operand. */
12585 len = TREE_OPERAND_LENGTH (*tp);
12586
12587 /* Go through the subtrees. We need to do this in forward order so
12588 that the scope of a FOR_EXPR is handled properly. */
12589 if (len)
12590 {
12591 for (i = 0; i < len - 1; ++i)
12592 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12593 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12594 }
12595 }
12596 /* If this is a type, walk the needed fields in the type. */
12597 else if (TYPE_P (*tp))
12598 return walk_type_fields (*tp, func, data, pset, lh);
12599 break;
12600 }
12601
12602 /* We didn't find what we were looking for. */
12603 return NULL_TREE;
12604
12605 #undef WALK_SUBTREE_TAIL
12606 }
12607 #undef WALK_SUBTREE
12608
12609 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12610
12611 tree
12612 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12613 walk_tree_lh lh)
12614 {
12615 tree result;
12616
12617 hash_set<tree> pset;
12618 result = walk_tree_1 (tp, func, data, &pset, lh);
12619 return result;
12620 }
12621
12622
12623 tree
12624 tree_block (tree t)
12625 {
12626 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12627
12628 if (IS_EXPR_CODE_CLASS (c))
12629 return LOCATION_BLOCK (t->exp.locus);
12630 gcc_unreachable ();
12631 return NULL;
12632 }
12633
12634 void
12635 tree_set_block (tree t, tree b)
12636 {
12637 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12638
12639 if (IS_EXPR_CODE_CLASS (c))
12640 {
12641 t->exp.locus = set_block (t->exp.locus, b);
12642 }
12643 else
12644 gcc_unreachable ();
12645 }
12646
12647 /* Create a nameless artificial label and put it in the current
12648 function context. The label has a location of LOC. Returns the
12649 newly created label. */
12650
12651 tree
12652 create_artificial_label (location_t loc)
12653 {
12654 tree lab = build_decl (loc,
12655 LABEL_DECL, NULL_TREE, void_type_node);
12656
12657 DECL_ARTIFICIAL (lab) = 1;
12658 DECL_IGNORED_P (lab) = 1;
12659 DECL_CONTEXT (lab) = current_function_decl;
12660 return lab;
12661 }
12662
12663 /* Given a tree, try to return a useful variable name that we can use
12664 to prefix a temporary that is being assigned the value of the tree.
12665 I.E. given <temp> = &A, return A. */
12666
12667 const char *
12668 get_name (tree t)
12669 {
12670 tree stripped_decl;
12671
12672 stripped_decl = t;
12673 STRIP_NOPS (stripped_decl);
12674 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12675 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12676 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12677 {
12678 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12679 if (!name)
12680 return NULL;
12681 return IDENTIFIER_POINTER (name);
12682 }
12683 else
12684 {
12685 switch (TREE_CODE (stripped_decl))
12686 {
12687 case ADDR_EXPR:
12688 return get_name (TREE_OPERAND (stripped_decl, 0));
12689 default:
12690 return NULL;
12691 }
12692 }
12693 }
12694
12695 /* Return true if TYPE has a variable argument list. */
12696
12697 bool
12698 stdarg_p (const_tree fntype)
12699 {
12700 function_args_iterator args_iter;
12701 tree n = NULL_TREE, t;
12702
12703 if (!fntype)
12704 return false;
12705
12706 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12707 {
12708 n = t;
12709 }
12710
12711 return n != NULL_TREE && n != void_type_node;
12712 }
12713
12714 /* Return true if TYPE has a prototype. */
12715
12716 bool
12717 prototype_p (const_tree fntype)
12718 {
12719 tree t;
12720
12721 gcc_assert (fntype != NULL_TREE);
12722
12723 t = TYPE_ARG_TYPES (fntype);
12724 return (t != NULL_TREE);
12725 }
12726
12727 /* If BLOCK is inlined from an __attribute__((__artificial__))
12728 routine, return pointer to location from where it has been
12729 called. */
12730 location_t *
12731 block_nonartificial_location (tree block)
12732 {
12733 location_t *ret = NULL;
12734
12735 while (block && TREE_CODE (block) == BLOCK
12736 && BLOCK_ABSTRACT_ORIGIN (block))
12737 {
12738 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12739 if (TREE_CODE (ao) == FUNCTION_DECL)
12740 {
12741 /* If AO is an artificial inline, point RET to the
12742 call site locus at which it has been inlined and continue
12743 the loop, in case AO's caller is also an artificial
12744 inline. */
12745 if (DECL_DECLARED_INLINE_P (ao)
12746 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12747 ret = &BLOCK_SOURCE_LOCATION (block);
12748 else
12749 break;
12750 }
12751 else if (TREE_CODE (ao) != BLOCK)
12752 break;
12753
12754 block = BLOCK_SUPERCONTEXT (block);
12755 }
12756 return ret;
12757 }
12758
12759
12760 /* If EXP is inlined from an __attribute__((__artificial__))
12761 function, return the location of the original call expression. */
12762
12763 location_t
12764 tree_nonartificial_location (tree exp)
12765 {
12766 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12767
12768 if (loc)
12769 return *loc;
12770 else
12771 return EXPR_LOCATION (exp);
12772 }
12773
12774
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */
12777
/* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
12779
hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  /* For optimization nodes, hash the raw bytes of the saved option
     structure; target option nodes have a dedicated hash routine.  */
  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else; mix each nonzero byte together with its index.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}
12809
/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that represented by *Y, a node
   of the same kind.  */
12813
12814 bool
12815 cl_option_hasher::equal (tree x, tree y)
12816 {
12817 const_tree const xt = x;
12818 const_tree const yt = y;
12819
12820 if (TREE_CODE (xt) != TREE_CODE (yt))
12821 return 0;
12822
12823 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12824 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12825 TREE_OPTIMIZATION (yt));
12826 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12827 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12828 TREE_TARGET_OPTION (yt));
12829 else
12830 gcc_unreachable ();
12831 }
12832
12833 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12834
12835 tree
12836 build_optimization_node (struct gcc_options *opts)
12837 {
12838 tree t;
12839
12840 /* Use the cache of optimization nodes. */
12841
12842 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12843 opts);
12844
12845 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12846 t = *slot;
12847 if (!t)
12848 {
12849 /* Insert this one into the hash table. */
12850 t = cl_optimization_node;
12851 *slot = t;
12852
12853 /* Make a new node for next time round. */
12854 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12855 }
12856
12857 return t;
12858 }
12859
12860 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12861
12862 tree
12863 build_target_option_node (struct gcc_options *opts)
12864 {
12865 tree t;
12866
12867 /* Use the cache of optimization nodes. */
12868
12869 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12870 opts);
12871
12872 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12873 t = *slot;
12874 if (!t)
12875 {
12876 /* Insert this one into the hash table. */
12877 t = cl_target_option_node;
12878 *slot = t;
12879
12880 /* Make a new node for next time round. */
12881 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12882 }
12883
12884 return t;
12885 }
12886
12887 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12888 so that they aren't saved during PCH writing. */
12889
12890 void
12891 prepare_target_option_nodes_for_pch (void)
12892 {
12893 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12894 for (; iter != cl_option_hash_table->end (); ++iter)
12895 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12896 TREE_TARGET_GLOBALS (*iter) = NULL;
12897 }
12898
12899 /* Determine the "ultimate origin" of a block. */
12900
12901 tree
12902 block_ultimate_origin (const_tree block)
12903 {
12904 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12905
12906 if (origin == NULL_TREE)
12907 return NULL_TREE;
12908 else
12909 {
12910 gcc_checking_assert ((DECL_P (origin)
12911 && DECL_ORIGIN (origin) == origin)
12912 || BLOCK_ORIGIN (origin) == origin);
12913 return origin;
12914 }
12915 }
12916
12917 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12918 no instruction. */
12919
bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      /* OUTER_TYPE points into a non-generic address space; INNER_TYPE
	 must be a pointer into that very same space.  */
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
12954
12955 /* Return true iff conversion in EXP generates no instruction. Mark
12956 it inline so that we fully inline into the stripping functions even
12957 though we have two uses of this function. */
12958
12959 static inline bool
12960 tree_nop_conversion (const_tree exp)
12961 {
12962 tree outer_type, inner_type;
12963
12964 if (location_wrapper_p (exp))
12965 return true;
12966 if (!CONVERT_EXPR_P (exp)
12967 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12968 return false;
12969
12970 outer_type = TREE_TYPE (exp);
12971 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12972 if (!inner_type || inner_type == error_mark_node)
12973 return false;
12974
12975 return tree_nop_conversion_p (outer_type, inner_type);
12976 }
12977
12978 /* Return true iff conversion in EXP generates no instruction. Don't
12979 consider conversions changing the signedness. */
12980
12981 static bool
12982 tree_sign_nop_conversion (const_tree exp)
12983 {
12984 tree outer_type, inner_type;
12985
12986 if (!tree_nop_conversion (exp))
12987 return false;
12988
12989 outer_type = TREE_TYPE (exp);
12990 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12991
12992 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12993 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12994 }
12995
12996 /* Strip conversions from EXP according to tree_nop_conversion and
12997 return the resulting expression. */
12998
12999 tree
13000 tree_strip_nop_conversions (tree exp)
13001 {
13002 while (tree_nop_conversion (exp))
13003 exp = TREE_OPERAND (exp, 0);
13004 return exp;
13005 }
13006
13007 /* Strip conversions from EXP according to tree_sign_nop_conversion
13008 and return the resulting expression. */
13009
13010 tree
13011 tree_strip_sign_nop_conversions (tree exp)
13012 {
13013 while (tree_sign_nop_conversion (exp))
13014 exp = TREE_OPERAND (exp, 0);
13015 return exp;
13016 }
13017
13018 /* Avoid any floating point extensions from EXP. */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double, accepting the narrowing only when
	 it is exact.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  /* Only conversions can be stripped below.  */
  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* Do not mix binary and decimal floating point.  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* A narrowing conversion is not an extension; keep it.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  /* There may be a chain of widening conversions; strip them all.  */
  return strip_float_extensions (sub);
}
13063
13064 /* Strip out all handled components that produce invariant
13065 offsets. */
13066
13067 const_tree
13068 strip_invariant_refs (const_tree op)
13069 {
13070 while (handled_component_p (op))
13071 {
13072 switch (TREE_CODE (op))
13073 {
13074 case ARRAY_REF:
13075 case ARRAY_RANGE_REF:
13076 if (!is_gimple_constant (TREE_OPERAND (op, 1))
13077 || TREE_OPERAND (op, 2) != NULL_TREE
13078 || TREE_OPERAND (op, 3) != NULL_TREE)
13079 return NULL;
13080 break;
13081
13082 case COMPONENT_REF:
13083 if (TREE_OPERAND (op, 2) != NULL_TREE)
13084 return NULL;
13085 break;
13086
13087 default:;
13088 }
13089 op = TREE_OPERAND (op, 0);
13090 }
13091
13092 return op;
13093 }
13094
13095 static GTY(()) tree gcc_eh_personality_decl;
13096
13097 /* Return the GCC personality function decl. */
13098
13099 tree
13100 lhd_gcc_personality (void)
13101 {
13102 if (!gcc_eh_personality_decl)
13103 gcc_eh_personality_decl = build_personality_function ("gcc");
13104 return gcc_eh_personality_decl;
13105 }
13106
13107 /* TARGET is a call target of GIMPLE call statement
13108 (obtained by gimple_call_fn). Return true if it is
13109 OBJ_TYPE_REF representing an virtual call of C++ method.
13110 (As opposed to OBJ_TYPE_REF representing objc calls
13111 through a cast where middle-end devirtualization machinery
13112 can't apply.) */
13113
13114 bool
13115 virtual_method_call_p (const_tree target)
13116 {
13117 if (TREE_CODE (target) != OBJ_TYPE_REF)
13118 return false;
13119 tree t = TREE_TYPE (target);
13120 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
13121 t = TREE_TYPE (t);
13122 if (TREE_CODE (t) == FUNCTION_TYPE)
13123 return false;
13124 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
13125 /* If we do not have BINFO associated, it means that type was built
13126 without devirtualization enabled. Do not consider this a virtual
13127 call. */
13128 if (!TYPE_BINFO (obj_type_ref_class (target)))
13129 return false;
13130 return true;
13131 }
13132
13133 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
13134
13135 static tree
13136 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
13137 {
13138 unsigned int i;
13139 tree base_binfo, b;
13140
13141 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
13142 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
13143 && types_same_for_odr (TREE_TYPE (base_binfo), type))
13144 return base_binfo;
13145 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
13146 return b;
13147 return NULL;
13148 }
13149
13150 /* Try to find a base info of BINFO that would have its field decl at offset
13151 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
13152 found, return, otherwise return NULL_TREE. */
13153
tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      /* Found the expected type at the current level: done.  */
      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      /* Find the artificial (base-class) field whose bit range contains
	 OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  /* No direct base at that offset: search the bases recursively.  */
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the field and make OFFSET relative to it.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
13211
13212 /* Returns true if X is a typedef decl. */
13213
13214 bool
13215 is_typedef_decl (const_tree x)
13216 {
13217 return (x && TREE_CODE (x) == TYPE_DECL
13218 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
13219 }
13220
13221 /* Returns true iff TYPE is a type variant created for a typedef. */
13222
13223 bool
13224 typedef_variant_p (const_tree type)
13225 {
13226 return is_typedef_decl (TYPE_NAME (type));
13227 }
13228
13229 /* A class to handle converting a string that might contain
13230 control characters, (eg newline, form-feed, etc), into one
13231 in which contains escape sequences instead. */
13232
class escaped_string
{
 public:
  escaped_string () { m_owned = false; m_str = NULL; }
  ~escaped_string () { if (m_owned) free (m_str); }
  operator const char *() const { return (const char *) m_str; }
  void escape (const char *);
 private:
  /* This class owns a heap buffer whenever m_owned is set, and the
     destructor frees it.  The implicitly generated copy constructor
     and assignment operator would create two owners of the same
     buffer and lead to a double free, so declare them private and
     leave them undefined (C++03-compatible noncopyable idiom): any
     accidental copy fails to compile or link.  */
  escaped_string (const escaped_string &);
  escaped_string &operator= (const escaped_string &);

  char *m_str;   /* The (possibly escaped) string.  */
  bool m_owned;  /* True when m_str was allocated by escape ().  */
};
13244
13245 /* PR 84195: Replace control characters in "unescaped" with their
13246 escaped equivalents. Allow newlines if -fmessage-length has
13247 been set to a non-zero value. This is done here, rather than
13248 where the attribute is recorded as the message length can
13249 change between these two locations. */
13250
void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  /* Release any buffer owned from a previous call.  */
  if (m_owned)
    free (m_str);

  /* By default borrow the caller's string without taking ownership.  */
  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      /* Ordinary characters are copied through unchanged (only once a
	 replacement buffer exists).  */
      if (!ISCNTRL (c))
	{
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  Each input character expands to at
		 most two output characters, so LEN * 2 + 1 suffices.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default: escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	/* Newlines pass through when line wrapping is enabled.  */
	escaped[new_i++] = c;
    }

  /* Only take ownership if a replacement buffer was built.  */
  if (escaped)
    {
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
13318
13319 /* Warn about a use of an identifier which was marked deprecated. Returns
13320 whether a warning was given. */
13321
bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  /* If no attribute list was passed in, look one up on the node itself:
     directly for a declaration, via the stub TYPE_DECL for a type.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* Extract the optional message argument of the attribute, escaping
     any control characters it may contain.  */
  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      /* Prefer naming the type in the diagnostic when a name exists,
	 either as a bare identifier or via its TYPE_DECL.  */
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
13402
13403 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13404 somewhere in it. */
13405
13406 bool
13407 contains_bitfld_component_ref_p (const_tree ref)
13408 {
13409 while (handled_component_p (ref))
13410 {
13411 if (TREE_CODE (ref) == COMPONENT_REF
13412 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13413 return true;
13414 ref = TREE_OPERAND (ref, 0);
13415 }
13416
13417 return false;
13418 }
13419
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.

   STMT is a TRY_CATCH_EXPR; operand 0 is the try block and operand 1
   is the handler sequence.  Returns true if execution may continue
   past the whole construct.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Inspect the first statement of the handler sequence to determine
     what kind of handler this is.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
13467
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* An empty block (no last statement) is dispatched to the ERROR_MARK
     case below and conservatively treated as falling through.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      /* Either arm falling through suffices.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      /* An assignment can only stop fallthru if its RHS is a call to
	 a noreturn function; reduce to the CALL_EXPR case below.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      /* Delegate language-specific tree codes to the front end.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
13546
/* Records whether exception handling is the mechanism used to run
   cleanups; starts out false until a front end opts in.  */
static bool using_eh_for_cleanups_flag = false;

/* Called by front ends to declare that exception handling should be
   used for cleanups from here on.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Return whether a front end has requested EH-based cleanups.  */
bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
13564
13565 /* Wrapper for tree_code_name to ensure that tree code is valid */
13566 const char *
13567 get_tree_code_name (enum tree_code code)
13568 {
13569 const char *invalid = "<invalid tree code>";
13570
13571 if (code >= MAX_TREE_CODES)
13572 {
13573 if (code == 0xa5a5)
13574 return "ggc_freed";
13575 return invalid;
13576 }
13577
13578 return tree_code_name[code];
13579 }
13580
/* Drops the TREE_OVERFLOW flag from T.

   Returns a node with the same value as T but with TREE_OVERFLOW
   cleared, recursively clearing the flag from nested constants too.
   T must have TREE_OVERFLOW set on entry (checked below).  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      /* Only the encoded elements need processing; the builder
	 re-derives the rest when it canonicalizes.  */
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
13626
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.
   Returns NULL_TREE for a WITH_SIZE_EXPR base (see comment below).  */

tree
get_base_address (tree t)
{
  /* Strip component references (COMPONENT_REF, ARRAY_REF, etc.).  */
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  /* Look through a (TARGET_)MEM_REF whose address is a plain
     ADDR_EXPR, exposing the underlying decl/object directly.  */
  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  /* ??? Either the alias oracle or all callers need to properly deal
     with WITH_SIZE_EXPRs before we can look through those.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    return NULL_TREE;

  return t;
}
13654
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.

   Operand 3 of the reference, when present, overrides the element
   type's own size (it is measured in alignment units).  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
13682
13683 /* Return a tree representing the lower bound of the array mentioned in
13684 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13685
13686 tree
13687 array_ref_low_bound (tree exp)
13688 {
13689 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13690
13691 /* If a lower bound is specified in EXP, use it. */
13692 if (TREE_OPERAND (exp, 2))
13693 return TREE_OPERAND (exp, 2);
13694
13695 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13696 substituting for a PLACEHOLDER_EXPR as needed. */
13697 if (domain_type && TYPE_MIN_VALUE (domain_type))
13698 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13699
13700 /* Otherwise, return a zero of the appropriate type. */
13701 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13702 }
13703
13704 /* Return a tree representing the upper bound of the array mentioned in
13705 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13706
13707 tree
13708 array_ref_up_bound (tree exp)
13709 {
13710 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13711
13712 /* If there is a domain type and it has an upper bound, use it, substituting
13713 for a PLACEHOLDER_EXPR as needed. */
13714 if (domain_type && TYPE_MAX_VALUE (domain_type))
13715 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13716
13717 /* Otherwise fail. */
13718 return NULL_TREE;
13719 }
13720
/* Returns true if REF is an array reference or a component reference
   to an array at the end of a structure.
   If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  tree atype;

  /* Determine the array type being referenced and step to the object
     containing the array access.  */
  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else
    return false;

  /* A string constant has fixed, fully-known extent.  */
  if (TREE_CODE (ref) == STRING_CST)
    return false;

  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
         non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      /* Skip non-FIELD_DECL chain entries when looking for a
		 following field.  */
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
         a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
         gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* Look through a MEM_REF based on a decl address so the DECL_P
     check below can apply.  */
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  if (DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
         padding.  */
      poly_int64 offset;
      /* If any of the sizes/bounds is non-constant, be conservative
	 and answer true.  */
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
          || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
13825
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.

   Operand 2 of the COMPONENT_REF, when present, overrides the field's
   own DECL_FIELD_OFFSET (it is measured in units of
   DECL_OFFSET_ALIGN / BITS_PER_UNIT).  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
13855
13856 /* Return the machine mode of T. For vectors, returns the mode of the
13857 inner type. The main use case is to feed the result to HONOR_NANS,
13858 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13859
13860 machine_mode
13861 element_mode (const_tree t)
13862 {
13863 if (!TYPE_P (t))
13864 t = TREE_TYPE (t);
13865 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13866 t = TREE_TYPE (t);
13867 return TYPE_MODE (t);
13868 }
13869
13870 /* Vector types need to re-check the target flags each time we report
13871 the machine mode. We need to do this because attribute target can
13872 change the result of vector_mode_supported_p and have_regs_of_mode
13873 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13874 change on a per-function basis. */
13875 /* ??? Possibly a better solution is to run through all the types
13876 referenced by a function and re-compute the TYPE_MODE once, rather
13877 than make the TYPE_MODE macro call a function. */
13878
13879 machine_mode
13880 vector_type_mode (const_tree t)
13881 {
13882 machine_mode mode;
13883
13884 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13885
13886 mode = t->type_common.mode;
13887 if (VECTOR_MODE_P (mode)
13888 && (!targetm.vector_mode_supported_p (mode)
13889 || !have_regs_of_mode[mode]))
13890 {
13891 scalar_int_mode innermode;
13892
13893 /* For integers, try mapping it to a same-sized scalar mode. */
13894 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13895 {
13896 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13897 * GET_MODE_BITSIZE (innermode));
13898 scalar_int_mode mode;
13899 if (int_mode_for_size (size, 0).exists (&mode)
13900 && have_regs_of_mode[mode])
13901 return mode;
13902 }
13903
13904 return BLKmode;
13905 }
13906
13907 return mode;
13908 }
13909
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).

   Returns true if T is a consistent variant of TV; on a mismatch,
   reports the offending field via error/debug_tree and returns false.  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types form different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by %s", #flag);			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
  if (TYPE_ARTIFICIAL (tv) && 0)
    verify_variant_match (TYPE_ARTIFICIAL);
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check trigger during libstdc++ build.  */
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
    verify_variant_match (TYPE_FINAL_P);

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* Sizes involving PLACEHOLDER_EXPRs cannot be compared by
	 pointer identity, so skip them.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
    }
  verify_variant_match (TYPE_PRECISION);
  verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from diferent translation units
     that may differ BY TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.   */
  if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
    verify_variant_match (TYPE_CONTEXT);
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
      || TREE_CODE (t) == INTEGER_TYPE
      || TREE_CODE (t) == BOOLEAN_TYPE
      || TREE_CODE (t) == REAL_TYPE
      || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first an dummy BINFO and then sometimes replace it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type. Verify that they looks same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
          return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
14144
14145
14146 /* The TYPE_CANONICAL merging machinery. It should closely resemble
14147 the middle-end types_compatible_p function. It needs to avoid
14148 claiming types are different for types that should be treated
14149 the same with respect to TBAA. Canonical types are also used
14150 for IL consistency checks via the useless_type_conversion_p
14151 predicate which does not handle all type kinds itself but falls
14152 back to pointer-comparison of TYPE_CANONICAL for aggregates
14153 for example. */
14154
14155 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14156 type calculation because we need to allow inter-operability between signed
14157 and unsigned variants. */
14158
14159 bool
14160 type_with_interoperable_signedness (const_tree type)
14161 {
14162 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
14163 signed char and unsigned char. Similarly fortran FE builds
14164 C_SIZE_T as signed type, while C defines it unsigned. */
14165
14166 return tree_code_for_canonical_type_merging (TREE_CODE (type))
14167 == INTEGER_TYPE
14168 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14169 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14170 }
14171
14172 /* Return true iff T1 and T2 are structurally identical for what
14173 TBAA is concerned.
14174 This function is used both by lto.c canonical type merging and by the
14175 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
14176 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
14177 only for LTO because only in these cases TYPE_CANONICAL equivalence
14178 correspond to one defined by gimple_canonical_types_compatible_p. */
14179
14180 bool
14181 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14182 bool trust_type_canonical)
14183 {
14184 /* Type variants should be same as the main variant. When not doing sanity
14185 checking to verify this fact, go to main variants and save some work. */
14186 if (trust_type_canonical)
14187 {
14188 t1 = TYPE_MAIN_VARIANT (t1);
14189 t2 = TYPE_MAIN_VARIANT (t2);
14190 }
14191
14192 /* Check first for the obvious case of pointer identity. */
14193 if (t1 == t2)
14194 return true;
14195
14196 /* Check that we have two types to compare. */
14197 if (t1 == NULL_TREE || t2 == NULL_TREE)
14198 return false;
14199
14200 /* We consider complete types always compatible with incomplete type.
14201 This does not make sense for canonical type calculation and thus we
14202 need to ensure that we are never called on it.
14203
14204 FIXME: For more correctness the function probably should have three modes
14205 1) mode assuming that types are complete mathcing their structure
14206 2) mode allowing incomplete types but producing equivalence classes
14207 and thus ignoring all info from complete types
14208 3) mode allowing incomplete types to match complete but checking
14209 compatibility between complete types.
14210
14211 1 and 2 can be used for canonical type calculation. 3 is the real
14212 definition of type compatibility that can be used i.e. for warnings during
14213 declaration merging. */
14214
14215 gcc_assert (!trust_type_canonical
14216 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14217
14218 /* If the types have been previously registered and found equal
14219 they still are. */
14220
14221 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14222 && trust_type_canonical)
14223 {
14224 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
14225 they are always NULL, but they are set to non-NULL for types
14226 constructed by build_pointer_type and variants. In this case the
14227 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
14228 all pointers are considered equal. Be sure to not return false
14229 negatives. */
14230 gcc_checking_assert (canonical_type_used_p (t1)
14231 && canonical_type_used_p (t2));
14232 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14233 }
14234
14235 /* For types where we do ODR based TBAA the canonical type is always
14236 set correctly, so we know that types are different if their
14237 canonical types does not match. */
14238 if (trust_type_canonical
14239 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14240 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14241 return false;
14242
14243 /* Can't be the same type if the types don't have the same code. */
14244 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14245 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14246 return false;
14247
14248 /* Qualifiers do not matter for canonical type comparison purposes. */
14249
14250 /* Void types and nullptr types are always the same. */
14251 if (TREE_CODE (t1) == VOID_TYPE
14252 || TREE_CODE (t1) == NULLPTR_TYPE)
14253 return true;
14254
14255 /* Can't be the same type if they have different mode. */
14256 if (TYPE_MODE (t1) != TYPE_MODE (t2))
14257 return false;
14258
14259 /* Non-aggregate types can be handled cheaply. */
14260 if (INTEGRAL_TYPE_P (t1)
14261 || SCALAR_FLOAT_TYPE_P (t1)
14262 || FIXED_POINT_TYPE_P (t1)
14263 || TREE_CODE (t1) == VECTOR_TYPE
14264 || TREE_CODE (t1) == COMPLEX_TYPE
14265 || TREE_CODE (t1) == OFFSET_TYPE
14266 || POINTER_TYPE_P (t1))
14267 {
14268 /* Can't be the same type if they have different recision. */
14269 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14270 return false;
14271
14272 /* In some cases the signed and unsigned types are required to be
14273 inter-operable. */
14274 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14275 && !type_with_interoperable_signedness (t1))
14276 return false;
14277
14278 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14279 interoperable with "signed char". Unless all frontends are revisited
14280 to agree on these types, we must ignore the flag completely. */
14281
14282 /* Fortran standard define C_PTR type that is compatible with every
14283 C pointer. For this reason we need to glob all pointers into one.
14284 Still pointers in different address spaces are not compatible. */
14285 if (POINTER_TYPE_P (t1))
14286 {
14287 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14288 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14289 return false;
14290 }
14291
14292 /* Tail-recurse to components. */
14293 if (TREE_CODE (t1) == VECTOR_TYPE
14294 || TREE_CODE (t1) == COMPLEX_TYPE)
14295 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14296 TREE_TYPE (t2),
14297 trust_type_canonical);
14298
14299 return true;
14300 }
14301
14302 /* Do type-specific comparisons. */
14303 switch (TREE_CODE (t1))
14304 {
14305 case ARRAY_TYPE:
14306 /* Array types are the same if the element types are the same and
14307 the number of elements are the same. */
14308 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14309 trust_type_canonical)
14310 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14311 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14312 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14313 return false;
14314 else
14315 {
14316 tree i1 = TYPE_DOMAIN (t1);
14317 tree i2 = TYPE_DOMAIN (t2);
14318
14319 /* For an incomplete external array, the type domain can be
14320 NULL_TREE. Check this condition also. */
14321 if (i1 == NULL_TREE && i2 == NULL_TREE)
14322 return true;
14323 else if (i1 == NULL_TREE || i2 == NULL_TREE)
14324 return false;
14325 else
14326 {
14327 tree min1 = TYPE_MIN_VALUE (i1);
14328 tree min2 = TYPE_MIN_VALUE (i2);
14329 tree max1 = TYPE_MAX_VALUE (i1);
14330 tree max2 = TYPE_MAX_VALUE (i2);
14331
14332 /* The minimum/maximum values have to be the same. */
14333 if ((min1 == min2
14334 || (min1 && min2
14335 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14336 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14337 || operand_equal_p (min1, min2, 0))))
14338 && (max1 == max2
14339 || (max1 && max2
14340 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14341 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14342 || operand_equal_p (max1, max2, 0)))))
14343 return true;
14344 else
14345 return false;
14346 }
14347 }
14348
14349 case METHOD_TYPE:
14350 case FUNCTION_TYPE:
14351 /* Function types are the same if the return type and arguments types
14352 are the same. */
14353 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14354 trust_type_canonical))
14355 return false;
14356
14357 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14358 return true;
14359 else
14360 {
14361 tree parms1, parms2;
14362
14363 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14364 parms1 && parms2;
14365 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14366 {
14367 if (!gimple_canonical_types_compatible_p
14368 (TREE_VALUE (parms1), TREE_VALUE (parms2),
14369 trust_type_canonical))
14370 return false;
14371 }
14372
14373 if (parms1 || parms2)
14374 return false;
14375
14376 return true;
14377 }
14378
14379 case RECORD_TYPE:
14380 case UNION_TYPE:
14381 case QUAL_UNION_TYPE:
14382 {
14383 tree f1, f2;
14384
14385 /* Don't try to compare variants of an incomplete type, before
14386 TYPE_FIELDS has been copied around. */
14387 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14388 return true;
14389
14390
14391 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14392 return false;
14393
14394 /* For aggregate types, all the fields must be the same. */
14395 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14396 f1 || f2;
14397 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14398 {
14399 /* Skip non-fields and zero-sized fields. */
14400 while (f1 && (TREE_CODE (f1) != FIELD_DECL
14401 || (DECL_SIZE (f1)
14402 && integer_zerop (DECL_SIZE (f1)))))
14403 f1 = TREE_CHAIN (f1);
14404 while (f2 && (TREE_CODE (f2) != FIELD_DECL
14405 || (DECL_SIZE (f2)
14406 && integer_zerop (DECL_SIZE (f2)))))
14407 f2 = TREE_CHAIN (f2);
14408 if (!f1 || !f2)
14409 break;
14410 /* The fields must have the same name, offset and type. */
14411 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14412 || !gimple_compare_field_offset (f1, f2)
14413 || !gimple_canonical_types_compatible_p
14414 (TREE_TYPE (f1), TREE_TYPE (f2),
14415 trust_type_canonical))
14416 return false;
14417 }
14418
14419 /* If one aggregate has more fields than the other, they
14420 are not the same. */
14421 if (f1 || f2)
14422 return false;
14423
14424 return true;
14425 }
14426
14427 default:
14428 /* Consider all types with language specific trees in them mutually
14429 compatible. This is executed only from verify_type and false
14430 positives can be tolerated. */
14431 gcc_assert (!in_lto_p);
14432 return true;
14433 }
14434 }
14435
14436 /* Verify type T. */
14437
14438 void
14439 verify_type (const_tree t)
14440 {
14441 bool error_found = false;
14442 tree mv = TYPE_MAIN_VARIANT (t);
14443 if (!mv)
14444 {
14445 error ("main variant is not defined");
14446 error_found = true;
14447 }
14448 else if (mv != TYPE_MAIN_VARIANT (mv))
14449 {
14450 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14451 debug_tree (mv);
14452 error_found = true;
14453 }
14454 else if (t != mv && !verify_type_variant (t, mv))
14455 error_found = true;
14456
14457 tree ct = TYPE_CANONICAL (t);
14458 if (!ct)
14459 ;
14460 else if (TYPE_CANONICAL (t) != ct)
14461 {
14462 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14463 debug_tree (ct);
14464 error_found = true;
14465 }
14466 /* Method and function types cannot be used to address memory and thus
14467 TYPE_CANONICAL really matters only for determining useless conversions.
14468
14469 FIXME: C++ FE produce declarations of builtin functions that are not
14470 compatible with main variants. */
14471 else if (TREE_CODE (t) == FUNCTION_TYPE)
14472 ;
14473 else if (t != ct
14474 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14475 with variably sized arrays because their sizes possibly
14476 gimplified to different variables. */
14477 && !variably_modified_type_p (ct, NULL)
14478 && !gimple_canonical_types_compatible_p (t, ct, false)
14479 && COMPLETE_TYPE_P (t))
14480 {
14481 error ("%<TYPE_CANONICAL%> is not compatible");
14482 debug_tree (ct);
14483 error_found = true;
14484 }
14485
14486 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14487 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14488 {
14489 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14490 debug_tree (ct);
14491 error_found = true;
14492 }
14493 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14494 {
14495 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14496 debug_tree (ct);
14497 debug_tree (TYPE_MAIN_VARIANT (ct));
14498 error_found = true;
14499 }
14500
14501
14502 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14503 if (RECORD_OR_UNION_TYPE_P (t))
14504 {
14505 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14506 and danagle the pointer from time to time. */
14507 if (TYPE_VFIELD (t)
14508 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14509 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14510 {
14511 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14512 debug_tree (TYPE_VFIELD (t));
14513 error_found = true;
14514 }
14515 }
14516 else if (TREE_CODE (t) == POINTER_TYPE)
14517 {
14518 if (TYPE_NEXT_PTR_TO (t)
14519 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14520 {
14521 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14522 debug_tree (TYPE_NEXT_PTR_TO (t));
14523 error_found = true;
14524 }
14525 }
14526 else if (TREE_CODE (t) == REFERENCE_TYPE)
14527 {
14528 if (TYPE_NEXT_REF_TO (t)
14529 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14530 {
14531 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14532 debug_tree (TYPE_NEXT_REF_TO (t));
14533 error_found = true;
14534 }
14535 }
14536 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14537 || TREE_CODE (t) == FIXED_POINT_TYPE)
14538 {
14539 /* FIXME: The following check should pass:
14540 useless_type_conversion_p (const_cast <tree> (t),
14541 TREE_TYPE (TYPE_MIN_VALUE (t))
14542 but does not for C sizetypes in LTO. */
14543 }
14544
14545 /* Check various uses of TYPE_MAXVAL_RAW. */
14546 if (RECORD_OR_UNION_TYPE_P (t))
14547 {
14548 if (!TYPE_BINFO (t))
14549 ;
14550 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14551 {
14552 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14553 debug_tree (TYPE_BINFO (t));
14554 error_found = true;
14555 }
14556 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14557 {
14558 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14559 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14560 error_found = true;
14561 }
14562 }
14563 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14564 {
14565 if (TYPE_METHOD_BASETYPE (t)
14566 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14567 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14568 {
14569 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14570 debug_tree (TYPE_METHOD_BASETYPE (t));
14571 error_found = true;
14572 }
14573 }
14574 else if (TREE_CODE (t) == OFFSET_TYPE)
14575 {
14576 if (TYPE_OFFSET_BASETYPE (t)
14577 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14578 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14579 {
14580 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14581 debug_tree (TYPE_OFFSET_BASETYPE (t));
14582 error_found = true;
14583 }
14584 }
14585 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14586 || TREE_CODE (t) == FIXED_POINT_TYPE)
14587 {
14588 /* FIXME: The following check should pass:
14589 useless_type_conversion_p (const_cast <tree> (t),
14590 TREE_TYPE (TYPE_MAX_VALUE (t))
14591 but does not for C sizetypes in LTO. */
14592 }
14593 else if (TREE_CODE (t) == ARRAY_TYPE)
14594 {
14595 if (TYPE_ARRAY_MAX_SIZE (t)
14596 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14597 {
14598 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14599 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14600 error_found = true;
14601 }
14602 }
14603 else if (TYPE_MAX_VALUE_RAW (t))
14604 {
14605 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14606 debug_tree (TYPE_MAX_VALUE_RAW (t));
14607 error_found = true;
14608 }
14609
14610 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14611 {
14612 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14613 debug_tree (TYPE_LANG_SLOT_1 (t));
14614 error_found = true;
14615 }
14616
14617 /* Check various uses of TYPE_VALUES_RAW. */
14618 if (TREE_CODE (t) == ENUMERAL_TYPE)
14619 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14620 {
14621 tree value = TREE_VALUE (l);
14622 tree name = TREE_PURPOSE (l);
14623
14624 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14625 CONST_DECL of ENUMERAL TYPE. */
14626 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14627 {
14628 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14629 debug_tree (value);
14630 debug_tree (name);
14631 error_found = true;
14632 }
14633 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14634 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14635 {
14636 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14637 "to the enum");
14638 debug_tree (value);
14639 debug_tree (name);
14640 error_found = true;
14641 }
14642 if (TREE_CODE (name) != IDENTIFIER_NODE)
14643 {
14644 error ("enum value name is not %<IDENTIFIER_NODE%>");
14645 debug_tree (value);
14646 debug_tree (name);
14647 error_found = true;
14648 }
14649 }
14650 else if (TREE_CODE (t) == ARRAY_TYPE)
14651 {
14652 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14653 {
14654 error ("array %<TYPE_DOMAIN%> is not integer type");
14655 debug_tree (TYPE_DOMAIN (t));
14656 error_found = true;
14657 }
14658 }
14659 else if (RECORD_OR_UNION_TYPE_P (t))
14660 {
14661 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14662 {
14663 error ("%<TYPE_FIELDS%> defined in incomplete type");
14664 error_found = true;
14665 }
14666 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14667 {
14668 /* TODO: verify properties of decls. */
14669 if (TREE_CODE (fld) == FIELD_DECL)
14670 ;
14671 else if (TREE_CODE (fld) == TYPE_DECL)
14672 ;
14673 else if (TREE_CODE (fld) == CONST_DECL)
14674 ;
14675 else if (VAR_P (fld))
14676 ;
14677 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14678 ;
14679 else if (TREE_CODE (fld) == USING_DECL)
14680 ;
14681 else if (TREE_CODE (fld) == FUNCTION_DECL)
14682 ;
14683 else
14684 {
14685 error ("wrong tree in %<TYPE_FIELDS%> list");
14686 debug_tree (fld);
14687 error_found = true;
14688 }
14689 }
14690 }
14691 else if (TREE_CODE (t) == INTEGER_TYPE
14692 || TREE_CODE (t) == BOOLEAN_TYPE
14693 || TREE_CODE (t) == OFFSET_TYPE
14694 || TREE_CODE (t) == REFERENCE_TYPE
14695 || TREE_CODE (t) == NULLPTR_TYPE
14696 || TREE_CODE (t) == POINTER_TYPE)
14697 {
14698 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14699 {
14700 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14701 "is %p",
14702 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14703 error_found = true;
14704 }
14705 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14706 {
14707 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14708 debug_tree (TYPE_CACHED_VALUES (t));
14709 error_found = true;
14710 }
14711 /* Verify just enough of cache to ensure that no one copied it to new type.
14712 All copying should go by copy_node that should clear it. */
14713 else if (TYPE_CACHED_VALUES_P (t))
14714 {
14715 int i;
14716 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14717 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14718 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14719 {
14720 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14721 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14722 error_found = true;
14723 break;
14724 }
14725 }
14726 }
14727 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14728 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14729 {
14730 /* C++ FE uses TREE_PURPOSE to store initial values. */
14731 if (TREE_PURPOSE (l) && in_lto_p)
14732 {
14733 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14734 debug_tree (l);
14735 error_found = true;
14736 }
14737 if (!TYPE_P (TREE_VALUE (l)))
14738 {
14739 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14740 debug_tree (l);
14741 error_found = true;
14742 }
14743 }
14744 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14745 {
14746 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14747 debug_tree (TYPE_VALUES_RAW (t));
14748 error_found = true;
14749 }
14750 if (TREE_CODE (t) != INTEGER_TYPE
14751 && TREE_CODE (t) != BOOLEAN_TYPE
14752 && TREE_CODE (t) != OFFSET_TYPE
14753 && TREE_CODE (t) != REFERENCE_TYPE
14754 && TREE_CODE (t) != NULLPTR_TYPE
14755 && TREE_CODE (t) != POINTER_TYPE
14756 && TYPE_CACHED_VALUES_P (t))
14757 {
14758 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14759 error_found = true;
14760 }
14761
14762 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14763 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
14764 of a type. */
14765 if (TREE_CODE (t) == METHOD_TYPE
14766 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14767 {
14768 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14769 error_found = true;
14770 }
14771
14772 if (error_found)
14773 {
14774 debug_tree (const_cast <tree> (t));
14775 internal_error ("%qs failed", __func__);
14776 }
14777 }
14778
14779
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.

   ARG may be an INTEGER_CST, a (chain of) conversion expressions, or an
   SSA_NAME; anything else yields 3 ("unknown").  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  /* Guard against unbounded walks through long conversion chains;
     give up after 30 steps.  */
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* Sign-extend to ARG's precision and test the sign bit.  */
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Look through widening (or precision-preserving) conversions in
     GENERIC form.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* For SSA names, consult recorded value-range information; when no
     range is recorded, look through widening conversions in the defining
     statements, same as the GENERIC walk above.  */
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}
14858
14859
14860
14861
14862 /* Return true if ARG is marked with the nonnull attribute in the
14863 current function signature. */
14864
14865 bool
14866 nonnull_arg_p (const_tree arg)
14867 {
14868 tree t, attrs, fntype;
14869 unsigned HOST_WIDE_INT arg_num;
14870
14871 gcc_assert (TREE_CODE (arg) == PARM_DECL
14872 && (POINTER_TYPE_P (TREE_TYPE (arg))
14873 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14874
14875 /* The static chain decl is always non null. */
14876 if (arg == cfun->static_chain_decl)
14877 return true;
14878
14879 /* THIS argument of method is always non-NULL. */
14880 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14881 && arg == DECL_ARGUMENTS (cfun->decl)
14882 && flag_delete_null_pointer_checks)
14883 return true;
14884
14885 /* Values passed by reference are always non-NULL. */
14886 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14887 && flag_delete_null_pointer_checks)
14888 return true;
14889
14890 fntype = TREE_TYPE (cfun->decl);
14891 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14892 {
14893 attrs = lookup_attribute ("nonnull", attrs);
14894
14895 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14896 if (attrs == NULL_TREE)
14897 return false;
14898
14899 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14900 if (TREE_VALUE (attrs) == NULL_TREE)
14901 return true;
14902
14903 /* Get the position number for ARG in the function signature. */
14904 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14905 t;
14906 t = DECL_CHAIN (t), arg_num++)
14907 {
14908 if (t == arg)
14909 break;
14910 }
14911
14912 gcc_assert (t == arg);
14913
14914 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14915 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14916 {
14917 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14918 return true;
14919 }
14920 }
14921
14922 return false;
14923 }
14924
14925 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14926 information. */
14927
14928 location_t
14929 set_block (location_t loc, tree block)
14930 {
14931 location_t pure_loc = get_pure_location (loc);
14932 source_range src_range = get_range_from_loc (line_table, loc);
14933 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14934 }
14935
14936 location_t
14937 set_source_range (tree expr, location_t start, location_t finish)
14938 {
14939 source_range src_range;
14940 src_range.m_start = start;
14941 src_range.m_finish = finish;
14942 return set_source_range (expr, src_range);
14943 }
14944
14945 location_t
14946 set_source_range (tree expr, source_range src_range)
14947 {
14948 if (!EXPR_P (expr))
14949 return UNKNOWN_LOCATION;
14950
14951 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14952 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14953 pure_loc,
14954 src_range,
14955 NULL);
14956 SET_EXPR_LOCATION (expr, adhoc);
14957 return adhoc;
14958 }
14959
14960 /* Return EXPR, potentially wrapped with a node expression LOC,
14961 if !CAN_HAVE_LOCATION_P (expr).
14962
14963 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14964 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14965
14966 Wrapper nodes can be identified using location_wrapper_p. */
14967
14968 tree
14969 maybe_wrap_with_location (tree expr, location_t loc)
14970 {
14971 if (expr == NULL)
14972 return NULL;
14973 if (loc == UNKNOWN_LOCATION)
14974 return expr;
14975 if (CAN_HAVE_LOCATION_P (expr))
14976 return expr;
14977 /* We should only be adding wrappers for constants and for decls,
14978 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14979 gcc_assert (CONSTANT_CLASS_P (expr)
14980 || DECL_P (expr)
14981 || EXCEPTIONAL_CLASS_P (expr));
14982
14983 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14984 any impact of the wrapper nodes. */
14985 if (EXCEPTIONAL_CLASS_P (expr))
14986 return expr;
14987
14988 /* If any auto_suppress_location_wrappers are active, don't create
14989 wrappers. */
14990 if (suppress_location_wrappers > 0)
14991 return expr;
14992
14993 tree_code code
14994 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14995 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14996 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14997 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14998 /* Mark this node as being a wrapper. */
14999 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
15000 return wrapper;
15001 }
15002
/* While non-zero, maybe_wrap_with_location creates no wrapper nodes
   (see the auto_suppress_location_wrappers check there).  */
int suppress_location_wrappers;
15004
15005 /* Return the name of combined function FN, for debugging purposes. */
15006
15007 const char *
15008 combined_fn_name (combined_fn fn)
15009 {
15010 if (builtin_fn_p (fn))
15011 {
15012 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
15013 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
15014 }
15015 else
15016 return internal_fn_name (as_internal_fn (fn));
15017 }
15018
15019 /* Return a bitmap with a bit set corresponding to each argument in
15020 a function call type FNTYPE declared with attribute nonnull,
15021 or null if none of the function's argument are nonnull. The caller
15022 must free the bitmap. */
15023
15024 bitmap
15025 get_nonnull_args (const_tree fntype)
15026 {
15027 if (fntype == NULL_TREE)
15028 return NULL;
15029
15030 tree attrs = TYPE_ATTRIBUTES (fntype);
15031 if (!attrs)
15032 return NULL;
15033
15034 bitmap argmap = NULL;
15035
15036 /* A function declaration can specify multiple attribute nonnull,
15037 each with zero or more arguments. The loop below creates a bitmap
15038 representing a union of all the arguments. An empty (but non-null)
15039 bitmap means that all arguments have been declaraed nonnull. */
15040 for ( ; attrs; attrs = TREE_CHAIN (attrs))
15041 {
15042 attrs = lookup_attribute ("nonnull", attrs);
15043 if (!attrs)
15044 break;
15045
15046 if (!argmap)
15047 argmap = BITMAP_ALLOC (NULL);
15048
15049 if (!TREE_VALUE (attrs))
15050 {
15051 /* Clear the bitmap in case a previous attribute nonnull
15052 set it and this one overrides it for all arguments. */
15053 bitmap_clear (argmap);
15054 return argmap;
15055 }
15056
15057 /* Iterate over the indices of the format arguments declared nonnull
15058 and set a bit for each. */
15059 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15060 {
15061 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15062 bitmap_set_bit (argmap, val);
15063 }
15064 }
15065
15066 return argmap;
15067 }
15068
15069 /* Returns true if TYPE is a type where it and all of its subobjects
15070 (recursively) are of structure, union, or array type. */
15071
15072 static bool
15073 default_is_empty_type (tree type)
15074 {
15075 if (RECORD_OR_UNION_TYPE_P (type))
15076 {
15077 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15078 if (TREE_CODE (field) == FIELD_DECL
15079 && !DECL_PADDING_P (field)
15080 && !default_is_empty_type (TREE_TYPE (field)))
15081 return false;
15082 return true;
15083 }
15084 else if (TREE_CODE (type) == ARRAY_TYPE)
15085 return (integer_minus_onep (array_type_nelts (type))
15086 || TYPE_DOMAIN (type) == NULL_TREE
15087 || default_is_empty_type (TREE_TYPE (type)));
15088 return false;
15089 }
15090
15091 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
15092 that shouldn't be passed via stack. */
15093
15094 bool
15095 default_is_empty_record (const_tree type)
15096 {
15097 if (!abi_version_at_least (12))
15098 return false;
15099
15100 if (type == error_mark_node)
15101 return false;
15102
15103 if (TREE_ADDRESSABLE (type))
15104 return false;
15105
15106 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
15107 }
15108
15109 /* Like int_size_in_bytes, but handle empty records specially. */
15110
15111 HOST_WIDE_INT
15112 arg_int_size_in_bytes (const_tree type)
15113 {
15114 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15115 }
15116
15117 /* Like size_in_bytes, but handle empty records specially. */
15118
15119 tree
15120 arg_size_in_bytes (const_tree type)
15121 {
15122 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15123 }
15124
15125 /* Return true if an expression with CODE has to have the same result type as
15126 its first operand. */
15127
15128 bool
15129 expr_type_first_operand_type_p (tree_code code)
15130 {
15131 switch (code)
15132 {
15133 case NEGATE_EXPR:
15134 case ABS_EXPR:
15135 case BIT_NOT_EXPR:
15136 case PAREN_EXPR:
15137 case CONJ_EXPR:
15138
15139 case PLUS_EXPR:
15140 case MINUS_EXPR:
15141 case MULT_EXPR:
15142 case TRUNC_DIV_EXPR:
15143 case CEIL_DIV_EXPR:
15144 case FLOOR_DIV_EXPR:
15145 case ROUND_DIV_EXPR:
15146 case TRUNC_MOD_EXPR:
15147 case CEIL_MOD_EXPR:
15148 case FLOOR_MOD_EXPR:
15149 case ROUND_MOD_EXPR:
15150 case RDIV_EXPR:
15151 case EXACT_DIV_EXPR:
15152 case MIN_EXPR:
15153 case MAX_EXPR:
15154 case BIT_IOR_EXPR:
15155 case BIT_XOR_EXPR:
15156 case BIT_AND_EXPR:
15157
15158 case LSHIFT_EXPR:
15159 case RSHIFT_EXPR:
15160 case LROTATE_EXPR:
15161 case RROTATE_EXPR:
15162 return true;
15163
15164 default:
15165 return false;
15166 }
15167 }
15168
15169 /* Return a typenode for the "standard" C type with a given name. */
15170 tree
15171 get_typenode_from_name (const char *name)
15172 {
15173 if (name == NULL || *name == '\0')
15174 return NULL_TREE;
15175
15176 if (strcmp (name, "char") == 0)
15177 return char_type_node;
15178 if (strcmp (name, "unsigned char") == 0)
15179 return unsigned_char_type_node;
15180 if (strcmp (name, "signed char") == 0)
15181 return signed_char_type_node;
15182
15183 if (strcmp (name, "short int") == 0)
15184 return short_integer_type_node;
15185 if (strcmp (name, "short unsigned int") == 0)
15186 return short_unsigned_type_node;
15187
15188 if (strcmp (name, "int") == 0)
15189 return integer_type_node;
15190 if (strcmp (name, "unsigned int") == 0)
15191 return unsigned_type_node;
15192
15193 if (strcmp (name, "long int") == 0)
15194 return long_integer_type_node;
15195 if (strcmp (name, "long unsigned int") == 0)
15196 return long_unsigned_type_node;
15197
15198 if (strcmp (name, "long long int") == 0)
15199 return long_long_integer_type_node;
15200 if (strcmp (name, "long long unsigned int") == 0)
15201 return long_long_unsigned_type_node;
15202
15203 gcc_unreachable ();
15204 }
15205
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Each entry is { recorded pointer-type node, generic pointer type used
   in its place, struct tag name }.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
15219
/* Return the maximum object size, i.e. the largest value representable
   in ptrdiff_type_node (PTRDIFF_MAX for the target).  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
15228
15229 #if CHECKING_P
15230
15231 namespace selftest {
15232
15233 /* Selftests for tree. */
15234
15235 /* Verify that integer constants are sane. */
15236
15237 static void
15238 test_integer_constants ()
15239 {
15240 ASSERT_TRUE (integer_type_node != NULL);
15241 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15242
15243 tree type = integer_type_node;
15244
15245 tree zero = build_zero_cst (type);
15246 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15247 ASSERT_EQ (type, TREE_TYPE (zero));
15248
15249 tree one = build_int_cst (type, 1);
15250 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15251 ASSERT_EQ (type, TREE_TYPE (zero));
15252 }
15253
15254 /* Verify identifiers. */
15255
15256 static void
15257 test_identifiers ()
15258 {
15259 tree identifier = get_identifier ("foo");
15260 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15261 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15262 }
15263
15264 /* Verify LABEL_DECL. */
15265
15266 static void
15267 test_labels ()
15268 {
15269 tree identifier = get_identifier ("err");
15270 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15271 identifier, void_type_node);
15272 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15273 ASSERT_FALSE (FORCED_LABEL (label_decl));
15274 }
15275
15276 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15277 are given by VALS. */
15278
15279 static tree
15280 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15281 {
15282 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15283 tree_vector_builder builder (type, vals.length (), 1);
15284 builder.splice (vals);
15285 return builder.build ();
15286 }
15287
15288 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15289
15290 static void
15291 check_vector_cst (vec<tree> expected, tree actual)
15292 {
15293 ASSERT_KNOWN_EQ (expected.length (),
15294 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15295 for (unsigned int i = 0; i < expected.length (); ++i)
15296 ASSERT_EQ (wi::to_wide (expected[i]),
15297 wi::to_wide (vector_cst_elt (actual, i)));
15298 }
15299
15300 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15301 and that its elements match EXPECTED. */
15302
15303 static void
15304 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15305 unsigned int npatterns)
15306 {
15307 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15308 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15309 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15310 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15311 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15312 check_vector_cst (expected, actual);
15313 }
15314
15315 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15316 and NPATTERNS background elements, and that its elements match
15317 EXPECTED. */
15318
15319 static void
15320 check_vector_cst_fill (vec<tree> expected, tree actual,
15321 unsigned int npatterns)
15322 {
15323 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15324 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15325 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15326 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15327 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15328 check_vector_cst (expected, actual);
15329 }
15330
15331 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15332 and that its elements match EXPECTED. */
15333
15334 static void
15335 check_vector_cst_stepped (vec<tree> expected, tree actual,
15336 unsigned int npatterns)
15337 {
15338 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15339 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15340 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15341 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15342 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15343 check_vector_cst (expected, actual);
15344 }
15345
/* Test the creation of VECTOR_CSTs.  Build a series of 8-element
   vectors and check that each one is encoded with the expected
   pattern structure (stepped, duplicated, or foreground/background
   fill).  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  /* 16-bit unsigned elements, so element arithmetic wraps at 2^16.  */
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100; the series
     is still encodable because the step is inferred from elements
     1 and 2, not from element 0:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around the 16-bit limit.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value, i.e. two patterns that are
     each a duplicate:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions; the leading 41 and 97
     become per-pattern foreground values:
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step: the even pattern is a constant 100,
     the odd pattern steps by 14:
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant: the 9990 breaks the stepped
     encoding, so four fill patterns are needed:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
15429
/* Verify that STRIP_NOPS (NODE) is EXPECTED.
   Helper function for test_location_wrappers, to deal with STRIP_NOPS
   modifying its argument in-place: NODE is passed by value, so the
   caller's tree is untouched.  */

static void
check_strip_nops (tree node, tree expected)
{
  STRIP_NOPS (node);
  ASSERT_EQ (expected, node);
}
15440
/* Verify location wrappers: that maybe_wrap_with_location wraps
   constants and decls (and only those), that the wrappers are
   recognized by location_wrapper_p, and that they can be stripped
   again.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  /* NULL_TREE passes through unwrapped.  */
  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  A string wrapper uses VIEW_CONVERT_EXPR
     rather than NON_LVALUE_EXPR.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));


  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper: a NON_LVALUE_EXPR around a decl only counts as a wrapper
     when maybe_wrap_with_location created it.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
15503
/* Test various tree predicates on integer, real and complex constants.
   Verify that location wrappers don't affect the results: for each
   scalar constant C, the wrapped form wr_C must give the same answer
   as C itself.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  /* Complex constants with a zero imaginary part; only the real part
     varies.  */
  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  Unlike integer_minus_onep, this is false
     for complex constants.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  Unlike integer_onep, this is false for
     complex constants (the zero imaginary part is not one).  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
15820
/* Check that string escaping works correctly, both with and without a
   -fmessage-length limit.  Temporarily modifies the global diagnostic
   context's line cutoff and restores it before returning.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0, i.e. no line
     wrapping, so every control character is escaped.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape: with a
     line limit in force, newlines are kept so output can wrap.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
15863
/* Run all of the selftests within this file.  Called from the
   selftest framework; each callee aborts via the ASSERT_* macros on
   failure.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}
15877
15878 } // namespace selftest
15879
15880 #endif /* CHECKING_P */
15881
15882 #include "gt-tree.h"