[RS6000] Don't restore fixed regs
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but can occasionally
   call language-dependent routines.  */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67
/* Tree code classes.  */

/* Expand all-tree.def into an array mapping each tree code to its
   tcc_* class; the trailing END_OF_BASE_TREE_CODES entries separate
   the per-frontend code ranges.  */
#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
79
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
93
/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
105
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

/* Order must match enum tree_code_class exactly.  */
const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
123
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  Only meaningful when GATHER_STATISTICS;
   updated by record_node_allocation_statistics and free_node.  */

static int tree_code_counts[MAX_TREE_CODES];
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];
132
/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};
152
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
160
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;  /* Cached hash value for TYPE.  */
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  /* Drop cache entries whose type was not marked live by the GC.  */
  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
183
/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
219
/* General tree->tree mapping structure for use in hash tables.  */


/* DECL_DEBUG_EXPR and DECL_VALUE_EXPR side tables, keyed by decl.  */
static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  /* Keep an entry only while its key decl is GC-live.  */
  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
248
/* Forward declarations for helpers defined later in this file.  */
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

/* Well-known shared nodes, indexed by enum tree_index / integer_type_kind.  */
tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* tree_contains_struct[CODE][TS] is nonzero when nodes with tree code
   CODE contain the TS structure; filled in by
   initialize_tree_contains_struct.  */
unsigned char tree_contains_struct[MAX_TREE_CODES][64];
261
/* Number of operands for each OpenMP clause.
   Order must match enum omp_clause_code in tree-core.h and the names
   in omp_clause_code_name below.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  1, /* OMP_CLAUSE__CILK_FOR_COUNT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  2, /* OMP_CLAUSE__GRIDDIM_  */
};
334
/* Printable names of the OpenMP clauses, in the same order as
   omp_clause_num_ops above.  Note that, going by that table's order,
   both "to" entries are intentional: the first corresponds to
   OMP_CLAUSE_TO_DECLARE, the second to OMP_CLAUSE_TO.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "uniform",
  "to",
  "link",
  "from",
  "to",
  "map",
  "use_device_ptr",
  "is_device_ptr",
  "_cache_",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "priority",
  "grainsize",
  "num_tasks",
  "nogroup",
  "threads",
  "simd",
  "hint",
  "defaultmap",
  "_simduid_",
  "_simt_",
  "_Cilk_for_count_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length",
  "tile",
  "_griddim_"
};
406
407
408 /* Return the tree node structure used by tree code CODE. */
409
410 static inline enum tree_node_structure_enum
411 tree_node_structure_for_code (enum tree_code code)
412 {
413 switch (TREE_CODE_CLASS (code))
414 {
415 case tcc_declaration:
416 {
417 switch (code)
418 {
419 case FIELD_DECL:
420 return TS_FIELD_DECL;
421 case PARM_DECL:
422 return TS_PARM_DECL;
423 case VAR_DECL:
424 return TS_VAR_DECL;
425 case LABEL_DECL:
426 return TS_LABEL_DECL;
427 case RESULT_DECL:
428 return TS_RESULT_DECL;
429 case DEBUG_EXPR_DECL:
430 return TS_DECL_WRTL;
431 case CONST_DECL:
432 return TS_CONST_DECL;
433 case TYPE_DECL:
434 return TS_TYPE_DECL;
435 case FUNCTION_DECL:
436 return TS_FUNCTION_DECL;
437 case TRANSLATION_UNIT_DECL:
438 return TS_TRANSLATION_UNIT_DECL;
439 default:
440 return TS_DECL_NON_COMMON;
441 }
442 }
443 case tcc_type:
444 return TS_TYPE_NON_COMMON;
445 case tcc_reference:
446 case tcc_comparison:
447 case tcc_unary:
448 case tcc_binary:
449 case tcc_expression:
450 case tcc_statement:
451 case tcc_vl_exp:
452 return TS_EXP;
453 default: /* tcc_constant and tcc_exceptional */
454 break;
455 }
456 switch (code)
457 {
458 /* tcc_constant cases. */
459 case VOID_CST: return TS_TYPED;
460 case INTEGER_CST: return TS_INT_CST;
461 case REAL_CST: return TS_REAL_CST;
462 case FIXED_CST: return TS_FIXED_CST;
463 case COMPLEX_CST: return TS_COMPLEX;
464 case VECTOR_CST: return TS_VECTOR;
465 case STRING_CST: return TS_STRING;
466 /* tcc_exceptional cases. */
467 case ERROR_MARK: return TS_COMMON;
468 case IDENTIFIER_NODE: return TS_IDENTIFIER;
469 case TREE_LIST: return TS_LIST;
470 case TREE_VEC: return TS_VEC;
471 case SSA_NAME: return TS_SSA_NAME;
472 case PLACEHOLDER_EXPR: return TS_COMMON;
473 case STATEMENT_LIST: return TS_STATEMENT_LIST;
474 case BLOCK: return TS_BLOCK;
475 case CONSTRUCTOR: return TS_CONSTRUCTOR;
476 case TREE_BINFO: return TS_BINFO;
477 case OMP_CLAUSE: return TS_OMP_CLAUSE;
478 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
479 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
480
481 default:
482 gcc_unreachable ();
483 }
484 }
485
486
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  Each MARK_TS_*
	 macro sets the bit for one base structure and recursively marks
	 that base's own bases, so one call per case suffices.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
632
633
/* Init tree.c.  Creates the various hash tables and the shared nodes
   used as scratch space when interning constants and option nodes,
   then fills in tree_contains_struct.  Must run before any tree
   allocation.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  /* Scratch node reused when probing int_cst_hash_table.  */
  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  /* Scratch nodes reused when probing cl_option_hash_table.  */
  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
662
663 \f
/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  Computes the name lazily
   via the language hook on first use.  */
tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
}
674
675 /* When the target supports COMDAT groups, this indicates which group the
676 DECL is associated with. This can be either an IDENTIFIER_NODE or a
677 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
678 tree
679 decl_comdat_group (const_tree node)
680 {
681 struct symtab_node *snode = symtab_node::get (node);
682 if (!snode)
683 return NULL;
684 return snode->get_comdat_group ();
685 }
686
687 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
688 tree
689 decl_comdat_group_id (const_tree node)
690 {
691 struct symtab_node *snode = symtab_node::get (node);
692 if (!snode)
693 return NULL;
694 return snode->get_comdat_group_id ();
695 }
696
697 /* When the target supports named section, return its name as IDENTIFIER_NODE
698 or NULL if it is in no section. */
699 const char *
700 decl_section_name (const_tree node)
701 {
702 struct symtab_node *snode = symtab_node::get (node);
703 if (!snode)
704 return NULL;
705 return snode->get_section ();
706 }
707
708 /* Set section name of NODE to VALUE (that is expected to be
709 identifier node) */
710 void
711 set_decl_section_name (tree node, const char *value)
712 {
713 struct symtab_node *snode;
714
715 if (value == NULL)
716 {
717 snode = symtab_node::get (node);
718 if (!snode)
719 return;
720 }
721 else if (VAR_P (node))
722 snode = varpool_node::get_create (node);
723 else
724 snode = cgraph_node::get_create (node);
725 snode->set_section (value);
726 }
727
728 /* Return TLS model of a variable NODE. */
729 enum tls_model
730 decl_tls_model (const_tree node)
731 {
732 struct varpool_node *snode = varpool_node::get (node);
733 if (!snode)
734 return TLS_MODEL_NONE;
735 return snode->tls_model;
736 }
737
738 /* Set TLS model of variable NODE to MODEL. */
739 void
740 set_decl_tls_model (tree node, enum tls_model model)
741 {
742 struct varpool_node *vnode;
743
744 if (model == TLS_MODEL_NONE)
745 {
746 vnode = varpool_node::get (node);
747 if (!vnode)
748 return;
749 }
750 else
751 vnode = varpool_node::get_create (node);
752 vnode->tls_model = model;
753 }
754
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.
   Codes this file does not know about are delegated to the
   frontend's lang_hooks.tree_size.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      {
	switch (code)
	  {
	  case FIELD_DECL:
	    return sizeof (struct tree_field_decl);
	  case PARM_DECL:
	    return sizeof (struct tree_parm_decl);
	  case VAR_DECL:
	    return sizeof (struct tree_var_decl);
	  case LABEL_DECL:
	    return sizeof (struct tree_label_decl);
	  case RESULT_DECL:
	    return sizeof (struct tree_result_decl);
	  case CONST_DECL:
	    return sizeof (struct tree_const_decl);
	  case TYPE_DECL:
	    return sizeof (struct tree_type_decl);
	  case FUNCTION_DECL:
	    return sizeof (struct tree_function_decl);
	  case DEBUG_EXPR_DECL:
	    return sizeof (struct tree_decl_with_rtl);
	  case TRANSLATION_UNIT_DECL:
	    return sizeof (struct tree_translation_unit_decl);
	  case NAMESPACE_DECL:
	  case IMPORTED_DECL:
	  case NAMELIST_DECL:
	    return sizeof (struct tree_decl_non_common);
	  default:
	    return lang_hooks.tree_size (code);
	  }
      }

    case tcc_type:  /* a type node */
      return sizeof (struct tree_type_non_common);

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp embeds one operand; add space for the rest.  */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (struct tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case REAL_CST:		return sizeof (struct tree_real_cst);
	case FIXED_CST:		return sizeof (struct tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (struct tree_complex);
	case VECTOR_CST:	return sizeof (struct tree_vector);
	case STRING_CST:	gcc_unreachable ();
	default:
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (struct tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (struct tree_common);

	case TREE_VEC:
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (struct tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (struct tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (struct tree_constructor);
	case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);

	default:
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
850
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.
   For the variable-sized codes the size is the fixed header plus the
   trailing array of operands/elements actually present in NODE.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
	      + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));

    case STRING_CST:
      /* +1 for the NUL terminator stored after the string body.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
		* sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
892
/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  Maps CODE to its tree_node_kind bucket and bumps the
   per-code and per-kind counters; a no-op unless GATHER_STATISTICS.  */

static void
record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
				   size_t length ATTRIBUTE_UNUSED)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);
  tree_node_kind kind;

  if (!GATHER_STATISTICS)
    return;

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      kind = d_kind;
      break;

    case tcc_type:  /* a type node */
      kind = t_kind;
      break;

    case tcc_statement:  /* an expression with side effects */
      kind = s_kind;
      break;

    case tcc_reference:  /* a reference */
      kind = r_kind;
      break;

    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      kind = e_kind;
      break;

    case tcc_constant:  /* a constant */
      kind = c_kind;
      break;

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  kind = id_kind;
	  break;

	case TREE_VEC:
	  kind = vec_kind;
	  break;

	case TREE_BINFO:
	  kind = binfo_kind;
	  break;

	case SSA_NAME:
	  kind = ssa_name_kind;
	  break;

	case BLOCK:
	  kind = b_kind;
	  break;

	case CONSTRUCTOR:
	  kind = constr_kind;
	  break;

	case OMP_CLAUSE:
	  kind = omp_clause_kind;
	  break;

	default:
	  kind = x_kind;
	  break;
	}
      break;

    case tcc_vl_exp:
      kind = e_kind;
      break;

    default:
      gcc_unreachable ();
    }

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}
984
/* Allocate and return a new UID from the DECL_UID namespace.
   UIDs are monotonically increasing non-negative integers; debug
   decls draw from the separate negative next_debug_decl_uid space.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}
992
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  /* GC allocation is zero-initialized; only set what must differ.  */
  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	/* Debug decls use negative UIDs to catch erroneous uses.  */
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      /* A fresh type is its own main variant and canonical type.  */
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
1107
/* Free tree node.  Releases any out-of-band vectors the node owns,
   then returns the node's storage to the GC allocator.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      /* NOTE(review): record_node_allocation_statistics credits each
	 allocation to the kind matching its code class, but every freed
	 node is debited from the t_kind ("types") bucket here regardless
	 of its actual kind, so per-kind statistics drift when non-type
	 nodes are freed — confirm whether this is intentional.  */
      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) t_kind]--;
      tree_node_sizes[(int) t_kind] -= tree_size (node);
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  ggc_free (node);
}
1128 \f
1129 /* Return a new node with the same contents as NODE except that its
1130 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1131
tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  /* Statement lists are not plain blocks of memory; they cannot be
     duplicated with a flat memcpy.  */
  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  /* Start from a bitwise copy, then clear or re-seat the fields that
     must not be shared between the original and the copy.  */
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      /* The copy gets its own UID; debug decls draw from a separate,
	 descending counter.  */
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    /* Keep the explicitly-set points-to UID of the original.  */
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  /* Register the same value-expr for the copy and mark it as
	     having one.  */
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  /* The copy does not take over the original's body or its
	     symbol-table entry.  */
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      /* Give the copy its own target-option block.  */
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      /* Likewise for the optimization-option block.  */
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
1217
1218 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1219 For example, this can copy a list made of TREE_LIST nodes. */
1220
1221 tree
1222 copy_list (tree list)
1223 {
1224 tree head;
1225 tree prev, next;
1226
1227 if (list == 0)
1228 return 0;
1229
1230 head = prev = copy_node (list);
1231 next = TREE_CHAIN (list);
1232 while (next)
1233 {
1234 TREE_CHAIN (prev) = copy_node (next);
1235 prev = TREE_CHAIN (prev);
1236 next = TREE_CHAIN (next);
1237 }
1238 return head;
1239 }
1240
1241 \f
1242 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1243 INTEGER_CST with value CST and type TYPE. */
1244
1245 static unsigned int
1246 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1247 {
1248 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1249 /* We need extra HWIs if CST is an unsigned integer with its
1250 upper bit set. */
1251 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1252 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1253 return cst.get_len ();
1254 }
1255
1256 /* Return a new INTEGER_CST with value CST and type TYPE. */
1257
static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Extra elements are needed (unsigned value with the top bit set):
	 wide_int implicitly sign-extends from its top element, so the
	 missing elements are all-ones, with the topmost masked down to
	 the bits that lie inside the precision (zero extension above).  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      /* Unsigned value whose top element spills past the precision:
	 zero-extend that element in place.  */
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  /* Copy the remaining elements verbatim.  */
  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
1287
1288 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1289
1290 tree
1291 build_int_cst (tree type, HOST_WIDE_INT low)
1292 {
1293 /* Support legacy code. */
1294 if (!type)
1295 type = integer_type_node;
1296
1297 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1298 }
1299
/* Create an INT_CST node with a CST value zero extended to TYPE.  */

tree
build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}
1305
1306 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1307
1308 tree
1309 build_int_cst_type (tree type, HOST_WIDE_INT low)
1310 {
1311 gcc_assert (type);
1312 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1313 }
1314
/* Construct a tree of type TYPE with the value given by CST.  The
   signedness of CST is assumed to be the same as the signedness of TYPE.  */
1317
tree
double_int_to_tree (tree type, double_int cst)
{
  /* Widen CST according to TYPE's sign, then let wide_int_to_tree
     canonicalize and share the result.  */
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}
1323
1324 /* We force the wide_int CST to the range of the type TYPE by sign or
1325 zero extending it. OVERFLOWABLE indicates if we are interested in
1326 overflow of the value, when >0 we are only interested in signed
1327 overflow, for <0 we are interested in any overflow. OVERFLOWED
1328 indicates whether overflow has already occurred. CONST_OVERFLOWED
1329 indicates whether constant overflow has already occurred. We force
1330 T's value to be within range of T's type (by setting to 0 or 1 all
1331 the bits outside the type's range). We set TREE_OVERFLOWED if,
1332 OVERFLOWED is nonzero,
1333 or OVERFLOWABLE is >0 and signed overflow occurs
1334 or OVERFLOWABLE is <0 and any overflow occurs
1335 We return a new tree node for the extended wide_int. The node
1336 is shared if no overflow flags are set. */
1337
1338
1339 tree
1340 force_fit_type (tree type, const wide_int_ref &cst,
1341 int overflowable, bool overflowed)
1342 {
1343 signop sign = TYPE_SIGN (type);
1344
1345 /* If we need to set overflow flags, return a new unshared node. */
1346 if (overflowed || !wi::fits_to_tree_p (cst, type))
1347 {
1348 if (overflowed
1349 || overflowable < 0
1350 || (overflowable > 0 && sign == SIGNED))
1351 {
1352 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1353 tree t = build_new_int_cst (type, tmp);
1354 TREE_OVERFLOW (t) = 1;
1355 return t;
1356 }
1357 }
1358
1359 /* Else build a shared node. */
1360 return wide_int_to_tree (type, cst);
1361 }
1362
1363 /* These are the hash table functions for the hash table of INTEGER_CST
1364 nodes of a sizetype. */
1365
1366 /* Return the hash code X, an INTEGER_CST. */
1367
1368 hashval_t
1369 int_cst_hasher::hash (tree x)
1370 {
1371 const_tree const t = x;
1372 hashval_t code = TYPE_UID (TREE_TYPE (t));
1373 int i;
1374
1375 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1376 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1377
1378 return code;
1379 }
1380
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y.  */
1383
1384 bool
1385 int_cst_hasher::equal (tree x, tree y)
1386 {
1387 const_tree const xt = x;
1388 const_tree const yt = y;
1389
1390 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1391 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1392 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1393 return false;
1394
1395 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1396 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1397 return false;
1398
1399 return true;
1400 }
1401
1402 /* Create an INT_CST node of TYPE and value CST.
1403 The returned node is always shared. For small integers we use a
1404 per-type vector cache, for larger ones we use a single hash table.
1405 The value is extended from its precision according to the sign of
1406 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1407 the upper bits and ensures that hashing and value equality based
1408 upon the underlying HOST_WIDE_INTs works without masking. */
1409
tree
wide_int_to_tree (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;	/* Index into TYPE's small-values cache; -1 = not cached.  */
  int limit = 0;	/* Size of that cache for TYPE.  */

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      /* A redundant top element would mean the length is not minimal.  */
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  /* Extend PCST to TYPE's precision and sign; see the function comment
     for why the value is kept extended to whole HWIs.  */
  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether HWI belongs in TYPE's per-type vector of small
	 shared constants, and at which index.  */
      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case POINTER_BOUNDS_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = INTEGER_SHARE_LIMIT;
	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N); -1 occupies slot 0.  */
	      limit = INTEGER_SHARE_LIMIT + 1;
	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
    }

  return t;
}
1555
/* Cache INTEGER_CST T in its type's small-values vector or in the hash
   table of larger shared constants, so subsequent wide_int_to_tree
   calls for the same value can return a shared node.  T must not have
   TREE_OVERFLOW set.  */

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  int ix = -1;	/* Index into TYPE's small-values cache; -1 = not cached.  */
  int limit = 0;	/* Size of that cache for TYPE.  */
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* Mirror the caching policy of wide_int_to_tree.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (t, 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = INTEGER_SHARE_LIMIT;

	  /* This is a little hokie, but if the prec is smaller than
	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N; -1 occupies slot 0.  */
	  limit = INTEGER_SHARE_LIMIT + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (t))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      /* The slot must not already hold a node for this value.  */
      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::eq_p (tree (*slot), t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}
1659
1660
1661 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1662 and the rest are zeros. */
1663
1664 tree
1665 build_low_bits_mask (tree type, unsigned bits)
1666 {
1667 gcc_assert (bits <= TYPE_PRECISION (type));
1668
1669 return wide_int_to_tree (type, wi::mask (bits, false,
1670 TYPE_PRECISION (type)));
1671 }
1672
1673 /* Checks that X is integer constant that can be expressed in (unsigned)
1674 HOST_WIDE_INT without loss of precision. */
1675
1676 bool
1677 cst_and_fits_in_hwi (const_tree x)
1678 {
1679 return (TREE_CODE (x) == INTEGER_CST
1680 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1681 }
1682
1683 /* Build a newly constructed VECTOR_CST node of length LEN. */
1684
1685 tree
1686 make_vector (unsigned len MEM_STAT_DECL)
1687 {
1688 tree t;
1689 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1690
1691 record_node_allocation_statistics (VECTOR_CST, length);
1692
1693 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1694
1695 TREE_SET_CODE (t, VECTOR_CST);
1696 TREE_CONSTANT (t) = 1;
1697
1698 return t;
1699 }
1700
1701 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1702 are in a list pointed to by VALS. */
1703
1704 tree
1705 build_vector (tree type, tree *vals MEM_STAT_DECL)
1706 {
1707 int over = 0;
1708 unsigned cnt = 0;
1709 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1710 TREE_TYPE (v) = type;
1711
1712 /* Iterate through elements and check for overflow. */
1713 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1714 {
1715 tree value = vals[cnt];
1716
1717 VECTOR_CST_ELT (v, cnt) = value;
1718
1719 /* Don't crash if we get an address constant. */
1720 if (!CONSTANT_CLASS_P (value))
1721 continue;
1722
1723 over |= TREE_OVERFLOW (value);
1724 }
1725
1726 TREE_OVERFLOW (v) = over;
1727 return v;
1728 }
1729
1730 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1731 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1732
tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
  unsigned HOST_WIDE_INT idx, pos = 0;
  tree value;

  /* Flatten the constructor: a VECTOR_CST element contributes each of
     its scalar elements in turn.  NOTE(review): there is no bounds
     check here, so callers presumably guarantee the flattened element
     count does not exceed TYPE_VECTOR_SUBPARTS (type) -- confirm.  */
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
	  vec[pos++] = VECTOR_CST_ELT (value, i);
      else
	vec[pos++] = value;
    }
  /* Missing trailing elements default to zero.  */
  while (pos < TYPE_VECTOR_SUBPARTS (type))
    vec[pos++] = build_zero_cst (TREE_TYPE (type));

  return build_vector (type, vec);
}
1753
1754 /* Build a vector of type VECTYPE where all the elements are SCs. */
1755 tree
1756 build_vector_from_val (tree vectype, tree sc)
1757 {
1758 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1759
1760 if (sc == error_mark_node)
1761 return sc;
1762
1763 /* Verify that the vector type is suitable for SC. Note that there
1764 is some inconsistency in the type-system with respect to restrict
1765 qualifications of pointers. Vector types always have a main-variant
1766 element type and the qualification is applied to the vector-type.
1767 So TREE_TYPE (vector-type) does not return a properly qualified
1768 vector element-type. */
1769 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1770 TREE_TYPE (vectype)));
1771
1772 if (CONSTANT_CLASS_P (sc))
1773 {
1774 tree *v = XALLOCAVEC (tree, nunits);
1775 for (i = 0; i < nunits; ++i)
1776 v[i] = sc;
1777 return build_vector (vectype, v);
1778 }
1779 else
1780 {
1781 vec<constructor_elt, va_gc> *v;
1782 vec_alloc (v, nunits);
1783 for (i = 0; i < nunits; ++i)
1784 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1785 return build_constructor (vectype, v);
1786 }
1787 }
1788
1789 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1790 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1791
1792 void
1793 recompute_constructor_flags (tree c)
1794 {
1795 unsigned int i;
1796 tree val;
1797 bool constant_p = true;
1798 bool side_effects_p = false;
1799 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1800
1801 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1802 {
1803 /* Mostly ctors will have elts that don't have side-effects, so
1804 the usual case is to scan all the elements. Hence a single
1805 loop for both const and side effects, rather than one loop
1806 each (with early outs). */
1807 if (!TREE_CONSTANT (val))
1808 constant_p = false;
1809 if (TREE_SIDE_EFFECTS (val))
1810 side_effects_p = true;
1811 }
1812
1813 TREE_SIDE_EFFECTS (c) = side_effects_p;
1814 TREE_CONSTANT (c) = constant_p;
1815 }
1816
1817 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1818 CONSTRUCTOR C. */
1819
1820 void
1821 verify_constructor_flags (tree c)
1822 {
1823 unsigned int i;
1824 tree val;
1825 bool constant_p = TREE_CONSTANT (c);
1826 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1827 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1828
1829 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1830 {
1831 if (constant_p && !TREE_CONSTANT (val))
1832 internal_error ("non-constant element in constant CONSTRUCTOR");
1833 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1834 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1835 }
1836 }
1837
1838 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1839 are in the vec pointed to by VALS. */
1840 tree
1841 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1842 {
1843 tree c = make_node (CONSTRUCTOR);
1844
1845 TREE_TYPE (c) = type;
1846 CONSTRUCTOR_ELTS (c) = vals;
1847
1848 recompute_constructor_flags (c);
1849
1850 return c;
1851 }
1852
1853 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1854 INDEX and VALUE. */
1855 tree
1856 build_constructor_single (tree type, tree index, tree value)
1857 {
1858 vec<constructor_elt, va_gc> *v;
1859 constructor_elt elt = {index, value};
1860
1861 vec_alloc (v, 1);
1862 v->quick_push (elt);
1863
1864 return build_constructor (type, v);
1865 }
1866
1867
1868 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1869 are in a list pointed to by VALS. */
1870 tree
1871 build_constructor_from_list (tree type, tree vals)
1872 {
1873 tree t;
1874 vec<constructor_elt, va_gc> *v = NULL;
1875
1876 if (vals)
1877 {
1878 vec_alloc (v, list_length (vals));
1879 for (t = vals; t; t = TREE_CHAIN (t))
1880 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1881 }
1882
1883 return build_constructor (type, v);
1884 }
1885
1886 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1887 of elements, provided as index/value pairs. */
1888
1889 tree
1890 build_constructor_va (tree type, int nelts, ...)
1891 {
1892 vec<constructor_elt, va_gc> *v = NULL;
1893 va_list p;
1894
1895 va_start (p, nelts);
1896 vec_alloc (v, nelts);
1897 while (nelts--)
1898 {
1899 tree index = va_arg (p, tree);
1900 tree value = va_arg (p, tree);
1901 CONSTRUCTOR_APPEND_ELT (v, index, value);
1902 }
1903 va_end (p);
1904 return build_constructor (type, v);
1905 }
1906
1907 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1908
1909 tree
1910 build_fixed (tree type, FIXED_VALUE_TYPE f)
1911 {
1912 tree v;
1913 FIXED_VALUE_TYPE *fp;
1914
1915 v = make_node (FIXED_CST);
1916 fp = ggc_alloc<fixed_value> ();
1917 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1918
1919 TREE_TYPE (v) = type;
1920 TREE_FIXED_CST_PTR (v) = fp;
1921 return v;
1922 }
1923
1924 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1925
1926 tree
1927 build_real (tree type, REAL_VALUE_TYPE d)
1928 {
1929 tree v;
1930 REAL_VALUE_TYPE *dp;
1931 int overflow = 0;
1932
1933 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1934 Consider doing it via real_convert now. */
1935
1936 v = make_node (REAL_CST);
1937 dp = ggc_alloc<real_value> ();
1938 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1939
1940 TREE_TYPE (v) = type;
1941 TREE_REAL_CST_PTR (v) = dp;
1942 TREE_OVERFLOW (v) = overflow;
1943 return v;
1944 }
1945
1946 /* Like build_real, but first truncate D to the type. */
1947
1948 tree
1949 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1950 {
1951 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1952 }
1953
1954 /* Return a new REAL_CST node whose type is TYPE
1955 and whose value is the integer value of the INTEGER_CST node I. */
1956
1957 REAL_VALUE_TYPE
1958 real_value_from_int_cst (const_tree type, const_tree i)
1959 {
1960 REAL_VALUE_TYPE d;
1961
1962 /* Clear all bits of the real value type so that we can later do
1963 bitwise comparisons to see if two values are the same. */
1964 memset (&d, 0, sizeof d);
1965
1966 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1967 TYPE_SIGN (TREE_TYPE (i)));
1968 return d;
1969 }
1970
1971 /* Given a tree representing an integer constant I, return a tree
1972 representing the same value as a floating-point constant of type TYPE. */
1973
1974 tree
1975 build_real_from_int_cst (tree type, const_tree i)
1976 {
1977 tree v;
1978 int overflow = TREE_OVERFLOW (i);
1979
1980 v = build_real (type, real_value_from_int_cst (type, i));
1981
1982 TREE_OVERFLOW (v) |= overflow;
1983 return v;
1984 }
1985
1986 /* Return a newly constructed STRING_CST node whose value is
1987 the LEN characters at STR.
1988 Note that for a C string literal, LEN should include the trailing NUL.
1989 The TREE_TYPE is not initialized. */
1990
tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  s = (tree) ggc_internal_alloc (length);

  /* Only the tree_typed header needs clearing; the string payload is
     fully overwritten below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  /* Always NUL-terminate, whether or not STR was.  */
  s->string.str[len] = '\0';

  return s;
}
2013
2014 /* Return a newly constructed COMPLEX_CST node whose value is
2015 specified by the real and imaginary parts REAL and IMAG.
2016 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2017 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2018
2019 tree
2020 build_complex (tree type, tree real, tree imag)
2021 {
2022 tree t = make_node (COMPLEX_CST);
2023
2024 TREE_REALPART (t) = real;
2025 TREE_IMAGPART (t) = imag;
2026 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2027 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2028 return t;
2029 }
2030
2031 /* Build a complex (inf +- 0i), such as for the result of cproj.
2032 TYPE is the complex tree type of the result. If NEG is true, the
2033 imaginary zero is negative. */
2034
2035 tree
2036 build_complex_inf (tree type, bool neg)
2037 {
2038 REAL_VALUE_TYPE rinf, rzero = dconst0;
2039
2040 real_inf (&rinf);
2041 rzero.sign = neg;
2042 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2043 build_real (TREE_TYPE (type), rzero));
2044 }
2045
2046 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2047 element is set to 1. In particular, this is 1 + i for complex types. */
2048
2049 tree
2050 build_each_one_cst (tree type)
2051 {
2052 if (TREE_CODE (type) == COMPLEX_TYPE)
2053 {
2054 tree scalar = build_one_cst (TREE_TYPE (type));
2055 return build_complex (type, scalar, scalar);
2056 }
2057 else
2058 return build_one_cst (type);
2059 }
2060
2061 /* Return a constant of arithmetic type TYPE which is the
2062 multiplicative identity of the set TYPE. */
2063
2064 tree
2065 build_one_cst (tree type)
2066 {
2067 switch (TREE_CODE (type))
2068 {
2069 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2070 case POINTER_TYPE: case REFERENCE_TYPE:
2071 case OFFSET_TYPE:
2072 return build_int_cst (type, 1);
2073
2074 case REAL_TYPE:
2075 return build_real (type, dconst1);
2076
2077 case FIXED_POINT_TYPE:
2078 /* We can only generate 1 for accum types. */
2079 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2080 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2081
2082 case VECTOR_TYPE:
2083 {
2084 tree scalar = build_one_cst (TREE_TYPE (type));
2085
2086 return build_vector_from_val (type, scalar);
2087 }
2088
2089 case COMPLEX_TYPE:
2090 return build_complex (type,
2091 build_one_cst (TREE_TYPE (type)),
2092 build_zero_cst (TREE_TYPE (type)));
2093
2094 default:
2095 gcc_unreachable ();
2096 }
2097 }
2098
2099 /* Return an integer of type TYPE containing all 1's in as much precision as
2100 it contains, or a complex or vector whose subparts are such integers. */
2101
2102 tree
2103 build_all_ones_cst (tree type)
2104 {
2105 if (TREE_CODE (type) == COMPLEX_TYPE)
2106 {
2107 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2108 return build_complex (type, scalar, scalar);
2109 }
2110 else
2111 return build_minus_one_cst (type);
2112 }
2113
2114 /* Return a constant of arithmetic type TYPE which is the
2115 opposite of the multiplicative identity of the set TYPE. */
2116
2117 tree
2118 build_minus_one_cst (tree type)
2119 {
2120 switch (TREE_CODE (type))
2121 {
2122 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2123 case POINTER_TYPE: case REFERENCE_TYPE:
2124 case OFFSET_TYPE:
2125 return build_int_cst (type, -1);
2126
2127 case REAL_TYPE:
2128 return build_real (type, dconstm1);
2129
2130 case FIXED_POINT_TYPE:
2131 /* We can only generate 1 for accum types. */
2132 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2133 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2134 TYPE_MODE (type)));
2135
2136 case VECTOR_TYPE:
2137 {
2138 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2139
2140 return build_vector_from_val (type, scalar);
2141 }
2142
2143 case COMPLEX_TYPE:
2144 return build_complex (type,
2145 build_minus_one_cst (TREE_TYPE (type)),
2146 build_zero_cst (TREE_TYPE (type)));
2147
2148 default:
2149 gcc_unreachable ();
2150 }
2151 }
2152
2153 /* Build 0 constant of type TYPE. This is used by constructor folding
2154 and thus the constant should be represented in memory by
2155 zero(es). */
2156
2157 tree
2158 build_zero_cst (tree type)
2159 {
2160 switch (TREE_CODE (type))
2161 {
2162 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2163 case POINTER_TYPE: case REFERENCE_TYPE:
2164 case OFFSET_TYPE: case NULLPTR_TYPE:
2165 return build_int_cst (type, 0);
2166
2167 case REAL_TYPE:
2168 return build_real (type, dconst0);
2169
2170 case FIXED_POINT_TYPE:
2171 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2172
2173 case VECTOR_TYPE:
2174 {
2175 tree scalar = build_zero_cst (TREE_TYPE (type));
2176
2177 return build_vector_from_val (type, scalar);
2178 }
2179
2180 case COMPLEX_TYPE:
2181 {
2182 tree zero = build_zero_cst (TREE_TYPE (type));
2183
2184 return build_complex (type, zero, zero);
2185 }
2186
2187 default:
2188 if (!AGGREGATE_TYPE_P (type))
2189 return fold_convert (type, integer_zero_node);
2190 return build_constructor (type, NULL);
2191 }
2192 }
2193
2194
/* Build a BINFO node with embedded storage for BASE_BINFOS base binfos.  */
2196
tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  /* The base-binfo vector is embedded at the tail of the node, so the
     allocation covers the header plus the vector's embedded size.  */
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Clear only the header; the embedded vector is initialized below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
2216
2217 /* Create a CASE_LABEL_EXPR tree node and return it. */
2218
2219 tree
2220 build_case_label (tree low_value, tree high_value, tree label_decl)
2221 {
2222 tree t = make_node (CASE_LABEL_EXPR);
2223
2224 TREE_TYPE (t) = void_type_node;
2225 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2226
2227 CASE_LOW (t) = low_value;
2228 CASE_HIGH (t) = high_value;
2229 CASE_LABEL (t) = label_decl;
2230 CASE_CHAIN (t) = NULL_TREE;
2231
2232 return t;
2233 }
2234
2235 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2236 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2237 The latter determines the length of the HOST_WIDE_INT vector. */
2238
tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* tree_int_cst embeds one HWI; allocate the remaining EXT_LEN - 1.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
2266
2267 /* Build a newly constructed TREE_VEC node of length LEN. */
2268
2269 tree
2270 make_tree_vec (int len MEM_STAT_DECL)
2271 {
2272 tree t;
2273 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2274
2275 record_node_allocation_statistics (TREE_VEC, length);
2276
2277 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2278
2279 TREE_SET_CODE (t, TREE_VEC);
2280 TREE_VEC_LENGTH (t) = len;
2281
2282 return t;
2283 }
2284
2285 /* Grow a TREE_VEC node to new length LEN. */
2286
tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  /* Only growing is supported.  */
  gcc_assert (len > oldlen);

  /* struct tree_vec embeds one element slot, hence the "- 1" below.  */
  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Only the delta is recorded; the original size was accounted for
     when the vector was first allocated.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  /* NB: ggc_realloc may move the node, so callers must use the
     returned pointer.  */
  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
2306 \f
2307 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2308 fixed, and scalar, complex or vector. */
2309
2310 int
2311 zerop (const_tree expr)
2312 {
2313 return (integer_zerop (expr)
2314 || real_zerop (expr)
2315 || fixed_zerop (expr));
2316 }
2317
2318 /* Return 1 if EXPR is the integer constant zero or a complex constant
2319 of zero. */
2320
2321 int
2322 integer_zerop (const_tree expr)
2323 {
2324 switch (TREE_CODE (expr))
2325 {
2326 case INTEGER_CST:
2327 return wi::eq_p (expr, 0);
2328 case COMPLEX_CST:
2329 return (integer_zerop (TREE_REALPART (expr))
2330 && integer_zerop (TREE_IMAGPART (expr)));
2331 case VECTOR_CST:
2332 {
2333 unsigned i;
2334 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2335 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2336 return false;
2337 return true;
2338 }
2339 default:
2340 return false;
2341 }
2342 }
2343
2344 /* Return 1 if EXPR is the integer constant one or the corresponding
2345 complex constant. */
2346
2347 int
2348 integer_onep (const_tree expr)
2349 {
2350 switch (TREE_CODE (expr))
2351 {
2352 case INTEGER_CST:
2353 return wi::eq_p (wi::to_widest (expr), 1);
2354 case COMPLEX_CST:
2355 return (integer_onep (TREE_REALPART (expr))
2356 && integer_zerop (TREE_IMAGPART (expr)));
2357 case VECTOR_CST:
2358 {
2359 unsigned i;
2360 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2361 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2362 return false;
2363 return true;
2364 }
2365 default:
2366 return false;
2367 }
2368 }
2369
2370 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2371 return 1 if every piece is the integer constant one. */
2372
2373 int
2374 integer_each_onep (const_tree expr)
2375 {
2376 if (TREE_CODE (expr) == COMPLEX_CST)
2377 return (integer_onep (TREE_REALPART (expr))
2378 && integer_onep (TREE_IMAGPART (expr)));
2379 else
2380 return integer_onep (expr);
2381 }
2382
2383 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2384 it contains, or a complex or vector whose subparts are such integers. */
2385
int
integer_all_onesp (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_all_onesp (TREE_REALPART (expr))
      && integer_all_onesp (TREE_IMAGPART (expr)))
    return 1;

  else if (TREE_CODE (expr) == VECTOR_CST)
    {
      unsigned i;
      for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
	  return 0;
      return 1;
    }

  else if (TREE_CODE (expr) != INTEGER_CST)
    return 0;

  /* An all-ones value in EXPR's precision is exactly the maximum
     unsigned value of that precision.  */
  return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
}
2408
2409 /* Return 1 if EXPR is the integer constant minus one. */
2410
2411 int
2412 integer_minus_onep (const_tree expr)
2413 {
2414 if (TREE_CODE (expr) == COMPLEX_CST)
2415 return (integer_all_onesp (TREE_REALPART (expr))
2416 && integer_zerop (TREE_IMAGPART (expr)));
2417 else
2418 return integer_all_onesp (expr);
2419 }
2420
2421 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2422 one bit on). */
2423
2424 int
2425 integer_pow2p (const_tree expr)
2426 {
2427 if (TREE_CODE (expr) == COMPLEX_CST
2428 && integer_pow2p (TREE_REALPART (expr))
2429 && integer_zerop (TREE_IMAGPART (expr)))
2430 return 1;
2431
2432 if (TREE_CODE (expr) != INTEGER_CST)
2433 return 0;
2434
2435 return wi::popcount (expr) == 1;
2436 }
2437
2438 /* Return 1 if EXPR is an integer constant other than zero or a
2439 complex constant other than zero. */
2440
2441 int
2442 integer_nonzerop (const_tree expr)
2443 {
2444 return ((TREE_CODE (expr) == INTEGER_CST
2445 && !wi::eq_p (expr, 0))
2446 || (TREE_CODE (expr) == COMPLEX_CST
2447 && (integer_nonzerop (TREE_REALPART (expr))
2448 || integer_nonzerop (TREE_IMAGPART (expr)))));
2449 }
2450
2451 /* Return 1 if EXPR is the integer constant one. For vector,
2452 return 1 if every piece is the integer constant minus one
2453 (representing the value TRUE). */
2454
2455 int
2456 integer_truep (const_tree expr)
2457 {
2458 if (TREE_CODE (expr) == VECTOR_CST)
2459 return integer_all_onesp (expr);
2460 return integer_onep (expr);
2461 }
2462
2463 /* Return 1 if EXPR is the fixed-point constant zero. */
2464
2465 int
2466 fixed_zerop (const_tree expr)
2467 {
2468 return (TREE_CODE (expr) == FIXED_CST
2469 && TREE_FIXED_CST (expr).data.is_zero ());
2470 }
2471
2472 /* Return the power of two represented by a tree node known to be a
2473 power of two. */
2474
int
tree_log2 (const_tree expr)
{
  /* For a complex constant, the value of interest is the real part.  */
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  /* wi::exact_log2 yields -1 when EXPR is not an exact power of two.  */
  return wi::exact_log2 (expr);
}
2483
2484 /* Similar, but return the largest integer Y such that 2 ** Y is less
2485 than or equal to EXPR. */
2486
2487 int
2488 tree_floor_log2 (const_tree expr)
2489 {
2490 if (TREE_CODE (expr) == COMPLEX_CST)
2491 return tree_log2 (TREE_REALPART (expr));
2492
2493 return wi::floor_log2 (expr);
2494 }
2495
2496 /* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  */
2498
unsigned int
tree_ctz (const_tree expr)
{
  /* Only integral and pointer values have a meaningful trailing-zero
     count here.  */
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (expr);
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use the nonzero-bit mask recorded on the SSA name.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve the low zero bits common to both operands.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so the better operand wins.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros add under multiplication, capped at the
	 precision.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A known, in-range shift count adds that many zeros.  */
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      /* A known, in-range shift count removes that many zeros.  */
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two acts like a right shift.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* An operand known to be all zeros stays all zeros after the
	 conversion, whatever the new precision.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* Whichever arm is selected, at least the smaller count holds.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}
2606
2607 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2608 decimal float constants, so don't return 1 for them. */
2609
2610 int
2611 real_zerop (const_tree expr)
2612 {
2613 switch (TREE_CODE (expr))
2614 {
2615 case REAL_CST:
2616 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2617 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2618 case COMPLEX_CST:
2619 return real_zerop (TREE_REALPART (expr))
2620 && real_zerop (TREE_IMAGPART (expr));
2621 case VECTOR_CST:
2622 {
2623 unsigned i;
2624 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2625 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2626 return false;
2627 return true;
2628 }
2629 default:
2630 return false;
2631 }
2632 }
2633
2634 /* Return 1 if EXPR is the real constant one in real or complex form.
2635 Trailing zeroes matter for decimal float constants, so don't return
2636 1 for them. */
2637
2638 int
2639 real_onep (const_tree expr)
2640 {
2641 switch (TREE_CODE (expr))
2642 {
2643 case REAL_CST:
2644 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2645 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2646 case COMPLEX_CST:
2647 return real_onep (TREE_REALPART (expr))
2648 && real_zerop (TREE_IMAGPART (expr));
2649 case VECTOR_CST:
2650 {
2651 unsigned i;
2652 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2653 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2654 return false;
2655 return true;
2656 }
2657 default:
2658 return false;
2659 }
2660 }
2661
2662 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2663 matter for decimal float constants, so don't return 1 for them. */
2664
2665 int
2666 real_minus_onep (const_tree expr)
2667 {
2668 switch (TREE_CODE (expr))
2669 {
2670 case REAL_CST:
2671 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2672 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2673 case COMPLEX_CST:
2674 return real_minus_onep (TREE_REALPART (expr))
2675 && real_zerop (TREE_IMAGPART (expr));
2676 case VECTOR_CST:
2677 {
2678 unsigned i;
2679 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2680 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2681 return false;
2682 return true;
2683 }
2684 default:
2685 return false;
2686 }
2687 }
2688
2689 /* Nonzero if EXP is a constant or a cast of a constant. */
2690
2691 int
2692 really_constant_p (const_tree exp)
2693 {
2694 /* This is not quite the same as STRIP_NOPS. It does more. */
2695 while (CONVERT_EXPR_P (exp)
2696 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2697 exp = TREE_OPERAND (exp, 0);
2698 return TREE_CONSTANT (exp);
2699 }
2700 \f
2701 /* Return first list element whose TREE_VALUE is ELEM.
2702 Return 0 if ELEM is not in LIST. */
2703
2704 tree
2705 value_member (tree elem, tree list)
2706 {
2707 while (list)
2708 {
2709 if (elem == TREE_VALUE (list))
2710 return list;
2711 list = TREE_CHAIN (list);
2712 }
2713 return NULL_TREE;
2714 }
2715
2716 /* Return first list element whose TREE_PURPOSE is ELEM.
2717 Return 0 if ELEM is not in LIST. */
2718
2719 tree
2720 purpose_member (const_tree elem, tree list)
2721 {
2722 while (list)
2723 {
2724 if (elem == TREE_PURPOSE (list))
2725 return list;
2726 list = TREE_CHAIN (list);
2727 }
2728 return NULL_TREE;
2729 }
2730
2731 /* Return true if ELEM is in V. */
2732
bool
vec_member (const_tree elem, vec<tree, va_gc> *v)
{
  /* Linear scan of V, comparing by pointer identity; V may be NULL.  */
  unsigned ix;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (v, ix, t)
    if (elem == t)
      return true;
  return false;
}
2743
2744 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2745 NULL_TREE. */
2746
2747 tree
2748 chain_index (int idx, tree chain)
2749 {
2750 for (; chain && idx > 0; --idx)
2751 chain = TREE_CHAIN (chain);
2752 return chain;
2753 }
2754
2755 /* Return nonzero if ELEM is part of the chain CHAIN. */
2756
2757 int
2758 chain_member (const_tree elem, const_tree chain)
2759 {
2760 while (chain)
2761 {
2762 if (elem == chain)
2763 return 1;
2764 chain = DECL_CHAIN (chain);
2765 }
2766
2767 return 0;
2768 }
2769
2770 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2771 We expect a null pointer to mark the end of the chain.
2772 This is the Lisp primitive `length'. */
2773
int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q advances at half of P's speed; if the two ever meet again the
     chain is circular (Floyd's cycle detection).  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
2796
2797 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2798 UNION_TYPE TYPE, or NULL_TREE if none. */
2799
2800 tree
2801 first_field (const_tree type)
2802 {
2803 tree t = TYPE_FIELDS (type);
2804 while (t && TREE_CODE (t) != FIELD_DECL)
2805 t = TREE_CHAIN (t);
2806 return t;
2807 }
2808
2809 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2810 by modifying the last node in chain 1 to point to chain 2.
2811 This is the Lisp primitive `nconc'. */
2812
tree
chainon (tree op1, tree op2)
{
  tree t1;

  /* If either chain is empty, the other is the result.  */
  if (!op1)
    return op2;
  if (!op2)
    return op1;

  /* Find the last node of OP1 and splice OP2 after it.  */
  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Verify the splice created no cycle, i.e. OP2 did not already
       contain OP1's tail node.  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
2837
2838 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2839
2840 tree
2841 tree_last (tree chain)
2842 {
2843 tree next;
2844 if (chain)
2845 while ((next = TREE_CHAIN (chain)))
2846 chain = next;
2847 return chain;
2848 }
2849
2850 /* Reverse the order of elements in the chain T,
2851 and return the new head of the chain (old last element). */
2852
2853 tree
2854 nreverse (tree t)
2855 {
2856 tree prev = 0, decl, next;
2857 for (decl = t; decl; decl = next)
2858 {
2859 /* We shouldn't be using this function to reverse BLOCK chains; we
2860 have blocks_nreverse for that. */
2861 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2862 next = TREE_CHAIN (decl);
2863 TREE_CHAIN (decl) = prev;
2864 prev = decl;
2865 }
2866 return prev;
2867 }
2868 \f
2869 /* Return a newly created TREE_LIST node whose
2870 purpose and value fields are PARM and VALUE. */
2871
2872 tree
2873 build_tree_list (tree parm, tree value MEM_STAT_DECL)
2874 {
2875 tree t = make_node (TREE_LIST PASS_MEM_STAT);
2876 TREE_PURPOSE (t) = parm;
2877 TREE_VALUE (t) = value;
2878 return t;
2879 }
2880
2881 /* Build a chain of TREE_LIST nodes from a vector. */
2882
2883 tree
2884 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2885 {
2886 tree ret = NULL_TREE;
2887 tree *pp = &ret;
2888 unsigned int i;
2889 tree t;
2890 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2891 {
2892 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
2893 pp = &TREE_CHAIN (*pp);
2894 }
2895 return ret;
2896 }
2897
2898 /* Return a newly created TREE_LIST node whose
2899 purpose and value fields are PURPOSE and VALUE
2900 and whose TREE_CHAIN is CHAIN. */
2901
tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing; every list-specific field
     is explicitly initialized below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
2918
2919 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2920 trees. */
2921
2922 vec<tree, va_gc> *
2923 ctor_to_vec (tree ctor)
2924 {
2925 vec<tree, va_gc> *vec;
2926 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2927 unsigned int ix;
2928 tree val;
2929
2930 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2931 vec->quick_push (val);
2932
2933 return vec;
2934 }
2935 \f
2936 /* Return the size nominally occupied by an object of type TYPE
2937 when it resides in memory. The value is measured in units of bytes,
2938 and its data type is that normally used for type sizes
2939 (which is the first type created by make_signed_type or
2940 make_unsigned_type). */
2941
2942 tree
2943 size_in_bytes_loc (location_t loc, const_tree type)
2944 {
2945 tree t;
2946
2947 if (type == error_mark_node)
2948 return integer_zero_node;
2949
2950 type = TYPE_MAIN_VARIANT (type);
2951 t = TYPE_SIZE_UNIT (type);
2952
2953 if (t == 0)
2954 {
2955 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2956 return size_zero_node;
2957 }
2958
2959 return t;
2960 }
2961
2962 /* Return the size of TYPE (in bytes) as a wide integer
2963 or return -1 if the size can vary or is larger than an integer. */
2964
2965 HOST_WIDE_INT
2966 int_size_in_bytes (const_tree type)
2967 {
2968 tree t;
2969
2970 if (type == error_mark_node)
2971 return 0;
2972
2973 type = TYPE_MAIN_VARIANT (type);
2974 t = TYPE_SIZE_UNIT (type);
2975
2976 if (t && tree_fits_uhwi_p (t))
2977 return TREE_INT_CST_LOW (t);
2978 else
2979 return -1;
2980 }
2981
2982 /* Return the maximum size of TYPE (in bytes) as a wide integer
2983 or return -1 if the size can vary or is larger than an integer. */
2984
2985 HOST_WIDE_INT
2986 max_int_size_in_bytes (const_tree type)
2987 {
2988 HOST_WIDE_INT size = -1;
2989 tree size_tree;
2990
2991 /* If this is an array type, check for a possible MAX_SIZE attached. */
2992
2993 if (TREE_CODE (type) == ARRAY_TYPE)
2994 {
2995 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2996
2997 if (size_tree && tree_fits_uhwi_p (size_tree))
2998 size = tree_to_uhwi (size_tree);
2999 }
3000
3001 /* If we still haven't been able to get a size, see if the language
3002 can compute a maximum size. */
3003
3004 if (size == -1)
3005 {
3006 size_tree = lang_hooks.types.max_size (type);
3007
3008 if (size_tree && tree_fits_uhwi_p (size_tree))
3009 size = tree_to_uhwi (size_tree);
3010 }
3011
3012 return size;
3013 }
3014 \f
3015 /* Return the bit position of FIELD, in bits from the start of the record.
3016 This is a tree of type bitsizetype. */
3017
tree
bit_position (const_tree field)
{
  /* Combine FIELD's byte-granular offset with its residual bit offset
     into a single bitsizetype position.  */
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}
3024 \f
3025 /* Return the byte position of FIELD, in bytes from the start of the record.
3026 This is a tree of type sizetype. */
3027
tree
byte_position (const_tree field)
{
  /* Combine FIELD's byte-granular offset with its residual bit offset
     into a single sizetype byte position.  */
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}
3034
3035 /* Likewise, but return as an integer. It must be representable in
3036 that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */
3038
HOST_WIDE_INT
int_byte_position (const_tree field)
{
  /* Per the contract above, the position must be representable as a
     signed HOST_WIDE_INT here.  */
  return tree_to_shwi (byte_position (field));
}
3044 \f
3045 /* Return the strictest alignment, in bits, that T is known to have. */
3046
unsigned int
expr_align (const_tree t)
{
  unsigned int align0, align1;

  switch (TREE_CODE (t))
    {
    CASE_CONVERT:  case NON_LVALUE_EXPR:
      /* If we have conversions, we know that the alignment of the
	 object must meet each of the alignments of the types.  */
      align0 = expr_align (TREE_OPERAND (t, 0));
      align1 = TYPE_ALIGN (TREE_TYPE (t));
      return MAX (align0, align1);

    case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
    case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      /* These don't change the alignment of an object.  */
      return expr_align (TREE_OPERAND (t, 0));

    case COND_EXPR:
      /* The best we can do is say that the alignment is the least aligned
	 of the two arms.  */
      align0 = expr_align (TREE_OPERAND (t, 1));
      align1 = expr_align (TREE_OPERAND (t, 2));
      return MIN (align0, align1);

      /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
	 meaningfully, it's always 1.  */
    case LABEL_DECL:     case CONST_DECL:
    case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
    case FUNCTION_DECL:
      /* By this point layout is expected to have set an alignment.  */
      gcc_assert (DECL_ALIGN (t) != 0);
      return DECL_ALIGN (t);

    default:
      break;
    }

  /* Otherwise take the alignment from that of the type.  */
  return TYPE_ALIGN (TREE_TYPE (t));
}
3089 \f
3090 /* Return, as a tree node, the number of elements for TYPE (which is an
3091 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3092
3093 tree
3094 array_type_nelts (const_tree type)
3095 {
3096 tree index_type, min, max;
3097
3098 /* If they did it with unspecified bounds, then we should have already
3099 given an error about it before we got here. */
3100 if (! TYPE_DOMAIN (type))
3101 return error_mark_node;
3102
3103 index_type = TYPE_DOMAIN (type);
3104 min = TYPE_MIN_VALUE (index_type);
3105 max = TYPE_MAX_VALUE (index_type);
3106
3107 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3108 if (!max)
3109 return error_mark_node;
3110
3111 return (integer_zerop (min)
3112 ? max
3113 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3114 }
3115 \f
3116 /* If arg is static -- a reference to an object in static storage -- then
3117 return the object. This is not the same as the C meaning of `static'.
3118 If arg isn't static, return NULL. */
3119
tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      /* Thread-local and dllimport variables do not have link-time
	 constant addresses.  */
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      /* *P is static only when the pointer P is itself constant.  */
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* A constant element size and constant index keep the reference
	 static if the underlying array is.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
3181
3182 \f
3183
3184
3185 /* Return whether OP is a DECL whose address is function-invariant. */
3186
bool
decl_address_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
      return true;

    case VAR_DECL:
      /* Static storage, thread-local storage, and variables belonging
	 to the current function all keep a stable address for the
	 duration of this function.  */
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || DECL_THREAD_LOCAL_P (op)
	  || DECL_CONTEXT (op) == current_function_decl
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    default:
      break;
    }

  return false;
}
3221
3222 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3223
bool
decl_address_ip_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case LABEL_DECL:
    case FUNCTION_DECL:
    case STRING_CST:
      return true;

    case VAR_DECL:
      /* Static storage qualifies unless it is dllimport (resolved
	 outside this module); thread-local storage also qualifies.  */
      if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
	   && !DECL_DLLIMPORT_P (op))
	  || DECL_THREAD_LOCAL_P (op))
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
	return true;
      break;

    default:
      break;
    }

  return false;
}
3255
3256
3257 /* Return true if T is function-invariant (internal function, does
3258 not handle arithmetic; that's handled in skip_simple_arithmetic and
3259 tree_invariant_p). */
3260
static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  /* Constants and read-only, side-effect-free nodes are trivially
     invariant.  */
  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      /* Walk down the reference: the address is invariant only if all
	 index operands are invariant and no variable offset/size
	 operands (operands 2 and 3) are present.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      /* The base object must be a constant or a declaration whose
	 address is function-invariant.  */
      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}
3307
3308 /* Return true if T is function-invariant. */
3309
bool
tree_invariant_p (tree t)
{
  /* Strip simple arithmetic wrapping an invariant core, then test the
     core itself.  */
  tree inner = skip_simple_arithmetic (t);
  return tree_invariant_p_1 (inner);
}
3316
3317 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3318 Do this to any expression which may be used in more than one place,
3319 but must be evaluated only once.
3320
3321 Normally, expand_expr would reevaluate the expression each time.
3322 Calling save_expr produces something that is evaluated and recorded
3323 the first time expand_expr is called on it. Subsequent calls to
3324 expand_expr just reuse the recorded value.
3325
3326 The call to expand_expr that generates code that actually computes
3327 the value is the first call *at compile time*. Subsequent calls
3328 *at compile time* generate code to use the saved value.
3329 This produces correct result provided that *at run time* control
3330 always flows through the insns made by the first expand_expr
3331 before reaching the other places where the save_expr was evaluated.
3332 You, the caller of save_expr, must make sure this is so.
3333
3334 Constants, and certain read-only nodes, are returned with no
3335 SAVE_EXPR because that is safe. Expressions containing placeholders
3336 are not touched; see tree.def for an explanation of what these
3337 are used for. */
3338
tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  /* Wrap the whole of EXPR (not just INNER) so callers keep the
     original expression, now guarded by the SAVE_EXPR.  */
  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
3376
3377 /* Look inside EXPR into any simple arithmetic operations. Return the
3378 outermost non-arithmetic or non-invariant node. */
3379
3380 tree
3381 skip_simple_arithmetic (tree expr)
3382 {
3383 /* We don't care about whether this can be used as an lvalue in this
3384 context. */
3385 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3386 expr = TREE_OPERAND (expr, 0);
3387
3388 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3389 a constant, it will be more efficient to not make another SAVE_EXPR since
3390 it will allow better simplification and GCSE will be able to merge the
3391 computations if they actually occur. */
3392 while (true)
3393 {
3394 if (UNARY_CLASS_P (expr))
3395 expr = TREE_OPERAND (expr, 0);
3396 else if (BINARY_CLASS_P (expr))
3397 {
3398 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3399 expr = TREE_OPERAND (expr, 0);
3400 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3401 expr = TREE_OPERAND (expr, 1);
3402 else
3403 break;
3404 }
3405 else
3406 break;
3407 }
3408
3409 return expr;
3410 }
3411
3412 /* Look inside EXPR into simple arithmetic operations involving constants.
3413 Return the outermost non-arithmetic or non-constant node. */
3414
3415 tree
3416 skip_simple_constant_arithmetic (tree expr)
3417 {
3418 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3419 expr = TREE_OPERAND (expr, 0);
3420
3421 while (true)
3422 {
3423 if (UNARY_CLASS_P (expr))
3424 expr = TREE_OPERAND (expr, 0);
3425 else if (BINARY_CLASS_P (expr))
3426 {
3427 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3428 expr = TREE_OPERAND (expr, 0);
3429 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3430 expr = TREE_OPERAND (expr, 1);
3431 else
3432 break;
3433 }
3434 else
3435 break;
3436 }
3437
3438 return expr;
3439 }
3440
3441 /* Return which tree structure is used by T. */
3442
enum tree_node_structure_enum
tree_node_structure (const_tree t)
{
  /* The structure is entirely determined by the tree code.  */
  const enum tree_code code = TREE_CODE (t);
  return tree_node_structure_for_code (code);
}
3449
3450 /* Set various status flags when building a CALL_EXPR object T. */
3451
3452 static void
3453 process_call_operands (tree t)
3454 {
3455 bool side_effects = TREE_SIDE_EFFECTS (t);
3456 bool read_only = false;
3457 int i = call_expr_flags (t);
3458
3459 /* Calls have side-effects, except those to const or pure functions. */
3460 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3461 side_effects = true;
3462 /* Propagate TREE_READONLY of arguments for const functions. */
3463 if (i & ECF_CONST)
3464 read_only = true;
3465
3466 if (!side_effects || read_only)
3467 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3468 {
3469 tree op = TREE_OPERAND (t, i);
3470 if (op && TREE_SIDE_EFFECTS (op))
3471 side_effects = true;
3472 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3473 read_only = false;
3474 }
3475
3476 TREE_SIDE_EFFECTS (t) = side_effects;
3477 TREE_READONLY (t) = read_only;
3478 }
3479 \f
/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.

   A null EXP is treated as not containing a placeholder.  Dispatches on
   the tree-code class of EXP and recurses via CONTAINS_PLACEHOLDER_P.  */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which will assume
	 here will be valid.  Only the base object (operand 0) is checked.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      /* Of the exceptional codes, only TREE_LIST can carry placeholders:
	 check both its value and the rest of the chain.  */
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      /* A few expression codes need special treatment before the generic
	 operand walk below.  */
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR.  */
	  return 0;

	default:
	  break;
	}

      /* Generic case: check the first one or two operands, as determined
	 by the code's arity.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    /* Check every argument of the call; the callee itself is
	       not examined.  */
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}
3566
/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.

   This is the uncached worker for type_contains_placeholder_p; callers
   should use the caching wrapper instead.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.
     Pointer types are excluded so that self-referential structures
     (e.g. a record containing a pointer to itself) don't recurse.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case POINTER_BOUNDS_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      /* These have no additional structure that can hold a placeholder.  */
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check
	 the domain type.  Flexible array members have a null domain.  */
      return TYPE_DOMAIN (type) ?
	type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	/* Check each field's offset, type and (for qualified unions)
	   qualifier expression.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}
3635
/* Wrapper around above function used to cache its result.

   The cache lives in TYPE_CONTAINS_PLACEHOLDER_INTERNAL, encoded as:
   0 = not yet computed, 1 = false, 2 = true.  */

bool
type_contains_placeholder_p (tree type)
{
  bool result;

  /* If the contains_placeholder_bits field has been initialized,
     then we know the answer.  */
  if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
    return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;

  /* Indicate that we've seen this type node, and the answer is false.
     This is what we want to return if we run into recursion via fields.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;

  /* Compute the real value.  */
  result = type_contains_placeholder_1 (type);

  /* Store the real value.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;

  return result;
}
3660 \f
/* Push tree EXP onto vector QUEUE if it is not already present.
   Presence is determined with simple_cst_equal, not pointer identity.  */

static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  /* NOTE(review): this relies on FOR_EACH_VEC_ELT leaving ITER null when
     the vector is exhausted without a break — i.e. no duplicate found.  */
  if (!iter)
    queue->safe_push (exp);
}
3676
/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Walk down the chain of references to find the innermost base.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* If the base is a placeholder, record the whole COMPONENT_REF;
	 otherwise keep searching inside the base expression.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* For calls, scan only the arguments (operands 1 and up).  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}
3750
/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.

   Returns EXP itself (not a copy) when no substitution occurred.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through.  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Generic case: substitute in each operand and rebuild (with
	   folding) only if at least one operand changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant or with another
	     instance of one of the arguments of the call, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (code == CALL_EXPR)
	    {
	      bool maybe_inline = false;
	      if (CONSTANT_CLASS_P (r))
		maybe_inline = true;
	      else
		/* Operand 3 onwards are the call arguments proper.  */
		for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
		  if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
		    {
		      maybe_inline = true;
		      break;
		    }
	      if (maybe_inline)
		{
		  tree t = maybe_inline_call_in_expr (exp);
		  if (t)
		    return SUBSTITUTE_IN_EXPR (t, f, r);
		}
	    }

	  /* Substitute in each operand, lazily copying the node the
	     first time an operand actually changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve read-only-ness and, for references, the no-trap flag
     on the rebuilt node.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
3946
/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.

   Returns EXP itself when no substitution occurred.  */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: look for an element of OBJ whose type matches
	 directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: look for a pointer to the needed type, and
	 dereference it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Generic case: substitute in each operand and rebuild (with
	   folding) only if at least one operand changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Substitute in each operand, lazily copying the node the
	     first time an operand actually changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  /* Preserve read-only-ness and, for references, the no-trap flag
     on the rebuilt node.  */
  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}
4117 \f
4118
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* Copy type and status flags from the original expression; build_nt
     does not set them.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
4199
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversions: stabilize the operand as a reference.  */
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      /* The pointer operand is an arbitrary expression, so use
	 stabilize_reference_1 on it.  */
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      /* Stabilize the base as a reference and the index as an
	 arbitrary expression.  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* Copy type and status flags from the original reference; build_nt
     does not set them.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
4282 \f
4283 /* Low-level constructors for expressions. */
4284
/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.

   T must be an ADDR_EXPR; both flags are recomputed from scratch by
   walking the handled components under its operand.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Clear TC / set SE according to NODE's flags; null NODE is a no-op.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  /* Check the index and the optional lower-bound/element-size
	     operands.  */
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
4361
4362 /* Build an expression of code CODE, data type TYPE, and operands as
4363 specified. Expressions and reference nodes can be created this way.
4364 Constants, decls, types and misc nodes cannot be.
4365
4366 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4367 enough for all extant tree codes. */
4368
4369 tree
4370 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4371 {
4372 tree t;
4373
4374 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4375
4376 t = make_node (code PASS_MEM_STAT);
4377 TREE_TYPE (t) = tt;
4378
4379 return t;
4380 }
4381
/* Build a one-operand expression of code CODE, type TYPE and operand NODE.
   Allocates the node directly (rather than via make_node) and derives
   TREE_SIDE_EFFECTS, TREE_READONLY, TREE_CONSTANT and TREE_THIS_VOLATILE
   from NODE as appropriate for CODE.  */

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common header needs clearing; the rest is set below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      /* Inherit side-effects and read-only-ness from the operand.  */
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    TREE_SIDE_EFFECTS (t) = 1;
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      /* Unary operations and VIEW_CONVERT_EXPR propagate constancy;
	 references propagate volatility.  */
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
4442
/* Store ARG<N> as operand N of T and accumulate its flags into the
   local variables side_effects, read_only and constant, which the
   buildN functions below declare before expanding this macro.
   TYPE_P arguments are stored but contribute nothing to the flags.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4457
/* Build a two-operand expression of code CODE, type TT and operands
   ARG0/ARG1, deriving the status flags from the operands.  Pointer
   arithmetic codes are sanity-checked against TT.  */

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
         we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  /* Detect a literal division/modulo by zero so the node is never
     marked TREE_CONSTANT below.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &OBJ takes read-only-ness and volatility directly
	 from OBJ rather than from the accumulated argument flags.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
4536
4537
/* Build a 3-operand expression node of code CODE with type TT and
   operands ARG0/ARG1/ARG2; flags are propagated from the operands via
   PROCESS_ARG as in build2.  */

tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  /* Variable-length expressions (e.g. CALL_EXPR) must be built with
     build_vl_exp, not here.  */
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  /* Only COND_EXPR gets TREE_READONLY propagated here; other ternary
     codes leave it unset.  */
  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4578
/* Build a 4-operand expression node of code CODE with type TT; flags
   are propagated from the operands via PROCESS_ARG as in build2.  */

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  /* CONSTANT and READ_ONLY are only consumed by PROCESS_ARG here.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  /* References are volatile iff the referenced object is.  */
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4605
/* Build a 5-operand expression node of code CODE with type TT; flags
   are propagated from the operands via PROCESS_ARG as in build2.  */

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  /* CONSTANT and READ_ONLY are only consumed by PROCESS_ARG here.  */
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  /* Like MEM_REF in build2: a TARGET_MEM_REF of &OBJ inherits
	     readonly-ness and volatility from OBJ itself.  */
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
4643
4644 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4645 on the pointer PTR. */
4646
4647 tree
4648 build_simple_mem_ref_loc (location_t loc, tree ptr)
4649 {
4650 HOST_WIDE_INT offset = 0;
4651 tree ptype = TREE_TYPE (ptr);
4652 tree tem;
4653 /* For convenience allow addresses that collapse to a simple base
4654 and offset. */
4655 if (TREE_CODE (ptr) == ADDR_EXPR
4656 && (handled_component_p (TREE_OPERAND (ptr, 0))
4657 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4658 {
4659 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4660 gcc_assert (ptr);
4661 ptr = build_fold_addr_expr (ptr);
4662 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4663 }
4664 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4665 ptr, build_int_cst (ptype, offset));
4666 SET_EXPR_LOCATION (tem, loc);
4667 return tem;
4668 }
4669
4670 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4671
4672 offset_int
4673 mem_ref_offset (const_tree t)
4674 {
4675 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4676 }
4677
4678 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4679 offsetted by OFFSET units. */
4680
4681 tree
4682 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4683 {
4684 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4685 build_fold_addr_expr (base),
4686 build_int_cst (ptr_type_node, offset));
4687 tree addr = build1 (ADDR_EXPR, type, ref);
4688 recompute_tree_invariant_for_addr_expr (addr);
4689 return addr;
4690 }
4691
4692 /* Similar except don't specify the TREE_TYPE
4693 and leave the TREE_SIDE_EFFECTS as 0.
4694 It is permissible for arguments to be null,
4695 or even garbage if their values do not matter. */
4696
4697 tree
4698 build_nt (enum tree_code code, ...)
4699 {
4700 tree t;
4701 int length;
4702 int i;
4703 va_list p;
4704
4705 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4706
4707 va_start (p, code);
4708
4709 t = make_node (code);
4710 length = TREE_CODE_LENGTH (code);
4711
4712 for (i = 0; i < length; i++)
4713 TREE_OPERAND (t, i) = va_arg (p, tree);
4714
4715 va_end (p);
4716 return t;
4717 }
4718
4719 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4720 tree vec. */
4721
4722 tree
4723 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4724 {
4725 tree ret, t;
4726 unsigned int ix;
4727
4728 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4729 CALL_EXPR_FN (ret) = fn;
4730 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4731 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4732 CALL_EXPR_ARG (ret, ix) = t;
4733 return ret;
4734 }
4735 \f
4736 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4737 We do NOT enter this node in any sort of symbol table.
4738
4739 LOC is the location of the decl.
4740
4741 layout_decl is used to set up the decl's storage layout.
4742 Other slots are initialized to 0 or null pointers. */
4743
4744 tree
4745 build_decl (location_t loc, enum tree_code code, tree name,
4746 tree type MEM_STAT_DECL)
4747 {
4748 tree t;
4749
4750 t = make_node (code PASS_MEM_STAT);
4751 DECL_SOURCE_LOCATION (t) = loc;
4752
4753 /* if (type == error_mark_node)
4754 type = integer_type_node; */
4755 /* That is not done, deliberately, so that having error_mark_node
4756 as the type can suppress useless errors in the use of this variable. */
4757
4758 DECL_NAME (t) = name;
4759 TREE_TYPE (t) = type;
4760
4761 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4762 layout_decl (t, 0);
4763
4764 return t;
4765 }
4766
4767 /* Builds and returns function declaration with NAME and TYPE. */
4768
4769 tree
4770 build_fn_decl (const char *name, tree type)
4771 {
4772 tree id = get_identifier (name);
4773 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4774
4775 DECL_EXTERNAL (decl) = 1;
4776 TREE_PUBLIC (decl) = 1;
4777 DECL_ARTIFICIAL (decl) = 1;
4778 TREE_NOTHROW (decl) = 1;
4779
4780 return decl;
4781 }
4782
/* All TRANSLATION_UNIT_DECLs created so far, in creation order; pushed
   to by build_translation_unit_decl below.  */
vec<tree, va_gc> *all_translation_units;
4784
4785 /* Builds a new translation-unit decl with name NAME, queues it in the
4786 global list of translation-unit decls and returns it. */
4787
4788 tree
4789 build_translation_unit_decl (tree name)
4790 {
4791 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4792 name, NULL_TREE);
4793 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4794 vec_safe_push (all_translation_units, tu);
4795 return tu;
4796 }
4797
4798 \f
4799 /* BLOCK nodes are used to represent the structure of binding contours
4800 and declarations, once those contours have been exited and their contents
4801 compiled. This information is used for outputting debugging info. */
4802
4803 tree
4804 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4805 {
4806 tree block = make_node (BLOCK);
4807
4808 BLOCK_VARS (block) = vars;
4809 BLOCK_SUBBLOCKS (block) = subblocks;
4810 BLOCK_SUPERCONTEXT (block) = supercontext;
4811 BLOCK_CHAIN (block) = chain;
4812 return block;
4813 }
4814
4815 \f
4816 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4817
4818 LOC is the location to use in tree T. */
4819
4820 void
4821 protected_set_expr_location (tree t, location_t loc)
4822 {
4823 if (CAN_HAVE_LOCATION_P (t))
4824 SET_EXPR_LOCATION (t, loc);
4825 }
4826 \f
/* Reset the expression *EXPR_P, a size or position.

   ??? We could reset all non-constant sizes or positions.  But it's cheap
   enough to not do so and refrain from adding workarounds to dwarf2out.c.

   We need to reset self-referential sizes or positions because they cannot
   be gimplified and thus can contain a CALL_EXPR after the gimplification
   is finished, which will run afoul of LTO streaming.  And they need to be
   reset to something essentially dummy but not constant, so as to preserve
   the properties of the object they are attached to.  */

static inline void
free_lang_data_in_one_sizepos (tree *expr_p)
{
  tree expr = *expr_p;
  /* CONTAINS_PLACEHOLDER_P is false for a null EXPR, so a missing
     size/position is left untouched.  */
  if (CONTAINS_PLACEHOLDER_P (expr))
    *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
}
4845
4846
4847 /* Reset all the fields in a binfo node BINFO. We only keep
4848 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4849
4850 static void
4851 free_lang_data_in_binfo (tree binfo)
4852 {
4853 unsigned i;
4854 tree t;
4855
4856 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4857
4858 BINFO_VIRTUALS (binfo) = NULL_TREE;
4859 BINFO_BASE_ACCESSES (binfo) = NULL;
4860 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4861 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4862
4863 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4864 free_lang_data_in_binfo (t);
4865 }
4866
4867
4868 /* Reset all language specific information still present in TYPE. */
4869
4870 static void
4871 free_lang_data_in_type (tree type)
4872 {
4873 gcc_assert (TYPE_P (type));
4874
4875 /* Give the FE a chance to remove its own data first. */
4876 lang_hooks.free_lang_data (type);
4877
4878 TREE_LANG_FLAG_0 (type) = 0;
4879 TREE_LANG_FLAG_1 (type) = 0;
4880 TREE_LANG_FLAG_2 (type) = 0;
4881 TREE_LANG_FLAG_3 (type) = 0;
4882 TREE_LANG_FLAG_4 (type) = 0;
4883 TREE_LANG_FLAG_5 (type) = 0;
4884 TREE_LANG_FLAG_6 (type) = 0;
4885
4886 if (TREE_CODE (type) == FUNCTION_TYPE)
4887 {
4888 /* Remove the const and volatile qualifiers from arguments. The
4889 C++ front end removes them, but the C front end does not,
4890 leading to false ODR violation errors when merging two
4891 instances of the same function signature compiled by
4892 different front ends. */
4893 tree p;
4894
4895 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4896 {
4897 tree arg_type = TREE_VALUE (p);
4898
4899 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4900 {
4901 int quals = TYPE_QUALS (arg_type)
4902 & ~TYPE_QUAL_CONST
4903 & ~TYPE_QUAL_VOLATILE;
4904 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4905 free_lang_data_in_type (TREE_VALUE (p));
4906 }
4907 /* C++ FE uses TREE_PURPOSE to store initial values. */
4908 TREE_PURPOSE (p) = NULL;
4909 }
4910 }
4911 if (TREE_CODE (type) == METHOD_TYPE)
4912 {
4913 tree p;
4914
4915 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4916 {
4917 /* C++ FE uses TREE_PURPOSE to store initial values. */
4918 TREE_PURPOSE (p) = NULL;
4919 }
4920 }
4921
4922 /* Remove members that are not actually FIELD_DECLs from the field
4923 list of an aggregate. These occur in C++. */
4924 if (RECORD_OR_UNION_TYPE_P (type))
4925 {
4926 tree prev, member;
4927
4928 /* Note that TYPE_FIELDS can be shared across distinct
4929 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4930 to be removed, we cannot set its TREE_CHAIN to NULL.
4931 Otherwise, we would not be able to find all the other fields
4932 in the other instances of this TREE_TYPE.
4933
4934 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4935 prev = NULL_TREE;
4936 member = TYPE_FIELDS (type);
4937 while (member)
4938 {
4939 if (TREE_CODE (member) == FIELD_DECL
4940 || (TREE_CODE (member) == TYPE_DECL
4941 && !DECL_IGNORED_P (member)
4942 && debug_info_level > DINFO_LEVEL_TERSE
4943 && !is_redundant_typedef (member)))
4944 {
4945 if (prev)
4946 TREE_CHAIN (prev) = member;
4947 else
4948 TYPE_FIELDS (type) = member;
4949 prev = member;
4950 }
4951
4952 member = TREE_CHAIN (member);
4953 }
4954
4955 if (prev)
4956 TREE_CHAIN (prev) = NULL_TREE;
4957 else
4958 TYPE_FIELDS (type) = NULL_TREE;
4959
4960 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
4961 and danagle the pointer from time to time. */
4962 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
4963 TYPE_VFIELD (type) = NULL_TREE;
4964
4965 /* Splice out FUNCTION_DECLS and TEMPLATE_DECLS from
4966 TYPE_FIELDS. So LTO doesn't grow. */
4967 for (tree probe, *prev= &TYPE_FIELDS (type); (probe = *prev); )
4968 if (TREE_CODE (probe) == FUNCTION_DECL
4969 || TREE_CODE (probe) == TEMPLATE_DECL)
4970 *prev = probe;
4971 else
4972 prev = &DECL_CHAIN (probe);
4973
4974 if (TYPE_BINFO (type))
4975 {
4976 free_lang_data_in_binfo (TYPE_BINFO (type));
4977 /* We need to preserve link to bases and virtual table for all
4978 polymorphic types to make devirtualization machinery working.
4979 Debug output cares only about bases, but output also
4980 virtual table pointers so merging of -fdevirtualize and
4981 -fno-devirtualize units is easier. */
4982 if ((!BINFO_VTABLE (TYPE_BINFO (type))
4983 || !flag_devirtualize)
4984 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
4985 && !BINFO_VTABLE (TYPE_BINFO (type)))
4986 || debug_info_level != DINFO_LEVEL_NONE))
4987 TYPE_BINFO (type) = NULL;
4988 }
4989 }
4990 else
4991 {
4992 /* For non-aggregate types, clear out the language slot (which
4993 overloads TYPE_BINFO). */
4994 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4995
4996 if (INTEGRAL_TYPE_P (type)
4997 || SCALAR_FLOAT_TYPE_P (type)
4998 || FIXED_POINT_TYPE_P (type))
4999 {
5000 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5001 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5002 }
5003 }
5004
5005 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5006 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5007
5008 if (TYPE_CONTEXT (type)
5009 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5010 {
5011 tree ctx = TYPE_CONTEXT (type);
5012 do
5013 {
5014 ctx = BLOCK_SUPERCONTEXT (ctx);
5015 }
5016 while (ctx && TREE_CODE (ctx) == BLOCK);
5017 TYPE_CONTEXT (type) = ctx;
5018 }
5019 }
5020
5021
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p return true on those types during
     LTO and by comparing the mangled name, we can say what types are intended
     to be equivalent across compilation units.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration types have linkage that allows us
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling does make differences
     between char/signed char/unsigned char etc.  Storing name for these makes
     e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  /* "mering" below is the historical (misspelled) name of the flag.  */
  if (flag_lto_odr_type_mering
      && TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl)
      /* Only the main TYPE_DECL of a main-variant, non-artificial,
	 non-variably-modified type with linkage (or an integer type)
	 gets an ODR mangled name.  */
      && decl == TYPE_NAME (TREE_TYPE (decl))
      && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
      && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
      && (type_with_linkage_p (TREE_TYPE (decl))
	  || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
      && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    return !DECL_ASSEMBLER_NAME_SET_P (decl);
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (DECL_BUILT_IN (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
5095
5096
/* Reset all language specific information still present in symbol
   DECL.  */

static void
free_lang_data_in_decl (tree decl)
{
  gcc_assert (DECL_P (decl));

  /* Give the FE a chance to remove its own data first.  */
  lang_hooks.free_lang_data (decl);

  TREE_LANG_FLAG_0 (decl) = 0;
  TREE_LANG_FLAG_1 (decl) = 0;
  TREE_LANG_FLAG_2 (decl) = 0;
  TREE_LANG_FLAG_3 (decl) = 0;
  TREE_LANG_FLAG_4 (decl) = 0;
  TREE_LANG_FLAG_5 (decl) = 0;
  TREE_LANG_FLAG_6 (decl) = 0;

  free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
  free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
  if (TREE_CODE (decl) == FIELD_DECL)
    {
      free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
      /* DECL_QUALIFIER is only meaningful for QUAL_UNION_TYPE members
	 and is front-end specific; drop it.  */
      if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (decl) = NULL_TREE;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      struct cgraph_node *node;
      /* Functions without a callgraph node, or whose node has neither a
	 definition nor clones, will not be emitted: release their
	 bodies.  */
      if (!(node = cgraph_node::get (decl))
	  || (!node->definition && !node->clones))
	{
	  if (node)
	    node->release_body ();
	  else
	    {
	      release_function_body (decl);
	      DECL_ARGUMENTS (decl) = NULL;
	      DECL_RESULT (decl) = NULL;
	      /* error_mark_node (rather than NULL) records that the
		 decl once had an initial/body.  */
	      DECL_INITIAL (decl) = error_mark_node;
	    }
	}
      if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
	{
	  tree t;

	  /* If DECL has a gimple body, then the context for its
	     arguments must be DECL.  Otherwise, it doesn't really
	     matter, as we will not be emitting any code for DECL.  In
	     general, there may be other instances of DECL created by
	     the front end and since PARM_DECLs are generally shared,
	     their DECL_CONTEXT changes as the replicas of DECL are
	     created.  The only time where DECL_CONTEXT is important
	     is for the FUNCTION_DECLs that have a gimple body (since
	     the PARM_DECL will be used in the function's body).  */
	  for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
	    DECL_CONTEXT (t) = decl;
	  /* Ensure every function to be emitted carries explicit
	     target/optimization nodes, defaulting to the global ones.  */
	  if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
	    DECL_FUNCTION_SPECIFIC_TARGET (decl)
	      = target_option_default_node;
	  if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
	    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
	      = optimization_default_node;
	}

      /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
	 At this point, it is not needed anymore.  */
      DECL_SAVED_TREE (decl) = NULL_TREE;

      /* Clear the abstract origin if it refers to a method.
	 Otherwise dwarf2out.c will ICE as we splice functions out of
	 TYPE_FIELDS and thus the origin will not be output
	 correctly.  */
      if (DECL_ABSTRACT_ORIGIN (decl)
	  && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
	  && RECORD_OR_UNION_TYPE_P
	       (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
	DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;

      /* Sometimes the C++ frontend doesn't manage to transform a temporary
	 DECL_VINDEX referring to itself into a vtable slot number as it
	 should.  Happens with functions that are copied and then forgotten
	 about.  Just clear it, it won't matter anymore.  */
      if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
	DECL_VINDEX (decl) = NULL_TREE;
    }
  else if (VAR_P (decl))
    {
      /* Drop initializers of externals (unless static readonly, whose
	 value may still be folded) and of non-static function-local
	 variables.  */
      if ((DECL_EXTERNAL (decl)
	   && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
	  || (decl_function_context (decl) && !TREE_STATIC (decl)))
	DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (decl) = 0;
      DECL_INITIAL (decl) = NULL_TREE;
    }
  else if (TREE_CODE (decl) == FIELD_DECL)
    DECL_INITIAL (decl) = NULL_TREE;
  else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
	   && DECL_INITIAL (decl)
	   && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
    {
      /* Strip builtins from the translation-unit BLOCK.  We still have targets
	 without builtin_decl_explicit support and also builtins are shared
	 nodes and thus we can't use TREE_CHAIN in multiple lists.  */
      tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
      while (*nextp)
	{
	  tree var = *nextp;
	  if (TREE_CODE (var) == FUNCTION_DECL
	      && DECL_BUILT_IN (var))
	    *nextp = TREE_CHAIN (var);
	  else
	    nextp = &TREE_CHAIN (var);
	}
    }
}
5219
5220
/* Data used when collecting DECLs and TYPEs for language data removal.  */

struct free_lang_data_d
{
  /* Pre-reserve space in the DECLS and TYPES arrays; 100 is an
     arbitrary initial capacity.  */
  free_lang_data_d () : decls (100), types (100) {}

  /* Worklist to avoid excessive recursion.  */
  auto_vec<tree> worklist;

  /* Set of traversed objects.  Used to avoid duplicate visits.  */
  hash_set<tree> pset;

  /* Array of symbols to process with free_lang_data_in_decl.  */
  auto_vec<tree> decls;

  /* Array of types to process with free_lang_data_in_type.  */
  auto_vec<tree> types;
};
5239
5240
5241 /* Save all language fields needed to generate proper debug information
5242 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5243
5244 static void
5245 save_debug_info_for_decl (tree t)
5246 {
5247 /*struct saved_debug_info_d *sdi;*/
5248
5249 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5250
5251 /* FIXME. Partial implementation for saving debug info removed. */
5252 }
5253
5254
5255 /* Save all language fields needed to generate proper debug information
5256 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5257
5258 static void
5259 save_debug_info_for_type (tree t)
5260 {
5261 /*struct saved_debug_info_d *sdi;*/
5262
5263 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5264
5265 /* FIXME. Partial implementation for saving debug info removed. */
5266 }
5267
5268
5269 /* Add type or decl T to one of the list of tree nodes that need their
5270 language data removed. The lists are held inside FLD. */
5271
5272 static void
5273 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5274 {
5275 if (DECL_P (t))
5276 {
5277 fld->decls.safe_push (t);
5278 if (debug_info_level > DINFO_LEVEL_TERSE)
5279 save_debug_info_for_decl (t);
5280 }
5281 else if (TYPE_P (t))
5282 {
5283 fld->types.safe_push (t);
5284 if (debug_info_level > DINFO_LEVEL_TERSE)
5285 save_debug_info_for_type (t);
5286 }
5287 else
5288 gcc_unreachable ();
5289 }
5290
5291 /* Push tree node T into FLD->WORKLIST. */
5292
5293 static inline void
5294 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5295 {
5296 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5297 fld->worklist.safe_push ((t));
5298 }
5299
5300
/* Operand callback helper for free_lang_data_in_node.  *TP is the
   subtree operand being considered; WS is walk_tree's walk-subtrees
   flag (cleared below whenever we do our own traversal); DATA is the
   free_lang_data_d state.  Always returns NULL_TREE so the walk
   continues.  */

static tree
find_decls_types_r (tree *tp, int *ws, void *data)
{
  tree t = *tp;
  struct free_lang_data_d *fld = (struct free_lang_data_d *) data;

  /* TREE_LISTs are handled by walking their elements where they occur;
     skip the list node itself.  */
  if (TREE_CODE (t) == TREE_LIST)
    return NULL_TREE;

  /* Language specific nodes will be removed, so there is no need
     to gather anything under them.  */
  if (is_lang_specific (t))
    {
      *ws = 0;
      return NULL_TREE;
    }

  if (DECL_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 decls, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      fld_worklist_push (DECL_NAME (t), fld);
      fld_worklist_push (DECL_CONTEXT (t), fld);
      fld_worklist_push (DECL_SIZE (t), fld);
      fld_worklist_push (DECL_SIZE_UNIT (t), fld);

      /* We are going to remove everything under DECL_INITIAL for
	 TYPE_DECLs.  No point walking them.  */
      if (TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (DECL_INITIAL (t), fld);

      fld_worklist_push (DECL_ATTRIBUTES (t), fld);
      fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);

      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  fld_worklist_push (DECL_ARGUMENTS (t), fld);
	  fld_worklist_push (DECL_RESULT (t), fld);
	}
      else if (TREE_CODE (t) == TYPE_DECL)
	{
	  fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
	}
      else if (TREE_CODE (t) == FIELD_DECL)
	{
	  fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
	  fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
	  fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
	  fld_worklist_push (DECL_FCONTEXT (t), fld);
	}

      /* DECL_VALUE_EXPR overloads other fields, so only read it when
	 the flag says it is present.  */
      if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	fld_worklist_push (DECL_VALUE_EXPR (t), fld);

      /* FIELD_DECL and TYPE_DECL chains are walked via their containing
	 aggregate instead.  */
      if (TREE_CODE (t) != FIELD_DECL
	  && TREE_CODE (t) != TYPE_DECL)
	fld_worklist_push (TREE_CHAIN (t), fld);
      *ws = 0;
    }
  else if (TYPE_P (t))
    {
      /* Note that walk_tree does not traverse every possible field in
	 types, so we have to do our own traversals here.  */
      add_tree_to_fld_list (t, fld);

      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
      fld_worklist_push (TYPE_SIZE (t), fld);
      fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
      fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
      fld_worklist_push (TYPE_POINTER_TO (t), fld);
      fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
      fld_worklist_push (TYPE_NAME (t), fld);
      /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
	 them and thus do not and want not to reach unused pointer types
	 this way.  */
      if (!POINTER_TYPE_P (t))
	fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
      if (!RECORD_OR_UNION_TYPE_P (t))
	fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
      fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
	 do not and want not to reach unused variants this way.  */
      if (TYPE_CONTEXT (t))
	{
	  tree ctx = TYPE_CONTEXT (t);
	  /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
	     So push that instead.  */
	  while (ctx && TREE_CODE (ctx) == BLOCK)
	    ctx = BLOCK_SUPERCONTEXT (ctx);
	  fld_worklist_push (ctx, fld);
	}
      /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do not
	 and want not to reach unused types this way.  */

      if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
	{
	  unsigned i;
	  tree tem;
	  FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
	    fld_worklist_push (TREE_TYPE (tem), fld);
	  fld_worklist_push (BINFO_VIRTUALS (TYPE_BINFO (t)), fld);
	}
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  tree tem;
	  /* Push all TYPE_FIELDS - there can be interleaving interesting
	     and non-interesting things.  */
	  tem = TYPE_FIELDS (t);
	  while (tem)
	    {
	      if (TREE_CODE (tem) == FIELD_DECL
		  || (TREE_CODE (tem) == TYPE_DECL
		      && !DECL_IGNORED_P (tem)
		      && debug_info_level > DINFO_LEVEL_TERSE
		      && !is_redundant_typedef (tem)))
		fld_worklist_push (tem, fld);
	      tem = TREE_CHAIN (tem);
	    }
	}

      fld_worklist_push (TYPE_STUB_DECL (t), fld);
      *ws = 0;
    }
  else if (TREE_CODE (t) == BLOCK)
    {
      tree tem;
      for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
	fld_worklist_push (tem, fld);
      for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
	fld_worklist_push (tem, fld);
      fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
    }

  /* Every typed node also drags in its type.  */
  if (TREE_CODE (t) != IDENTIFIER_NODE
      && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
    fld_worklist_push (TREE_TYPE (t), fld);

  return NULL_TREE;
}
5447
5448
5449 /* Find decls and types in T. */
5450
5451 static void
5452 find_decls_types (tree t, struct free_lang_data_d *fld)
5453 {
5454 while (1)
5455 {
5456 if (!fld->pset.contains (t))
5457 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5458 if (fld->worklist.is_empty ())
5459 break;
5460 t = fld->worklist.pop ();
5461 }
5462 }
5463
5464 /* Translate all the types in LIST with the corresponding runtime
5465 types. */
5466
5467 static tree
5468 get_eh_types_for_runtime (tree list)
5469 {
5470 tree head, prev;
5471
5472 if (list == NULL_TREE)
5473 return NULL_TREE;
5474
5475 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5476 prev = head;
5477 list = TREE_CHAIN (list);
5478 while (list)
5479 {
5480 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5481 TREE_CHAIN (prev) = n;
5482 prev = TREE_CHAIN (prev);
5483 list = TREE_CHAIN (list);
5484 }
5485
5486 return head;
5487 }
5488
5489
/* Find decls and types referenced in EH region R and store them in
   FLD->DECLS and FLD->TYPES.  */

static void
find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
{
  switch (r->type)
    {
    case ERT_CLEANUP:
      /* Cleanup regions reference no decls or types directly.  */
      break;

    case ERT_TRY:
      {
	eh_catch c;

	/* The types referenced in each catch must first be changed to the
	   EH types used at runtime.  This removes references to FE types
	   in the region.  */
	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	  {
	    c->type_list = get_eh_types_for_runtime (c->type_list);
	    walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      /* Same runtime-type translation as for catches above.  */
      r->u.allowed.type_list
	= get_eh_types_for_runtime (r->u.allowed.type_list);
      walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
      break;

    case ERT_MUST_NOT_THROW:
      walk_tree (&r->u.must_not_throw.failure_decl,
		 find_decls_types_r, fld, &fld->pset);
      break;
    }
}
5528
5529
/* Find decls and types referenced in cgraph node N and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from N,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
{
  basic_block bb;
  struct function *fn;
  unsigned ix;
  tree t;

  find_decls_types (n->decl, fld);

  /* Without a gimple body there is nothing further to scan.  */
  if (!gimple_has_body_p (n->decl))
    return;

  /* This runs outside of any function context.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  fn = DECL_STRUCT_FUNCTION (n->decl);

  /* Traverse locals. */
  FOR_EACH_LOCAL_DECL (fn, ix, t)
    find_decls_types (t, fld);

  /* Traverse EH regions in FN.  */
  {
    eh_region r;
    FOR_ALL_EH_REGION_FN (r, fn)
      find_decls_types_in_eh_region (r, fld);
  }

  /* Traverse every statement in FN.  */
  FOR_EACH_BB_FN (bb, fn)
    {
      gphi_iterator psi;
      gimple_stmt_iterator si;
      unsigned i;

      /* Scan every PHI argument.  */
      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
	{
	  gphi *phi = psi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
	      find_decls_types (*arg_p, fld);
	    }
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);

	  /* The called function's type is not among the operands, so
	     scan it explicitly.  */
	  if (is_gimple_call (stmt))
	    find_decls_types (gimple_call_fntype (stmt), fld);

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree arg = gimple_op (stmt, i);
	      find_decls_types (arg, fld);
	    }
	}
    }
}
5597
5598
/* Find decls and types referenced in varpool node V and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from V,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
{
  /* A variable has no body; scanning its decl covers everything.  */
  find_decls_types (v->decl, fld);
}
5610
/* If T needs an assembler name, have one created for it.  T is left
   untouched when need_assembler_name_p rejects it.  */

void
assign_assembler_name_if_needed (tree t)
{
  if (need_assembler_name_p (t))
    {
      /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
	 diagnostics that use input_location to show locus
	 information.  The problem here is that, at this point,
	 input_location is generally anchored to the end of the file
	 (since the parser is long gone), so we don't have a good
	 position to pin it to.

	 To alleviate this problem, this uses the location of T's
	 declaration.  Examples of this are
	 testsuite/g++.dg/template/cond2.C and
	 testsuite/g++.dg/template/pr35240.C.  */
      location_t saved_location = input_location;
      input_location = DECL_SOURCE_LOCATION (t);

      /* Computing the name has the side effect of caching it on T.  */
      decl_assembler_name (t);

      /* Restore the global location even on the success path.  */
      input_location = saved_location;
    }
}
5637
5638
/* Free language specific information for every operand and expression
   in every node of the call graph.  This process operates in three stages:

   1- Every callgraph node and varpool node is traversed looking for
      decls and types embedded in them.  This is a more exhaustive
      search than that done by find_referenced_vars, because it will
      also collect individual fields, decls embedded in types, etc.

   2- All the decls found are sent to free_lang_data_in_decl.

   3- All the types found are sent to free_lang_data_in_type.

   The ordering between decls and types is important because
   free_lang_data_in_decl sets assembler names, which includes
   mangling.  So types cannot be freed up until assembler names have
   been set up.  */

static void
free_lang_data_in_cgraph (void)
{
  struct cgraph_node *n;
  varpool_node *v;
  struct free_lang_data_d fld;
  tree t;
  unsigned i;
  alias_pair *p;

  /* Find decls and types in the body of every function in the callgraph. */
  FOR_EACH_FUNCTION (n)
    find_decls_types_in_node (n, &fld);

  /* Alias targets are decls too; make sure they are collected.  */
  FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
    find_decls_types (p->decl, &fld);

  /* Find decls and types in every varpool symbol. */
  FOR_EACH_VARIABLE (v)
    find_decls_types_in_var (v, &fld);

  /* Set the assembler name on every decl found.  We need to do this
     now because free_lang_data_in_decl will invalidate data needed
     for mangling.  This breaks mangling on interdependent decls.  */
  FOR_EACH_VEC_ELT (fld.decls, i, t)
    assign_assembler_name_if_needed (t);

  /* Traverse every decl found freeing its language data. */
  FOR_EACH_VEC_ELT (fld.decls, i, t)
    free_lang_data_in_decl (t);

  /* Traverse every type found freeing its language data. */
  FOR_EACH_VEC_ELT (fld.types, i, t)
    free_lang_data_in_type (t);
  /* Only verify once the FE data is gone, and only in checking builds.  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (fld.types, i, t)
	verify_type (t);
    }
}
5696
5697
/* Free resources that are used by FE but are not needed once they are done.
   Returns 0 (the pass's TODO flags).  */

static unsigned
free_lang_data (void)
{
  unsigned i;

  /* If we are the LTO frontend we have freed lang-specific data already.
     Likewise there is nothing to do unless we are streaming out.  */
  if (in_lto_p
      || (!flag_generate_lto && !flag_generate_offload))
    return 0;

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph ();

  /* Create gimple variants for common types.  */
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  lang_hooks.gimplify_expr = lhd_gimplify_expr;

  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  tree_diagnostics_defaults (global_dc);

  return 0;
}
5744
5745
namespace {

/* Pass descriptor for the free_lang_data IPA pass.  The '*' prefix in
   the name keeps it out of -fdump-passes listings by convention.  */
const pass_data pass_data_ipa_free_lang_data =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_lang_data", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_LANG_DATA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Simple IPA pass wrapping free_lang_data; it always runs (no gate).  */
class pass_ipa_free_lang_data : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_lang_data (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return free_lang_data (); }

}; // class pass_ipa_free_lang_data

} // anon namespace
5774
/* Factory for the free_lang_data pass; called by the pass manager,
   which takes ownership of the returned object.  */

simple_ipa_opt_pass *
make_pass_ipa_free_lang_data (gcc::context *ctxt)
{
  return new pass_ipa_free_lang_data (ctxt);
}
5780 \f
5781 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5782 of the various TYPE_QUAL values. */
5783
5784 static void
5785 set_type_quals (tree type, int type_quals)
5786 {
5787 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5788 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5789 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5790 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5791 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5792 }
5793
5794 /* Returns true iff CAND and BASE have equivalent language-specific
5795 qualifiers. */
5796
5797 bool
5798 check_lang_type (const_tree cand, const_tree base)
5799 {
5800 if (lang_hooks.types.type_hash_eq == NULL)
5801 return true;
5802 /* type_hash_eq currently only applies to these types. */
5803 if (TREE_CODE (cand) != FUNCTION_TYPE
5804 && TREE_CODE (cand) != METHOD_TYPE)
5805 return true;
5806 return lang_hooks.types.type_hash_eq (cand, base);
5807 }
5808
5809 /* Returns true iff unqualified CAND and BASE are equivalent. */
5810
5811 bool
5812 check_base_type (const_tree cand, const_tree base)
5813 {
5814 return (TYPE_NAME (cand) == TYPE_NAME (base)
5815 /* Apparently this is needed for Objective-C. */
5816 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5817 /* Check alignment. */
5818 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5819 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5820 TYPE_ATTRIBUTES (base)));
5821 }
5822
5823 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5824
5825 bool
5826 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5827 {
5828 return (TYPE_QUALS (cand) == type_quals
5829 && check_base_type (cand, base)
5830 && check_lang_type (cand, base));
5831 }
5832
5833 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5834
5835 static bool
5836 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5837 {
5838 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5839 && TYPE_NAME (cand) == TYPE_NAME (base)
5840 /* Apparently this is needed for Objective-C. */
5841 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5842 /* Check alignment. */
5843 && TYPE_ALIGN (cand) == align
5844 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5845 TYPE_ATTRIBUTES (base))
5846 && check_lang_type (cand, base));
5847 }
5848
5849 /* This function checks to see if TYPE matches the size one of the built-in
5850 atomic types, and returns that core atomic type. */
5851
5852 static tree
5853 find_atomic_core_type (tree type)
5854 {
5855 tree base_atomic_type;
5856
5857 /* Only handle complete types. */
5858 if (TYPE_SIZE (type) == NULL_TREE)
5859 return NULL_TREE;
5860
5861 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
5862 switch (type_size)
5863 {
5864 case 8:
5865 base_atomic_type = atomicQI_type_node;
5866 break;
5867
5868 case 16:
5869 base_atomic_type = atomicHI_type_node;
5870 break;
5871
5872 case 32:
5873 base_atomic_type = atomicSI_type_node;
5874 break;
5875
5876 case 64:
5877 base_atomic_type = atomicDI_type_node;
5878 break;
5879
5880 case 128:
5881 base_atomic_type = atomicTI_type_node;
5882 break;
5883
5884 default:
5885 base_atomic_type = NULL_TREE;
5886 }
5887
5888 return base_atomic_type;
5889 }
5890
5891 /* Return a version of the TYPE, qualified as indicated by the
5892 TYPE_QUALS, if one exists. If no qualified version exists yet,
5893 return NULL_TREE. */
5894
5895 tree
5896 get_qualified_type (tree type, int type_quals)
5897 {
5898 tree t;
5899
5900 if (TYPE_QUALS (type) == type_quals)
5901 return type;
5902
5903 /* Search the chain of variants to see if there is already one there just
5904 like the one we need to have. If so, use that existing one. We must
5905 preserve the TYPE_NAME, since there is code that depends on this. */
5906 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5907 if (check_qualified_type (t, type, type_quals))
5908 return t;
5909
5910 return NULL_TREE;
5911 }
5912
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	{
	  /* Note this recursion terminates because the canonical type
	     is its own canonical type.  */
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}
5961
5962 /* Create a variant of type T with alignment ALIGN. */
5963
5964 tree
5965 build_aligned_type (tree type, unsigned int align)
5966 {
5967 tree t;
5968
5969 if (TYPE_PACKED (type)
5970 || TYPE_ALIGN (type) == align)
5971 return type;
5972
5973 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5974 if (check_aligned_type (t, type, align))
5975 return t;
5976
5977 t = build_variant_type_copy (type);
5978 SET_TYPE_ALIGN (t, align);
5979 TYPE_USER_ALIGN (t) = 1;
5980
5981 return t;
5982 }
5983
5984 /* Create a new distinct copy of TYPE. The new type is made its own
5985 MAIN_VARIANT. If TYPE requires structural equality checks, the
5986 resulting type requires structural equality checks; otherwise, its
5987 TYPE_CANONICAL points to itself. */
5988
5989 tree
5990 build_distinct_type_copy (tree type MEM_STAT_DECL)
5991 {
5992 tree t = copy_node (type PASS_MEM_STAT);
5993
5994 TYPE_POINTER_TO (t) = 0;
5995 TYPE_REFERENCE_TO (t) = 0;
5996
5997 /* Set the canonical type either to a new equivalence class, or
5998 propagate the need for structural equality checks. */
5999 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6000 SET_TYPE_STRUCTURAL_EQUALITY (t);
6001 else
6002 TYPE_CANONICAL (t) = t;
6003
6004 /* Make it its own variant. */
6005 TYPE_MAIN_VARIANT (t) = t;
6006 TYPE_NEXT_VARIANT (t) = 0;
6007
6008 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6009 whose TREE_TYPE is not t. This can also happen in the Ada
6010 frontend when using subtypes. */
6011
6012 return t;
6013 }
6014
/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks).  */

tree
build_variant_type_copy (tree type MEM_STAT_DECL)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type PASS_MEM_STAT);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P.  */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE; splice it in
     right after the main variant M so the chain stays rooted there.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
6041 \f
6042 /* Return true if the from tree in both tree maps are equal. */
6043
6044 int
6045 tree_map_base_eq (const void *va, const void *vb)
6046 {
6047 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6048 *const b = (const struct tree_map_base *) vb;
6049 return (a->from == b->from);
6050 }
6051
6052 /* Hash a from tree in a tree_base_map. */
6053
6054 unsigned int
6055 tree_map_base_hash (const void *item)
6056 {
6057 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6058 }
6059
6060 /* Return true if this tree map structure is marked for garbage collection
6061 purposes. We simply return true if the from tree is marked, so that this
6062 structure goes away when the from tree goes away. */
6063
6064 int
6065 tree_map_base_marked_p (const void *p)
6066 {
6067 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6068 }
6069
6070 /* Hash a from tree in a tree_map. */
6071
6072 unsigned int
6073 tree_map_hash (const void *item)
6074 {
6075 return (((const struct tree_map *) item)->hash);
6076 }
6077
6078 /* Hash a from tree in a tree_decl_map. */
6079
6080 unsigned int
6081 tree_decl_map_hash (const void *item)
6082 {
6083 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6084 }
6085
6086 /* Return the initialization priority for DECL. */
6087
6088 priority_type
6089 decl_init_priority_lookup (tree decl)
6090 {
6091 symtab_node *snode = symtab_node::get (decl);
6092
6093 if (!snode)
6094 return DEFAULT_INIT_PRIORITY;
6095 return
6096 snode->get_init_priority ();
6097 }
6098
6099 /* Return the finalization priority for DECL. */
6100
6101 priority_type
6102 decl_fini_priority_lookup (tree decl)
6103 {
6104 cgraph_node *node = cgraph_node::get (decl);
6105
6106 if (!node)
6107 return DEFAULT_INIT_PRIORITY;
6108 return
6109 node->get_fini_priority ();
6110 }
6111
/* Set the initialization priority for DECL to PRIORITY.  */

void
decl_init_priority_insert (tree decl, priority_type priority)
{
  struct symtab_node *snode;

  /* For the default priority only record it on an already-existing
     symbol; do not create a symtab node just to store the default.  */
  if (priority == DEFAULT_INIT_PRIORITY)
    {
      snode = symtab_node::get (decl);
      if (!snode)
	return;
    }
  else if (VAR_P (decl))
    snode = varpool_node::get_create (decl);
  else
    snode = cgraph_node::get_create (decl);
  snode->set_init_priority (priority);
}
6131
/* Set the finalization priority for DECL to PRIORITY.  */

void
decl_fini_priority_insert (tree decl, priority_type priority)
{
  struct cgraph_node *node;

  /* For the default priority only record it on an already-existing
     node; do not create a cgraph node just to store the default.  */
  if (priority == DEFAULT_INIT_PRIORITY)
    {
      node = cgraph_node::get (decl);
      if (!node)
	return;
    }
  else
    node = cgraph_node::get_create (decl);
  node->set_fini_priority (priority);
}
6149
/* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */

static void
print_debug_expr_statistics (void)
{
  /* The casts keep the arguments in sync with the %ld directives.  */
  fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) debug_expr_for_decl->size (),
	   (long) debug_expr_for_decl->elements (),
	   debug_expr_for_decl->collisions ());
}
6160
/* Print out the statistics for the DECL_VALUE_EXPR hash table.  */

static void
print_value_expr_statistics (void)
{
  /* The casts keep the arguments in sync with the %ld directives.  */
  fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) value_expr_for_decl->size (),
	   (long) value_expr_for_decl->elements (),
	   value_expr_for_decl->collisions ());
}
6171
6172 /* Lookup a debug expression for FROM, and return it if we find one. */
6173
6174 tree
6175 decl_debug_expr_lookup (tree from)
6176 {
6177 struct tree_decl_map *h, in;
6178 in.base.from = from;
6179
6180 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6181 if (h)
6182 return h->to;
6183 return NULL_TREE;
6184 }
6185
6186 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6187
6188 void
6189 decl_debug_expr_insert (tree from, tree to)
6190 {
6191 struct tree_decl_map *h;
6192
6193 h = ggc_alloc<tree_decl_map> ();
6194 h->base.from = from;
6195 h->to = to;
6196 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6197 }
6198
6199 /* Lookup a value expression for FROM, and return it if we find one. */
6200
6201 tree
6202 decl_value_expr_lookup (tree from)
6203 {
6204 struct tree_decl_map *h, in;
6205 in.base.from = from;
6206
6207 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6208 if (h)
6209 return h->to;
6210 return NULL_TREE;
6211 }
6212
6213 /* Insert a mapping FROM->TO in the value expression hashtable. */
6214
6215 void
6216 decl_value_expr_insert (tree from, tree to)
6217 {
6218 struct tree_decl_map *h;
6219
6220 h = ggc_alloc<tree_decl_map> ();
6221 h->base.from = from;
6222 h->to = to;
6223 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6224 }
6225
/* Lookup a vector of debug arguments for FROM, and return it if we
   find one.  */

vec<tree, va_gc> **
decl_debug_args_lookup (tree from)
{
  struct tree_vec_map *h, in;

  /* DECL_HAS_DEBUG_ARGS_P gates the table: when it is clear the table
     cannot contain an entry for FROM.  */
  if (!DECL_HAS_DEBUG_ARGS_P (from))
    return NULL;
  gcc_checking_assert (debug_args_for_decl != NULL);
  in.base.from = from;
  h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return &h->to;
  return NULL;
}
6243
/* Insert a mapping FROM->empty vector of debug arguments in the value
   expression hashtable.  */

vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
  struct tree_vec_map *h;
  tree_vec_map **loc;

  /* If FROM already has debug args, return the existing vector.  */
  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  /* The table is created lazily on first insertion.  */
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  h->to = NULL;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  *loc = h;
  /* Mark the decl so lookups know an entry exists.  */
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
  return &h->to;
}
6265
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      /* Mix in every argument type; error_mark_node entries carry no
	 useful distinction.  */
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	/* TYPE_TYPELESS_STORAGE participates in the hash only for
	   non-aggregate element types, matching the equality check.  */
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	/* Hash the bound constant: MAX_VALUE if present, else MIN_VALUE.  */
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned nunits = TYPE_VECTOR_SUBPARTS (type);
	hstate.add_object (nunits);
	break;
      }

    default:
      break;
    }

  return hstate.end ();
}
6344
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				 TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
          && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Per-code checks.  Cases that `break' fall through to the language
     hook at the bottom; cases that `return' decide by themselves.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);

    case ENUMERAL_TYPE:
      /* Value lists must be identical or element-wise equal.  */
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
    return 0;

    default:
      return 0;
    }

  /* Only the FUNCTION_TYPE/METHOD_TYPE `break' paths reach here; let
     the front end have the final say for those.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}
6466
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      /* An identical type already exists; discard TYPE and return the
	 canonical one.  */
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
      /* If TYPE was the most recently created type, recycle its UID.  */
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  /* Only free bounds that belong to TYPE itself; shared bounds
	     typed on another node must survive.  */
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    ggc_free (TYPE_MIN_VALUE (type));
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    ggc_free (TYPE_MAX_VALUE (type));
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      /* TYPE is new; record it as the canonical object for this hash.  */
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}
6529
/* Print out the statistics for the type hash table.  */

static void
print_type_hash_statistics (void)
{
  /* The casts keep the arguments in sync with the %ld directives.  */
  fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
	   (long) type_hash_table->size (),
	   (long) type_hash_table->elements (),
	   type_hash_table->collisions ());
}
6538
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

int
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    /* Values must be pointer-identical; purposes may alternatively be
       equal constants of the same type.  */
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
      return 0;

  /* Both chains must also have ended together.  */
  return t1 == t2;
}
6559
6560 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6561 given by TYPE. If the argument list accepts variable arguments,
6562 then this function counts only the ordinary arguments. */
6563
6564 int
6565 type_num_arguments (const_tree type)
6566 {
6567 int i = 0;
6568 tree t;
6569
6570 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6571 /* If the function does not take a variable number of arguments,
6572 the last element in the list will have type `void'. */
6573 if (VOID_TYPE_P (TREE_VALUE (t)))
6574 break;
6575 else
6576 ++i;
6577
6578 return i;
6579 }
6580
6581 /* Nonzero if integer constants T1 and T2
6582 represent the same constant value. */
6583
6584 int
6585 tree_int_cst_equal (const_tree t1, const_tree t2)
6586 {
6587 if (t1 == t2)
6588 return 1;
6589
6590 if (t1 == 0 || t2 == 0)
6591 return 0;
6592
6593 if (TREE_CODE (t1) == INTEGER_CST
6594 && TREE_CODE (t2) == INTEGER_CST
6595 && wi::to_widest (t1) == wi::to_widest (t2))
6596 return 1;
6597
6598 return 0;
6599 }
6600
6601 /* Return true if T is an INTEGER_CST whose numerical value (extended
6602 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6603
6604 bool
6605 tree_fits_shwi_p (const_tree t)
6606 {
6607 return (t != NULL_TREE
6608 && TREE_CODE (t) == INTEGER_CST
6609 && wi::fits_shwi_p (wi::to_widest (t)));
6610 }
6611
6612 /* Return true if T is an INTEGER_CST whose numerical value (extended
6613 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6614
6615 bool
6616 tree_fits_uhwi_p (const_tree t)
6617 {
6618 return (t != NULL_TREE
6619 && TREE_CODE (t) == INTEGER_CST
6620 && wi::fits_uhwi_p (wi::to_widest (t)));
6621 }
6622
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  /* Callers are required to have checked tree_fits_shwi_p first; once
     the value fits, the low element of the constant holds the whole
     number.  */
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
}
6633
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  /* Callers are required to have checked tree_fits_uhwi_p first; once
     the value fits, the low element of the constant holds the whole
     number.  */
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
}
6644
6645 /* Return the most significant (sign) bit of T. */
6646
6647 int
6648 tree_int_cst_sign_bit (const_tree t)
6649 {
6650 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6651
6652 return wi::extract_uhwi (t, bitno, 1);
6653 }
6654
6655 /* Return an indication of the sign of the integer constant T.
6656 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6657 Note that -1 will never be returned if T's type is unsigned. */
6658
6659 int
6660 tree_int_cst_sgn (const_tree t)
6661 {
6662 if (wi::eq_p (t, 0))
6663 return 0;
6664 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6665 return 1;
6666 else if (wi::neg_p (t))
6667 return -1;
6668 else
6669 return 1;
6670 }
6671
6672 /* Return the minimum number of bits needed to represent VALUE in a
6673 signed or unsigned type, UNSIGNEDP says which. */
6674
6675 unsigned int
6676 tree_int_cst_min_precision (tree value, signop sgn)
6677 {
6678 /* If the value is negative, compute its negative minus 1. The latter
6679 adjustment is because the absolute value of the largest negative value
6680 is one larger than the largest positive value. This is equivalent to
6681 a bit-wise negation, so use that operation instead. */
6682
6683 if (tree_int_cst_sgn (value) < 0)
6684 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6685
6686 /* Return the number of bits needed, taking into account the fact
6687 that we need one more bit for a signed than unsigned type.
6688 If value is 0 or -1, the minimum precision is 1 no matter
6689 whether unsignedp is true or false. */
6690
6691 if (integer_zerop (value))
6692 return 1;
6693 else
6694 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
6695 }
6696
6697 /* Return truthvalue of whether T1 is the same tree structure as T2.
6698 Return 1 if they are the same.
6699 Return 0 if they are understandably different.
6700 Return -1 if either contains tree structure not understood by
6701 this function. */
6702
6703 int
6704 simple_cst_equal (const_tree t1, const_tree t2)
6705 {
6706 enum tree_code code1, code2;
6707 int cmp;
6708 int i;
6709
6710 if (t1 == t2)
6711 return 1;
6712 if (t1 == 0 || t2 == 0)
6713 return 0;
6714
6715 code1 = TREE_CODE (t1);
6716 code2 = TREE_CODE (t2);
6717
6718 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6719 {
6720 if (CONVERT_EXPR_CODE_P (code2)
6721 || code2 == NON_LVALUE_EXPR)
6722 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6723 else
6724 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6725 }
6726
6727 else if (CONVERT_EXPR_CODE_P (code2)
6728 || code2 == NON_LVALUE_EXPR)
6729 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6730
6731 if (code1 != code2)
6732 return 0;
6733
6734 switch (code1)
6735 {
6736 case INTEGER_CST:
6737 return wi::to_widest (t1) == wi::to_widest (t2);
6738
6739 case REAL_CST:
6740 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6741
6742 case FIXED_CST:
6743 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6744
6745 case STRING_CST:
6746 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6747 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6748 TREE_STRING_LENGTH (t1)));
6749
6750 case CONSTRUCTOR:
6751 {
6752 unsigned HOST_WIDE_INT idx;
6753 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6754 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6755
6756 if (vec_safe_length (v1) != vec_safe_length (v2))
6757 return false;
6758
6759 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6760 /* ??? Should we handle also fields here? */
6761 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6762 return false;
6763 return true;
6764 }
6765
6766 case SAVE_EXPR:
6767 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6768
6769 case CALL_EXPR:
6770 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6771 if (cmp <= 0)
6772 return cmp;
6773 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6774 return 0;
6775 {
6776 const_tree arg1, arg2;
6777 const_call_expr_arg_iterator iter1, iter2;
6778 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6779 arg2 = first_const_call_expr_arg (t2, &iter2);
6780 arg1 && arg2;
6781 arg1 = next_const_call_expr_arg (&iter1),
6782 arg2 = next_const_call_expr_arg (&iter2))
6783 {
6784 cmp = simple_cst_equal (arg1, arg2);
6785 if (cmp <= 0)
6786 return cmp;
6787 }
6788 return arg1 == arg2;
6789 }
6790
6791 case TARGET_EXPR:
6792 /* Special case: if either target is an unallocated VAR_DECL,
6793 it means that it's going to be unified with whatever the
6794 TARGET_EXPR is really supposed to initialize, so treat it
6795 as being equivalent to anything. */
6796 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6797 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6798 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6799 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6800 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6801 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6802 cmp = 1;
6803 else
6804 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6805
6806 if (cmp <= 0)
6807 return cmp;
6808
6809 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6810
6811 case WITH_CLEANUP_EXPR:
6812 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6813 if (cmp <= 0)
6814 return cmp;
6815
6816 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
6817
6818 case COMPONENT_REF:
6819 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6820 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6821
6822 return 0;
6823
6824 case VAR_DECL:
6825 case PARM_DECL:
6826 case CONST_DECL:
6827 case FUNCTION_DECL:
6828 return 0;
6829
6830 default:
6831 break;
6832 }
6833
6834 /* This general rule works for most tree codes. All exceptions should be
6835 handled above. If this is a language-specific tree code, we can't
6836 trust what might be in the operand, so say we don't know
6837 the situation. */
6838 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6839 return -1;
6840
6841 switch (TREE_CODE_CLASS (code1))
6842 {
6843 case tcc_unary:
6844 case tcc_binary:
6845 case tcc_comparison:
6846 case tcc_expression:
6847 case tcc_reference:
6848 case tcc_statement:
6849 cmp = 1;
6850 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6851 {
6852 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6853 if (cmp <= 0)
6854 return cmp;
6855 }
6856
6857 return cmp;
6858
6859 default:
6860 return -1;
6861 }
6862 }
6863
6864 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6865 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6866 than U, respectively. */
6867
6868 int
6869 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6870 {
6871 if (tree_int_cst_sgn (t) < 0)
6872 return -1;
6873 else if (!tree_fits_uhwi_p (t))
6874 return 1;
6875 else if (TREE_INT_CST_LOW (t) == u)
6876 return 0;
6877 else if (TREE_INT_CST_LOW (t) < u)
6878 return -1;
6879 else
6880 return 1;
6881 }
6882
6883 /* Return true if SIZE represents a constant size that is in bounds of
6884 what the middle-end and the backend accepts (covering not more than
6885 half of the address-space). */
6886
6887 bool
6888 valid_constant_size_p (const_tree size)
6889 {
6890 if (! tree_fits_uhwi_p (size)
6891 || TREE_OVERFLOW (size)
6892 || tree_int_cst_sign_bit (size) != 0)
6893 return false;
6894 return true;
6895 }
6896
6897 /* Return the precision of the type, or for a complex or vector type the
6898 precision of the type of its elements. */
6899
6900 unsigned int
6901 element_precision (const_tree type)
6902 {
6903 if (!TYPE_P (type))
6904 type = TREE_TYPE (type);
6905 enum tree_code code = TREE_CODE (type);
6906 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6907 type = TREE_TYPE (type);
6908
6909 return TYPE_PRECISION (type);
6910 }
6911
6912 /* Return true if CODE represents an associative tree code. Otherwise
6913 return false. */
6914 bool
6915 associative_tree_code (enum tree_code code)
6916 {
6917 switch (code)
6918 {
6919 case BIT_IOR_EXPR:
6920 case BIT_AND_EXPR:
6921 case BIT_XOR_EXPR:
6922 case PLUS_EXPR:
6923 case MULT_EXPR:
6924 case MIN_EXPR:
6925 case MAX_EXPR:
6926 return true;
6927
6928 default:
6929 break;
6930 }
6931 return false;
6932 }
6933
6934 /* Return true if CODE represents a commutative tree code. Otherwise
6935 return false. */
6936 bool
6937 commutative_tree_code (enum tree_code code)
6938 {
6939 switch (code)
6940 {
6941 case PLUS_EXPR:
6942 case MULT_EXPR:
6943 case MULT_HIGHPART_EXPR:
6944 case MIN_EXPR:
6945 case MAX_EXPR:
6946 case BIT_IOR_EXPR:
6947 case BIT_XOR_EXPR:
6948 case BIT_AND_EXPR:
6949 case NE_EXPR:
6950 case EQ_EXPR:
6951 case UNORDERED_EXPR:
6952 case ORDERED_EXPR:
6953 case UNEQ_EXPR:
6954 case LTGT_EXPR:
6955 case TRUTH_AND_EXPR:
6956 case TRUTH_XOR_EXPR:
6957 case TRUTH_OR_EXPR:
6958 case WIDEN_MULT_EXPR:
6959 case VEC_WIDEN_MULT_HI_EXPR:
6960 case VEC_WIDEN_MULT_LO_EXPR:
6961 case VEC_WIDEN_MULT_EVEN_EXPR:
6962 case VEC_WIDEN_MULT_ODD_EXPR:
6963 return true;
6964
6965 default:
6966 break;
6967 }
6968 return false;
6969 }
6970
6971 /* Return true if CODE represents a ternary tree code for which the
6972 first two operands are commutative. Otherwise return false. */
6973 bool
6974 commutative_ternary_tree_code (enum tree_code code)
6975 {
6976 switch (code)
6977 {
6978 case WIDEN_MULT_PLUS_EXPR:
6979 case WIDEN_MULT_MINUS_EXPR:
6980 case DOT_PROD_EXPR:
6981 case FMA_EXPR:
6982 return true;
6983
6984 default:
6985 break;
6986 }
6987 return false;
6988 }
6989
6990 /* Returns true if CODE can overflow. */
6991
6992 bool
6993 operation_can_overflow (enum tree_code code)
6994 {
6995 switch (code)
6996 {
6997 case PLUS_EXPR:
6998 case MINUS_EXPR:
6999 case MULT_EXPR:
7000 case LSHIFT_EXPR:
7001 /* Can overflow in various ways. */
7002 return true;
7003 case TRUNC_DIV_EXPR:
7004 case EXACT_DIV_EXPR:
7005 case FLOOR_DIV_EXPR:
7006 case CEIL_DIV_EXPR:
7007 /* For INT_MIN / -1. */
7008 return true;
7009 case NEGATE_EXPR:
7010 case ABS_EXPR:
7011 /* For -INT_MIN. */
7012 return true;
7013 default:
7014 /* These operators cannot overflow. */
7015 return false;
7016 }
7017 }
7018
7019 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7020 ftrapv doesn't generate trapping insns for CODE. */
7021
7022 bool
7023 operation_no_trapping_overflow (tree type, enum tree_code code)
7024 {
7025 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7026
7027 /* We don't generate instructions that trap on overflow for complex or vector
7028 types. */
7029 if (!INTEGRAL_TYPE_P (type))
7030 return true;
7031
7032 if (!TYPE_OVERFLOW_TRAPS (type))
7033 return true;
7034
7035 switch (code)
7036 {
7037 case PLUS_EXPR:
7038 case MINUS_EXPR:
7039 case MULT_EXPR:
7040 case NEGATE_EXPR:
7041 case ABS_EXPR:
7042 /* These operators can overflow, and -ftrapv generates trapping code for
7043 these. */
7044 return false;
7045 case TRUNC_DIV_EXPR:
7046 case EXACT_DIV_EXPR:
7047 case FLOOR_DIV_EXPR:
7048 case CEIL_DIV_EXPR:
7049 case LSHIFT_EXPR:
7050 /* These operators can overflow, but -ftrapv does not generate trapping
7051 code for these. */
7052 return true;
7053 default:
7054 /* These operators cannot overflow. */
7055 return true;
7056 }
7057 }
7058
7059 namespace inchash
7060 {
7061
/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p.  FLAGS carries OEP_* bits
   (notably OEP_ADDRESS_OF, under which T is hashed as an address rather
   than a value).  */
void
add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
{
  int i;
  enum tree_code code;
  enum tree_code_class tclass;

  /* Missing or erroneous subexpressions all hash to the same value.  */
  if (t == NULL_TREE || t == error_mark_node)
    {
      hstate.merge_hash (0);
      return;
    }

  /* When hashing a value (not an address), conversions are stripped the
     same way operand_equal_p strips them.  */
  if (!(flags & OEP_ADDRESS_OF))
    STRIP_NOPS (t);

  code = TREE_CODE (t);

  switch (code)
    {
    /* Alas, constants aren't shared, so we can't rely on pointer
       identity.  */
    case VOID_CST:
      hstate.merge_hash (0);
      return;
    case INTEGER_CST:
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      /* Hash every element of the (possibly extended) wide value.  */
      for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
	hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
      return;
    case REAL_CST:
      {
	unsigned int val2;
	/* +0.0 and -0.0 must hash alike when signed zeros are not
	   honored, since operand_equal_p treats them as equal then.  */
	if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
	  val2 = rvc_zero;
	else
	  val2 = real_hash (TREE_REAL_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case FIXED_CST:
      {
	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case STRING_CST:
      hstate.add ((const void *) TREE_STRING_POINTER (t),
		  TREE_STRING_LENGTH (t));
      return;
    case COMPLEX_CST:
      inchash::add_expr (TREE_REALPART (t), hstate, flags);
      inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
      return;
    case VECTOR_CST:
      {
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
	  inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags);
	return;
      }
    case SSA_NAME:
      /* We can just compare by pointer.  */
      hstate.add_wide_int (SSA_NAME_VERSION (t));
      return;
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter.  */
      return;
    case BLOCK:
    case OMP_CLAUSE:
      /* Ignore.  */
      return;
    case TREE_LIST:
      /* A list of expressions, for a CALL_EXPR or as the elements of a
	 VECTOR_CST.  */
      for (; t; t = TREE_CHAIN (t))
	inchash::add_expr (TREE_VALUE (t), hstate, flags);
      return;
    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	/* Constructor elements are values even inside an address
	   computation.  */
	flags &= ~OEP_ADDRESS_OF;
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  {
	    inchash::add_expr (field, hstate, flags);
	    inchash::add_expr (value, hstate, flags);
	  }
	return;
      }
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (CONST_CAST_TREE (t));
	     !tsi_end_p (i); tsi_next (&i))
	  inchash::add_expr (tsi_stmt (i), hstate, flags);
	return;
      }
    case TREE_VEC:
      for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
	inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
      return;
    case FUNCTION_DECL:
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes.  However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
	 cases.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	{
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);
	}
      /* FALL THROUGH */
    default:
      tclass = TREE_CODE_CLASS (code);

      if (tclass == tcc_declaration)
	{
	  /* DECL's have a unique ID */
	  hstate.add_wide_int (DECL_UID (t));
	}
      else if (tclass == tcc_comparison && !commutative_tree_code (code))
	{
	  /* For comparisons that can be swapped, use the lower
	     tree code.  */
	  enum tree_code ccode = swap_tree_comparison (code);
	  if (code < ccode)
	    ccode = code;
	  hstate.add_object (ccode);
	  /* If the code was swapped, hash the operands in swapped order
	     too, so e.g. (a < b) and (b > a) hash the same.  */
	  inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
	  inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
	}
      else if (CONVERT_EXPR_CODE_P (code))
	{
	  /* NOP_EXPR and CONVERT_EXPR are considered equal by
	     operand_equal_p.  */
	  enum tree_code ccode = NOP_EXPR;
	  hstate.add_object (ccode);

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  Make sure to include signedness
	     in the hash computation.  */
	  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	  inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
	}
      /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl.  */
      else if (code == MEM_REF
	       && (flags & OEP_ADDRESS_OF) != 0
	       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
	       && integer_zerop (TREE_OPERAND (t, 1)))
	inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
			   hstate, flags);
      /* Don't ICE on FE specific trees, or their arguments etc.
	 during operand_equal_p hash verification.  */
      else if (!IS_EXPR_CODE_CLASS (tclass))
	gcc_assert (flags & OEP_HASH_CHECK);
      else
	{
	  /* SFLAGS is used for operands other than the first; some codes
	     below keep OEP_ADDRESS_OF only for specific operands.  */
	  unsigned int sflags = flags;

	  hstate.add_object (code);

	  switch (code)
	    {
	    case ADDR_EXPR:
	      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	      flags |= OEP_ADDRESS_OF;
	      sflags = flags;
	      break;

	    case INDIRECT_REF:
	    case MEM_REF:
	    case TARGET_MEM_REF:
	      flags &= ~OEP_ADDRESS_OF;
	      sflags = flags;
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	    case COMPONENT_REF:
	    case BIT_FIELD_REF:
	      sflags &= ~OEP_ADDRESS_OF;
	      break;

	    case COND_EXPR:
	      flags &= ~OEP_ADDRESS_OF;
	      break;

	    case FMA_EXPR:
	    case WIDEN_MULT_PLUS_EXPR:
	    case WIDEN_MULT_MINUS_EXPR:
	      {
		/* The multiplication operands are commutative.  */
		/* NOTE(review): operand 2 below is folded into the local
		   accumulator TWO *after* TWO has been merged into HSTATE,
		   so its contribution is discarded.  The hash stays
		   consistent with operand_equal_p (this only weakens
		   discrimination) — confirm whether HSTATE was intended
		   as the destination.  */
		inchash::hash one, two;
		inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
		inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
		hstate.add_commutative (one, two);
		inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
		return;
	      }

	    case CALL_EXPR:
	      /* Internal calls have no callee decl; hash the internal
		 function code instead.  */
	      if (CALL_EXPR_FN (t) == NULL_TREE)
		hstate.add_int (CALL_EXPR_IFN (t));
	      break;

	    case TARGET_EXPR:
	      /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
		 Usually different TARGET_EXPRs just should use
		 different temporaries in their slots.  */
	      inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
	      return;

	    default:
	      break;
	    }

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  */
	  if (code == NON_LVALUE_EXPR)
	    {
	      /* Make sure to include signness in the hash computation.  */
	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	      inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
	    }

	  else if (commutative_tree_code (code))
	    {
	      /* It's a commutative expression.  We want to hash it the same
		 however it appears.  We do this by first hashing both operands
		 and then rehashing based on the order of their independent
		 hashes.  */
	      inchash::hash one, two;
	      inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
	      inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
	      hstate.add_commutative (one, two);
	    }
	  else
	    /* Hash remaining operands back to front; only the first
	       operand keeps FLAGS, the rest use SFLAGS.  */
	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	      inchash::add_expr (TREE_OPERAND (t, i), hstate,
				 i == 0 ? flags : sflags);
	}
      return;
    }
}
7317
7318 }
7319
/* Constructors for pointer, array and function types.
   (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
   constructed by language-dependent code, not here.)  */

/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
   reference all of memory. If such a type has already been
   constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  /* Remember the caller's request before the may_alias attribute
     possibly forces CAN_ALIAS_ALL on below; the canonical type is
     built from the original request.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Chain the new type onto TO_TYPE's list of pointer types so the
     lookup loop above finds it next time.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    /* The canonical pointer type ignores may_alias, hence false.  */
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}
7384
7385 /* By default build pointers in ptr_mode. */
7386
7387 tree
7388 build_pointer_type (tree to_type)
7389 {
7390 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7391 : TYPE_ADDR_SPACE (to_type);
7392 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7393 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7394 }
7395
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  /* Remember the caller's request before the may_alias attribute
     possibly forces CAN_ALIAS_ALL on below; the canonical type is
     built from the original request.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Chain the new type onto TO_TYPE's list of reference types so the
     lookup loop above finds it next time.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    /* The canonical reference type ignores may_alias, hence false.  */
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}
7451
7452
7453 /* Build the node for the type of references-to-TO_TYPE by default
7454 in ptr_mode. */
7455
7456 tree
7457 build_reference_type (tree to_type)
7458 {
7459 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7460 : TYPE_ADDR_SPACE (to_type);
7461 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7462 return build_reference_type_for_mode (to_type, pointer_mode, false);
7463 }
7464
7465 #define MAX_INT_CACHED_PREC \
7466 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7467 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7468
/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* The cache keeps signed types at index [precision] and unsigned
     types at [precision + MAX_INT_CACHED_PREC + 1]; UNSIGNEDP is
     repurposed as that cache offset.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Fill in min/max values, size and mode for the requested
     signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  ret = itype;
  /* Hash-cons the type so structurally identical requests share one
     node (keyed on the max value when it fits a HWI).  */
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
    ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
7504
7505 #define MAX_BOOL_CACHED_PREC \
7506 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7507 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7508
7509 /* Builds a boolean type of precision PRECISION.
7510 Used for boolean vectors to choose proper vector element size. */
7511 tree
7512 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7513 {
7514 tree type;
7515
7516 if (precision <= MAX_BOOL_CACHED_PREC)
7517 {
7518 type = nonstandard_boolean_type_cache[precision];
7519 if (type)
7520 return type;
7521 }
7522
7523 type = make_node (BOOLEAN_TYPE);
7524 TYPE_PRECISION (type) = precision;
7525 fixup_signed_type (type);
7526
7527 if (precision <= MAX_INT_CACHED_PREC)
7528 nonstandard_boolean_type_cache[precision] = type;
7529
7530 return type;
7531 }
7532
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  /* HIGHVAL may be NULL for a range without an upper bound.  */
  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* The subrange shares the base type's representation: precision,
     mode, size and alignment.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);

  if (!shared)
    return itype;

  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Both bounds are constant: enter the type into the hash table so
     structurally identical ranges share a single node.  */
  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}
7573
/* Wrapper around build_range_type_1 with SHARED set to true.  */

tree
build_range_type (tree type, tree lowval, tree highval)
{
  /* Shared range types are hash-consed, so structurally identical
     requests return the same node.  */
  return build_range_type_1 (type, lowval, highval, true);
}
7581
/* Wrapper around build_range_type_1 with SHARED set to false.  */

tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
{
  /* Non-shared range types are never hash-consed; every call yields a
     distinct node.  */
  return build_range_type_1 (type, lowval, highval, false);
}
7589
/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front end).
   The limit exists because the result is a signed type and we don't handle
   sizes that use more than one HOST_WIDE_INT.  */

tree
build_index_type (tree maxval)
{
  /* Array domains always start at zero and are expressed in sizetype.  */
  return build_range_type (sizetype, size_zero_node, maxval);
}
7604
7605 /* Return true if the debug information for TYPE, a subtype, should be emitted
7606 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7607 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7608 debug info and doesn't reflect the source code. */
7609
7610 bool
7611 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7612 {
7613 tree base_type = TREE_TYPE (type), low, high;
7614
7615 /* Subrange types have a base type which is an integral type. */
7616 if (!INTEGRAL_TYPE_P (base_type))
7617 return false;
7618
7619 /* Get the real bounds of the subtype. */
7620 if (lang_hooks.types.get_subrange_bounds)
7621 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7622 else
7623 {
7624 low = TYPE_MIN_VALUE (type);
7625 high = TYPE_MAX_VALUE (type);
7626 }
7627
7628 /* If the type and its base type have the same representation and the same
7629 name, then the type is not a subrange but a copy of the base type. */
7630 if ((TREE_CODE (base_type) == INTEGER_TYPE
7631 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7632 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7633 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7634 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7635 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7636 return false;
7637
7638 if (lowval)
7639 *lowval = low;
7640 if (highval)
7641 *highval = high;
7642 return true;
7643 }
7644
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.  */

static tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared)
{
  tree t;

  /* Arrays of functions are invalid; recover with a harmless element
     type after diagnosing.  */
  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  /* The array lives in the same address space as its elements.  */
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  /* If the element type is incomplete at this point we get marked for
     structural equality.  Do not record these types in the canonical
     type hashtable.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    return t;

  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  /* Compute TYPE_CANONICAL only when hash-consing returned a fresh
     node (for an existing node it is already set).  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Build the canonical array from the canonical element and
	   index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared);
    }

  return t;
}
7698
7699 /* Wrapper around build_array_type_1 with SHARED set to true. */
7700
tree
build_array_type (tree elt_type, tree index_type, bool typeless_storage)
{
  /* Shared (hash-consed) variant: structurally equal array types
     constructed through this entry point are reused.  */
  return build_array_type_1 (elt_type, index_type, typeless_storage, true);
}
7706
7707 /* Wrapper around build_array_type_1 with SHARED set to false. */
7708
tree
build_nonshared_array_type (tree elt_type, tree index_type)
{
  /* Non-shared variant: always builds a fresh node, never entered in the
     type hash table; TYPE_TYPELESS_STORAGE is left clear.  */
  return build_array_type_1 (elt_type, index_type, false, false);
}
7714
7715 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7716 sizetype. */
7717
tree
build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
{
  /* Indices run over [0, NELTS-1] in sizetype.  NOTE(review): this call
     relies on build_array_type's TYPELESS_STORAGE parameter having a
     default argument in tree.h — confirm, it is not visible here.
     Also assumes NELTS >= 1; NELTS == 0 would wrap — verify callers.  */
  return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
}
7723
7724 /* Recursively examines the array elements of TYPE, until a non-array
7725 element type is found. */
7726
7727 tree
7728 strip_array_types (tree type)
7729 {
7730 while (TREE_CODE (type) == ARRAY_TYPE)
7731 type = TREE_TYPE (type);
7732
7733 return type;
7734 }
7735
7736 /* Computes the canonical argument types from the argument type list
7737 ARGTYPES.
7738
7739 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7740 on entry to this function, or if any of the ARGTYPES are
7741 structural.
7742
7743 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7744 true on entry to this function, or if any of the ARGTYPES are
7745 non-canonical.
7746
7747 Returns a canonical argument list, which may be ARGTYPES when the
7748 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7749 true) or would not differ from ARGTYPES. */
7750
static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  /* Scan the list once, classifying each argument type.  The loop stops
     early as soon as structural equality is established, since then the
     canonical list is never consulted.  */
  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types, preserving the
	 trailing void_list_node terminator if one was present.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
7807
7808 /* Construct, lay out and return
7809 the type of functions returning type VALUE_TYPE
7810 given arguments of types ARG_TYPES.
7811 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7812 are data type nodes for the arguments of the function.
7813 If such a type has already been constructed, reuse it. */
7814
7815 tree
7816 build_function_type (tree value_type, tree arg_types)
7817 {
7818 tree t;
7819 inchash::hash hstate;
7820 bool any_structural_p, any_noncanonical_p;
7821 tree canon_argtypes;
7822
7823 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7824 {
7825 error ("function return type cannot be function");
7826 value_type = integer_type_node;
7827 }
7828
7829 /* Make a node of the sort we want. */
7830 t = make_node (FUNCTION_TYPE);
7831 TREE_TYPE (t) = value_type;
7832 TYPE_ARG_TYPES (t) = arg_types;
7833
7834 /* If we already have such a type, use the old one. */
7835 hashval_t hash = type_hash_canon_hash (t);
7836 t = type_hash_canon (hash, t);
7837
7838 /* Set up the canonical type. */
7839 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7840 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7841 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7842 &any_structural_p,
7843 &any_noncanonical_p);
7844 if (any_structural_p)
7845 SET_TYPE_STRUCTURAL_EQUALITY (t);
7846 else if (any_noncanonical_p)
7847 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7848 canon_argtypes);
7849
7850 if (!COMPLETE_TYPE_P (t))
7851 layout_type (t);
7852 return t;
7853 }
7854
7855 /* Build a function type. The RETURN_TYPE is the type returned by the
7856 function. If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */
7858
static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  /* Collect the variadic arguments into a TREE_LIST; consing builds it
     in reverse order, fixed up by nreverse below.  */
  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      /* Varargs: leave the list unterminated.  Before nreverse, LAST is
	 the head of the reversed list, i.e. the final argument; a
	 varargs function must not end with void.  */
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    args = void_list_node;
  else
    {
      /* Prototype: terminate the list with void_list_node.  After
	 nreverse, LAST is the tail node of the list.  */
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
7887
7888 /* Build a function type. The RETURN_TYPE is the type returned by the
7889 function. If additional arguments are provided, they are
7890 additional argument types. The list of argument types must always
7891 be terminated by NULL_TREE. */
7892
7893 tree
7894 build_function_type_list (tree return_type, ...)
7895 {
7896 tree args;
7897 va_list p;
7898
7899 va_start (p, return_type);
7900 args = build_function_type_list_1 (false, return_type, p);
7901 va_end (p);
7902 return args;
7903 }
7904
7905 /* Build a variable argument function type. The RETURN_TYPE is the
7906 type returned by the function. If additional arguments are provided,
7907 they are additional argument types. The list of argument types must
7908 always be terminated by NULL_TREE. */
7909
7910 tree
7911 build_varargs_function_type_list (tree return_type, ...)
7912 {
7913 tree args;
7914 va_list p;
7915
7916 va_start (p, return_type);
7917 args = build_function_type_list_1 (true, return_type, p);
7918 va_end (p);
7919
7920 return args;
7921 }
7922
7923 /* Build a function type. RETURN_TYPE is the type returned by the
7924 function; VAARGS indicates whether the function takes varargs. The
7925 function takes N named arguments, the types of which are provided in
7926 ARG_TYPES. */
7927
7928 static tree
7929 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7930 tree *arg_types)
7931 {
7932 int i;
7933 tree t = vaargs ? NULL_TREE : void_list_node;
7934
7935 for (i = n - 1; i >= 0; i--)
7936 t = tree_cons (NULL_TREE, arg_types[i], t);
7937
7938 return build_function_type (return_type, t);
7939 }
7940
7941 /* Build a function type. RETURN_TYPE is the type returned by the
7942 function. The function takes N named arguments, the types of which
7943 are provided in ARG_TYPES. */
7944
tree
build_function_type_array (tree return_type, int n, tree *arg_types)
{
  /* Non-variadic variant: the argument list is void-terminated.  */
  return build_function_type_array_1 (false, return_type, n, arg_types);
}
7950
7951 /* Build a variable argument function type. RETURN_TYPE is the type
7952 returned by the function. The function takes N named arguments, the
7953 types of which are provided in ARG_TYPES. */
7954
tree
build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
{
  /* Variadic variant: the argument list is left unterminated.  */
  return build_function_type_array_1 (true, return_type, n, arg_types);
}
7960
7961 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7962 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7963 for the method. An implicit additional parameter (of type
7964 pointer-to-BASETYPE) is added to the ARGTYPES. */
7965
tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type: T is canonical only when the base type,
     return type and all explicit argument types are.  The implicit
     "this" argument is skipped (TREE_CHAIN below) since it is derived
     from BASETYPE, which is checked directly.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
8014
8015 /* Construct, lay out and return the type of methods belonging to class
8016 BASETYPE and whose arguments and values are described by TYPE.
8017 If that type exists already, reuse it.
8018 TYPE must be a FUNCTION_TYPE node. */
8019
tree
build_method_type (tree basetype, tree type)
{
  /* Only a FUNCTION_TYPE can be converted into a METHOD_TYPE.  */
  gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);

  /* Reuse TYPE's return and argument types; build_method_type_directly
     prepends the implicit "this" parameter.  */
  return build_method_type_directly (basetype,
				     TREE_TYPE (type),
				     TYPE_ARG_TYPES (type));
}
8029
8030 /* Construct, lay out and return the type of offsets to a value
8031 of type TYPE, within an object of type BASETYPE.
8032 If a suitable offset type exists already, reuse it. */
8033
tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* If T is its own canonical type, derive the real canonical form from
     the canonical base and field types.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
8066
8067 /* Create a complex type whose components are COMPONENT_TYPE.
8068
8069 If NAMED is true, the type is given a TYPE_NAME. We do not always
8070 do so because this creates a DECL node and thus make the DECL_UIDs
8071 dependent on the type canonicalization hashtable, which is GC-ed,
8072 so the DECL_UIDs would not be stable wrt garbage collection. */
8073
tree
build_complex_type (tree component_type, bool named)
{
  tree t;

  /* Complex components must be scalar arithmetic types.  */
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  t = make_node (COMPLEX_TYPE);

  /* Hash-cons on the main variant; qualifiers are re-applied at the
     end via build_qualified_type.  */
  TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  /* If T is its own canonical type, derive the real canonical form from
     the canonical component type.  */
  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (component_type) != component_type)
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (component_type), named);
    }

  /* We need to create a name, since complex is a fundamental type.  */
  if (!TYPE_NAME (t) && named)
    {
      const char *name;
      if (component_type == char_type_node)
	name = "complex char";
      else if (component_type == signed_char_type_node)
	name = "complex signed char";
      else if (component_type == unsigned_char_type_node)
	name = "complex unsigned char";
      else if (component_type == short_integer_type_node)
	name = "complex short int";
      else if (component_type == short_unsigned_type_node)
	name = "complex short unsigned int";
      else if (component_type == integer_type_node)
	name = "complex int";
      else if (component_type == unsigned_type_node)
	name = "complex unsigned int";
      else if (component_type == long_integer_type_node)
	name = "complex long int";
      else if (component_type == long_unsigned_type_node)
	name = "complex long unsigned int";
      else if (component_type == long_long_integer_type_node)
	name = "complex long long int";
      else if (component_type == long_long_unsigned_type_node)
	name = "complex long long unsigned int";
      else
	name = 0;

      if (name != 0)
	TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
				    get_identifier (name), t);
    }

  /* Propagate the component's qualifiers onto the complex type.  */
  return build_qualified_type (t, TYPE_QUALS (component_type));
}
8140
8141 /* If TYPE is a real or complex floating-point type and the target
8142 does not directly support arithmetic on TYPE then return the wider
8143 type to be used for arithmetic on TYPE. Otherwise, return
8144 NULL_TREE. */
8145
tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : EXCESS_PRECISION_TYPE_STANDARD);

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  /* Cache the modes we compare TYPE's mode against below; _Float16 may
     not exist on every target, hence the VOIDmode fallback.  */
  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	/* Widen TYPE to the evaluation type the target requested, when
	   TYPE's mode is narrower than that evaluation type.  */
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	/* Only complex types with a floating-point component carry
	   excess precision; the component mode drives the choice.  */
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    gcc_unreachable ();
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
8244 \f
8245 /* Return OP, stripped of any conversions to wider types as much as is safe.
8246 Converting the value back to OP's type makes a value equivalent to OP.
8247
8248 If FOR_TYPE is nonzero, we return a value which, if converted to
8249 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8250
8251 OP must have integer, real or enumeral type. Pointers are not allowed!
8252
8253 There are some cases where the obvious value we could return
8254 would regenerate to OP if converted to OP's type,
8255 but would not extend like OP to wider types.
8256 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8257 For example, if OP is (unsigned short)(signed char)-1,
8258 we avoid returning (signed char)-1 if FOR_TYPE is int,
8259 even though extending that to an unsigned short would regenerate OP,
8260 since the result of extending (signed char)-1 to (int)
8261 is different from (int) OP. */
8262
tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  tree win = op;

  /* Walk inward through the chain of conversions; WIN tracks the
     innermost expression it is safe to return.  */
  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      /* Positive BITSCHANGE means this conversion is an extension,
	 negative means a truncation.  */
      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (win, TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
8341 \f
8342 /* Return OP or a simpler expression for a narrower value
8343 which can be sign-extended or zero-extended to give back OP.
8344 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8345 or 0 if the value should be sign-extended. */
8346
tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  int first = 1;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  /* Strip NOP_EXPR conversions, remembering in UNS whether the
     outermost stripped extension was zero- or sign-extending.  */
  while (TREE_CODE (op) == NOP_EXPR)
    {
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = 0;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = 0;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  /* A non-bit-field COMPONENT_REF can itself be read in a narrower type
     that exactly fits the field, as long as the extension back to the
     nominal type has consistent signedness with what we stripped.  */
  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
8435 \f
8436 /* Return true if integer constant C has a value that is permissible
8437 for TYPE, an integral type. */
8438
bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (c, type);

retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types.  */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::ne_p (wi::zext (c, prec - 1), c))
	    return false;
	}
      else if (wi::neg_p (c))
	return false;
    }

  /* If we haven't been able to decide at this point, there is nothing more
     we can check ourselves here.  Look at the base type if we have one and
     it has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (c, type);
}
8532
8533 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8534 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8535 represented (assuming two's-complement arithmetic) within the bit
8536 precision of the type are returned instead. */
8537
8538 void
8539 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8540 {
8541 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8542 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8543 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8544 else
8545 {
8546 if (TYPE_UNSIGNED (type))
8547 mpz_set_ui (min, 0);
8548 else
8549 {
8550 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8551 wi::to_mpz (mn, min, SIGNED);
8552 }
8553 }
8554
8555 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8556 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8557 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8558 else
8559 {
8560 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8561 wi::to_mpz (mn, max, TYPE_SIGN (type));
8562 }
8563 }
8564
8565 /* Return true if VAR is an automatic variable defined in function FN. */
8566
8567 bool
8568 auto_var_in_fn_p (const_tree var, const_tree fn)
8569 {
8570 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8571 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8572 || TREE_CODE (var) == PARM_DECL)
8573 && ! TREE_STATIC (var))
8574 || TREE_CODE (var) == LABEL_DECL
8575 || TREE_CODE (var) == RESULT_DECL));
8576 }
8577
8578 /* Subprogram of following function. Called by walk_tree.
8579
8580 Return *TP if it is an automatic variable or parameter of the
8581 function passed in as DATA. */
8582
8583 static tree
8584 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8585 {
8586 tree fn = (tree) data;
8587
8588 if (TYPE_P (*tp))
8589 *walk_subtrees = 0;
8590
8591 else if (DECL_P (*tp)
8592 && auto_var_in_fn_p (*tp, fn))
8593 return *tp;
8594
8595 return NULL_TREE;
8596 }
8597
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

/* Test if T is either variable (if FN is zero) or an expression containing
   a variable in FN.  If TYPE isn't gimplified, return true also if
   gimplify_one_sizepos would gimplify the expression into a local
   variable.  */
#define RETURN_TRUE_IF_VAR(T) \
  do { tree _t = (T); \
       if (_t != NULL_TREE \
	   && _t != error_mark_node \
	   && TREE_CODE (_t) != INTEGER_CST \
	   && TREE_CODE (_t) != PLACEHOLDER_EXPR \
	   && (!fn \
	       || (!TYPE_SIZES_GIMPLIFIED (type) \
		   && !is_gimple_sizepos (_t)) \
	       || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
	 return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Pointed-to and element types make these variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* For a QUAL_UNION_TYPE the discriminating expression can
	       also reference variables.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}
8704
8705 /* Given a DECL or TYPE, return the scope in which it was declared, or
8706 NULL_TREE if there is no containing scope. */
8707
8708 tree
8709 get_containing_scope (const_tree t)
8710 {
8711 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8712 }
8713
/* Return the innermost context enclosing DECL that is
   a FUNCTION_DECL, or zero if none.  */

tree
decl_function_context (const_tree decl)
{
  tree context;

  if (TREE_CODE (decl) == ERROR_MARK)
    return 0;

  /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
     where we look up the function at runtime.  Such functions always take
     a first argument of type 'pointer to real context'.

     C++ should really be fixed to use DECL_CONTEXT for the real context,
     and use something else for the "virtual context".  */
  else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
    context
      = TYPE_MAIN_VARIANT
	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
  else
    context = DECL_CONTEXT (decl);

  /* Walk outward through enclosing BLOCKs and other scopes until a
     FUNCTION_DECL is reached, or the chain of scopes runs out.  */
  while (context && TREE_CODE (context) != FUNCTION_DECL)
    {
      if (TREE_CODE (context) == BLOCK)
	context = BLOCK_SUPERCONTEXT (context);
      else
	context = get_containing_scope (context);
    }

  return context;
}
8748
8749 /* Return the innermost context enclosing DECL that is
8750 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8751 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8752
8753 tree
8754 decl_type_context (const_tree decl)
8755 {
8756 tree context = DECL_CONTEXT (decl);
8757
8758 while (context)
8759 switch (TREE_CODE (context))
8760 {
8761 case NAMESPACE_DECL:
8762 case TRANSLATION_UNIT_DECL:
8763 return NULL_TREE;
8764
8765 case RECORD_TYPE:
8766 case UNION_TYPE:
8767 case QUAL_UNION_TYPE:
8768 return context;
8769
8770 case TYPE_DECL:
8771 case FUNCTION_DECL:
8772 context = DECL_CONTEXT (context);
8773 break;
8774
8775 case BLOCK:
8776 context = BLOCK_SUPERCONTEXT (context);
8777 break;
8778
8779 default:
8780 gcc_unreachable ();
8781 }
8782
8783 return NULL_TREE;
8784 }
8785
/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  /* Propagate errors.  */
  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  /* Strip no-op conversions; this modifies the local ADDR in place.  */
  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
8827
8828 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8829 return the associated function code, otherwise return CFN_LAST. */
8830
8831 combined_fn
8832 get_call_combined_fn (const_tree call)
8833 {
8834 /* It's invalid to call this function with anything but a CALL_EXPR. */
8835 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8836
8837 if (!CALL_EXPR_FN (call))
8838 return as_combined_fn (CALL_EXPR_IFN (call));
8839
8840 tree fndecl = get_callee_fndecl (call);
8841 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8842 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8843
8844 return CFN_LAST;
8845 }
8846
8847 #define TREE_MEM_USAGE_SPACES 40
8848
/* Print debugging information about tree nodes generated during the compile,
   and any language-specific information.  */

void
dump_tree_statistics (void)
{
  /* Per-node counters are only maintained when GATHER_STATISTICS is on.  */
  if (GATHER_STATISTICS)
    {
      int i;
      int total_nodes, total_bytes;
      /* Table of node counts and byte totals, one row per node kind.  */
      fprintf (stderr, "\nKind                   Nodes      Bytes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      total_nodes = total_bytes = 0;
      for (i = 0; i < (int) all_kinds; i++)
	{
	  fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
		   tree_node_counts[i], tree_node_sizes[i]);
	  total_nodes += tree_node_counts[i];
	  total_bytes += tree_node_sizes[i];
	}
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      /* Second table: node counts per individual tree code.  */
      fprintf (stderr, "Code                   Nodes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      for (i = 0; i < (int) MAX_TREE_CODES; i++)
	fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
		 tree_code_counts[i]);
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      fprintf (stderr, "\n");
      ssanames_print_statistics ();
      fprintf (stderr, "\n");
      phinodes_print_statistics ();
      fprintf (stderr, "\n");
    }
  else
    fprintf (stderr, "(No per-node statistics)\n");

  /* These statistics are collected unconditionally.  */
  print_type_hash_statistics ();
  print_debug_expr_statistics ();
  print_value_expr_statistics ();
  lang_hooks.print_statistics ();
}
8892 \f
8893 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8894
/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* Nibble-at-a-time CRC using the polynomial 0x04c11db7.
     This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  /* Precomputed feedback value for each possible high nibble.  */
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  /* Left-justify the interesting bytes, then fold the value into the
     checksum four bits at a time.  */
  value <<= 32 - bytes * 8;
  unsigned nibbles = bytes * 2;
  while (nibbles-- > 0)
    {
      unsigned idx = ((value ^ chksum) >> 28) & 0xf;
      chksum = (chksum << 4) ^ syndromes[idx];
      value <<= 4;
    }

  return chksum;
}
8924
/* Generate a crc32 of a string.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  /* Feed every character into the checksum, including the
     terminating NUL (matching the historical behavior).  */
  for (;;)
    {
      chksum = crc32_byte (chksum, *string);
      if (*string++ == '\0')
	break;
    }
  return chksum;
}
8935
/* P is a string that will be used in a symbol.  Mask out any characters
   that are not valid in that context.  */

void
clean_symbol_name (char *p)
{
  for (; *p != '\0'; p++)
    {
      /* Alphanumerics are always acceptable.  */
      if (ISALNUM (*p))
	continue;
#ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
      if (*p == '$')
	continue;
#endif
#ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
      if (*p == '.')
	continue;
#endif
      /* Everything else becomes an underscore.  */
      *p = '_';
    }
}
8953
/* For anonymous aggregate types, we need some sort of name to
   hold on to.  In practice, this should not appear, but it should
   not be harmful if it does.  Return true if the identifier ID_NODE
   matches the prefix used for anonymous aggregate names (which
   depends on what characters the target allows in labels).  */
bool
anon_aggrname_p(const_tree id_node)
{
#ifndef NO_DOT_IN_LABEL
  /* Anonymous aggregate names begin with "._" when dot is allowed.  */
  return (IDENTIFIER_POINTER (id_node)[0] == '.'
	  && IDENTIFIER_POINTER (id_node)[1] == '_');
#else /* NO_DOT_IN_LABEL */
#ifndef NO_DOLLAR_IN_LABEL
  /* Fall back to "$_" when dot is not allowed but dollar is.  */
  return (IDENTIFIER_POINTER (id_node)[0] == '$' \
	  && IDENTIFIER_POINTER (id_node)[1] == '_');
#else /* NO_DOLLAR_IN_LABEL */
  /* Last resort: a plain "__anon_" prefix.  */
#define ANON_AGGRNAME_PREFIX "__anon_"
  return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
		    sizeof (ANON_AGGRNAME_PREFIX) - 1));
#endif	/* NO_DOLLAR_IN_LABEL */
#endif	/* NO_DOT_IN_LABEL */
}
8974
/* Return a printf format for an anonymous aggregate name; the single
   %d is filled in with a counter.  Must stay in sync with the prefix
   tests in anon_aggrname_p above.  */
const char *
anon_aggrname_format()
{
#ifndef NO_DOT_IN_LABEL
  return "._%d";
#else /* NO_DOT_IN_LABEL */
#ifndef NO_DOLLAR_IN_LABEL
  return "$_%d";
#else /* NO_DOLLAR_IN_LABEL */
  return "__anon_%d";
#endif	/* NO_DOLLAR_IN_LABEL */
#endif	/* NO_DOT_IN_LABEL */
}
8989
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (strncmp (type, "sub_", 4) == 0
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* 9 bytes cover "_%08X_"; 19 covers the hex wide int; plus the
	 file name and a trailing NUL.  */
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  /* Replace characters that cannot appear in a symbol.  */
  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
9064 \f
9065 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9066
/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  Does not return.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: total up the space needed for the
     expected code names plus " or " separators.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build "expected A or B or ..." in a stack buffer.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9110
/* Complain that the tree code of NODE matches one of the codes in the 0
   terminated list of trailing codes that it must NOT be.  FILE, LINE,
   and FUNCTION are of the caller.  Does not return.  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass: total up the space needed for the forbidden code names
     plus " or " separators.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build "A or B or ..." in a stack buffer.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9147
/* Similar to tree_check_failed, except that we check for a class of tree
   code, given in CL.  Does not return.  */

void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9161
/* Similar to tree_check_failed, except that instead of specifying a
   dozen codes, use the knowledge that they're all sequential.
   C1..C2 is the inclusive range of expected codes.  Does not return.  */

void
tree_range_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum tree_code c1,
			 enum tree_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* First pass: compute the buffer size needed for all the code names
     plus separators.  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (get_tree_code_name ((enum tree_code) c));

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Second pass: build "expected A or B or ...".  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
      length += strlen (get_tree_code_name ((enum tree_code) c));
    }

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9195
9196
/* Similar to tree_check_failed, except that we check that a tree does
   not have the specified code class, given in CL.  Does not return.  */

void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
			     const char *file, int line, const char *function)
{
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9210
9211
/* Similar to tree_check_failed but applied to OMP_CLAUSE codes: complain
   that NODE is not an OMP_CLAUSE with code CODE.  Does not return.  */

void
omp_clause_check_failed (const_tree node, const char *file, int line,
                         const char *function, enum omp_clause_code code)
{
  internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
		  omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
9222
9223
9224 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9225
9226 void
9227 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9228 const char *function, enum omp_clause_code c1,
9229 enum omp_clause_code c2)
9230 {
9231 char *buffer;
9232 unsigned length = 0;
9233 unsigned int c;
9234
9235 for (c = c1; c <= c2; ++c)
9236 length += 4 + strlen (omp_clause_code_name[c]);
9237
9238 length += strlen ("expected ");
9239 buffer = (char *) alloca (length);
9240 length = 0;
9241
9242 for (c = c1; c <= c2; ++c)
9243 {
9244 const char *prefix = length ? " or " : "expected ";
9245
9246 strcpy (buffer + length, prefix);
9247 length += strlen (prefix);
9248 strcpy (buffer + length, omp_clause_code_name[c]);
9249 length += strlen (omp_clause_code_name[c]);
9250 }
9251
9252 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9253 buffer, omp_clause_code_name[TREE_CODE (node)],
9254 function, trim_filename (file), line);
9255 }
9256
9257
/* Table mapping tree_node_structure_enum values to their printable
   names, generated by expanding treestruct.def with DEFTREESTRUCT
   redefined to yield just the name string.  */
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

/* Fetch the printable name for tree structure enum EN.  */
#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9267
/* Similar to tree_class_check_failed, except that we check for
   whether CODE contains the tree structure identified by EN.
   Does not return.  */

void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
9282
9283
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) vector of elements: IDX was
   accessed in a constant holding only LEN elements.  Does not return.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9295
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector: IDX was accessed in a vector holding only
   LEN elements.  Does not return.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9307
/* Similar to above, except that the check is for the bounds of the operand
   vector of an expression node EXP: operand IDX was accessed but EXP has
   fewer operands.  Does not return.  */

void
tree_operand_check_failed (int idx, const_tree exp, const char *file,
			   int line, const char *function)
{
  enum tree_code code = TREE_CODE (exp);
  internal_error
    ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
     idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
     function, trim_filename (file), line);
}
9321
/* Similar to above, except that the check is for the number of
   operands of an OMP_CLAUSE node T: operand IDX was accessed but the
   clause's code only has fewer operands.  Does not return.  */

void
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
			         int line, const char *function)
{
  internal_error
    ("tree check: accessed operand %d of omp_clause %s with %d operands "
     "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
     omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
     trim_filename (file), line);
}
9335 #endif /* ENABLE_TREE_CHECKING */
9336 \f
/* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, int nunits, machine_mode mode)
{
  tree t;
  /* Vectors are always built over the main variant of the element type.  */
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  /* Structural-equality inner types (and everything during LTO) force
     structural equality on the vector type too; otherwise record a
     canonical form built from the canonical inner type.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
	    || mode != VOIDmode)
	   && !VECTOR_BOOLEAN_TYPE_P (t))
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  /* Reuse an existing identical vector type via the type hash table.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}
9375
9376 static tree
9377 make_or_reuse_type (unsigned size, int unsignedp)
9378 {
9379 int i;
9380
9381 if (size == INT_TYPE_SIZE)
9382 return unsignedp ? unsigned_type_node : integer_type_node;
9383 if (size == CHAR_TYPE_SIZE)
9384 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9385 if (size == SHORT_TYPE_SIZE)
9386 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9387 if (size == LONG_TYPE_SIZE)
9388 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9389 if (size == LONG_LONG_TYPE_SIZE)
9390 return (unsignedp ? long_long_unsigned_type_node
9391 : long_long_integer_type_node);
9392
9393 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9394 if (size == int_n_data[i].bitsize
9395 && int_n_enabled_p[i])
9396 return (unsignedp ? int_n_trees[i].unsigned_type
9397 : int_n_trees[i].signed_type);
9398
9399 if (unsignedp)
9400 return make_unsigned_type (size);
9401 else
9402 return make_signed_type (size);
9403 }
9404
/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  SATP
   selects the saturating variants.  Standard-width types are reused;
   any other width gets a freshly built type node.  */

static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  /* Saturating fixed-point fract types.  */
  if (satp)
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  /* Non-saturating fract types.  */
  else
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  /* No standard fract type of this width; make a new one.  */
  return make_fract_type (size, unsignedp, satp);
}
9441
/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  SATP
   selects the saturating variants.  Standard-width types are reused;
   any other width gets a freshly built type node.  */

static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  /* Saturating fixed-point accum types.  */
  if (satp)
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  /* Non-saturating accum types.  */
  else
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  /* No standard accum type of this width; make a new one.  */
  return make_accum_type (size, unsignedp, satp);
}
9478
9479
9480 /* Create an atomic variant node for TYPE. This routine is called
9481 during initialization of data types to create the 5 basic atomic
9482 types. The generic build_variant_type function requires these to
9483 already be set up in order to function properly, so cannot be
9484 called from there. If ALIGN is non-zero, then ensure alignment is
9485 overridden to this value. */
9486
9487 static tree
9488 build_atomic_base (tree type, unsigned int align)
9489 {
9490 tree t;
9491
9492 /* Make sure its not already registered. */
9493 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9494 return t;
9495
9496 t = build_variant_type_copy (type);
9497 set_type_quals (t, TYPE_QUAL_ATOMIC);
9498
9499 if (align)
9500 SET_TYPE_ALIGN (t, align);
9501
9502 return t;
9503 }
9504
/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  Each entry
   is { N, extended }: the bit count N and whether this is one of the
   extended _FloatNx types.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
  {
    { 16, false },
    { 32, false },
    { 64, false },
    { 128, false },
    { 32, true },
    { 64, true },
    { 128, true },
  };
9517
9518
9519 /* Create nodes for all integer types (and error_mark_node) using the sizes
9520 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9521
9522 void
9523 build_common_tree_nodes (bool signed_char)
9524 {
9525 int i;
9526
9527 error_mark_node = make_node (ERROR_MARK);
9528 TREE_TYPE (error_mark_node) = error_mark_node;
9529
9530 initialize_sizetypes ();
9531
9532 /* Define both `signed char' and `unsigned char'. */
9533 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9534 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9535 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9536 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9537
9538 /* Define `char', which is like either `signed char' or `unsigned char'
9539 but not the same as either. */
9540 char_type_node
9541 = (signed_char
9542 ? make_signed_type (CHAR_TYPE_SIZE)
9543 : make_unsigned_type (CHAR_TYPE_SIZE));
9544 TYPE_STRING_FLAG (char_type_node) = 1;
9545
9546 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9547 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9548 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9549 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9550 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9551 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9552 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9553 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9554
9555 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9556 {
9557 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9558 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9559 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9560 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9561
9562 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9563 && int_n_enabled_p[i])
9564 {
9565 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9566 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9567 }
9568 }
9569
9570 /* Define a boolean type. This type only represents boolean values but
9571 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9572 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9573 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9574 TYPE_PRECISION (boolean_type_node) = 1;
9575 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9576
9577 /* Define what type to use for size_t. */
9578 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9579 size_type_node = unsigned_type_node;
9580 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9581 size_type_node = long_unsigned_type_node;
9582 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9583 size_type_node = long_long_unsigned_type_node;
9584 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9585 size_type_node = short_unsigned_type_node;
9586 else
9587 {
9588 int i;
9589
9590 size_type_node = NULL_TREE;
9591 for (i = 0; i < NUM_INT_N_ENTS; i++)
9592 if (int_n_enabled_p[i])
9593 {
9594 char name[50];
9595 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9596
9597 if (strcmp (name, SIZE_TYPE) == 0)
9598 {
9599 size_type_node = int_n_trees[i].unsigned_type;
9600 }
9601 }
9602 if (size_type_node == NULL_TREE)
9603 gcc_unreachable ();
9604 }
9605
9606 /* Define what type to use for ptrdiff_t. */
9607 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9608 ptrdiff_type_node = integer_type_node;
9609 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9610 ptrdiff_type_node = long_integer_type_node;
9611 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9612 ptrdiff_type_node = long_long_integer_type_node;
9613 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9614 ptrdiff_type_node = short_integer_type_node;
9615 else
9616 {
9617 ptrdiff_type_node = NULL_TREE;
9618 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9619 if (int_n_enabled_p[i])
9620 {
9621 char name[50];
9622 sprintf (name, "__int%d", int_n_data[i].bitsize);
9623 if (strcmp (name, PTRDIFF_TYPE) == 0)
9624 ptrdiff_type_node = int_n_trees[i].signed_type;
9625 }
9626 if (ptrdiff_type_node == NULL_TREE)
9627 gcc_unreachable ();
9628 }
9629
9630 /* Fill in the rest of the sized types. Reuse existing type nodes
9631 when possible. */
9632 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9633 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9634 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9635 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9636 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9637
9638 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9639 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9640 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9641 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9642 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9643
/* Don't call build_qualified_type for atomics.  That routine does
   special processing for atomics, and until they are initialized
   it's better not to make that call.

   Check to see if there is a target override for atomic types.  */
9649
9650 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9651 targetm.atomic_align_for_mode (QImode));
9652 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9653 targetm.atomic_align_for_mode (HImode));
9654 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9655 targetm.atomic_align_for_mode (SImode));
9656 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9657 targetm.atomic_align_for_mode (DImode));
9658 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9659 targetm.atomic_align_for_mode (TImode));
9660
9661 access_public_node = get_identifier ("public");
9662 access_protected_node = get_identifier ("protected");
9663 access_private_node = get_identifier ("private");
9664
/* Define these next since types below may use them.  */
9666 integer_zero_node = build_int_cst (integer_type_node, 0);
9667 integer_one_node = build_int_cst (integer_type_node, 1);
9668 integer_three_node = build_int_cst (integer_type_node, 3);
9669 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9670
9671 size_zero_node = size_int (0);
9672 size_one_node = size_int (1);
9673 bitsize_zero_node = bitsize_int (0);
9674 bitsize_one_node = bitsize_int (1);
9675 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9676
9677 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9678 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9679
9680 void_type_node = make_node (VOID_TYPE);
9681 layout_type (void_type_node);
9682
9683 pointer_bounds_type_node = targetm.chkp_bound_type ();
9684
9685 /* We are not going to have real types in C with less than byte alignment,
9686 so we might as well not have any types that claim to have it. */
9687 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9688 TYPE_USER_ALIGN (void_type_node) = 0;
9689
9690 void_node = make_node (VOID_CST);
9691 TREE_TYPE (void_node) = void_type_node;
9692
9693 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9694 layout_type (TREE_TYPE (null_pointer_node));
9695
9696 ptr_type_node = build_pointer_type (void_type_node);
9697 const_ptr_type_node
9698 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9699 for (unsigned i = 0;
9700 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9701 ++i)
9702 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9703
9704 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9705
9706 float_type_node = make_node (REAL_TYPE);
9707 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9708 layout_type (float_type_node);
9709
9710 double_type_node = make_node (REAL_TYPE);
9711 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9712 layout_type (double_type_node);
9713
9714 long_double_type_node = make_node (REAL_TYPE);
9715 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9716 layout_type (long_double_type_node);
9717
9718 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9719 {
9720 int n = floatn_nx_types[i].n;
9721 bool extended = floatn_nx_types[i].extended;
9722 machine_mode mode = targetm.floatn_mode (n, extended);
9723 if (mode == VOIDmode)
9724 continue;
9725 int precision = GET_MODE_PRECISION (mode);
9726 /* Work around the rs6000 KFmode having precision 113 not
9727 128. */
9728 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9729 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9730 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9731 if (!extended)
9732 gcc_assert (min_precision == n);
9733 if (precision < min_precision)
9734 precision = min_precision;
9735 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9736 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9737 layout_type (FLOATN_NX_TYPE_NODE (i));
9738 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9739 }
9740
9741 float_ptr_type_node = build_pointer_type (float_type_node);
9742 double_ptr_type_node = build_pointer_type (double_type_node);
9743 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9744 integer_ptr_type_node = build_pointer_type (integer_type_node);
9745
9746 /* Fixed size integer types. */
9747 uint16_type_node = make_or_reuse_type (16, 1);
9748 uint32_type_node = make_or_reuse_type (32, 1);
9749 uint64_type_node = make_or_reuse_type (64, 1);
9750
9751 /* Decimal float types. */
9752 dfloat32_type_node = make_node (REAL_TYPE);
9753 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9754 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9755 layout_type (dfloat32_type_node);
9756 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9757
9758 dfloat64_type_node = make_node (REAL_TYPE);
9759 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9760 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9761 layout_type (dfloat64_type_node);
9762 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9763
9764 dfloat128_type_node = make_node (REAL_TYPE);
9765 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9766 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9767 layout_type (dfloat128_type_node);
9768 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9769
9770 complex_integer_type_node = build_complex_type (integer_type_node, true);
9771 complex_float_type_node = build_complex_type (float_type_node, true);
9772 complex_double_type_node = build_complex_type (double_type_node, true);
9773 complex_long_double_type_node = build_complex_type (long_double_type_node,
9774 true);
9775
9776 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9777 {
9778 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9779 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9780 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9781 }
9782
9783 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9784 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9785 sat_ ## KIND ## _type_node = \
9786 make_sat_signed_ ## KIND ## _type (SIZE); \
9787 sat_unsigned_ ## KIND ## _type_node = \
9788 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9789 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9790 unsigned_ ## KIND ## _type_node = \
9791 make_unsigned_ ## KIND ## _type (SIZE);
9792
9793 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9794 sat_ ## WIDTH ## KIND ## _type_node = \
9795 make_sat_signed_ ## KIND ## _type (SIZE); \
9796 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9797 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9798 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9799 unsigned_ ## WIDTH ## KIND ## _type_node = \
9800 make_unsigned_ ## KIND ## _type (SIZE);
9801
9802 /* Make fixed-point type nodes based on four different widths. */
9803 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9804 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9805 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9806 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9807 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9808
9809 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9810 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9811 NAME ## _type_node = \
9812 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9813 u ## NAME ## _type_node = \
9814 make_or_reuse_unsigned_ ## KIND ## _type \
9815 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9816 sat_ ## NAME ## _type_node = \
9817 make_or_reuse_sat_signed_ ## KIND ## _type \
9818 (GET_MODE_BITSIZE (MODE ## mode)); \
9819 sat_u ## NAME ## _type_node = \
9820 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9821 (GET_MODE_BITSIZE (U ## MODE ## mode));
9822
9823 /* Fixed-point type and mode nodes. */
9824 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9825 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9826 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9827 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9828 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9829 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9830 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9831 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9832 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9833 MAKE_FIXED_MODE_NODE (accum, da, DA)
9834 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9835
9836 {
9837 tree t = targetm.build_builtin_va_list ();
9838
9839 /* Many back-ends define record types without setting TYPE_NAME.
9840 If we copied the record type here, we'd keep the original
9841 record type without a name. This breaks name mangling. So,
9842 don't copy record types and let c_common_nodes_and_builtins()
9843 declare the type to be __builtin_va_list. */
9844 if (TREE_CODE (t) != RECORD_TYPE)
9845 t = build_variant_type_copy (t);
9846
9847 va_list_type_node = t;
9848 }
9849 }
9850
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.

   Translates the ECF_* call-expression flag bits into the corresponding
   tree-level bits and attributes on the function declaration DECL.  */

void
set_call_expr_flags (tree decl, int flags)
{
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* On a FUNCTION_DECL, TREE_THIS_VOLATILE is the "noreturn" bit.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* "leaf" and "cold" have no dedicated DECL bit; they are recorded as
     attributes prepended to DECL_ATTRIBUTES.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
					NULL, DECL_ATTRIBUTES (decl));
  /* ECF_RET1 is encoded as a "fn spec" attribute with the string "1";
     presumably marking the first argument as the returned value (cf. the
     memcpy/memmove registrations in build_common_builtin_nodes).  */
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
		   build_tree_list (NULL_TREE, build_string (1, "1")),
		   DECL_ATTRIBUTES (decl));
  /* TM purity lives on the type, not the decl, and only matters when
     transactional memory support is enabled.  */
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
9892
9893
9894 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9895
9896 static void
9897 local_define_builtin (const char *name, tree type, enum built_in_function code,
9898 const char *library_name, int ecf_flags)
9899 {
9900 tree decl;
9901
9902 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9903 library_name, NULL_TREE);
9904 set_call_expr_flags (decl, ecf_flags);
9905
9906 set_builtin_decl (code, decl, true);
9907 }
9908
/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  /* Unreachable/abort: only register the ones the front end has not
     already provided explicitly.  */
  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
	local_define_builtin ("__builtin_unreachable", ftype,
			      BUILT_IN_UNREACHABLE,
			      "__builtin_unreachable",
			      ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			      | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
	local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
			      "abort",
			      ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  /* Memory builtins.  Note ECF_RET1 on memcpy/memmove/memset: they
     return their first argument.  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", alloca_flags);
    }

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN,
			"__builtin_alloca_with_align",
			alloca_flags);

  /* Trampoline / descriptor support used for nested functions.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
			BUILT_IN_INIT_DESCRIPTOR,
			"__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
			BUILT_IN_ADJUST_DESCRIPTOR,
			"__builtin_adjust_descriptor",
			ECF_CONST | ECF_NOTHROW);

  /* Non-local goto and setjmp/longjmp lowering support.  */
  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
				    const_ptr_type_node, size_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
			"__builtin_memcmp_eq",
			ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
     alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The library routine depends on whether SJLJ or DWARF-style unwinding
     is in use for this configuration.  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  /* -finstrument-functions profiling hooks.  */
  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name to form e.g. "__mulsc3"/"__divsc3".  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_NOTHROW | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_NOTHROW | ECF_LEAF);
      }
  }

  init_internal_fns ();
}
10167
10168 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10169 better way.
10170
10171 If we requested a pointer to a vector, build up the pointers that
10172 we stripped off while looking for the inner type. Similarly for
10173 return values from functions.
10174
10175 The argument TYPE is the top of the chain, and BOTTOM is the
10176 new type which we will point to. */
10177
10178 tree
10179 reconstruct_complex_type (tree type, tree bottom)
10180 {
10181 tree inner, outer;
10182
10183 if (TREE_CODE (type) == POINTER_TYPE)
10184 {
10185 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10186 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10187 TYPE_REF_CAN_ALIAS_ALL (type));
10188 }
10189 else if (TREE_CODE (type) == REFERENCE_TYPE)
10190 {
10191 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10192 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10193 TYPE_REF_CAN_ALIAS_ALL (type));
10194 }
10195 else if (TREE_CODE (type) == ARRAY_TYPE)
10196 {
10197 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10198 outer = build_array_type (inner, TYPE_DOMAIN (type));
10199 }
10200 else if (TREE_CODE (type) == FUNCTION_TYPE)
10201 {
10202 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10203 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10204 }
10205 else if (TREE_CODE (type) == METHOD_TYPE)
10206 {
10207 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10208 /* The build_method_type_directly() routine prepends 'this' to argument list,
10209 so we must compensate by getting rid of it. */
10210 outer
10211 = build_method_type_directly
10212 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10213 inner,
10214 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10215 }
10216 else if (TREE_CODE (type) == OFFSET_TYPE)
10217 {
10218 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10219 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10220 }
10221 else
10222 return bottom;
10223
10224 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10225 TYPE_QUALS (type));
10226 }
10227
10228 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10229 the inner type. */
10230 tree
10231 build_vector_type_for_mode (tree innertype, machine_mode mode)
10232 {
10233 int nunits;
10234
10235 switch (GET_MODE_CLASS (mode))
10236 {
10237 case MODE_VECTOR_INT:
10238 case MODE_VECTOR_FLOAT:
10239 case MODE_VECTOR_FRACT:
10240 case MODE_VECTOR_UFRACT:
10241 case MODE_VECTOR_ACCUM:
10242 case MODE_VECTOR_UACCUM:
10243 nunits = GET_MODE_NUNITS (mode);
10244 break;
10245
10246 case MODE_INT:
10247 /* Check that there are no leftover bits. */
10248 gcc_assert (GET_MODE_BITSIZE (mode)
10249 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10250
10251 nunits = GET_MODE_BITSIZE (mode)
10252 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10253 break;
10254
10255 default:
10256 gcc_unreachable ();
10257 }
10258
10259 return make_vector_type (innertype, nunits, mode);
10260 }
10261
/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  Passing VOIDmode lets make_vector_type choose the
   machine mode itself.  */

tree
build_vector_type (tree innertype, int nunits)
{
  return make_vector_type (innertype, nunits, VOIDmode);
}
10270
10271 /* Build truth vector with specified length and number of units. */
10272
10273 tree
10274 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10275 {
10276 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10277 vector_size);
10278
10279 gcc_assert (mask_mode != VOIDmode);
10280
10281 unsigned HOST_WIDE_INT vsize;
10282 if (mask_mode == BLKmode)
10283 vsize = vector_size * BITS_PER_UNIT;
10284 else
10285 vsize = GET_MODE_BITSIZE (mask_mode);
10286
10287 unsigned HOST_WIDE_INT esize = vsize / nunits;
10288 gcc_assert (esize * nunits == vsize);
10289
10290 tree bool_type = build_nonstandard_boolean_type (esize);
10291
10292 return make_vector_type (bool_type, nunits, mask_mode);
10293 }
10294
10295 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10296
10297 tree
10298 build_same_sized_truth_vector_type (tree vectype)
10299 {
10300 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10301 return vectype;
10302
10303 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10304
10305 if (!size)
10306 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10307
10308 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10309 }
10310
10311 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10312
10313 tree
10314 build_opaque_vector_type (tree innertype, int nunits)
10315 {
10316 tree t = make_vector_type (innertype, nunits, VOIDmode);
10317 tree cand;
10318 /* We always build the non-opaque variant before the opaque one,
10319 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10320 cand = TYPE_NEXT_VARIANT (t);
10321 if (cand
10322 && TYPE_VECTOR_OPAQUE (cand)
10323 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10324 return cand;
10325 /* Othewise build a variant type and make sure to queue it after
10326 the non-opaque type. */
10327 cand = build_distinct_type_copy (t);
10328 TYPE_VECTOR_OPAQUE (cand) = true;
10329 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10330 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10331 TYPE_NEXT_VARIANT (t) = cand;
10332 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10333 return cand;
10334 }
10335
10336
10337 /* Given an initializer INIT, return TRUE if INIT is zero or some
10338 aggregate of zeros. Otherwise return FALSE. */
10339 bool
10340 initializer_zerop (const_tree init)
10341 {
10342 tree elt;
10343
10344 STRIP_NOPS (init);
10345
10346 switch (TREE_CODE (init))
10347 {
10348 case INTEGER_CST:
10349 return integer_zerop (init);
10350
10351 case REAL_CST:
10352 /* ??? Note that this is not correct for C4X float formats. There,
10353 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10354 negative exponent. */
10355 return real_zerop (init)
10356 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10357
10358 case FIXED_CST:
10359 return fixed_zerop (init);
10360
10361 case COMPLEX_CST:
10362 return integer_zerop (init)
10363 || (real_zerop (init)
10364 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10365 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10366
10367 case VECTOR_CST:
10368 {
10369 unsigned i;
10370 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10371 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10372 return false;
10373 return true;
10374 }
10375
10376 case CONSTRUCTOR:
10377 {
10378 unsigned HOST_WIDE_INT idx;
10379
10380 if (TREE_CLOBBER_P (init))
10381 return false;
10382 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10383 if (!initializer_zerop (elt))
10384 return false;
10385 return true;
10386 }
10387
10388 case STRING_CST:
10389 {
10390 int i;
10391
10392 /* We need to loop through all elements to handle cases like
10393 "\0" and "\0foobar". */
10394 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10395 if (TREE_STRING_POINTER (init)[i] != '\0')
10396 return false;
10397
10398 return true;
10399 }
10400
10401 default:
10402 return false;
10403 }
10404 }
10405
10406 /* Check if vector VEC consists of all the equal elements and
10407 that the number of elements corresponds to the type of VEC.
10408 The function returns first element of the vector
10409 or NULL_TREE if the vector is not uniform. */
10410 tree
10411 uniform_vector_p (const_tree vec)
10412 {
10413 tree first, t;
10414 unsigned i;
10415
10416 if (vec == NULL_TREE)
10417 return NULL_TREE;
10418
10419 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10420
10421 if (TREE_CODE (vec) == VECTOR_CST)
10422 {
10423 first = VECTOR_CST_ELT (vec, 0);
10424 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10425 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10426 return NULL_TREE;
10427
10428 return first;
10429 }
10430
10431 else if (TREE_CODE (vec) == CONSTRUCTOR)
10432 {
10433 first = error_mark_node;
10434
10435 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10436 {
10437 if (i == 0)
10438 {
10439 first = t;
10440 continue;
10441 }
10442 if (!operand_equal_p (first, t, 0))
10443 return NULL_TREE;
10444 }
10445 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10446 return NULL_TREE;
10447
10448 return first;
10449 }
10450
10451 return NULL_TREE;
10452 }
10453
10454 /* Build an empty statement at location LOC. */
10455
10456 tree
10457 build_empty_stmt (location_t loc)
10458 {
10459 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10460 SET_EXPR_LOCATION (t, loc);
10461 return t;
10462 }
10463
10464
/* Build an OpenMP clause with code CODE.  LOC is the location of the
   clause.  */

tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  /* struct tree_omp_clause embeds one operand slot; allocate extra
     space for the remaining LENGTH - 1 operands of this clause code.  */
  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  t = (tree) ggc_internal_alloc (size);
  /* Zero the whole node before setting the code/location fields.  */
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}
10487
/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* struct tree_exp embeds one operand; add space for the rest.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
10514
10515 /* Helper function for build_call_* functions; build a CALL_EXPR with
10516 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10517 the argument slots. */
10518
10519 static tree
10520 build_call_1 (tree return_type, tree fn, int nargs)
10521 {
10522 tree t;
10523
10524 t = build_vl_exp (CALL_EXPR, nargs + 3);
10525 TREE_TYPE (t) = return_type;
10526 CALL_EXPR_FN (t) = fn;
10527 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10528
10529 return t;
10530 }
10531
10532 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10533 FN and a null static chain slot. NARGS is the number of call arguments
10534 which are specified as "..." arguments. */
10535
10536 tree
10537 build_call_nary (tree return_type, tree fn, int nargs, ...)
10538 {
10539 tree ret;
10540 va_list args;
10541 va_start (args, nargs);
10542 ret = build_call_valist (return_type, fn, nargs, args);
10543 va_end (args);
10544 return ret;
10545 }
10546
10547 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10548 FN and a null static chain slot. NARGS is the number of call arguments
10549 which are specified as a va_list ARGS. */
10550
10551 tree
10552 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10553 {
10554 tree t;
10555 int i;
10556
10557 t = build_call_1 (return_type, fn, nargs);
10558 for (i = 0; i < nargs; i++)
10559 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10560 process_call_operands (t);
10561 return t;
10562 }
10563
10564 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10565 FN and a null static chain slot. NARGS is the number of call arguments
10566 which are specified as a tree array ARGS. */
10567
10568 tree
10569 build_call_array_loc (location_t loc, tree return_type, tree fn,
10570 int nargs, const tree *args)
10571 {
10572 tree t;
10573 int i;
10574
10575 t = build_call_1 (return_type, fn, nargs);
10576 for (i = 0; i < nargs; i++)
10577 CALL_EXPR_ARG (t, i) = args[i];
10578 process_call_operands (t);
10579 SET_EXPR_LOCATION (t, loc);
10580 return t;
10581 }
10582
10583 /* Like build_call_array, but takes a vec. */
10584
10585 tree
10586 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10587 {
10588 tree ret, t;
10589 unsigned int ix;
10590
10591 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10592 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10593 CALL_EXPR_ARG (ret, ix) = t;
10594 process_call_operands (ret);
10595 return ret;
10596 }
10597
10598 /* Conveniently construct a function call expression. FNDECL names the
10599 function to be called and N arguments are passed in the array
10600 ARGARRAY. */
10601
10602 tree
10603 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10604 {
10605 tree fntype = TREE_TYPE (fndecl);
10606 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10607
10608 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10609 }
10610
10611 /* Conveniently construct a function call expression. FNDECL names the
10612 function to be called and the arguments are passed in the vector
10613 VEC. */
10614
10615 tree
10616 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10617 {
10618 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10619 vec_safe_address (vec));
10620 }
10621
10622
10623 /* Conveniently construct a function call expression. FNDECL names the
10624 function to be called, N is the number of arguments, and the "..."
10625 parameters are the argument expressions. */
10626
10627 tree
10628 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10629 {
10630 va_list ap;
10631 tree *argarray = XALLOCAVEC (tree, n);
10632 int i;
10633
10634 va_start (ap, n);
10635 for (i = 0; i < n; i++)
10636 argarray[i] = va_arg (ap, tree);
10637 va_end (ap);
10638 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10639 }
10640
10641 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10642 varargs macros aren't supported by all bootstrap compilers. */
10643
10644 tree
10645 build_call_expr (tree fndecl, int n, ...)
10646 {
10647 va_list ap;
10648 tree *argarray = XALLOCAVEC (tree, n);
10649 int i;
10650
10651 va_start (ap, n);
10652 for (i = 0; i < n; i++)
10653 argarray[i] = va_arg (ap, tree);
10654 va_end (ap);
10655 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10656 }
10657
10658 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10659 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10660 It will get gimplified later into an ordinary internal function. */
10661
10662 tree
10663 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10664 tree type, int n, const tree *args)
10665 {
10666 tree t = build_call_1 (type, NULL_TREE, n);
10667 for (int i = 0; i < n; ++i)
10668 CALL_EXPR_ARG (t, i) = args[i];
10669 SET_EXPR_LOCATION (t, loc);
10670 CALL_EXPR_IFN (t) = ifn;
10671 return t;
10672 }
10673
10674 /* Build internal call expression. This is just like CALL_EXPR, except
10675 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10676 internal function. */
10677
10678 tree
10679 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10680 tree type, int n, ...)
10681 {
10682 va_list ap;
10683 tree *argarray = XALLOCAVEC (tree, n);
10684 int i;
10685
10686 va_start (ap, n);
10687 for (i = 0; i < n; i++)
10688 argarray[i] = va_arg (ap, tree);
10689 va_end (ap);
10690 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10691 }
10692
/* Return a function call to FN, if the target is guaranteed to support it,
   or null otherwise.

   N is the number of arguments, passed in the "...", and TYPE is the
   type of the return value.  */

tree
maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
			   int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  if (internal_fn_p (fn))
    {
      internal_fn ifn = as_internal_fn (fn);
      if (direct_internal_fn_p (ifn))
	{
	  /* A directly-mapped internal function is only usable when the
	     target supports it for these argument/return types.  */
	  tree_pair types = direct_internal_fn_types (ifn, type, argarray);
	  if (!direct_internal_fn_supported_p (ifn, types,
					       OPTIMIZE_FOR_BOTH))
	    return NULL_TREE;
	}
      return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
    }
  else
    {
      /* Built-in function: require an implicitly-available decl,
	 i.e. one usable without a declaration in the source.  */
      tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
      if (!fndecl)
	return NULL_TREE;
      return build_call_expr_loc_array (loc, fndecl, n, argarray);
    }
}
10731
/* Create a new constant string literal and return a char* pointer to it.
   The STRING_CST value is the LEN characters at STR.  */
tree
build_string_literal (int len, const char *str)
{
  tree t, elem, index, type;

  t = build_string (len, str);
  /* Give the STRING_CST the type "const char[LEN]".  */
  elem = build_type_variant (char_type_node, 1, 0);
  index = build_index_type (size_int (len - 1));
  type = build_array_type (elem, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* The result is &"str"[0], i.e. the address of the first element,
     of pointer-to-const-char type.  */
  type = build_pointer_type (elem);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, elem,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
10754
10755
10756
10757 /* Return true if T (assumed to be a DECL) must be assigned a memory
10758 location. */
10759
10760 bool
10761 needs_to_live_in_memory (const_tree t)
10762 {
10763 return (TREE_ADDRESSABLE (t)
10764 || is_global_var (t)
10765 || (TREE_CODE (t) == RESULT_DECL
10766 && !DECL_BY_REFERENCE (t)
10767 && aggregate_value_p (t, current_function_decl)));
10768 }
10769
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      /* Extend from BITS to the full width by hand: if the value's
	 sign bit (bit BITS-1) is set, fill the upper bits with ones,
	 otherwise clear them.  The two shifts together shift the
	 all-ones mask left by BITS.  */
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
	val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
	val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
10792
10793 /* If TYPE is an integral or pointer type, return an integer type with
10794 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10795 if TYPE is already an integer type of signedness UNSIGNEDP. */
10796
10797 tree
10798 signed_or_unsigned_type_for (int unsignedp, tree type)
10799 {
10800 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10801 return type;
10802
10803 if (TREE_CODE (type) == VECTOR_TYPE)
10804 {
10805 tree inner = TREE_TYPE (type);
10806 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10807 if (!inner2)
10808 return NULL_TREE;
10809 if (inner == inner2)
10810 return type;
10811 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10812 }
10813
10814 if (!INTEGRAL_TYPE_P (type)
10815 && !POINTER_TYPE_P (type)
10816 && TREE_CODE (type) != OFFSET_TYPE)
10817 return NULL_TREE;
10818
10819 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10820 }
10821
10822 /* If TYPE is an integral or pointer type, return an integer type with
10823 the same precision which is unsigned, or itself if TYPE is already an
10824 unsigned integer type. */
10825
10826 tree
10827 unsigned_type_for (tree type)
10828 {
10829 return signed_or_unsigned_type_for (1, type);
10830 }
10831
10832 /* If TYPE is an integral or pointer type, return an integer type with
10833 the same precision which is signed, or itself if TYPE is already a
10834 signed integer type. */
10835
10836 tree
10837 signed_type_for (tree type)
10838 {
10839 return signed_or_unsigned_type_for (0, type);
10840 }
10841
10842 /* If TYPE is a vector type, return a signed integer vector type with the
10843 same width and number of subparts. Otherwise return boolean_type_node. */
10844
10845 tree
10846 truth_type_for (tree type)
10847 {
10848 if (TREE_CODE (type) == VECTOR_TYPE)
10849 {
10850 if (VECTOR_BOOLEAN_TYPE_P (type))
10851 return type;
10852 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
10853 GET_MODE_SIZE (TYPE_MODE (type)));
10854 }
10855 else
10856 return boolean_type_node;
10857 }
10858
/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination:
     bit 2 set iff OPREC > IPREC, bit 1 iff OUTER is unsigned,
     bit 0 iff INNER is unsigned.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* The bound is 2^PREC - 1, represented in OUTER's precision.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}
10911
/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
	 contains all values of INNER type.  In particular, both INNER
	 and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
	 want to obtain -2^(iprec-1).  If we are keeping the
	 precision or narrowing to a signed type, we want to obtain
	 -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      return wide_int_to_tree (outer,
			       wi::mask (prec - 1, true,
					 TYPE_PRECISION (outer)));
    }
}
10941
10942 /* Return nonzero if two operands that are suitable for PHI nodes are
10943 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10944 SSA_NAME or invariant. Note that this is strictly an optimization.
10945 That is, callers of this function can directly call operand_equal_p
10946 and get the same result, only slower. */
10947
10948 int
10949 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10950 {
10951 if (arg0 == arg1)
10952 return 1;
10953 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10954 return 0;
10955 return operand_equal_p (arg0, arg1, 0);
10956 }
10957
10958 /* Returns number of zeros at the end of binary representation of X. */
10959
10960 tree
10961 num_ending_zeros (const_tree x)
10962 {
10963 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10964 }
10965
10966
/* Walk NODE via walk_tree_1 and propagate a non-NULL result (early
   termination) to the caller.  Relies on RESULT, FUNC, DATA, PSET and
   LH being in scope in the enclosing function.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
10975
/* This is a subroutine of walk_tree that walks field of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* fall through */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.  */
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
11055
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

  /* Like WALK_SUBTREE, but for the last operand of a node: loop via
     goto instead of recursing, to bound stack depth on deep trees.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the language-specific hook a chance to handle the node.  */
  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case VECTOR_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      /* Dispatch on the clause code; the FALLTHRUs below accumulate
	 operand walks (2-operand clauses fall into the 1-operand group,
	 which falls into the 0-operand group that walks the chain).  */
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE__GRIDDIM_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 5; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
11422 #undef WALK_SUBTREE
11423
11424 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11425
11426 tree
11427 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11428 walk_tree_lh lh)
11429 {
11430 tree result;
11431
11432 hash_set<tree> pset;
11433 result = walk_tree_1 (tp, func, data, &pset, lh);
11434 return result;
11435 }
11436
11437
11438 tree
11439 tree_block (tree t)
11440 {
11441 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11442
11443 if (IS_EXPR_CODE_CLASS (c))
11444 return LOCATION_BLOCK (t->exp.locus);
11445 gcc_unreachable ();
11446 return NULL;
11447 }
11448
11449 void
11450 tree_set_block (tree t, tree b)
11451 {
11452 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11453
11454 if (IS_EXPR_CODE_CLASS (c))
11455 {
11456 t->exp.locus = set_block (t->exp.locus, b);
11457 }
11458 else
11459 gcc_unreachable ();
11460 }
11461
11462 /* Create a nameless artificial label and put it in the current
11463 function context. The label has a location of LOC. Returns the
11464 newly created label. */
11465
11466 tree
11467 create_artificial_label (location_t loc)
11468 {
11469 tree lab = build_decl (loc,
11470 LABEL_DECL, NULL_TREE, void_type_node);
11471
11472 DECL_ARTIFICIAL (lab) = 1;
11473 DECL_IGNORED_P (lab) = 1;
11474 DECL_CONTEXT (lab) = current_function_decl;
11475 return lab;
11476 }
11477
11478 /* Given a tree, try to return a useful variable name that we can use
11479 to prefix a temporary that is being assigned the value of the tree.
11480 I.E. given <temp> = &A, return A. */
11481
11482 const char *
11483 get_name (tree t)
11484 {
11485 tree stripped_decl;
11486
11487 stripped_decl = t;
11488 STRIP_NOPS (stripped_decl);
11489 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11490 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11491 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11492 {
11493 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11494 if (!name)
11495 return NULL;
11496 return IDENTIFIER_POINTER (name);
11497 }
11498 else
11499 {
11500 switch (TREE_CODE (stripped_decl))
11501 {
11502 case ADDR_EXPR:
11503 return get_name (TREE_OPERAND (stripped_decl, 0));
11504 default:
11505 return NULL;
11506 }
11507 }
11508 }
11509
11510 /* Return true if TYPE has a variable argument list. */
11511
11512 bool
11513 stdarg_p (const_tree fntype)
11514 {
11515 function_args_iterator args_iter;
11516 tree n = NULL_TREE, t;
11517
11518 if (!fntype)
11519 return false;
11520
11521 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11522 {
11523 n = t;
11524 }
11525
11526 return n != NULL_TREE && n != void_type_node;
11527 }
11528
/* Return true if FNTYPE has a prototype.  */
11530
11531 bool
11532 prototype_p (const_tree fntype)
11533 {
11534 tree t;
11535
11536 gcc_assert (fntype != NULL_TREE);
11537
11538 t = TYPE_ARG_TYPES (fntype);
11539 return (t != NULL_TREE);
11540 }
11541
11542 /* If BLOCK is inlined from an __attribute__((__artificial__))
11543 routine, return pointer to location from where it has been
11544 called. */
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk up the BLOCK tree as long as the blocks are inlined copies,
     i.e. have an abstract origin.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);

      /* Follow AO's own abstract-origin chain to its end; the chain
	 may terminate with a self-reference.  */
      while (TREE_CODE (ao) == BLOCK
	     && BLOCK_ABSTRACT_ORIGIN (ao)
	     && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	ao = BLOCK_ABSTRACT_ORIGIN (ao);

      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
11579
11580
11581 /* If EXP is inlined from an __attribute__((__artificial__))
11582 function, return the location of the original call expression. */
11583
11584 location_t
11585 tree_nonartificial_location (tree exp)
11586 {
11587 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11588
11589 if (loc)
11590 return *loc;
11591 else
11592 return EXPR_LOCATION (exp);
11593 }
11594
11595
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */
11598
/* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
11600
hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      /* Hash the raw bytes of the saved cl_optimization structure.  */
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    /* Target option nodes are hashed by the target-provided hook.  */
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}
11630
/* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
   TARGET_OPTION_NODE tree node) is the same as that given by Y.  */
11634
bool
cl_option_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;
  const char *xp;
  const char *yp;
  size_t len;

  /* Nodes of different kinds never compare equal.  */
  if (TREE_CODE (xt) != TREE_CODE (yt))
    return 0;

  if (TREE_CODE (xt) == OPTIMIZATION_NODE)
    {
      /* Compare the raw bytes of the saved cl_optimization
	 structures.  */
      xp = (const char *)TREE_OPTIMIZATION (xt);
      yp = (const char *)TREE_OPTIMIZATION (yt);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
    {
      /* Target option nodes are compared by the target hook.  */
      return cl_target_option_eq (TREE_TARGET_OPTION (xt),
				  TREE_TARGET_OPTION (yt));
    }

  else
    gcc_unreachable ();

  return (memcmp (xp, yp, len) == 0);
}
11665
11666 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11667
tree
build_optimization_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Save OPTS into the scratch node kept in cl_optimization_node so
     it can serve as the hash-table lookup key.  */
  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}
11692
11693 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11694
tree
build_target_option_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Save OPTS into the scratch node kept in cl_target_option_node so
     it can serve as the hash-table lookup key.  */
  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}
11719
11720 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11721 so that they aren't saved during PCH writing. */
11722
11723 void
11724 prepare_target_option_nodes_for_pch (void)
11725 {
11726 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11727 for (; iter != cl_option_hash_table->end (); ++iter)
11728 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11729 TREE_TARGET_GLOBALS (*iter) = NULL;
11730 }
11731
11732 /* Determine the "ultimate origin" of a block. The block may be an inlined
11733 instance of an inlined instance of a block which is local to an inline
11734 function, so we have to trace all of the way back through the origin chain
11735 to find out what sort of node actually served as the original seed for the
11736 given block. */
11737
tree
block_ultimate_origin (const_tree block)
{
  tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);

  /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (BLOCK_ABSTRACT (block) && immediate_origin == block)
    return NULL_TREE;

  if (immediate_origin == NULL_TREE)
    return NULL_TREE;
  else
    {
      tree ret_val;
      tree lookahead = immediate_origin;

      /* Follow the chain of BLOCK origins until it ends (non-BLOCK
	 node or NULL) or becomes self-referential.  */
      do
	{
	  ret_val = lookahead;
	  lookahead = (TREE_CODE (ret_val) == BLOCK
		       ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
	}
      while (lookahead != NULL && lookahead != ret_val);

      /* The block's abstract origin chain may not be the *ultimate* origin of
	 the block.  It could lead to a DECL that has an abstract origin set.
	 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
	 will give us if it has one).  Note that DECL's abstract origins are
	 supposed to be the most distant ancestor (or so decl_ultimate_origin
	 claims), so we don't need to loop following the DECL origins.  */
      if (DECL_P (ret_val))
	return DECL_ORIGIN (ret_val);

      return ret_val;
    }
}
11775
11776 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11777 no instruction. */
11778
bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
11813
11814 /* Return true iff conversion in EXP generates no instruction. Mark
11815 it inline so that we fully inline into the stripping functions even
11816 though we have two uses of this function. */
11817
11818 static inline bool
11819 tree_nop_conversion (const_tree exp)
11820 {
11821 tree outer_type, inner_type;
11822
11823 if (!CONVERT_EXPR_P (exp)
11824 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11825 return false;
11826 if (TREE_OPERAND (exp, 0) == error_mark_node)
11827 return false;
11828
11829 outer_type = TREE_TYPE (exp);
11830 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11831
11832 if (!inner_type)
11833 return false;
11834
11835 return tree_nop_conversion_p (outer_type, inner_type);
11836 }
11837
11838 /* Return true iff conversion in EXP generates no instruction. Don't
11839 consider conversions changing the signedness. */
11840
11841 static bool
11842 tree_sign_nop_conversion (const_tree exp)
11843 {
11844 tree outer_type, inner_type;
11845
11846 if (!tree_nop_conversion (exp))
11847 return false;
11848
11849 outer_type = TREE_TYPE (exp);
11850 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11851
11852 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11853 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11854 }
11855
11856 /* Strip conversions from EXP according to tree_nop_conversion and
11857 return the resulting expression. */
11858
11859 tree
11860 tree_strip_nop_conversions (tree exp)
11861 {
11862 while (tree_nop_conversion (exp))
11863 exp = TREE_OPERAND (exp, 0);
11864 return exp;
11865 }
11866
11867 /* Strip conversions from EXP according to tree_sign_nop_conversion
11868 and return the resulting expression. */
11869
11870 tree
11871 tree_strip_sign_nop_conversions (tree exp)
11872 {
11873 while (tree_sign_nop_conversion (exp))
11874 exp = TREE_OPERAND (exp, 0);
11875 return exp;
11876 }
11877
11878 /* Avoid any floating point extensions from EXP. */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Try float first, then double, keeping the narrowest type that
	 represents the value exactly.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  /* Only conversion expressions can be stripped.  */
  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* Never look through a conversion between binary and decimal
     floating point.  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* Only look through widening conversions.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  return strip_float_extensions (sub);
}
11923
11924 /* Strip out all handled components that produce invariant
11925 offsets. */
11926
const_tree
strip_invariant_refs (const_tree op)
{
  /* Walk down the reference chain; give up (return NULL) as soon as a
     component contributes a non-invariant offset.  */
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* The index must be constant, and neither a variable lower
	     bound (operand 2) nor a variable element size (operand 3)
	     may be present.  */
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  /* A variable field offset (operand 2) is not invariant.  */
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:;
	}
      op = TREE_OPERAND (op, 0);
    }

  return op;
}
11954
11955 static GTY(()) tree gcc_eh_personality_decl;
11956
11957 /* Return the GCC personality function decl. */
11958
11959 tree
11960 lhd_gcc_personality (void)
11961 {
11962 if (!gcc_eh_personality_decl)
11963 gcc_eh_personality_decl = build_personality_function ("gcc");
11964 return gcc_eh_personality_decl;
11965 }
11966
/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   an OBJ_TYPE_REF representing a virtual call of a C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  */
11973
bool
virtual_method_call_p (const_tree target)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  /* An ObjC-style OBJ_TYPE_REF wraps a plain FUNCTION_TYPE rather
     than a C++ METHOD_TYPE; that is not a virtual call.  */
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target)))
    return false;
  return true;
}
11992
11993 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11994
tree
obj_type_ref_class (const_tree ref)
{
  gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
  /* The type of an OBJ_TYPE_REF is a pointer to the method or
     function type being called.  */
  ref = TREE_TYPE (ref);
  gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
  ref = TREE_TYPE (ref);
  /* We look for type THIS points to.  ObjC also builds
     OBJ_TYPE_REF with non-method calls; their first parameter
     ID however also corresponds to class type.  */
  gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
		       || TREE_CODE (ref) == FUNCTION_TYPE)
;
  /* The first argument type is a pointer to the class.  */
  ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
  gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
  return TREE_TYPE (ref);
}
12011
12012 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12013
static tree
lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
{
  unsigned int i;
  tree base_binfo, b;

  /* Depth-first search of BINFO's bases for one located at offset POS
     whose type matches TYPE under the one-definition rule.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
      return base_binfo;
    else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
      return b;
  return NULL;
}
12028
12029 /* Try to find a base info of BINFO that would have its field decl at offset
12030 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12031 found, return, otherwise return NULL_TREE. */
12032
tree
get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (offset < 0)
	return NULL_TREE;

      /* Find the artificial field (i.e. base subobject) whose bit
	 range contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (offset != 0)
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  /* NOTE(review): binfo_offset is declared int; a sufficiently
	     large byte offset would be truncated -- confirm whether
	     that can occur in practice.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  /* No direct base matched; search the base hierarchy
	     recursively.  */
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the base subobject and make OFFSET relative
	 to it.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
12090
12091 /* Returns true if X is a typedef decl. */
12092
12093 bool
12094 is_typedef_decl (const_tree x)
12095 {
12096 return (x && TREE_CODE (x) == TYPE_DECL
12097 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12098 }
12099
12100 /* Returns true iff TYPE is a type variant created for a typedef. */
12101
bool
typedef_variant_p (const_tree type)
{
  /* TYPE was created for a typedef iff its TYPE_NAME is a typedef
     TYPE_DECL (one with DECL_ORIGINAL_TYPE set).  */
  return is_typedef_decl (TYPE_NAME (type));
}
12107
12108 /* Warn about a use of an identifier which was marked deprecated. */
12109 void
12110 warn_deprecated_use (tree node, tree attr)
12111 {
12112 const char *msg;
12113
12114 if (node == 0 || !warn_deprecated_decl)
12115 return;
12116
12117 if (!attr)
12118 {
12119 if (DECL_P (node))
12120 attr = DECL_ATTRIBUTES (node);
12121 else if (TYPE_P (node))
12122 {
12123 tree decl = TYPE_STUB_DECL (node);
12124 if (decl)
12125 attr = lookup_attribute ("deprecated",
12126 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12127 }
12128 }
12129
12130 if (attr)
12131 attr = lookup_attribute ("deprecated", attr);
12132
12133 if (attr)
12134 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12135 else
12136 msg = NULL;
12137
12138 bool w;
12139 if (DECL_P (node))
12140 {
12141 if (msg)
12142 w = warning (OPT_Wdeprecated_declarations,
12143 "%qD is deprecated: %s", node, msg);
12144 else
12145 w = warning (OPT_Wdeprecated_declarations,
12146 "%qD is deprecated", node);
12147 if (w)
12148 inform (DECL_SOURCE_LOCATION (node), "declared here");
12149 }
12150 else if (TYPE_P (node))
12151 {
12152 tree what = NULL_TREE;
12153 tree decl = TYPE_STUB_DECL (node);
12154
12155 if (TYPE_NAME (node))
12156 {
12157 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12158 what = TYPE_NAME (node);
12159 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12160 && DECL_NAME (TYPE_NAME (node)))
12161 what = DECL_NAME (TYPE_NAME (node));
12162 }
12163
12164 if (decl)
12165 {
12166 if (what)
12167 {
12168 if (msg)
12169 w = warning (OPT_Wdeprecated_declarations,
12170 "%qE is deprecated: %s", what, msg);
12171 else
12172 w = warning (OPT_Wdeprecated_declarations,
12173 "%qE is deprecated", what);
12174 }
12175 else
12176 {
12177 if (msg)
12178 w = warning (OPT_Wdeprecated_declarations,
12179 "type is deprecated: %s", msg);
12180 else
12181 w = warning (OPT_Wdeprecated_declarations,
12182 "type is deprecated");
12183 }
12184 if (w)
12185 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12186 }
12187 else
12188 {
12189 if (what)
12190 {
12191 if (msg)
12192 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12193 what, msg);
12194 else
12195 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12196 }
12197 else
12198 {
12199 if (msg)
12200 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12201 msg);
12202 else
12203 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12204 }
12205 }
12206 }
12207 }
12208
12209 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12210 somewhere in it. */
12211
12212 bool
12213 contains_bitfld_component_ref_p (const_tree ref)
12214 {
12215 while (handled_component_p (ref))
12216 {
12217 if (TREE_CODE (ref) == COMPONENT_REF
12218 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12219 return true;
12220 ref = TREE_OPERAND (ref, 0);
12221 }
12222
12223 return false;
12224 }
12225
12226 /* Try to determine whether a TRY_CATCH expression can fall through.
12227 This is a subroutine of block_may_fallthru. */
12228
static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* Operand 0 is the TRY body; operand 1 holds the handler
     statements.  */

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
12273
12274 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12275 need not be 100% accurate; simply be conservative and return true if we
12276 don't know. This is used only to avoid stupidly generating extra code.
12277 If we're wrong, we'll just delete the extra code later. */
12278
bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* An empty block (NULL last statement) is treated like ERROR_MARK:
     conservatively assumed to fall through.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      /* Either arm falling through suffices.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      /* An assignment whose RHS is a call falls through only if the
	 call does; examine the call below.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}
12347
/* True if we are using EH to handle cleanups.  Static, so only this
   file touches it; it is only ever set to true, never cleared.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}
12358
12359 /* Query whether EH is used for cleanups. */
bool
using_eh_for_cleanups_p (void)
{
  /* Read-only accessor for the file-local flag set by
     using_eh_for_cleanups.  */
  return using_eh_for_cleanups_flag;
}
12365
12366 /* Wrapper for tree_code_name to ensure that tree code is valid */
12367 const char *
12368 get_tree_code_name (enum tree_code code)
12369 {
12370 const char *invalid = "<invalid tree code>";
12371
12372 if (code >= MAX_TREE_CODES)
12373 return invalid;
12374
12375 return tree_code_name[code];
12376 }
12377
12378 /* Drops the TREE_OVERFLOW flag from T. */
12379
tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (TREE_CODE (t) == INTEGER_CST)
    return wide_int_to_tree (TREE_TYPE (t), t);

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }
  if (TREE_CODE (t) == VECTOR_CST)
    {
      /* Rewrite each overflowed element in place within the copied
	 vector (ELT is a reference into T).  */
      for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
	{
	  tree& elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	}
    }
  return t;
}
12414
12415 /* Given a memory reference expression T, return its base address.
12416 The base address of a memory reference expression is the main
12417 object being referenced. For instance, the base address for
12418 'array[i].fld[j]' is 'array'. You can think of this as stripping
12419 away the offset part from a memory address.
12420
12421 This function calls handled_component_p to strip away all the inner
12422 parts of the memory reference until it reaches the base object. */
12423
12424 tree
12425 get_base_address (tree t)
12426 {
12427 while (handled_component_p (t))
12428 t = TREE_OPERAND (t, 0);
12429
12430 if ((TREE_CODE (t) == MEM_REF
12431 || TREE_CODE (t) == TARGET_MEM_REF)
12432 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12433 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12434
12435 /* ??? Either the alias oracle or all callers need to properly deal
12436 with WITH_SIZE_EXPRs before we can look through those. */
12437 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12438 return NULL_TREE;
12439
12440 return t;
12441 }
12442
12443 /* Return a tree of sizetype representing the size, in bytes, of the element
12444 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12445
tree
array_ref_element_size (tree exp)
{
  /* Operand 3, if present, overrides the element size; it is expressed
     in units of the element type's alignment.  */
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
12470
12471 /* Return a tree representing the lower bound of the array mentioned in
12472 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12473
12474 tree
12475 array_ref_low_bound (tree exp)
12476 {
12477 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12478
12479 /* If a lower bound is specified in EXP, use it. */
12480 if (TREE_OPERAND (exp, 2))
12481 return TREE_OPERAND (exp, 2);
12482
12483 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12484 substituting for a PLACEHOLDER_EXPR as needed. */
12485 if (domain_type && TYPE_MIN_VALUE (domain_type))
12486 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12487
12488 /* Otherwise, return a zero of the appropriate type. */
12489 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12490 }
12491
12492 /* Return a tree representing the upper bound of the array mentioned in
12493 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12494
12495 tree
12496 array_ref_up_bound (tree exp)
12497 {
12498 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12499
12500 /* If there is a domain type and it has an upper bound, use it, substituting
12501 for a PLACEHOLDER_EXPR as needed. */
12502 if (domain_type && TYPE_MAX_VALUE (domain_type))
12503 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12504
12505 /* Otherwise fail. */
12506 return NULL_TREE;
12507 }
12508
12509 /* Returns true if REF is an array reference or a component reference
12510 to an array at the end of a structure.
12511 If this is the case, the array may be allocated larger
12512 than its upper bound implies. */
12513
bool
array_at_struct_end_p (tree ref)
{
  tree atype;

  /* Pick up the type of the (possibly multi-dimensional) array being
     referenced.  */
  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else
    return false;

  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
	 a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as sth else then what we
	 gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype))
    return true;

  tree size = NULL;

  /* Look through a MEM_REF of a decl, remembering the viewed size so
     it can be compared against the decl's size below.  */
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    {
      size = TYPE_SIZE (TREE_TYPE (ref));
      ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
    }

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  if (DECL_P (ref)
      /* Be sure the size of MEM_REF target match.  For example:

	   char buf[10];
	   struct foo *str = (struct foo *)&buf;

	   str->trailing_array[2] = 1;

	 is valid because BUF allocates enough space.  */

      && (!size || (DECL_SIZE (ref) != NULL
		    && operand_equal_p (DECL_SIZE (ref), size, 0)))
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref)))
    return false;

  return true;
}
12601
12602 /* Return a tree representing the offset, in bytes, of the field referenced
12603 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12604
12605 tree
12606 component_ref_field_offset (tree exp)
12607 {
12608 tree aligned_offset = TREE_OPERAND (exp, 2);
12609 tree field = TREE_OPERAND (exp, 1);
12610 location_t loc = EXPR_LOCATION (exp);
12611
12612 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12613 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12614 value. */
12615 if (aligned_offset)
12616 {
12617 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12618 sizetype from another type of the same width and signedness. */
12619 if (TREE_TYPE (aligned_offset) != sizetype)
12620 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12621 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12622 size_int (DECL_OFFSET_ALIGN (field)
12623 / BITS_PER_UNIT));
12624 }
12625
12626 /* Otherwise, take the offset from that of the field. Substitute
12627 any PLACEHOLDER_EXPR that we have. */
12628 else
12629 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12630 }
12631
12632 /* Return the machine mode of T. For vectors, returns the mode of the
12633 inner type. The main use case is to feed the result to HONOR_NANS,
12634 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12635
12636 machine_mode
12637 element_mode (const_tree t)
12638 {
12639 if (!TYPE_P (t))
12640 t = TREE_TYPE (t);
12641 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12642 t = TREE_TYPE (t);
12643 return TYPE_MODE (t);
12644 }
12645
12646
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specific variant (i.e. the main variant).  */
12649
static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
     - by TYPE_NAME and attributes (i.e. when variant originates by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields: report an error and
     bail out of the function if FLAG differs between T and TV.  */
#define verify_variant_match(flag)					    \
  do {									    \
    if (flag (tv) != flag (t))						    \
      {									    \
	error ("type variant differs by " #flag ".");			    \
	debug_tree (tv);						    \
	return false;							    \
      }									    \
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
  if (TYPE_ARTIFICIAL (tv) && 0)
    verify_variant_match (TYPE_ARTIFICIAL);
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.  */
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
    verify_variant_match (TYPE_FINAL_P);

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      /* Sizes may legitimately be PLACEHOLDER_EXPRs for self-referential
	 types; only compare them when neither side is one.  */
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different TYPE_SIZE_UNIT");
	  debug_tree (tv);
	  error ("type variant's TYPE_SIZE_UNIT");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type's TYPE_SIZE_UNIT");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
    }
  verify_variant_match (TYPE_PRECISION);
  verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.  */
  if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
    verify_variant_match (TYPE_CONTEXT);
  verify_variant_match (TYPE_STRING_FLAG);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different TYPE_VFIELD");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
      || TREE_CODE (t) == INTEGER_TYPE
      || TREE_CODE (t) == BOOLEAN_TYPE
      || TREE_CODE (t) == REAL_TYPE
      || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replaces it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different TYPE_BINFO");
      debug_tree (tv);
      error ("type variant's TYPE_BINFO");
      debug_tree (TYPE_BINFO (tv));
      error ("type's TYPE_BINFO");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE)
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring to a non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different TYPE_FIELDS");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different TREE_TYPE");
      debug_tree (tv);
      error ("type variant's TREE_TYPE");
      debug_tree (TREE_TYPE (tv));
      error ("type's TREE_TYPE");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant's TREE_TYPE");
      debug_tree (TREE_TYPE (tv));
      error ("type's TREE_TYPE");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
12877
12878
12879 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12880 the middle-end types_compatible_p function. It needs to avoid
12881 claiming types are different for types that should be treated
12882 the same with respect to TBAA. Canonical types are also used
12883 for IL consistency checks via the useless_type_conversion_p
12884 predicate which does not handle all type kinds itself but falls
12885 back to pointer-comparison of TYPE_CANONICAL for aggregates
12886 for example. */
12887
12888 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
12889 type calculation because we need to allow inter-operability between signed
12890 and unsigned variants. */
12891
12892 bool
12893 type_with_interoperable_signedness (const_tree type)
12894 {
12895 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
12896 signed char and unsigned char. Similarly fortran FE builds
12897 C_SIZE_T as signed type, while C defines it unsigned. */
12898
12899 return tree_code_for_canonical_type_merging (TREE_CODE (type))
12900 == INTEGER_TYPE
12901 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
12902 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
12903 }
12904
/* Return true iff T1 and T2 are structurally identical as far as TBAA
   is concerned.
   This function is used both by lto.c canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
   that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
   only for LTO because only in these cases TYPE_CANONICAL equivalence
   corresponds to the one defined by gimple_canonical_types_compatible_p.  */
12912
bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
				     bool trust_type_canonical)
{
  /* Type variants should be same as the main variant.  When not doing sanity
     checking to verify this fact, go to main variants and save some work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete type.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
	1) mode assuming that types are complete matching their structure
	2) mode allowing incomplete types but producing equivalence classes
	   and thus ignoring all info from complete types
	3) mode allowing incomplete types to match complete but checking
	   compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used i.e. for warnings during
     declaration merging.  */

  gcc_assert (!trust_type_canonical
	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
  /* If the types have been previously registered and found equal
     they still are.  */

  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure to not return
	 false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
			   && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different mode.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	return false;

      /* In some cases the signed and unsigned types are required to be
	 inter-operable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
	  && !type_with_interoperable_signedness (t1))
	return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
	 interoperable with "signed char".  Unless all frontends are revisited
	 to agree on these types, we must ignore the flag completely.  */

      /* Fortran standard defines C_PTR type that is compatible with every
	 C pointer.  For this reason we need to glob all pointers into one.
	 Still pointers in different address spaces are not compatible.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2),
						    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  Matching
		 PLACEHOLDER_EXPRs are considered equal here.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      trust_type_canonical))
		return false;
	    }

	  /* Argument lists of different length do not match.  */
	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* Don't try to compare variants of an incomplete type, before
	   TYPE_FIELDS has been copied around.  */
	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
	  return true;


	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
	  return false;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields and zero-sized fields.  */
	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
			  || (DECL_SIZE (f1)
			      && integer_zerop (DECL_SIZE (f1)))))
	      f1 = TREE_CHAIN (f1);
	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
			  || (DECL_SIZE (f2)
			      && integer_zerop (DECL_SIZE (f2)))))
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2),
		       trust_type_canonical))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
	 compatible.  This is executed only from verify_type and false
	 positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}
13159
13160 /* Verify type T. */
13161
13162 void
13163 verify_type (const_tree t)
13164 {
13165 bool error_found = false;
13166 tree mv = TYPE_MAIN_VARIANT (t);
13167 if (!mv)
13168 {
13169 error ("Main variant is not defined");
13170 error_found = true;
13171 }
13172 else if (mv != TYPE_MAIN_VARIANT (mv))
13173 {
13174 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13175 debug_tree (mv);
13176 error_found = true;
13177 }
13178 else if (t != mv && !verify_type_variant (t, mv))
13179 error_found = true;
13180
13181 tree ct = TYPE_CANONICAL (t);
13182 if (!ct)
13183 ;
13184 else if (TYPE_CANONICAL (t) != ct)
13185 {
13186 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13187 debug_tree (ct);
13188 error_found = true;
13189 }
13190 /* Method and function types can not be used to address memory and thus
13191 TYPE_CANONICAL really matters only for determining useless conversions.
13192
13193 FIXME: C++ FE produce declarations of builtin functions that are not
13194 compatible with main variants. */
13195 else if (TREE_CODE (t) == FUNCTION_TYPE)
13196 ;
13197 else if (t != ct
13198 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13199 with variably sized arrays because their sizes possibly
13200 gimplified to different variables. */
13201 && !variably_modified_type_p (ct, NULL)
13202 && !gimple_canonical_types_compatible_p (t, ct, false))
13203 {
13204 error ("TYPE_CANONICAL is not compatible");
13205 debug_tree (ct);
13206 error_found = true;
13207 }
13208
13209 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13210 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13211 {
13212 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13213 debug_tree (ct);
13214 error_found = true;
13215 }
13216 /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
13217 FUNCTION_*_QUALIFIED flags are set. */
13218 if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13219 {
13220 error ("TYPE_CANONICAL of main variant is not main variant");
13221 debug_tree (ct);
13222 debug_tree (TYPE_MAIN_VARIANT (ct));
13223 error_found = true;
13224 }
13225
13226
13227 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13228 if (RECORD_OR_UNION_TYPE_P (t))
13229 {
13230 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13231 and danagle the pointer from time to time. */
13232 if (TYPE_VFIELD (t)
13233 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13234 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13235 {
13236 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13237 debug_tree (TYPE_VFIELD (t));
13238 error_found = true;
13239 }
13240 }
13241 else if (TREE_CODE (t) == POINTER_TYPE)
13242 {
13243 if (TYPE_NEXT_PTR_TO (t)
13244 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13245 {
13246 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13247 debug_tree (TYPE_NEXT_PTR_TO (t));
13248 error_found = true;
13249 }
13250 }
13251 else if (TREE_CODE (t) == REFERENCE_TYPE)
13252 {
13253 if (TYPE_NEXT_REF_TO (t)
13254 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13255 {
13256 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13257 debug_tree (TYPE_NEXT_REF_TO (t));
13258 error_found = true;
13259 }
13260 }
13261 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13262 || TREE_CODE (t) == FIXED_POINT_TYPE)
13263 {
13264 /* FIXME: The following check should pass:
13265 useless_type_conversion_p (const_cast <tree> (t),
13266 TREE_TYPE (TYPE_MIN_VALUE (t))
13267 but does not for C sizetypes in LTO. */
13268 }
13269
13270 /* Check various uses of TYPE_MAXVAL. */
13271 if (RECORD_OR_UNION_TYPE_P (t))
13272 {
13273 }
13274 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13275 {
13276 if (TYPE_METHOD_BASETYPE (t)
13277 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13278 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13279 {
13280 error ("TYPE_METHOD_BASETYPE is not record nor union");
13281 debug_tree (TYPE_METHOD_BASETYPE (t));
13282 error_found = true;
13283 }
13284 }
13285 else if (TREE_CODE (t) == OFFSET_TYPE)
13286 {
13287 if (TYPE_OFFSET_BASETYPE (t)
13288 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13289 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13290 {
13291 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13292 debug_tree (TYPE_OFFSET_BASETYPE (t));
13293 error_found = true;
13294 }
13295 }
13296 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13297 || TREE_CODE (t) == FIXED_POINT_TYPE)
13298 {
13299 /* FIXME: The following check should pass:
13300 useless_type_conversion_p (const_cast <tree> (t),
13301 TREE_TYPE (TYPE_MAX_VALUE (t))
13302 but does not for C sizetypes in LTO. */
13303 }
13304 else if (TREE_CODE (t) == ARRAY_TYPE)
13305 {
13306 if (TYPE_ARRAY_MAX_SIZE (t)
13307 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13308 {
13309 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13310 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13311 error_found = true;
13312 }
13313 }
13314 else if (TYPE_MAX_VALUE_RAW (t))
13315 {
13316 error ("TYPE_MAX_VALUE_RAW non-NULL");
13317 debug_tree (TYPE_MAX_VALUE_RAW (t));
13318 error_found = true;
13319 }
13320
13321 /* Check various uses of TYPE_BINFO. */
13322 if (RECORD_OR_UNION_TYPE_P (t))
13323 {
13324 if (!TYPE_BINFO (t))
13325 ;
13326 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13327 {
13328 error ("TYPE_BINFO is not TREE_BINFO");
13329 debug_tree (TYPE_BINFO (t));
13330 error_found = true;
13331 }
13332 }
13333 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13334 {
13335 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13336 debug_tree (TYPE_LANG_SLOT_1 (t));
13337 error_found = true;
13338 }
13339
13340 /* Check various uses of TYPE_VALUES_RAW. */
13341 if (TREE_CODE (t) == ENUMERAL_TYPE)
13342 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13343 {
13344 tree value = TREE_VALUE (l);
13345 tree name = TREE_PURPOSE (l);
13346
13347 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13348 CONST_DECL of ENUMERAL TYPE. */
13349 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13350 {
13351 error ("Enum value is not CONST_DECL or INTEGER_CST");
13352 debug_tree (value);
13353 debug_tree (name);
13354 error_found = true;
13355 }
13356 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13357 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13358 {
13359 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13360 debug_tree (value);
13361 debug_tree (name);
13362 error_found = true;
13363 }
13364 if (TREE_CODE (name) != IDENTIFIER_NODE)
13365 {
13366 error ("Enum value name is not IDENTIFIER_NODE");
13367 debug_tree (value);
13368 debug_tree (name);
13369 error_found = true;
13370 }
13371 }
13372 else if (TREE_CODE (t) == ARRAY_TYPE)
13373 {
13374 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13375 {
13376 error ("Array TYPE_DOMAIN is not integer type");
13377 debug_tree (TYPE_DOMAIN (t));
13378 error_found = true;
13379 }
13380 }
13381 else if (RECORD_OR_UNION_TYPE_P (t))
13382 {
13383 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13384 {
13385 error ("TYPE_FIELDS defined in incomplete type");
13386 error_found = true;
13387 }
13388 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13389 {
13390 /* TODO: verify properties of decls. */
13391 if (TREE_CODE (fld) == FIELD_DECL)
13392 ;
13393 else if (TREE_CODE (fld) == TYPE_DECL)
13394 ;
13395 else if (TREE_CODE (fld) == CONST_DECL)
13396 ;
13397 else if (VAR_P (fld))
13398 ;
13399 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13400 ;
13401 else if (TREE_CODE (fld) == USING_DECL)
13402 ;
13403 else if (TREE_CODE (fld) == FUNCTION_DECL)
13404 ;
13405 else
13406 {
13407 error ("Wrong tree in TYPE_FIELDS list");
13408 debug_tree (fld);
13409 error_found = true;
13410 }
13411 }
13412 }
13413 else if (TREE_CODE (t) == INTEGER_TYPE
13414 || TREE_CODE (t) == BOOLEAN_TYPE
13415 || TREE_CODE (t) == OFFSET_TYPE
13416 || TREE_CODE (t) == REFERENCE_TYPE
13417 || TREE_CODE (t) == NULLPTR_TYPE
13418 || TREE_CODE (t) == POINTER_TYPE)
13419 {
13420 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13421 {
13422 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13423 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13424 error_found = true;
13425 }
13426 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13427 {
13428 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13429 debug_tree (TYPE_CACHED_VALUES (t));
13430 error_found = true;
13431 }
13432 /* Verify just enough of cache to ensure that no one copied it to new type.
13433 All copying should go by copy_node that should clear it. */
13434 else if (TYPE_CACHED_VALUES_P (t))
13435 {
13436 int i;
13437 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13438 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13439 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13440 {
13441 error ("wrong TYPE_CACHED_VALUES entry");
13442 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13443 error_found = true;
13444 break;
13445 }
13446 }
13447 }
13448 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13449 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13450 {
13451 /* C++ FE uses TREE_PURPOSE to store initial values. */
13452 if (TREE_PURPOSE (l) && in_lto_p)
13453 {
13454 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13455 debug_tree (l);
13456 error_found = true;
13457 }
13458 if (!TYPE_P (TREE_VALUE (l)))
13459 {
13460 error ("Wrong entry in TYPE_ARG_TYPES list");
13461 debug_tree (l);
13462 error_found = true;
13463 }
13464 }
13465 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13466 {
13467 error ("TYPE_VALUES_RAW field is non-NULL");
13468 debug_tree (TYPE_VALUES_RAW (t));
13469 error_found = true;
13470 }
13471 if (TREE_CODE (t) != INTEGER_TYPE
13472 && TREE_CODE (t) != BOOLEAN_TYPE
13473 && TREE_CODE (t) != OFFSET_TYPE
13474 && TREE_CODE (t) != REFERENCE_TYPE
13475 && TREE_CODE (t) != NULLPTR_TYPE
13476 && TREE_CODE (t) != POINTER_TYPE
13477 && TYPE_CACHED_VALUES_P (t))
13478 {
13479 error ("TYPE_CACHED_VALUES_P is set while it should not");
13480 error_found = true;
13481 }
13482 if (TYPE_STRING_FLAG (t)
13483 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13484 {
13485 error ("TYPE_STRING_FLAG is set on wrong type code");
13486 error_found = true;
13487 }
13488
  /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
     TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
     of a type. */
13492 if (TREE_CODE (t) == METHOD_TYPE
13493 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13494 {
13495 error ("TYPE_METHOD_BASETYPE is not main variant");
13496 error_found = true;
13497 }
13498
13499 if (error_found)
13500 {
13501 debug_tree (const_cast <tree> (t));
13502 internal_error ("verify_type failed");
13503 }
13504 }
13505
13506
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  /* PREC is the precision in which ARG is currently being interpreted;
     it is narrowed below as conversions are looked through.  */
  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  /* CNT bounds how many conversions we are willing to look through.  */
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      /* For a constant the answer is just the sign bit in PREC.  */
      wide_int w = wi::sext (arg, prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  /* Look through conversions from integral types no wider than PREC;
     such conversions cannot turn a value nonnegative in PREC negative.  */
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      /* Give up on pathologically long conversion chains.  */
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  /* For an SSA name, consult the recorded value range.  While no range
     is known, try to look through a defining conversion statement, the
     same way as the CONVERT_EXPR_P loop above.  */
  wide_int arg_min, arg_max;
  while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      /* As above: a zero extension from a narrower unsigned
		 type guarantees a nonnegative value.  */
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
	return 2;
    }
  return 3;
}
13585
13586
13587
13588
13589 /* Return true if ARG is marked with the nonnull attribute in the
13590 current function signature. */
13591
13592 bool
13593 nonnull_arg_p (const_tree arg)
13594 {
13595 tree t, attrs, fntype;
13596 unsigned HOST_WIDE_INT arg_num;
13597
13598 gcc_assert (TREE_CODE (arg) == PARM_DECL
13599 && (POINTER_TYPE_P (TREE_TYPE (arg))
13600 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13601
13602 /* The static chain decl is always non null. */
13603 if (arg == cfun->static_chain_decl)
13604 return true;
13605
13606 /* THIS argument of method is always non-NULL. */
13607 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13608 && arg == DECL_ARGUMENTS (cfun->decl)
13609 && flag_delete_null_pointer_checks)
13610 return true;
13611
13612 /* Values passed by reference are always non-NULL. */
13613 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13614 && flag_delete_null_pointer_checks)
13615 return true;
13616
13617 fntype = TREE_TYPE (cfun->decl);
13618 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13619 {
13620 attrs = lookup_attribute ("nonnull", attrs);
13621
13622 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13623 if (attrs == NULL_TREE)
13624 return false;
13625
13626 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13627 if (TREE_VALUE (attrs) == NULL_TREE)
13628 return true;
13629
13630 /* Get the position number for ARG in the function signature. */
13631 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13632 t;
13633 t = DECL_CHAIN (t), arg_num++)
13634 {
13635 if (t == arg)
13636 break;
13637 }
13638
13639 gcc_assert (t == arg);
13640
13641 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13642 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13643 {
13644 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13645 return true;
13646 }
13647 }
13648
13649 return false;
13650 }
13651
13652 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13653 information. */
13654
13655 location_t
13656 set_block (location_t loc, tree block)
13657 {
13658 location_t pure_loc = get_pure_location (loc);
13659 source_range src_range = get_range_from_loc (line_table, loc);
13660 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13661 }
13662
13663 location_t
13664 set_source_range (tree expr, location_t start, location_t finish)
13665 {
13666 source_range src_range;
13667 src_range.m_start = start;
13668 src_range.m_finish = finish;
13669 return set_source_range (expr, src_range);
13670 }
13671
13672 location_t
13673 set_source_range (tree expr, source_range src_range)
13674 {
13675 if (!EXPR_P (expr))
13676 return UNKNOWN_LOCATION;
13677
13678 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13679 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13680 pure_loc,
13681 src_range,
13682 NULL);
13683 SET_EXPR_LOCATION (expr, adhoc);
13684 return adhoc;
13685 }
13686
13687 /* Return the name of combined function FN, for debugging purposes. */
13688
13689 const char *
13690 combined_fn_name (combined_fn fn)
13691 {
13692 if (builtin_fn_p (fn))
13693 {
13694 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
13695 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
13696 }
13697 else
13698 return internal_fn_name (as_internal_fn (fn));
13699 }
13700
13701 /* Return a bitmap with a bit set corresponding to each argument in
13702 a function call type FNTYPE declared with attribute nonnull,
13703 or null if none of the function's argument are nonnull. The caller
13704 must free the bitmap. */
13705
13706 bitmap
13707 get_nonnull_args (const_tree fntype)
13708 {
13709 if (fntype == NULL_TREE)
13710 return NULL;
13711
13712 tree attrs = TYPE_ATTRIBUTES (fntype);
13713 if (!attrs)
13714 return NULL;
13715
13716 bitmap argmap = NULL;
13717
13718 /* A function declaration can specify multiple attribute nonnull,
13719 each with zero or more arguments. The loop below creates a bitmap
13720 representing a union of all the arguments. An empty (but non-null)
13721 bitmap means that all arguments have been declaraed nonnull. */
13722 for ( ; attrs; attrs = TREE_CHAIN (attrs))
13723 {
13724 attrs = lookup_attribute ("nonnull", attrs);
13725 if (!attrs)
13726 break;
13727
13728 if (!argmap)
13729 argmap = BITMAP_ALLOC (NULL);
13730
13731 if (!TREE_VALUE (attrs))
13732 {
13733 /* Clear the bitmap in case a previous attribute nonnull
13734 set it and this one overrides it for all arguments. */
13735 bitmap_clear (argmap);
13736 return argmap;
13737 }
13738
13739 /* Iterate over the indices of the format arguments declared nonnull
13740 and set a bit for each. */
13741 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
13742 {
13743 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
13744 bitmap_set_bit (argmap, val);
13745 }
13746 }
13747
13748 return argmap;
13749 }
13750
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Keep the size up to date in tree.h !
   Each entry maps the pointer type node used when declaring builtins to
   the plain (const) pointer type it degrades to, plus the struct tag of
   the pointed-to type for diagnostics.  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
13764
13765 #if CHECKING_P
13766
13767 namespace selftest {
13768
13769 /* Selftests for tree. */
13770
13771 /* Verify that integer constants are sane. */
13772
13773 static void
13774 test_integer_constants ()
13775 {
13776 ASSERT_TRUE (integer_type_node != NULL);
13777 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
13778
13779 tree type = integer_type_node;
13780
13781 tree zero = build_zero_cst (type);
13782 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
13783 ASSERT_EQ (type, TREE_TYPE (zero));
13784
13785 tree one = build_int_cst (type, 1);
13786 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
13787 ASSERT_EQ (type, TREE_TYPE (zero));
13788 }
13789
13790 /* Verify identifiers. */
13791
13792 static void
13793 test_identifiers ()
13794 {
13795 tree identifier = get_identifier ("foo");
13796 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
13797 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
13798 }
13799
13800 /* Verify LABEL_DECL. */
13801
13802 static void
13803 test_labels ()
13804 {
13805 tree identifier = get_identifier ("err");
13806 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
13807 identifier, void_type_node);
13808 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
13809 ASSERT_FALSE (FORCED_LABEL (label_decl));
13810 }
13811
/* Run all of the selftests within this file.  Register any new
   per-file selftest here so the selftest framework picks it up.  */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
}
13821
13822 } // namespace selftest
13823
13824 #endif /* CHECKING_P */
13825
13826 #include "gt-tree.h"