1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "rtl.h"
36 #include "tree.h"
37 #include "gimple.h"
38 #include "tree-pass.h"
39 #include "tm_p.h"
40 #include "ssa.h"
41 #include "expmed.h"
42 #include "insn-config.h"
43 #include "emit-rtl.h"
44 #include "cgraph.h"
45 #include "diagnostic.h"
46 #include "flags.h"
47 #include "alias.h"
48 #include "fold-const.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "attribs.h"
52 #include "varasm.h"
53 #include "toplev.h" /* get_random_seed */
54 #include "output.h"
55 #include "common/common-target.h"
56 #include "langhooks.h"
57 #include "tree-inline.h"
58 #include "tree-iterator.h"
59 #include "internal-fn.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "dojump.h"
63 #include "explow.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "tree-dfa.h"
67 #include "params.h"
68 #include "langhooks-def.h"
69 #include "tree-diagnostic.h"
70 #include "tree-pretty-print.h"
71 #include "except.h"
72 #include "debug.h"
73 #include "intl.h"
74 #include "builtins.h"
75 #include "print-tree.h"
76 #include "ipa-utils.h"
77
78 /* Tree code classes. */
79
80 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
81 #define END_OF_BASE_TREE_CODES tcc_exceptional,
82
83 const enum tree_code_class tree_code_type[] = {
84 #include "all-tree.def"
85 };
86
87 #undef DEFTREECODE
88 #undef END_OF_BASE_TREE_CODES
89
90 /* Table indexed by tree code giving number of expression
91 operands beyond the fixed part of the node structure.
92 Not used for types or decls. */
93
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
95 #define END_OF_BASE_TREE_CODES 0,
96
97 const unsigned char tree_code_length[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Names of tree components.
105 Used for printing out the tree and error messages. */
106 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
107 #define END_OF_BASE_TREE_CODES "@dummy",
108
109 static const char *const tree_code_name[] = {
110 #include "all-tree.def"
111 };
112
113 #undef DEFTREECODE
114 #undef END_OF_BASE_TREE_CODES
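
/* For illustration, a single tree.def entry such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   expands into one element of each table built above, so that
   tree_code_type[PLUS_EXPR] is tcc_binary,
   tree_code_length[PLUS_EXPR] is 2 and
   tree_code_name[PLUS_EXPR] is "plus_expr".  */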
115
116 /* Each tree code class has an associated string representation.
117 These must correspond to the tree_code_class entries. */
118
119 const char *const tree_code_class_strings[] =
120 {
121 "exceptional",
122 "constant",
123 "type",
124 "declaration",
125 "reference",
126 "comparison",
127 "unary",
128 "binary",
129 "statement",
130 "vl_exp",
131 "expression"
132 };
133
134 /* obstack.[ch] explicitly declined to prototype this. */
135 extern int _obstack_allocated_p (struct obstack *h, void *obj);
136
137 /* Statistics-gathering stuff. */
138
139 static int tree_code_counts[MAX_TREE_CODES];
140 int tree_node_counts[(int) all_kinds];
141 int tree_node_sizes[(int) all_kinds];
142
143 /* Keep in sync with tree.h:enum tree_node_kind. */
144 static const char * const tree_node_kind_names[] = {
145 "decls",
146 "types",
147 "blocks",
148 "stmts",
149 "refs",
150 "exprs",
151 "constants",
152 "identifiers",
153 "vecs",
154 "binfos",
155 "ssa names",
156 "constructors",
157 "random kinds",
158 "lang_decl kinds",
159 "lang_type kinds",
160 "omp clauses",
161 };
162
163 /* Unique id for next decl created. */
164 static GTY(()) int next_decl_uid;
165 /* Unique id for next type created. */
166 static GTY(()) int next_type_uid = 1;
167 /* Unique id for next debug decl created. Use negative numbers,
168 to catch erroneous uses. */
169 static GTY(()) int next_debug_decl_uid;
170
171 /* Since we cannot rehash a type after it is in the table, we have to
172 keep the hash code. */
173
174 struct GTY((for_user)) type_hash {
175 unsigned long hash;
176 tree type;
177 };
178
179 /* Initial size of the hash table (rounded to next prime). */
180 #define TYPE_HASH_INITIAL_SIZE 1000
181
182 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
183 {
184 static hashval_t hash (type_hash *t) { return t->hash; }
185 static bool equal (type_hash *a, type_hash *b);
186
187 static int
188 keep_cache_entry (type_hash *&t)
189 {
190 return ggc_marked_p (t->type);
191 }
192 };
193
194 /* Now here is the hash table. When recording a type, it is added to
195 the slot whose index is the hash code. Note that the hash table is
196 used for several kinds of types (function types, array types and
197 array index range types, for now). While all these live in the
198 same table, they are completely independent, and the hash code is
199 computed differently for each of these. */
200
201 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
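
/* A rough sketch of a lookup, assuming HASHCODE was computed by the
   caller with one of the hashing helpers (type_hash_canon, further
   down in this file, contains the real code):

     struct type_hash in;
     in.hash = hashcode;
     in.type = type;
     type_hash **slot
       = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);

   Because the hash code is stored in every entry, the table never
   has to rehash a type after it has been interned.  */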
202
203 /* Hash table and temporary node for larger integer const values. */
204 static GTY (()) tree int_cst_node;
205
206 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
207 {
208 static hashval_t hash (tree t);
209 static bool equal (tree x, tree y);
210 };
211
212 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
213
214 /* Hash table for optimization flags and target option flags. Use the same
215 hash table for both sets of options. Nodes for building the current
216 optimization and target option nodes. The assumption is most of the time
217 the options created will already be in the hash table, so we avoid
218 allocating and freeing up a node repeatedly. */
219 static GTY (()) tree cl_optimization_node;
220 static GTY (()) tree cl_target_option_node;
221
222 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
223 {
224 static hashval_t hash (tree t);
225 static bool equal (tree x, tree y);
226 };
227
228 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
229
230 /* General tree->tree mapping structure for use in hash tables. */
231
232
233 static GTY ((cache))
234 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
235
236 static GTY ((cache))
237 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
238
239 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
240 {
241 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
242
243 static bool
244 equal (tree_vec_map *a, tree_vec_map *b)
245 {
246 return a->base.from == b->base.from;
247 }
248
249 static int
250 keep_cache_entry (tree_vec_map *&m)
251 {
252 return ggc_marked_p (m->base.from);
253 }
254 };
255
256 static GTY ((cache))
257 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
258
259 static void set_type_quals (tree, int);
260 static void print_type_hash_statistics (void);
261 static void print_debug_expr_statistics (void);
262 static void print_value_expr_statistics (void);
263 static void type_hash_list (const_tree, inchash::hash &);
264 static void attribute_hash_list (const_tree, inchash::hash &);
265
266 tree global_trees[TI_MAX];
267 tree integer_types[itk_none];
268
269 bool int_n_enabled_p[NUM_INT_N_ENTS];
270 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
271
272 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
273
274 /* Number of operands for each OpenMP clause. */
275 unsigned const char omp_clause_num_ops[] =
276 {
277 0, /* OMP_CLAUSE_ERROR */
278 1, /* OMP_CLAUSE_PRIVATE */
279 1, /* OMP_CLAUSE_SHARED */
280 1, /* OMP_CLAUSE_FIRSTPRIVATE */
281 2, /* OMP_CLAUSE_LASTPRIVATE */
282 5, /* OMP_CLAUSE_REDUCTION */
283 1, /* OMP_CLAUSE_COPYIN */
284 1, /* OMP_CLAUSE_COPYPRIVATE */
285 3, /* OMP_CLAUSE_LINEAR */
286 2, /* OMP_CLAUSE_ALIGNED */
287 1, /* OMP_CLAUSE_DEPEND */
288 1, /* OMP_CLAUSE_UNIFORM */
289 1, /* OMP_CLAUSE_TO_DECLARE */
290 1, /* OMP_CLAUSE_LINK */
291 2, /* OMP_CLAUSE_FROM */
292 2, /* OMP_CLAUSE_TO */
293 2, /* OMP_CLAUSE_MAP */
294 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
295 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
296 2, /* OMP_CLAUSE__CACHE_ */
297 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
298 1, /* OMP_CLAUSE_USE_DEVICE */
299 2, /* OMP_CLAUSE_GANG */
300 1, /* OMP_CLAUSE_ASYNC */
301 1, /* OMP_CLAUSE_WAIT */
302 0, /* OMP_CLAUSE_AUTO */
303 0, /* OMP_CLAUSE_SEQ */
304 1, /* OMP_CLAUSE__LOOPTEMP_ */
305 1, /* OMP_CLAUSE_IF */
306 1, /* OMP_CLAUSE_NUM_THREADS */
307 1, /* OMP_CLAUSE_SCHEDULE */
308 0, /* OMP_CLAUSE_NOWAIT */
309 1, /* OMP_CLAUSE_ORDERED */
310 0, /* OMP_CLAUSE_DEFAULT */
311 3, /* OMP_CLAUSE_COLLAPSE */
312 0, /* OMP_CLAUSE_UNTIED */
313 1, /* OMP_CLAUSE_FINAL */
314 0, /* OMP_CLAUSE_MERGEABLE */
315 1, /* OMP_CLAUSE_DEVICE */
316 1, /* OMP_CLAUSE_DIST_SCHEDULE */
317 0, /* OMP_CLAUSE_INBRANCH */
318 0, /* OMP_CLAUSE_NOTINBRANCH */
319 1, /* OMP_CLAUSE_NUM_TEAMS */
320 1, /* OMP_CLAUSE_THREAD_LIMIT */
321 0, /* OMP_CLAUSE_PROC_BIND */
322 1, /* OMP_CLAUSE_SAFELEN */
323 1, /* OMP_CLAUSE_SIMDLEN */
324 0, /* OMP_CLAUSE_FOR */
325 0, /* OMP_CLAUSE_PARALLEL */
326 0, /* OMP_CLAUSE_SECTIONS */
327 0, /* OMP_CLAUSE_TASKGROUP */
328 1, /* OMP_CLAUSE_PRIORITY */
329 1, /* OMP_CLAUSE_GRAINSIZE */
330 1, /* OMP_CLAUSE_NUM_TASKS */
331 0, /* OMP_CLAUSE_NOGROUP */
332 0, /* OMP_CLAUSE_THREADS */
333 0, /* OMP_CLAUSE_SIMD */
334 1, /* OMP_CLAUSE_HINT */
335 0, /* OMP_CLAUSE_DEFAULTMAP */
336 1, /* OMP_CLAUSE__SIMDUID_ */
337 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
338 0, /* OMP_CLAUSE_INDEPENDENT */
339 1, /* OMP_CLAUSE_WORKER */
340 1, /* OMP_CLAUSE_VECTOR */
341 1, /* OMP_CLAUSE_NUM_GANGS */
342 1, /* OMP_CLAUSE_NUM_WORKERS */
343 1, /* OMP_CLAUSE_VECTOR_LENGTH */
344 };
345
346 const char * const omp_clause_code_name[] =
347 {
348 "error_clause",
349 "private",
350 "shared",
351 "firstprivate",
352 "lastprivate",
353 "reduction",
354 "copyin",
355 "copyprivate",
356 "linear",
357 "aligned",
358 "depend",
359 "uniform",
360 "to",
361 "link",
362 "from",
363 "to",
364 "map",
365 "use_device_ptr",
366 "is_device_ptr",
367 "_cache_",
368 "device_resident",
369 "use_device",
370 "gang",
371 "async",
372 "wait",
373 "auto",
374 "seq",
375 "_looptemp_",
376 "if",
377 "num_threads",
378 "schedule",
379 "nowait",
380 "ordered",
381 "default",
382 "collapse",
383 "untied",
384 "final",
385 "mergeable",
386 "device",
387 "dist_schedule",
388 "inbranch",
389 "notinbranch",
390 "num_teams",
391 "thread_limit",
392 "proc_bind",
393 "safelen",
394 "simdlen",
395 "for",
396 "parallel",
397 "sections",
398 "taskgroup",
399 "priority",
400 "grainsize",
401 "num_tasks",
402 "nogroup",
403 "threads",
404 "simd",
405 "hint",
406 "defaultmap",
407 "_simduid_",
408 "_Cilk_for_count_",
409 "independent",
410 "worker",
411 "vector",
412 "num_gangs",
413 "num_workers",
414 "vector_length"
415 };
416
417
418 /* Return the tree node structure used by tree code CODE. */
419
420 static inline enum tree_node_structure_enum
421 tree_node_structure_for_code (enum tree_code code)
422 {
423 switch (TREE_CODE_CLASS (code))
424 {
425 case tcc_declaration:
426 {
427 switch (code)
428 {
429 case FIELD_DECL:
430 return TS_FIELD_DECL;
431 case PARM_DECL:
432 return TS_PARM_DECL;
433 case VAR_DECL:
434 return TS_VAR_DECL;
435 case LABEL_DECL:
436 return TS_LABEL_DECL;
437 case RESULT_DECL:
438 return TS_RESULT_DECL;
439 case DEBUG_EXPR_DECL:
440 return TS_DECL_WRTL;
441 case CONST_DECL:
442 return TS_CONST_DECL;
443 case TYPE_DECL:
444 return TS_TYPE_DECL;
445 case FUNCTION_DECL:
446 return TS_FUNCTION_DECL;
447 case TRANSLATION_UNIT_DECL:
448 return TS_TRANSLATION_UNIT_DECL;
449 default:
450 return TS_DECL_NON_COMMON;
451 }
452 }
453 case tcc_type:
454 return TS_TYPE_NON_COMMON;
455 case tcc_reference:
456 case tcc_comparison:
457 case tcc_unary:
458 case tcc_binary:
459 case tcc_expression:
460 case tcc_statement:
461 case tcc_vl_exp:
462 return TS_EXP;
463 default: /* tcc_constant and tcc_exceptional */
464 break;
465 }
466 switch (code)
467 {
468 /* tcc_constant cases. */
469 case VOID_CST: return TS_TYPED;
470 case INTEGER_CST: return TS_INT_CST;
471 case REAL_CST: return TS_REAL_CST;
472 case FIXED_CST: return TS_FIXED_CST;
473 case COMPLEX_CST: return TS_COMPLEX;
474 case VECTOR_CST: return TS_VECTOR;
475 case STRING_CST: return TS_STRING;
476 /* tcc_exceptional cases. */
477 case ERROR_MARK: return TS_COMMON;
478 case IDENTIFIER_NODE: return TS_IDENTIFIER;
479 case TREE_LIST: return TS_LIST;
480 case TREE_VEC: return TS_VEC;
481 case SSA_NAME: return TS_SSA_NAME;
482 case PLACEHOLDER_EXPR: return TS_COMMON;
483 case STATEMENT_LIST: return TS_STATEMENT_LIST;
484 case BLOCK: return TS_BLOCK;
485 case CONSTRUCTOR: return TS_CONSTRUCTOR;
486 case TREE_BINFO: return TS_BINFO;
487 case OMP_CLAUSE: return TS_OMP_CLAUSE;
488 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
489 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
490
491 default:
492 gcc_unreachable ();
493 }
494 }
495
496
497 /* Initialize tree_contains_struct to describe the hierarchy of tree
498 nodes. */
499
500 static void
501 initialize_tree_contains_struct (void)
502 {
503 unsigned i;
504
505 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
506 {
507 enum tree_code code;
508 enum tree_node_structure_enum ts_code;
509
510 code = (enum tree_code) i;
511 ts_code = tree_node_structure_for_code (code);
512
513 /* Mark the TS structure itself. */
514 tree_contains_struct[code][ts_code] = 1;
515
516 /* Mark all the structures that TS is derived from. */
517 switch (ts_code)
518 {
519 case TS_TYPED:
520 case TS_BLOCK:
521 MARK_TS_BASE (code);
522 break;
523
524 case TS_COMMON:
525 case TS_INT_CST:
526 case TS_REAL_CST:
527 case TS_FIXED_CST:
528 case TS_VECTOR:
529 case TS_STRING:
530 case TS_COMPLEX:
531 case TS_SSA_NAME:
532 case TS_CONSTRUCTOR:
533 case TS_EXP:
534 case TS_STATEMENT_LIST:
535 MARK_TS_TYPED (code);
536 break;
537
538 case TS_IDENTIFIER:
539 case TS_DECL_MINIMAL:
540 case TS_TYPE_COMMON:
541 case TS_LIST:
542 case TS_VEC:
543 case TS_BINFO:
544 case TS_OMP_CLAUSE:
545 case TS_OPTIMIZATION:
546 case TS_TARGET_OPTION:
547 MARK_TS_COMMON (code);
548 break;
549
550 case TS_TYPE_WITH_LANG_SPECIFIC:
551 MARK_TS_TYPE_COMMON (code);
552 break;
553
554 case TS_TYPE_NON_COMMON:
555 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
556 break;
557
558 case TS_DECL_COMMON:
559 MARK_TS_DECL_MINIMAL (code);
560 break;
561
562 case TS_DECL_WRTL:
563 case TS_CONST_DECL:
564 MARK_TS_DECL_COMMON (code);
565 break;
566
567 case TS_DECL_NON_COMMON:
568 MARK_TS_DECL_WITH_VIS (code);
569 break;
570
571 case TS_DECL_WITH_VIS:
572 case TS_PARM_DECL:
573 case TS_LABEL_DECL:
574 case TS_RESULT_DECL:
575 MARK_TS_DECL_WRTL (code);
576 break;
577
578 case TS_FIELD_DECL:
579 MARK_TS_DECL_COMMON (code);
580 break;
581
582 case TS_VAR_DECL:
583 MARK_TS_DECL_WITH_VIS (code);
584 break;
585
586 case TS_TYPE_DECL:
587 case TS_FUNCTION_DECL:
588 MARK_TS_DECL_NON_COMMON (code);
589 break;
590
591 case TS_TRANSLATION_UNIT_DECL:
592 MARK_TS_DECL_COMMON (code);
593 break;
594
595 default:
596 gcc_unreachable ();
597 }
598 }
599
600 /* Basic consistency checks for attributes used in fold. */
601 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
602 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
603 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
604 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
605 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
606 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
607 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
608 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
609 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
610 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
611 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
612 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
613 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
614 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
615 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
616 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
617 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
618 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
619 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
620 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
621 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
622 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
623 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
624 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
625 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
627 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
628 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
629 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
630 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
631 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
632 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
633 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
634 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
635 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
636 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
637 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
641 }
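
/* Worked example: VAR_DECL maps to TS_VAR_DECL, so the cascade of
   MARK_TS_* macros above ends up setting

     tree_contains_struct[VAR_DECL][TS_VAR_DECL]
     tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]
     tree_contains_struct[VAR_DECL][TS_DECL_WRTL]
     tree_contains_struct[VAR_DECL][TS_DECL_COMMON]
     tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]

   which is exactly what the consistency checks at the end of
   initialize_tree_contains_struct verify.  */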
642
643
644 /* Init tree.c. */
645
646 void
647 init_ttree (void)
648 {
649 /* Initialize the hash table of types. */
650 type_hash_table
651 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
652
653 debug_expr_for_decl
654 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
655
656 value_expr_for_decl
657 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
658
659 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
660
661 int_cst_node = make_int_cst (1, 1);
662
663 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
664
665 cl_optimization_node = make_node (OPTIMIZATION_NODE);
666 cl_target_option_node = make_node (TARGET_OPTION_NODE);
667
668 /* Initialize the tree_contains_struct array. */
669 initialize_tree_contains_struct ();
670 lang_hooks.init_ts ();
671 }
672
673 \f
674 /* The name of the object as the assembler will see it (but before any
675 translations made by ASM_OUTPUT_LABELREF). Often this is the same
676 as DECL_NAME. It is an IDENTIFIER_NODE. */
677 tree
678 decl_assembler_name (tree decl)
679 {
680 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
681 lang_hooks.set_decl_assembler_name (decl);
682 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
683 }
684
685 /* When the target supports COMDAT groups, this indicates which group the
686 DECL is associated with. This can be either an IDENTIFIER_NODE or a
687 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
688 tree
689 decl_comdat_group (const_tree node)
690 {
691 struct symtab_node *snode = symtab_node::get (node);
692 if (!snode)
693 return NULL;
694 return snode->get_comdat_group ();
695 }
696
697 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
698 tree
699 decl_comdat_group_id (const_tree node)
700 {
701 struct symtab_node *snode = symtab_node::get (node);
702 if (!snode)
703 return NULL;
704 return snode->get_comdat_group_id ();
705 }
706
707 /* When the target supports named sections, return the name of the section
708 NODE is placed in, as a string, or NULL if it is in no section. */
709 const char *
710 decl_section_name (const_tree node)
711 {
712 struct symtab_node *snode = symtab_node::get (node);
713 if (!snode)
714 return NULL;
715 return snode->get_section ();
716 }
717
718 /* Set section name of NODE to the string VALUE, or clear the section
719 when VALUE is NULL. */
720 void
721 set_decl_section_name (tree node, const char *value)
722 {
723 struct symtab_node *snode;
724
725 if (value == NULL)
726 {
727 snode = symtab_node::get (node);
728 if (!snode)
729 return;
730 }
731 else if (TREE_CODE (node) == VAR_DECL)
732 snode = varpool_node::get_create (node);
733 else
734 snode = cgraph_node::get_create (node);
735 snode->set_section (value);
736 }
737
738 /* Return TLS model of a variable NODE. */
739 enum tls_model
740 decl_tls_model (const_tree node)
741 {
742 struct varpool_node *snode = varpool_node::get (node);
743 if (!snode)
744 return TLS_MODEL_NONE;
745 return snode->tls_model;
746 }
747
748 /* Set TLS model of variable NODE to MODEL. */
749 void
750 set_decl_tls_model (tree node, enum tls_model model)
751 {
752 struct varpool_node *vnode;
753
754 if (model == TLS_MODEL_NONE)
755 {
756 vnode = varpool_node::get (node);
757 if (!vnode)
758 return;
759 }
760 else
761 vnode = varpool_node::get_create (node);
762 vnode->tls_model = model;
763 }
764
765 /* Compute the number of bytes occupied by a tree with code CODE.
766 This function cannot be used for nodes that have variable sizes,
767 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
768 size_t
769 tree_code_size (enum tree_code code)
770 {
771 switch (TREE_CODE_CLASS (code))
772 {
773 case tcc_declaration: /* A decl node */
774 {
775 switch (code)
776 {
777 case FIELD_DECL:
778 return sizeof (struct tree_field_decl);
779 case PARM_DECL:
780 return sizeof (struct tree_parm_decl);
781 case VAR_DECL:
782 return sizeof (struct tree_var_decl);
783 case LABEL_DECL:
784 return sizeof (struct tree_label_decl);
785 case RESULT_DECL:
786 return sizeof (struct tree_result_decl);
787 case CONST_DECL:
788 return sizeof (struct tree_const_decl);
789 case TYPE_DECL:
790 return sizeof (struct tree_type_decl);
791 case FUNCTION_DECL:
792 return sizeof (struct tree_function_decl);
793 case DEBUG_EXPR_DECL:
794 return sizeof (struct tree_decl_with_rtl);
795 case TRANSLATION_UNIT_DECL:
796 return sizeof (struct tree_translation_unit_decl);
797 case NAMESPACE_DECL:
798 case IMPORTED_DECL:
799 case NAMELIST_DECL:
800 return sizeof (struct tree_decl_non_common);
801 default:
802 return lang_hooks.tree_size (code);
803 }
804 }
805
806 case tcc_type: /* a type node */
807 return sizeof (struct tree_type_non_common);
808
809 case tcc_reference: /* a reference */
810 case tcc_expression: /* an expression */
811 case tcc_statement: /* an expression with side effects */
812 case tcc_comparison: /* a comparison expression */
813 case tcc_unary: /* a unary arithmetic expression */
814 case tcc_binary: /* a binary arithmetic expression */
815 return (sizeof (struct tree_exp)
816 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
817
818 case tcc_constant: /* a constant */
819 switch (code)
820 {
821 case VOID_CST: return sizeof (struct tree_typed);
822 case INTEGER_CST: gcc_unreachable ();
823 case REAL_CST: return sizeof (struct tree_real_cst);
824 case FIXED_CST: return sizeof (struct tree_fixed_cst);
825 case COMPLEX_CST: return sizeof (struct tree_complex);
826 case VECTOR_CST: return sizeof (struct tree_vector);
827 case STRING_CST: gcc_unreachable ();
828 default:
829 return lang_hooks.tree_size (code);
830 }
831
832 case tcc_exceptional: /* something random, like an identifier. */
833 switch (code)
834 {
835 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
836 case TREE_LIST: return sizeof (struct tree_list);
837
838 case ERROR_MARK:
839 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
840
841 case TREE_VEC:
842 case OMP_CLAUSE: gcc_unreachable ();
843
844 case SSA_NAME: return sizeof (struct tree_ssa_name);
845
846 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
847 case BLOCK: return sizeof (struct tree_block);
848 case CONSTRUCTOR: return sizeof (struct tree_constructor);
849 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
850 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
851
852 default:
853 return lang_hooks.tree_size (code);
854 }
855
856 default:
857 gcc_unreachable ();
858 }
859 }
860
861 /* Compute the number of bytes occupied by NODE. This routine only
862 looks at TREE_CODE, except for those nodes that have variable sizes. */
863 size_t
864 tree_size (const_tree node)
865 {
866 const enum tree_code code = TREE_CODE (node);
867 switch (code)
868 {
869 case INTEGER_CST:
870 return (sizeof (struct tree_int_cst)
871 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
872
873 case TREE_BINFO:
874 return (offsetof (struct tree_binfo, base_binfos)
875 + vec<tree, va_gc>
876 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
877
878 case TREE_VEC:
879 return (sizeof (struct tree_vec)
880 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
881
882 case VECTOR_CST:
883 return (sizeof (struct tree_vector)
884 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
885
886 case STRING_CST:
887 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
888
889 case OMP_CLAUSE:
890 return (sizeof (struct tree_omp_clause)
891 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
892 * sizeof (tree));
893
894 default:
895 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
896 return (sizeof (struct tree_exp)
897 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
898 else
899 return tree_code_size (code);
900 }
901 }
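
/* Worked example of the variable-size cases above: a TREE_VEC holding
   four elements occupies

     sizeof (struct tree_vec) + 3 * sizeof (tree)

   bytes, because struct tree_vec already embeds room for one element;
   the same "minus one embedded element" pattern applies to the other
   trailing-array nodes handled in tree_size.  */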
902
903 /* Record interesting allocation statistics for a tree node with CODE
904 and LENGTH. */
905
906 static void
907 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
908 size_t length ATTRIBUTE_UNUSED)
909 {
910 enum tree_code_class type = TREE_CODE_CLASS (code);
911 tree_node_kind kind;
912
913 if (!GATHER_STATISTICS)
914 return;
915
916 switch (type)
917 {
918 case tcc_declaration: /* A decl node */
919 kind = d_kind;
920 break;
921
922 case tcc_type: /* a type node */
923 kind = t_kind;
924 break;
925
926 case tcc_statement: /* an expression with side effects */
927 kind = s_kind;
928 break;
929
930 case tcc_reference: /* a reference */
931 kind = r_kind;
932 break;
933
934 case tcc_expression: /* an expression */
935 case tcc_comparison: /* a comparison expression */
936 case tcc_unary: /* a unary arithmetic expression */
937 case tcc_binary: /* a binary arithmetic expression */
938 kind = e_kind;
939 break;
940
941 case tcc_constant: /* a constant */
942 kind = c_kind;
943 break;
944
945 case tcc_exceptional: /* something random, like an identifier. */
946 switch (code)
947 {
948 case IDENTIFIER_NODE:
949 kind = id_kind;
950 break;
951
952 case TREE_VEC:
953 kind = vec_kind;
954 break;
955
956 case TREE_BINFO:
957 kind = binfo_kind;
958 break;
959
960 case SSA_NAME:
961 kind = ssa_name_kind;
962 break;
963
964 case BLOCK:
965 kind = b_kind;
966 break;
967
968 case CONSTRUCTOR:
969 kind = constr_kind;
970 break;
971
972 case OMP_CLAUSE:
973 kind = omp_clause_kind;
974 break;
975
976 default:
977 kind = x_kind;
978 break;
979 }
980 break;
981
982 case tcc_vl_exp:
983 kind = e_kind;
984 break;
985
986 default:
987 gcc_unreachable ();
988 }
989
990 tree_code_counts[(int) code]++;
991 tree_node_counts[(int) kind]++;
992 tree_node_sizes[(int) kind] += length;
993 }
994
995 /* Allocate and return a new UID from the DECL_UID namespace. */
996
997 int
998 allocate_decl_uid (void)
999 {
1000 return next_decl_uid++;
1001 }
1002
1003 /* Return a newly allocated node of code CODE. For decl and type
1004 nodes, some other fields are initialized. The rest of the node is
1005 initialized to zero. This function cannot be used for TREE_VEC,
1006 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1007 tree_code_size.
1008
1009 Achoo! I got a code in the node. */
1010
1011 tree
1012 make_node_stat (enum tree_code code MEM_STAT_DECL)
1013 {
1014 tree t;
1015 enum tree_code_class type = TREE_CODE_CLASS (code);
1016 size_t length = tree_code_size (code);
1017
1018 record_node_allocation_statistics (code, length);
1019
1020 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1021 TREE_SET_CODE (t, code);
1022
1023 switch (type)
1024 {
1025 case tcc_statement:
1026 TREE_SIDE_EFFECTS (t) = 1;
1027 break;
1028
1029 case tcc_declaration:
1030 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1031 {
1032 if (code == FUNCTION_DECL)
1033 {
1034 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1035 DECL_MODE (t) = FUNCTION_MODE;
1036 }
1037 else
1038 DECL_ALIGN (t) = 1;
1039 }
1040 DECL_SOURCE_LOCATION (t) = input_location;
1041 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1042 DECL_UID (t) = --next_debug_decl_uid;
1043 else
1044 {
1045 DECL_UID (t) = allocate_decl_uid ();
1046 SET_DECL_PT_UID (t, -1);
1047 }
1048 if (TREE_CODE (t) == LABEL_DECL)
1049 LABEL_DECL_UID (t) = -1;
1050
1051 break;
1052
1053 case tcc_type:
1054 TYPE_UID (t) = next_type_uid++;
1055 TYPE_ALIGN (t) = BITS_PER_UNIT;
1056 TYPE_USER_ALIGN (t) = 0;
1057 TYPE_MAIN_VARIANT (t) = t;
1058 TYPE_CANONICAL (t) = t;
1059
1060 /* Default to no attributes for type, but let target change that. */
1061 TYPE_ATTRIBUTES (t) = NULL_TREE;
1062 targetm.set_default_type_attributes (t);
1063
1064 /* We have not yet computed the alias set for this type. */
1065 TYPE_ALIAS_SET (t) = -1;
1066 break;
1067
1068 case tcc_constant:
1069 TREE_CONSTANT (t) = 1;
1070 break;
1071
1072 case tcc_expression:
1073 switch (code)
1074 {
1075 case INIT_EXPR:
1076 case MODIFY_EXPR:
1077 case VA_ARG_EXPR:
1078 case PREDECREMENT_EXPR:
1079 case PREINCREMENT_EXPR:
1080 case POSTDECREMENT_EXPR:
1081 case POSTINCREMENT_EXPR:
1082 /* All of these have side-effects, no matter what their
1083 operands are. */
1084 TREE_SIDE_EFFECTS (t) = 1;
1085 break;
1086
1087 default:
1088 break;
1089 }
1090 break;
1091
1092 case tcc_exceptional:
1093 switch (code)
1094 {
1095 case TARGET_OPTION_NODE:
1096 TREE_TARGET_OPTION(t)
1097 = ggc_cleared_alloc<struct cl_target_option> ();
1098 break;
1099
1100 case OPTIMIZATION_NODE:
1101 TREE_OPTIMIZATION (t)
1102 = ggc_cleared_alloc<struct cl_optimization> ();
1103 break;
1104
1105 default:
1106 break;
1107 }
1108 break;
1109
1110 default:
1111 /* Other classes need no special treatment. */
1112 break;
1113 }
1114
1115 return t;
1116 }
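
/* A minimal usage sketch: make_node (INTEGER_TYPE) returns a zeroed
   type node whose TYPE_UID, TYPE_ALIGN, TYPE_MAIN_VARIANT and
   TYPE_CANONICAL are already filled in as above; the caller is still
   expected to set TYPE_PRECISION and lay the type out (see
   build_nonstandard_integer_type later in this file for a complete
   user of this pattern).  */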
1117 \f
1118 /* Return a new node with the same contents as NODE except that its
1119 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1120
1121 tree
1122 copy_node_stat (tree node MEM_STAT_DECL)
1123 {
1124 tree t;
1125 enum tree_code code = TREE_CODE (node);
1126 size_t length;
1127
1128 gcc_assert (code != STATEMENT_LIST);
1129
1130 length = tree_size (node);
1131 record_node_allocation_statistics (code, length);
1132 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1133 memcpy (t, node, length);
1134
1135 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1136 TREE_CHAIN (t) = 0;
1137 TREE_ASM_WRITTEN (t) = 0;
1138 TREE_VISITED (t) = 0;
1139
1140 if (TREE_CODE_CLASS (code) == tcc_declaration)
1141 {
1142 if (code == DEBUG_EXPR_DECL)
1143 DECL_UID (t) = --next_debug_decl_uid;
1144 else
1145 {
1146 DECL_UID (t) = allocate_decl_uid ();
1147 if (DECL_PT_UID_SET_P (node))
1148 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1149 }
1150 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1151 && DECL_HAS_VALUE_EXPR_P (node))
1152 {
1153 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1154 DECL_HAS_VALUE_EXPR_P (t) = 1;
1155 }
1156 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1157 if (TREE_CODE (node) == VAR_DECL)
1158 {
1159 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1160 t->decl_with_vis.symtab_node = NULL;
1161 }
1162 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1163 {
1164 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1165 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1166 }
1167 if (TREE_CODE (node) == FUNCTION_DECL)
1168 {
1169 DECL_STRUCT_FUNCTION (t) = NULL;
1170 t->decl_with_vis.symtab_node = NULL;
1171 }
1172 }
1173 else if (TREE_CODE_CLASS (code) == tcc_type)
1174 {
1175 TYPE_UID (t) = next_type_uid++;
1176 /* The following is so that the debug code for
1177 the copy is different from the original type.
1178 The two statements usually duplicate each other
1179 (because they clear fields of the same union),
1180 but the optimizer should catch that. */
1181 TYPE_SYMTAB_POINTER (t) = 0;
1182 TYPE_SYMTAB_ADDRESS (t) = 0;
1183
1184 /* Do not copy the values cache. */
1185 if (TYPE_CACHED_VALUES_P (t))
1186 {
1187 TYPE_CACHED_VALUES_P (t) = 0;
1188 TYPE_CACHED_VALUES (t) = NULL_TREE;
1189 }
1190 }
1191 else if (code == TARGET_OPTION_NODE)
1192 {
1193 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1194 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1195 sizeof (struct cl_target_option));
1196 }
1197 else if (code == OPTIMIZATION_NODE)
1198 {
1199 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1200 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1201 sizeof (struct cl_optimization));
1202 }
1203
1204 return t;
1205 }
1206
1207 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1208 For example, this can copy a list made of TREE_LIST nodes. */
1209
1210 tree
1211 copy_list (tree list)
1212 {
1213 tree head;
1214 tree prev, next;
1215
1216 if (list == 0)
1217 return 0;
1218
1219 head = prev = copy_node (list);
1220 next = TREE_CHAIN (list);
1221 while (next)
1222 {
1223 TREE_CHAIN (prev) = copy_node (next);
1224 prev = TREE_CHAIN (prev);
1225 next = TREE_CHAIN (next);
1226 }
1227 return head;
1228 }
1229
1230 \f
1231 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1232 INTEGER_CST with value CST and type TYPE. */
1233
1234 static unsigned int
1235 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1236 {
1237 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1238 /* We need an extra zero HWI if CST is an unsigned integer with its
1239 upper bit set, and if CST occupies a whole number of HWIs. */
1240 if (TYPE_UNSIGNED (type)
1241 && wi::neg_p (cst)
1242 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1243 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1244 return cst.get_len ();
1245 }
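
/* Worked example (assuming HOST_BITS_PER_WIDE_INT is 64): for a
   64-bit unsigned type holding 0xffffffffffffffff the top bit is set
   (wi::neg_p is true) and the precision is an exact multiple of the
   HWI size, so the function returns 64 / 64 + 1 == 2; the extra
   element is the leading zero HWI mentioned above.  For the same bit
   pattern in a signed 64-bit type it simply returns cst.get_len ().  */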
1246
1247 /* Return a new INTEGER_CST with value CST and type TYPE. */
1248
1249 static tree
1250 build_new_int_cst (tree type, const wide_int &cst)
1251 {
1252 unsigned int len = cst.get_len ();
1253 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1254 tree nt = make_int_cst (len, ext_len);
1255
1256 if (len < ext_len)
1257 {
1258 --ext_len;
1259 TREE_INT_CST_ELT (nt, ext_len) = 0;
1260 for (unsigned int i = len; i < ext_len; ++i)
1261 TREE_INT_CST_ELT (nt, i) = -1;
1262 }
1263 else if (TYPE_UNSIGNED (type)
1264 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1265 {
1266 len--;
1267 TREE_INT_CST_ELT (nt, len)
1268 = zext_hwi (cst.elt (len),
1269 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1270 }
1271
1272 for (unsigned int i = 0; i < len; i++)
1273 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1274 TREE_TYPE (nt) = type;
1275 return nt;
1276 }
1277
1278 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1279
1280 tree
1281 build_int_cst (tree type, HOST_WIDE_INT low)
1282 {
1283 /* Support legacy code. */
1284 if (!type)
1285 type = integer_type_node;
1286
1287 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1288 }
1289
1290 tree
1291 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1292 {
1293 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1294 }
1295
1296 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1297
1298 tree
1299 build_int_cst_type (tree type, HOST_WIDE_INT low)
1300 {
1301 gcc_assert (type);
1302 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1303 }
1304
1305 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1306 of CST is assumed to be the same as the signedness of TYPE. */
1307
1308 tree
1309 double_int_to_tree (tree type, double_int cst)
1310 {
1311 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1312 }
1313
1314 /* We force the wide_int CST to the range of the type TYPE by sign or
1315 zero extending it. OVERFLOWABLE indicates if we are interested in
1316 overflow of the value, when >0 we are only interested in signed
1317 overflow, for <0 we are interested in any overflow. OVERFLOWED
1318 indicates whether overflow has already occurred. We force
1319 CST's value to be within range of TYPE (by setting to 0 or 1 all
1320 the bits outside the type's range). We set TREE_OVERFLOW if
1321 OVERFLOWED is nonzero,
1322 or OVERFLOWABLE is >0 and signed overflow occurs,
1323 or OVERFLOWABLE is <0 and any overflow occurs.
1324 We return a new tree node for the extended wide_int. The node
1325 is shared if no overflow flags are set. */
1327
1328
1329 tree
1330 force_fit_type (tree type, const wide_int_ref &cst,
1331 int overflowable, bool overflowed)
1332 {
1333 signop sign = TYPE_SIGN (type);
1334
1335 /* If we need to set overflow flags, return a new unshared node. */
1336 if (overflowed || !wi::fits_to_tree_p (cst, type))
1337 {
1338 if (overflowed
1339 || overflowable < 0
1340 || (overflowable > 0 && sign == SIGNED))
1341 {
1342 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1343 tree t = build_new_int_cst (type, tmp);
1344 TREE_OVERFLOW (t) = 1;
1345 return t;
1346 }
1347 }
1348
1349 /* Else build a shared node. */
1350 return wide_int_to_tree (type, cst);
1351 }
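
/* Worked example: fitting the value 300 into an 8-bit unsigned type.
   300 does not satisfy wi::fits_to_tree_p, so

     - with OVERFLOWABLE < 0 (or OVERFLOWED set), an unshared
       INTEGER_CST with value 300 & 0xff == 44 is built and
       TREE_OVERFLOW is set on it;
     - with OVERFLOWABLE > 0 the type is unsigned, so no flag is
       wanted and the shared node for 44 is returned through
       wide_int_to_tree.  */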
1352
1353 /* These are the hash table functions for the hash table of INTEGER_CST
1354 nodes. */
1355
1356 /* Return the hash code of X, an INTEGER_CST. */
1357
1358 hashval_t
1359 int_cst_hasher::hash (tree x)
1360 {
1361 const_tree const t = x;
1362 hashval_t code = TYPE_UID (TREE_TYPE (t));
1363 int i;
1364
1365 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1366 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1367
1368 return code;
1369 }
1370
1371 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1372 is the same as that given by *Y. */
1373
1374 bool
1375 int_cst_hasher::equal (tree x, tree y)
1376 {
1377 const_tree const xt = x;
1378 const_tree const yt = y;
1379
1380 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1381 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1382 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1383 return false;
1384
1385 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1386 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1387 return false;
1388
1389 return true;
1390 }
1391
1392 /* Create an INT_CST node of TYPE and value CST.
1393 The returned node is always shared. For small integers we use a
1394 per-type vector cache, for larger ones we use a single hash table.
1395 The value is extended from its precision according to the sign of
1396 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1397 the upper bits and ensures that hashing and value equality based
1398 upon the underlying HOST_WIDE_INTs works without masking. */
1399
1400 tree
1401 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1402 {
1403 tree t;
1404 int ix = -1;
1405 int limit = 0;
1406
1407 gcc_assert (type);
1408 unsigned int prec = TYPE_PRECISION (type);
1409 signop sgn = TYPE_SIGN (type);
1410
1411 /* Verify that everything is canonical. */
1412 int l = pcst.get_len ();
1413 if (l > 1)
1414 {
1415 if (pcst.elt (l - 1) == 0)
1416 gcc_checking_assert (pcst.elt (l - 2) < 0);
1417 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1418 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1419 }
1420
1421 wide_int cst = wide_int::from (pcst, prec, sgn);
1422 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1423
1424 if (ext_len == 1)
1425 {
1426 /* We just need to store a single HOST_WIDE_INT. */
1427 HOST_WIDE_INT hwi;
1428 if (TYPE_UNSIGNED (type))
1429 hwi = cst.to_uhwi ();
1430 else
1431 hwi = cst.to_shwi ();
1432
1433 switch (TREE_CODE (type))
1434 {
1435 case NULLPTR_TYPE:
1436 gcc_assert (hwi == 0);
1437 /* Fallthru. */
1438
1439 case POINTER_TYPE:
1440 case REFERENCE_TYPE:
1441 case POINTER_BOUNDS_TYPE:
1442 /* Cache NULL pointer and zero bounds. */
1443 if (hwi == 0)
1444 {
1445 limit = 1;
1446 ix = 0;
1447 }
1448 break;
1449
1450 case BOOLEAN_TYPE:
1451 /* Cache false or true. */
1452 limit = 2;
1453 if (hwi < 2)
1454 ix = hwi;
1455 break;
1456
1457 case INTEGER_TYPE:
1458 case OFFSET_TYPE:
1459 if (TYPE_SIGN (type) == UNSIGNED)
1460 {
1461 /* Cache [0, N). */
1462 limit = INTEGER_SHARE_LIMIT;
1463 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1464 ix = hwi;
1465 }
1466 else
1467 {
1468 /* Cache [-1, N). */
1469 limit = INTEGER_SHARE_LIMIT + 1;
1470 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1471 ix = hwi + 1;
1472 }
1473 break;
1474
1475 case ENUMERAL_TYPE:
1476 break;
1477
1478 default:
1479 gcc_unreachable ();
1480 }
1481
1482 if (ix >= 0)
1483 {
1484 /* Look for it in the type's vector of small shared ints. */
1485 if (!TYPE_CACHED_VALUES_P (type))
1486 {
1487 TYPE_CACHED_VALUES_P (type) = 1;
1488 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1489 }
1490
1491 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1492 if (t)
1493 /* Make sure no one is clobbering the shared constant. */
1494 gcc_checking_assert (TREE_TYPE (t) == type
1495 && TREE_INT_CST_NUNITS (t) == 1
1496 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1497 && TREE_INT_CST_EXT_NUNITS (t) == 1
1498 && TREE_INT_CST_ELT (t, 0) == hwi);
1499 else
1500 {
1501 /* Create a new shared int. */
1502 t = build_new_int_cst (type, cst);
1503 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1504 }
1505 }
1506 else
1507 {
1508 /* Use the cache of larger shared ints, using int_cst_node as
1509 a temporary. */
1510
1511 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1512 TREE_TYPE (int_cst_node) = type;
1513
1514 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1515 t = *slot;
1516 if (!t)
1517 {
1518 /* Insert this one into the hash table. */
1519 t = int_cst_node;
1520 *slot = t;
1521 /* Make a new node for next time round. */
1522 int_cst_node = make_int_cst (1, 1);
1523 }
1524 }
1525 }
1526 else
1527 {
1528 /* The value either hashes properly or we drop it on the floor
1529 for the gc to take care of. There will not be enough of them
1530 to worry about. */
1531
1532 tree nt = build_new_int_cst (type, cst);
1533 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1534 t = *slot;
1535 if (!t)
1536 {
1537 /* Insert this one into the hash table. */
1538 t = nt;
1539 *slot = t;
1540 }
1541 }
1542
1543 return t;
1544 }
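
/* A minimal usage sketch of the sharing guarantee (assuming the
   front end has already set up integer_type_node):

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);
     gcc_assert (a == b);

   Small values like this come from the per-type TYPE_CACHED_VALUES
   vector; values outside that range are interned in
   int_cst_hash_table and are therefore shared as well.  */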
1545
1546 void
1547 cache_integer_cst (tree t)
1548 {
1549 tree type = TREE_TYPE (t);
1550 int ix = -1;
1551 int limit = 0;
1552 int prec = TYPE_PRECISION (type);
1553
1554 gcc_assert (!TREE_OVERFLOW (t));
1555
1556 switch (TREE_CODE (type))
1557 {
1558 case NULLPTR_TYPE:
1559 gcc_assert (integer_zerop (t));
1560 /* Fallthru. */
1561
1562 case POINTER_TYPE:
1563 case REFERENCE_TYPE:
1564 /* Cache NULL pointer. */
1565 if (integer_zerop (t))
1566 {
1567 limit = 1;
1568 ix = 0;
1569 }
1570 break;
1571
1572 case BOOLEAN_TYPE:
1573 /* Cache false or true. */
1574 limit = 2;
1575 if (wi::ltu_p (t, 2))
1576 ix = TREE_INT_CST_ELT (t, 0);
1577 break;
1578
1579 case INTEGER_TYPE:
1580 case OFFSET_TYPE:
1581 if (TYPE_UNSIGNED (type))
1582 {
1583 /* Cache 0..N */
1584 limit = INTEGER_SHARE_LIMIT;
1585
1586 /* This is a little hokey, but if the prec is smaller than
1587 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1588 obvious test will not get the correct answer. */
1589 if (prec < HOST_BITS_PER_WIDE_INT)
1590 {
1591 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1592 ix = tree_to_uhwi (t);
1593 }
1594 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1595 ix = tree_to_uhwi (t);
1596 }
1597 else
1598 {
1599 /* Cache -1..N */
1600 limit = INTEGER_SHARE_LIMIT + 1;
1601
1602 if (integer_minus_onep (t))
1603 ix = 0;
1604 else if (!wi::neg_p (t))
1605 {
1606 if (prec < HOST_BITS_PER_WIDE_INT)
1607 {
1608 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1609 ix = tree_to_shwi (t) + 1;
1610 }
1611 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1612 ix = tree_to_shwi (t) + 1;
1613 }
1614 }
1615 break;
1616
1617 case ENUMERAL_TYPE:
1618 break;
1619
1620 default:
1621 gcc_unreachable ();
1622 }
1623
1624 if (ix >= 0)
1625 {
1626 /* Look for it in the type's vector of small shared ints. */
1627 if (!TYPE_CACHED_VALUES_P (type))
1628 {
1629 TYPE_CACHED_VALUES_P (type) = 1;
1630 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1631 }
1632
1633 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1634 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1635 }
1636 else
1637 {
1638 /* Use the cache of larger shared ints. */
1639 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1640 /* If there is already an entry for the number verify it's the
1641 same. */
1642 if (*slot)
1643 gcc_assert (wi::eq_p (tree (*slot), t));
1644 else
1645 /* Otherwise insert this one into the hash table. */
1646 *slot = t;
1647 }
1648 }
1649
1650
1651 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1652 and the rest are zeros. */
1653
1654 tree
1655 build_low_bits_mask (tree type, unsigned bits)
1656 {
1657 gcc_assert (bits <= TYPE_PRECISION (type));
1658
1659 return wide_int_to_tree (type, wi::mask (bits, false,
1660 TYPE_PRECISION (type)));
1661 }
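
/* Worked example: build_low_bits_mask (type, 3) produces the
   INTEGER_CST 7 (binary 111) in TYPE, since wi::mask with a false
   second argument sets exactly the lowest BITS bits.  */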
1662
1663 /* Checks that X is an integer constant that can be expressed in an (unsigned)
1664 HOST_WIDE_INT without loss of precision. */
1665
1666 bool
1667 cst_and_fits_in_hwi (const_tree x)
1668 {
1669 if (TREE_CODE (x) != INTEGER_CST)
1670 return false;
1671
1672 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1673 return false;
1674
1675 return TREE_INT_CST_NUNITS (x) == 1;
1676 }
1677
1678 /* Build a newly constructed VECTOR_CST node of length LEN. */
1679
1680 tree
1681 make_vector_stat (unsigned len MEM_STAT_DECL)
1682 {
1683 tree t;
1684 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1685
1686 record_node_allocation_statistics (VECTOR_CST, length);
1687
1688 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1689
1690 TREE_SET_CODE (t, VECTOR_CST);
1691 TREE_CONSTANT (t) = 1;
1692
1693 return t;
1694 }
1695
1696 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1697 are given by the array VALS. */
1698
1699 tree
1700 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1701 {
1702 int over = 0;
1703 unsigned cnt = 0;
1704 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1705 TREE_TYPE (v) = type;
1706
1707 /* Iterate through elements and check for overflow. */
1708 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1709 {
1710 tree value = vals[cnt];
1711
1712 VECTOR_CST_ELT (v, cnt) = value;
1713
1714 /* Don't crash if we get an address constant. */
1715 if (!CONSTANT_CLASS_P (value))
1716 continue;
1717
1718 over |= TREE_OVERFLOW (value);
1719 }
1720
1721 TREE_OVERFLOW (v) = over;
1722 return v;
1723 }
1724
1725 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1726 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1727
1728 tree
1729 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1730 {
1731 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1732 unsigned HOST_WIDE_INT idx, pos = 0;
1733 tree value;
1734
1735 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1736 {
1737 if (TREE_CODE (value) == VECTOR_CST)
1738 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1739 vec[pos++] = VECTOR_CST_ELT (value, i);
1740 else
1741 vec[pos++] = value;
1742 }
1743 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1744 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1745
1746 return build_vector (type, vec);
1747 }
1748
1749 /* Build a vector of type VECTYPE where all the elements are SCs. */
1750 tree
1751 build_vector_from_val (tree vectype, tree sc)
1752 {
1753 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1754
1755 if (sc == error_mark_node)
1756 return sc;
1757
1758 /* Verify that the vector type is suitable for SC. Note that there
1759 is some inconsistency in the type-system with respect to restrict
1760 qualifications of pointers. Vector types always have a main-variant
1761 element type and the qualification is applied to the vector-type.
1762 So TREE_TYPE (vector-type) does not return a properly qualified
1763 vector element-type. */
1764 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1765 TREE_TYPE (vectype)));
1766
1767 if (CONSTANT_CLASS_P (sc))
1768 {
1769 tree *v = XALLOCAVEC (tree, nunits);
1770 for (i = 0; i < nunits; ++i)
1771 v[i] = sc;
1772 return build_vector (vectype, v);
1773 }
1774 else
1775 {
1776 vec<constructor_elt, va_gc> *v;
1777 vec_alloc (v, nunits);
1778 for (i = 0; i < nunits; ++i)
1779 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1780 return build_constructor (vectype, v);
1781 }
1782 }
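
/* Informal example of the two paths above, where V4SI stands for any
   four-element integer vector type the target provides:

     build_vector_from_val (V4SI, build_int_cst (TREE_TYPE (V4SI), 5))

   yields the VECTOR_CST { 5, 5, 5, 5 }, whereas passing a
   non-constant value such as an SSA name instead of the 5 yields a
   CONSTRUCTOR with four identical elements.  */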
1783
1784 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1785 are in the vec pointed to by VALS. */
1786 tree
1787 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1788 {
1789 tree c = make_node (CONSTRUCTOR);
1790 unsigned int i;
1791 constructor_elt *elt;
1792 bool constant_p = true;
1793 bool side_effects_p = false;
1794
1795 TREE_TYPE (c) = type;
1796 CONSTRUCTOR_ELTS (c) = vals;
1797
1798 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1799 {
1800 /* Mostly ctors will have elts that don't have side-effects, so
1801 the usual case is to scan all the elements. Hence a single
1802 loop for both const and side effects, rather than one loop
1803 each (with early outs). */
1804 if (!TREE_CONSTANT (elt->value))
1805 constant_p = false;
1806 if (TREE_SIDE_EFFECTS (elt->value))
1807 side_effects_p = true;
1808 }
1809
1810 TREE_SIDE_EFFECTS (c) = side_effects_p;
1811 TREE_CONSTANT (c) = constant_p;
1812
1813 return c;
1814 }
1815
1816 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1817 INDEX and VALUE. */
1818 tree
1819 build_constructor_single (tree type, tree index, tree value)
1820 {
1821 vec<constructor_elt, va_gc> *v;
1822 constructor_elt elt = {index, value};
1823
1824 vec_alloc (v, 1);
1825 v->quick_push (elt);
1826
1827 return build_constructor (type, v);
1828 }
1829
1830
1831 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1832 are in a list pointed to by VALS. */
1833 tree
1834 build_constructor_from_list (tree type, tree vals)
1835 {
1836 tree t;
1837 vec<constructor_elt, va_gc> *v = NULL;
1838
1839 if (vals)
1840 {
1841 vec_alloc (v, list_length (vals));
1842 for (t = vals; t; t = TREE_CHAIN (t))
1843 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1844 }
1845
1846 return build_constructor (type, v);
1847 }
1848
1849 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1850 of elements, provided as index/value pairs. */
1851
1852 tree
1853 build_constructor_va (tree type, int nelts, ...)
1854 {
1855 vec<constructor_elt, va_gc> *v = NULL;
1856 va_list p;
1857
1858 va_start (p, nelts);
1859 vec_alloc (v, nelts);
1860 while (nelts--)
1861 {
1862 tree index = va_arg (p, tree);
1863 tree value = va_arg (p, tree);
1864 CONSTRUCTOR_APPEND_ELT (v, index, value);
1865 }
1866 va_end (p);
1867 return build_constructor (type, v);
1868 }
1869
1870 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1871
1872 tree
1873 build_fixed (tree type, FIXED_VALUE_TYPE f)
1874 {
1875 tree v;
1876 FIXED_VALUE_TYPE *fp;
1877
1878 v = make_node (FIXED_CST);
1879 fp = ggc_alloc<fixed_value> ();
1880 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1881
1882 TREE_TYPE (v) = type;
1883 TREE_FIXED_CST_PTR (v) = fp;
1884 return v;
1885 }
1886
1887 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1888
1889 tree
1890 build_real (tree type, REAL_VALUE_TYPE d)
1891 {
1892 tree v;
1893 REAL_VALUE_TYPE *dp;
1894 int overflow = 0;
1895
1896 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1897 Consider doing it via real_convert now. */
1898
1899 v = make_node (REAL_CST);
1900 dp = ggc_alloc<real_value> ();
1901 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1902
1903 TREE_TYPE (v) = type;
1904 TREE_REAL_CST_PTR (v) = dp;
1905 TREE_OVERFLOW (v) = overflow;
1906 return v;
1907 }
1908
1909 /* Like build_real, but first truncate D to the type. */
1910
1911 tree
1912 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1913 {
1914 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1915 }
1916
1917 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1918 node I, converted to the mode of TYPE (VOIDmode when TYPE is null). */
1919
1920 REAL_VALUE_TYPE
1921 real_value_from_int_cst (const_tree type, const_tree i)
1922 {
1923 REAL_VALUE_TYPE d;
1924
1925 /* Clear all bits of the real value type so that we can later do
1926 bitwise comparisons to see if two values are the same. */
1927 memset (&d, 0, sizeof d);
1928
1929 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1930 TYPE_SIGN (TREE_TYPE (i)));
1931 return d;
1932 }
1933
1934 /* Given a tree representing an integer constant I, return a tree
1935 representing the same value as a floating-point constant of type TYPE. */
1936
1937 tree
1938 build_real_from_int_cst (tree type, const_tree i)
1939 {
1940 tree v;
1941 int overflow = TREE_OVERFLOW (i);
1942
1943 v = build_real (type, real_value_from_int_cst (type, i));
1944
1945 TREE_OVERFLOW (v) |= overflow;
1946 return v;
1947 }
1948
1949 /* Return a newly constructed STRING_CST node whose value is
1950 the LEN characters at STR.
1951 Note that for a C string literal, LEN should include the trailing NUL.
1952 The TREE_TYPE is not initialized. */
1953
1954 tree
1955 build_string (int len, const char *str)
1956 {
1957 tree s;
1958 size_t length;
1959
1960 /* Do not waste bytes provided by padding of struct tree_string. */
1961 length = len + offsetof (struct tree_string, str) + 1;
1962
1963 record_node_allocation_statistics (STRING_CST, length);
1964
1965 s = (tree) ggc_internal_alloc (length);
1966
1967 memset (s, 0, sizeof (struct tree_typed));
1968 TREE_SET_CODE (s, STRING_CST);
1969 TREE_CONSTANT (s) = 1;
1970 TREE_STRING_LENGTH (s) = len;
1971 memcpy (s->string.str, str, len);
1972 s->string.str[len] = '\0';
1973
1974 return s;
1975 }
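
/* A minimal usage sketch: for the C literal "hi" a front end would
   call

     tree s = build_string (3, "hi");

   so that LEN counts the trailing NUL as the comment above asks, and
   would then give S its type (for instance via the C family's
   fix_string_type), since build_string leaves TREE_TYPE unset.  */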
1976
1977 /* Return a newly constructed COMPLEX_CST node whose value is
1978 specified by the real and imaginary parts REAL and IMAG.
1979 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1980 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1981
1982 tree
1983 build_complex (tree type, tree real, tree imag)
1984 {
1985 tree t = make_node (COMPLEX_CST);
1986
1987 TREE_REALPART (t) = real;
1988 TREE_IMAGPART (t) = imag;
1989 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1990 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1991 return t;
1992 }
1993
1994 /* Build a complex (inf +- 0i), such as for the result of cproj.
1995 TYPE is the complex tree type of the result. If NEG is true, the
1996 imaginary zero is negative. */
1997
1998 tree
1999 build_complex_inf (tree type, bool neg)
2000 {
2001 REAL_VALUE_TYPE rinf, rzero = dconst0;
2002
2003 real_inf (&rinf);
2004 rzero.sign = neg;
2005 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2006 build_real (TREE_TYPE (type), rzero));
2007 }
2008
2009 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2010 element is set to 1. In particular, this is 1 + i for complex types. */
2011
2012 tree
2013 build_each_one_cst (tree type)
2014 {
2015 if (TREE_CODE (type) == COMPLEX_TYPE)
2016 {
2017 tree scalar = build_one_cst (TREE_TYPE (type));
2018 return build_complex (type, scalar, scalar);
2019 }
2020 else
2021 return build_one_cst (type);
2022 }
2023
2024 /* Return a constant of arithmetic type TYPE which is the
2025 multiplicative identity of the set TYPE. */
2026
2027 tree
2028 build_one_cst (tree type)
2029 {
2030 switch (TREE_CODE (type))
2031 {
2032 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2033 case POINTER_TYPE: case REFERENCE_TYPE:
2034 case OFFSET_TYPE:
2035 return build_int_cst (type, 1);
2036
2037 case REAL_TYPE:
2038 return build_real (type, dconst1);
2039
2040 case FIXED_POINT_TYPE:
2041 /* We can only generate 1 for accum types. */
2042 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2043 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2044
2045 case VECTOR_TYPE:
2046 {
2047 tree scalar = build_one_cst (TREE_TYPE (type));
2048
2049 return build_vector_from_val (type, scalar);
2050 }
2051
2052 case COMPLEX_TYPE:
2053 return build_complex (type,
2054 build_one_cst (TREE_TYPE (type)),
2055 build_zero_cst (TREE_TYPE (type)));
2056
2057 default:
2058 gcc_unreachable ();
2059 }
2060 }
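
/* Illustrative sketch: on a hypothetical four-element integer vector
   type V4SI,

     build_one_cst (V4SI)                     -> {1, 1, 1, 1}
     build_one_cst (complex_double_type_node) -> 1.0 + 0.0i

   so the complex case really is the multiplicative identity, unlike
   build_each_one_cst above, which yields 1 + 1i.  */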
2061
2062 /* Return an integer of type TYPE containing all 1's in as much precision as
2063 it contains, or a complex or vector whose subparts are such integers. */
2064
2065 tree
2066 build_all_ones_cst (tree type)
2067 {
2068 if (TREE_CODE (type) == COMPLEX_TYPE)
2069 {
2070 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2071 return build_complex (type, scalar, scalar);
2072 }
2073 else
2074 return build_minus_one_cst (type);
2075 }
2076
2077 /* Return a constant of arithmetic type TYPE which is the
2078 opposite of the multiplicative identity of the set TYPE. */
2079
2080 tree
2081 build_minus_one_cst (tree type)
2082 {
2083 switch (TREE_CODE (type))
2084 {
2085 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2086 case POINTER_TYPE: case REFERENCE_TYPE:
2087 case OFFSET_TYPE:
2088 return build_int_cst (type, -1);
2089
2090 case REAL_TYPE:
2091 return build_real (type, dconstm1);
2092
2093 case FIXED_POINT_TYPE:
2094 /* We can only generate -1 for accum types. */

2095 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2096 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2097 TYPE_MODE (type)));
2098
2099 case VECTOR_TYPE:
2100 {
2101 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2102
2103 return build_vector_from_val (type, scalar);
2104 }
2105
2106 case COMPLEX_TYPE:
2107 return build_complex (type,
2108 build_minus_one_cst (TREE_TYPE (type)),
2109 build_zero_cst (TREE_TYPE (type)));
2110
2111 default:
2112 gcc_unreachable ();
2113 }
2114 }
2115
2116 /* Build 0 constant of type TYPE. This is used by constructor folding
2117 and thus the constant should be represented in memory by
2118 zero(es). */
2119
2120 tree
2121 build_zero_cst (tree type)
2122 {
2123 switch (TREE_CODE (type))
2124 {
2125 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2126 case POINTER_TYPE: case REFERENCE_TYPE:
2127 case OFFSET_TYPE: case NULLPTR_TYPE:
2128 return build_int_cst (type, 0);
2129
2130 case REAL_TYPE:
2131 return build_real (type, dconst0);
2132
2133 case FIXED_POINT_TYPE:
2134 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2135
2136 case VECTOR_TYPE:
2137 {
2138 tree scalar = build_zero_cst (TREE_TYPE (type));
2139
2140 return build_vector_from_val (type, scalar);
2141 }
2142
2143 case COMPLEX_TYPE:
2144 {
2145 tree zero = build_zero_cst (TREE_TYPE (type));
2146
2147 return build_complex (type, zero, zero);
2148 }
2149
2150 default:
2151 if (!AGGREGATE_TYPE_P (type))
2152 return fold_convert (type, integer_zero_node);
2153 return build_constructor (type, NULL);
2154 }
2155 }
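
/* Illustrative sketch: for scalar types this is the obvious zero
   constant; for a hypothetical RECORD_TYPE node S,

     tree z = build_zero_cst (S);

   yields an empty CONSTRUCTOR, which constructor folding treats as
   "every byte is zero".  */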
2156
2157
2158 /* Build a BINFO with LEN language slots. */
2159
2160 tree
2161 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2162 {
2163 tree t;
2164 size_t length = (offsetof (struct tree_binfo, base_binfos)
2165 + vec<tree, va_gc>::embedded_size (base_binfos));
2166
2167 record_node_allocation_statistics (TREE_BINFO, length);
2168
2169 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2170
2171 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2172
2173 TREE_SET_CODE (t, TREE_BINFO);
2174
2175 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2176
2177 return t;
2178 }
2179
2180 /* Create a CASE_LABEL_EXPR tree node and return it. */
2181
2182 tree
2183 build_case_label (tree low_value, tree high_value, tree label_decl)
2184 {
2185 tree t = make_node (CASE_LABEL_EXPR);
2186
2187 TREE_TYPE (t) = void_type_node;
2188 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2189
2190 CASE_LOW (t) = low_value;
2191 CASE_HIGH (t) = high_value;
2192 CASE_LABEL (t) = label_decl;
2193 CASE_CHAIN (t) = NULL_TREE;
2194
2195 return t;
2196 }
2197
2198 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2199 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2200 The latter determines the length of the HOST_WIDE_INT vector. */
2201
2202 tree
2203 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2204 {
2205 tree t;
2206 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2207 + sizeof (struct tree_int_cst));
2208
2209 gcc_assert (len);
2210 record_node_allocation_statistics (INTEGER_CST, length);
2211
2212 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2213
2214 TREE_SET_CODE (t, INTEGER_CST);
2215 TREE_INT_CST_NUNITS (t) = len;
2216 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2217 /* to_offset can only be applied to trees that are offset_int-sized
2218 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2219 must be exactly the precision of offset_int and so LEN is correct. */
2220 if (ext_len <= OFFSET_INT_ELTS)
2221 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2222 else
2223 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2224
2225 TREE_CONSTANT (t) = 1;
2226
2227 return t;
2228 }
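
/* Worked example (assuming a 64-bit HOST_WIDE_INT host): a call with
   LEN == 1 and EXT_LEN == 2 allocates
   sizeof (struct tree_int_cst) + 1 * sizeof (HOST_WIDE_INT) bytes,
   records NUNITS == 1 and EXT_NUNITS == 2, and, assuming EXT_LEN does
   not exceed OFFSET_INT_ELTS, also sets OFFSET_NUNITS to 2.  */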
2229
2230 /* Build a newly constructed TREE_VEC node of length LEN. */
2231
2232 tree
2233 make_tree_vec_stat (int len MEM_STAT_DECL)
2234 {
2235 tree t;
2236 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2237
2238 record_node_allocation_statistics (TREE_VEC, length);
2239
2240 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2241
2242 TREE_SET_CODE (t, TREE_VEC);
2243 TREE_VEC_LENGTH (t) = len;
2244
2245 return t;
2246 }
2247
2248 /* Grow a TREE_VEC node to new length LEN. */
2249
2250 tree
2251 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2252 {
2253 gcc_assert (TREE_CODE (v) == TREE_VEC);
2254
2255 int oldlen = TREE_VEC_LENGTH (v);
2256 gcc_assert (len > oldlen);
2257
2258 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2259 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2260
2261 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2262
2263 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2264
2265 TREE_VEC_LENGTH (v) = len;
2266
2267 return v;
2268 }
2269 \f
2270 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2271 fixed, and scalar, complex or vector. */
2272
2273 int
2274 zerop (const_tree expr)
2275 {
2276 return (integer_zerop (expr)
2277 || real_zerop (expr)
2278 || fixed_zerop (expr));
2279 }
2280
2281 /* Return 1 if EXPR is the integer constant zero or a complex or
2282 vector constant whose elements are all zero. */
2283
2284 int
2285 integer_zerop (const_tree expr)
2286 {
2287 STRIP_NOPS (expr);
2288
2289 switch (TREE_CODE (expr))
2290 {
2291 case INTEGER_CST:
2292 return wi::eq_p (expr, 0);
2293 case COMPLEX_CST:
2294 return (integer_zerop (TREE_REALPART (expr))
2295 && integer_zerop (TREE_IMAGPART (expr)));
2296 case VECTOR_CST:
2297 {
2298 unsigned i;
2299 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2300 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2301 return false;
2302 return true;
2303 }
2304 default:
2305 return false;
2306 }
2307 }
2308
2309 /* Return 1 if EXPR is the integer constant one, the corresponding
2310 complex constant, or a vector constant all of whose elements are one. */
2311
2312 int
2313 integer_onep (const_tree expr)
2314 {
2315 STRIP_NOPS (expr);
2316
2317 switch (TREE_CODE (expr))
2318 {
2319 case INTEGER_CST:
2320 return wi::eq_p (wi::to_widest (expr), 1);
2321 case COMPLEX_CST:
2322 return (integer_onep (TREE_REALPART (expr))
2323 && integer_zerop (TREE_IMAGPART (expr)));
2324 case VECTOR_CST:
2325 {
2326 unsigned i;
2327 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2328 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2329 return false;
2330 return true;
2331 }
2332 default:
2333 return false;
2334 }
2335 }
2336
2337 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2338 return 1 if every piece is the integer constant one. */
2339
2340 int
2341 integer_each_onep (const_tree expr)
2342 {
2343 STRIP_NOPS (expr);
2344
2345 if (TREE_CODE (expr) == COMPLEX_CST)
2346 return (integer_onep (TREE_REALPART (expr))
2347 && integer_onep (TREE_IMAGPART (expr)));
2348 else
2349 return integer_onep (expr);
2350 }
2351
2352 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2353 it contains, or a complex or vector whose subparts are such integers. */
2354
2355 int
2356 integer_all_onesp (const_tree expr)
2357 {
2358 STRIP_NOPS (expr);
2359
2360 if (TREE_CODE (expr) == COMPLEX_CST
2361 && integer_all_onesp (TREE_REALPART (expr))
2362 && integer_all_onesp (TREE_IMAGPART (expr)))
2363 return 1;
2364
2365 else if (TREE_CODE (expr) == VECTOR_CST)
2366 {
2367 unsigned i;
2368 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2369 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2370 return 0;
2371 return 1;
2372 }
2373
2374 else if (TREE_CODE (expr) != INTEGER_CST)
2375 return 0;
2376
2377 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2378 }
2379
2380 /* Return 1 if EXPR is the integer constant minus one. */
2381
2382 int
2383 integer_minus_onep (const_tree expr)
2384 {
2385 STRIP_NOPS (expr);
2386
2387 if (TREE_CODE (expr) == COMPLEX_CST)
2388 return (integer_all_onesp (TREE_REALPART (expr))
2389 && integer_zerop (TREE_IMAGPART (expr)));
2390 else
2391 return integer_all_onesp (expr);
2392 }
2393
2394 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2395 one bit on). */
2396
2397 int
2398 integer_pow2p (const_tree expr)
2399 {
2400 STRIP_NOPS (expr);
2401
2402 if (TREE_CODE (expr) == COMPLEX_CST
2403 && integer_pow2p (TREE_REALPART (expr))
2404 && integer_zerop (TREE_IMAGPART (expr)))
2405 return 1;
2406
2407 if (TREE_CODE (expr) != INTEGER_CST)
2408 return 0;
2409
2410 return wi::popcount (expr) == 1;
2411 }
2412
2413 /* Return 1 if EXPR is an integer constant other than zero or a
2414 complex constant other than zero. */
2415
2416 int
2417 integer_nonzerop (const_tree expr)
2418 {
2419 STRIP_NOPS (expr);
2420
2421 return ((TREE_CODE (expr) == INTEGER_CST
2422 && !wi::eq_p (expr, 0))
2423 || (TREE_CODE (expr) == COMPLEX_CST
2424 && (integer_nonzerop (TREE_REALPART (expr))
2425 || integer_nonzerop (TREE_IMAGPART (expr)))));
2426 }
2427
2428 /* Return 1 if EXPR is the integer constant one. For vectors,
2429 return 1 if every piece is the integer constant minus one
2430 (representing the value TRUE). */
2431
2432 int
2433 integer_truep (const_tree expr)
2434 {
2435 STRIP_NOPS (expr);
2436
2437 if (TREE_CODE (expr) == VECTOR_CST)
2438 return integer_all_onesp (expr);
2439 return integer_onep (expr);
2440 }
2441
2442 /* Return 1 if EXPR is the fixed-point constant zero. */
2443
2444 int
2445 fixed_zerop (const_tree expr)
2446 {
2447 return (TREE_CODE (expr) == FIXED_CST
2448 && TREE_FIXED_CST (expr).data.is_zero ());
2449 }
2450
2451 /* Return the power of two represented by a tree node known to be a
2452 power of two. */
2453
2454 int
2455 tree_log2 (const_tree expr)
2456 {
2457 STRIP_NOPS (expr);
2458
2459 if (TREE_CODE (expr) == COMPLEX_CST)
2460 return tree_log2 (TREE_REALPART (expr));
2461
2462 return wi::exact_log2 (expr);
2463 }
2464
2465 /* Similar, but return the largest integer Y such that 2 ** Y is less
2466 than or equal to EXPR. */
2467
2468 int
2469 tree_floor_log2 (const_tree expr)
2470 {
2471 STRIP_NOPS (expr);
2472
2473 if (TREE_CODE (expr) == COMPLEX_CST)
2474 return tree_log2 (TREE_REALPART (expr));
2475
2476 return wi::floor_log2 (expr);
2477 }
2478
2479 /* Return number of known trailing zero bits in EXPR, or, if the value of
2480 EXPR is known to be zero, the precision of its type. */
2481
2482 unsigned int
2483 tree_ctz (const_tree expr)
2484 {
2485 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2486 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2487 return 0;
2488
2489 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2490 switch (TREE_CODE (expr))
2491 {
2492 case INTEGER_CST:
2493 ret1 = wi::ctz (expr);
2494 return MIN (ret1, prec);
2495 case SSA_NAME:
2496 ret1 = wi::ctz (get_nonzero_bits (expr));
2497 return MIN (ret1, prec);
2498 case PLUS_EXPR:
2499 case MINUS_EXPR:
2500 case BIT_IOR_EXPR:
2501 case BIT_XOR_EXPR:
2502 case MIN_EXPR:
2503 case MAX_EXPR:
2504 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2505 if (ret1 == 0)
2506 return ret1;
2507 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2508 return MIN (ret1, ret2);
2509 case POINTER_PLUS_EXPR:
2510 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2511 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2512 /* Second operand is sizetype, which could in theory be
2513 wider than the pointer's precision. Make sure we never
2514 return more than prec. */
2515 ret2 = MIN (ret2, prec);
2516 return MIN (ret1, ret2);
2517 case BIT_AND_EXPR:
2518 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2519 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2520 return MAX (ret1, ret2);
2521 case MULT_EXPR:
2522 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2523 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2524 return MIN (ret1 + ret2, prec);
2525 case LSHIFT_EXPR:
2526 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2527 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2528 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2529 {
2530 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2531 return MIN (ret1 + ret2, prec);
2532 }
2533 return ret1;
2534 case RSHIFT_EXPR:
2535 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2536 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2537 {
2538 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2539 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2540 if (ret1 > ret2)
2541 return ret1 - ret2;
2542 }
2543 return 0;
2544 case TRUNC_DIV_EXPR:
2545 case CEIL_DIV_EXPR:
2546 case FLOOR_DIV_EXPR:
2547 case ROUND_DIV_EXPR:
2548 case EXACT_DIV_EXPR:
2549 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2550 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2551 {
2552 int l = tree_log2 (TREE_OPERAND (expr, 1));
2553 if (l >= 0)
2554 {
2555 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2556 ret2 = l;
2557 if (ret1 > ret2)
2558 return ret1 - ret2;
2559 }
2560 }
2561 return 0;
2562 CASE_CONVERT:
2563 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2564 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2565 ret1 = prec;
2566 return MIN (ret1, prec);
2567 case SAVE_EXPR:
2568 return tree_ctz (TREE_OPERAND (expr, 0));
2569 case COND_EXPR:
2570 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2571 if (ret1 == 0)
2572 return 0;
2573 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2574 return MIN (ret1, ret2);
2575 case COMPOUND_EXPR:
2576 return tree_ctz (TREE_OPERAND (expr, 1));
2577 case ADDR_EXPR:
2578 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2579 if (ret1 > BITS_PER_UNIT)
2580 {
2581 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2582 return MIN (ret1, prec);
2583 }
2584 return 0;
2585 default:
2586 return 0;
2587 }
2588 }
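
/* Worked example: if X is an SSA_NAME whose recorded nonzero bits show
   its two low bits are clear (so its ctz is 2), then for the
   hypothetical tree (X << 3) the function returns MIN (2 + 3, prec),
   and for (X & 24) it returns MAX (2, 3) = 3, since an AND can only
   clear additional low bits.  */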
2589
2590 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2591 decimal float constants, so don't return 1 for them. */
2592
2593 int
2594 real_zerop (const_tree expr)
2595 {
2596 STRIP_NOPS (expr);
2597
2598 switch (TREE_CODE (expr))
2599 {
2600 case REAL_CST:
2601 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2602 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2603 case COMPLEX_CST:
2604 return real_zerop (TREE_REALPART (expr))
2605 && real_zerop (TREE_IMAGPART (expr));
2606 case VECTOR_CST:
2607 {
2608 unsigned i;
2609 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2610 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2611 return false;
2612 return true;
2613 }
2614 default:
2615 return false;
2616 }
2617 }
2618
2619 /* Return 1 if EXPR is the real constant one in real or complex form.
2620 Trailing zeroes matter for decimal float constants, so don't return
2621 1 for them. */
2622
2623 int
2624 real_onep (const_tree expr)
2625 {
2626 STRIP_NOPS (expr);
2627
2628 switch (TREE_CODE (expr))
2629 {
2630 case REAL_CST:
2631 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2632 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2633 case COMPLEX_CST:
2634 return real_onep (TREE_REALPART (expr))
2635 && real_zerop (TREE_IMAGPART (expr));
2636 case VECTOR_CST:
2637 {
2638 unsigned i;
2639 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2640 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2641 return false;
2642 return true;
2643 }
2644 default:
2645 return false;
2646 }
2647 }
2648
2649 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2650 matter for decimal float constants, so don't return 1 for them. */
2651
2652 int
2653 real_minus_onep (const_tree expr)
2654 {
2655 STRIP_NOPS (expr);
2656
2657 switch (TREE_CODE (expr))
2658 {
2659 case REAL_CST:
2660 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2661 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2662 case COMPLEX_CST:
2663 return real_minus_onep (TREE_REALPART (expr))
2664 && real_zerop (TREE_IMAGPART (expr));
2665 case VECTOR_CST:
2666 {
2667 unsigned i;
2668 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2669 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2670 return false;
2671 return true;
2672 }
2673 default:
2674 return false;
2675 }
2676 }
2677
2678 /* Nonzero if EXP is a constant or a cast of a constant. */
2679
2680 int
2681 really_constant_p (const_tree exp)
2682 {
2683 /* This is not quite the same as STRIP_NOPS. It does more. */
2684 while (CONVERT_EXPR_P (exp)
2685 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2686 exp = TREE_OPERAND (exp, 0);
2687 return TREE_CONSTANT (exp);
2688 }
2689 \f
2690 /* Return first list element whose TREE_VALUE is ELEM.
2691 Return 0 if ELEM is not in LIST. */
2692
2693 tree
2694 value_member (tree elem, tree list)
2695 {
2696 while (list)
2697 {
2698 if (elem == TREE_VALUE (list))
2699 return list;
2700 list = TREE_CHAIN (list);
2701 }
2702 return NULL_TREE;
2703 }
2704
2705 /* Return first list element whose TREE_PURPOSE is ELEM.
2706 Return 0 if ELEM is not in LIST. */
2707
2708 tree
2709 purpose_member (const_tree elem, tree list)
2710 {
2711 while (list)
2712 {
2713 if (elem == TREE_PURPOSE (list))
2714 return list;
2715 list = TREE_CHAIN (list);
2716 }
2717 return NULL_TREE;
2718 }
2719
2720 /* Return true if ELEM is in V. */
2721
2722 bool
2723 vec_member (const_tree elem, vec<tree, va_gc> *v)
2724 {
2725 unsigned ix;
2726 tree t;
2727 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2728 if (elem == t)
2729 return true;
2730 return false;
2731 }
2732
2733 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2734 NULL_TREE. */
2735
2736 tree
2737 chain_index (int idx, tree chain)
2738 {
2739 for (; chain && idx > 0; --idx)
2740 chain = TREE_CHAIN (chain);
2741 return chain;
2742 }
2743
2744 /* Return nonzero if ELEM is part of the chain CHAIN. */
2745
2746 int
2747 chain_member (const_tree elem, const_tree chain)
2748 {
2749 while (chain)
2750 {
2751 if (elem == chain)
2752 return 1;
2753 chain = DECL_CHAIN (chain);
2754 }
2755
2756 return 0;
2757 }
2758
2759 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2760 We expect a null pointer to mark the end of the chain.
2761 This is the Lisp primitive `length'. */
2762
2763 int
2764 list_length (const_tree t)
2765 {
2766 const_tree p = t;
2767 #ifdef ENABLE_TREE_CHECKING
2768 const_tree q = t;
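  /* Q is the slow pointer of a tortoise-and-hare check: it advances at
     half the speed of P below, so if the chain is circular the two
     eventually meet and the assertion aborts instead of looping
     forever.  */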
2769 #endif
2770 int len = 0;
2771
2772 while (p)
2773 {
2774 p = TREE_CHAIN (p);
2775 #ifdef ENABLE_TREE_CHECKING
2776 if (len % 2)
2777 q = TREE_CHAIN (q);
2778 gcc_assert (p != q);
2779 #endif
2780 len++;
2781 }
2782
2783 return len;
2784 }
2785
2786 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2787 UNION_TYPE TYPE, or NULL_TREE if none. */
2788
2789 tree
2790 first_field (const_tree type)
2791 {
2792 tree t = TYPE_FIELDS (type);
2793 while (t && TREE_CODE (t) != FIELD_DECL)
2794 t = TREE_CHAIN (t);
2795 return t;
2796 }
2797
2798 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2799 by modifying the last node in chain 1 to point to chain 2.
2800 This is the Lisp primitive `nconc'. */
2801
2802 tree
2803 chainon (tree op1, tree op2)
2804 {
2805 tree t1;
2806
2807 if (!op1)
2808 return op2;
2809 if (!op2)
2810 return op1;
2811
2812 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2813 continue;
2814 TREE_CHAIN (t1) = op2;
2815
2816 #ifdef ENABLE_TREE_CHECKING
2817 {
2818 tree t2;
2819 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2820 gcc_assert (t2 != t1);
2821 }
2822 #endif
2823
2824 return op1;
2825 }
2826
2827 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2828
2829 tree
2830 tree_last (tree chain)
2831 {
2832 tree next;
2833 if (chain)
2834 while ((next = TREE_CHAIN (chain)))
2835 chain = next;
2836 return chain;
2837 }
2838
2839 /* Reverse the order of elements in the chain T,
2840 and return the new head of the chain (old last element). */
2841
2842 tree
2843 nreverse (tree t)
2844 {
2845 tree prev = 0, decl, next;
2846 for (decl = t; decl; decl = next)
2847 {
2848 /* We shouldn't be using this function to reverse BLOCK chains; we
2849 have blocks_nreverse for that. */
2850 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2851 next = TREE_CHAIN (decl);
2852 TREE_CHAIN (decl) = prev;
2853 prev = decl;
2854 }
2855 return prev;
2856 }
2857 \f
2858 /* Return a newly created TREE_LIST node whose
2859 purpose and value fields are PARM and VALUE. */
2860
2861 tree
2862 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2863 {
2864 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2865 TREE_PURPOSE (t) = parm;
2866 TREE_VALUE (t) = value;
2867 return t;
2868 }
2869
2870 /* Build a chain of TREE_LIST nodes from a vector. */
2871
2872 tree
2873 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2874 {
2875 tree ret = NULL_TREE;
2876 tree *pp = &ret;
2877 unsigned int i;
2878 tree t;
2879 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2880 {
2881 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2882 pp = &TREE_CHAIN (*pp);
2883 }
2884 return ret;
2885 }
2886
2887 /* Return a newly created TREE_LIST node whose
2888 purpose and value fields are PURPOSE and VALUE
2889 and whose TREE_CHAIN is CHAIN. */
2890
2891 tree
2892 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2893 {
2894 tree node;
2895
2896 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2897 memset (node, 0, sizeof (struct tree_common));
2898
2899 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2900
2901 TREE_SET_CODE (node, TREE_LIST);
2902 TREE_CHAIN (node) = chain;
2903 TREE_PURPOSE (node) = purpose;
2904 TREE_VALUE (node) = value;
2905 return node;
2906 }
2907
2908 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2909 trees. */
2910
2911 vec<tree, va_gc> *
2912 ctor_to_vec (tree ctor)
2913 {
2914 vec<tree, va_gc> *vec;
2915 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2916 unsigned int ix;
2917 tree val;
2918
2919 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2920 vec->quick_push (val);
2921
2922 return vec;
2923 }
2924 \f
2925 /* Return the size nominally occupied by an object of type TYPE
2926 when it resides in memory. The value is measured in units of bytes,
2927 and its data type is that normally used for type sizes
2928 (which is the first type created by make_signed_type or
2929 make_unsigned_type). */
2930
2931 tree
2932 size_in_bytes (const_tree type)
2933 {
2934 tree t;
2935
2936 if (type == error_mark_node)
2937 return integer_zero_node;
2938
2939 type = TYPE_MAIN_VARIANT (type);
2940 t = TYPE_SIZE_UNIT (type);
2941
2942 if (t == 0)
2943 {
2944 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2945 return size_zero_node;
2946 }
2947
2948 return t;
2949 }
2950
2951 /* Return the size of TYPE (in bytes) as a wide integer
2952 or return -1 if the size can vary or is larger than an integer. */
2953
2954 HOST_WIDE_INT
2955 int_size_in_bytes (const_tree type)
2956 {
2957 tree t;
2958
2959 if (type == error_mark_node)
2960 return 0;
2961
2962 type = TYPE_MAIN_VARIANT (type);
2963 t = TYPE_SIZE_UNIT (type);
2964
2965 if (t && tree_fits_uhwi_p (t))
2966 return TREE_INT_CST_LOW (t);
2967 else
2968 return -1;
2969 }
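
/* Illustrative note: int_size_in_bytes (integer_type_node) yields the
   target's int size in bytes (typically 4), while a variable-length
   array type or an incomplete type has no constant TYPE_SIZE_UNIT and
   yields -1, so callers must handle that sentinel.  */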
2970
2971 /* Return the maximum size of TYPE (in bytes) as a wide integer
2972 or return -1 if the size can vary or is larger than an integer. */
2973
2974 HOST_WIDE_INT
2975 max_int_size_in_bytes (const_tree type)
2976 {
2977 HOST_WIDE_INT size = -1;
2978 tree size_tree;
2979
2980 /* If this is an array type, check for a possible MAX_SIZE attached. */
2981
2982 if (TREE_CODE (type) == ARRAY_TYPE)
2983 {
2984 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2985
2986 if (size_tree && tree_fits_uhwi_p (size_tree))
2987 size = tree_to_uhwi (size_tree);
2988 }
2989
2990 /* If we still haven't been able to get a size, see if the language
2991 can compute a maximum size. */
2992
2993 if (size == -1)
2994 {
2995 size_tree = lang_hooks.types.max_size (type);
2996
2997 if (size_tree && tree_fits_uhwi_p (size_tree))
2998 size = tree_to_uhwi (size_tree);
2999 }
3000
3001 return size;
3002 }
3003 \f
3004 /* Return the bit position of FIELD, in bits from the start of the record.
3005 This is a tree of type bitsizetype. */
3006
3007 tree
3008 bit_position (const_tree field)
3009 {
3010 return bit_from_pos (DECL_FIELD_OFFSET (field),
3011 DECL_FIELD_BIT_OFFSET (field));
3012 }
3013 \f
3014 /* Return the byte position of FIELD, in bytes from the start of the record.
3015 This is a tree of type sizetype. */
3016
3017 tree
3018 byte_position (const_tree field)
3019 {
3020 return byte_from_pos (DECL_FIELD_OFFSET (field),
3021 DECL_FIELD_BIT_OFFSET (field));
3022 }
3023
3024 /* Likewise, but return as an integer. It must be representable in
3025 that way (since it could be a signed value, we don't have the
3026 option of returning -1 like int_size_in_bytes can). */
3027
3028 HOST_WIDE_INT
3029 int_byte_position (const_tree field)
3030 {
3031 return tree_to_shwi (byte_position (field));
3032 }
3033 \f
3034 /* Return the strictest alignment, in bits, that T is known to have. */
3035
3036 unsigned int
3037 expr_align (const_tree t)
3038 {
3039 unsigned int align0, align1;
3040
3041 switch (TREE_CODE (t))
3042 {
3043 CASE_CONVERT: case NON_LVALUE_EXPR:
3044 /* If we have conversions, we know that the alignment of the
3045 object must meet each of the alignments of the types. */
3046 align0 = expr_align (TREE_OPERAND (t, 0));
3047 align1 = TYPE_ALIGN (TREE_TYPE (t));
3048 return MAX (align0, align1);
3049
3050 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3051 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3052 case CLEANUP_POINT_EXPR:
3053 /* These don't change the alignment of an object. */
3054 return expr_align (TREE_OPERAND (t, 0));
3055
3056 case COND_EXPR:
3057 /* The best we can do is say that the alignment is the least aligned
3058 of the two arms. */
3059 align0 = expr_align (TREE_OPERAND (t, 1));
3060 align1 = expr_align (TREE_OPERAND (t, 2));
3061 return MIN (align0, align1);
3062
3063 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3064 meaningfully, it's always 1. */
3065 case LABEL_DECL: case CONST_DECL:
3066 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3067 case FUNCTION_DECL:
3068 gcc_assert (DECL_ALIGN (t) != 0);
3069 return DECL_ALIGN (t);
3070
3071 default:
3072 break;
3073 }
3074
3075 /* Otherwise take the alignment from that of the type. */
3076 return TYPE_ALIGN (TREE_TYPE (t));
3077 }
3078 \f
3079 /* Return, as a tree node, the number of elements for TYPE (which is an
3080 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3081
3082 tree
3083 array_type_nelts (const_tree type)
3084 {
3085 tree index_type, min, max;
3086
3087 /* If they did it with unspecified bounds, then we should have already
3088 given an error about it before we got here. */
3089 if (! TYPE_DOMAIN (type))
3090 return error_mark_node;
3091
3092 index_type = TYPE_DOMAIN (type);
3093 min = TYPE_MIN_VALUE (index_type);
3094 max = TYPE_MAX_VALUE (index_type);
3095
3096 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3097 if (!max)
3098 return error_mark_node;
3099
3100 return (integer_zerop (min)
3101 ? max
3102 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3103 }
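
/* Illustrative note: for a type corresponding to "int a[5]" the domain
   is [0, 4], so array_type_nelts returns the INTEGER_CST 4, i.e. the
   element count minus one; callers wanting the count itself must add
   one.  */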
3104 \f
3105 /* If arg is static -- a reference to an object in static storage -- then
3106 return the object. This is not the same as the C meaning of `static'.
3107 If arg isn't static, return NULL. */
3108
3109 tree
3110 staticp (tree arg)
3111 {
3112 switch (TREE_CODE (arg))
3113 {
3114 case FUNCTION_DECL:
3115 /* Nested functions are static, even though taking their address will
3116 involve a trampoline as we unnest the nested function and create
3117 the trampoline on the tree level. */
3118 return arg;
3119
3120 case VAR_DECL:
3121 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3122 && ! DECL_THREAD_LOCAL_P (arg)
3123 && ! DECL_DLLIMPORT_P (arg)
3124 ? arg : NULL);
3125
3126 case CONST_DECL:
3127 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3128 ? arg : NULL);
3129
3130 case CONSTRUCTOR:
3131 return TREE_STATIC (arg) ? arg : NULL;
3132
3133 case LABEL_DECL:
3134 case STRING_CST:
3135 return arg;
3136
3137 case COMPONENT_REF:
3138 /* If the thing being referenced is not a field, then it is
3139 something language specific. */
3140 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3141
3142 /* If we are referencing a bitfield, we can't evaluate an
3143 ADDR_EXPR at compile time and so it isn't a constant. */
3144 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3145 return NULL;
3146
3147 return staticp (TREE_OPERAND (arg, 0));
3148
3149 case BIT_FIELD_REF:
3150 return NULL;
3151
3152 case INDIRECT_REF:
3153 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3154
3155 case ARRAY_REF:
3156 case ARRAY_RANGE_REF:
3157 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3158 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3159 return staticp (TREE_OPERAND (arg, 0));
3160 else
3161 return NULL;
3162
3163 case COMPOUND_LITERAL_EXPR:
3164 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3165
3166 default:
3167 return NULL;
3168 }
3169 }
3170
3171 \f
3172
3173
3174 /* Return whether OP is a DECL whose address is function-invariant. */
3175
3176 bool
3177 decl_address_invariant_p (const_tree op)
3178 {
3179 /* The conditions below are slightly less strict than those in
3180 staticp. */
3181
3182 switch (TREE_CODE (op))
3183 {
3184 case PARM_DECL:
3185 case RESULT_DECL:
3186 case LABEL_DECL:
3187 case FUNCTION_DECL:
3188 return true;
3189
3190 case VAR_DECL:
3191 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3192 || DECL_THREAD_LOCAL_P (op)
3193 || DECL_CONTEXT (op) == current_function_decl
3194 || decl_function_context (op) == current_function_decl)
3195 return true;
3196 break;
3197
3198 case CONST_DECL:
3199 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3200 || decl_function_context (op) == current_function_decl)
3201 return true;
3202 break;
3203
3204 default:
3205 break;
3206 }
3207
3208 return false;
3209 }
3210
3211 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3212
3213 bool
3214 decl_address_ip_invariant_p (const_tree op)
3215 {
3216 /* The conditions below are slightly less strict than those in
3217 staticp. */
3218
3219 switch (TREE_CODE (op))
3220 {
3221 case LABEL_DECL:
3222 case FUNCTION_DECL:
3223 case STRING_CST:
3224 return true;
3225
3226 case VAR_DECL:
3227 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3228 && !DECL_DLLIMPORT_P (op))
3229 || DECL_THREAD_LOCAL_P (op))
3230 return true;
3231 break;
3232
3233 case CONST_DECL:
3234 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3235 return true;
3236 break;
3237
3238 default:
3239 break;
3240 }
3241
3242 return false;
3243 }
3244
3245
3246 /* Return true if T is function-invariant (internal function, does
3247 not handle arithmetic; that's handled in skip_simple_arithmetic and
3248 tree_invariant_p). */
3249
3250 static bool tree_invariant_p (tree t);
3251
3252 static bool
3253 tree_invariant_p_1 (tree t)
3254 {
3255 tree op;
3256
3257 if (TREE_CONSTANT (t)
3258 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3259 return true;
3260
3261 switch (TREE_CODE (t))
3262 {
3263 case SAVE_EXPR:
3264 return true;
3265
3266 case ADDR_EXPR:
3267 op = TREE_OPERAND (t, 0);
3268 while (handled_component_p (op))
3269 {
3270 switch (TREE_CODE (op))
3271 {
3272 case ARRAY_REF:
3273 case ARRAY_RANGE_REF:
3274 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3275 || TREE_OPERAND (op, 2) != NULL_TREE
3276 || TREE_OPERAND (op, 3) != NULL_TREE)
3277 return false;
3278 break;
3279
3280 case COMPONENT_REF:
3281 if (TREE_OPERAND (op, 2) != NULL_TREE)
3282 return false;
3283 break;
3284
3285 default:;
3286 }
3287 op = TREE_OPERAND (op, 0);
3288 }
3289
3290 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3291
3292 default:
3293 break;
3294 }
3295
3296 return false;
3297 }
3298
3299 /* Return true if T is function-invariant. */
3300
3301 static bool
3302 tree_invariant_p (tree t)
3303 {
3304 tree inner = skip_simple_arithmetic (t);
3305 return tree_invariant_p_1 (inner);
3306 }
3307
3308 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3309 Do this to any expression which may be used in more than one place,
3310 but must be evaluated only once.
3311
3312 Normally, expand_expr would reevaluate the expression each time.
3313 Calling save_expr produces something that is evaluated and recorded
3314 the first time expand_expr is called on it. Subsequent calls to
3315 expand_expr just reuse the recorded value.
3316
3317 The call to expand_expr that generates code that actually computes
3318 the value is the first call *at compile time*. Subsequent calls
3319 *at compile time* generate code to use the saved value.
3320 This produces the correct result provided that *at run time* control
3321 always flows through the insns made by the first expand_expr
3322 before reaching the other places where the save_expr was evaluated.
3323 You, the caller of save_expr, must make sure this is so.
3324
3325 Constants, and certain read-only nodes, are returned with no
3326 SAVE_EXPR because that is safe. Expressions containing placeholders
3327 are not touched; see tree.def for an explanation of what these
3328 are used for. */
3329
3330 tree
3331 save_expr (tree expr)
3332 {
3333 tree t = fold (expr);
3334 tree inner;
3335
3336 /* If the tree evaluates to a constant, then we don't want to hide that
3337 fact (i.e. this allows further folding, and direct checks for constants).
3338 However, a read-only object that has side effects cannot be bypassed.
3339 Since it is no problem to reevaluate literals, we just return the
3340 literal node. */
3341 inner = skip_simple_arithmetic (t);
3342 if (TREE_CODE (inner) == ERROR_MARK)
3343 return inner;
3344
3345 if (tree_invariant_p_1 (inner))
3346 return t;
3347
3348 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3349 it means that the size or offset of some field of an object depends on
3350 the value within another field.
3351
3352 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3353 and some variable since it would then need to be both evaluated once and
3354 evaluated more than once. Front-ends must ensure this case cannot
3355 happen by surrounding any such subexpressions in their own SAVE_EXPR
3356 and forcing evaluation at the proper time. */
3357 if (contains_placeholder_p (inner))
3358 return t;
3359
3360 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3361 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3362
3363 /* This expression might be placed ahead of a jump to ensure that the
3364 value was computed on both sides of the jump. So make sure it isn't
3365 eliminated as dead. */
3366 TREE_SIDE_EFFECTS (t) = 1;
3367 return t;
3368 }
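
/* Illustrative sketch: for a hypothetical addition A + B of two
   variables,

     tree t = save_expr (a_plus_b);

   wraps the sum in a single SAVE_EXPR, so every later use of T expands
   the addition only once; calling save_expr on a constant or on an
   existing SAVE_EXPR effectively returns the argument itself.  */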
3369
3370 /* Look inside EXPR into any simple arithmetic operations. Return the
3371 outermost non-arithmetic or non-invariant node. */
3372
3373 tree
3374 skip_simple_arithmetic (tree expr)
3375 {
3376 /* We don't care about whether this can be used as an lvalue in this
3377 context. */
3378 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3379 expr = TREE_OPERAND (expr, 0);
3380
3381 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3382 a constant, it will be more efficient to not make another SAVE_EXPR since
3383 it will allow better simplification and GCSE will be able to merge the
3384 computations if they actually occur. */
3385 while (true)
3386 {
3387 if (UNARY_CLASS_P (expr))
3388 expr = TREE_OPERAND (expr, 0);
3389 else if (BINARY_CLASS_P (expr))
3390 {
3391 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3392 expr = TREE_OPERAND (expr, 0);
3393 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3394 expr = TREE_OPERAND (expr, 1);
3395 else
3396 break;
3397 }
3398 else
3399 break;
3400 }
3401
3402 return expr;
3403 }
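
/* Illustrative sketch: for a hypothetical tree (SAVE_EXPR <x>) * 4 the
   constant operand is invariant, so the loop above steps into the
   multiplication and returns the SAVE_EXPR; for a product of two
   arbitrary variables neither operand is invariant and the MULT_EXPR
   itself is returned.  */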
3404
3405 /* Look inside EXPR into simple arithmetic operations involving constants.
3406 Return the outermost non-arithmetic or non-constant node. */
3407
3408 tree
3409 skip_simple_constant_arithmetic (tree expr)
3410 {
3411 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3412 expr = TREE_OPERAND (expr, 0);
3413
3414 while (true)
3415 {
3416 if (UNARY_CLASS_P (expr))
3417 expr = TREE_OPERAND (expr, 0);
3418 else if (BINARY_CLASS_P (expr))
3419 {
3420 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3421 expr = TREE_OPERAND (expr, 0);
3422 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3423 expr = TREE_OPERAND (expr, 1);
3424 else
3425 break;
3426 }
3427 else
3428 break;
3429 }
3430
3431 return expr;
3432 }
3433
3434 /* Return which tree structure is used by T. */
3435
3436 enum tree_node_structure_enum
3437 tree_node_structure (const_tree t)
3438 {
3439 const enum tree_code code = TREE_CODE (t);
3440 return tree_node_structure_for_code (code);
3441 }
3442
3443 /* Set various status flags when building a CALL_EXPR object T. */
3444
3445 static void
3446 process_call_operands (tree t)
3447 {
3448 bool side_effects = TREE_SIDE_EFFECTS (t);
3449 bool read_only = false;
3450 int i = call_expr_flags (t);
3451
3452 /* Calls have side-effects, except those to const or pure functions. */
3453 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3454 side_effects = true;
3455 /* Propagate TREE_READONLY of arguments for const functions. */
3456 if (i & ECF_CONST)
3457 read_only = true;
3458
3459 if (!side_effects || read_only)
3460 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3461 {
3462 tree op = TREE_OPERAND (t, i);
3463 if (op && TREE_SIDE_EFFECTS (op))
3464 side_effects = true;
3465 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3466 read_only = false;
3467 }
3468
3469 TREE_SIDE_EFFECTS (t) = side_effects;
3470 TREE_READONLY (t) = read_only;
3471 }
3472 \f
3473 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3474 size or offset that depends on a field within a record. */
3475
3476 bool
3477 contains_placeholder_p (const_tree exp)
3478 {
3479 enum tree_code code;
3480
3481 if (!exp)
3482 return 0;
3483
3484 code = TREE_CODE (exp);
3485 if (code == PLACEHOLDER_EXPR)
3486 return 1;
3487
3488 switch (TREE_CODE_CLASS (code))
3489 {
3490 case tcc_reference:
3491 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3492 position computations since they will be converted into a
3493 WITH_RECORD_EXPR involving the reference, which we assume
3494 here to be valid. */
3495 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3496
3497 case tcc_exceptional:
3498 if (code == TREE_LIST)
3499 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3500 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3501 break;
3502
3503 case tcc_unary:
3504 case tcc_binary:
3505 case tcc_comparison:
3506 case tcc_expression:
3507 switch (code)
3508 {
3509 case COMPOUND_EXPR:
3510 /* Ignoring the first operand isn't quite right, but works best. */
3511 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3512
3513 case COND_EXPR:
3514 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3515 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3516 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3517
3518 case SAVE_EXPR:
3519 /* The save_expr function never wraps anything containing
3520 a PLACEHOLDER_EXPR. */
3521 return 0;
3522
3523 default:
3524 break;
3525 }
3526
3527 switch (TREE_CODE_LENGTH (code))
3528 {
3529 case 1:
3530 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3531 case 2:
3532 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3533 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3534 default:
3535 return 0;
3536 }
3537
3538 case tcc_vl_exp:
3539 switch (code)
3540 {
3541 case CALL_EXPR:
3542 {
3543 const_tree arg;
3544 const_call_expr_arg_iterator iter;
3545 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3546 if (CONTAINS_PLACEHOLDER_P (arg))
3547 return 1;
3548 return 0;
3549 }
3550 default:
3551 return 0;
3552 }
3553
3554 default:
3555 return 0;
3556 }
3557 return 0;
3558 }
3559
3560 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3561 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3562 field positions. */
3563
3564 static bool
3565 type_contains_placeholder_1 (const_tree type)
3566 {
3567 /* If the size contains a placeholder or the parent type (the component
3568 type in the case of arrays) involves a placeholder, this type does. */
3569 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3570 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3571 || (!POINTER_TYPE_P (type)
3572 && TREE_TYPE (type)
3573 && type_contains_placeholder_p (TREE_TYPE (type))))
3574 return true;
3575
3576 /* Now do type-specific checks. Note that the last part of the check above
3577 greatly limits what we have to do below. */
3578 switch (TREE_CODE (type))
3579 {
3580 case VOID_TYPE:
3581 case POINTER_BOUNDS_TYPE:
3582 case COMPLEX_TYPE:
3583 case ENUMERAL_TYPE:
3584 case BOOLEAN_TYPE:
3585 case POINTER_TYPE:
3586 case OFFSET_TYPE:
3587 case REFERENCE_TYPE:
3588 case METHOD_TYPE:
3589 case FUNCTION_TYPE:
3590 case VECTOR_TYPE:
3591 case NULLPTR_TYPE:
3592 return false;
3593
3594 case INTEGER_TYPE:
3595 case REAL_TYPE:
3596 case FIXED_POINT_TYPE:
3597 /* Here we just check the bounds. */
3598 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3599 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3600
3601 case ARRAY_TYPE:
3602 /* We have already checked the component type above, so just check the
3603 domain type. */
3604 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3605
3606 case RECORD_TYPE:
3607 case UNION_TYPE:
3608 case QUAL_UNION_TYPE:
3609 {
3610 tree field;
3611
3612 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3613 if (TREE_CODE (field) == FIELD_DECL
3614 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3615 || (TREE_CODE (type) == QUAL_UNION_TYPE
3616 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3617 || type_contains_placeholder_p (TREE_TYPE (field))))
3618 return true;
3619
3620 return false;
3621 }
3622
3623 default:
3624 gcc_unreachable ();
3625 }
3626 }
3627
3628 /* Wrapper around above function used to cache its result. */
3629
3630 bool
3631 type_contains_placeholder_p (tree type)
3632 {
3633 bool result;
3634
3635 /* If the contains_placeholder_bits field has been initialized,
3636 then we know the answer. */
3637 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3638 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3639
3640 /* Indicate that we've seen this type node, and the answer is false.
3641 This is what we want to return if we run into recursion via fields. */
3642 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3643
3644 /* Compute the real value. */
3645 result = type_contains_placeholder_1 (type);
3646
3647 /* Store the real value. */
3648 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3649
3650 return result;
3651 }
3652 \f
3653 /* Push tree EXP onto vector QUEUE if it is not already present. */
3654
3655 static void
3656 push_without_duplicates (tree exp, vec<tree> *queue)
3657 {
3658 unsigned int i;
3659 tree iter;
3660
3661 FOR_EACH_VEC_ELT (*queue, i, iter)
3662 if (simple_cst_equal (iter, exp) == 1)
3663 break;
3664
3665 if (!iter)
3666 queue->safe_push (exp);
3667 }
3668
3669 /* Given a tree EXP, find all occurrences of references to fields
3670 in a PLACEHOLDER_EXPR and place them in vector REFS without
3671 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3672 we assume here that EXP contains only arithmetic expressions
3673 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3674 argument list. */
3675
3676 void
3677 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3678 {
3679 enum tree_code code = TREE_CODE (exp);
3680 tree inner;
3681 int i;
3682
3683 /* We handle TREE_LIST and COMPONENT_REF separately. */
3684 if (code == TREE_LIST)
3685 {
3686 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3687 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3688 }
3689 else if (code == COMPONENT_REF)
3690 {
3691 for (inner = TREE_OPERAND (exp, 0);
3692 REFERENCE_CLASS_P (inner);
3693 inner = TREE_OPERAND (inner, 0))
3694 ;
3695
3696 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3697 push_without_duplicates (exp, refs);
3698 else
3699 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3700 }
3701 else
3702 switch (TREE_CODE_CLASS (code))
3703 {
3704 case tcc_constant:
3705 break;
3706
3707 case tcc_declaration:
3708 /* Variables allocated to static storage can stay. */
3709 if (!TREE_STATIC (exp))
3710 push_without_duplicates (exp, refs);
3711 break;
3712
3713 case tcc_expression:
3714 /* This is the pattern built in ada/make_aligning_type. */
3715 if (code == ADDR_EXPR
3716 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3717 {
3718 push_without_duplicates (exp, refs);
3719 break;
3720 }
3721
3722 /* Fall through... */
3723
3724 case tcc_exceptional:
3725 case tcc_unary:
3726 case tcc_binary:
3727 case tcc_comparison:
3728 case tcc_reference:
3729 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3730 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3731 break;
3732
3733 case tcc_vl_exp:
3734 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3735 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3736 break;
3737
3738 default:
3739 gcc_unreachable ();
3740 }
3741 }
3742
3743 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3744 return a tree with all occurrences of references to F in a
3745 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3746 CONST_DECLs. Note that we assume here that EXP contains only
3747 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3748 occurring only in their argument list. */
3749
3750 tree
3751 substitute_in_expr (tree exp, tree f, tree r)
3752 {
3753 enum tree_code code = TREE_CODE (exp);
3754 tree op0, op1, op2, op3;
3755 tree new_tree;
3756
3757 /* We handle TREE_LIST and COMPONENT_REF separately. */
3758 if (code == TREE_LIST)
3759 {
3760 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3761 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3762 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3763 return exp;
3764
3765 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3766 }
3767 else if (code == COMPONENT_REF)
3768 {
3769 tree inner;
3770
3771 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3772 and it is the right field, replace it with R. */
3773 for (inner = TREE_OPERAND (exp, 0);
3774 REFERENCE_CLASS_P (inner);
3775 inner = TREE_OPERAND (inner, 0))
3776 ;
3777
3778 /* The field. */
3779 op1 = TREE_OPERAND (exp, 1);
3780
3781 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3782 return r;
3783
3784 /* If this expression hasn't been completed yet, leave it alone. */
3785 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3786 return exp;
3787
3788 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3789 if (op0 == TREE_OPERAND (exp, 0))
3790 return exp;
3791
3792 new_tree
3793 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3794 }
3795 else
3796 switch (TREE_CODE_CLASS (code))
3797 {
3798 case tcc_constant:
3799 return exp;
3800
3801 case tcc_declaration:
3802 if (exp == f)
3803 return r;
3804 else
3805 return exp;
3806
3807 case tcc_expression:
3808 if (exp == f)
3809 return r;
3810
3811 /* Fall through... */
3812
3813 case tcc_exceptional:
3814 case tcc_unary:
3815 case tcc_binary:
3816 case tcc_comparison:
3817 case tcc_reference:
3818 switch (TREE_CODE_LENGTH (code))
3819 {
3820 case 0:
3821 return exp;
3822
3823 case 1:
3824 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3825 if (op0 == TREE_OPERAND (exp, 0))
3826 return exp;
3827
3828 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3829 break;
3830
3831 case 2:
3832 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3833 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3834
3835 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3836 return exp;
3837
3838 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3839 break;
3840
3841 case 3:
3842 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3843 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3844 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3845
3846 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3847 && op2 == TREE_OPERAND (exp, 2))
3848 return exp;
3849
3850 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3851 break;
3852
3853 case 4:
3854 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3855 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3856 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3857 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3858
3859 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3860 && op2 == TREE_OPERAND (exp, 2)
3861 && op3 == TREE_OPERAND (exp, 3))
3862 return exp;
3863
3864 new_tree
3865 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3866 break;
3867
3868 default:
3869 gcc_unreachable ();
3870 }
3871 break;
3872
3873 case tcc_vl_exp:
3874 {
3875 int i;
3876
3877 new_tree = NULL_TREE;
3878
3879 /* If we are trying to replace F with a constant, inline back
3880 functions that do nothing but compute a value from the
3881 arguments they are passed. This makes it possible to fold
3882 the replacement expression partially or entirely. */
3883 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3884 {
3885 tree t = maybe_inline_call_in_expr (exp);
3886 if (t)
3887 return SUBSTITUTE_IN_EXPR (t, f, r);
3888 }
3889
3890 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3891 {
3892 tree op = TREE_OPERAND (exp, i);
3893 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3894 if (new_op != op)
3895 {
3896 if (!new_tree)
3897 new_tree = copy_node (exp);
3898 TREE_OPERAND (new_tree, i) = new_op;
3899 }
3900 }
3901
3902 if (new_tree)
3903 {
3904 new_tree = fold (new_tree);
3905 if (TREE_CODE (new_tree) == CALL_EXPR)
3906 process_call_operands (new_tree);
3907 }
3908 else
3909 return exp;
3910 }
3911 break;
3912
3913 default:
3914 gcc_unreachable ();
3915 }
3916
3917 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3918
3919 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3920 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3921
3922 return new_tree;
3923 }
3924
3925 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3926 for it within OBJ, a tree that is an object or a chain of references. */
3927
3928 tree
3929 substitute_placeholder_in_expr (tree exp, tree obj)
3930 {
3931 enum tree_code code = TREE_CODE (exp);
3932 tree op0, op1, op2, op3;
3933 tree new_tree;
3934
3935 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3936 in the chain of OBJ. */
3937 if (code == PLACEHOLDER_EXPR)
3938 {
3939 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3940 tree elt;
3941
3942 for (elt = obj; elt != 0;
3943 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3944 || TREE_CODE (elt) == COND_EXPR)
3945 ? TREE_OPERAND (elt, 1)
3946 : (REFERENCE_CLASS_P (elt)
3947 || UNARY_CLASS_P (elt)
3948 || BINARY_CLASS_P (elt)
3949 || VL_EXP_CLASS_P (elt)
3950 || EXPRESSION_CLASS_P (elt))
3951 ? TREE_OPERAND (elt, 0) : 0))
3952 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3953 return elt;
3954
3955 for (elt = obj; elt != 0;
3956 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3957 || TREE_CODE (elt) == COND_EXPR)
3958 ? TREE_OPERAND (elt, 1)
3959 : (REFERENCE_CLASS_P (elt)
3960 || UNARY_CLASS_P (elt)
3961 || BINARY_CLASS_P (elt)
3962 || VL_EXP_CLASS_P (elt)
3963 || EXPRESSION_CLASS_P (elt))
3964 ? TREE_OPERAND (elt, 0) : 0))
3965 if (POINTER_TYPE_P (TREE_TYPE (elt))
3966 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3967 == need_type))
3968 return fold_build1 (INDIRECT_REF, need_type, elt);
3969
3970 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3971 survives until RTL generation, there will be an error. */
3972 return exp;
3973 }
3974
3975 /* TREE_LIST is special because we need to look at TREE_VALUE
3976 and TREE_CHAIN, not TREE_OPERANDS. */
3977 else if (code == TREE_LIST)
3978 {
3979 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3980 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3981 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3982 return exp;
3983
3984 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3985 }
3986 else
3987 switch (TREE_CODE_CLASS (code))
3988 {
3989 case tcc_constant:
3990 case tcc_declaration:
3991 return exp;
3992
3993 case tcc_exceptional:
3994 case tcc_unary:
3995 case tcc_binary:
3996 case tcc_comparison:
3997 case tcc_expression:
3998 case tcc_reference:
3999 case tcc_statement:
4000 switch (TREE_CODE_LENGTH (code))
4001 {
4002 case 0:
4003 return exp;
4004
4005 case 1:
4006 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4007 if (op0 == TREE_OPERAND (exp, 0))
4008 return exp;
4009
4010 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4011 break;
4012
4013 case 2:
4014 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4015 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4016
4017 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4018 return exp;
4019
4020 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4021 break;
4022
4023 case 3:
4024 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4025 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4026 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4027
4028 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4029 && op2 == TREE_OPERAND (exp, 2))
4030 return exp;
4031
4032 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4033 break;
4034
4035 case 4:
4036 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4037 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4038 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4039 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4040
4041 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4042 && op2 == TREE_OPERAND (exp, 2)
4043 && op3 == TREE_OPERAND (exp, 3))
4044 return exp;
4045
4046 new_tree
4047 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4048 break;
4049
4050 default:
4051 gcc_unreachable ();
4052 }
4053 break;
4054
4055 case tcc_vl_exp:
4056 {
4057 int i;
4058
4059 new_tree = NULL_TREE;
4060
4061 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4062 {
4063 tree op = TREE_OPERAND (exp, i);
4064 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4065 if (new_op != op)
4066 {
4067 if (!new_tree)
4068 new_tree = copy_node (exp);
4069 TREE_OPERAND (new_tree, i) = new_op;
4070 }
4071 }
4072
4073 if (new_tree)
4074 {
4075 new_tree = fold (new_tree);
4076 if (TREE_CODE (new_tree) == CALL_EXPR)
4077 process_call_operands (new_tree);
4078 }
4079 else
4080 return exp;
4081 }
4082 break;
4083
4084 default:
4085 gcc_unreachable ();
4086 }
4087
4088 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4089
4090 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4091 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4092
4093 return new_tree;
4094 }
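/* Illustrative sketch (not part of GCC; FIELD and OBJ are hypothetical):
   a typical use of the machinery above is resolving a self-referential
   field size once a concrete object is known, e.g. for a discriminated
   record as produced by the Ada front end:

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_SIZE (field), obj);

   If DECL_SIZE (field) contains no PLACEHOLDER_EXPR, the original tree is
   returned unchanged; otherwise, roughly speaking, every PLACEHOLDER_EXPR
   of OBJ's type is replaced by OBJ and the result is re-folded as done
   above.  */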
4095 \f
4096
4097 /* Subroutine of stabilize_reference; this is called for subtrees of
4098 references. Any expression with side-effects must be put in a SAVE_EXPR
4099 to ensure that it is only evaluated once.
4100
4101 We don't put SAVE_EXPR nodes around everything, because assigning very
4102 simple expressions to temporaries causes us to miss good opportunities
4103 for optimizations. Among other things, the opportunity to fold in the
4104 addition of a constant into an addressing mode often gets lost, e.g.
4105 "y[i+1] += x;". In general, we take the approach that we should not make
4106 an assignment unless we are forced into it - i.e., that any non-side effect
4107 operator should be allowed, and that cse should take care of coalescing
4108 multiple utterances of the same expression should that prove fruitful. */
4109
4110 static tree
4111 stabilize_reference_1 (tree e)
4112 {
4113 tree result;
4114 enum tree_code code = TREE_CODE (e);
4115
4116 /* We cannot ignore const expressions because it might be a reference
4117 to a const array whose index contains side-effects. But we can
4118 ignore things that are actually constant or that have already been
4119 handled by this function. */
4120
4121 if (tree_invariant_p (e))
4122 return e;
4123
4124 switch (TREE_CODE_CLASS (code))
4125 {
4126 case tcc_exceptional:
4127 case tcc_type:
4128 case tcc_declaration:
4129 case tcc_comparison:
4130 case tcc_statement:
4131 case tcc_expression:
4132 case tcc_reference:
4133 case tcc_vl_exp:
4134 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4135 so that it will only be evaluated once. */
4136 /* The reference (r) and comparison (<) classes could be handled as
4137 below, but it is generally faster to only evaluate them once. */
4138 if (TREE_SIDE_EFFECTS (e))
4139 return save_expr (e);
4140 return e;
4141
4142 case tcc_constant:
4143 /* Constants need no processing. In fact, we should never reach
4144 here. */
4145 return e;
4146
4147 case tcc_binary:
4148 /* Division is slow and tends to be compiled with jumps,
4149 especially the division by powers of 2 that is often
4150 found inside of an array reference. So do it just once. */
4151 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4152 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4153 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4154 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4155 return save_expr (e);
4156 /* Recursively stabilize each operand. */
4157 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4158 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4159 break;
4160
4161 case tcc_unary:
4162 /* Recursively stabilize each operand. */
4163 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4164 break;
4165
4166 default:
4167 gcc_unreachable ();
4168 }
4169
4170 TREE_TYPE (result) = TREE_TYPE (e);
4171 TREE_READONLY (result) = TREE_READONLY (e);
4172 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4173 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4174
4175 return result;
4176 }
4177
4178 /* Stabilize a reference so that we can use it any number of times
4179 without causing its operands to be evaluated more than once.
4180 Returns the stabilized reference. This works by means of save_expr,
4181 so see the caveats in the comments about save_expr.
4182
4183 Also allows conversion expressions whose operands are references.
4184 Any other kind of expression is returned unchanged. */
4185
4186 tree
4187 stabilize_reference (tree ref)
4188 {
4189 tree result;
4190 enum tree_code code = TREE_CODE (ref);
4191
4192 switch (code)
4193 {
4194 case VAR_DECL:
4195 case PARM_DECL:
4196 case RESULT_DECL:
4197 /* No action is needed in this case. */
4198 return ref;
4199
4200 CASE_CONVERT:
4201 case FLOAT_EXPR:
4202 case FIX_TRUNC_EXPR:
4203 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4204 break;
4205
4206 case INDIRECT_REF:
4207 result = build_nt (INDIRECT_REF,
4208 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4209 break;
4210
4211 case COMPONENT_REF:
4212 result = build_nt (COMPONENT_REF,
4213 stabilize_reference (TREE_OPERAND (ref, 0)),
4214 TREE_OPERAND (ref, 1), NULL_TREE);
4215 break;
4216
4217 case BIT_FIELD_REF:
4218 result = build_nt (BIT_FIELD_REF,
4219 stabilize_reference (TREE_OPERAND (ref, 0)),
4220 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4221 break;
4222
4223 case ARRAY_REF:
4224 result = build_nt (ARRAY_REF,
4225 stabilize_reference (TREE_OPERAND (ref, 0)),
4226 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4227 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4228 break;
4229
4230 case ARRAY_RANGE_REF:
4231 result = build_nt (ARRAY_RANGE_REF,
4232 stabilize_reference (TREE_OPERAND (ref, 0)),
4233 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4234 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4235 break;
4236
4237 case COMPOUND_EXPR:
4238 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4239 it wouldn't be ignored. This matters when dealing with
4240 volatiles. */
4241 return stabilize_reference_1 (ref);
4242
4243 /* If arg isn't a kind of lvalue we recognize, make no change.
4244 Caller should recognize the error for an invalid lvalue. */
4245 default:
4246 return ref;
4247
4248 case ERROR_MARK:
4249 return error_mark_node;
4250 }
4251
4252 TREE_TYPE (result) = TREE_TYPE (ref);
4253 TREE_READONLY (result) = TREE_READONLY (ref);
4254 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4255 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4256
4257 return result;
4258 }
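/* Usage sketch (hypothetical REF, not GCC code): a front end that must
   evaluate the same lvalue for both a read and a write, e.g. when
   expanding "a[i++] += x", can stabilize it first:

     tree lhs = stabilize_reference (ref);
     /* LHS may now be used several times; the index expression carrying
	the side effect was wrapped in a SAVE_EXPR by stabilize_reference_1,
	so i++ is evaluated only once.  */

   The usual save_expr caveats mentioned above apply.  */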
4259 \f
4260 /* Low-level constructors for expressions. */
4261
4262 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4263 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4264
4265 void
4266 recompute_tree_invariant_for_addr_expr (tree t)
4267 {
4268 tree node;
4269 bool tc = true, se = false;
4270
4271 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4272
4273 /* We start out assuming this address is both invariant and constant
4274 and has no side effects. Now go down any handled components and see if
4275 any of them involve offsets that are either non-constant or non-invariant.
4276 Also check for side-effects.
4277
4278 ??? Note that this code makes no attempt to deal with the case where
4279 taking the address of something causes a copy due to misalignment. */
4280
4281 #define UPDATE_FLAGS(NODE) \
4282 do { tree _node = (NODE); \
4283 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4284 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4285
4286 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4287 node = TREE_OPERAND (node, 0))
4288 {
4289 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4290 array reference (probably made temporarily by the G++ front end),
4291 so ignore all the operands. */
4292 if ((TREE_CODE (node) == ARRAY_REF
4293 || TREE_CODE (node) == ARRAY_RANGE_REF)
4294 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4295 {
4296 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4297 if (TREE_OPERAND (node, 2))
4298 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4299 if (TREE_OPERAND (node, 3))
4300 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4301 }
4302 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4303 FIELD_DECL, apparently. The G++ front end can put something else
4304 there, at least temporarily. */
4305 else if (TREE_CODE (node) == COMPONENT_REF
4306 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4307 {
4308 if (TREE_OPERAND (node, 2))
4309 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4310 }
4311 }
4312
4313 node = lang_hooks.expr_to_decl (node, &tc, &se);
4314
4315 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4316 the address, since &(*a)->b is a form of addition. If it's a constant, the
4317 address is constant too. If it's a decl, its address is constant if the
4318 decl is static. Everything else is not constant and, furthermore,
4319 taking the address of a volatile variable is not volatile. */
4320 if (TREE_CODE (node) == INDIRECT_REF
4321 || TREE_CODE (node) == MEM_REF)
4322 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4323 else if (CONSTANT_CLASS_P (node))
4324 ;
4325 else if (DECL_P (node))
4326 tc &= (staticp (node) != NULL_TREE);
4327 else
4328 {
4329 tc = false;
4330 se |= TREE_SIDE_EFFECTS (node);
4331 }
4332
4333
4334 TREE_CONSTANT (t) = tc;
4335 TREE_SIDE_EFFECTS (t) = se;
4336 #undef UPDATE_FLAGS
4337 }
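/* Illustrative sketch (V is a hypothetical decl): build1 already calls
   this function for ADDR_EXPRs, so

     tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (v)), v);

   ends up with TREE_CONSTANT (addr) set when V is a static decl (staticp
   returns non-NULL) and every handled component on the path has constant
   offsets, and clear when V is an automatic variable.  */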
4338
4339 /* Build an expression of code CODE, data type TYPE, and operands as
4340 specified. Expressions and reference nodes can be created this way.
4341 Constants, decls, types and misc nodes cannot be.
4342
4343 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4344 enough for all extant tree codes. */
4345
4346 tree
4347 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4348 {
4349 tree t;
4350
4351 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4352
4353 t = make_node_stat (code PASS_MEM_STAT);
4354 TREE_TYPE (t) = tt;
4355
4356 return t;
4357 }
4358
4359 tree
4360 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4361 {
4362 int length = sizeof (struct tree_exp);
4363 tree t;
4364
4365 record_node_allocation_statistics (code, length);
4366
4367 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4368
4369 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4370
4371 memset (t, 0, sizeof (struct tree_common));
4372
4373 TREE_SET_CODE (t, code);
4374
4375 TREE_TYPE (t) = type;
4376 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4377 TREE_OPERAND (t, 0) = node;
4378 if (node && !TYPE_P (node))
4379 {
4380 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4381 TREE_READONLY (t) = TREE_READONLY (node);
4382 }
4383
4384 if (TREE_CODE_CLASS (code) == tcc_statement)
4385 TREE_SIDE_EFFECTS (t) = 1;
4386 else switch (code)
4387 {
4388 case VA_ARG_EXPR:
4389 /* All of these have side-effects, no matter what their
4390 operands are. */
4391 TREE_SIDE_EFFECTS (t) = 1;
4392 TREE_READONLY (t) = 0;
4393 break;
4394
4395 case INDIRECT_REF:
4396 /* Whether a dereference is readonly has nothing to do with whether
4397 its operand is readonly. */
4398 TREE_READONLY (t) = 0;
4399 break;
4400
4401 case ADDR_EXPR:
4402 if (node)
4403 recompute_tree_invariant_for_addr_expr (t);
4404 break;
4405
4406 default:
4407 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4408 && node && !TYPE_P (node)
4409 && TREE_CONSTANT (node))
4410 TREE_CONSTANT (t) = 1;
4411 if (TREE_CODE_CLASS (code) == tcc_reference
4412 && node && TREE_THIS_VOLATILE (node))
4413 TREE_THIS_VOLATILE (t) = 1;
4414 break;
4415 }
4416
4417 return t;
4418 }
4419
4420 #define PROCESS_ARG(N) \
4421 do { \
4422 TREE_OPERAND (t, N) = arg##N; \
4423 if (arg##N &&!TYPE_P (arg##N)) \
4424 { \
4425 if (TREE_SIDE_EFFECTS (arg##N)) \
4426 side_effects = 1; \
4427 if (!TREE_READONLY (arg##N) \
4428 && !CONSTANT_CLASS_P (arg##N)) \
4429 (void) (read_only = 0); \
4430 if (!TREE_CONSTANT (arg##N)) \
4431 (void) (constant = 0); \
4432 } \
4433 } while (0)
4434
4435 tree
4436 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4437 {
4438 bool constant, read_only, side_effects;
4439 tree t;
4440
4441 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4442
4443 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4444 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4445 /* When sizetype precision doesn't match that of pointers
4446 we need to be able to build explicit extensions or truncations
4447 of the offset argument. */
4448 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4449 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4450 && TREE_CODE (arg1) == INTEGER_CST);
4451
4452 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4453 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4454 && ptrofftype_p (TREE_TYPE (arg1)));
4455
4456 t = make_node_stat (code PASS_MEM_STAT);
4457 TREE_TYPE (t) = tt;
4458
4459 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4460 result based on those same flags for the arguments. But if the
4461 arguments aren't really even `tree' expressions, we shouldn't be trying
4462 to do this. */
4463
4464 /* Expressions without side effects may be constant if their
4465 arguments are as well. */
4466 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4467 || TREE_CODE_CLASS (code) == tcc_binary);
4468 read_only = 1;
4469 side_effects = TREE_SIDE_EFFECTS (t);
4470
4471 PROCESS_ARG (0);
4472 PROCESS_ARG (1);
4473
4474 TREE_SIDE_EFFECTS (t) = side_effects;
4475 if (code == MEM_REF)
4476 {
4477 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4478 {
4479 tree o = TREE_OPERAND (arg0, 0);
4480 TREE_READONLY (t) = TREE_READONLY (o);
4481 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4482 }
4483 }
4484 else
4485 {
4486 TREE_READONLY (t) = read_only;
4487 TREE_CONSTANT (t) = constant;
4488 TREE_THIS_VOLATILE (t)
4489 = (TREE_CODE_CLASS (code) == tcc_reference
4490 && arg0 && TREE_THIS_VOLATILE (arg0));
4491 }
4492
4493 return t;
4494 }
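/* Minimal sketch of the flag propagation above (illustrative, not GCC
   code): building a binary expression from two INTEGER_CSTs yields a
   constant, side-effect-free node:

     tree sum = build2 (PLUS_EXPR, integer_type_node, integer_one_node,
			build_int_cst (integer_type_node, 2));
     /* TREE_CONSTANT (sum) is set, TREE_SIDE_EFFECTS (sum) is not.  */

   Note that build2 performs no folding; fold_build2 would return the
   INTEGER_CST 3 directly.  */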
4495
4496
4497 tree
4498 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4499 tree arg2 MEM_STAT_DECL)
4500 {
4501 bool constant, read_only, side_effects;
4502 tree t;
4503
4504 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4505 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4506
4507 t = make_node_stat (code PASS_MEM_STAT);
4508 TREE_TYPE (t) = tt;
4509
4510 read_only = 1;
4511
4512 /* As a special exception, if COND_EXPR has NULL branches, we
4513 assume that it is a gimple statement and always consider
4514 it to have side effects. */
4515 if (code == COND_EXPR
4516 && tt == void_type_node
4517 && arg1 == NULL_TREE
4518 && arg2 == NULL_TREE)
4519 side_effects = true;
4520 else
4521 side_effects = TREE_SIDE_EFFECTS (t);
4522
4523 PROCESS_ARG (0);
4524 PROCESS_ARG (1);
4525 PROCESS_ARG (2);
4526
4527 if (code == COND_EXPR)
4528 TREE_READONLY (t) = read_only;
4529
4530 TREE_SIDE_EFFECTS (t) = side_effects;
4531 TREE_THIS_VOLATILE (t)
4532 = (TREE_CODE_CLASS (code) == tcc_reference
4533 && arg0 && TREE_THIS_VOLATILE (arg0));
4534
4535 return t;
4536 }
4537
4538 tree
4539 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4540 tree arg2, tree arg3 MEM_STAT_DECL)
4541 {
4542 bool constant, read_only, side_effects;
4543 tree t;
4544
4545 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4546
4547 t = make_node_stat (code PASS_MEM_STAT);
4548 TREE_TYPE (t) = tt;
4549
4550 side_effects = TREE_SIDE_EFFECTS (t);
4551
4552 PROCESS_ARG (0);
4553 PROCESS_ARG (1);
4554 PROCESS_ARG (2);
4555 PROCESS_ARG (3);
4556
4557 TREE_SIDE_EFFECTS (t) = side_effects;
4558 TREE_THIS_VOLATILE (t)
4559 = (TREE_CODE_CLASS (code) == tcc_reference
4560 && arg0 && TREE_THIS_VOLATILE (arg0));
4561
4562 return t;
4563 }
4564
4565 tree
4566 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4567 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4568 {
4569 bool constant, read_only, side_effects;
4570 tree t;
4571
4572 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4573
4574 t = make_node_stat (code PASS_MEM_STAT);
4575 TREE_TYPE (t) = tt;
4576
4577 side_effects = TREE_SIDE_EFFECTS (t);
4578
4579 PROCESS_ARG (0);
4580 PROCESS_ARG (1);
4581 PROCESS_ARG (2);
4582 PROCESS_ARG (3);
4583 PROCESS_ARG (4);
4584
4585 TREE_SIDE_EFFECTS (t) = side_effects;
4586 if (code == TARGET_MEM_REF)
4587 {
4588 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4589 {
4590 tree o = TREE_OPERAND (arg0, 0);
4591 TREE_READONLY (t) = TREE_READONLY (o);
4592 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4593 }
4594 }
4595 else
4596 TREE_THIS_VOLATILE (t)
4597 = (TREE_CODE_CLASS (code) == tcc_reference
4598 && arg0 && TREE_THIS_VOLATILE (arg0));
4599
4600 return t;
4601 }
4602
4603 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4604 on the pointer PTR. */
4605
4606 tree
4607 build_simple_mem_ref_loc (location_t loc, tree ptr)
4608 {
4609 HOST_WIDE_INT offset = 0;
4610 tree ptype = TREE_TYPE (ptr);
4611 tree tem;
4612 /* For convenience allow addresses that collapse to a simple base
4613 and offset. */
4614 if (TREE_CODE (ptr) == ADDR_EXPR
4615 && (handled_component_p (TREE_OPERAND (ptr, 0))
4616 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4617 {
4618 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4619 gcc_assert (ptr);
4620 ptr = build_fold_addr_expr (ptr);
4621 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4622 }
4623 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4624 ptr, build_int_cst (ptype, offset));
4625 SET_EXPR_LOCATION (tem, loc);
4626 return tem;
4627 }
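/* Usage sketch (P is a hypothetical pointer SSA name of type int *):

     tree deref = build_simple_mem_ref_loc (UNKNOWN_LOCATION, p);
     /* DEREF is MEM_REF <p, 0> of type int, i.e. a plain *p.  */

   If P is instead an ADDR_EXPR such as &s.f, the address is first
   collapsed to its base, and the constant operand of the MEM_REF then
   carries the byte offset of F.  */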
4628
4629 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4630
4631 offset_int
4632 mem_ref_offset (const_tree t)
4633 {
4634 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4635 }
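/* For example, for MEM_REF <p, -4> this returns the offset_int -4;
   operand 1 is always interpreted as signed regardless of its type.  */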
4636
4637 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4638 offsetted by OFFSET units. */
4639
4640 tree
4641 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4642 {
4643 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4644 build_fold_addr_expr (base),
4645 build_int_cst (ptr_type_node, offset));
4646 tree addr = build1 (ADDR_EXPR, type, ref);
4647 recompute_tree_invariant_for_addr_expr (addr);
4648 return addr;
4649 }
4650
4651 /* Similar except don't specify the TREE_TYPE
4652 and leave the TREE_SIDE_EFFECTS as 0.
4653 It is permissible for arguments to be null,
4654 or even garbage if their values do not matter. */
4655
4656 tree
4657 build_nt (enum tree_code code, ...)
4658 {
4659 tree t;
4660 int length;
4661 int i;
4662 va_list p;
4663
4664 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4665
4666 va_start (p, code);
4667
4668 t = make_node (code);
4669 length = TREE_CODE_LENGTH (code);
4670
4671 for (i = 0; i < length; i++)
4672 TREE_OPERAND (t, i) = va_arg (p, tree);
4673
4674 va_end (p);
4675 return t;
4676 }
4677
4678 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4679 tree vec. */
4680
4681 tree
4682 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4683 {
4684 tree ret, t;
4685 unsigned int ix;
4686
4687 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4688 CALL_EXPR_FN (ret) = fn;
4689 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4690 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4691 CALL_EXPR_ARG (ret, ix) = t;
4692 return ret;
4693 }
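/* Usage sketch (FN, ARG0 and ARG1 are hypothetical trees):

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg0);
     vec_safe_push (args, arg1);
     tree call = build_nt_call_vec (fn, args);
     /* CALL_EXPR_FN (call) == fn, CALL_EXPR_ARG (call, 0) == arg0.  */

   As with build_nt, no type is set and no flag propagation or folding is
   performed.  */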
4694 \f
4695 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4696 We do NOT enter this node in any sort of symbol table.
4697
4698 LOC is the location of the decl.
4699
4700 layout_decl is used to set up the decl's storage layout.
4701 Other slots are initialized to 0 or null pointers. */
4702
4703 tree
4704 build_decl_stat (location_t loc, enum tree_code code, tree name,
4705 tree type MEM_STAT_DECL)
4706 {
4707 tree t;
4708
4709 t = make_node_stat (code PASS_MEM_STAT);
4710 DECL_SOURCE_LOCATION (t) = loc;
4711
4712 /* if (type == error_mark_node)
4713 type = integer_type_node; */
4714 /* That is not done, deliberately, so that having error_mark_node
4715 as the type can suppress useless errors in the use of this variable. */
4716
4717 DECL_NAME (t) = name;
4718 TREE_TYPE (t) = type;
4719
4720 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4721 layout_decl (t, 0);
4722
4723 return t;
4724 }
4725
4726 /* Builds and returns function declaration with NAME and TYPE. */
4727
4728 tree
4729 build_fn_decl (const char *name, tree type)
4730 {
4731 tree id = get_identifier (name);
4732 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4733
4734 DECL_EXTERNAL (decl) = 1;
4735 TREE_PUBLIC (decl) = 1;
4736 DECL_ARTIFICIAL (decl) = 1;
4737 TREE_NOTHROW (decl) = 1;
4738
4739 return decl;
4740 }
4741
4742 vec<tree, va_gc> *all_translation_units;
4743
4744 /* Builds a new translation-unit decl with name NAME, queues it in the
4745 global list of translation-unit decls and returns it. */
4746
4747 tree
4748 build_translation_unit_decl (tree name)
4749 {
4750 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4751 name, NULL_TREE);
4752 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4753 vec_safe_push (all_translation_units, tu);
4754 return tu;
4755 }
4756
4757 \f
4758 /* BLOCK nodes are used to represent the structure of binding contours
4759 and declarations, once those contours have been exited and their contents
4760 compiled. This information is used for outputting debugging info. */
4761
4762 tree
4763 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4764 {
4765 tree block = make_node (BLOCK);
4766
4767 BLOCK_VARS (block) = vars;
4768 BLOCK_SUBBLOCKS (block) = subblocks;
4769 BLOCK_SUPERCONTEXT (block) = supercontext;
4770 BLOCK_CHAIN (block) = chain;
4771 return block;
4772 }
4773
4774 \f
4775 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4776
4777 LOC is the location to use in tree T. */
4778
4779 void
4780 protected_set_expr_location (tree t, location_t loc)
4781 {
4782 if (CAN_HAVE_LOCATION_P (t))
4783 SET_EXPR_LOCATION (t, loc);
4784 }
4785 \f
4786 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4787 is ATTRIBUTE. */
4788
4789 tree
4790 build_decl_attribute_variant (tree ddecl, tree attribute)
4791 {
4792 DECL_ATTRIBUTES (ddecl) = attribute;
4793 return ddecl;
4794 }
4795
4796 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4797 is ATTRIBUTE and its qualifiers are QUALS.
4798
4799 Record such modified types already made so we don't make duplicates. */
4800
4801 tree
4802 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4803 {
4804 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4805 {
4806 inchash::hash hstate;
4807 tree ntype;
4808 int i;
4809 tree t;
4810 enum tree_code code = TREE_CODE (ttype);
4811
4812 /* Building a distinct copy of a tagged type is inappropriate; it
4813 causes breakage in code that expects there to be a one-to-one
4814 relationship between a struct and its fields.
4815 build_duplicate_type is another solution (as used in
4816 handle_transparent_union_attribute), but that doesn't play well
4817 with the stronger C++ type identity model. */
4818 if (TREE_CODE (ttype) == RECORD_TYPE
4819 || TREE_CODE (ttype) == UNION_TYPE
4820 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4821 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4822 {
4823 warning (OPT_Wattributes,
4824 "ignoring attributes applied to %qT after definition",
4825 TYPE_MAIN_VARIANT (ttype));
4826 return build_qualified_type (ttype, quals);
4827 }
4828
4829 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4830 ntype = build_distinct_type_copy (ttype);
4831
4832 TYPE_ATTRIBUTES (ntype) = attribute;
4833
4834 hstate.add_int (code);
4835 if (TREE_TYPE (ntype))
4836 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4837 attribute_hash_list (attribute, hstate);
4838
4839 switch (TREE_CODE (ntype))
4840 {
4841 case FUNCTION_TYPE:
4842 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4843 break;
4844 case ARRAY_TYPE:
4845 if (TYPE_DOMAIN (ntype))
4846 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4847 break;
4848 case INTEGER_TYPE:
4849 t = TYPE_MAX_VALUE (ntype);
4850 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4851 hstate.add_object (TREE_INT_CST_ELT (t, i));
4852 break;
4853 case REAL_TYPE:
4854 case FIXED_POINT_TYPE:
4855 {
4856 unsigned int precision = TYPE_PRECISION (ntype);
4857 hstate.add_object (precision);
4858 }
4859 break;
4860 default:
4861 break;
4862 }
4863
4864 ntype = type_hash_canon (hstate.end(), ntype);
4865
4866 /* If the target-dependent attributes make NTYPE different from
4867 its canonical type, we will need to use structural equality
4868 checks for this type. */
4869 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4870 || !comp_type_attributes (ntype, ttype))
4871 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4872 else if (TYPE_CANONICAL (ntype) == ntype)
4873 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4874
4875 ttype = build_qualified_type (ntype, quals);
4876 }
4877 else if (TYPE_QUALS (ttype) != quals)
4878 ttype = build_qualified_type (ttype, quals);
4879
4880 return ttype;
4881 }
4882
4883 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4884 the same. */
4885
4886 static bool
4887 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4888 {
4889 tree cl1, cl2;
4890 for (cl1 = clauses1, cl2 = clauses2;
4891 cl1 && cl2;
4892 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4893 {
4894 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4895 return false;
4896 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4897 {
4898 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4899 OMP_CLAUSE_DECL (cl2)) != 1)
4900 return false;
4901 }
4902 switch (OMP_CLAUSE_CODE (cl1))
4903 {
4904 case OMP_CLAUSE_ALIGNED:
4905 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4906 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4907 return false;
4908 break;
4909 case OMP_CLAUSE_LINEAR:
4910 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4911 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4912 return false;
4913 break;
4914 case OMP_CLAUSE_SIMDLEN:
4915 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4916 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4917 return false;
4918 default:
4919 break;
4920 }
4921 }
4922 return true;
4923 }
4924
4925 /* Compare two constructor-element-type constants. Return true if the lists
4926 are known to be equal; otherwise return false. */
4927
4928 static bool
4929 simple_cst_list_equal (const_tree l1, const_tree l2)
4930 {
4931 while (l1 != NULL_TREE && l2 != NULL_TREE)
4932 {
4933 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4934 return false;
4935
4936 l1 = TREE_CHAIN (l1);
4937 l2 = TREE_CHAIN (l2);
4938 }
4939
4940 return l1 == l2;
4941 }
4942
4943 /* Compare two identifier nodes representing attributes. Either one may
4944 be in wrapped __ATTR__ form. Return true if they are the same, false
4945 otherwise. */
4946
4947 static bool
4948 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4949 {
4950 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4951 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4952 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4953
4954 /* Identifiers can be compared directly for equality. */
4955 if (attr1 == attr2)
4956 return true;
4957
4958 /* If they are not equal, one may still be in the form
4959 'text' while the other is in the form '__text__'. TODO:
4960 If we were storing attributes in normalized 'text' form, then
4961 this could all go away and we could take full advantage of
4962 the fact that we're comparing identifiers. :-) */
4963 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4964 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4965
4966 if (attr2_len == attr1_len + 4)
4967 {
4968 const char *p = IDENTIFIER_POINTER (attr2);
4969 const char *q = IDENTIFIER_POINTER (attr1);
4970 if (p[0] == '_' && p[1] == '_'
4971 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4972 && strncmp (q, p + 2, attr1_len) == 0)
4973 return true;
4974 }
4975 else if (attr2_len + 4 == attr1_len)
4976 {
4977 const char *p = IDENTIFIER_POINTER (attr2);
4978 const char *q = IDENTIFIER_POINTER (attr1);
4979 if (q[0] == '_' && q[1] == '_'
4980 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4981 && strncmp (q + 2, p, attr2_len) == 0)
4982 return true;
4983 }
4984
4985 return false;
4986 }
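/* For instance, cmp_attrib_identifiers returns true for the pair
   get_identifier ("format") / get_identifier ("__format__"), but false
   for "format" versus "__printf__".  */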
4987
4988 /* Compare two attributes for their value identity. Return true if the
4989 attribute values are known to be equal; otherwise return false. */
4990
4991 bool
4992 attribute_value_equal (const_tree attr1, const_tree attr2)
4993 {
4994 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4995 return true;
4996
4997 if (TREE_VALUE (attr1) != NULL_TREE
4998 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4999 && TREE_VALUE (attr2) != NULL_TREE
5000 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
5001 {
5002 /* Handle attribute format. */
5003 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
5004 {
5005 attr1 = TREE_VALUE (attr1);
5006 attr2 = TREE_VALUE (attr2);
5007 /* Compare the archetypes (printf/scanf/strftime/...). */
5008 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5009 TREE_VALUE (attr2)))
5010 return false;
5011 /* Archetypes are the same. Compare the rest. */
5012 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5013 TREE_CHAIN (attr2)) == 1);
5014 }
5015 return (simple_cst_list_equal (TREE_VALUE (attr1),
5016 TREE_VALUE (attr2)) == 1);
5017 }
5018
5019 if ((flag_openmp || flag_openmp_simd)
5020 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5021 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5022 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5023 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5024 TREE_VALUE (attr2));
5025
5026 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5027 }
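/* Illustrative example: the argument lists of
   __attribute__ ((format (printf, 1, 2))) and
   __attribute__ ((format (__printf__, 1, 2))) compare equal here, because
   the archetype identifiers go through cmp_attrib_identifiers and the
   remaining integer arguments through simple_cst_list_equal.  */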
5028
5029 /* Return 0 if the attributes for two types are incompatible, 1 if they
5030 are compatible, and 2 if they are nearly compatible (which causes a
5031 warning to be generated). */
5032 int
5033 comp_type_attributes (const_tree type1, const_tree type2)
5034 {
5035 const_tree a1 = TYPE_ATTRIBUTES (type1);
5036 const_tree a2 = TYPE_ATTRIBUTES (type2);
5037 const_tree a;
5038
5039 if (a1 == a2)
5040 return 1;
5041 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5042 {
5043 const struct attribute_spec *as;
5044 const_tree attr;
5045
5046 as = lookup_attribute_spec (get_attribute_name (a));
5047 if (!as || as->affects_type_identity == false)
5048 continue;
5049
5050 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5051 if (!attr || !attribute_value_equal (a, attr))
5052 break;
5053 }
5054 if (!a)
5055 {
5056 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5057 {
5058 const struct attribute_spec *as;
5059
5060 as = lookup_attribute_spec (get_attribute_name (a));
5061 if (!as || as->affects_type_identity == false)
5062 continue;
5063
5064 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5065 break;
5066 /* We don't need to compare trees again, as we did this
5067 already in the first loop. */
5068 }
5069 /* All attributes affecting type identity are equal, so
5070 there is no need to call the target hook for comparison. */
5071 if (!a)
5072 return 1;
5073 }
5074 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5075 return 0;
5076 /* As some type combinations - like default calling-convention - might
5077 be compatible, we have to call the target hook to get the final result. */
5078 return targetm.comp_type_attributes (type1, type2);
5079 }
5080
5081 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
5082 is ATTRIBUTE.
5083
5084 Record such modified types already made so we don't make duplicates. */
5085
5086 tree
5087 build_type_attribute_variant (tree ttype, tree attribute)
5088 {
5089 return build_type_attribute_qual_variant (ttype, attribute,
5090 TYPE_QUALS (ttype));
5091 }
5092
5093
5094 /* Reset the expression *EXPR_P, a size or position.
5095
5096 ??? We could reset all non-constant sizes or positions. But it's cheap
5097 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5098
5099 We need to reset self-referential sizes or positions because they cannot
5100 be gimplified and thus can contain a CALL_EXPR after the gimplification
5101 is finished, which will run afoul of LTO streaming. And they need to be
5102 reset to something essentially dummy but not constant, so as to preserve
5103 the properties of the object they are attached to. */
5104
5105 static inline void
5106 free_lang_data_in_one_sizepos (tree *expr_p)
5107 {
5108 tree expr = *expr_p;
5109 if (CONTAINS_PLACEHOLDER_P (expr))
5110 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5111 }
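/* A minimal sketch of the effect: if TYPE_SIZE of a type is a
   self-referential expression (as Ada produces for dynamically sized
   components), i.e. it contains a PLACEHOLDER_EXPR, it is replaced by a
   bare PLACEHOLDER_EXPR of the same type; ordinary constant sizes are
   left untouched since CONTAINS_PLACEHOLDER_P is false for them.  */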
5112
5113
5114 /* Reset all the fields in a binfo node BINFO. We only keep
5115 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5116
5117 static void
5118 free_lang_data_in_binfo (tree binfo)
5119 {
5120 unsigned i;
5121 tree t;
5122
5123 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5124
5125 BINFO_VIRTUALS (binfo) = NULL_TREE;
5126 BINFO_BASE_ACCESSES (binfo) = NULL;
5127 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5128 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5129
5130 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5131 free_lang_data_in_binfo (t);
5132 }
5133
5134
5135 /* Reset all language specific information still present in TYPE. */
5136
5137 static void
5138 free_lang_data_in_type (tree type)
5139 {
5140 gcc_assert (TYPE_P (type));
5141
5142 /* Give the FE a chance to remove its own data first. */
5143 lang_hooks.free_lang_data (type);
5144
5145 TREE_LANG_FLAG_0 (type) = 0;
5146 TREE_LANG_FLAG_1 (type) = 0;
5147 TREE_LANG_FLAG_2 (type) = 0;
5148 TREE_LANG_FLAG_3 (type) = 0;
5149 TREE_LANG_FLAG_4 (type) = 0;
5150 TREE_LANG_FLAG_5 (type) = 0;
5151 TREE_LANG_FLAG_6 (type) = 0;
5152
5153 if (TREE_CODE (type) == FUNCTION_TYPE)
5154 {
5155 /* Remove the const and volatile qualifiers from arguments. The
5156 C++ front end removes them, but the C front end does not,
5157 leading to false ODR violation errors when merging two
5158 instances of the same function signature compiled by
5159 different front ends. */
5160 tree p;
5161
5162 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5163 {
5164 tree arg_type = TREE_VALUE (p);
5165
5166 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5167 {
5168 int quals = TYPE_QUALS (arg_type)
5169 & ~TYPE_QUAL_CONST
5170 & ~TYPE_QUAL_VOLATILE;
5171 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5172 free_lang_data_in_type (TREE_VALUE (p));
5173 }
5174 /* C++ FE uses TREE_PURPOSE to store initial values. */
5175 TREE_PURPOSE (p) = NULL;
5176 }
5177 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5178 TYPE_MINVAL (type) = NULL;
5179 }
5180 if (TREE_CODE (type) == METHOD_TYPE)
5181 {
5182 tree p;
5183
5184 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5185 {
5186 /* C++ FE uses TREE_PURPOSE to store initial values. */
5187 TREE_PURPOSE (p) = NULL;
5188 }
5189 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5190 TYPE_MINVAL (type) = NULL;
5191 }
5192
5193 /* Remove members that are not actually FIELD_DECLs from the field
5194 list of an aggregate. These occur in C++. */
5195 if (RECORD_OR_UNION_TYPE_P (type))
5196 {
5197 tree prev, member;
5198
5199 /* Note that TYPE_FIELDS can be shared across distinct
5200 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5201 to be removed, we cannot set its TREE_CHAIN to NULL.
5202 Otherwise, we would not be able to find all the other fields
5203 in the other instances of this TREE_TYPE.
5204
5205 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5206 prev = NULL_TREE;
5207 member = TYPE_FIELDS (type);
5208 while (member)
5209 {
5210 if (TREE_CODE (member) == FIELD_DECL
5211 || TREE_CODE (member) == TYPE_DECL)
5212 {
5213 if (prev)
5214 TREE_CHAIN (prev) = member;
5215 else
5216 TYPE_FIELDS (type) = member;
5217 prev = member;
5218 }
5219
5220 member = TREE_CHAIN (member);
5221 }
5222
5223 if (prev)
5224 TREE_CHAIN (prev) = NULL_TREE;
5225 else
5226 TYPE_FIELDS (type) = NULL_TREE;
5227
5228 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5229 and dangle the pointer from time to time. */
5230 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5231 TYPE_VFIELD (type) = NULL_TREE;
5232
5233 /* Remove TYPE_METHODS list. While it would be nice to keep it
5234 to enable ODR warnings about different method lists, doing so
5235 seems to increase the size of streamed LTO data impractically.
5236 Keep the information that TYPE_METHODS was non-NULL. This is used
5237 by function.c and pretty printers. */
5238 if (TYPE_METHODS (type))
5239 TYPE_METHODS (type) = error_mark_node;
5240 if (TYPE_BINFO (type))
5241 {
5242 free_lang_data_in_binfo (TYPE_BINFO (type));
5243 /* We need to preserve the link to bases and virtual tables for all
5244 polymorphic types to keep the devirtualization machinery working.
5245 Debug output cares only about bases, but we also output
5246 virtual table pointers so that merging -fdevirtualize and
5247 -fno-devirtualize units is easier. */
5248 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5249 || !flag_devirtualize)
5250 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5251 && !BINFO_VTABLE (TYPE_BINFO (type)))
5252 || debug_info_level != DINFO_LEVEL_NONE))
5253 TYPE_BINFO (type) = NULL;
5254 }
5255 }
5256 else
5257 {
5258 /* For non-aggregate types, clear out the language slot (which
5259 overloads TYPE_BINFO). */
5260 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5261
5262 if (INTEGRAL_TYPE_P (type)
5263 || SCALAR_FLOAT_TYPE_P (type)
5264 || FIXED_POINT_TYPE_P (type))
5265 {
5266 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5267 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5268 }
5269 }
5270
5271 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5272 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5273
5274 if (TYPE_CONTEXT (type)
5275 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5276 {
5277 tree ctx = TYPE_CONTEXT (type);
5278 do
5279 {
5280 ctx = BLOCK_SUPERCONTEXT (ctx);
5281 }
5282 while (ctx && TREE_CODE (ctx) == BLOCK);
5283 TYPE_CONTEXT (type) = ctx;
5284 }
5285 }
5286
5287
5288 /* Return true if DECL may need an assembler name to be set. */
5289
5290 static inline bool
5291 need_assembler_name_p (tree decl)
5292 {
5293 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5294 Rule merging. This makes type_odr_p return true on those types during
5295 LTO and, by comparing the mangled names, we can tell which types are
5296 intended to be equivalent across compilation units.
5297
5298 We do not store names of type_in_anonymous_namespace_p.
5299
5300 Record, union and enumeration types have linkage that allows us
5301 to check type_in_anonymous_namespace_p. We do not mangle compound types
5302 that can always be compared structurally.
5303
5304 Similarly for builtin types, we compare properties of their main variant.
5305 A special case is integer types, where mangling does distinguish
5306 char/signed char/unsigned char etc. Storing names for these allows
5307 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5308 See cp/mangle.c:write_builtin_type for details. */
5309
5310 if (flag_lto_odr_type_mering
5311 && TREE_CODE (decl) == TYPE_DECL
5312 && DECL_NAME (decl)
5313 && decl == TYPE_NAME (TREE_TYPE (decl))
5314 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5315 && (type_with_linkage_p (TREE_TYPE (decl))
5316 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5317 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5318 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5319 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5320 if (TREE_CODE (decl) != FUNCTION_DECL
5321 && TREE_CODE (decl) != VAR_DECL)
5322 return false;
5323
5324 /* If DECL already has its assembler name set, it does not need a
5325 new one. */
5326 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5327 || DECL_ASSEMBLER_NAME_SET_P (decl))
5328 return false;
5329
5330 /* Abstract decls do not need an assembler name. */
5331 if (DECL_ABSTRACT_P (decl))
5332 return false;
5333
5334 /* For VAR_DECLs, only static, public and external symbols need an
5335 assembler name. */
5336 if (TREE_CODE (decl) == VAR_DECL
5337 && !TREE_STATIC (decl)
5338 && !TREE_PUBLIC (decl)
5339 && !DECL_EXTERNAL (decl))
5340 return false;
5341
5342 if (TREE_CODE (decl) == FUNCTION_DECL)
5343 {
5344 /* Do not set assembler name on builtins. Allow RTL expansion to
5345 decide whether to expand inline or via a regular call. */
5346 if (DECL_BUILT_IN (decl)
5347 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5348 return false;
5349
5350 /* Functions represented in the callgraph need an assembler name. */
5351 if (cgraph_node::get (decl) != NULL)
5352 return true;
5353
5354 /* Unused and non-public functions don't need an assembler name. */
5355 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5356 return false;
5357 }
5358
5359 return true;
5360 }
5361
5362
5363 /* Reset all language specific information still present in symbol
5364 DECL. */
5365
5366 static void
5367 free_lang_data_in_decl (tree decl)
5368 {
5369 gcc_assert (DECL_P (decl));
5370
5371 /* Give the FE a chance to remove its own data first. */
5372 lang_hooks.free_lang_data (decl);
5373
5374 TREE_LANG_FLAG_0 (decl) = 0;
5375 TREE_LANG_FLAG_1 (decl) = 0;
5376 TREE_LANG_FLAG_2 (decl) = 0;
5377 TREE_LANG_FLAG_3 (decl) = 0;
5378 TREE_LANG_FLAG_4 (decl) = 0;
5379 TREE_LANG_FLAG_5 (decl) = 0;
5380 TREE_LANG_FLAG_6 (decl) = 0;
5381
5382 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5383 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5384 if (TREE_CODE (decl) == FIELD_DECL)
5385 {
5386 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5387 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5388 DECL_QUALIFIER (decl) = NULL_TREE;
5389 }
5390
5391 if (TREE_CODE (decl) == FUNCTION_DECL)
5392 {
5393 struct cgraph_node *node;
5394 if (!(node = cgraph_node::get (decl))
5395 || (!node->definition && !node->clones))
5396 {
5397 if (node)
5398 node->release_body ();
5399 else
5400 {
5401 release_function_body (decl);
5402 DECL_ARGUMENTS (decl) = NULL;
5403 DECL_RESULT (decl) = NULL;
5404 DECL_INITIAL (decl) = error_mark_node;
5405 }
5406 }
5407 if (gimple_has_body_p (decl))
5408 {
5409 tree t;
5410
5411 /* If DECL has a gimple body, then the context for its
5412 arguments must be DECL. Otherwise, it doesn't really
5413 matter, as we will not be emitting any code for DECL. In
5414 general, there may be other instances of DECL created by
5415 the front end and since PARM_DECLs are generally shared,
5416 their DECL_CONTEXT changes as the replicas of DECL are
5417 created. The only time where DECL_CONTEXT is important
5418 is for the FUNCTION_DECLs that have a gimple body (since
5419 the PARM_DECL will be used in the function's body). */
5420 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5421 DECL_CONTEXT (t) = decl;
5422 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5423 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5424 = target_option_default_node;
5425 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5426 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5427 = optimization_default_node;
5428 }
5429
5430 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5431 At this point, it is not needed anymore. */
5432 DECL_SAVED_TREE (decl) = NULL_TREE;
5433
5434 /* Clear the abstract origin if it refers to a method. Otherwise
5435 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5436 origin will not be output correctly. */
5437 if (DECL_ABSTRACT_ORIGIN (decl)
5438 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5439 && RECORD_OR_UNION_TYPE_P
5440 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5441 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5442
5443 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5444 DECL_VINDEX referring to itself into a vtable slot number as it
5445 should. Happens with functions that are copied and then forgotten
5446 about. Just clear it, it won't matter anymore. */
5447 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5448 DECL_VINDEX (decl) = NULL_TREE;
5449 }
5450 else if (TREE_CODE (decl) == VAR_DECL)
5451 {
5452 if ((DECL_EXTERNAL (decl)
5453 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5454 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5455 DECL_INITIAL (decl) = NULL_TREE;
5456 }
5457 else if (TREE_CODE (decl) == TYPE_DECL
5458 || TREE_CODE (decl) == FIELD_DECL)
5459 DECL_INITIAL (decl) = NULL_TREE;
5460 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5461 && DECL_INITIAL (decl)
5462 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5463 {
5464 /* Strip builtins from the translation-unit BLOCK. We still have targets
5465 without builtin_decl_explicit support and also builtins are shared
5466 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5467 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5468 while (*nextp)
5469 {
5470 tree var = *nextp;
5471 if (TREE_CODE (var) == FUNCTION_DECL
5472 && DECL_BUILT_IN (var))
5473 *nextp = TREE_CHAIN (var);
5474 else
5475 nextp = &TREE_CHAIN (var);
5476 }
5477 }
5478 }
5479
5480
5481 /* Data used when collecting DECLs and TYPEs for language data removal. */
5482
5483 struct free_lang_data_d
5484 {
5485 /* Worklist to avoid excessive recursion. */
5486 vec<tree> worklist;
5487
5488 /* Set of traversed objects. Used to avoid duplicate visits. */
5489 hash_set<tree> *pset;
5490
5491 /* Array of symbols to process with free_lang_data_in_decl. */
5492 vec<tree> decls;
5493
5494 /* Array of types to process with free_lang_data_in_type. */
5495 vec<tree> types;
5496 };
5497
5498
5499 /* Save all language fields needed to generate proper debug information
5500 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5501
5502 static void
5503 save_debug_info_for_decl (tree t)
5504 {
5505 /*struct saved_debug_info_d *sdi;*/
5506
5507 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5508
5509 /* FIXME. Partial implementation for saving debug info removed. */
5510 }
5511
5512
5513 /* Save all language fields needed to generate proper debug information
5514 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5515
5516 static void
5517 save_debug_info_for_type (tree t)
5518 {
5519 /*struct saved_debug_info_d *sdi;*/
5520
5521 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5522
5523 /* FIXME. Partial implementation for saving debug info removed. */
5524 }
5525
5526
5527 /* Add type or decl T to one of the lists of tree nodes that need their
5528 language data removed. The lists are held inside FLD. */
5529
5530 static void
5531 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5532 {
5533 if (DECL_P (t))
5534 {
5535 fld->decls.safe_push (t);
5536 if (debug_info_level > DINFO_LEVEL_TERSE)
5537 save_debug_info_for_decl (t);
5538 }
5539 else if (TYPE_P (t))
5540 {
5541 fld->types.safe_push (t);
5542 if (debug_info_level > DINFO_LEVEL_TERSE)
5543 save_debug_info_for_type (t);
5544 }
5545 else
5546 gcc_unreachable ();
5547 }
5548
5549 /* Push tree node T into FLD->WORKLIST. */
5550
5551 static inline void
5552 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5553 {
5554 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5555 fld->worklist.safe_push ((t));
5556 }
5557
5558
5559 /* Operand callback helper for free_lang_data_in_node. *TP is the
5560 subtree operand being considered. */
5561
5562 static tree
5563 find_decls_types_r (tree *tp, int *ws, void *data)
5564 {
5565 tree t = *tp;
5566 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5567
5568 if (TREE_CODE (t) == TREE_LIST)
5569 return NULL_TREE;
5570
5571 /* Language specific nodes will be removed, so there is no need
5572 to gather anything under them. */
5573 if (is_lang_specific (t))
5574 {
5575 *ws = 0;
5576 return NULL_TREE;
5577 }
5578
5579 if (DECL_P (t))
5580 {
5581 /* Note that walk_tree does not traverse every possible field in
5582 decls, so we have to do our own traversals here. */
5583 add_tree_to_fld_list (t, fld);
5584
5585 fld_worklist_push (DECL_NAME (t), fld);
5586 fld_worklist_push (DECL_CONTEXT (t), fld);
5587 fld_worklist_push (DECL_SIZE (t), fld);
5588 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5589
5590 /* We are going to remove everything under DECL_INITIAL for
5591 TYPE_DECLs. No point walking them. */
5592 if (TREE_CODE (t) != TYPE_DECL)
5593 fld_worklist_push (DECL_INITIAL (t), fld);
5594
5595 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5596 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5597
5598 if (TREE_CODE (t) == FUNCTION_DECL)
5599 {
5600 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5601 fld_worklist_push (DECL_RESULT (t), fld);
5602 }
5603 else if (TREE_CODE (t) == TYPE_DECL)
5604 {
5605 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5606 }
5607 else if (TREE_CODE (t) == FIELD_DECL)
5608 {
5609 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5610 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5611 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5612 fld_worklist_push (DECL_FCONTEXT (t), fld);
5613 }
5614
5615 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5616 && DECL_HAS_VALUE_EXPR_P (t))
5617 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5618
5619 if (TREE_CODE (t) != FIELD_DECL
5620 && TREE_CODE (t) != TYPE_DECL)
5621 fld_worklist_push (TREE_CHAIN (t), fld);
5622 *ws = 0;
5623 }
5624 else if (TYPE_P (t))
5625 {
5626 /* Note that walk_tree does not traverse every possible field in
5627 types, so we have to do our own traversals here. */
5628 add_tree_to_fld_list (t, fld);
5629
5630 if (!RECORD_OR_UNION_TYPE_P (t))
5631 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5632 fld_worklist_push (TYPE_SIZE (t), fld);
5633 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5634 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5635 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5636 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5637 fld_worklist_push (TYPE_NAME (t), fld);
5638 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5639 them and thus do not want to reach unused pointer types
5640 this way. */
5641 if (!POINTER_TYPE_P (t))
5642 fld_worklist_push (TYPE_MINVAL (t), fld);
5643 if (!RECORD_OR_UNION_TYPE_P (t))
5644 fld_worklist_push (TYPE_MAXVAL (t), fld);
5645 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5646 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5647 do not want to reach unused variants this way. */
5648 if (TYPE_CONTEXT (t))
5649 {
5650 tree ctx = TYPE_CONTEXT (t);
5651 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5652 So push that instead. */
5653 while (ctx && TREE_CODE (ctx) == BLOCK)
5654 ctx = BLOCK_SUPERCONTEXT (ctx);
5655 fld_worklist_push (ctx, fld);
5656 }
5657 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5658 want to reach unused types this way. */
5659
5660 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5661 {
5662 unsigned i;
5663 tree tem;
5664 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5665 fld_worklist_push (TREE_TYPE (tem), fld);
5666 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5667 if (tem
5668 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5669 && TREE_CODE (tem) == TREE_LIST)
5670 do
5671 {
5672 fld_worklist_push (TREE_VALUE (tem), fld);
5673 tem = TREE_CHAIN (tem);
5674 }
5675 while (tem);
5676 }
5677 if (RECORD_OR_UNION_TYPE_P (t))
5678 {
5679 tree tem;
5680 /* Push all TYPE_FIELDS - there can be interleaving interesting
5681 and non-interesting things. */
5682 tem = TYPE_FIELDS (t);
5683 while (tem)
5684 {
5685 if (TREE_CODE (tem) == FIELD_DECL
5686 || TREE_CODE (tem) == TYPE_DECL)
5687 fld_worklist_push (tem, fld);
5688 tem = TREE_CHAIN (tem);
5689 }
5690 }
5691
5692 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5693 *ws = 0;
5694 }
5695 else if (TREE_CODE (t) == BLOCK)
5696 {
5697 tree tem;
5698 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5699 fld_worklist_push (tem, fld);
5700 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5701 fld_worklist_push (tem, fld);
5702 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5703 }
5704
5705 if (TREE_CODE (t) != IDENTIFIER_NODE
5706 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5707 fld_worklist_push (TREE_TYPE (t), fld);
5708
5709 return NULL_TREE;
5710 }
5711
5712
5713 /* Find decls and types in T. */
5714
5715 static void
5716 find_decls_types (tree t, struct free_lang_data_d *fld)
5717 {
5718 while (1)
5719 {
5720 if (!fld->pset->contains (t))
5721 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5722 if (fld->worklist.is_empty ())
5723 break;
5724 t = fld->worklist.pop ();
5725 }
5726 }
5727
5728 /* Translate all the types in LIST to the corresponding runtime
5729 types. */
5730
5731 static tree
5732 get_eh_types_for_runtime (tree list)
5733 {
5734 tree head, prev;
5735
5736 if (list == NULL_TREE)
5737 return NULL_TREE;
5738
5739 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5740 prev = head;
5741 list = TREE_CHAIN (list);
5742 while (list)
5743 {
5744 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5745 TREE_CHAIN (prev) = n;
5746 prev = TREE_CHAIN (prev);
5747 list = TREE_CHAIN (list);
5748 }
5749
5750 return head;
5751 }
5752
5753
5754 /* Find decls and types referenced in EH region R and store them in
5755 FLD->DECLS and FLD->TYPES. */
5756
5757 static void
5758 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5759 {
5760 switch (r->type)
5761 {
5762 case ERT_CLEANUP:
5763 break;
5764
5765 case ERT_TRY:
5766 {
5767 eh_catch c;
5768
5769 /* The types referenced in each catch must first be changed to the
5770 EH types used at runtime. This removes references to FE types
5771 in the region. */
5772 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5773 {
5774 c->type_list = get_eh_types_for_runtime (c->type_list);
5775 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5776 }
5777 }
5778 break;
5779
5780 case ERT_ALLOWED_EXCEPTIONS:
5781 r->u.allowed.type_list
5782 = get_eh_types_for_runtime (r->u.allowed.type_list);
5783 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5784 break;
5785
5786 case ERT_MUST_NOT_THROW:
5787 walk_tree (&r->u.must_not_throw.failure_decl,
5788 find_decls_types_r, fld, fld->pset);
5789 break;
5790 }
5791 }
5792
5793
5794 /* Find decls and types referenced in cgraph node N and store them in
5795 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5796 look for *every* kind of DECL and TYPE node reachable from N,
5797 including those embedded inside types and decls (i.e., TYPE_DECLs,
5798 NAMESPACE_DECLs, etc). */
5799
5800 static void
5801 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5802 {
5803 basic_block bb;
5804 struct function *fn;
5805 unsigned ix;
5806 tree t;
5807
5808 find_decls_types (n->decl, fld);
5809
5810 if (!gimple_has_body_p (n->decl))
5811 return;
5812
5813 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5814
5815 fn = DECL_STRUCT_FUNCTION (n->decl);
5816
5817 /* Traverse locals. */
5818 FOR_EACH_LOCAL_DECL (fn, ix, t)
5819 find_decls_types (t, fld);
5820
5821 /* Traverse EH regions in FN. */
5822 {
5823 eh_region r;
5824 FOR_ALL_EH_REGION_FN (r, fn)
5825 find_decls_types_in_eh_region (r, fld);
5826 }
5827
5828 /* Traverse every statement in FN. */
5829 FOR_EACH_BB_FN (bb, fn)
5830 {
5831 gphi_iterator psi;
5832 gimple_stmt_iterator si;
5833 unsigned i;
5834
5835 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5836 {
5837 gphi *phi = psi.phi ();
5838
5839 for (i = 0; i < gimple_phi_num_args (phi); i++)
5840 {
5841 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5842 find_decls_types (*arg_p, fld);
5843 }
5844 }
5845
5846 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5847 {
5848 gimple *stmt = gsi_stmt (si);
5849
5850 if (is_gimple_call (stmt))
5851 find_decls_types (gimple_call_fntype (stmt), fld);
5852
5853 for (i = 0; i < gimple_num_ops (stmt); i++)
5854 {
5855 tree arg = gimple_op (stmt, i);
5856 find_decls_types (arg, fld);
5857 }
5858 }
5859 }
5860 }
5861
5862
5863 /* Find decls and types referenced in varpool node N and store them in
5864 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5865 look for *every* kind of DECL and TYPE node reachable from N,
5866 including those embedded inside types and decls (i.e., TYPE_DECLs,
5867 NAMESPACE_DECLs, etc). */
5868
5869 static void
5870 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5871 {
5872 find_decls_types (v->decl, fld);
5873 }
5874
5875 /* If T needs an assembler name, have one created for it. */
5876
5877 void
5878 assign_assembler_name_if_neeeded (tree t)
5879 {
5880 if (need_assembler_name_p (t))
5881 {
5882 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5883 diagnostics that use input_location to show locus
5884 information. The problem here is that, at this point,
5885 input_location is generally anchored to the end of the file
5886 (since the parser is long gone), so we don't have a good
5887 position to pin it to.
5888
5889 To alleviate this problem, this uses the location of T's
5890 declaration. Examples of this are
5891 testsuite/g++.dg/template/cond2.C and
5892 testsuite/g++.dg/template/pr35240.C. */
5893 location_t saved_location = input_location;
5894 input_location = DECL_SOURCE_LOCATION (t);
5895
5896 decl_assembler_name (t);
5897
5898 input_location = saved_location;
5899 }
5900 }
5901
5902
5903 /* Free language specific information for every operand and expression
5904 in every node of the call graph. This process operates in three stages:
5905
5906 1- Every callgraph node and varpool node is traversed looking for
5907 decls and types embedded in them. This is a more exhaustive
5908 search than that done by find_referenced_vars, because it will
5909 also collect individual fields, decls embedded in types, etc.
5910
5911 2- All the decls found are sent to free_lang_data_in_decl.
5912
5913 3- All the types found are sent to free_lang_data_in_type.
5914
5915 The ordering between decls and types is important because
5916 free_lang_data_in_decl sets assembler names, which includes
5917 mangling. So types cannot be freed up until assembler names have
5918 been set up. */
5919
5920 static void
5921 free_lang_data_in_cgraph (void)
5922 {
5923 struct cgraph_node *n;
5924 varpool_node *v;
5925 struct free_lang_data_d fld;
5926 tree t;
5927 unsigned i;
5928 alias_pair *p;
5929
5930 /* Initialize sets and arrays to store referenced decls and types. */
5931 fld.pset = new hash_set<tree>;
5932 fld.worklist.create (0);
5933 fld.decls.create (100);
5934 fld.types.create (100);
5935
5936 /* Find decls and types in the body of every function in the callgraph. */
5937 FOR_EACH_FUNCTION (n)
5938 find_decls_types_in_node (n, &fld);
5939
5940 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5941 find_decls_types (p->decl, &fld);
5942
5943 /* Find decls and types in every varpool symbol. */
5944 FOR_EACH_VARIABLE (v)
5945 find_decls_types_in_var (v, &fld);
5946
5947 /* Set the assembler name on every decl found. We need to do this
5948 now because free_lang_data_in_decl will invalidate data needed
5949 for mangling. This breaks mangling on interdependent decls. */
5950 FOR_EACH_VEC_ELT (fld.decls, i, t)
5951 assign_assembler_name_if_neeeded (t);
5952
5953 /* Traverse every decl found freeing its language data. */
5954 FOR_EACH_VEC_ELT (fld.decls, i, t)
5955 free_lang_data_in_decl (t);
5956
5957 /* Traverse every type found freeing its language data. */
5958 FOR_EACH_VEC_ELT (fld.types, i, t)
5959 free_lang_data_in_type (t);
5960 if (flag_checking)
5961 {
5962 FOR_EACH_VEC_ELT (fld.types, i, t)
5963 verify_type (t);
5964 }
5965
5966 delete fld.pset;
5967 fld.worklist.release ();
5968 fld.decls.release ();
5969 fld.types.release ();
5970 }
5971
5972
5973 /* Free resources that are used by the front end but are not needed once it is done. */
5974
5975 static unsigned
5976 free_lang_data (void)
5977 {
5978 unsigned i;
5979
5980 /* If we are the LTO frontend we have freed lang-specific data already. */
5981 if (in_lto_p
5982 || (!flag_generate_lto && !flag_generate_offload))
5983 return 0;
5984
5985 /* Allocate and assign alias sets to the standard integer types
5986 while the slots are still set up the way the front ends generated them. */
5987 for (i = 0; i < itk_none; ++i)
5988 if (integer_types[i])
5989 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5990
5991 /* Traverse the IL resetting language specific information for
5992 operands, expressions, etc. */
5993 free_lang_data_in_cgraph ();
5994
5995 /* Create gimple variants for common types. */
5996 ptrdiff_type_node = integer_type_node;
5997 fileptr_type_node = ptr_type_node;
5998
5999 /* Reset some langhooks. Do not reset types_compatible_p, it may
6000 still be used indirectly via the get_alias_set langhook. */
6001 lang_hooks.dwarf_name = lhd_dwarf_name;
6002 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6003 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6004
6005 /* We do not want the default decl_assembler_name implementation.
6006 Once everything is fixed, we instead want a wrapper around it that
6007 asserts that all non-local symbols already got their assembler
6008 name and that only produces assembler names for local symbols; or,
6009 better yet, we should make sure we never call decl_assembler_name on
6010 local symbols and devise a separate, middle-end private scheme for it. */
6011
6012 /* Reset diagnostic machinery. */
6013 tree_diagnostics_defaults (global_dc);
6014
6015 return 0;
6016 }
6017
6018
6019 namespace {
6020
6021 const pass_data pass_data_ipa_free_lang_data =
6022 {
6023 SIMPLE_IPA_PASS, /* type */
6024 "*free_lang_data", /* name */
6025 OPTGROUP_NONE, /* optinfo_flags */
6026 TV_IPA_FREE_LANG_DATA, /* tv_id */
6027 0, /* properties_required */
6028 0, /* properties_provided */
6029 0, /* properties_destroyed */
6030 0, /* todo_flags_start */
6031 0, /* todo_flags_finish */
6032 };
6033
6034 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6035 {
6036 public:
6037 pass_ipa_free_lang_data (gcc::context *ctxt)
6038 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6039 {}
6040
6041 /* opt_pass methods: */
6042 virtual unsigned int execute (function *) { return free_lang_data (); }
6043
6044 }; // class pass_ipa_free_lang_data
6045
6046 } // anon namespace
6047
6048 simple_ipa_opt_pass *
6049 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6050 {
6051 return new pass_ipa_free_lang_data (ctxt);
6052 }
6053
6054 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6055 ATTR_NAME. Also used internally by remove_attribute(). */
6056 bool
6057 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6058 {
6059 size_t ident_len = IDENTIFIER_LENGTH (ident);
6060
6061 if (ident_len == attr_len)
6062 {
6063 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6064 return true;
6065 }
6066 else if (ident_len == attr_len + 4)
6067 {
6068 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
6069 '__text__'. */
6070 const char *p = IDENTIFIER_POINTER (ident);
6071 if (p[0] == '_' && p[1] == '_'
6072 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6073 && strncmp (attr_name, p + 2, attr_len) == 0)
6074 return true;
6075 }
6076
6077 return false;
6078 }
6079
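/* An illustrative sketch (not part of GCC): both spellings of an
   attribute name match, so the following two calls behave the same way:

     private_is_attribute_p ("packed", 6, get_identifier ("packed"));
     private_is_attribute_p ("packed", 6, get_identifier ("__packed__"));

   Both return true; ATTR_NAME itself must always be given in the
   canonical form without the surrounding underscores.  */
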
6080 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6081 of ATTR_NAME, and LIST is not NULL_TREE. */
6082 tree
6083 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6084 {
6085 while (list)
6086 {
6087 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6088
6089 if (ident_len == attr_len)
6090 {
6091 if (!strcmp (attr_name,
6092 IDENTIFIER_POINTER (get_attribute_name (list))))
6093 break;
6094 }
6095 /* TODO: If we made sure that attributes were stored in the
6096 canonical form without '__...__' (i.e., as in 'text' as opposed
6097 to '__text__') then we could avoid the following case. */
6098 else if (ident_len == attr_len + 4)
6099 {
6100 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6101 if (p[0] == '_' && p[1] == '_'
6102 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6103 && strncmp (attr_name, p + 2, attr_len) == 0)
6104 break;
6105 }
6106 list = TREE_CHAIN (list);
6107 }
6108
6109 return list;
6110 }
6111
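/* An illustrative sketch (not part of GCC): callers normally reach this
   function through the lookup_attribute wrapper in tree.h, which supplies
   ATTR_LEN.  For a hypothetical decl DECL:

     if (lookup_attribute ("noreturn", DECL_ATTRIBUTES (decl)))
       ...

   matches whether the attribute was written "noreturn" or "__noreturn__",
   and the TREE_CHAIN of the returned node can be passed back in to find
   further occurrences.  */
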
6112 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6113 return a pointer to the first list element whose attribute name
6114 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6115 '__text__'). */
6116
6117 tree
6118 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6119 tree list)
6120 {
6121 while (list)
6122 {
6123 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6124
6125 if (attr_len > ident_len)
6126 {
6127 list = TREE_CHAIN (list);
6128 continue;
6129 }
6130
6131 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6132
6133 if (strncmp (attr_name, p, attr_len) == 0)
6134 break;
6135
6136 /* TODO: If we made sure that attributes were stored in the
6137 canonical form without '__...__' (i.e., as in 'text' as opposed
6138 to '__text__') then we could avoid the following case. */
6139 if (p[0] == '_' && p[1] == '_'
6140 && strncmp (attr_name, p + 2, attr_len) == 0)
6141 break;
6142
6143 list = TREE_CHAIN (list);
6144 }
6145
6146 return list;
6147 }
6148
6149
6150 /* A variant of lookup_attribute() that can be used with an identifier
6151 as the first argument, and where the identifier can be either
6152 'text' or '__text__'.
6153
6154 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6155 return a pointer to the attribute's list element if the attribute
6156 is part of the list, or NULL_TREE if not found. If the attribute
6157 appears more than once, this only returns the first occurrence; the
6158 TREE_CHAIN of the return value should be passed back in if further
6159 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6160 can be in the form 'text' or '__text__'. */
6161 static tree
6162 lookup_ident_attribute (tree attr_identifier, tree list)
6163 {
6164 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6165
6166 while (list)
6167 {
6168 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6169 == IDENTIFIER_NODE);
6170
6171 if (cmp_attrib_identifiers (attr_identifier,
6172 get_attribute_name (list)))
6173 /* Found it. */
6174 break;
6175 list = TREE_CHAIN (list);
6176 }
6177
6178 return list;
6179 }
6180
6181 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6182 modified list. */
6183
6184 tree
6185 remove_attribute (const char *attr_name, tree list)
6186 {
6187 tree *p;
6188 size_t attr_len = strlen (attr_name);
6189
6190 gcc_checking_assert (attr_name[0] != '_');
6191
6192 for (p = &list; *p; )
6193 {
6194 tree l = *p;
6195 /* TODO: If we were storing attributes in normalized form, here
6196 we could use a simple strcmp(). */
6197 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6198 *p = TREE_CHAIN (l);
6199 else
6200 p = &TREE_CHAIN (l);
6201 }
6202
6203 return list;
6204 }
6205
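/* An illustrative sketch (not part of GCC): because the head of the list
   may be removed, the result has to be stored back.  For a hypothetical
   decl DECL:

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   ATTR_NAME must be given in canonical form ("deprecated" rather than
   "__deprecated__"); the assert above enforces this.  */
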
6206 /* Return an attribute list that is the union of A1 and A2. */
6207
6208 tree
6209 merge_attributes (tree a1, tree a2)
6210 {
6211 tree attributes;
6212
6213 /* Either one unset? Take the set one. */
6214
6215 if ((attributes = a1) == 0)
6216 attributes = a2;
6217
6218 /* One that completely contains the other? Take it. */
6219
6220 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6221 {
6222 if (attribute_list_contained (a2, a1))
6223 attributes = a2;
6224 else
6225 {
6226 /* Pick the longer list, and merge the other list's attributes into it. */
6227
6228 if (list_length (a1) < list_length (a2))
6229 attributes = a2, a2 = a1;
6230
6231 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6232 {
6233 tree a;
6234 for (a = lookup_ident_attribute (get_attribute_name (a2),
6235 attributes);
6236 a != NULL_TREE && !attribute_value_equal (a, a2);
6237 a = lookup_ident_attribute (get_attribute_name (a2),
6238 TREE_CHAIN (a)))
6239 ;
6240 if (a == NULL_TREE)
6241 {
6242 a1 = copy_node (a2);
6243 TREE_CHAIN (a1) = attributes;
6244 attributes = a1;
6245 }
6246 }
6247 }
6248 }
6249 return attributes;
6250 }
6251
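/* A worked example (illustrative, not part of GCC): merging is a union
   keyed on attribute name and value.  Given hypothetical lists
   A1 = { packed, aligned(4) } and A2 = { aligned(4), unused },
   merge_attributes (a1, a2) returns a list containing packed, aligned(4)
   and unused, with the shared aligned(4) entry appearing only once.  */
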
6252 /* Given types T1 and T2, merge their attributes and return
6253 the result. */
6254
6255 tree
6256 merge_type_attributes (tree t1, tree t2)
6257 {
6258 return merge_attributes (TYPE_ATTRIBUTES (t1),
6259 TYPE_ATTRIBUTES (t2));
6260 }
6261
6262 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6263 the result. */
6264
6265 tree
6266 merge_decl_attributes (tree olddecl, tree newdecl)
6267 {
6268 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6269 DECL_ATTRIBUTES (newdecl));
6270 }
6271
6272 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6273
6274 /* Specialization of merge_decl_attributes for various Windows targets.
6275
6276 This handles the following situation:
6277
6278 __declspec (dllimport) int foo;
6279 int foo;
6280
6281 The second instance of `foo' nullifies the dllimport. */
6282
6283 tree
6284 merge_dllimport_decl_attributes (tree old, tree new_tree)
6285 {
6286 tree a;
6287 int delete_dllimport_p = 1;
6288
6289 /* What we need to do here is remove dllimport from `old' if it doesn't
6290 appear in `new'. dllimport behaves like extern: if a declaration is
6291 marked dllimport and a definition appears later, then the object
6292 is not dllimport'd. We also remove a `new' dllimport if the old list
6293 contains dllexport: dllexport always overrides dllimport, regardless
6294 of the order of declaration. */
6295 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6296 delete_dllimport_p = 0;
6297 else if (DECL_DLLIMPORT_P (new_tree)
6298 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6299 {
6300 DECL_DLLIMPORT_P (new_tree) = 0;
6301 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6302 "dllimport ignored", new_tree);
6303 }
6304 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6305 {
6306 /* Warn about overriding a symbol that has already been used, e.g.:
6307 extern int __attribute__ ((dllimport)) foo;
6308 int* bar () {return &foo;}
6309 int foo;
6310 */
6311 if (TREE_USED (old))
6312 {
6313 warning (0, "%q+D redeclared without dllimport attribute "
6314 "after being referenced with dll linkage", new_tree);
6315 /* If we have used a variable's address with dllimport linkage,
6316 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6317 decl may already have had TREE_CONSTANT computed.
6318 We still remove the attribute so that assembler code refers
6319 to `&foo' rather than `_imp__foo'. */
6320 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6321 DECL_DLLIMPORT_P (new_tree) = 1;
6322 }
6323
6324 /* Let an inline definition silently override the external reference,
6325 but otherwise warn about attribute inconsistency. */
6326 else if (TREE_CODE (new_tree) == VAR_DECL
6327 || !DECL_DECLARED_INLINE_P (new_tree))
6328 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6329 "previous dllimport ignored", new_tree);
6330 }
6331 else
6332 delete_dllimport_p = 0;
6333
6334 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6335
6336 if (delete_dllimport_p)
6337 a = remove_attribute ("dllimport", a);
6338
6339 return a;
6340 }
6341
6342 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6343 struct attribute_spec.handler. */
6344
6345 tree
6346 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6347 bool *no_add_attrs)
6348 {
6349 tree node = *pnode;
6350 bool is_dllimport;
6351
6352 /* These attributes may apply to structure and union types being created,
6353 but otherwise should pass to the declaration involved. */
6354 if (!DECL_P (node))
6355 {
6356 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6357 | (int) ATTR_FLAG_ARRAY_NEXT))
6358 {
6359 *no_add_attrs = true;
6360 return tree_cons (name, args, NULL_TREE);
6361 }
6362 if (TREE_CODE (node) == RECORD_TYPE
6363 || TREE_CODE (node) == UNION_TYPE)
6364 {
6365 node = TYPE_NAME (node);
6366 if (!node)
6367 return NULL_TREE;
6368 }
6369 else
6370 {
6371 warning (OPT_Wattributes, "%qE attribute ignored",
6372 name);
6373 *no_add_attrs = true;
6374 return NULL_TREE;
6375 }
6376 }
6377
6378 if (TREE_CODE (node) != FUNCTION_DECL
6379 && TREE_CODE (node) != VAR_DECL
6380 && TREE_CODE (node) != TYPE_DECL)
6381 {
6382 *no_add_attrs = true;
6383 warning (OPT_Wattributes, "%qE attribute ignored",
6384 name);
6385 return NULL_TREE;
6386 }
6387
6388 if (TREE_CODE (node) == TYPE_DECL
6389 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6390 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6391 {
6392 *no_add_attrs = true;
6393 warning (OPT_Wattributes, "%qE attribute ignored",
6394 name);
6395 return NULL_TREE;
6396 }
6397
6398 is_dllimport = is_attribute_p ("dllimport", name);
6399
6400 /* Report error on dllimport ambiguities seen now before they cause
6401 any damage. */
6402 if (is_dllimport)
6403 {
6404 /* Honor any target-specific overrides. */
6405 if (!targetm.valid_dllimport_attribute_p (node))
6406 *no_add_attrs = true;
6407
6408 else if (TREE_CODE (node) == FUNCTION_DECL
6409 && DECL_DECLARED_INLINE_P (node))
6410 {
6411 warning (OPT_Wattributes, "inline function %q+D declared as "
6412 " dllimport: attribute ignored", node);
6413 *no_add_attrs = true;
6414 }
6415 /* Like MS, treat definition of dllimported variables and
6416 non-inlined functions on declaration as syntax errors. */
6417 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6418 {
6419 error ("function %q+D definition is marked dllimport", node);
6420 *no_add_attrs = true;
6421 }
6422
6423 else if (TREE_CODE (node) == VAR_DECL)
6424 {
6425 if (DECL_INITIAL (node))
6426 {
6427 error ("variable %q+D definition is marked dllimport",
6428 node);
6429 *no_add_attrs = true;
6430 }
6431
6432 /* `extern' needn't be specified with dllimport.
6433 Specify `extern' now and hope for the best. Sigh. */
6434 DECL_EXTERNAL (node) = 1;
6435 /* Also, implicitly give dllimport'd variables declared within
6436 a function global scope, unless declared static. */
6437 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6438 TREE_PUBLIC (node) = 1;
6439 }
6440
6441 if (!*no_add_attrs)
6442 DECL_DLLIMPORT_P (node) = 1;
6443 }
6444 else if (TREE_CODE (node) == FUNCTION_DECL
6445 && DECL_DECLARED_INLINE_P (node)
6446 && flag_keep_inline_dllexport)
6447 /* An exported function, even if inline, must be emitted. */
6448 DECL_EXTERNAL (node) = 0;
6449
6450 /* Report error if symbol is not accessible at global scope. */
6451 if (!TREE_PUBLIC (node)
6452 && (TREE_CODE (node) == VAR_DECL
6453 || TREE_CODE (node) == FUNCTION_DECL))
6454 {
6455 error ("external linkage required for symbol %q+D because of "
6456 "%qE attribute", node, name);
6457 *no_add_attrs = true;
6458 }
6459
6460 /* A dllexport'd entity must have default visibility so that other
6461 program units (shared libraries or the main executable) can see
6462 it. A dllimport'd entity must have default visibility so that
6463 the linker knows that undefined references within this program
6464 unit can be resolved by the dynamic linker. */
6465 if (!*no_add_attrs)
6466 {
6467 if (DECL_VISIBILITY_SPECIFIED (node)
6468 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6469 error ("%qE implies default visibility, but %qD has already "
6470 "been declared with a different visibility",
6471 name, node);
6472 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6473 DECL_VISIBILITY_SPECIFIED (node) = 1;
6474 }
6475
6476 return NULL_TREE;
6477 }
6478
6479 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6480 \f
6481 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6482 of the various TYPE_QUAL values. */
6483
6484 static void
6485 set_type_quals (tree type, int type_quals)
6486 {
6487 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6488 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6489 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6490 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6491 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6492 }
6493
6494 /* Returns true iff unqualified CAND and BASE are equivalent. */
6495
6496 bool
6497 check_base_type (const_tree cand, const_tree base)
6498 {
6499 return (TYPE_NAME (cand) == TYPE_NAME (base)
6500 /* Apparently this is needed for Objective-C. */
6501 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6502 /* Check alignment. */
6503 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6504 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6505 TYPE_ATTRIBUTES (base)));
6506 }
6507
6508 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6509
6510 bool
6511 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6512 {
6513 return (TYPE_QUALS (cand) == type_quals
6514 && check_base_type (cand, base));
6515 }
6516
6517 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6518
6519 static bool
6520 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6521 {
6522 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6523 && TYPE_NAME (cand) == TYPE_NAME (base)
6524 /* Apparently this is needed for Objective-C. */
6525 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6526 /* Check alignment. */
6527 && TYPE_ALIGN (cand) == align
6528 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6529 TYPE_ATTRIBUTES (base)));
6530 }
6531
6532 /* This function checks to see if TYPE matches the size of one of the
6533 built-in atomic types, and returns that core atomic type. */
6534
6535 static tree
6536 find_atomic_core_type (tree type)
6537 {
6538 tree base_atomic_type;
6539
6540 /* Only handle complete types. */
6541 if (TYPE_SIZE (type) == NULL_TREE)
6542 return NULL_TREE;
6543
6544 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6545 switch (type_size)
6546 {
6547 case 8:
6548 base_atomic_type = atomicQI_type_node;
6549 break;
6550
6551 case 16:
6552 base_atomic_type = atomicHI_type_node;
6553 break;
6554
6555 case 32:
6556 base_atomic_type = atomicSI_type_node;
6557 break;
6558
6559 case 64:
6560 base_atomic_type = atomicDI_type_node;
6561 break;
6562
6563 case 128:
6564 base_atomic_type = atomicTI_type_node;
6565 break;
6566
6567 default:
6568 base_atomic_type = NULL_TREE;
6569 }
6570
6571 return base_atomic_type;
6572 }
6573
6574 /* Return a version of TYPE, qualified as indicated by TYPE_QUALS,
6575 if one exists. If no qualified version exists yet,
6576 return NULL_TREE. */
6577
6578 tree
6579 get_qualified_type (tree type, int type_quals)
6580 {
6581 tree t;
6582
6583 if (TYPE_QUALS (type) == type_quals)
6584 return type;
6585
6586 /* Search the chain of variants to see if there is already one there just
6587 like the one we need to have. If so, use that existing one. We must
6588 preserve the TYPE_NAME, since there is code that depends on this. */
6589 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6590 if (check_qualified_type (t, type, type_quals))
6591 return t;
6592
6593 return NULL_TREE;
6594 }
6595
6596 /* Like get_qualified_type, but creates the type if it does not
6597 exist. This function never returns NULL_TREE. */
6598
6599 tree
6600 build_qualified_type (tree type, int type_quals)
6601 {
6602 tree t;
6603
6604 /* See if we already have the appropriate qualified variant. */
6605 t = get_qualified_type (type, type_quals);
6606
6607 /* If not, build it. */
6608 if (!t)
6609 {
6610 t = build_variant_type_copy (type);
6611 set_type_quals (t, type_quals);
6612
6613 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6614 {
6615 /* See if this object can map to a basic atomic type. */
6616 tree atomic_type = find_atomic_core_type (type);
6617 if (atomic_type)
6618 {
6619 /* Ensure the alignment of this type is compatible with
6620 the required alignment of the atomic type. */
6621 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6622 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6623 }
6624 }
6625
6626 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6627 /* Propagate structural equality. */
6628 SET_TYPE_STRUCTURAL_EQUALITY (t);
6629 else if (TYPE_CANONICAL (type) != type)
6630 /* Build the underlying canonical type, since it is different
6631 from TYPE. */
6632 {
6633 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6634 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6635 }
6636 else
6637 /* T is its own canonical type. */
6638 TYPE_CANONICAL (t) = t;
6639
6640 }
6641
6642 return t;
6643 }
6644
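/* An illustrative sketch (not part of GCC): asking twice for the same
   qualified variant yields the same node, e.g.

     tree c1 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     tree c2 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);

   Here c1 == c2, because the second call finds the variant built by the
   first one on the TYPE_NEXT_VARIANT chain via get_qualified_type.  */
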
6645 /* Create a variant of TYPE with alignment ALIGN. */
6646
6647 tree
6648 build_aligned_type (tree type, unsigned int align)
6649 {
6650 tree t;
6651
6652 if (TYPE_PACKED (type)
6653 || TYPE_ALIGN (type) == align)
6654 return type;
6655
6656 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6657 if (check_aligned_type (t, type, align))
6658 return t;
6659
6660 t = build_variant_type_copy (type);
6661 TYPE_ALIGN (t) = align;
6662
6663 return t;
6664 }
6665
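/* An illustrative sketch (not part of GCC): ALIGN is measured in bits, so
   a 16-byte aligned variant of a hypothetical type TYPE is obtained with

     tree aligned_ty = build_aligned_type (type, 128);

   Packed types and types that already have the requested alignment are
   returned unchanged.  */
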
6666 /* Create a new distinct copy of TYPE. The new type is made its own
6667 MAIN_VARIANT. If TYPE requires structural equality checks, the
6668 resulting type requires structural equality checks; otherwise, its
6669 TYPE_CANONICAL points to itself. */
6670
6671 tree
6672 build_distinct_type_copy (tree type)
6673 {
6674 tree t = copy_node (type);
6675
6676 TYPE_POINTER_TO (t) = 0;
6677 TYPE_REFERENCE_TO (t) = 0;
6678
6679 /* Set the canonical type either to a new equivalence class, or
6680 propagate the need for structural equality checks. */
6681 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6682 SET_TYPE_STRUCTURAL_EQUALITY (t);
6683 else
6684 TYPE_CANONICAL (t) = t;
6685
6686 /* Make it its own variant. */
6687 TYPE_MAIN_VARIANT (t) = t;
6688 TYPE_NEXT_VARIANT (t) = 0;
6689
6690 /* We do not record methods in type copies nor variants,
6691 so we do not need to keep them up to date when a new method
6692 is inserted. */
6693 if (RECORD_OR_UNION_TYPE_P (t))
6694 TYPE_METHODS (t) = NULL_TREE;
6695
6696 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6697 whose TREE_TYPE is not t. This can also happen in the Ada
6698 frontend when using subtypes. */
6699
6700 return t;
6701 }
6702
6703 /* Create a new variant of TYPE, equivalent but distinct. This is so
6704 the caller can modify it. TYPE_CANONICAL for the return type will
6705 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6706 are considered equal by the language itself (or that both types
6707 require structural equality checks). */
6708
6709 tree
6710 build_variant_type_copy (tree type)
6711 {
6712 tree t, m = TYPE_MAIN_VARIANT (type);
6713
6714 t = build_distinct_type_copy (type);
6715
6716 /* Since we're building a variant, assume that it is a non-semantic
6717 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6718 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6719
6720 /* Add the new type to the chain of variants of TYPE. */
6721 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6722 TYPE_NEXT_VARIANT (m) = t;
6723 TYPE_MAIN_VARIANT (t) = m;
6724
6725 return t;
6726 }
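
/* An illustrative sketch (not part of GCC) of how the two copy routines
   differ for a hypothetical type TYPE:

     tree d = build_distinct_type_copy (type);
     tree v = build_variant_type_copy (type);

   D becomes its own main variant with its own TYPE_CANONICAL, so it is a
   new type as far as type identity is concerned; V stays on TYPE's
   variant chain and shares its TYPE_CANONICAL, so it is only a
   differently-decorated view of the same type.  */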
6727 \f
6728 /* Return true if the from trees in both tree maps are equal. */
6729
6730 int
6731 tree_map_base_eq (const void *va, const void *vb)
6732 {
6733 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6734 *const b = (const struct tree_map_base *) vb;
6735 return (a->from == b->from);
6736 }
6737
6738 /* Hash a from tree in a tree_map_base. */
6739
6740 unsigned int
6741 tree_map_base_hash (const void *item)
6742 {
6743 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6744 }
6745
6746 /* Return true if this tree map structure is marked for garbage collection
6747 purposes. We simply return true if the from tree is marked, so that this
6748 structure goes away when the from tree goes away. */
6749
6750 int
6751 tree_map_base_marked_p (const void *p)
6752 {
6753 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6754 }
6755
6756 /* Hash a from tree in a tree_map. */
6757
6758 unsigned int
6759 tree_map_hash (const void *item)
6760 {
6761 return (((const struct tree_map *) item)->hash);
6762 }
6763
6764 /* Hash a from tree in a tree_decl_map. */
6765
6766 unsigned int
6767 tree_decl_map_hash (const void *item)
6768 {
6769 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6770 }
6771
6772 /* Return the initialization priority for DECL. */
6773
6774 priority_type
6775 decl_init_priority_lookup (tree decl)
6776 {
6777 symtab_node *snode = symtab_node::get (decl);
6778
6779 if (!snode)
6780 return DEFAULT_INIT_PRIORITY;
6781 return
6782 snode->get_init_priority ();
6783 }
6784
6785 /* Return the finalization priority for DECL. */
6786
6787 priority_type
6788 decl_fini_priority_lookup (tree decl)
6789 {
6790 cgraph_node *node = cgraph_node::get (decl);
6791
6792 if (!node)
6793 return DEFAULT_INIT_PRIORITY;
6794 return
6795 node->get_fini_priority ();
6796 }
6797
6798 /* Set the initialization priority for DECL to PRIORITY. */
6799
6800 void
6801 decl_init_priority_insert (tree decl, priority_type priority)
6802 {
6803 struct symtab_node *snode;
6804
6805 if (priority == DEFAULT_INIT_PRIORITY)
6806 {
6807 snode = symtab_node::get (decl);
6808 if (!snode)
6809 return;
6810 }
6811 else if (TREE_CODE (decl) == VAR_DECL)
6812 snode = varpool_node::get_create (decl);
6813 else
6814 snode = cgraph_node::get_create (decl);
6815 snode->set_init_priority (priority);
6816 }
6817
6818 /* Set the finalization priority for DECL to PRIORITY. */
6819
6820 void
6821 decl_fini_priority_insert (tree decl, priority_type priority)
6822 {
6823 struct cgraph_node *node;
6824
6825 if (priority == DEFAULT_INIT_PRIORITY)
6826 {
6827 node = cgraph_node::get (decl);
6828 if (!node)
6829 return;
6830 }
6831 else
6832 node = cgraph_node::get_create (decl);
6833 node->set_fini_priority (priority);
6834 }
6835
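/* An illustrative sketch (not part of GCC): the priorities live on the
   symbol table node rather than on the decl, so a lookup after an insert
   round-trips through the symtab.  For a hypothetical function decl
   FNDECL that is already in the callgraph:

     decl_init_priority_insert (fndecl, 101);
     gcc_assert (decl_init_priority_lookup (fndecl) == 101);

   Inserting DEFAULT_INIT_PRIORITY is a no-op unless a symtab node
   already exists.  */
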
6836 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6837
6838 static void
6839 print_debug_expr_statistics (void)
6840 {
6841 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6842 (long) debug_expr_for_decl->size (),
6843 (long) debug_expr_for_decl->elements (),
6844 debug_expr_for_decl->collisions ());
6845 }
6846
6847 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6848
6849 static void
6850 print_value_expr_statistics (void)
6851 {
6852 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6853 (long) value_expr_for_decl->size (),
6854 (long) value_expr_for_decl->elements (),
6855 value_expr_for_decl->collisions ());
6856 }
6857
6858 /* Lookup a debug expression for FROM, and return it if we find one. */
6859
6860 tree
6861 decl_debug_expr_lookup (tree from)
6862 {
6863 struct tree_decl_map *h, in;
6864 in.base.from = from;
6865
6866 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6867 if (h)
6868 return h->to;
6869 return NULL_TREE;
6870 }
6871
6872 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6873
6874 void
6875 decl_debug_expr_insert (tree from, tree to)
6876 {
6877 struct tree_decl_map *h;
6878
6879 h = ggc_alloc<tree_decl_map> ();
6880 h->base.from = from;
6881 h->to = to;
6882 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6883 }
6884
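/* An illustrative sketch (not part of GCC): the table maps a decl to an
   expression keyed by DECL_UID, so a lookup after an insert returns the
   stored expression.  For hypothetical trees VAR and EXPR:

     decl_debug_expr_insert (var, expr);
     gcc_assert (decl_debug_expr_lookup (var) == expr);

   Callers normally go through the DECL_DEBUG_EXPR and SET_DECL_DEBUG_EXPR
   accessors rather than calling these functions directly.  */
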
6885 /* Lookup a value expression for FROM, and return it if we find one. */
6886
6887 tree
6888 decl_value_expr_lookup (tree from)
6889 {
6890 struct tree_decl_map *h, in;
6891 in.base.from = from;
6892
6893 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6894 if (h)
6895 return h->to;
6896 return NULL_TREE;
6897 }
6898
6899 /* Insert a mapping FROM->TO in the value expression hashtable. */
6900
6901 void
6902 decl_value_expr_insert (tree from, tree to)
6903 {
6904 struct tree_decl_map *h;
6905
6906 h = ggc_alloc<tree_decl_map> ();
6907 h->base.from = from;
6908 h->to = to;
6909 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6910 }
6911
6912 /* Lookup a vector of debug arguments for FROM, and return it if we
6913 find one. */
6914
6915 vec<tree, va_gc> **
6916 decl_debug_args_lookup (tree from)
6917 {
6918 struct tree_vec_map *h, in;
6919
6920 if (!DECL_HAS_DEBUG_ARGS_P (from))
6921 return NULL;
6922 gcc_checking_assert (debug_args_for_decl != NULL);
6923 in.base.from = from;
6924 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6925 if (h)
6926 return &h->to;
6927 return NULL;
6928 }
6929
6930 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6931 arguments hashtable. */
6932
6933 vec<tree, va_gc> **
6934 decl_debug_args_insert (tree from)
6935 {
6936 struct tree_vec_map *h;
6937 tree_vec_map **loc;
6938
6939 if (DECL_HAS_DEBUG_ARGS_P (from))
6940 return decl_debug_args_lookup (from);
6941 if (debug_args_for_decl == NULL)
6942 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6943 h = ggc_alloc<tree_vec_map> ();
6944 h->base.from = from;
6945 h->to = NULL;
6946 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6947 *loc = h;
6948 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6949 return &h->to;
6950 }
6951
6952 /* Hashing of types so that we don't make duplicates.
6953 The entry point is `type_hash_canon'. */
6954
6955 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6956 with types in the TREE_VALUE slots), by adding the hash codes
6957 of the individual types. */
6958
6959 static void
6960 type_hash_list (const_tree list, inchash::hash &hstate)
6961 {
6962 const_tree tail;
6963
6964 for (tail = list; tail; tail = TREE_CHAIN (tail))
6965 if (TREE_VALUE (tail) != error_mark_node)
6966 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6967 }
6968
6969 /* These are the Hashtable callback functions. */
6970
6971 /* Returns true iff the types are equivalent. */
6972
6973 bool
6974 type_cache_hasher::equal (type_hash *a, type_hash *b)
6975 {
6976 /* First test the things that are the same for all types. */
6977 if (a->hash != b->hash
6978 || TREE_CODE (a->type) != TREE_CODE (b->type)
6979 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6980 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6981 TYPE_ATTRIBUTES (b->type))
6982 || (TREE_CODE (a->type) != COMPLEX_TYPE
6983 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6984 return 0;
6985
6986 /* Be careful about comparing arrays before and after the element type
6987 has been completed; don't compare TYPE_ALIGN unless both types are
6988 complete. */
6989 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6990 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6991 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6992 return 0;
6993
6994 switch (TREE_CODE (a->type))
6995 {
6996 case VOID_TYPE:
6997 case COMPLEX_TYPE:
6998 case POINTER_TYPE:
6999 case REFERENCE_TYPE:
7000 case NULLPTR_TYPE:
7001 return 1;
7002
7003 case VECTOR_TYPE:
7004 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
7005
7006 case ENUMERAL_TYPE:
7007 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7008 && !(TYPE_VALUES (a->type)
7009 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7010 && TYPE_VALUES (b->type)
7011 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7012 && type_list_equal (TYPE_VALUES (a->type),
7013 TYPE_VALUES (b->type))))
7014 return 0;
7015
7016 /* ... fall through ... */
7017
7018 case INTEGER_TYPE:
7019 case REAL_TYPE:
7020 case BOOLEAN_TYPE:
7021 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7022 return false;
7023 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7024 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7025 TYPE_MAX_VALUE (b->type)))
7026 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7027 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7028 TYPE_MIN_VALUE (b->type))));
7029
7030 case FIXED_POINT_TYPE:
7031 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7032
7033 case OFFSET_TYPE:
7034 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7035
7036 case METHOD_TYPE:
7037 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7038 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7039 || (TYPE_ARG_TYPES (a->type)
7040 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7041 && TYPE_ARG_TYPES (b->type)
7042 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7043 && type_list_equal (TYPE_ARG_TYPES (a->type),
7044 TYPE_ARG_TYPES (b->type)))))
7045 break;
7046 return 0;
7047 case ARRAY_TYPE:
7048 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7049
7050 case RECORD_TYPE:
7051 case UNION_TYPE:
7052 case QUAL_UNION_TYPE:
7053 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7054 || (TYPE_FIELDS (a->type)
7055 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7056 && TYPE_FIELDS (b->type)
7057 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7058 && type_list_equal (TYPE_FIELDS (a->type),
7059 TYPE_FIELDS (b->type))));
7060
7061 case FUNCTION_TYPE:
7062 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7063 || (TYPE_ARG_TYPES (a->type)
7064 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7065 && TYPE_ARG_TYPES (b->type)
7066 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7067 && type_list_equal (TYPE_ARG_TYPES (a->type),
7068 TYPE_ARG_TYPES (b->type))))
7069 break;
7070 return 0;
7071
7072 default:
7073 return 0;
7074 }
7075
7076 if (lang_hooks.types.type_hash_eq != NULL)
7077 return lang_hooks.types.type_hash_eq (a->type, b->type);
7078
7079 return 1;
7080 }
7081
7082 /* Given TYPE, and HASHCODE its hash code, return the canonical
7083 object for an identical type if one already exists.
7084 Otherwise, return TYPE, and record it as the canonical object.
7085
7086 To use this function, first create a type of the sort you want.
7087 Then compute its hash code from the fields of the type that
7088 make it different from other similar types.
7089 Then call this function and use the value. */
7090
7091 tree
7092 type_hash_canon (unsigned int hashcode, tree type)
7093 {
7094 type_hash in;
7095 type_hash **loc;
7096
7097 /* The hash table only contains main variants, so ensure that's what we're
7098 being passed. */
7099 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7100
7101 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7102 must call that routine before comparing TYPE_ALIGNs. */
7103 layout_type (type);
7104
7105 in.hash = hashcode;
7106 in.type = type;
7107
7108 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7109 if (*loc)
7110 {
7111 tree t1 = ((type_hash *) *loc)->type;
7112 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7113 if (GATHER_STATISTICS)
7114 {
7115 tree_code_counts[(int) TREE_CODE (type)]--;
7116 tree_node_counts[(int) t_kind]--;
7117 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7118 }
7119 return t1;
7120 }
7121 else
7122 {
7123 struct type_hash *h;
7124
7125 h = ggc_alloc<type_hash> ();
7126 h->hash = hashcode;
7127 h->type = type;
7128 *loc = h;
7129
7130 return type;
7131 }
7132 }
7133
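/* An illustrative sketch (not part of GCC) of the usual calling pattern,
   roughly as the FUNCTION_TYPE constructor elsewhere in this file does it,
   for hypothetical VALUE_TYPE and ARG_TYPES:

     tree t = make_node (FUNCTION_TYPE);
     TREE_TYPE (t) = value_type;
     TYPE_ARG_TYPES (t) = arg_types;

     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (value_type));
     type_hash_list (arg_types, hstate);
     t = type_hash_canon (hstate.end (), t);

   If an identical type was already canonicalized, T now points at that
   earlier node and the freshly built candidate is abandoned.  */
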
7134 static void
7135 print_type_hash_statistics (void)
7136 {
7137 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7138 (long) type_hash_table->size (),
7139 (long) type_hash_table->elements (),
7140 type_hash_table->collisions ());
7141 }
7142
7143 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7144 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7145 by adding the hash codes of the individual attributes. */
7146
7147 static void
7148 attribute_hash_list (const_tree list, inchash::hash &hstate)
7149 {
7150 const_tree tail;
7151
7152 for (tail = list; tail; tail = TREE_CHAIN (tail))
7153 /* ??? Do we want to add in TREE_VALUE too? */
7154 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7155 }
7156
7157 /* Given two lists of attributes, return true if list L2 is
7158 equivalent to L1. */
7159
7160 int
7161 attribute_list_equal (const_tree l1, const_tree l2)
7162 {
7163 if (l1 == l2)
7164 return 1;
7165
7166 return attribute_list_contained (l1, l2)
7167 && attribute_list_contained (l2, l1);
7168 }
7169
7170 /* Given two lists of attributes, return true if list L2 is
7171 completely contained within L1. */
7172 /* ??? This would be faster if attribute names were stored in a canonicalized
7173 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7174 must be used to show these elements are equivalent (which they are). */
7175 /* ??? It's not clear that attributes with arguments will always be handled
7176 correctly. */
7177
7178 int
7179 attribute_list_contained (const_tree l1, const_tree l2)
7180 {
7181 const_tree t1, t2;
7182
7183 /* First check the obvious, maybe the lists are identical. */
7184 if (l1 == l2)
7185 return 1;
7186
7187 /* Maybe the lists are similar. */
7188 for (t1 = l1, t2 = l2;
7189 t1 != 0 && t2 != 0
7190 && get_attribute_name (t1) == get_attribute_name (t2)
7191 && TREE_VALUE (t1) == TREE_VALUE (t2);
7192 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7193 ;
7194
7195 /* Maybe the lists are equal. */
7196 if (t1 == 0 && t2 == 0)
7197 return 1;
7198
7199 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7200 {
7201 const_tree attr;
7202 /* This CONST_CAST is okay because lookup_attribute does not
7203 modify its argument and the return value is assigned to a
7204 const_tree. */
7205 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7206 CONST_CAST_TREE (l1));
7207 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7208 attr = lookup_ident_attribute (get_attribute_name (t2),
7209 TREE_CHAIN (attr)))
7210 ;
7211
7212 if (attr == NULL_TREE)
7213 return 0;
7214 }
7215
7216 return 1;
7217 }
7218
7219 /* Given two lists of types
7220 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7221 return 1 if the lists contain the same types in the same order.
7222 Also, the TREE_PURPOSEs must match. */
7223
7224 int
7225 type_list_equal (const_tree l1, const_tree l2)
7226 {
7227 const_tree t1, t2;
7228
7229 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7230 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7231 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7232 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7233 && (TREE_TYPE (TREE_PURPOSE (t1))
7234 == TREE_TYPE (TREE_PURPOSE (t2))))))
7235 return 0;
7236
7237 return t1 == t2;
7238 }
7239
7240 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7241 given by TYPE. If the argument list accepts variable arguments,
7242 then this function counts only the ordinary arguments. */
7243
7244 int
7245 type_num_arguments (const_tree type)
7246 {
7247 int i = 0;
7248 tree t;
7249
7250 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7251 /* If the function does not take a variable number of arguments,
7252 the last element in the list will have type `void'. */
7253 if (VOID_TYPE_P (TREE_VALUE (t)))
7254 break;
7255 else
7256 ++i;
7257
7258 return i;
7259 }
7260
7261 /* Nonzero if integer constants T1 and T2
7262 represent the same constant value. */
7263
7264 int
7265 tree_int_cst_equal (const_tree t1, const_tree t2)
7266 {
7267 if (t1 == t2)
7268 return 1;
7269
7270 if (t1 == 0 || t2 == 0)
7271 return 0;
7272
7273 if (TREE_CODE (t1) == INTEGER_CST
7274 && TREE_CODE (t2) == INTEGER_CST
7275 && wi::to_widest (t1) == wi::to_widest (t2))
7276 return 1;
7277
7278 return 0;
7279 }
7280
7281 /* Return true if T is an INTEGER_CST whose numerical value (extended
7282 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7283
7284 bool
7285 tree_fits_shwi_p (const_tree t)
7286 {
7287 return (t != NULL_TREE
7288 && TREE_CODE (t) == INTEGER_CST
7289 && wi::fits_shwi_p (wi::to_widest (t)));
7290 }
7291
7292 /* Return true if T is an INTEGER_CST whose numerical value (extended
7293 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7294
7295 bool
7296 tree_fits_uhwi_p (const_tree t)
7297 {
7298 return (t != NULL_TREE
7299 && TREE_CODE (t) == INTEGER_CST
7300 && wi::fits_uhwi_p (wi::to_widest (t)));
7301 }
7302
7303 /* T is an INTEGER_CST whose numerical value (extended according to
7304 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7305 HOST_WIDE_INT. */
7306
7307 HOST_WIDE_INT
7308 tree_to_shwi (const_tree t)
7309 {
7310 gcc_assert (tree_fits_shwi_p (t));
7311 return TREE_INT_CST_LOW (t);
7312 }
7313
7314 /* T is an INTEGER_CST whose numerical value (extended according to
7315 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7316 HOST_WIDE_INT. */
7317
7318 unsigned HOST_WIDE_INT
7319 tree_to_uhwi (const_tree t)
7320 {
7321 gcc_assert (tree_fits_uhwi_p (t));
7322 return TREE_INT_CST_LOW (t);
7323 }
7324
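/* An illustrative sketch (not part of GCC): the tree_fits_* predicates
   guard the tree_to_* accessors, which assert if the value does not fit.
   For a hypothetical INTEGER_CST SIZE:

     if (tree_fits_uhwi_p (size))
       {
         unsigned HOST_WIDE_INT bytes = tree_to_uhwi (size);
         ...
       }

   Values that fail the predicate can be handled via the wide-int
   interfaces (wi::) instead.  */
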
7325 /* Return the most significant (sign) bit of T. */
7326
7327 int
7328 tree_int_cst_sign_bit (const_tree t)
7329 {
7330 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7331
7332 return wi::extract_uhwi (t, bitno, 1);
7333 }
7334
7335 /* Return an indication of the sign of the integer constant T.
7336 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7337 Note that -1 will never be returned if T's type is unsigned. */
7338
7339 int
7340 tree_int_cst_sgn (const_tree t)
7341 {
7342 if (wi::eq_p (t, 0))
7343 return 0;
7344 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7345 return 1;
7346 else if (wi::neg_p (t))
7347 return -1;
7348 else
7349 return 1;
7350 }
7351
7352 /* Return the minimum number of bits needed to represent VALUE in a
7353 signed or unsigned type; SGN says which. */
7354
7355 unsigned int
7356 tree_int_cst_min_precision (tree value, signop sgn)
7357 {
7358 /* If the value is negative, compute its negative minus 1. The latter
7359 adjustment is because the absolute value of the largest negative value
7360 is one larger than the largest positive value. This is equivalent to
7361 a bit-wise negation, so use that operation instead. */
7362
7363 if (tree_int_cst_sgn (value) < 0)
7364 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7365
7366 /* Return the number of bits needed, taking into account the fact
7367 that we need one more bit for a signed than unsigned type.
7368 If VALUE is 0 or -1, the minimum precision is 1 no matter
7369 whether SGN is SIGNED or UNSIGNED. */
7370
7371 if (integer_zerop (value))
7372 return 1;
7373 else
7374 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7375 }
7376
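/* A worked example (illustrative, not part of GCC): the value 5 needs
   tree_floor_log2 (5) + 1 = 3 bits as an unsigned quantity and one more,
   4 bits, as a signed quantity.  The value -3 is first bit-wise negated
   to 2, which then needs 2 + 1 = 3 bits when SGN is SIGNED.  */
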
7377 /* Return truthvalue of whether T1 is the same tree structure as T2.
7378 Return 1 if they are the same.
7379 Return 0 if they are understandably different.
7380 Return -1 if either contains tree structure not understood by
7381 this function. */
7382
7383 int
7384 simple_cst_equal (const_tree t1, const_tree t2)
7385 {
7386 enum tree_code code1, code2;
7387 int cmp;
7388 int i;
7389
7390 if (t1 == t2)
7391 return 1;
7392 if (t1 == 0 || t2 == 0)
7393 return 0;
7394
7395 code1 = TREE_CODE (t1);
7396 code2 = TREE_CODE (t2);
7397
7398 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7399 {
7400 if (CONVERT_EXPR_CODE_P (code2)
7401 || code2 == NON_LVALUE_EXPR)
7402 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7403 else
7404 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7405 }
7406
7407 else if (CONVERT_EXPR_CODE_P (code2)
7408 || code2 == NON_LVALUE_EXPR)
7409 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7410
7411 if (code1 != code2)
7412 return 0;
7413
7414 switch (code1)
7415 {
7416 case INTEGER_CST:
7417 return wi::to_widest (t1) == wi::to_widest (t2);
7418
7419 case REAL_CST:
7420 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7421
7422 case FIXED_CST:
7423 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7424
7425 case STRING_CST:
7426 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7427 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7428 TREE_STRING_LENGTH (t1)));
7429
7430 case CONSTRUCTOR:
7431 {
7432 unsigned HOST_WIDE_INT idx;
7433 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7434 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7435
7436 if (vec_safe_length (v1) != vec_safe_length (v2))
7437 return false;
7438
7439 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7440 /* ??? Should we also handle fields here? */
7441 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7442 return false;
7443 return true;
7444 }
7445
7446 case SAVE_EXPR:
7447 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7448
7449 case CALL_EXPR:
7450 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7451 if (cmp <= 0)
7452 return cmp;
7453 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7454 return 0;
7455 {
7456 const_tree arg1, arg2;
7457 const_call_expr_arg_iterator iter1, iter2;
7458 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7459 arg2 = first_const_call_expr_arg (t2, &iter2);
7460 arg1 && arg2;
7461 arg1 = next_const_call_expr_arg (&iter1),
7462 arg2 = next_const_call_expr_arg (&iter2))
7463 {
7464 cmp = simple_cst_equal (arg1, arg2);
7465 if (cmp <= 0)
7466 return cmp;
7467 }
7468 return arg1 == arg2;
7469 }
7470
7471 case TARGET_EXPR:
7472 /* Special case: if either target is an unallocated VAR_DECL,
7473 it means that it's going to be unified with whatever the
7474 TARGET_EXPR is really supposed to initialize, so treat it
7475 as being equivalent to anything. */
7476 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7477 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7478 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7479 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7480 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7481 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7482 cmp = 1;
7483 else
7484 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7485
7486 if (cmp <= 0)
7487 return cmp;
7488
7489 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7490
7491 case WITH_CLEANUP_EXPR:
7492 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7493 if (cmp <= 0)
7494 return cmp;
7495
7496 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7497
7498 case COMPONENT_REF:
7499 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7500 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7501
7502 return 0;
7503
7504 case VAR_DECL:
7505 case PARM_DECL:
7506 case CONST_DECL:
7507 case FUNCTION_DECL:
7508 return 0;
7509
7510 default:
7511 break;
7512 }
7513
7514 /* This general rule works for most tree codes. All exceptions should be
7515 handled above. If this is a language-specific tree code, we can't
7516 trust what might be in the operand, so say we don't know
7517 the situation. */
7518 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7519 return -1;
7520
7521 switch (TREE_CODE_CLASS (code1))
7522 {
7523 case tcc_unary:
7524 case tcc_binary:
7525 case tcc_comparison:
7526 case tcc_expression:
7527 case tcc_reference:
7528 case tcc_statement:
7529 cmp = 1;
7530 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7531 {
7532 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7533 if (cmp <= 0)
7534 return cmp;
7535 }
7536
7537 return cmp;
7538
7539 default:
7540 return -1;
7541 }
7542 }
7543
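/* An illustrative sketch (not part of GCC): because the result is
   tri-state, callers must test for a positive value rather than for
   nonzero.  For hypothetical trees A and B:

     if (simple_cst_equal (a, b) == 1)
       ...

   only then is the structure known to be the same; -1 merely means the
   comparison gave up on a language-specific node.  */
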
7544 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7545 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7546 than U, respectively. */
7547
7548 int
7549 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7550 {
7551 if (tree_int_cst_sgn (t) < 0)
7552 return -1;
7553 else if (!tree_fits_uhwi_p (t))
7554 return 1;
7555 else if (TREE_INT_CST_LOW (t) == u)
7556 return 0;
7557 else if (TREE_INT_CST_LOW (t) < u)
7558 return -1;
7559 else
7560 return 1;
7561 }
7562
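/* An illustrative sketch (not part of GCC): compare_tree_int avoids
   building a tree constant for the plain integer operand.  Checking
   whether a hypothetical INTEGER_CST LEN exceeds 4096:

     if (compare_tree_int (len, 4096) > 0)
       ...

   Negative constants always compare as smaller than U.  */
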
7563 /* Return true if SIZE represents a constant size that is in bounds of
7564 what the middle-end and the backend accept (covering not more than
7565 half of the address space). */
7566
7567 bool
7568 valid_constant_size_p (const_tree size)
7569 {
7570 if (! tree_fits_uhwi_p (size)
7571 || TREE_OVERFLOW (size)
7572 || tree_int_cst_sign_bit (size) != 0)
7573 return false;
7574 return true;
7575 }
7576
7577 /* Return the precision of the type, or for a complex or vector type the
7578 precision of the type of its elements. */
7579
7580 unsigned int
7581 element_precision (const_tree type)
7582 {
7583 if (!TYPE_P (type))
7584 type = TREE_TYPE (type);
7585 enum tree_code code = TREE_CODE (type);
7586 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7587 type = TREE_TYPE (type);
7588
7589 return TYPE_PRECISION (type);
7590 }
7591
7592 /* Return true if CODE represents an associative tree code. Otherwise
7593 return false. */
7594 bool
7595 associative_tree_code (enum tree_code code)
7596 {
7597 switch (code)
7598 {
7599 case BIT_IOR_EXPR:
7600 case BIT_AND_EXPR:
7601 case BIT_XOR_EXPR:
7602 case PLUS_EXPR:
7603 case MULT_EXPR:
7604 case MIN_EXPR:
7605 case MAX_EXPR:
7606 return true;
7607
7608 default:
7609 break;
7610 }
7611 return false;
7612 }
7613
7614 /* Return true if CODE represents a commutative tree code. Otherwise
7615 return false. */
7616 bool
7617 commutative_tree_code (enum tree_code code)
7618 {
7619 switch (code)
7620 {
7621 case PLUS_EXPR:
7622 case MULT_EXPR:
7623 case MULT_HIGHPART_EXPR:
7624 case MIN_EXPR:
7625 case MAX_EXPR:
7626 case BIT_IOR_EXPR:
7627 case BIT_XOR_EXPR:
7628 case BIT_AND_EXPR:
7629 case NE_EXPR:
7630 case EQ_EXPR:
7631 case UNORDERED_EXPR:
7632 case ORDERED_EXPR:
7633 case UNEQ_EXPR:
7634 case LTGT_EXPR:
7635 case TRUTH_AND_EXPR:
7636 case TRUTH_XOR_EXPR:
7637 case TRUTH_OR_EXPR:
7638 case WIDEN_MULT_EXPR:
7639 case VEC_WIDEN_MULT_HI_EXPR:
7640 case VEC_WIDEN_MULT_LO_EXPR:
7641 case VEC_WIDEN_MULT_EVEN_EXPR:
7642 case VEC_WIDEN_MULT_ODD_EXPR:
7643 return true;
7644
7645 default:
7646 break;
7647 }
7648 return false;
7649 }
7650
7651 /* Return true if CODE represents a ternary tree code for which the
7652 first two operands are commutative. Otherwise return false. */
7653 bool
7654 commutative_ternary_tree_code (enum tree_code code)
7655 {
7656 switch (code)
7657 {
7658 case WIDEN_MULT_PLUS_EXPR:
7659 case WIDEN_MULT_MINUS_EXPR:
7660 case DOT_PROD_EXPR:
7661 case FMA_EXPR:
7662 return true;
7663
7664 default:
7665 break;
7666 }
7667 return false;
7668 }
7669
7670 /* Returns true if CODE can overflow. */
7671
7672 bool
7673 operation_can_overflow (enum tree_code code)
7674 {
7675 switch (code)
7676 {
7677 case PLUS_EXPR:
7678 case MINUS_EXPR:
7679 case MULT_EXPR:
7680 case LSHIFT_EXPR:
7681 /* Can overflow in various ways. */
7682 return true;
7683 case TRUNC_DIV_EXPR:
7684 case EXACT_DIV_EXPR:
7685 case FLOOR_DIV_EXPR:
7686 case CEIL_DIV_EXPR:
7687 /* For INT_MIN / -1. */
7688 return true;
7689 case NEGATE_EXPR:
7690 case ABS_EXPR:
7691 /* For -INT_MIN. */
7692 return true;
7693 default:
7694 /* These operators cannot overflow. */
7695 return false;
7696 }
7697 }
7698
7699 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7700 -ftrapv doesn't generate trapping insns for CODE. */
7701
7702 bool
7703 operation_no_trapping_overflow (tree type, enum tree_code code)
7704 {
7705 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7706
7707 /* We don't generate instructions that trap on overflow for complex or vector
7708 types. */
7709 if (!INTEGRAL_TYPE_P (type))
7710 return true;
7711
7712 if (!TYPE_OVERFLOW_TRAPS (type))
7713 return true;
7714
7715 switch (code)
7716 {
7717 case PLUS_EXPR:
7718 case MINUS_EXPR:
7719 case MULT_EXPR:
7720 case NEGATE_EXPR:
7721 case ABS_EXPR:
7722 /* These operators can overflow, and -ftrapv generates trapping code for
7723 these. */
7724 return false;
7725 case TRUNC_DIV_EXPR:
7726 case EXACT_DIV_EXPR:
7727 case FLOOR_DIV_EXPR:
7728 case CEIL_DIV_EXPR:
7729 case LSHIFT_EXPR:
7730 /* These operators can overflow, but -ftrapv does not generate trapping
7731 code for these. */
7732 return true;
7733 default:
7734 /* These operators cannot overflow. */
7735 return true;
7736 }
7737 }
7738
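/* An illustrative sketch (not part of GCC): a pass deciding whether
   -ftrapv turns a signed addition into a potentially trapping operation
   can combine the two predicates above.  For a hypothetical integral
   type TYPE:

     bool may_trap = operation_can_overflow (PLUS_EXPR)
                     && !operation_no_trapping_overflow (type, PLUS_EXPR);

   operation_no_trapping_overflow requires ANY_INTEGRAL_TYPE_P (TYPE),
   as the assert at its head shows.  */
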
7739 namespace inchash
7740 {
7741
7742 /* Generate a hash value for an expression. This can be used iteratively
7743 by passing a previous result as the HSTATE argument.
7744
7745 This function is intended to produce the same hash for expressions which
7746 would compare equal using operand_equal_p. */
7747 void
7748 add_expr (const_tree t, inchash::hash &hstate)
7749 {
7750 int i;
7751 enum tree_code code;
7752 enum tree_code_class tclass;
7753
7754 if (t == NULL_TREE)
7755 {
7756 hstate.merge_hash (0);
7757 return;
7758 }
7759
7760 code = TREE_CODE (t);
7761
7762 switch (code)
7763 {
7764 /* Alas, constants aren't shared, so we can't rely on pointer
7765 identity. */
7766 case VOID_CST:
7767 hstate.merge_hash (0);
7768 return;
7769 case INTEGER_CST:
7770 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7771 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7772 return;
7773 case REAL_CST:
7774 {
7775 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7776 hstate.merge_hash (val2);
7777 return;
7778 }
7779 case FIXED_CST:
7780 {
7781 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7782 hstate.merge_hash (val2);
7783 return;
7784 }
7785 case STRING_CST:
7786 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7787 return;
7788 case COMPLEX_CST:
7789 inchash::add_expr (TREE_REALPART (t), hstate);
7790 inchash::add_expr (TREE_IMAGPART (t), hstate);
7791 return;
7792 case VECTOR_CST:
7793 {
7794 unsigned i;
7795 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7796 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7797 return;
7798 }
7799 case SSA_NAME:
7800 /* We can just compare by pointer. */
7801 hstate.add_wide_int (SSA_NAME_VERSION (t));
7802 return;
7803 case PLACEHOLDER_EXPR:
7804 /* The node itself doesn't matter. */
7805 return;
7806 case TREE_LIST:
7807 /* A list of expressions, for a CALL_EXPR or as the elements of a
7808 VECTOR_CST. */
7809 for (; t; t = TREE_CHAIN (t))
7810 inchash::add_expr (TREE_VALUE (t), hstate);
7811 return;
7812 case CONSTRUCTOR:
7813 {
7814 unsigned HOST_WIDE_INT idx;
7815 tree field, value;
7816 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7817 {
7818 inchash::add_expr (field, hstate);
7819 inchash::add_expr (value, hstate);
7820 }
7821 return;
7822 }
7823 case FUNCTION_DECL:
7824 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7825 Otherwise nodes that compare equal according to operand_equal_p might
7826 get different hash codes. However, don't do this for machine specific
7827 or front end builtins, since the function code is overloaded in those
7828 cases. */
7829 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7830 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7831 {
7832 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7833 code = TREE_CODE (t);
7834 }
7835 /* FALL THROUGH */
7836 default:
7837 tclass = TREE_CODE_CLASS (code);
7838
7839 if (tclass == tcc_declaration)
7840 {
7841 /* DECLs have a unique ID. */
7842 hstate.add_wide_int (DECL_UID (t));
7843 }
7844 else
7845 {
7846 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7847
7848 hstate.add_object (code);
7849
7850 /* Don't hash the type, that can lead to having nodes which
7851 compare equal according to operand_equal_p, but which
7852 have different hash codes. */
7853 if (CONVERT_EXPR_CODE_P (code)
7854 || code == NON_LVALUE_EXPR)
7855 {
7856 /* Make sure to include signedness in the hash computation. */
7857 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7858 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7859 }
7860
7861 else if (commutative_tree_code (code))
7862 {
7863 /* It's a commutative expression. We want to hash it the same
7864 however it appears. We do this by first hashing both operands
7865 and then rehashing based on the order of their independent
7866 hashes. */
7867 inchash::hash one, two;
7868 inchash::add_expr (TREE_OPERAND (t, 0), one);
7869 inchash::add_expr (TREE_OPERAND (t, 1), two);
7870 hstate.add_commutative (one, two);
7871 }
7872 else
7873 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7874 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7875 }
7876 return;
7877 }
7878 }
7879
7880 }
7881
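/* Usage sketch (hypothetical helper, not in the original file): reduce an
   expression to a single hash value with the routine above.  Expressions
   that compare equal under operand_equal_p are intended to hash
   identically, including commutative operands in either order.  */
#if 0
static hashval_t
example_hash_of_expr (const_tree t)
{
  inchash::hash hstate;
  inchash::add_expr (t, hstate);
  return hstate.end ();
}
#endif
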
7882 /* Constructors for pointer, array and function types.
7883 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7884 constructed by language-dependent code, not here.) */
7885
7886 /* Construct, lay out and return the type of pointers to TO_TYPE with
7887 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7888 reference all of memory. If such a type has already been
7889 constructed, reuse it. */
7890
7891 tree
7892 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7893 bool can_alias_all)
7894 {
7895 tree t;
7896 bool could_alias = can_alias_all;
7897
7898 if (to_type == error_mark_node)
7899 return error_mark_node;
7900
7901 /* If the pointed-to type has the may_alias attribute set, force
7902 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7903 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7904 can_alias_all = true;
7905
7906 /* In some cases, languages will have things that aren't a POINTER_TYPE
7907 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7908 In that case, return that type without regard to the rest of our
7909 operands.
7910
7911 ??? This is a kludge, but consistent with the way this function has
7912 always operated and there doesn't seem to be a good way to avoid this
7913 at the moment. */
7914 if (TYPE_POINTER_TO (to_type) != 0
7915 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7916 return TYPE_POINTER_TO (to_type);
7917
7918 /* First, if we already have a type for pointers to TO_TYPE and it's
7919 the proper mode, use it. */
7920 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7921 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7922 return t;
7923
7924 t = make_node (POINTER_TYPE);
7925
7926 TREE_TYPE (t) = to_type;
7927 SET_TYPE_MODE (t, mode);
7928 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7929 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7930 TYPE_POINTER_TO (to_type) = t;
7931
7932 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7933 SET_TYPE_STRUCTURAL_EQUALITY (t);
7934 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7935 TYPE_CANONICAL (t)
7936 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7937 mode, false);
7938
7939 /* Lay out the type. This function has many callers that are concerned
7940 with expression-construction, and this simplifies them all. */
7941 layout_type (t);
7942
7943 return t;
7944 }
7945
7946 /* By default build pointers in ptr_mode. */
7947
7948 tree
7949 build_pointer_type (tree to_type)
7950 {
7951 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7952 : TYPE_ADDR_SPACE (to_type);
7953 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7954 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7955 }
7956
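/* Sketch (assumed usage, helper name hypothetical): the common case is to
   build a pointer in the default ptr_mode via build_pointer_type; the
   _for_mode variant above is only needed for explicit pointer modes or
   non-default address spaces.  */
#if 0
static tree
example_int_pointer_type (void)
{
  /* Type of `int *'.  Repeated calls reuse the cached TYPE_POINTER_TO
     chain rather than building a new node.  */
  return build_pointer_type (integer_type_node);
}
#endif
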
7957 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7958
7959 tree
7960 build_reference_type_for_mode (tree to_type, machine_mode mode,
7961 bool can_alias_all)
7962 {
7963 tree t;
7964 bool could_alias = can_alias_all;
7965
7966 if (to_type == error_mark_node)
7967 return error_mark_node;
7968
7969 /* If the pointed-to type has the may_alias attribute set, force
7970 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7971 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7972 can_alias_all = true;
7973
7974 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7975 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7976 In that case, return that type without regard to the rest of our
7977 operands.
7978
7979 ??? This is a kludge, but consistent with the way this function has
7980 always operated and there doesn't seem to be a good way to avoid this
7981 at the moment. */
7982 if (TYPE_REFERENCE_TO (to_type) != 0
7983 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7984 return TYPE_REFERENCE_TO (to_type);
7985
7986 /* First, if we already have a type for pointers to TO_TYPE and it's
7987 the proper mode, use it. */
7988 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7989 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7990 return t;
7991
7992 t = make_node (REFERENCE_TYPE);
7993
7994 TREE_TYPE (t) = to_type;
7995 SET_TYPE_MODE (t, mode);
7996 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7997 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7998 TYPE_REFERENCE_TO (to_type) = t;
7999
8000 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
8001 SET_TYPE_STRUCTURAL_EQUALITY (t);
8002 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8003 TYPE_CANONICAL (t)
8004 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8005 mode, false);
8006
8007 layout_type (t);
8008
8009 return t;
8010 }
8011
8012
8013 /* Build the node for the type of references-to-TO_TYPE by default
8014 in ptr_mode. */
8015
8016 tree
8017 build_reference_type (tree to_type)
8018 {
8019 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
8020 : TYPE_ADDR_SPACE (to_type);
8021 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8022 return build_reference_type_for_mode (to_type, pointer_mode, false);
8023 }
8024
8025 #define MAX_INT_CACHED_PREC \
8026 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8027 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8028
8029 /* Builds a signed or unsigned integer type of precision PRECISION.
8030 Used for C bitfields whose precision does not match that of
8031 built-in target types. */
8032 tree
8033 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8034 int unsignedp)
8035 {
8036 tree itype, ret;
8037
8038 if (unsignedp)
8039 unsignedp = MAX_INT_CACHED_PREC + 1;
8040
8041 if (precision <= MAX_INT_CACHED_PREC)
8042 {
8043 itype = nonstandard_integer_type_cache[precision + unsignedp];
8044 if (itype)
8045 return itype;
8046 }
8047
8048 itype = make_node (INTEGER_TYPE);
8049 TYPE_PRECISION (itype) = precision;
8050
8051 if (unsignedp)
8052 fixup_unsigned_type (itype);
8053 else
8054 fixup_signed_type (itype);
8055
8056 ret = itype;
8057 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8058 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8059 if (precision <= MAX_INT_CACHED_PREC)
8060 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8061
8062 return ret;
8063 }
8064
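/* Sketch (hypothetical helper): a 24-bit unsigned integer type, such as a
   C bit-field of that width would require.  Precisions up to
   MAX_INT_CACHED_PREC are served from the cache above on repeated calls.  */
#if 0
static tree
example_uint24_type (void)
{
  return build_nonstandard_integer_type (24, /*unsignedp=*/1);
}
#endif
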
8065 #define MAX_BOOL_CACHED_PREC \
8066 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8067 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8068
8069 /* Builds a boolean type of precision PRECISION.
8070 Used for boolean vectors to choose the proper vector element size. */
8071 tree
8072 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8073 {
8074 tree type;
8075
8076 if (precision <= MAX_BOOL_CACHED_PREC)
8077 {
8078 type = nonstandard_boolean_type_cache[precision];
8079 if (type)
8080 return type;
8081 }
8082
8083 type = make_node (BOOLEAN_TYPE);
8084 TYPE_PRECISION (type) = precision;
8085 fixup_unsigned_type (type);
8086
8087 if (precision <= MAX_BOOL_CACHED_PREC)
8088 nonstandard_boolean_type_cache[precision] = type;
8089
8090 return type;
8091 }
8092
8093 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8094 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8095 is true, reuse such a type that has already been constructed. */
8096
8097 static tree
8098 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8099 {
8100 tree itype = make_node (INTEGER_TYPE);
8101 inchash::hash hstate;
8102
8103 TREE_TYPE (itype) = type;
8104
8105 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8106 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8107
8108 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8109 SET_TYPE_MODE (itype, TYPE_MODE (type));
8110 TYPE_SIZE (itype) = TYPE_SIZE (type);
8111 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8112 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
8113 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8114
8115 if (!shared)
8116 return itype;
8117
8118 if ((TYPE_MIN_VALUE (itype)
8119 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8120 || (TYPE_MAX_VALUE (itype)
8121 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8122 {
8123 /* Since we cannot reliably merge this type, we need to compare it using
8124 structural equality checks. */
8125 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8126 return itype;
8127 }
8128
8129 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8130 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8131 hstate.merge_hash (TYPE_HASH (type));
8132 itype = type_hash_canon (hstate.end (), itype);
8133
8134 return itype;
8135 }
8136
8137 /* Wrapper around build_range_type_1 with SHARED set to true. */
8138
8139 tree
8140 build_range_type (tree type, tree lowval, tree highval)
8141 {
8142 return build_range_type_1 (type, lowval, highval, true);
8143 }
8144
8145 /* Wrapper around build_range_type_1 with SHARED set to false. */
8146
8147 tree
8148 build_nonshared_range_type (tree type, tree lowval, tree highval)
8149 {
8150 return build_range_type_1 (type, lowval, highval, false);
8151 }
8152
8153 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8154 MAXVAL should be the maximum value in the domain
8155 (one less than the length of the array).
8156
8157 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8158 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8159 The limit exists because the result is a signed type and we don't handle
8160 sizes that use more than one HOST_WIDE_INT. */
8161
8162 tree
8163 build_index_type (tree maxval)
8164 {
8165 return build_range_type (sizetype, size_zero_node, maxval);
8166 }
8167
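/* Sketch (hypothetical helper): the TYPE_DOMAIN for a ten-element array is
   the sizetype range [0, 9], which build_index_type constructs directly;
   build_range_type covers the general case with an explicit base type and
   both bounds.  */
#if 0
static tree
example_ten_element_domain (void)
{
  return build_index_type (size_int (9));
}
#endif
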
8168 /* Return true if the debug information for TYPE, a subtype, should be emitted
8169 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8170 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8171 debug info and doesn't reflect the source code. */
8172
8173 bool
8174 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8175 {
8176 tree base_type = TREE_TYPE (type), low, high;
8177
8178 /* Subrange types have a base type which is an integral type. */
8179 if (!INTEGRAL_TYPE_P (base_type))
8180 return false;
8181
8182 /* Get the real bounds of the subtype. */
8183 if (lang_hooks.types.get_subrange_bounds)
8184 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8185 else
8186 {
8187 low = TYPE_MIN_VALUE (type);
8188 high = TYPE_MAX_VALUE (type);
8189 }
8190
8191 /* If the type and its base type have the same representation and the same
8192 name, then the type is not a subrange but a copy of the base type. */
8193 if ((TREE_CODE (base_type) == INTEGER_TYPE
8194 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8195 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8196 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8197 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8198 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8199 return false;
8200
8201 if (lowval)
8202 *lowval = low;
8203 if (highval)
8204 *highval = high;
8205 return true;
8206 }
8207
8208 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8209 and number of elements specified by the range of values of INDEX_TYPE.
8210 If SHARED is true, reuse such a type that has already been constructed. */
8211
8212 static tree
8213 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8214 {
8215 tree t;
8216
8217 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8218 {
8219 error ("arrays of functions are not meaningful");
8220 elt_type = integer_type_node;
8221 }
8222
8223 t = make_node (ARRAY_TYPE);
8224 TREE_TYPE (t) = elt_type;
8225 TYPE_DOMAIN (t) = index_type;
8226 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8227 layout_type (t);
8228
8229 /* If the element type is incomplete at this point we get marked for
8230 structural equality. Do not record these types in the canonical
8231 type hashtable. */
8232 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8233 return t;
8234
8235 if (shared)
8236 {
8237 inchash::hash hstate;
8238 hstate.add_object (TYPE_HASH (elt_type));
8239 if (index_type)
8240 hstate.add_object (TYPE_HASH (index_type));
8241 t = type_hash_canon (hstate.end (), t);
8242 }
8243
8244 if (TYPE_CANONICAL (t) == t)
8245 {
8246 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8247 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8248 SET_TYPE_STRUCTURAL_EQUALITY (t);
8249 else if (TYPE_CANONICAL (elt_type) != elt_type
8250 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8251 TYPE_CANONICAL (t)
8252 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8253 index_type
8254 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8255 shared);
8256 }
8257
8258 return t;
8259 }
8260
8261 /* Wrapper around build_array_type_1 with SHARED set to true. */
8262
8263 tree
8264 build_array_type (tree elt_type, tree index_type)
8265 {
8266 return build_array_type_1 (elt_type, index_type, true);
8267 }
8268
8269 /* Wrapper around build_array_type_1 with SHARED set to false. */
8270
8271 tree
8272 build_nonshared_array_type (tree elt_type, tree index_type)
8273 {
8274 return build_array_type_1 (elt_type, index_type, false);
8275 }
8276
8277 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8278 sizetype. */
8279
8280 tree
8281 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8282 {
8283 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8284 }
8285
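/* Sketch (hypothetical helper): `int[10]' built from the element type and
   element count; this is just build_array_type with the index domain from
   build_index_type (size_int (10 - 1)).  */
#if 0
static tree
example_int_array_10_type (void)
{
  return build_array_type_nelts (integer_type_node, 10);
}
#endif
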
8286 /* Recursively strips the ARRAY_TYPE levels of TYPE until a non-array
8287 element type is found, and returns that element type. */
8288
8289 tree
8290 strip_array_types (tree type)
8291 {
8292 while (TREE_CODE (type) == ARRAY_TYPE)
8293 type = TREE_TYPE (type);
8294
8295 return type;
8296 }
8297
8298 /* Computes the canonical argument types from the argument type list
8299 ARGTYPES.
8300
8301 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8302 on entry to this function, or if any of the ARGTYPES are
8303 structural.
8304
8305 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8306 true on entry to this function, or if any of the ARGTYPES are
8307 non-canonical.
8308
8309 Returns a canonical argument list, which may be ARGTYPES when the
8310 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8311 true) or would not differ from ARGTYPES. */
8312
8313 static tree
8314 maybe_canonicalize_argtypes (tree argtypes,
8315 bool *any_structural_p,
8316 bool *any_noncanonical_p)
8317 {
8318 tree arg;
8319 bool any_noncanonical_argtypes_p = false;
8320
8321 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8322 {
8323 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8324 /* Fail gracefully by stating that the type is structural. */
8325 *any_structural_p = true;
8326 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8327 *any_structural_p = true;
8328 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8329 || TREE_PURPOSE (arg))
8330 /* If the argument has a default argument, we consider it
8331 non-canonical even though the type itself is canonical.
8332 That way, different variants of function and method types
8333 with default arguments will all point to the variant with
8334 no defaults as their canonical type. */
8335 any_noncanonical_argtypes_p = true;
8336 }
8337
8338 if (*any_structural_p)
8339 return argtypes;
8340
8341 if (any_noncanonical_argtypes_p)
8342 {
8343 /* Build the canonical list of argument types. */
8344 tree canon_argtypes = NULL_TREE;
8345 bool is_void = false;
8346
8347 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8348 {
8349 if (arg == void_list_node)
8350 is_void = true;
8351 else
8352 canon_argtypes = tree_cons (NULL_TREE,
8353 TYPE_CANONICAL (TREE_VALUE (arg)),
8354 canon_argtypes);
8355 }
8356
8357 canon_argtypes = nreverse (canon_argtypes);
8358 if (is_void)
8359 canon_argtypes = chainon (canon_argtypes, void_list_node);
8360
8361 /* There is a non-canonical type. */
8362 *any_noncanonical_p = true;
8363 return canon_argtypes;
8364 }
8365
8366 /* The canonical argument types are the same as ARGTYPES. */
8367 return argtypes;
8368 }
8369
8370 /* Construct, lay out and return
8371 the type of functions returning type VALUE_TYPE
8372 given arguments of types ARG_TYPES.
8373 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8374 are data type nodes for the arguments of the function.
8375 If such a type has already been constructed, reuse it. */
8376
8377 tree
8378 build_function_type (tree value_type, tree arg_types)
8379 {
8380 tree t;
8381 inchash::hash hstate;
8382 bool any_structural_p, any_noncanonical_p;
8383 tree canon_argtypes;
8384
8385 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8386 {
8387 error ("function return type cannot be function");
8388 value_type = integer_type_node;
8389 }
8390
8391 /* Make a node of the sort we want. */
8392 t = make_node (FUNCTION_TYPE);
8393 TREE_TYPE (t) = value_type;
8394 TYPE_ARG_TYPES (t) = arg_types;
8395
8396 /* If we already have such a type, use the old one. */
8397 hstate.add_object (TYPE_HASH (value_type));
8398 type_hash_list (arg_types, hstate);
8399 t = type_hash_canon (hstate.end (), t);
8400
8401 /* Set up the canonical type. */
8402 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8403 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8404 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8405 &any_structural_p,
8406 &any_noncanonical_p);
8407 if (any_structural_p)
8408 SET_TYPE_STRUCTURAL_EQUALITY (t);
8409 else if (any_noncanonical_p)
8410 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8411 canon_argtypes);
8412
8413 if (!COMPLETE_TYPE_P (t))
8414 layout_type (t);
8415 return t;
8416 }
8417
8418 /* Build a function type. The RETURN_TYPE is the type returned by the
8419 function. If VAARGS is set, no void_type_node is appended to
8420 the list. ARGP must always be terminated by a NULL_TREE. */
8421
8422 static tree
8423 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8424 {
8425 tree t, args, last;
8426
8427 t = va_arg (argp, tree);
8428 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8429 args = tree_cons (NULL_TREE, t, args);
8430
8431 if (vaargs)
8432 {
8433 last = args;
8434 if (args != NULL_TREE)
8435 args = nreverse (args);
8436 gcc_assert (last != void_list_node);
8437 }
8438 else if (args == NULL_TREE)
8439 args = void_list_node;
8440 else
8441 {
8442 last = args;
8443 args = nreverse (args);
8444 TREE_CHAIN (last) = void_list_node;
8445 }
8446 args = build_function_type (return_type, args);
8447
8448 return args;
8449 }
8450
8451 /* Build a function type. The RETURN_TYPE is the type returned by the
8452 function. If additional arguments are provided, they are
8453 additional argument types. The list of argument types must always
8454 be terminated by NULL_TREE. */
8455
8456 tree
8457 build_function_type_list (tree return_type, ...)
8458 {
8459 tree args;
8460 va_list p;
8461
8462 va_start (p, return_type);
8463 args = build_function_type_list_1 (false, return_type, p);
8464 va_end (p);
8465 return args;
8466 }
8467
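/* Sketch (hypothetical helper): the type of `int f (double, char *)'.
   The argument list is NULL_TREE-terminated; because this is not the
   varargs variant, void_list_node is appended for the caller.  */
#if 0
static tree
example_fn_type (void)
{
  return build_function_type_list (integer_type_node,
                                   double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);
}
#endif
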
8468 /* Build a variable argument function type. The RETURN_TYPE is the
8469 type returned by the function. If additional arguments are provided,
8470 they are additional argument types. The list of argument types must
8471 always be terminated by NULL_TREE. */
8472
8473 tree
8474 build_varargs_function_type_list (tree return_type, ...)
8475 {
8476 tree args;
8477 va_list p;
8478
8479 va_start (p, return_type);
8480 args = build_function_type_list_1 (true, return_type, p);
8481 va_end (p);
8482
8483 return args;
8484 }
8485
8486 /* Build a function type. RETURN_TYPE is the type returned by the
8487 function; VAARGS indicates whether the function takes varargs. The
8488 function takes N named arguments, the types of which are provided in
8489 ARG_TYPES. */
8490
8491 static tree
8492 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8493 tree *arg_types)
8494 {
8495 int i;
8496 tree t = vaargs ? NULL_TREE : void_list_node;
8497
8498 for (i = n - 1; i >= 0; i--)
8499 t = tree_cons (NULL_TREE, arg_types[i], t);
8500
8501 return build_function_type (return_type, t);
8502 }
8503
8504 /* Build a function type. RETURN_TYPE is the type returned by the
8505 function. The function takes N named arguments, the types of which
8506 are provided in ARG_TYPES. */
8507
8508 tree
8509 build_function_type_array (tree return_type, int n, tree *arg_types)
8510 {
8511 return build_function_type_array_1 (false, return_type, n, arg_types);
8512 }
8513
8514 /* Build a variable argument function type. RETURN_TYPE is the type
8515 returned by the function. The function takes N named arguments, the
8516 types of which are provided in ARG_TYPES. */
8517
8518 tree
8519 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8520 {
8521 return build_function_type_array_1 (true, return_type, n, arg_types);
8522 }
8523
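/* Sketch (hypothetical helper): the same kind of type built from an array
   of argument types, here for a printf-like function `int f (char *, ...)'
   using the varargs variant above.  */
#if 0
static tree
example_varargs_fn_type (void)
{
  tree arg_types[1] = { build_pointer_type (char_type_node) };
  return build_varargs_function_type_array (integer_type_node, 1, arg_types);
}
#endif
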
8524 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8525 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8526 for the method. An implicit additional parameter (of type
8527 pointer-to-BASETYPE) is added to the ARGTYPES. */
8528
8529 tree
8530 build_method_type_directly (tree basetype,
8531 tree rettype,
8532 tree argtypes)
8533 {
8534 tree t;
8535 tree ptype;
8536 inchash::hash hstate;
8537 bool any_structural_p, any_noncanonical_p;
8538 tree canon_argtypes;
8539
8540 /* Make a node of the sort we want. */
8541 t = make_node (METHOD_TYPE);
8542
8543 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8544 TREE_TYPE (t) = rettype;
8545 ptype = build_pointer_type (basetype);
8546
8547 /* The actual arglist for this function includes a "hidden" argument
8548 which is "this". Put it into the list of argument types. */
8549 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8550 TYPE_ARG_TYPES (t) = argtypes;
8551
8552 /* If we already have such a type, use the old one. */
8553 hstate.add_object (TYPE_HASH (basetype));
8554 hstate.add_object (TYPE_HASH (rettype));
8555 type_hash_list (argtypes, hstate);
8556 t = type_hash_canon (hstate.end (), t);
8557
8558 /* Set up the canonical type. */
8559 any_structural_p
8560 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8561 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8562 any_noncanonical_p
8563 = (TYPE_CANONICAL (basetype) != basetype
8564 || TYPE_CANONICAL (rettype) != rettype);
8565 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8566 &any_structural_p,
8567 &any_noncanonical_p);
8568 if (any_structural_p)
8569 SET_TYPE_STRUCTURAL_EQUALITY (t);
8570 else if (any_noncanonical_p)
8571 TYPE_CANONICAL (t)
8572 = build_method_type_directly (TYPE_CANONICAL (basetype),
8573 TYPE_CANONICAL (rettype),
8574 canon_argtypes);
8575 if (!COMPLETE_TYPE_P (t))
8576 layout_type (t);
8577
8578 return t;
8579 }
8580
8581 /* Construct, lay out and return the type of methods belonging to class
8582 BASETYPE and whose arguments and values are described by TYPE.
8583 If that type exists already, reuse it.
8584 TYPE must be a FUNCTION_TYPE node. */
8585
8586 tree
8587 build_method_type (tree basetype, tree type)
8588 {
8589 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8590
8591 return build_method_type_directly (basetype,
8592 TREE_TYPE (type),
8593 TYPE_ARG_TYPES (type));
8594 }
8595
8596 /* Construct, lay out and return the type of offsets to a value
8597 of type TYPE, within an object of type BASETYPE.
8598 If a suitable offset type exists already, reuse it. */
8599
8600 tree
8601 build_offset_type (tree basetype, tree type)
8602 {
8603 tree t;
8604 inchash::hash hstate;
8605
8606 /* Make a node of the sort we want. */
8607 t = make_node (OFFSET_TYPE);
8608
8609 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8610 TREE_TYPE (t) = type;
8611
8612 /* If we already have such a type, use the old one. */
8613 hstate.add_object (TYPE_HASH (basetype));
8614 hstate.add_object (TYPE_HASH (type));
8615 t = type_hash_canon (hstate.end (), t);
8616
8617 if (!COMPLETE_TYPE_P (t))
8618 layout_type (t);
8619
8620 if (TYPE_CANONICAL (t) == t)
8621 {
8622 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8623 || TYPE_STRUCTURAL_EQUALITY_P (type))
8624 SET_TYPE_STRUCTURAL_EQUALITY (t);
8625 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8626 || TYPE_CANONICAL (type) != type)
8627 TYPE_CANONICAL (t)
8628 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8629 TYPE_CANONICAL (type));
8630 }
8631
8632 return t;
8633 }
8634
8635 /* Create a complex type whose components are COMPONENT_TYPE. */
8636
8637 tree
8638 build_complex_type (tree component_type)
8639 {
8640 tree t;
8641 inchash::hash hstate;
8642
8643 gcc_assert (INTEGRAL_TYPE_P (component_type)
8644 || SCALAR_FLOAT_TYPE_P (component_type)
8645 || FIXED_POINT_TYPE_P (component_type));
8646
8647 /* Make a node of the sort we want. */
8648 t = make_node (COMPLEX_TYPE);
8649
8650 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8651
8652 /* If we already have such a type, use the old one. */
8653 hstate.add_object (TYPE_HASH (component_type));
8654 t = type_hash_canon (hstate.end (), t);
8655
8656 if (!COMPLETE_TYPE_P (t))
8657 layout_type (t);
8658
8659 if (TYPE_CANONICAL (t) == t)
8660 {
8661 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8662 SET_TYPE_STRUCTURAL_EQUALITY (t);
8663 else if (TYPE_CANONICAL (component_type) != component_type)
8664 TYPE_CANONICAL (t)
8665 = build_complex_type (TYPE_CANONICAL (component_type));
8666 }
8667
8668 /* We need to create a name, since complex is a fundamental type. */
8669 if (! TYPE_NAME (t))
8670 {
8671 const char *name;
8672 if (component_type == char_type_node)
8673 name = "complex char";
8674 else if (component_type == signed_char_type_node)
8675 name = "complex signed char";
8676 else if (component_type == unsigned_char_type_node)
8677 name = "complex unsigned char";
8678 else if (component_type == short_integer_type_node)
8679 name = "complex short int";
8680 else if (component_type == short_unsigned_type_node)
8681 name = "complex short unsigned int";
8682 else if (component_type == integer_type_node)
8683 name = "complex int";
8684 else if (component_type == unsigned_type_node)
8685 name = "complex unsigned int";
8686 else if (component_type == long_integer_type_node)
8687 name = "complex long int";
8688 else if (component_type == long_unsigned_type_node)
8689 name = "complex long unsigned int";
8690 else if (component_type == long_long_integer_type_node)
8691 name = "complex long long int";
8692 else if (component_type == long_long_unsigned_type_node)
8693 name = "complex long long unsigned int";
8694 else
8695 name = 0;
8696
8697 if (name != 0)
8698 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8699 get_identifier (name), t);
8700 }
8701
8702 return build_qualified_type (t, TYPE_QUALS (component_type));
8703 }
8704
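/* Sketch (hypothetical helper): the C `_Complex double' type.  The
   component type must be integral, real or fixed-point, and the result
   carries the component's qualifiers via build_qualified_type above.  */
#if 0
static tree
example_complex_double_type (void)
{
  return build_complex_type (double_type_node);
}
#endif
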
8705 /* If TYPE is a real or complex floating-point type and the target
8706 does not directly support arithmetic on TYPE then return the wider
8707 type to be used for arithmetic on TYPE. Otherwise, return
8708 NULL_TREE. */
8709
8710 tree
8711 excess_precision_type (tree type)
8712 {
8713 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8714 {
8715 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8716 switch (TREE_CODE (type))
8717 {
8718 case REAL_TYPE:
8719 switch (flt_eval_method)
8720 {
8721 case 1:
8722 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8723 return double_type_node;
8724 break;
8725 case 2:
8726 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8727 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8728 return long_double_type_node;
8729 break;
8730 default:
8731 gcc_unreachable ();
8732 }
8733 break;
8734 case COMPLEX_TYPE:
8735 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8736 return NULL_TREE;
8737 switch (flt_eval_method)
8738 {
8739 case 1:
8740 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8741 return complex_double_type_node;
8742 break;
8743 case 2:
8744 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8745 || (TYPE_MODE (TREE_TYPE (type))
8746 == TYPE_MODE (double_type_node)))
8747 return complex_long_double_type_node;
8748 break;
8749 default:
8750 gcc_unreachable ();
8751 }
8752 break;
8753 default:
8754 break;
8755 }
8756 }
8757 return NULL_TREE;
8758 }
8759 \f
8760 /* Return OP, stripped of any conversions to wider types as much as is safe.
8761 Converting the value back to OP's type makes a value equivalent to OP.
8762
8763 If FOR_TYPE is nonzero, we return a value which, if converted to
8764 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8765
8766 OP must have integer, real or enumeral type. Pointers are not allowed!
8767
8768 There are some cases where the obvious value we could return
8769 would regenerate to OP if converted to OP's type,
8770 but would not extend like OP to wider types.
8771 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8772 For example, if OP is (unsigned short)(signed char)-1,
8773 we avoid returning (signed char)-1 if FOR_TYPE is int,
8774 even though extending that to an unsigned short would regenerate OP,
8775 since the result of extending (signed char)-1 to (int)
8776 is different from (int) OP. */
8777
8778 tree
8779 get_unwidened (tree op, tree for_type)
8780 {
8781 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8782 tree type = TREE_TYPE (op);
8783 unsigned final_prec
8784 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8785 int uns
8786 = (for_type != 0 && for_type != type
8787 && final_prec > TYPE_PRECISION (type)
8788 && TYPE_UNSIGNED (type));
8789 tree win = op;
8790
8791 while (CONVERT_EXPR_P (op))
8792 {
8793 int bitschange;
8794
8795 /* TYPE_PRECISION on vector types has different meaning
8796 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8797 so avoid them here. */
8798 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8799 break;
8800
8801 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8802 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8803
8804 /* Truncations are many-to-one and so cannot be removed,
8805 unless we are later going to truncate down even further. */
8806 if (bitschange < 0
8807 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8808 break;
8809
8810 /* See what's inside this conversion. If we decide to strip it,
8811 we will set WIN. */
8812 op = TREE_OPERAND (op, 0);
8813
8814 /* If we have not stripped any zero-extensions (uns is 0),
8815 we can strip any kind of extension.
8816 If we have previously stripped a zero-extension,
8817 only zero-extensions can safely be stripped.
8818 Any extension can be stripped if the bits it would produce
8819 are all going to be discarded later by truncating to FOR_TYPE. */
8820
8821 if (bitschange > 0)
8822 {
8823 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8824 win = op;
8825 /* TYPE_UNSIGNED says whether this is a zero-extension.
8826 Let's avoid computing it if it does not affect WIN
8827 and if UNS will not be needed again. */
8828 if ((uns
8829 || CONVERT_EXPR_P (op))
8830 && TYPE_UNSIGNED (TREE_TYPE (op)))
8831 {
8832 uns = 1;
8833 win = op;
8834 }
8835 }
8836 }
8837
8838 /* If we finally reach a constant see if it fits in for_type and
8839 in that case convert it. */
8840 if (for_type
8841 && TREE_CODE (win) == INTEGER_CST
8842 && TREE_TYPE (win) != for_type
8843 && int_fits_type_p (win, for_type))
8844 win = fold_convert (for_type, win);
8845
8846 return win;
8847 }
8848 \f
8849 /* Return OP or a simpler expression for a narrower value
8850 which can be sign-extended or zero-extended to give back OP.
8851 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8852 or 0 if the value should be sign-extended. */
8853
8854 tree
8855 get_narrower (tree op, int *unsignedp_ptr)
8856 {
8857 int uns = 0;
8858 int first = 1;
8859 tree win = op;
8860 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8861
8862 while (TREE_CODE (op) == NOP_EXPR)
8863 {
8864 int bitschange
8865 = (TYPE_PRECISION (TREE_TYPE (op))
8866 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8867
8868 /* Truncations are many-to-one and so cannot be removed. */
8869 if (bitschange < 0)
8870 break;
8871
8872 /* See what's inside this conversion. If we decide to strip it,
8873 we will set WIN. */
8874
8875 if (bitschange > 0)
8876 {
8877 op = TREE_OPERAND (op, 0);
8878 /* An extension: the outermost one can be stripped,
8879 but remember whether it is zero or sign extension. */
8880 if (first)
8881 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8882 /* Otherwise, if a sign extension has been stripped,
8883 only sign extensions can now be stripped;
8884 if a zero extension has been stripped, only zero-extensions. */
8885 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8886 break;
8887 first = 0;
8888 }
8889 else /* bitschange == 0 */
8890 {
8891 /* A change in nominal type can always be stripped, but we must
8892 preserve the unsignedness. */
8893 if (first)
8894 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8895 first = 0;
8896 op = TREE_OPERAND (op, 0);
8897 /* Keep trying to narrow, but don't assign op to win if it
8898 would turn an integral type into something else. */
8899 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8900 continue;
8901 }
8902
8903 win = op;
8904 }
8905
8906 if (TREE_CODE (op) == COMPONENT_REF
8907 /* Since type_for_size always gives an integer type. */
8908 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8909 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8910 /* Ensure field is laid out already. */
8911 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8912 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8913 {
8914 unsigned HOST_WIDE_INT innerprec
8915 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8916 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8917 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8918 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8919
8920 /* We can get this structure field in a narrower type that fits it,
8921 but the resulting extension to its nominal type (a fullword type)
8922 must satisfy the same conditions as for other extensions.
8923
8924 Do this only for fields that are aligned (not bit-fields),
8925 because when bit-field insns will be used there is no
8926 advantage in doing this. */
8927
8928 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8929 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8930 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8931 && type != 0)
8932 {
8933 if (first)
8934 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8935 win = fold_convert (type, op);
8936 }
8937 }
8938
8939 *unsignedp_ptr = uns;
8940 return win;
8941 }
8942 \f
8943 /* Returns true if integer constant C has a value that is permissible
8944 for type TYPE (an INTEGER_TYPE). */
8945
8946 bool
8947 int_fits_type_p (const_tree c, const_tree type)
8948 {
8949 tree type_low_bound, type_high_bound;
8950 bool ok_for_low_bound, ok_for_high_bound;
8951 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8952
8953 retry:
8954 type_low_bound = TYPE_MIN_VALUE (type);
8955 type_high_bound = TYPE_MAX_VALUE (type);
8956
8957 /* If at least one bound of the type is a constant integer, we can check
8958 ourselves and maybe make a decision. If no such decision is possible, but
8959 this type is a subtype, try checking against that. Otherwise, use
8960 fits_to_tree_p, which checks against the precision.
8961
8962 Compute the status for each possibly constant bound, and return if we see
8963 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8964 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8965 for "constant known to fit". */
8966
8967 /* Check if c >= type_low_bound. */
8968 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8969 {
8970 if (tree_int_cst_lt (c, type_low_bound))
8971 return false;
8972 ok_for_low_bound = true;
8973 }
8974 else
8975 ok_for_low_bound = false;
8976
8977 /* Check if c <= type_high_bound. */
8978 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8979 {
8980 if (tree_int_cst_lt (type_high_bound, c))
8981 return false;
8982 ok_for_high_bound = true;
8983 }
8984 else
8985 ok_for_high_bound = false;
8986
8987 /* If the constant fits both bounds, the result is known. */
8988 if (ok_for_low_bound && ok_for_high_bound)
8989 return true;
8990
8991 /* Perform some generic filtering which may allow making a decision
8992 even if the bounds are not constant. First, negative integers
8993 never fit in unsigned types. */
8994 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8995 return false;
8996
8997 /* Second, narrower types always fit in wider ones. */
8998 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8999 return true;
9000
9001 /* Third, unsigned integers with top bit set never fit signed types. */
9002 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9003 {
9004 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
9005 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9006 {
9007 /* When a tree_cst is converted to a wide-int, the precision
9008 is taken from the type. However, if the precision of the
9009 mode underneath the type is smaller than that, it is
9010 possible that the value will not fit. The test below
9011 fails if any bit is set between the sign bit of the
9012 underlying mode and the top bit of the type. */
9013 if (wi::ne_p (wi::zext (c, prec - 1), c))
9014 return false;
9015 }
9016 else if (wi::neg_p (c))
9017 return false;
9018 }
9019
9020 /* If we haven't been able to decide at this point, there is nothing more
9021 we can check ourselves here. Look at the base type if we have one and it
9022 has the same precision. */
9023 if (TREE_CODE (type) == INTEGER_TYPE
9024 && TREE_TYPE (type) != 0
9025 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9026 {
9027 type = TREE_TYPE (type);
9028 goto retry;
9029 }
9030
9031 /* Or to fits_to_tree_p, if nothing else. */
9032 return wi::fits_to_tree_p (c, type);
9033 }
9034
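/* Sketch (hypothetical helper): 300 does not fit an 8-bit unsigned char
   type, while 200 does; the constant is built in int here and checked
   against the target type's bounds by the routine above.  */
#if 0
static bool
example_fits_unsigned_char (HOST_WIDE_INT value)
{
  tree c = build_int_cst (integer_type_node, value);
  return int_fits_type_p (c, unsigned_char_type_node);
}
#endif
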
9035 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9036 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9037 represented (assuming two's-complement arithmetic) within the bit
9038 precision of the type are returned instead. */
9039
9040 void
9041 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9042 {
9043 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9044 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9045 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9046 else
9047 {
9048 if (TYPE_UNSIGNED (type))
9049 mpz_set_ui (min, 0);
9050 else
9051 {
9052 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9053 wi::to_mpz (mn, min, SIGNED);
9054 }
9055 }
9056
9057 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9058 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9059 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9060 else
9061 {
9062 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9063 wi::to_mpz (mn, max, TYPE_SIGN (type));
9064 }
9065 }
9066
9067 /* Return true if VAR is an automatic variable defined in function FN. */
9068
9069 bool
9070 auto_var_in_fn_p (const_tree var, const_tree fn)
9071 {
9072 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9073 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9074 || TREE_CODE (var) == PARM_DECL)
9075 && ! TREE_STATIC (var))
9076 || TREE_CODE (var) == LABEL_DECL
9077 || TREE_CODE (var) == RESULT_DECL));
9078 }
9079
9080 /* Subprogram of following function. Called by walk_tree.
9081
9082 Return *TP if it is an automatic variable or parameter of the
9083 function passed in as DATA. */
9084
9085 static tree
9086 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9087 {
9088 tree fn = (tree) data;
9089
9090 if (TYPE_P (*tp))
9091 *walk_subtrees = 0;
9092
9093 else if (DECL_P (*tp)
9094 && auto_var_in_fn_p (*tp, fn))
9095 return *tp;
9096
9097 return NULL_TREE;
9098 }
9099
9100 /* Returns true if T is, contains, or refers to a type with variable
9101 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9102 arguments, but not the return type. If FN is nonzero, only return
9103 true if a modifier of the type or position of FN is a variable or
9104 parameter inside FN.
9105
9106 This concept is more general than that of C99 'variably modified types':
9107 in C99, a struct type is never variably modified because a VLA may not
9108 appear as a structure member. However, in GNU C, code like:
9109
9110 struct S { int i[f()]; };
9111
9112 is valid, and other languages may define similar constructs. */
9113
9114 bool
9115 variably_modified_type_p (tree type, tree fn)
9116 {
9117 tree t;
9118
9119 /* Test if T is either variable (if FN is zero) or an expression containing
9120 a variable in FN. If TYPE isn't gimplified, return true also if
9121 gimplify_one_sizepos would gimplify the expression into a local
9122 variable. */
9123 #define RETURN_TRUE_IF_VAR(T) \
9124 do { tree _t = (T); \
9125 if (_t != NULL_TREE \
9126 && _t != error_mark_node \
9127 && TREE_CODE (_t) != INTEGER_CST \
9128 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9129 && (!fn \
9130 || (!TYPE_SIZES_GIMPLIFIED (type) \
9131 && !is_gimple_sizepos (_t)) \
9132 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9133 return true; } while (0)
9134
9135 if (type == error_mark_node)
9136 return false;
9137
9138 /* If TYPE itself has variable size, it is variably modified. */
9139 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9140 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9141
9142 switch (TREE_CODE (type))
9143 {
9144 case POINTER_TYPE:
9145 case REFERENCE_TYPE:
9146 case VECTOR_TYPE:
9147 if (variably_modified_type_p (TREE_TYPE (type), fn))
9148 return true;
9149 break;
9150
9151 case FUNCTION_TYPE:
9152 case METHOD_TYPE:
9153 /* If TYPE is a function type, it is variably modified if the
9154 return type is variably modified. */
9155 if (variably_modified_type_p (TREE_TYPE (type), fn))
9156 return true;
9157 break;
9158
9159 case INTEGER_TYPE:
9160 case REAL_TYPE:
9161 case FIXED_POINT_TYPE:
9162 case ENUMERAL_TYPE:
9163 case BOOLEAN_TYPE:
9164 /* Scalar types are variably modified if their end points
9165 aren't constant. */
9166 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9167 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9168 break;
9169
9170 case RECORD_TYPE:
9171 case UNION_TYPE:
9172 case QUAL_UNION_TYPE:
9173 /* We can't see if any of the fields are variably-modified by the
9174 definition we normally use, since that would produce infinite
9175 recursion via pointers. */
9176 /* This is variably modified if some field's type is. */
9177 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9178 if (TREE_CODE (t) == FIELD_DECL)
9179 {
9180 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9181 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9182 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9183
9184 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9185 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9186 }
9187 break;
9188
9189 case ARRAY_TYPE:
9190 /* Do not call ourselves to avoid infinite recursion. This is
9191 variably modified if the element type is. */
9192 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9193 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9194 break;
9195
9196 default:
9197 break;
9198 }
9199
9200 /* The current language may have other cases to check, but in general,
9201 all other types are not variably modified. */
9202 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9203
9204 #undef RETURN_TRUE_IF_VAR
9205 }
9206
9207 /* Given a DECL or TYPE, return the scope in which it was declared, or
9208 NULL_TREE if there is no containing scope. */
9209
9210 tree
9211 get_containing_scope (const_tree t)
9212 {
9213 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9214 }
9215
9216 /* Return the innermost context enclosing DECL that is
9217 a FUNCTION_DECL, or zero if none. */
9218
9219 tree
9220 decl_function_context (const_tree decl)
9221 {
9222 tree context;
9223
9224 if (TREE_CODE (decl) == ERROR_MARK)
9225 return 0;
9226
9227 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9228 where we look up the function at runtime. Such functions always take
9229 a first argument of type 'pointer to real context'.
9230
9231 C++ should really be fixed to use DECL_CONTEXT for the real context,
9232 and use something else for the "virtual context". */
9233 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9234 context
9235 = TYPE_MAIN_VARIANT
9236 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9237 else
9238 context = DECL_CONTEXT (decl);
9239
9240 while (context && TREE_CODE (context) != FUNCTION_DECL)
9241 {
9242 if (TREE_CODE (context) == BLOCK)
9243 context = BLOCK_SUPERCONTEXT (context);
9244 else
9245 context = get_containing_scope (context);
9246 }
9247
9248 return context;
9249 }
9250
9251 /* Return the innermost context enclosing DECL that is
9252 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9253 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9254
9255 tree
9256 decl_type_context (const_tree decl)
9257 {
9258 tree context = DECL_CONTEXT (decl);
9259
9260 while (context)
9261 switch (TREE_CODE (context))
9262 {
9263 case NAMESPACE_DECL:
9264 case TRANSLATION_UNIT_DECL:
9265 return NULL_TREE;
9266
9267 case RECORD_TYPE:
9268 case UNION_TYPE:
9269 case QUAL_UNION_TYPE:
9270 return context;
9271
9272 case TYPE_DECL:
9273 case FUNCTION_DECL:
9274 context = DECL_CONTEXT (context);
9275 break;
9276
9277 case BLOCK:
9278 context = BLOCK_SUPERCONTEXT (context);
9279 break;
9280
9281 default:
9282 gcc_unreachable ();
9283 }
9284
9285 return NULL_TREE;
9286 }
9287
9288 /* CALL is a CALL_EXPR. Return the declaration for the function
9289 called, or NULL_TREE if the called function cannot be
9290 determined. */
9291
9292 tree
9293 get_callee_fndecl (const_tree call)
9294 {
9295 tree addr;
9296
9297 if (call == error_mark_node)
9298 return error_mark_node;
9299
9300 /* It's invalid to call this function with anything but a
9301 CALL_EXPR. */
9302 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9303
9304 /* The first operand to the CALL is the address of the function
9305 called. */
9306 addr = CALL_EXPR_FN (call);
9307
9308 /* If there is no function, return early. */
9309 if (addr == NULL_TREE)
9310 return NULL_TREE;
9311
9312 STRIP_NOPS (addr);
9313
9314 /* If this is a readonly function pointer, extract its initial value. */
9315 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9316 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9317 && DECL_INITIAL (addr))
9318 addr = DECL_INITIAL (addr);
9319
9320 /* If the address is just `&f' for some function `f', then we know
9321 that `f' is being called. */
9322 if (TREE_CODE (addr) == ADDR_EXPR
9323 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9324 return TREE_OPERAND (addr, 0);
9325
9326 /* We couldn't figure out what was being called. */
9327 return NULL_TREE;
9328 }
9329
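/* Sketch (hypothetical helper): a typical guarded use, since the callee of
   an indirect call cannot generally be determined and NULL_TREE must be
   expected.  */
#if 0
static const char *
example_callee_name (const_tree call)
{
  tree fndecl = get_callee_fndecl (call);
  if (fndecl == NULL_TREE || DECL_NAME (fndecl) == NULL_TREE)
    return NULL;
  return IDENTIFIER_POINTER (DECL_NAME (fndecl));
}
#endif
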
9330 #define TREE_MEM_USAGE_SPACES 40
9331
9332 /* Print debugging information about tree nodes generated during the compile,
9333 and any language-specific information. */
9334
9335 void
9336 dump_tree_statistics (void)
9337 {
9338 if (GATHER_STATISTICS)
9339 {
9340 int i;
9341 int total_nodes, total_bytes;
9342 fprintf (stderr, "\nKind Nodes Bytes\n");
9343 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9344 total_nodes = total_bytes = 0;
9345 for (i = 0; i < (int) all_kinds; i++)
9346 {
9347 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9348 tree_node_counts[i], tree_node_sizes[i]);
9349 total_nodes += tree_node_counts[i];
9350 total_bytes += tree_node_sizes[i];
9351 }
9352 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9353 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9354 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9355 fprintf (stderr, "Code Nodes\n");
9356 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9357 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9358 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9359 tree_code_counts[i]);
9360 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9361 fprintf (stderr, "\n");
9362 ssanames_print_statistics ();
9363 fprintf (stderr, "\n");
9364 phinodes_print_statistics ();
9365 fprintf (stderr, "\n");
9366 }
9367 else
9368 fprintf (stderr, "(No per-node statistics)\n");
9369
9370 print_type_hash_statistics ();
9371 print_debug_expr_statistics ();
9372 print_value_expr_statistics ();
9373 lang_hooks.print_statistics ();
9374 }
9375 \f
9376 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9377
9378 /* Generate a crc32 of a byte. */
9379
9380 static unsigned
9381 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9382 {
9383 unsigned ix;
9384
9385 for (ix = bits; ix--; value <<= 1)
9386 {
9387 unsigned feedback;
9388
9389 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9390 chksum <<= 1;
9391 chksum ^= feedback;
9392 }
9393 return chksum;
9394 }
9395
9396 /* Generate a crc32 of a 32-bit unsigned. */
9397
9398 unsigned
9399 crc32_unsigned (unsigned chksum, unsigned value)
9400 {
9401 return crc32_unsigned_bits (chksum, value, 32);
9402 }
9403
9404 /* Generate a crc32 of a byte. */
9405
9406 unsigned
9407 crc32_byte (unsigned chksum, char byte)
9408 {
9409 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9410 }
9411
9412 /* Generate a crc32 of a string. */
9413
9414 unsigned
9415 crc32_string (unsigned chksum, const char *string)
9416 {
9417 do
9418 {
9419 chksum = crc32_byte (chksum, *string);
9420 }
9421 while (*string++);
9422 return chksum;
9423 }
9424
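/* Sketch (hypothetical helper): checksums can be chained by feeding the
   previous result back in as CHKSUM, e.g. a string followed by a 32-bit
   value.  */
#if 0
static unsigned
example_chained_crc32 (const char *name, unsigned value)
{
  unsigned chksum = crc32_string (0, name);
  return crc32_unsigned (chksum, value);
}
#endif
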
9425 /* P is a string that will be used in a symbol. Mask out any characters
9426 that are not valid in that context. */
9427
9428 void
9429 clean_symbol_name (char *p)
9430 {
9431 for (; *p; p++)
9432 if (! (ISALNUM (*p)
9433 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9434 || *p == '$'
9435 #endif
9436 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9437 || *p == '.'
9438 #endif
9439 ))
9440 *p = '_';
9441 }
9442
9443 /* For anonymous aggregate types, we need some sort of name to
9444 hold on to. In practice, this should not appear, but it should
9445 not be harmful if it does. */
9446 bool
9447 anon_aggrname_p(const_tree id_node)
9448 {
9449 #ifndef NO_DOT_IN_LABEL
9450 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9451 && IDENTIFIER_POINTER (id_node)[1] == '_');
9452 #else /* NO_DOT_IN_LABEL */
9453 #ifndef NO_DOLLAR_IN_LABEL
9454 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9455 && IDENTIFIER_POINTER (id_node)[1] == '_');
9456 #else /* NO_DOLLAR_IN_LABEL */
9457 #define ANON_AGGRNAME_PREFIX "__anon_"
9458 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9459 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9460 #endif /* NO_DOLLAR_IN_LABEL */
9461 #endif /* NO_DOT_IN_LABEL */
9462 }
9463
9464 /* Return a format for an anonymous aggregate name. */
9465 const char *
9466 anon_aggrname_format()
9467 {
9468 #ifndef NO_DOT_IN_LABEL
9469 return "._%d";
9470 #else /* NO_DOT_IN_LABEL */
9471 #ifndef NO_DOLLAR_IN_LABEL
9472 return "$_%d";
9473 #else /* NO_DOLLAR_IN_LABEL */
9474 return "__anon_%d";
9475 #endif /* NO_DOLLAR_IN_LABEL */
9476 #endif /* NO_DOT_IN_LABEL */
9477 }
9478
9479 /* Generate a name for a special-purpose function.
9480 The generated name may need to be unique across the whole link.
9481 Changes to this function may also require corresponding changes to
9482 xstrdup_mask_random.
9483 TYPE is some string to identify the purpose of this function to the
9484 linker or collect2; it must start with an uppercase letter,
9485 one of:
9486 I - for constructors
9487 D - for destructors
9488 N - for C++ anonymous namespaces
9489 F - for DWARF unwind frame information. */
9490
9491 tree
9492 get_file_function_name (const char *type)
9493 {
9494 char *buf;
9495 const char *p;
9496 char *q;
9497
9498 /* If we already have a name we know to be unique, just use that. */
9499 if (first_global_object_name)
9500 p = q = ASTRDUP (first_global_object_name);
9501 /* If the target is handling the constructors/destructors, they
9502 will be local to this file and the name is only necessary for
9503 debugging purposes.
9504 We also assign sub_I and sub_D suffixes to constructors called from
9505 the global static constructors. These are always local. */
9506 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9507 || (strncmp (type, "sub_", 4) == 0
9508 && (type[4] == 'I' || type[4] == 'D')))
9509 {
9510 const char *file = main_input_filename;
9511 if (! file)
9512 file = LOCATION_FILE (input_location);
9513 /* Just use the file's basename, because the full pathname
9514 might be quite long. */
9515 p = q = ASTRDUP (lbasename (file));
9516 }
9517 else
9518 {
9519 /* Otherwise, the name must be unique across the entire link.
9520 We don't have anything that we know to be unique to this translation
9521 unit, so use what we do have and throw in some randomness. */
9522 unsigned len;
9523 const char *name = weak_global_object_name;
9524 const char *file = main_input_filename;
9525
9526 if (! name)
9527 name = "";
9528 if (! file)
9529 file = LOCATION_FILE (input_location);
9530
9531 len = strlen (file);
9532 q = (char *) alloca (9 + 17 + len + 1);
9533 memcpy (q, file, len + 1);
9534
9535 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9536 crc32_string (0, name), get_random_seed (false));
9537
9538 p = q;
9539 }
9540
9541 clean_symbol_name (q);
9542 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9543 + strlen (type));
9544
9545 /* Set up the name of the file-level functions we may need.
9546 Use a global object (which is already required to be unique over
9547 the program) rather than the file name (which imposes extra
9548 constraints). */
9549 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9550
9551 return get_identifier (buf);
9552 }
9553 \f
9554 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9555
9556 /* Complain that the tree code of NODE does not match the expected 0
9557 terminated list of trailing codes. The trailing code list can be
9558 empty, for a more vague error message. FILE, LINE, and FUNCTION
9559 are of the caller. */
9560
9561 void
9562 tree_check_failed (const_tree node, const char *file,
9563 int line, const char *function, ...)
9564 {
9565 va_list args;
9566 const char *buffer;
9567 unsigned length = 0;
9568 enum tree_code code;
9569
9570 va_start (args, function);
9571 while ((code = (enum tree_code) va_arg (args, int)))
9572 length += 4 + strlen (get_tree_code_name (code));
9573 va_end (args);
9574 if (length)
9575 {
9576 char *tmp;
9577 va_start (args, function);
9578 length += strlen ("expected ");
9579 buffer = tmp = (char *) alloca (length);
9580 length = 0;
9581 while ((code = (enum tree_code) va_arg (args, int)))
9582 {
9583 const char *prefix = length ? " or " : "expected ";
9584
9585 strcpy (tmp + length, prefix);
9586 length += strlen (prefix);
9587 strcpy (tmp + length, get_tree_code_name (code));
9588 length += strlen (get_tree_code_name (code));
9589 }
9590 va_end (args);
9591 }
9592 else
9593 buffer = "unexpected node";
9594
9595 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9596 buffer, get_tree_code_name (TREE_CODE (node)),
9597 function, trim_filename (file), line);
9598 }
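
/* Illustrative sketch: a failed single-code check such as
   TREE_CHECK (t, SSA_NAME) on an INTEGER_CST ends up here and produces a
   message of roughly the form

     tree check: expected ssa_name, have integer_cst in foo, at bar.c:42

   where the function name, file and line (hypothetical here) are those of
   the caller of the checking macro.  */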
9599
9600 /* Complain that the tree code of NODE does match the expected 0
9601 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9602 the caller. */
9603
9604 void
9605 tree_not_check_failed (const_tree node, const char *file,
9606 int line, const char *function, ...)
9607 {
9608 va_list args;
9609 char *buffer;
9610 unsigned length = 0;
9611 enum tree_code code;
9612
9613 va_start (args, function);
9614 while ((code = (enum tree_code) va_arg (args, int)))
9615 length += 4 + strlen (get_tree_code_name (code));
9616 va_end (args);
9617 va_start (args, function);
9618 buffer = (char *) alloca (length);
9619 length = 0;
9620 while ((code = (enum tree_code) va_arg (args, int)))
9621 {
9622 if (length)
9623 {
9624 strcpy (buffer + length, " or ");
9625 length += 4;
9626 }
9627 strcpy (buffer + length, get_tree_code_name (code));
9628 length += strlen (get_tree_code_name (code));
9629 }
9630 va_end (args);
9631
9632 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9633 buffer, get_tree_code_name (TREE_CODE (node)),
9634 function, trim_filename (file), line);
9635 }
9636
9637 /* Similar to tree_check_failed, except that we check for a class of tree
9638 code, given in CL. */
9639
9640 void
9641 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9642 const char *file, int line, const char *function)
9643 {
9644 internal_error
9645 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9646 TREE_CODE_CLASS_STRING (cl),
9647 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9648 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9649 }
9650
9651 /* Similar to tree_check_failed, except that instead of specifying a
9652 dozen codes, use the knowledge that they're all sequential. */
9653
9654 void
9655 tree_range_check_failed (const_tree node, const char *file, int line,
9656 const char *function, enum tree_code c1,
9657 enum tree_code c2)
9658 {
9659 char *buffer;
9660 unsigned length = 0;
9661 unsigned int c;
9662
9663 for (c = c1; c <= c2; ++c)
9664 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9665
9666 length += strlen ("expected ");
9667 buffer = (char *) alloca (length);
9668 length = 0;
9669
9670 for (c = c1; c <= c2; ++c)
9671 {
9672 const char *prefix = length ? " or " : "expected ";
9673
9674 strcpy (buffer + length, prefix);
9675 length += strlen (prefix);
9676 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9677 length += strlen (get_tree_code_name ((enum tree_code) c));
9678 }
9679
9680 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9681 buffer, get_tree_code_name (TREE_CODE (node)),
9682 function, trim_filename (file), line);
9683 }
9684
9685
9686 /* Similar to tree_check_failed, except that we check that a tree does
9687 not have the specified class, given in CL. */
9688
9689 void
9690 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9691 const char *file, int line, const char *function)
9692 {
9693 internal_error
9694 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9695 TREE_CODE_CLASS_STRING (cl),
9696 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9697 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9698 }
9699
9700
9701 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9702
9703 void
9704 omp_clause_check_failed (const_tree node, const char *file, int line,
9705 const char *function, enum omp_clause_code code)
9706 {
9707 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9708 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9709 function, trim_filename (file), line);
9710 }
9711
9712
9713 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9714
9715 void
9716 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9717 const char *function, enum omp_clause_code c1,
9718 enum omp_clause_code c2)
9719 {
9720 char *buffer;
9721 unsigned length = 0;
9722 unsigned int c;
9723
9724 for (c = c1; c <= c2; ++c)
9725 length += 4 + strlen (omp_clause_code_name[c]);
9726
9727 length += strlen ("expected ");
9728 buffer = (char *) alloca (length);
9729 length = 0;
9730
9731 for (c = c1; c <= c2; ++c)
9732 {
9733 const char *prefix = length ? " or " : "expected ";
9734
9735 strcpy (buffer + length, prefix);
9736 length += strlen (prefix);
9737 strcpy (buffer + length, omp_clause_code_name[c]);
9738 length += strlen (omp_clause_code_name[c]);
9739 }
9740
9741 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9742 buffer, omp_clause_code_name[OMP_CLAUSE_CODE (node)],
9743 function, trim_filename (file), line);
9744 }
9745
9746
9747 #undef DEFTREESTRUCT
9748 #define DEFTREESTRUCT(VAL, NAME) NAME,
9749
9750 static const char *ts_enum_names[] = {
9751 #include "treestruct.def"
9752 };
9753 #undef DEFTREESTRUCT
9754
9755 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9756
9757 /* Similar to tree_class_check_failed, except that we check for
9758 whether CODE contains the tree structure identified by EN. */
9759
9760 void
9761 tree_contains_struct_check_failed (const_tree node,
9762 const enum tree_node_structure_enum en,
9763 const char *file, int line,
9764 const char *function)
9765 {
9766 internal_error
9767 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9768 TS_ENUM_NAME (en),
9769 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9770 }
9771
9772
9773 /* Similar to above, except that the check is for the bounds of a
9774 tree_int_cst's (dynamically sized) element vector. */
9775
9776 void
9777 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9778 const char *function)
9779 {
9780 internal_error
9781 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9782 idx + 1, len, function, trim_filename (file), line);
9783 }
9784
9785 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9786 (dynamically sized) vector. */
9787
9788 void
9789 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9790 const char *function)
9791 {
9792 internal_error
9793 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9794 idx + 1, len, function, trim_filename (file), line);
9795 }
9796
9797 /* Similar to above, except that the check is for the bounds of the operand
9798 vector of an expression node EXP. */
9799
9800 void
9801 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9802 int line, const char *function)
9803 {
9804 enum tree_code code = TREE_CODE (exp);
9805 internal_error
9806 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9807 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9808 function, trim_filename (file), line);
9809 }
9810
9811 /* Similar to above, except that the check is for the number of
9812 operands of an OMP_CLAUSE node. */
9813
9814 void
9815 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9816 int line, const char *function)
9817 {
9818 internal_error
9819 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9820 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9821 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9822 trim_filename (file), line);
9823 }
9824 #endif /* ENABLE_TREE_CHECKING */
9825 \f
9826 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9827 and mapped to the machine mode MODE. Initialize its fields and build
9828 the information necessary for debugging output. */
9829
9830 static tree
9831 make_vector_type (tree innertype, int nunits, machine_mode mode)
9832 {
9833 tree t;
9834 inchash::hash hstate;
9835
9836 t = make_node (VECTOR_TYPE);
9837 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9838 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9839 SET_TYPE_MODE (t, mode);
9840
9841 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9842 SET_TYPE_STRUCTURAL_EQUALITY (t);
9843 else if ((TYPE_CANONICAL (innertype) != innertype
9844 || mode != VOIDmode)
9845 && !VECTOR_BOOLEAN_TYPE_P (t))
9846 TYPE_CANONICAL (t)
9847 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9848
9849 layout_type (t);
9850
9851 hstate.add_wide_int (VECTOR_TYPE);
9852 hstate.add_wide_int (nunits);
9853 hstate.add_wide_int (mode);
9854 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9855 t = type_hash_canon (hstate.end (), t);
9856
9857 /* We have built a main variant, based on the main variant of the
9858 inner type. Use it to build the variant we return. */
9859 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9860 && TREE_TYPE (t) != innertype)
9861 return build_type_attribute_qual_variant (t,
9862 TYPE_ATTRIBUTES (innertype),
9863 TYPE_QUALS (innertype));
9864
9865 return t;
9866 }
9867
9868 static tree
9869 make_or_reuse_type (unsigned size, int unsignedp)
9870 {
9871 int i;
9872
9873 if (size == INT_TYPE_SIZE)
9874 return unsignedp ? unsigned_type_node : integer_type_node;
9875 if (size == CHAR_TYPE_SIZE)
9876 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9877 if (size == SHORT_TYPE_SIZE)
9878 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9879 if (size == LONG_TYPE_SIZE)
9880 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9881 if (size == LONG_LONG_TYPE_SIZE)
9882 return (unsignedp ? long_long_unsigned_type_node
9883 : long_long_integer_type_node);
9884
9885 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9886 if (size == int_n_data[i].bitsize
9887 && int_n_enabled_p[i])
9888 return (unsignedp ? int_n_trees[i].unsigned_type
9889 : int_n_trees[i].signed_type);
9890
9891 if (unsignedp)
9892 return make_unsigned_type (size);
9893 else
9894 return make_signed_type (size);
9895 }
9896
9897 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9898
9899 static tree
9900 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9901 {
9902 if (satp)
9903 {
9904 if (size == SHORT_FRACT_TYPE_SIZE)
9905 return unsignedp ? sat_unsigned_short_fract_type_node
9906 : sat_short_fract_type_node;
9907 if (size == FRACT_TYPE_SIZE)
9908 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9909 if (size == LONG_FRACT_TYPE_SIZE)
9910 return unsignedp ? sat_unsigned_long_fract_type_node
9911 : sat_long_fract_type_node;
9912 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9913 return unsignedp ? sat_unsigned_long_long_fract_type_node
9914 : sat_long_long_fract_type_node;
9915 }
9916 else
9917 {
9918 if (size == SHORT_FRACT_TYPE_SIZE)
9919 return unsignedp ? unsigned_short_fract_type_node
9920 : short_fract_type_node;
9921 if (size == FRACT_TYPE_SIZE)
9922 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9923 if (size == LONG_FRACT_TYPE_SIZE)
9924 return unsignedp ? unsigned_long_fract_type_node
9925 : long_fract_type_node;
9926 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9927 return unsignedp ? unsigned_long_long_fract_type_node
9928 : long_long_fract_type_node;
9929 }
9930
9931 return make_fract_type (size, unsignedp, satp);
9932 }
9933
9934 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9935
9936 static tree
9937 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9938 {
9939 if (satp)
9940 {
9941 if (size == SHORT_ACCUM_TYPE_SIZE)
9942 return unsignedp ? sat_unsigned_short_accum_type_node
9943 : sat_short_accum_type_node;
9944 if (size == ACCUM_TYPE_SIZE)
9945 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9946 if (size == LONG_ACCUM_TYPE_SIZE)
9947 return unsignedp ? sat_unsigned_long_accum_type_node
9948 : sat_long_accum_type_node;
9949 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9950 return unsignedp ? sat_unsigned_long_long_accum_type_node
9951 : sat_long_long_accum_type_node;
9952 }
9953 else
9954 {
9955 if (size == SHORT_ACCUM_TYPE_SIZE)
9956 return unsignedp ? unsigned_short_accum_type_node
9957 : short_accum_type_node;
9958 if (size == ACCUM_TYPE_SIZE)
9959 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9960 if (size == LONG_ACCUM_TYPE_SIZE)
9961 return unsignedp ? unsigned_long_accum_type_node
9962 : long_accum_type_node;
9963 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9964 return unsignedp ? unsigned_long_long_accum_type_node
9965 : long_long_accum_type_node;
9966 }
9967
9968 return make_accum_type (size, unsignedp, satp);
9969 }
9970
9971
9972 /* Create an atomic variant node for TYPE. This routine is called
9973 during initialization of data types to create the 5 basic atomic
9974 types. The generic build_variant_type function requires these to
9975 already be set up in order to function properly, so it cannot be
9976 called from there. If ALIGN is non-zero, then ensure alignment is
9977 overridden to this value. */
9978
9979 static tree
9980 build_atomic_base (tree type, unsigned int align)
9981 {
9982 tree t;
9983
9984 /* Make sure it's not already registered. */
9985 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9986 return t;
9987
9988 t = build_variant_type_copy (type);
9989 set_type_quals (t, TYPE_QUAL_ATOMIC);
9990
9991 if (align)
9992 TYPE_ALIGN (t) = align;
9993
9994 return t;
9995 }
9996
9997 /* Create nodes for all integer types (and error_mark_node) using the sizes
9998 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9999 SHORT_DOUBLE specifies whether double should be of the same precision
10000 as float. */
10001
10002 void
10003 build_common_tree_nodes (bool signed_char, bool short_double)
10004 {
10005 int i;
10006
10007 error_mark_node = make_node (ERROR_MARK);
10008 TREE_TYPE (error_mark_node) = error_mark_node;
10009
10010 initialize_sizetypes ();
10011
10012 /* Define both `signed char' and `unsigned char'. */
10013 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10014 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10015 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10016 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10017
10018 /* Define `char', which is like either `signed char' or `unsigned char'
10019 but not the same as either. */
10020 char_type_node
10021 = (signed_char
10022 ? make_signed_type (CHAR_TYPE_SIZE)
10023 : make_unsigned_type (CHAR_TYPE_SIZE));
10024 TYPE_STRING_FLAG (char_type_node) = 1;
10025
10026 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10027 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10028 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10029 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10030 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10031 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10032 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10033 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10034
10035 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10036 {
10037 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10038 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10039 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10040 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10041
10042 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10043 && int_n_enabled_p[i])
10044 {
10045 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10046 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10047 }
10048 }
10049
10050 /* Define a boolean type. This type only represents boolean values but
10051 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10052 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10053 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10054 TYPE_PRECISION (boolean_type_node) = 1;
10055 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10056
10057 /* Define what type to use for size_t. */
10058 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10059 size_type_node = unsigned_type_node;
10060 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10061 size_type_node = long_unsigned_type_node;
10062 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10063 size_type_node = long_long_unsigned_type_node;
10064 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10065 size_type_node = short_unsigned_type_node;
10066 else
10067 {
10068 int i;
10069
10070 size_type_node = NULL_TREE;
10071 for (i = 0; i < NUM_INT_N_ENTS; i++)
10072 if (int_n_enabled_p[i])
10073 {
10074 char name[50];
10075 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10076
10077 if (strcmp (name, SIZE_TYPE) == 0)
10078 {
10079 size_type_node = int_n_trees[i].unsigned_type;
10080 }
10081 }
10082 if (size_type_node == NULL_TREE)
10083 gcc_unreachable ();
10084 }
10085
10086 /* Fill in the rest of the sized types. Reuse existing type nodes
10087 when possible. */
10088 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10089 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10090 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10091 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10092 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10093
10094 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10095 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10096 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10097 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10098 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10099
10100 /* Don't call build_qualified_type for atomics. That routine does
10101 special processing for atomics, and until they are initialized
10102 it's better not to make that call.
10103
10104 Check to see if there is a target override for atomic types. */
10105
10106 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10107 targetm.atomic_align_for_mode (QImode));
10108 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10109 targetm.atomic_align_for_mode (HImode));
10110 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10111 targetm.atomic_align_for_mode (SImode));
10112 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10113 targetm.atomic_align_for_mode (DImode));
10114 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10115 targetm.atomic_align_for_mode (TImode));
10116
10117 access_public_node = get_identifier ("public");
10118 access_protected_node = get_identifier ("protected");
10119 access_private_node = get_identifier ("private");
10120
10121 /* Define these next, since types below may use them. */
10122 integer_zero_node = build_int_cst (integer_type_node, 0);
10123 integer_one_node = build_int_cst (integer_type_node, 1);
10124 integer_three_node = build_int_cst (integer_type_node, 3);
10125 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10126
10127 size_zero_node = size_int (0);
10128 size_one_node = size_int (1);
10129 bitsize_zero_node = bitsize_int (0);
10130 bitsize_one_node = bitsize_int (1);
10131 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10132
10133 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10134 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10135
10136 void_type_node = make_node (VOID_TYPE);
10137 layout_type (void_type_node);
10138
10139 pointer_bounds_type_node = targetm.chkp_bound_type ();
10140
10141 /* We are not going to have real types in C with less than byte alignment,
10142 so we might as well not have any types that claim to have it. */
10143 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10144 TYPE_USER_ALIGN (void_type_node) = 0;
10145
10146 void_node = make_node (VOID_CST);
10147 TREE_TYPE (void_node) = void_type_node;
10148
10149 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10150 layout_type (TREE_TYPE (null_pointer_node));
10151
10152 ptr_type_node = build_pointer_type (void_type_node);
10153 const_ptr_type_node
10154 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10155 fileptr_type_node = ptr_type_node;
10156
10157 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10158
10159 float_type_node = make_node (REAL_TYPE);
10160 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10161 layout_type (float_type_node);
10162
10163 double_type_node = make_node (REAL_TYPE);
10164 if (short_double)
10165 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
10166 else
10167 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10168 layout_type (double_type_node);
10169
10170 long_double_type_node = make_node (REAL_TYPE);
10171 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10172 layout_type (long_double_type_node);
10173
10174 float_ptr_type_node = build_pointer_type (float_type_node);
10175 double_ptr_type_node = build_pointer_type (double_type_node);
10176 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10177 integer_ptr_type_node = build_pointer_type (integer_type_node);
10178
10179 /* Fixed size integer types. */
10180 uint16_type_node = make_or_reuse_type (16, 1);
10181 uint32_type_node = make_or_reuse_type (32, 1);
10182 uint64_type_node = make_or_reuse_type (64, 1);
10183
10184 /* Decimal float types. */
10185 dfloat32_type_node = make_node (REAL_TYPE);
10186 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10187 layout_type (dfloat32_type_node);
10188 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10189 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10190
10191 dfloat64_type_node = make_node (REAL_TYPE);
10192 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10193 layout_type (dfloat64_type_node);
10194 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10195 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10196
10197 dfloat128_type_node = make_node (REAL_TYPE);
10198 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10199 layout_type (dfloat128_type_node);
10200 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10201 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10202
10203 complex_integer_type_node = build_complex_type (integer_type_node);
10204 complex_float_type_node = build_complex_type (float_type_node);
10205 complex_double_type_node = build_complex_type (double_type_node);
10206 complex_long_double_type_node = build_complex_type (long_double_type_node);
10207
10208 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10209 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10210 sat_ ## KIND ## _type_node = \
10211 make_sat_signed_ ## KIND ## _type (SIZE); \
10212 sat_unsigned_ ## KIND ## _type_node = \
10213 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10214 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10215 unsigned_ ## KIND ## _type_node = \
10216 make_unsigned_ ## KIND ## _type (SIZE);
10217
10218 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10219 sat_ ## WIDTH ## KIND ## _type_node = \
10220 make_sat_signed_ ## KIND ## _type (SIZE); \
10221 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10222 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10223 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10224 unsigned_ ## WIDTH ## KIND ## _type_node = \
10225 make_unsigned_ ## KIND ## _type (SIZE);
10226
10227 /* Make fixed-point type nodes based on four different widths. */
10228 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10229 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10230 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10231 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10232 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10233
10234 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10235 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10236 NAME ## _type_node = \
10237 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10238 u ## NAME ## _type_node = \
10239 make_or_reuse_unsigned_ ## KIND ## _type \
10240 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10241 sat_ ## NAME ## _type_node = \
10242 make_or_reuse_sat_signed_ ## KIND ## _type \
10243 (GET_MODE_BITSIZE (MODE ## mode)); \
10244 sat_u ## NAME ## _type_node = \
10245 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10246 (GET_MODE_BITSIZE (U ## MODE ## mode));
10247
10248 /* Fixed-point type and mode nodes. */
10249 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10250 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10251 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10252 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10253 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10254 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10255 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10256 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10257 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10258 MAKE_FIXED_MODE_NODE (accum, da, DA)
10259 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10260
10261 {
10262 tree t = targetm.build_builtin_va_list ();
10263
10264 /* Many back-ends define record types without setting TYPE_NAME.
10265 If we copied the record type here, we'd keep the original
10266 record type without a name. This breaks name mangling. So,
10267 don't copy record types and let c_common_nodes_and_builtins()
10268 declare the type to be __builtin_va_list. */
10269 if (TREE_CODE (t) != RECORD_TYPE)
10270 t = build_variant_type_copy (t);
10271
10272 va_list_type_node = t;
10273 }
10274 }
10275
10276 /* Modify DECL for given flags.
10277 TM_PURE attribute is set only on types, so the function will modify
10278 DECL's type when ECF_TM_PURE is used. */
10279
10280 void
10281 set_call_expr_flags (tree decl, int flags)
10282 {
10283 if (flags & ECF_NOTHROW)
10284 TREE_NOTHROW (decl) = 1;
10285 if (flags & ECF_CONST)
10286 TREE_READONLY (decl) = 1;
10287 if (flags & ECF_PURE)
10288 DECL_PURE_P (decl) = 1;
10289 if (flags & ECF_LOOPING_CONST_OR_PURE)
10290 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10291 if (flags & ECF_NOVOPS)
10292 DECL_IS_NOVOPS (decl) = 1;
10293 if (flags & ECF_NORETURN)
10294 TREE_THIS_VOLATILE (decl) = 1;
10295 if (flags & ECF_MALLOC)
10296 DECL_IS_MALLOC (decl) = 1;
10297 if (flags & ECF_RETURNS_TWICE)
10298 DECL_IS_RETURNS_TWICE (decl) = 1;
10299 if (flags & ECF_LEAF)
10300 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10301 NULL, DECL_ATTRIBUTES (decl));
10302 if ((flags & ECF_TM_PURE) && flag_tm)
10303 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10304 /* Looping const or pure is implied by noreturn.
10305 There is currently no way to declare looping const or looping pure alone. */
10306 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10307 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10308 }
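
/* Illustrative sketch: callers pass a mask of ECF_* flags; e.g. for a
   hypothetical DECL,

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   sets TREE_READONLY and TREE_NOTHROW on the decl and attaches the "leaf"
   attribute, exactly the combinations local_define_builtin receives from
   build_common_builtin_nodes below.  */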
10309
10310
10311 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10312
10313 static void
10314 local_define_builtin (const char *name, tree type, enum built_in_function code,
10315 const char *library_name, int ecf_flags)
10316 {
10317 tree decl;
10318
10319 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10320 library_name, NULL_TREE);
10321 set_call_expr_flags (decl, ecf_flags);
10322
10323 set_builtin_decl (code, decl, true);
10324 }
10325
10326 /* Call this function after instantiating all builtins that the language
10327 front end cares about. This will build the rest of the builtins
10328 and internal functions that are relied upon by the tree optimizers and
10329 the middle-end. */
10330
10331 void
10332 build_common_builtin_nodes (void)
10333 {
10334 tree tmp, ftype;
10335 int ecf_flags;
10336
10337 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10338 {
10339 ftype = build_function_type (void_type_node, void_list_node);
10340 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10341 "__builtin_unreachable",
10342 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10343 | ECF_CONST);
10344 }
10345
10346 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10347 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10348 {
10349 ftype = build_function_type_list (ptr_type_node,
10350 ptr_type_node, const_ptr_type_node,
10351 size_type_node, NULL_TREE);
10352
10353 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10354 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10355 "memcpy", ECF_NOTHROW | ECF_LEAF);
10356 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10357 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10358 "memmove", ECF_NOTHROW | ECF_LEAF);
10359 }
10360
10361 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10362 {
10363 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10364 const_ptr_type_node, size_type_node,
10365 NULL_TREE);
10366 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10367 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10368 }
10369
10370 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10371 {
10372 ftype = build_function_type_list (ptr_type_node,
10373 ptr_type_node, integer_type_node,
10374 size_type_node, NULL_TREE);
10375 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10376 "memset", ECF_NOTHROW | ECF_LEAF);
10377 }
10378
10379 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10380 {
10381 ftype = build_function_type_list (ptr_type_node,
10382 size_type_node, NULL_TREE);
10383 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10384 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10385 }
10386
10387 ftype = build_function_type_list (ptr_type_node, size_type_node,
10388 size_type_node, NULL_TREE);
10389 local_define_builtin ("__builtin_alloca_with_align", ftype,
10390 BUILT_IN_ALLOCA_WITH_ALIGN,
10391 "__builtin_alloca_with_align",
10392 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10393
10394 /* If we're checking the stack, `alloca' can throw. */
10395 if (flag_stack_check)
10396 {
10397 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10398 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10399 }
10400
10401 ftype = build_function_type_list (void_type_node,
10402 ptr_type_node, ptr_type_node,
10403 ptr_type_node, NULL_TREE);
10404 local_define_builtin ("__builtin_init_trampoline", ftype,
10405 BUILT_IN_INIT_TRAMPOLINE,
10406 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10407 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10408 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10409 "__builtin_init_heap_trampoline",
10410 ECF_NOTHROW | ECF_LEAF);
10411
10412 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10413 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10414 BUILT_IN_ADJUST_TRAMPOLINE,
10415 "__builtin_adjust_trampoline",
10416 ECF_CONST | ECF_NOTHROW);
10417
10418 ftype = build_function_type_list (void_type_node,
10419 ptr_type_node, ptr_type_node, NULL_TREE);
10420 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10421 BUILT_IN_NONLOCAL_GOTO,
10422 "__builtin_nonlocal_goto",
10423 ECF_NORETURN | ECF_NOTHROW);
10424
10425 ftype = build_function_type_list (void_type_node,
10426 ptr_type_node, ptr_type_node, NULL_TREE);
10427 local_define_builtin ("__builtin_setjmp_setup", ftype,
10428 BUILT_IN_SETJMP_SETUP,
10429 "__builtin_setjmp_setup", ECF_NOTHROW);
10430
10431 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10432 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10433 BUILT_IN_SETJMP_RECEIVER,
10434 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10435
10436 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10437 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10438 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10439
10440 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10441 local_define_builtin ("__builtin_stack_restore", ftype,
10442 BUILT_IN_STACK_RESTORE,
10443 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10444
10445 /* If there's a possibility that we might use the ARM EABI, build the
10446 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10447 if (targetm.arm_eabi_unwinder)
10448 {
10449 ftype = build_function_type_list (void_type_node, NULL_TREE);
10450 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10451 BUILT_IN_CXA_END_CLEANUP,
10452 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10453 }
10454
10455 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10456 local_define_builtin ("__builtin_unwind_resume", ftype,
10457 BUILT_IN_UNWIND_RESUME,
10458 ((targetm_common.except_unwind_info (&global_options)
10459 == UI_SJLJ)
10460 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10461 ECF_NORETURN);
10462
10463 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10464 {
10465 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10466 NULL_TREE);
10467 local_define_builtin ("__builtin_return_address", ftype,
10468 BUILT_IN_RETURN_ADDRESS,
10469 "__builtin_return_address",
10470 ECF_NOTHROW);
10471 }
10472
10473 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10474 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10475 {
10476 ftype = build_function_type_list (void_type_node, ptr_type_node,
10477 ptr_type_node, NULL_TREE);
10478 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10479 local_define_builtin ("__cyg_profile_func_enter", ftype,
10480 BUILT_IN_PROFILE_FUNC_ENTER,
10481 "__cyg_profile_func_enter", 0);
10482 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10483 local_define_builtin ("__cyg_profile_func_exit", ftype,
10484 BUILT_IN_PROFILE_FUNC_EXIT,
10485 "__cyg_profile_func_exit", 0);
10486 }
10487
10488 /* The exception object and filter values from the runtime. The argument
10489 must be zero before exception lowering, i.e. from the front end. After
10490 exception lowering, it will be the region number for the exception
10491 landing pad. These functions are PURE instead of CONST to prevent
10492 them from being hoisted past the exception edge that will initialize
10493 their values in the landing pad. */
10494 ftype = build_function_type_list (ptr_type_node,
10495 integer_type_node, NULL_TREE);
10496 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10497 /* Only use TM_PURE if we have TM language support. */
10498 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10499 ecf_flags |= ECF_TM_PURE;
10500 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10501 "__builtin_eh_pointer", ecf_flags);
10502
10503 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10504 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10505 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10506 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10507
10508 ftype = build_function_type_list (void_type_node,
10509 integer_type_node, integer_type_node,
10510 NULL_TREE);
10511 local_define_builtin ("__builtin_eh_copy_values", ftype,
10512 BUILT_IN_EH_COPY_VALUES,
10513 "__builtin_eh_copy_values", ECF_NOTHROW);
10514
10515 /* Complex multiplication and division. These are handled as builtins
10516 rather than optabs because emit_library_call_value doesn't support
10517 complex. Further, we can do slightly better with folding these
10518 beasties if the real and imaginary parts of the arguments are separate. */
10519 {
10520 int mode;
10521
10522 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10523 {
10524 char mode_name_buf[4], *q;
10525 const char *p;
10526 enum built_in_function mcode, dcode;
10527 tree type, inner_type;
10528 const char *prefix = "__";
10529
10530 if (targetm.libfunc_gnu_prefix)
10531 prefix = "__gnu_";
10532
10533 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10534 if (type == NULL)
10535 continue;
10536 inner_type = TREE_TYPE (type);
10537
10538 ftype = build_function_type_list (type, inner_type, inner_type,
10539 inner_type, inner_type, NULL_TREE);
10540
10541 mcode = ((enum built_in_function)
10542 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10543 dcode = ((enum built_in_function)
10544 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10545
10546 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10547 *q = TOLOWER (*p);
10548 *q = '\0';
10549
10550 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10551 NULL);
10552 local_define_builtin (built_in_names[mcode], ftype, mcode,
10553 built_in_names[mcode],
10554 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10555
10556 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10557 NULL);
10558 local_define_builtin (built_in_names[dcode], ftype, dcode,
10559 built_in_names[dcode],
10560 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10561 }
10562 }
10563
10564 init_internal_fns ();
10565 }
10566
10567 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10568 better way.
10569
10570 If we requested a pointer to a vector, build up the pointers that
10571 we stripped off while looking for the inner type. Similarly for
10572 return values from functions.
10573
10574 The argument TYPE is the top of the chain, and BOTTOM is the
10575 new type which we will point to. */
10576
10577 tree
10578 reconstruct_complex_type (tree type, tree bottom)
10579 {
10580 tree inner, outer;
10581
10582 if (TREE_CODE (type) == POINTER_TYPE)
10583 {
10584 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10585 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10586 TYPE_REF_CAN_ALIAS_ALL (type));
10587 }
10588 else if (TREE_CODE (type) == REFERENCE_TYPE)
10589 {
10590 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10591 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10592 TYPE_REF_CAN_ALIAS_ALL (type));
10593 }
10594 else if (TREE_CODE (type) == ARRAY_TYPE)
10595 {
10596 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10597 outer = build_array_type (inner, TYPE_DOMAIN (type));
10598 }
10599 else if (TREE_CODE (type) == FUNCTION_TYPE)
10600 {
10601 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10602 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10603 }
10604 else if (TREE_CODE (type) == METHOD_TYPE)
10605 {
10606 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10607 /* The build_method_type_directly() routine prepends 'this' to the argument
10608 list, so we must compensate by getting rid of it. */
10609 outer
10610 = build_method_type_directly
10611 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10612 inner,
10613 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10614 }
10615 else if (TREE_CODE (type) == OFFSET_TYPE)
10616 {
10617 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10618 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10619 }
10620 else
10621 return bottom;
10622
10623 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10624 TYPE_QUALS (type));
10625 }
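
/* Illustrative sketch: given a hypothetical 4-element vector of floats,
   rebuilding a stripped pointer chain looks like

     tree v4sf = build_vector_type (float_type_node, 4);
     tree pf = build_pointer_type (float_type_node);
     tree pv = reconstruct_complex_type (pf, v4sf);

   so that PV is a pointer to the vector type, following the POINTER_TYPE
   case above.  */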
10626
10627 /* Returns a vector tree node given a mode (an integer or vector mode) and
10628 the inner type. */
10629 tree
10630 build_vector_type_for_mode (tree innertype, machine_mode mode)
10631 {
10632 int nunits;
10633
10634 switch (GET_MODE_CLASS (mode))
10635 {
10636 case MODE_VECTOR_INT:
10637 case MODE_VECTOR_FLOAT:
10638 case MODE_VECTOR_FRACT:
10639 case MODE_VECTOR_UFRACT:
10640 case MODE_VECTOR_ACCUM:
10641 case MODE_VECTOR_UACCUM:
10642 nunits = GET_MODE_NUNITS (mode);
10643 break;
10644
10645 case MODE_INT:
10646 /* Check that there are no leftover bits. */
10647 gcc_assert (GET_MODE_BITSIZE (mode)
10648 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10649
10650 nunits = GET_MODE_BITSIZE (mode)
10651 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10652 break;
10653
10654 default:
10655 gcc_unreachable ();
10656 }
10657
10658 return make_vector_type (innertype, nunits, mode);
10659 }
10660
10661 /* Similarly, but takes the inner type and number of units, which must be
10662 a power of two. */
10663
10664 tree
10665 build_vector_type (tree innertype, int nunits)
10666 {
10667 return make_vector_type (innertype, nunits, VOIDmode);
10668 }
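
/* Illustrative sketch: a 4-element vector of 32-bit ints can be built
   either from a mode or from the unit count, e.g. on a target providing
   V4SImode

     tree a = build_vector_type_for_mode (intSI_type_node, V4SImode);
     tree b = build_vector_type (intSI_type_node, 4);

   Both end up in make_vector_type; B passes VOIDmode and lets layout_type
   choose the machine mode.  */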
10669
10670 /* Build a truth vector type with NUNITS units for a vector of VECTOR_SIZE bytes. */
10671
10672 tree
10673 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10674 {
10675 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10676 vector_size);
10677
10678 gcc_assert (mask_mode != VOIDmode);
10679
10680 unsigned HOST_WIDE_INT vsize;
10681 if (mask_mode == BLKmode)
10682 vsize = vector_size * BITS_PER_UNIT;
10683 else
10684 vsize = GET_MODE_BITSIZE (mask_mode);
10685
10686 unsigned HOST_WIDE_INT esize = vsize / nunits;
10687 gcc_assert (esize * nunits == vsize);
10688
10689 tree bool_type = build_nonstandard_boolean_type (esize);
10690
10691 return make_vector_type (bool_type, nunits, mask_mode);
10692 }
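
/* Illustrative worked example: for a hypothetical target whose
   get_mask_mode hook returns an integer mask mode of 4 bits when
   NUNITS == 4, VSIZE is 4, ESIZE is 1 and the element type is the 1-bit
   boolean from build_nonstandard_boolean_type; for a BLKmode mask with
   VECTOR_SIZE == 16 bytes and BITS_PER_UNIT of 8, VSIZE is 128 and ESIZE
   is 32.  */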
10693
10694 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10695
10696 tree
10697 build_same_sized_truth_vector_type (tree vectype)
10698 {
10699 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10700 return vectype;
10701
10702 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10703
10704 if (!size)
10705 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10706
10707 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10708 }
10709
10710 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10711
10712 tree
10713 build_opaque_vector_type (tree innertype, int nunits)
10714 {
10715 tree t = make_vector_type (innertype, nunits, VOIDmode);
10716 tree cand;
10717 /* We always build the non-opaque variant before the opaque one,
10718 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10719 cand = TYPE_NEXT_VARIANT (t);
10720 if (cand
10721 && TYPE_VECTOR_OPAQUE (cand)
10722 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10723 return cand;
10724 /* Otherwise build a variant type and make sure to queue it after
10725 the non-opaque type. */
10726 cand = build_distinct_type_copy (t);
10727 TYPE_VECTOR_OPAQUE (cand) = true;
10728 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10729 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10730 TYPE_NEXT_VARIANT (t) = cand;
10731 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10732 return cand;
10733 }
10734
10735
10736 /* Given an initializer INIT, return TRUE if INIT is zero or some
10737 aggregate of zeros. Otherwise return FALSE. */
10738 bool
10739 initializer_zerop (const_tree init)
10740 {
10741 tree elt;
10742
10743 STRIP_NOPS (init);
10744
10745 switch (TREE_CODE (init))
10746 {
10747 case INTEGER_CST:
10748 return integer_zerop (init);
10749
10750 case REAL_CST:
10751 /* ??? Note that this is not correct for C4X float formats. There,
10752 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10753 negative exponent. */
10754 return real_zerop (init)
10755 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10756
10757 case FIXED_CST:
10758 return fixed_zerop (init);
10759
10760 case COMPLEX_CST:
10761 return integer_zerop (init)
10762 || (real_zerop (init)
10763 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10764 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10765
10766 case VECTOR_CST:
10767 {
10768 unsigned i;
10769 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10770 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10771 return false;
10772 return true;
10773 }
10774
10775 case CONSTRUCTOR:
10776 {
10777 unsigned HOST_WIDE_INT idx;
10778
10779 if (TREE_CLOBBER_P (init))
10780 return false;
10781 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10782 if (!initializer_zerop (elt))
10783 return false;
10784 return true;
10785 }
10786
10787 case STRING_CST:
10788 {
10789 int i;
10790
10791 /* We need to loop through all elements to handle cases like
10792 "\0" and "\0foobar". */
10793 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10794 if (TREE_STRING_POINTER (init)[i] != '\0')
10795 return false;
10796
10797 return true;
10798 }
10799
10800 default:
10801 return false;
10802 }
10803 }
10804
10805 /* Check whether vector VEC consists of equal elements and whether
10806 the number of elements corresponds to the type of VEC.
10807 Return the first element of the vector if it is uniform,
10808 or NULL_TREE otherwise. */
10809 tree
10810 uniform_vector_p (const_tree vec)
10811 {
10812 tree first, t;
10813 unsigned i;
10814
10815 if (vec == NULL_TREE)
10816 return NULL_TREE;
10817
10818 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10819
10820 if (TREE_CODE (vec) == VECTOR_CST)
10821 {
10822 first = VECTOR_CST_ELT (vec, 0);
10823 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10824 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10825 return NULL_TREE;
10826
10827 return first;
10828 }
10829
10830 else if (TREE_CODE (vec) == CONSTRUCTOR)
10831 {
10832 first = error_mark_node;
10833
10834 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10835 {
10836 if (i == 0)
10837 {
10838 first = t;
10839 continue;
10840 }
10841 if (!operand_equal_p (first, t, 0))
10842 return NULL_TREE;
10843 }
10844 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10845 return NULL_TREE;
10846
10847 return first;
10848 }
10849
10850 return NULL_TREE;
10851 }
10852
10853 /* Build an empty statement at location LOC. */
10854
10855 tree
10856 build_empty_stmt (location_t loc)
10857 {
10858 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10859 SET_EXPR_LOCATION (t, loc);
10860 return t;
10861 }
10862
10863
10864 /* Build an OpenMP clause with code CODE. LOC is the location of the
10865 clause. */
10866
10867 tree
10868 build_omp_clause (location_t loc, enum omp_clause_code code)
10869 {
10870 tree t;
10871 int size, length;
10872
10873 length = omp_clause_num_ops[code];
10874 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10875
10876 record_node_allocation_statistics (OMP_CLAUSE, size);
10877
10878 t = (tree) ggc_internal_alloc (size);
10879 memset (t, 0, size);
10880 TREE_SET_CODE (t, OMP_CLAUSE);
10881 OMP_CLAUSE_SET_CODE (t, code);
10882 OMP_CLAUSE_LOCATION (t) = loc;
10883
10884 return t;
10885 }
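
/* Illustrative sketch: a one-operand clause such as OMP_CLAUSE_PRIVATE is
   created and filled in by callers roughly as

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = decl;

   with LOC and DECL supplied by the caller; the size computation above
   reserves omp_clause_num_ops[code] - 1 extra operand slots beyond the
   one embedded in struct tree_omp_clause.  */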
10886
10887 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10888 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10889 Except for the CODE and operand count field, other storage for the
10890 object is initialized to zeros. */
10891
10892 tree
10893 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10894 {
10895 tree t;
10896 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10897
10898 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10899 gcc_assert (len >= 1);
10900
10901 record_node_allocation_statistics (code, length);
10902
10903 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10904
10905 TREE_SET_CODE (t, code);
10906
10907 /* Can't use TREE_OPERAND to store the length because if checking is
10908 enabled, it will try to check the length before we store it. :-P */
10909 t->exp.operands[0] = build_int_cst (sizetype, len);
10910
10911 return t;
10912 }
10913
10914 /* Helper function for build_call_* functions; build a CALL_EXPR with
10915 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10916 the argument slots. */
10917
10918 static tree
10919 build_call_1 (tree return_type, tree fn, int nargs)
10920 {
10921 tree t;
10922
10923 t = build_vl_exp (CALL_EXPR, nargs + 3);
10924 TREE_TYPE (t) = return_type;
10925 CALL_EXPR_FN (t) = fn;
10926 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10927
10928 return t;
10929 }
10930
10931 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10932 FN and a null static chain slot. NARGS is the number of call arguments
10933 which are specified as "..." arguments. */
10934
10935 tree
10936 build_call_nary (tree return_type, tree fn, int nargs, ...)
10937 {
10938 tree ret;
10939 va_list args;
10940 va_start (args, nargs);
10941 ret = build_call_valist (return_type, fn, nargs, args);
10942 va_end (args);
10943 return ret;
10944 }
10945
10946 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10947 FN and a null static chain slot. NARGS is the number of call arguments
10948 which are specified as a va_list ARGS. */
10949
10950 tree
10951 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10952 {
10953 tree t;
10954 int i;
10955
10956 t = build_call_1 (return_type, fn, nargs);
10957 for (i = 0; i < nargs; i++)
10958 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10959 process_call_operands (t);
10960 return t;
10961 }
10962
10963 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10964 FN and a null static chain slot. NARGS is the number of call arguments
10965 which are specified as a tree array ARGS. */
10966
10967 tree
10968 build_call_array_loc (location_t loc, tree return_type, tree fn,
10969 int nargs, const tree *args)
10970 {
10971 tree t;
10972 int i;
10973
10974 t = build_call_1 (return_type, fn, nargs);
10975 for (i = 0; i < nargs; i++)
10976 CALL_EXPR_ARG (t, i) = args[i];
10977 process_call_operands (t);
10978 SET_EXPR_LOCATION (t, loc);
10979 return t;
10980 }
10981
10982 /* Like build_call_array, but takes a vec. */
10983
10984 tree
10985 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10986 {
10987 tree ret, t;
10988 unsigned int ix;
10989
10990 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10991 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10992 CALL_EXPR_ARG (ret, ix) = t;
10993 process_call_operands (ret);
10994 return ret;
10995 }
10996
10997 /* Conveniently construct a function call expression. FNDECL names the
10998 function to be called and N arguments are passed in the array
10999 ARGARRAY. */
11000
11001 tree
11002 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11003 {
11004 tree fntype = TREE_TYPE (fndecl);
11005 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11006
11007 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11008 }
11009
11010 /* Conveniently construct a function call expression. FNDECL names the
11011 function to be called and the arguments are passed in the vector
11012 VEC. */
11013
11014 tree
11015 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11016 {
11017 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11018 vec_safe_address (vec));
11019 }
11020
11021
11022 /* Conveniently construct a function call expression. FNDECL names the
11023 function to be called, N is the number of arguments, and the "..."
11024 parameters are the argument expressions. */
11025
11026 tree
11027 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11028 {
11029 va_list ap;
11030 tree *argarray = XALLOCAVEC (tree, n);
11031 int i;
11032
11033 va_start (ap, n);
11034 for (i = 0; i < n; i++)
11035 argarray[i] = va_arg (ap, tree);
11036 va_end (ap);
11037 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11038 }
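
/* Illustrative sketch: the varargs convenience wrappers let a caller
   write, for a hypothetical two-argument FNDECL,

     tree call = build_call_expr_loc (loc, fndecl, 2, arg0, arg1);

   which goes through build_call_expr_loc_array and ultimately folds or
   builds a CALL_EXPR via fold_build_call_array_loc.  */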
11039
11040 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11041 varargs macros aren't supported by all bootstrap compilers. */
11042
11043 tree
11044 build_call_expr (tree fndecl, int n, ...)
11045 {
11046 va_list ap;
11047 tree *argarray = XALLOCAVEC (tree, n);
11048 int i;
11049
11050 va_start (ap, n);
11051 for (i = 0; i < n; i++)
11052 argarray[i] = va_arg (ap, tree);
11053 va_end (ap);
11054 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11055 }
11056
11057 /* Build an internal call expression. This is just like CALL_EXPR, except
11058 its CALL_EXPR_FN is NULL. It will get gimplified later into a call to
11059 the internal function. */
11060
11061 tree
11062 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11063 tree type, int n, ...)
11064 {
11065 va_list ap;
11066 int i;
11067
11068 tree fn = build_call_1 (type, NULL_TREE, n);
11069 va_start (ap, n);
11070 for (i = 0; i < n; i++)
11071 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
11072 va_end (ap);
11073 SET_EXPR_LOCATION (fn, loc);
11074 CALL_EXPR_IFN (fn) = ifn;
11075 return fn;
11076 }
11077
11078 /* Create a new constant string literal and return a char* pointer to it.
11079 The STRING_CST value is the LEN characters at STR. */
11080 tree
11081 build_string_literal (int len, const char *str)
11082 {
11083 tree t, elem, index, type;
11084
11085 t = build_string (len, str);
11086 elem = build_type_variant (char_type_node, 1, 0);
11087 index = build_index_type (size_int (len - 1));
11088 type = build_array_type (elem, index);
11089 TREE_TYPE (t) = type;
11090 TREE_CONSTANT (t) = 1;
11091 TREE_READONLY (t) = 1;
11092 TREE_STATIC (t) = 1;
11093
11094 type = build_pointer_type (elem);
11095 t = build1 (ADDR_EXPR, type,
11096 build4 (ARRAY_REF, elem,
11097 t, integer_zero_node, NULL_TREE, NULL_TREE));
11098 return t;
11099 }
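
/* Illustrative sketch: typical callers include the terminating NUL in
   LEN, e.g.

     tree s = build_string_literal (strlen ("hi") + 1, "hi");

   The result is an ADDR_EXPR of an ARRAY_REF at index zero, i.e. a
   pointer to the first character of the constant array.  */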
11100
11101
11102
11103 /* Return true if T (assumed to be a DECL) must be assigned a memory
11104 location. */
11105
11106 bool
11107 needs_to_live_in_memory (const_tree t)
11108 {
11109 return (TREE_ADDRESSABLE (t)
11110 || is_global_var (t)
11111 || (TREE_CODE (t) == RESULT_DECL
11112 && !DECL_BY_REFERENCE (t)
11113 && aggregate_value_p (t, current_function_decl)));
11114 }
11115
11116 /* Return the value of the constant X, sign-extended to a HOST_WIDE_INT. */
11117
11118 HOST_WIDE_INT
11119 int_cst_value (const_tree x)
11120 {
11121 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11122 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11123
11124 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11125 gcc_assert (cst_and_fits_in_hwi (x));
11126
11127 if (bits < HOST_BITS_PER_WIDE_INT)
11128 {
11129 bool negative = ((val >> (bits - 1)) & 1) != 0;
11130 if (negative)
11131 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11132 else
11133 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11134 }
11135
11136 return val;
11137 }
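
/* Illustrative worked example: for an 8-bit type, the constant 0xff has
   its top bit set within the 8-bit precision, so the code above
   sign-extends it to -1:

     tree t8 = build_nonstandard_integer_type (8, 1);
     HOST_WIDE_INT v = int_cst_value (build_int_cst (t8, 0xff));

   leaves V equal to -1, while 0x7f would be returned unchanged.  */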
11138
11139 /* If TYPE is an integral or pointer type, return an integer type with
11140 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11141 if TYPE is already an integer type of signedness UNSIGNEDP. */
11142
11143 tree
11144 signed_or_unsigned_type_for (int unsignedp, tree type)
11145 {
11146 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11147 return type;
11148
11149 if (TREE_CODE (type) == VECTOR_TYPE)
11150 {
11151 tree inner = TREE_TYPE (type);
11152 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11153 if (!inner2)
11154 return NULL_TREE;
11155 if (inner == inner2)
11156 return type;
11157 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11158 }
11159
11160 if (!INTEGRAL_TYPE_P (type)
11161 && !POINTER_TYPE_P (type)
11162 && TREE_CODE (type) != OFFSET_TYPE)
11163 return NULL_TREE;
11164
11165 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11166 }
11167
11168 /* If TYPE is an integral or pointer type, return an integer type with
11169 the same precision which is unsigned, or itself if TYPE is already an
11170 unsigned integer type. */
11171
11172 tree
11173 unsigned_type_for (tree type)
11174 {
11175 return signed_or_unsigned_type_for (1, type);
11176 }
11177
11178 /* If TYPE is an integral or pointer type, return an integer type with
11179 the same precision which is signed, or itself if TYPE is already a
11180 signed integer type. */
11181
11182 tree
11183 signed_type_for (tree type)
11184 {
11185 return signed_or_unsigned_type_for (0, type);
11186 }
11187
11188 /* If TYPE is a vector type, return the corresponding boolean vector type
11189 with the same number of subparts. Otherwise return boolean_type_node. */
11190
11191 tree
11192 truth_type_for (tree type)
11193 {
11194 if (TREE_CODE (type) == VECTOR_TYPE)
11195 {
11196 if (VECTOR_BOOLEAN_TYPE_P (type))
11197 return type;
11198 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11199 GET_MODE_SIZE (TYPE_MODE (type)));
11200 }
11201 else
11202 return boolean_type_node;
11203 }
11204
11205 /* Returns the largest value obtainable by casting something in INNER type to
11206 OUTER type. */
11207
11208 tree
11209 upper_bound_in_type (tree outer, tree inner)
11210 {
11211 unsigned int det = 0;
11212 unsigned oprec = TYPE_PRECISION (outer);
11213 unsigned iprec = TYPE_PRECISION (inner);
11214 unsigned prec;
11215
11216 /* Compute a unique number for every combination. */
11217 det |= (oprec > iprec) ? 4 : 0;
11218 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11219 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11220
11221 /* Determine the exponent to use. */
11222 switch (det)
11223 {
11224 case 0:
11225 case 1:
11226 /* oprec <= iprec, outer: signed, inner: don't care. */
11227 prec = oprec - 1;
11228 break;
11229 case 2:
11230 case 3:
11231 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11232 prec = oprec;
11233 break;
11234 case 4:
11235 /* oprec > iprec, outer: signed, inner: signed. */
11236 prec = iprec - 1;
11237 break;
11238 case 5:
11239 /* oprec > iprec, outer: signed, inner: unsigned. */
11240 prec = iprec;
11241 break;
11242 case 6:
11243 /* oprec > iprec, outer: unsigned, inner: signed. */
11244 prec = oprec;
11245 break;
11246 case 7:
11247 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11248 prec = iprec;
11249 break;
11250 default:
11251 gcc_unreachable ();
11252 }
11253
11254 return wide_int_to_tree (outer,
11255 wi::mask (prec, false, TYPE_PRECISION (outer)));
11256 }
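
/* A worked example (illustrative): for OUTER = int (assumed 32 bits, signed)
   and INNER = unsigned char (8 bits), DET is 4|0|1 = 5, so PREC = 8 and the
   result is 255 -- the largest int value reachable by casting an unsigned
   char.  */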
11257
11258 /* Returns the smallest value obtainable by casting something in INNER type to
11259 OUTER type. */
11260
11261 tree
11262 lower_bound_in_type (tree outer, tree inner)
11263 {
11264 unsigned oprec = TYPE_PRECISION (outer);
11265 unsigned iprec = TYPE_PRECISION (inner);
11266
11267 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11268 and obtain 0. */
11269 if (TYPE_UNSIGNED (outer)
11270 /* If we are widening something of an unsigned type, OUTER type
11271 contains all values of INNER type. In particular, both INNER
11272 and OUTER types have zero in common. */
11273 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11274 return build_int_cst (outer, 0);
11275 else
11276 {
11277 /* If we are widening a signed type to another signed type, we
11278 want to obtain -2^^(iprec-1). If we are keeping the
11279 precision or narrowing to a signed type, we want to obtain
11280 -2^(oprec-1). */
11281 unsigned prec = oprec > iprec ? iprec : oprec;
11282 return wide_int_to_tree (outer,
11283 wi::mask (prec - 1, true,
11284 TYPE_PRECISION (outer)));
11285 }
11286 }
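
/* A worked example (illustrative): for OUTER = int (assumed 32 bits) and
   INNER = signed char (8 bits), OUTER is signed and wider, so PREC = 8 and
   the result is wi::mask (7, true, 32), i.e. -128 -- the smallest value a
   signed char can supply.  */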
11287
11288 /* Return nonzero if two operands that are suitable for PHI nodes are
11289 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11290 SSA_NAME or invariant. Note that this is strictly an optimization.
11291 That is, callers of this function can directly call operand_equal_p
11292 and get the same result, only slower. */
11293
11294 int
11295 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11296 {
11297 if (arg0 == arg1)
11298 return 1;
11299 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11300 return 0;
11301 return operand_equal_p (arg0, arg1, 0);
11302 }
11303
11304 /* Returns the number of zeros at the end of the binary representation of X. */
11305
11306 tree
11307 num_ending_zeros (const_tree x)
11308 {
11309 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11310 }
11311
11312
11313 #define WALK_SUBTREE(NODE) \
11314 do \
11315 { \
11316 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11317 if (result) \
11318 return result; \
11319 } \
11320 while (0)
11321
11322 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11323 be walked whenever a type is seen in the tree. The rest of the operands and
11324 the return value are as for walk_tree. */
11325
11326 static tree
11327 walk_type_fields (tree type, walk_tree_fn func, void *data,
11328 hash_set<tree> *pset, walk_tree_lh lh)
11329 {
11330 tree result = NULL_TREE;
11331
11332 switch (TREE_CODE (type))
11333 {
11334 case POINTER_TYPE:
11335 case REFERENCE_TYPE:
11336 case VECTOR_TYPE:
11337 /* We have to worry about mutually recursive pointers. These can't
11338 be written in C. They can in Ada. It's pathological, but
11339 there's an ACATS test (c38102a) that checks it. Deal with this
11340 by checking if we're pointing to another pointer, that one
11341 points to another pointer, that one does too, and we have no htab.
11342 If so, get a hash table. We check three levels deep to avoid
11343 the cost of the hash table if we don't need one. */
11344 if (POINTER_TYPE_P (TREE_TYPE (type))
11345 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11346 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11347 && !pset)
11348 {
11349 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11350 func, data);
11351 if (result)
11352 return result;
11353
11354 break;
11355 }
11356
11357 /* ... fall through ... */
11358
11359 case COMPLEX_TYPE:
11360 WALK_SUBTREE (TREE_TYPE (type));
11361 break;
11362
11363 case METHOD_TYPE:
11364 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11365
11366 /* Fall through. */
11367
11368 case FUNCTION_TYPE:
11369 WALK_SUBTREE (TREE_TYPE (type));
11370 {
11371 tree arg;
11372
11373 /* We never want to walk into default arguments. */
11374 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11375 WALK_SUBTREE (TREE_VALUE (arg));
11376 }
11377 break;
11378
11379 case ARRAY_TYPE:
11380 /* Don't follow this nodes's type if a pointer for fear that
11381 we'll have infinite recursion. If we have a PSET, then we
11382 need not fear. */
11383 if (pset
11384 || (!POINTER_TYPE_P (TREE_TYPE (type))
11385 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11386 WALK_SUBTREE (TREE_TYPE (type));
11387 WALK_SUBTREE (TYPE_DOMAIN (type));
11388 break;
11389
11390 case OFFSET_TYPE:
11391 WALK_SUBTREE (TREE_TYPE (type));
11392 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11393 break;
11394
11395 default:
11396 break;
11397 }
11398
11399 return NULL_TREE;
11400 }
11401
11402 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11403 called with the DATA and the address of each sub-tree. If FUNC returns a
11404 non-NULL value, the traversal is stopped, and the value returned by FUNC
11405 is returned. If PSET is non-NULL it is used to record the nodes visited,
11406 and to avoid visiting a node more than once. */
11407
11408 tree
11409 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11410 hash_set<tree> *pset, walk_tree_lh lh)
11411 {
11412 enum tree_code code;
11413 int walk_subtrees;
11414 tree result;
11415
11416 #define WALK_SUBTREE_TAIL(NODE) \
11417 do \
11418 { \
11419 tp = & (NODE); \
11420 goto tail_recurse; \
11421 } \
11422 while (0)
11423
11424 tail_recurse:
11425 /* Skip empty subtrees. */
11426 if (!*tp)
11427 return NULL_TREE;
11428
11429 /* Don't walk the same tree twice, if the user has requested
11430 that we avoid doing so. */
11431 if (pset && pset->add (*tp))
11432 return NULL_TREE;
11433
11434 /* Call the function. */
11435 walk_subtrees = 1;
11436 result = (*func) (tp, &walk_subtrees, data);
11437
11438 /* If we found something, return it. */
11439 if (result)
11440 return result;
11441
11442 code = TREE_CODE (*tp);
11443
11444 /* Even if we didn't, FUNC may have decided that there was nothing
11445 interesting below this point in the tree. */
11446 if (!walk_subtrees)
11447 {
11448 /* But we still need to check our siblings. */
11449 if (code == TREE_LIST)
11450 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11451 else if (code == OMP_CLAUSE)
11452 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11453 else
11454 return NULL_TREE;
11455 }
11456
11457 if (lh)
11458 {
11459 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11460 if (result || !walk_subtrees)
11461 return result;
11462 }
11463
11464 switch (code)
11465 {
11466 case ERROR_MARK:
11467 case IDENTIFIER_NODE:
11468 case INTEGER_CST:
11469 case REAL_CST:
11470 case FIXED_CST:
11471 case VECTOR_CST:
11472 case STRING_CST:
11473 case BLOCK:
11474 case PLACEHOLDER_EXPR:
11475 case SSA_NAME:
11476 case FIELD_DECL:
11477 case RESULT_DECL:
11478 /* None of these have subtrees other than those already walked
11479 above. */
11480 break;
11481
11482 case TREE_LIST:
11483 WALK_SUBTREE (TREE_VALUE (*tp));
11484 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11485 break;
11486
11487 case TREE_VEC:
11488 {
11489 int len = TREE_VEC_LENGTH (*tp);
11490
11491 if (len == 0)
11492 break;
11493
11494 /* Walk all elements but the first. */
11495 while (--len)
11496 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11497
11498 /* Now walk the first one as a tail call. */
11499 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11500 }
11501
11502 case COMPLEX_CST:
11503 WALK_SUBTREE (TREE_REALPART (*tp));
11504 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11505
11506 case CONSTRUCTOR:
11507 {
11508 unsigned HOST_WIDE_INT idx;
11509 constructor_elt *ce;
11510
11511 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11512 idx++)
11513 WALK_SUBTREE (ce->value);
11514 }
11515 break;
11516
11517 case SAVE_EXPR:
11518 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11519
11520 case BIND_EXPR:
11521 {
11522 tree decl;
11523 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11524 {
11525 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11526 into declarations that are just mentioned, rather than
11527 declared; they don't really belong to this part of the tree.
11528 And, we can see cycles: the initializer for a declaration
11529 can refer to the declaration itself. */
11530 WALK_SUBTREE (DECL_INITIAL (decl));
11531 WALK_SUBTREE (DECL_SIZE (decl));
11532 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11533 }
11534 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11535 }
11536
11537 case STATEMENT_LIST:
11538 {
11539 tree_stmt_iterator i;
11540 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11541 WALK_SUBTREE (*tsi_stmt_ptr (i));
11542 }
11543 break;
11544
11545 case OMP_CLAUSE:
11546 switch (OMP_CLAUSE_CODE (*tp))
11547 {
11548 case OMP_CLAUSE_GANG:
11549 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11550 /* FALLTHRU */
11551
11552 case OMP_CLAUSE_DEVICE_RESIDENT:
11553 case OMP_CLAUSE_USE_DEVICE:
11554 case OMP_CLAUSE_ASYNC:
11555 case OMP_CLAUSE_WAIT:
11556 case OMP_CLAUSE_WORKER:
11557 case OMP_CLAUSE_VECTOR:
11558 case OMP_CLAUSE_NUM_GANGS:
11559 case OMP_CLAUSE_NUM_WORKERS:
11560 case OMP_CLAUSE_VECTOR_LENGTH:
11561 case OMP_CLAUSE_PRIVATE:
11562 case OMP_CLAUSE_SHARED:
11563 case OMP_CLAUSE_FIRSTPRIVATE:
11564 case OMP_CLAUSE_COPYIN:
11565 case OMP_CLAUSE_COPYPRIVATE:
11566 case OMP_CLAUSE_FINAL:
11567 case OMP_CLAUSE_IF:
11568 case OMP_CLAUSE_NUM_THREADS:
11569 case OMP_CLAUSE_SCHEDULE:
11570 case OMP_CLAUSE_UNIFORM:
11571 case OMP_CLAUSE_DEPEND:
11572 case OMP_CLAUSE_NUM_TEAMS:
11573 case OMP_CLAUSE_THREAD_LIMIT:
11574 case OMP_CLAUSE_DEVICE:
11575 case OMP_CLAUSE_DIST_SCHEDULE:
11576 case OMP_CLAUSE_SAFELEN:
11577 case OMP_CLAUSE_SIMDLEN:
11578 case OMP_CLAUSE_ORDERED:
11579 case OMP_CLAUSE_PRIORITY:
11580 case OMP_CLAUSE_GRAINSIZE:
11581 case OMP_CLAUSE_NUM_TASKS:
11582 case OMP_CLAUSE_HINT:
11583 case OMP_CLAUSE_TO_DECLARE:
11584 case OMP_CLAUSE_LINK:
11585 case OMP_CLAUSE_USE_DEVICE_PTR:
11586 case OMP_CLAUSE_IS_DEVICE_PTR:
11587 case OMP_CLAUSE__LOOPTEMP_:
11588 case OMP_CLAUSE__SIMDUID_:
11589 case OMP_CLAUSE__CILK_FOR_COUNT_:
11590 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11591 /* FALLTHRU */
11592
11593 case OMP_CLAUSE_INDEPENDENT:
11594 case OMP_CLAUSE_NOWAIT:
11595 case OMP_CLAUSE_DEFAULT:
11596 case OMP_CLAUSE_UNTIED:
11597 case OMP_CLAUSE_MERGEABLE:
11598 case OMP_CLAUSE_PROC_BIND:
11599 case OMP_CLAUSE_INBRANCH:
11600 case OMP_CLAUSE_NOTINBRANCH:
11601 case OMP_CLAUSE_FOR:
11602 case OMP_CLAUSE_PARALLEL:
11603 case OMP_CLAUSE_SECTIONS:
11604 case OMP_CLAUSE_TASKGROUP:
11605 case OMP_CLAUSE_NOGROUP:
11606 case OMP_CLAUSE_THREADS:
11607 case OMP_CLAUSE_SIMD:
11608 case OMP_CLAUSE_DEFAULTMAP:
11609 case OMP_CLAUSE_AUTO:
11610 case OMP_CLAUSE_SEQ:
11611 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11612
11613 case OMP_CLAUSE_LASTPRIVATE:
11614 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11615 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11616 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11617
11618 case OMP_CLAUSE_COLLAPSE:
11619 {
11620 int i;
11621 for (i = 0; i < 3; i++)
11622 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11623 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11624 }
11625
11626 case OMP_CLAUSE_LINEAR:
11627 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11628 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11629 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11630 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11631
11632 case OMP_CLAUSE_ALIGNED:
11633 case OMP_CLAUSE_FROM:
11634 case OMP_CLAUSE_TO:
11635 case OMP_CLAUSE_MAP:
11636 case OMP_CLAUSE__CACHE_:
11637 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11638 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11639 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11640
11641 case OMP_CLAUSE_REDUCTION:
11642 {
11643 int i;
11644 for (i = 0; i < 5; i++)
11645 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11646 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11647 }
11648
11649 default:
11650 gcc_unreachable ();
11651 }
11652 break;
11653
11654 case TARGET_EXPR:
11655 {
11656 int i, len;
11657
11658 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11659 But, we only want to walk once. */
11660 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11661 for (i = 0; i < len; ++i)
11662 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11663 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11664 }
11665
11666 case DECL_EXPR:
11667 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11668 defining. We only want to walk into these fields of a type in this
11669 case and not in the general case of a mere reference to the type.
11670
11671 The criterion is as follows: if the field can be an expression, it
11672 must be walked only here. This should be in keeping with the fields
11673 that are directly gimplified in gimplify_type_sizes in order for the
11674 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11675 variable-sized types.
11676
11677 Note that DECLs get walked as part of processing the BIND_EXPR. */
11678 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11679 {
11680 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11681 if (TREE_CODE (*type_p) == ERROR_MARK)
11682 return NULL_TREE;
11683
11684 /* Call the function for the type. See if it returns anything or
11685 doesn't want us to continue. If we are to continue, walk both
11686 the normal fields and those for the declaration case. */
11687 result = (*func) (type_p, &walk_subtrees, data);
11688 if (result || !walk_subtrees)
11689 return result;
11690
11691 /* But do not walk a pointed-to type since it may itself need to
11692 be walked in the declaration case if it isn't anonymous. */
11693 if (!POINTER_TYPE_P (*type_p))
11694 {
11695 result = walk_type_fields (*type_p, func, data, pset, lh);
11696 if (result)
11697 return result;
11698 }
11699
11700 /* If this is a record type, also walk the fields. */
11701 if (RECORD_OR_UNION_TYPE_P (*type_p))
11702 {
11703 tree field;
11704
11705 for (field = TYPE_FIELDS (*type_p); field;
11706 field = DECL_CHAIN (field))
11707 {
11708 /* We'd like to look at the type of the field, but we can
11709 easily get infinite recursion. So assume it's pointed
11710 to elsewhere in the tree. Also, ignore things that
11711 aren't fields. */
11712 if (TREE_CODE (field) != FIELD_DECL)
11713 continue;
11714
11715 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11716 WALK_SUBTREE (DECL_SIZE (field));
11717 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11718 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11719 WALK_SUBTREE (DECL_QUALIFIER (field));
11720 }
11721 }
11722
11723 /* Same for scalar types. */
11724 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11725 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11726 || TREE_CODE (*type_p) == INTEGER_TYPE
11727 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11728 || TREE_CODE (*type_p) == REAL_TYPE)
11729 {
11730 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11731 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11732 }
11733
11734 WALK_SUBTREE (TYPE_SIZE (*type_p));
11735 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11736 }
11737 /* FALLTHRU */
11738
11739 default:
11740 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11741 {
11742 int i, len;
11743
11744 /* Walk over all the sub-trees of this operand. */
11745 len = TREE_OPERAND_LENGTH (*tp);
11746
11747 /* Go through the subtrees. We need to do this in forward order so
11748 that the scope of a FOR_EXPR is handled properly. */
11749 if (len)
11750 {
11751 for (i = 0; i < len - 1; ++i)
11752 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11753 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11754 }
11755 }
11756 /* If this is a type, walk the needed fields in the type. */
11757 else if (TYPE_P (*tp))
11758 return walk_type_fields (*tp, func, data, pset, lh);
11759 break;
11760 }
11761
11762 /* We didn't find what we were looking for. */
11763 return NULL_TREE;
11764
11765 #undef WALK_SUBTREE_TAIL
11766 }
11767 #undef WALK_SUBTREE
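
/* A minimal callback sketch (a hypothetical helper, not part of this file):

     static tree
     count_nodes_r (tree *tp, int *walk_subtrees, void *data)
     {
       (*(unsigned *) data)++;
       if (TYPE_P (*tp))
         *walk_subtrees = 0;
       return NULL_TREE;
     }

   Returning NULL_TREE keeps the walk going; clearing *WALK_SUBTREES skips
   the children of *TP.  Such a callback is typically invoked through the
   walk_tree macro, e.g. walk_tree (&expr, count_nodes_r, &count, NULL),
   which calls walk_tree_1 with a NULL language hook.  */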
11768
11769 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11770
11771 tree
11772 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11773 walk_tree_lh lh)
11774 {
11775 tree result;
11776
11777 hash_set<tree> pset;
11778 result = walk_tree_1 (tp, func, data, &pset, lh);
11779 return result;
11780 }
11781
11782
11783 tree
11784 tree_block (tree t)
11785 {
11786 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11787
11788 if (IS_EXPR_CODE_CLASS (c))
11789 return LOCATION_BLOCK (t->exp.locus);
11790 gcc_unreachable ();
11791 return NULL;
11792 }
11793
11794 void
11795 tree_set_block (tree t, tree b)
11796 {
11797 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11798
11799 if (IS_EXPR_CODE_CLASS (c))
11800 {
11801 if (b)
11802 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11803 else
11804 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11805 }
11806 else
11807 gcc_unreachable ();
11808 }
11809
11810 /* Create a nameless artificial label and put it in the current
11811 function context. The label has a location of LOC. Returns the
11812 newly created label. */
11813
11814 tree
11815 create_artificial_label (location_t loc)
11816 {
11817 tree lab = build_decl (loc,
11818 LABEL_DECL, NULL_TREE, void_type_node);
11819
11820 DECL_ARTIFICIAL (lab) = 1;
11821 DECL_IGNORED_P (lab) = 1;
11822 DECL_CONTEXT (lab) = current_function_decl;
11823 return lab;
11824 }
11825
11826 /* Given a tree, try to return a useful variable name that we can use
11827 to prefix a temporary that is being assigned the value of the tree.
11828 I.e. given <temp> = &A, return A. */
11829
11830 const char *
11831 get_name (tree t)
11832 {
11833 tree stripped_decl;
11834
11835 stripped_decl = t;
11836 STRIP_NOPS (stripped_decl);
11837 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11838 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11839 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11840 {
11841 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11842 if (!name)
11843 return NULL;
11844 return IDENTIFIER_POINTER (name);
11845 }
11846 else
11847 {
11848 switch (TREE_CODE (stripped_decl))
11849 {
11850 case ADDR_EXPR:
11851 return get_name (TREE_OPERAND (stripped_decl, 0));
11852 default:
11853 return NULL;
11854 }
11855 }
11856 }
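
/* For example (illustrative): if T is the ADDR_EXPR built by
   build_fold_addr_expr (decl), get_name (T) strips down to DECL and returns
   IDENTIFIER_POINTER (DECL_NAME (decl)), or NULL if the decl is nameless.  */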
11857
11858 /* Return true if FNTYPE has a variable argument list. */
11859
11860 bool
11861 stdarg_p (const_tree fntype)
11862 {
11863 function_args_iterator args_iter;
11864 tree n = NULL_TREE, t;
11865
11866 if (!fntype)
11867 return false;
11868
11869 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11870 {
11871 n = t;
11872 }
11873
11874 return n != NULL_TREE && n != void_type_node;
11875 }
11876
11877 /* Return true if FNTYPE has a prototype. */
11878
11879 bool
11880 prototype_p (const_tree fntype)
11881 {
11882 tree t;
11883
11884 gcc_assert (fntype != NULL_TREE);
11885
11886 t = TYPE_ARG_TYPES (fntype);
11887 return (t != NULL_TREE);
11888 }
11889
11890 /* If BLOCK is inlined from an __attribute__((__artificial__))
11891 routine, return a pointer to the location from which it has been
11892 called. */
11893 location_t *
11894 block_nonartificial_location (tree block)
11895 {
11896 location_t *ret = NULL;
11897
11898 while (block && TREE_CODE (block) == BLOCK
11899 && BLOCK_ABSTRACT_ORIGIN (block))
11900 {
11901 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11902
11903 while (TREE_CODE (ao) == BLOCK
11904 && BLOCK_ABSTRACT_ORIGIN (ao)
11905 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11906 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11907
11908 if (TREE_CODE (ao) == FUNCTION_DECL)
11909 {
11910 /* If AO is an artificial inline, point RET to the
11911 call site locus at which it has been inlined and continue
11912 the loop, in case AO's caller is also an artificial
11913 inline. */
11914 if (DECL_DECLARED_INLINE_P (ao)
11915 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11916 ret = &BLOCK_SOURCE_LOCATION (block);
11917 else
11918 break;
11919 }
11920 else if (TREE_CODE (ao) != BLOCK)
11921 break;
11922
11923 block = BLOCK_SUPERCONTEXT (block);
11924 }
11925 return ret;
11926 }
11927
11928
11929 /* If EXP is inlined from an __attribute__((__artificial__))
11930 function, return the location of the original call expression. */
11931
11932 location_t
11933 tree_nonartificial_location (tree exp)
11934 {
11935 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11936
11937 if (loc)
11938 return *loc;
11939 else
11940 return EXPR_LOCATION (exp);
11941 }
11942
11943
11944 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11945 nodes. */
11946
11947 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11948
11949 hashval_t
11950 cl_option_hasher::hash (tree x)
11951 {
11952 const_tree const t = x;
11953 const char *p;
11954 size_t i;
11955 size_t len = 0;
11956 hashval_t hash = 0;
11957
11958 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11959 {
11960 p = (const char *)TREE_OPTIMIZATION (t);
11961 len = sizeof (struct cl_optimization);
11962 }
11963
11964 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11965 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11966
11967 else
11968 gcc_unreachable ();
11969
11970 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11971 something else. */
11972 for (i = 0; i < len; i++)
11973 if (p[i])
11974 hash = (hash << 4) ^ ((i << 2) | p[i]);
11975
11976 return hash;
11977 }
11978
11979 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11980 TARGET_OPTION tree node) is the same as that given by *Y, a node of the
11981 same kind. */
11982
11983 bool
11984 cl_option_hasher::equal (tree x, tree y)
11985 {
11986 const_tree const xt = x;
11987 const_tree const yt = y;
11988 const char *xp;
11989 const char *yp;
11990 size_t len;
11991
11992 if (TREE_CODE (xt) != TREE_CODE (yt))
11993 return 0;
11994
11995 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11996 {
11997 xp = (const char *)TREE_OPTIMIZATION (xt);
11998 yp = (const char *)TREE_OPTIMIZATION (yt);
11999 len = sizeof (struct cl_optimization);
12000 }
12001
12002 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12003 {
12004 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12005 TREE_TARGET_OPTION (yt));
12006 }
12007
12008 else
12009 gcc_unreachable ();
12010
12011 return (memcmp (xp, yp, len) == 0);
12012 }
12013
12014 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12015
12016 tree
12017 build_optimization_node (struct gcc_options *opts)
12018 {
12019 tree t;
12020
12021 /* Use the cache of optimization nodes. */
12022
12023 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12024 opts);
12025
12026 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12027 t = *slot;
12028 if (!t)
12029 {
12030 /* Insert this one into the hash table. */
12031 t = cl_optimization_node;
12032 *slot = t;
12033
12034 /* Make a new node for next time round. */
12035 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12036 }
12037
12038 return t;
12039 }
12040
12041 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12042
12043 tree
12044 build_target_option_node (struct gcc_options *opts)
12045 {
12046 tree t;
12047
12048 /* Use the cache of target option nodes. */
12049
12050 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12051 opts);
12052
12053 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12054 t = *slot;
12055 if (!t)
12056 {
12057 /* Insert this one into the hash table. */
12058 t = cl_target_option_node;
12059 *slot = t;
12060
12061 /* Make a new node for next time round. */
12062 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12063 }
12064
12065 return t;
12066 }
12067
12068 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12069 so that they aren't saved during PCH writing. */
12070
12071 void
12072 prepare_target_option_nodes_for_pch (void)
12073 {
12074 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12075 for (; iter != cl_option_hash_table->end (); ++iter)
12076 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12077 TREE_TARGET_GLOBALS (*iter) = NULL;
12078 }
12079
12080 /* Determine the "ultimate origin" of a block. The block may be an inlined
12081 instance of an inlined instance of a block which is local to an inline
12082 function, so we have to trace all of the way back through the origin chain
12083 to find out what sort of node actually served as the original seed for the
12084 given block. */
12085
12086 tree
12087 block_ultimate_origin (const_tree block)
12088 {
12089 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12090
12091 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12092 we're trying to output the abstract instance of this function. */
12093 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12094 return NULL_TREE;
12095
12096 if (immediate_origin == NULL_TREE)
12097 return NULL_TREE;
12098 else
12099 {
12100 tree ret_val;
12101 tree lookahead = immediate_origin;
12102
12103 do
12104 {
12105 ret_val = lookahead;
12106 lookahead = (TREE_CODE (ret_val) == BLOCK
12107 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12108 }
12109 while (lookahead != NULL && lookahead != ret_val);
12110
12111 /* The block's abstract origin chain may not be the *ultimate* origin of
12112 the block. It could lead to a DECL that has an abstract origin set.
12113 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12114 will give us if it has one). Note that DECL's abstract origins are
12115 supposed to be the most distant ancestor (or so decl_ultimate_origin
12116 claims), so we don't need to loop following the DECL origins. */
12117 if (DECL_P (ret_val))
12118 return DECL_ORIGIN (ret_val);
12119
12120 return ret_val;
12121 }
12122 }
12123
12124 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12125 no instruction. */
12126
12127 bool
12128 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12129 {
12130 /* Use precision rather than machine mode when we can, which gives
12131 the correct answer even for submode (bit-field) types. */
12132 if ((INTEGRAL_TYPE_P (outer_type)
12133 || POINTER_TYPE_P (outer_type)
12134 || TREE_CODE (outer_type) == OFFSET_TYPE)
12135 && (INTEGRAL_TYPE_P (inner_type)
12136 || POINTER_TYPE_P (inner_type)
12137 || TREE_CODE (inner_type) == OFFSET_TYPE))
12138 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12139
12140 /* Otherwise fall back on comparing machine modes (e.g. for
12141 aggregate types, floats). */
12142 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12143 }
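
/* For example (illustrative): tree_nop_conversion_p (unsigned_type_node,
   integer_type_node) is true, since only the signedness differs and the
   precision matches, whereas tree_nop_conversion_p (integer_type_node,
   char_type_node) is false because the precisions differ.  */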
12144
12145 /* Return true iff conversion in EXP generates no instruction. Mark
12146 it inline so that we fully inline into the stripping functions even
12147 though we have two uses of this function. */
12148
12149 static inline bool
12150 tree_nop_conversion (const_tree exp)
12151 {
12152 tree outer_type, inner_type;
12153
12154 if (!CONVERT_EXPR_P (exp)
12155 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12156 return false;
12157 if (TREE_OPERAND (exp, 0) == error_mark_node)
12158 return false;
12159
12160 outer_type = TREE_TYPE (exp);
12161 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12162
12163 if (!inner_type)
12164 return false;
12165
12166 return tree_nop_conversion_p (outer_type, inner_type);
12167 }
12168
12169 /* Return true iff conversion in EXP generates no instruction. Don't
12170 consider conversions changing the signedness. */
12171
12172 static bool
12173 tree_sign_nop_conversion (const_tree exp)
12174 {
12175 tree outer_type, inner_type;
12176
12177 if (!tree_nop_conversion (exp))
12178 return false;
12179
12180 outer_type = TREE_TYPE (exp);
12181 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12182
12183 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12184 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12185 }
12186
12187 /* Strip conversions from EXP according to tree_nop_conversion and
12188 return the resulting expression. */
12189
12190 tree
12191 tree_strip_nop_conversions (tree exp)
12192 {
12193 while (tree_nop_conversion (exp))
12194 exp = TREE_OPERAND (exp, 0);
12195 return exp;
12196 }
12197
12198 /* Strip conversions from EXP according to tree_sign_nop_conversion
12199 and return the resulting expression. */
12200
12201 tree
12202 tree_strip_sign_nop_conversions (tree exp)
12203 {
12204 while (tree_sign_nop_conversion (exp))
12205 exp = TREE_OPERAND (exp, 0);
12206 return exp;
12207 }
12208
12209 /* Avoid any floating point extensions from EXP. */
12210 tree
12211 strip_float_extensions (tree exp)
12212 {
12213 tree sub, expt, subt;
12214
12215 /* For a floating point constant, look up the narrowest type that can hold
12216 it properly and handle it like (type)(narrowest_type)constant.
12217 This way we can optimize, for instance, a=a*2.0 where "a" is float
12218 but 2.0 is a double constant. */
12219 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12220 {
12221 REAL_VALUE_TYPE orig;
12222 tree type = NULL;
12223
12224 orig = TREE_REAL_CST (exp);
12225 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12226 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12227 type = float_type_node;
12228 else if (TYPE_PRECISION (TREE_TYPE (exp))
12229 > TYPE_PRECISION (double_type_node)
12230 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12231 type = double_type_node;
12232 if (type)
12233 return build_real_truncate (type, orig);
12234 }
12235
12236 if (!CONVERT_EXPR_P (exp))
12237 return exp;
12238
12239 sub = TREE_OPERAND (exp, 0);
12240 subt = TREE_TYPE (sub);
12241 expt = TREE_TYPE (exp);
12242
12243 if (!FLOAT_TYPE_P (subt))
12244 return exp;
12245
12246 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12247 return exp;
12248
12249 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12250 return exp;
12251
12252 return strip_float_extensions (sub);
12253 }
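
/* For example (illustrative): if EXP is the conversion (double) F with F of
   type float, the result is F itself; if EXP is the double REAL_CST 2.0,
   which truncates exactly to float, the result is the equivalent float
   constant.  */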
12254
12255 /* Strip out all handled components that produce invariant
12256 offsets. */
12257
12258 const_tree
12259 strip_invariant_refs (const_tree op)
12260 {
12261 while (handled_component_p (op))
12262 {
12263 switch (TREE_CODE (op))
12264 {
12265 case ARRAY_REF:
12266 case ARRAY_RANGE_REF:
12267 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12268 || TREE_OPERAND (op, 2) != NULL_TREE
12269 || TREE_OPERAND (op, 3) != NULL_TREE)
12270 return NULL;
12271 break;
12272
12273 case COMPONENT_REF:
12274 if (TREE_OPERAND (op, 2) != NULL_TREE)
12275 return NULL;
12276 break;
12277
12278 default:;
12279 }
12280 op = TREE_OPERAND (op, 0);
12281 }
12282
12283 return op;
12284 }
12285
12286 static GTY(()) tree gcc_eh_personality_decl;
12287
12288 /* Return the GCC personality function decl. */
12289
12290 tree
12291 lhd_gcc_personality (void)
12292 {
12293 if (!gcc_eh_personality_decl)
12294 gcc_eh_personality_decl = build_personality_function ("gcc");
12295 return gcc_eh_personality_decl;
12296 }
12297
12298 /* TARGET is a call target of a GIMPLE call statement
12299 (obtained by gimple_call_fn). Return true if it is an
12300 OBJ_TYPE_REF representing a virtual call of a C++ method.
12301 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12302 through a cast where middle-end devirtualization machinery
12303 can't apply.) */
12304
12305 bool
12306 virtual_method_call_p (const_tree target)
12307 {
12308 if (TREE_CODE (target) != OBJ_TYPE_REF)
12309 return false;
12310 tree t = TREE_TYPE (target);
12311 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12312 t = TREE_TYPE (t);
12313 if (TREE_CODE (t) == FUNCTION_TYPE)
12314 return false;
12315 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12316 /* If we do not have BINFO associated, it means that type was built
12317 without devirtualization enabled. Do not consider this a virtual
12318 call. */
12319 if (!TYPE_BINFO (obj_type_ref_class (target)))
12320 return false;
12321 return true;
12322 }
12323
12324 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12325
12326 tree
12327 obj_type_ref_class (const_tree ref)
12328 {
12329 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12330 ref = TREE_TYPE (ref);
12331 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12332 ref = TREE_TYPE (ref);
12333 /* We look for the type that THIS points to. ObjC also builds
12334 OBJ_TYPE_REF with non-method calls; their first parameter
12335 ID however also corresponds to the class type. */
12336 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12337 || TREE_CODE (ref) == FUNCTION_TYPE);
12338 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12339 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12340 return TREE_TYPE (ref);
12341 }
12342
12343 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12344
12345 static tree
12346 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12347 {
12348 unsigned int i;
12349 tree base_binfo, b;
12350
12351 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12352 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12353 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12354 return base_binfo;
12355 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12356 return b;
12357 return NULL;
12358 }
12359
12360 /* Try to find a base info of BINFO that would have its field decl at offset
12361 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12362 found, return it, otherwise return NULL_TREE. */
12363
12364 tree
12365 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12366 {
12367 tree type = BINFO_TYPE (binfo);
12368
12369 while (true)
12370 {
12371 HOST_WIDE_INT pos, size;
12372 tree fld;
12373 int i;
12374
12375 if (types_same_for_odr (type, expected_type))
12376 return binfo;
12377 if (offset < 0)
12378 return NULL_TREE;
12379
12380 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12381 {
12382 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12383 continue;
12384
12385 pos = int_bit_position (fld);
12386 size = tree_to_uhwi (DECL_SIZE (fld));
12387 if (pos <= offset && (pos + size) > offset)
12388 break;
12389 }
12390 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12391 return NULL_TREE;
12392
12393 /* Offset 0 indicates the primary base, whose vtable contents are
12394 represented in the binfo for the derived class. */
12395 else if (offset != 0)
12396 {
12397 tree found_binfo = NULL, base_binfo;
12398 /* Offsets in BINFO are in bytes relative to the whole structure
12399 while POS is in bits relative to the containing field. */
12400 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12401 / BITS_PER_UNIT);
12402
12403 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12404 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12405 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12406 {
12407 found_binfo = base_binfo;
12408 break;
12409 }
12410 if (found_binfo)
12411 binfo = found_binfo;
12412 else
12413 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12414 binfo_offset);
12415 }
12416
12417 type = TREE_TYPE (fld);
12418 offset -= pos;
12419 }
12420 }
12421
12422 /* Returns true if X is a typedef decl. */
12423
12424 bool
12425 is_typedef_decl (const_tree x)
12426 {
12427 return (x && TREE_CODE (x) == TYPE_DECL
12428 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12429 }
12430
12431 /* Returns true iff TYPE is a type variant created for a typedef. */
12432
12433 bool
12434 typedef_variant_p (const_tree type)
12435 {
12436 return is_typedef_decl (TYPE_NAME (type));
12437 }
12438
12439 /* Warn about a use of an identifier which was marked deprecated. */
12440 void
12441 warn_deprecated_use (tree node, tree attr)
12442 {
12443 const char *msg;
12444
12445 if (node == 0 || !warn_deprecated_decl)
12446 return;
12447
12448 if (!attr)
12449 {
12450 if (DECL_P (node))
12451 attr = DECL_ATTRIBUTES (node);
12452 else if (TYPE_P (node))
12453 {
12454 tree decl = TYPE_STUB_DECL (node);
12455 if (decl)
12456 attr = lookup_attribute ("deprecated",
12457 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12458 }
12459 }
12460
12461 if (attr)
12462 attr = lookup_attribute ("deprecated", attr);
12463
12464 if (attr)
12465 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12466 else
12467 msg = NULL;
12468
12469 bool w;
12470 if (DECL_P (node))
12471 {
12472 if (msg)
12473 w = warning (OPT_Wdeprecated_declarations,
12474 "%qD is deprecated: %s", node, msg);
12475 else
12476 w = warning (OPT_Wdeprecated_declarations,
12477 "%qD is deprecated", node);
12478 if (w)
12479 inform (DECL_SOURCE_LOCATION (node), "declared here");
12480 }
12481 else if (TYPE_P (node))
12482 {
12483 tree what = NULL_TREE;
12484 tree decl = TYPE_STUB_DECL (node);
12485
12486 if (TYPE_NAME (node))
12487 {
12488 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12489 what = TYPE_NAME (node);
12490 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12491 && DECL_NAME (TYPE_NAME (node)))
12492 what = DECL_NAME (TYPE_NAME (node));
12493 }
12494
12495 if (decl)
12496 {
12497 if (what)
12498 {
12499 if (msg)
12500 w = warning (OPT_Wdeprecated_declarations,
12501 "%qE is deprecated: %s", what, msg);
12502 else
12503 w = warning (OPT_Wdeprecated_declarations,
12504 "%qE is deprecated", what);
12505 }
12506 else
12507 {
12508 if (msg)
12509 w = warning (OPT_Wdeprecated_declarations,
12510 "type is deprecated: %s", msg);
12511 else
12512 w = warning (OPT_Wdeprecated_declarations,
12513 "type is deprecated");
12514 }
12515 if (w)
12516 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12517 }
12518 else
12519 {
12520 if (what)
12521 {
12522 if (msg)
12523 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12524 what, msg);
12525 else
12526 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12527 }
12528 else
12529 {
12530 if (msg)
12531 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12532 msg);
12533 else
12534 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12535 }
12536 }
12537 }
12538 }
12539
12540 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12541 somewhere in it. */
12542
12543 bool
12544 contains_bitfld_component_ref_p (const_tree ref)
12545 {
12546 while (handled_component_p (ref))
12547 {
12548 if (TREE_CODE (ref) == COMPONENT_REF
12549 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12550 return true;
12551 ref = TREE_OPERAND (ref, 0);
12552 }
12553
12554 return false;
12555 }
12556
12557 /* Try to determine whether a TRY_CATCH expression can fall through.
12558 This is a subroutine of block_may_fallthru. */
12559
12560 static bool
12561 try_catch_may_fallthru (const_tree stmt)
12562 {
12563 tree_stmt_iterator i;
12564
12565 /* If the TRY block can fall through, the whole TRY_CATCH can
12566 fall through. */
12567 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12568 return true;
12569
12570 i = tsi_start (TREE_OPERAND (stmt, 1));
12571 switch (TREE_CODE (tsi_stmt (i)))
12572 {
12573 case CATCH_EXPR:
12574 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12575 catch expression and a body. The whole TRY_CATCH may fall
12576 through iff any of the catch bodies falls through. */
12577 for (; !tsi_end_p (i); tsi_next (&i))
12578 {
12579 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12580 return true;
12581 }
12582 return false;
12583
12584 case EH_FILTER_EXPR:
12585 /* The exception filter expression only matters if there is an
12586 exception. If the exception does not match EH_FILTER_TYPES,
12587 we will execute EH_FILTER_FAILURE, and we will fall through
12588 if that falls through. If the exception does match
12589 EH_FILTER_TYPES, the stack unwinder will continue up the
12590 stack, so we will not fall through. We don't know whether we
12591 will throw an exception which matches EH_FILTER_TYPES or not,
12592 so we just ignore EH_FILTER_TYPES and assume that we might
12593 throw an exception which doesn't match. */
12594 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12595
12596 default:
12597 /* This case represents statements to be executed when an
12598 exception occurs. Those statements are implicitly followed
12599 by a RESX statement to resume execution after the exception.
12600 So in this case the TRY_CATCH never falls through. */
12601 return false;
12602 }
12603 }
12604
12605 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12606 need not be 100% accurate; simply be conservative and return true if we
12607 don't know. This is used only to avoid stupidly generating extra code.
12608 If we're wrong, we'll just delete the extra code later. */
12609
12610 bool
12611 block_may_fallthru (const_tree block)
12612 {
12613 /* This CONST_CAST is okay because expr_last returns its argument
12614 unmodified and we assign it to a const_tree. */
12615 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12616
12617 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12618 {
12619 case GOTO_EXPR:
12620 case RETURN_EXPR:
12621 /* Easy cases. If the last statement of the block implies
12622 control transfer, then we can't fall through. */
12623 return false;
12624
12625 case SWITCH_EXPR:
12626 /* If SWITCH_LABELS is set, this is lowered, and represents a
12627 branch to a selected label and hence can not fall through.
12628 Otherwise SWITCH_BODY is set, and the switch can fall
12629 through. */
12630 return SWITCH_LABELS (stmt) == NULL_TREE;
12631
12632 case COND_EXPR:
12633 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12634 return true;
12635 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12636
12637 case BIND_EXPR:
12638 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12639
12640 case TRY_CATCH_EXPR:
12641 return try_catch_may_fallthru (stmt);
12642
12643 case TRY_FINALLY_EXPR:
12644 /* The finally clause is always executed after the try clause,
12645 so if it does not fall through, then the try-finally will not
12646 fall through. Otherwise, if the try clause does not fall
12647 through, then when the finally clause falls through it will
12648 resume execution wherever the try clause was going. So the
12649 whole try-finally will only fall through if both the try
12650 clause and the finally clause fall through. */
12651 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12652 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12653
12654 case MODIFY_EXPR:
12655 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12656 stmt = TREE_OPERAND (stmt, 1);
12657 else
12658 return true;
12659 /* FALLTHRU */
12660
12661 case CALL_EXPR:
12662 /* Functions that do not return do not fall through. */
12663 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12664
12665 case CLEANUP_POINT_EXPR:
12666 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12667
12668 case TARGET_EXPR:
12669 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12670
12671 case ERROR_MARK:
12672 return true;
12673
12674 default:
12675 return lang_hooks.block_may_fallthru (stmt);
12676 }
12677 }
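
/* For example (illustrative): a block whose last statement is a RETURN_EXPR
   or a call to a noreturn function cannot fall through, so block_may_fallthru
   returns false for it, while a block ending in a plain MODIFY_EXPR makes it
   return true.  */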
12678
12679 /* True if we are using EH to handle cleanups. */
12680 static bool using_eh_for_cleanups_flag = false;
12681
12682 /* This routine is called from front ends to indicate eh should be used for
12683 cleanups. */
12684 void
12685 using_eh_for_cleanups (void)
12686 {
12687 using_eh_for_cleanups_flag = true;
12688 }
12689
12690 /* Query whether EH is used for cleanups. */
12691 bool
12692 using_eh_for_cleanups_p (void)
12693 {
12694 return using_eh_for_cleanups_flag;
12695 }
12696
12697 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12698 const char *
12699 get_tree_code_name (enum tree_code code)
12700 {
12701 const char *invalid = "<invalid tree code>";
12702
12703 if (code >= MAX_TREE_CODES)
12704 return invalid;
12705
12706 return tree_code_name[code];
12707 }
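
/* For example (illustrative): get_tree_code_name (INTEGER_CST) returns
   "integer_cst", and any value >= MAX_TREE_CODES yields the
   "<invalid tree code>" placeholder instead of reading past the end of
   tree_code_name.  */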
12708
12709 /* Drops the TREE_OVERFLOW flag from T. */
12710
12711 tree
12712 drop_tree_overflow (tree t)
12713 {
12714 gcc_checking_assert (TREE_OVERFLOW (t));
12715
12716 /* For tree codes with a sharing machinery re-build the result. */
12717 if (TREE_CODE (t) == INTEGER_CST)
12718 return wide_int_to_tree (TREE_TYPE (t), t);
12719
12720 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12721 and drop the flag. */
12722 t = copy_node (t);
12723 TREE_OVERFLOW (t) = 0;
12724 return t;
12725 }
12726
12727 /* Given a memory reference expression T, return its base address.
12728 The base address of a memory reference expression is the main
12729 object being referenced. For instance, the base address for
12730 'array[i].fld[j]' is 'array'. You can think of this as stripping
12731 away the offset part from a memory address.
12732
12733 This function calls handled_component_p to strip away all the inner
12734 parts of the memory reference until it reaches the base object. */
12735
12736 tree
12737 get_base_address (tree t)
12738 {
12739 while (handled_component_p (t))
12740 t = TREE_OPERAND (t, 0);
12741
12742 if ((TREE_CODE (t) == MEM_REF
12743 || TREE_CODE (t) == TARGET_MEM_REF)
12744 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12745 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12746
12747 /* ??? Either the alias oracle or all callers need to properly deal
12748 with WITH_SIZE_EXPRs before we can look through those. */
12749 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12750 return NULL_TREE;
12751
12752 return t;
12753 }
12754
12755 /* Return a tree of sizetype representing the size, in bytes, of the element
12756 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12757
12758 tree
12759 array_ref_element_size (tree exp)
12760 {
12761 tree aligned_size = TREE_OPERAND (exp, 3);
12762 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12763 location_t loc = EXPR_LOCATION (exp);
12764
12765 /* If a size was specified in the ARRAY_REF, it's the size measured
12766 in alignment units of the element type. So multiply by that value. */
12767 if (aligned_size)
12768 {
12769 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12770 sizetype from another type of the same width and signedness. */
12771 if (TREE_TYPE (aligned_size) != sizetype)
12772 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12773 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12774 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12775 }
12776
12777 /* Otherwise, take the size from that of the element type. Substitute
12778 any PLACEHOLDER_EXPR that we have. */
12779 else
12780 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12781 }
12782
12783 /* Return a tree representing the lower bound of the array mentioned in
12784 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12785
12786 tree
12787 array_ref_low_bound (tree exp)
12788 {
12789 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12790
12791 /* If a lower bound is specified in EXP, use it. */
12792 if (TREE_OPERAND (exp, 2))
12793 return TREE_OPERAND (exp, 2);
12794
12795 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12796 substituting for a PLACEHOLDER_EXPR as needed. */
12797 if (domain_type && TYPE_MIN_VALUE (domain_type))
12798 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12799
12800 /* Otherwise, return a zero of the appropriate type. */
12801 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12802 }
12803
12804 /* Return a tree representing the upper bound of the array mentioned in
12805 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12806
12807 tree
12808 array_ref_up_bound (tree exp)
12809 {
12810 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12811
12812 /* If there is a domain type and it has an upper bound, use it, substituting
12813 for a PLACEHOLDER_EXPR as needed. */
12814 if (domain_type && TYPE_MAX_VALUE (domain_type))
12815 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12816
12817 /* Otherwise fail. */
12818 return NULL_TREE;
12819 }
12820
12821 /* Returns true if REF is an array reference to an array at the end of
12822 a structure. If this is the case, the array may be allocated larger
12823 than its upper bound implies. */
12824
12825 bool
12826 array_at_struct_end_p (tree ref)
12827 {
12828 if (TREE_CODE (ref) != ARRAY_REF
12829 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12830 return false;
12831
12832 while (handled_component_p (ref))
12833 {
12834 /* If the reference chain contains a component reference to a
12835 non-union type and there follows another field the reference
12836 is not at the end of a structure. */
12837 if (TREE_CODE (ref) == COMPONENT_REF
12838 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12839 {
12840 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12841 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12842 nextf = DECL_CHAIN (nextf);
12843 if (nextf)
12844 return false;
12845 }
12846
12847 ref = TREE_OPERAND (ref, 0);
12848 }
12849
12850 /* If the reference is based on a declared entity, the size of the array
12851 is constrained by its given domain. */
12852 if (DECL_P (ref))
12853 return false;
12854
12855 return true;
12856 }
12857
12858 /* Return a tree representing the offset, in bytes, of the field referenced
12859 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12860
12861 tree
12862 component_ref_field_offset (tree exp)
12863 {
12864 tree aligned_offset = TREE_OPERAND (exp, 2);
12865 tree field = TREE_OPERAND (exp, 1);
12866 location_t loc = EXPR_LOCATION (exp);
12867
12868 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12869 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12870 value. */
12871 if (aligned_offset)
12872 {
12873 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12874 sizetype from another type of the same width and signedness. */
12875 if (TREE_TYPE (aligned_offset) != sizetype)
12876 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12877 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12878 size_int (DECL_OFFSET_ALIGN (field)
12879 / BITS_PER_UNIT));
12880 }
12881
12882 /* Otherwise, take the offset from that of the field. Substitute
12883 any PLACEHOLDER_EXPR that we have. */
12884 else
12885 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12886 }
12887
12888 /* Return the machine mode of T. For vectors, returns the mode of the
12889 inner type. The main use case is to feed the result to HONOR_NANS,
12890 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12891
12892 machine_mode
12893 element_mode (const_tree t)
12894 {
12895 if (!TYPE_P (t))
12896 t = TREE_TYPE (t);
12897 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12898 t = TREE_TYPE (t);
12899 return TYPE_MODE (t);
12900 }
12901
12902
12903 /* Verify that basic properties of T match TV and thus T can be a variant of
12904 TV. TV should be the more specified variant (i.e. the main variant). */
12905
12906 static bool
12907 verify_type_variant (const_tree t, tree tv)
12908 {
12909 /* Type variant can differ by:
12910
12911 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12912 ENCODE_QUAL_ADDR_SPACE.
12913 - main variant may be COMPLETE_TYPE_P and variant types !COMPLETE_TYPE_P
12914 in this case some values may not be set in the variant types
12915 (see the COMPLETE_TYPE_P checks).
12916 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12917 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12918 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12919 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12920 - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P
12921 this is necessary to make it possible to merge types from different TUs
12922 - arrays, pointers and references may have TREE_TYPE that is a variant
12923 of the TREE_TYPE of their main variants.
12924 - aggregates may have a new TYPE_FIELDS list that lists variants of
12925 the main variant's TYPE_FIELDS.
12926 - vector types may differ by TYPE_VECTOR_OPAQUE
12927 - TYPE_METHODS is always NULL for variant types and maintained for
12928 the main variant only.
12929 */
12930
12931 /* Convenience macro for matching individual fields. */
12932 #define verify_variant_match(flag) \
12933 do { \
12934 if (flag (tv) != flag (t)) \
12935 { \
12936 error ("type variant differs by " #flag "."); \
12937 debug_tree (tv); \
12938 return false; \
12939 } \
12940 } while (false)
12941
12942 /* tree_base checks. */
12943
12944 verify_variant_match (TREE_CODE);
12945 /* FIXME: Ada builds non-artificial variants of artificial types. */
12946 if (TYPE_ARTIFICIAL (tv) && 0)
12947 verify_variant_match (TYPE_ARTIFICIAL);
12948 if (POINTER_TYPE_P (tv))
12949 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12950 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12951 verify_variant_match (TYPE_UNSIGNED);
12952 verify_variant_match (TYPE_ALIGN_OK);
12953 verify_variant_match (TYPE_PACKED);
12954 if (TREE_CODE (t) == REFERENCE_TYPE)
12955 verify_variant_match (TYPE_REF_IS_RVALUE);
12956 verify_variant_match (TYPE_SATURATING);
12957 /* FIXME: This check triggers during libstdc++ builds. */
12958 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12959 verify_variant_match (TYPE_FINAL_P);
12960
12961 /* tree_type_common checks. */
12962
12963 if (COMPLETE_TYPE_P (t))
12964 {
12965 verify_variant_match (TYPE_SIZE);
12966 verify_variant_match (TYPE_MODE);
12967 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12968 /* FIXME: ideally we should compare pointer equality, but the Java FE
12969 produces variants where the size is an INTEGER_CST of a different type
12970 (int wrt size_type) during the libjava build. */
12971 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12972 {
12973 error ("type variant has different TYPE_SIZE_UNIT");
12974 debug_tree (tv);
12975 error ("type variant's TYPE_SIZE_UNIT");
12976 debug_tree (TYPE_SIZE_UNIT (tv));
12977 error ("type's TYPE_SIZE_UNIT");
12978 debug_tree (TYPE_SIZE_UNIT (t));
12979 return false;
12980 }
12981 }
12982 verify_variant_match (TYPE_PRECISION);
12983 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12984 if (RECORD_OR_UNION_TYPE_P (t))
12985 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12986 else if (TREE_CODE (t) == ARRAY_TYPE)
12987 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12988 /* During LTO we merge variant lists from different translation units
12989 that may differ by TYPE_CONTEXT, which in turn may point
12990 to TRANSLATION_UNIT_DECL.
12991 Ada also builds variants of types with different TYPE_CONTEXT. */
12992 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12993 verify_variant_match (TYPE_CONTEXT);
12994 verify_variant_match (TYPE_STRING_FLAG);
12995 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12996 verify_variant_match (TYPE_ALIAS_SET);
12997
12998 /* tree_type_non_common checks. */
12999
13000 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13001 and dangles the pointer from time to time. */
13002 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13003 && (in_lto_p || !TYPE_VFIELD (tv)
13004 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13005 {
13006 error ("type variant has different TYPE_VFIELD");
13007 debug_tree (tv);
13008 return false;
13009 }
13010 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13011 || TREE_CODE (t) == INTEGER_TYPE
13012 || TREE_CODE (t) == BOOLEAN_TYPE
13013 || TREE_CODE (t) == REAL_TYPE
13014 || TREE_CODE (t) == FIXED_POINT_TYPE)
13015 {
13016 verify_variant_match (TYPE_MAX_VALUE);
13017 verify_variant_match (TYPE_MIN_VALUE);
13018 }
13019 if (TREE_CODE (t) == METHOD_TYPE)
13020 verify_variant_match (TYPE_METHOD_BASETYPE);
13021 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13022 {
13023 error ("type variant has TYPE_METHODS");
13024 debug_tree (tv);
13025 return false;
13026 }
13027 if (TREE_CODE (t) == OFFSET_TYPE)
13028 verify_variant_match (TYPE_OFFSET_BASETYPE);
13029 if (TREE_CODE (t) == ARRAY_TYPE)
13030 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13031 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13032 or even in the type's main variant. This is needed to make bootstrap pass
13033 and the bug seems to be new in GCC 5.
13034 The C++ FE should be updated to make this consistent and we should check
13035 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and that otherwise it
13036 matches the main variant.
13037
13038 Also disable the check for Java for now because of a parser hack that builds
13039 first a dummy BINFO and then sometimes replaces it by the real BINFO in some
13040 of the copies. */
13041 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13042 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13043 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13044 Since there is no cheap way to tell a C++ type from a Java type w/o LTO,
13045 do the checking at LTO time only. */
13046 && (in_lto_p && odr_type_p (t)))
13047 {
13048 error ("type variant has different TYPE_BINFO");
13049 debug_tree (tv);
13050 error ("type variant's TYPE_BINFO");
13051 debug_tree (TYPE_BINFO (tv));
13052 error ("type's TYPE_BINFO");
13053 debug_tree (TYPE_BINFO (t));
13054 return false;
13055 }
13056
13057 /* Check various uses of TYPE_VALUES_RAW. */
13058 if (TREE_CODE (t) == ENUMERAL_TYPE)
13059 verify_variant_match (TYPE_VALUES);
13060 else if (TREE_CODE (t) == ARRAY_TYPE)
13061 verify_variant_match (TYPE_DOMAIN);
13062 /* Permit incomplete variants of a complete type. While FEs may complete
13063 all variants, this does not happen for C++ templates in all cases. */
13064 else if (RECORD_OR_UNION_TYPE_P (t)
13065 && COMPLETE_TYPE_P (t)
13066 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13067 {
13068 tree f1, f2;
13069
13070 /* Fortran builds qualified variants as new records with items of
13071 qualified type. Verify that they look the same. */
13072 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13073 f1 && f2;
13074 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13075 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13076 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13077 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13078 /* FIXME: gfc_nonrestricted_type builds all types as variants
13079 with the exception of pointer types. It deeply copies the type,
13080 which means that we may end up with a variant type
13081 referring to a non-variant pointer. We may change it to
13082 produce pointer types as variants, too, like
13083 objc_get_protocol_qualified_type does. */
13084 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13085 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13086 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13087 break;
13088 if (f1 || f2)
13089 {
13090 error ("type variant has different TYPE_FIELDS");
13091 debug_tree (tv);
13092 error ("first mismatch is field");
13093 debug_tree (f1);
13094 error ("and field");
13095 debug_tree (f2);
13096 return false;
13097 }
13098 }
13099 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13100 verify_variant_match (TYPE_ARG_TYPES);
13101 /* For C++ the qualified variant of an array type is really an array type
13102 of the qualified TREE_TYPE.
13103 ObjC builds variants of pointers where the pointed-to type is a variant,
13104 too, in objc_get_protocol_qualified_type. */
13105 if (TREE_TYPE (t) != TREE_TYPE (tv)
13106 && ((TREE_CODE (t) != ARRAY_TYPE
13107 && !POINTER_TYPE_P (t))
13108 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13109 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13110 {
13111 error ("type variant has different TREE_TYPE");
13112 debug_tree (tv);
13113 error ("type variant's TREE_TYPE");
13114 debug_tree (TREE_TYPE (tv));
13115 error ("type's TREE_TYPE");
13116 debug_tree (TREE_TYPE (t));
13117 return false;
13118 }
13119 if (type_with_alias_set_p (t)
13120 && !gimple_canonical_types_compatible_p (t, tv, false))
13121 {
13122 error ("type is not compatible with its vairant");
13123 debug_tree (tv);
13124 error ("type variant's TREE_TYPE");
13125 debug_tree (TREE_TYPE (tv));
13126 error ("type's TREE_TYPE");
13127 debug_tree (TREE_TYPE (t));
13128 return false;
13129 }
13130 return true;
13131 #undef verify_variant_match
13132 }
13133
13134
13135 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13136 the middle-end types_compatible_p function. It needs to avoid
13137 claiming types are different for types that should be treated
13138 the same with respect to TBAA. Canonical types are also used
13139 for IL consistency checks via the useless_type_conversion_p
13140 predicate which does not handle all type kinds itself but falls
13141 back to pointer-comparison of TYPE_CANONICAL for aggregates
13142 for example. */
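/* As an illustration (a rough sketch of what useless_type_conversion_p does
   for aggregates, not a verbatim copy of its implementation): once
   TYPE_CANONICAL has been merged, aggregate conversions reduce to a pointer
   comparison along the lines of

     if (AGGREGATE_TYPE_P (outer_type))
       return TYPE_CANONICAL (inner_type)
              && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);

   so the merging below must never treat TBAA-equivalent types as distinct.  */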
13143
13144 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13145 type calculation because we need to allow inter-operability between signed
13146 and unsigned variants. */
13147
13148 bool
13149 type_with_interoperable_signedness (const_tree type)
13150 {
13151 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
13152 both signed char and unsigned char. Similarly the Fortran FE builds
13153 C_SIZE_T as a signed type, while C defines it as unsigned. */
13154
13155 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13156 == INTEGER_TYPE
13157 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13158 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13159 }
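/* Illustrative examples (not used by the verifier): with the usual C type
   nodes, both of the following are expected to hold, which is what lets the
   signedness check in gimple_canonical_types_compatible_p below be relaxed
   for char-sized and size_t-sized integer types:

     type_with_interoperable_signedness (unsigned_char_type_node)
     type_with_interoperable_signedness (size_type_node)  */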
13160
13161 /* Return true iff T1 and T2 are structurally identical as far as
13162 TBAA is concerned.
13163 This function is used both by lto.c canonical type merging and by the
13164 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
13165 types that have TYPE_CANONICAL defined and assume them equivalent. */
13166
13167 bool
13168 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13169 bool trust_type_canonical)
13170 {
13171 /* Type variants should be the same as the main variant. When not doing
13172 sanity checking to verify this fact, go to the main variants and save some work. */
13173 if (trust_type_canonical)
13174 {
13175 t1 = TYPE_MAIN_VARIANT (t1);
13176 t2 = TYPE_MAIN_VARIANT (t2);
13177 }
13178
13179 /* Check first for the obvious case of pointer identity. */
13180 if (t1 == t2)
13181 return true;
13182
13183 /* Check that we have two types to compare. */
13184 if (t1 == NULL_TREE || t2 == NULL_TREE)
13185 return false;
13186
13187 /* We consider complete types always compatible with incomplete types.
13188 This does not make sense for canonical type calculation and thus we
13189 need to ensure that we are never called on them.
13190
13191 FIXME: For more correctness the function probably should have three modes
13192 1) mode assuming that types are complete and matching their structure
13193 2) mode allowing incomplete types but producing equivalence classes
13194 and thus ignoring all info from complete types
13195 3) mode allowing incomplete types to match complete ones but checking
13196 compatibility between complete types.
13197
13198 1 and 2 can be used for canonical type calculation. 3 is the real
13199 definition of type compatibility that can be used e.g. for warnings during
13200 declaration merging. */
13201
13202 gcc_assert (!trust_type_canonical
13203 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13204 /* If the types have been previously registered and found equal
13205 they still are. */
13206 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13207 && trust_type_canonical)
13208 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13209
13210 /* Can't be the same type if the types don't have the same code. */
13211 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13212 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13213 return false;
13214
13215 /* Qualifiers do not matter for canonical type comparison purposes. */
13216
13217 /* Void types and nullptr types are always the same. */
13218 if (TREE_CODE (t1) == VOID_TYPE
13219 || TREE_CODE (t1) == NULLPTR_TYPE)
13220 return true;
13221
13222 /* Can't be the same type if they have different mode. */
13223 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13224 return false;
13225
13226 /* Non-aggregate types can be handled cheaply. */
13227 if (INTEGRAL_TYPE_P (t1)
13228 || SCALAR_FLOAT_TYPE_P (t1)
13229 || FIXED_POINT_TYPE_P (t1)
13230 || TREE_CODE (t1) == VECTOR_TYPE
13231 || TREE_CODE (t1) == COMPLEX_TYPE
13232 || TREE_CODE (t1) == OFFSET_TYPE
13233 || POINTER_TYPE_P (t1))
13234 {
13235 /* Can't be the same type if they have different precision. */
13236 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13237 return false;
13238
13239 /* In some cases the signed and unsigned types are required to be
13240 inter-operable. */
13241 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13242 && !type_with_interoperable_signedness (t1))
13243 return false;
13244
13245 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13246 interoperable with "signed char". Unless all frontends are revisited
13247 to agree on these types, we must ignore the flag completely. */
13248
13249 /* The Fortran standard defines a C_PTR type that is compatible with every
13250 C pointer. For this reason we need to glob all pointers into one.
13251 Still, pointers in different address spaces are not compatible. */
13252 if (POINTER_TYPE_P (t1))
13253 {
13254 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13255 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13256 return false;
13257 }
13258
13259 /* Tail-recurse to components. */
13260 if (TREE_CODE (t1) == VECTOR_TYPE
13261 || TREE_CODE (t1) == COMPLEX_TYPE)
13262 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13263 TREE_TYPE (t2),
13264 trust_type_canonical);
13265
13266 return true;
13267 }
13268
13269 /* Do type-specific comparisons. */
13270 switch (TREE_CODE (t1))
13271 {
13272 case ARRAY_TYPE:
13273 /* Array types are the same if the element types are the same and
13274 the number of elements is the same. */
13275 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13276 trust_type_canonical)
13277 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13278 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13279 return false;
13280 else
13281 {
13282 tree i1 = TYPE_DOMAIN (t1);
13283 tree i2 = TYPE_DOMAIN (t2);
13284
13285 /* For an incomplete external array, the type domain can be
13286 NULL_TREE. Check this condition also. */
13287 if (i1 == NULL_TREE && i2 == NULL_TREE)
13288 return true;
13289 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13290 return false;
13291 else
13292 {
13293 tree min1 = TYPE_MIN_VALUE (i1);
13294 tree min2 = TYPE_MIN_VALUE (i2);
13295 tree max1 = TYPE_MAX_VALUE (i1);
13296 tree max2 = TYPE_MAX_VALUE (i2);
13297
13298 /* The minimum/maximum values have to be the same. */
13299 if ((min1 == min2
13300 || (min1 && min2
13301 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13302 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13303 || operand_equal_p (min1, min2, 0))))
13304 && (max1 == max2
13305 || (max1 && max2
13306 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13307 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13308 || operand_equal_p (max1, max2, 0)))))
13309 return true;
13310 else
13311 return false;
13312 }
13313 }
13314
13315 case METHOD_TYPE:
13316 case FUNCTION_TYPE:
13317 /* Function types are the same if the return type and argument types
13318 are the same. */
13319 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13320 trust_type_canonical))
13321 return false;
13322
13323 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13324 return true;
13325 else
13326 {
13327 tree parms1, parms2;
13328
13329 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13330 parms1 && parms2;
13331 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13332 {
13333 if (!gimple_canonical_types_compatible_p
13334 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13335 trust_type_canonical))
13336 return false;
13337 }
13338
13339 if (parms1 || parms2)
13340 return false;
13341
13342 return true;
13343 }
13344
13345 case RECORD_TYPE:
13346 case UNION_TYPE:
13347 case QUAL_UNION_TYPE:
13348 {
13349 tree f1, f2;
13350
13351 /* For aggregate types, all the fields must be the same. */
13352 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13353 f1 || f2;
13354 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13355 {
13356 /* Skip non-fields. */
13357 while (f1 && TREE_CODE (f1) != FIELD_DECL)
13358 f1 = TREE_CHAIN (f1);
13359 while (f2 && TREE_CODE (f2) != FIELD_DECL)
13360 f2 = TREE_CHAIN (f2);
13361 if (!f1 || !f2)
13362 break;
13363 /* The fields must have the same name, offset and type. */
13364 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13365 || !gimple_compare_field_offset (f1, f2)
13366 || !gimple_canonical_types_compatible_p
13367 (TREE_TYPE (f1), TREE_TYPE (f2),
13368 trust_type_canonical))
13369 return false;
13370 }
13371
13372 /* If one aggregate has more fields than the other, they
13373 are not the same. */
13374 if (f1 || f2)
13375 return false;
13376
13377 return true;
13378 }
13379
13380 default:
13381 /* Consider all types with language specific trees in them mutually
13382 compatible. This is executed only from verify_type and false
13383 positives can be tolerated. */
13384 gcc_assert (!in_lto_p);
13385 return true;
13386 }
13387 }
13388
13389 /* Verify type T. */
13390
13391 void
13392 verify_type (const_tree t)
13393 {
13394 bool error_found = false;
13395 tree mv = TYPE_MAIN_VARIANT (t);
13396 if (!mv)
13397 {
13398 error ("Main variant is not defined");
13399 error_found = true;
13400 }
13401 else if (mv != TYPE_MAIN_VARIANT (mv))
13402 {
13403 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13404 debug_tree (mv);
13405 error_found = true;
13406 }
13407 else if (t != mv && !verify_type_variant (t, mv))
13408 error_found = true;
13409
13410 tree ct = TYPE_CANONICAL (t);
13411 if (!ct)
13412 ;
13413 else if (TYPE_CANONICAL (ct) != ct)
13414 {
13415 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13416 debug_tree (ct);
13417 error_found = true;
13418 }
13419 /* Method and function types cannot be used to address memory and thus
13420 TYPE_CANONICAL really matters only for determining useless conversions.
13421
13422 FIXME: The C++ FE produces declarations of builtin functions that are not
13423 compatible with main variants. */
13424 else if (TREE_CODE (t) == FUNCTION_TYPE)
13425 ;
13426 else if (t != ct
13427 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13428 with variably sized arrays because their sizes are possibly
13429 gimplified to different variables. */
13430 && !variably_modified_type_p (ct, NULL)
13431 && !gimple_canonical_types_compatible_p (t, ct, false))
13432 {
13433 error ("TYPE_CANONICAL is not compatible");
13434 debug_tree (ct);
13435 error_found = true;
13436 }
13437
13438 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13439 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13440 {
13441 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13442 debug_tree (ct);
13443 error_found = true;
13444 }
13445
13446
13447 /* Check various uses of TYPE_MINVAL. */
13448 if (RECORD_OR_UNION_TYPE_P (t))
13449 {
13450 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13451 and dangles the pointer from time to time. */
13452 if (TYPE_VFIELD (t)
13453 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13454 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13455 {
13456 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13457 debug_tree (TYPE_VFIELD (t));
13458 error_found = true;
13459 }
13460 }
13461 else if (TREE_CODE (t) == POINTER_TYPE)
13462 {
13463 if (TYPE_NEXT_PTR_TO (t)
13464 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13465 {
13466 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13467 debug_tree (TYPE_NEXT_PTR_TO (t));
13468 error_found = true;
13469 }
13470 }
13471 else if (TREE_CODE (t) == REFERENCE_TYPE)
13472 {
13473 if (TYPE_NEXT_REF_TO (t)
13474 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13475 {
13476 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13477 debug_tree (TYPE_NEXT_REF_TO (t));
13478 error_found = true;
13479 }
13480 }
13481 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13482 || TREE_CODE (t) == FIXED_POINT_TYPE)
13483 {
13484 /* FIXME: The following check should pass:
13485 useless_type_conversion_p (const_cast <tree> (t),
13486 TREE_TYPE (TYPE_MIN_VALUE (t)))
13487 but it does not for C sizetypes in LTO. */
13488 }
13489 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13490 else if (TYPE_MINVAL (t)
13491 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13492 || in_lto_p))
13493 {
13494 error ("TYPE_MINVAL non-NULL");
13495 debug_tree (TYPE_MINVAL (t));
13496 error_found = true;
13497 }
13498
13499 /* Check various uses of TYPE_MAXVAL. */
13500 if (RECORD_OR_UNION_TYPE_P (t))
13501 {
13502 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13503 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13504 && TYPE_METHODS (t) != error_mark_node)
13505 {
13506 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13507 debug_tree (TYPE_METHODS (t));
13508 error_found = true;
13509 }
13510 }
13511 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13512 {
13513 if (TYPE_METHOD_BASETYPE (t)
13514 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13515 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13516 {
13517 error ("TYPE_METHOD_BASETYPE is not record nor union");
13518 debug_tree (TYPE_METHOD_BASETYPE (t));
13519 error_found = true;
13520 }
13521 }
13522 else if (TREE_CODE (t) == OFFSET_TYPE)
13523 {
13524 if (TYPE_OFFSET_BASETYPE (t)
13525 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13526 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13527 {
13528 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13529 debug_tree (TYPE_OFFSET_BASETYPE (t));
13530 error_found = true;
13531 }
13532 }
13533 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13534 || TREE_CODE (t) == FIXED_POINT_TYPE)
13535 {
13536 /* FIXME: The following check should pass:
13537 useless_type_conversion_p (const_cast <tree> (t),
13538 TREE_TYPE (TYPE_MAX_VALUE (t)))
13539 but it does not for C sizetypes in LTO. */
13540 }
13541 else if (TREE_CODE (t) == ARRAY_TYPE)
13542 {
13543 if (TYPE_ARRAY_MAX_SIZE (t)
13544 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13545 {
13546 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13547 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13548 error_found = true;
13549 }
13550 }
13551 else if (TYPE_MAXVAL (t))
13552 {
13553 error ("TYPE_MAXVAL non-NULL");
13554 debug_tree (TYPE_MAXVAL (t));
13555 error_found = true;
13556 }
13557
13558 /* Check various uses of TYPE_BINFO. */
13559 if (RECORD_OR_UNION_TYPE_P (t))
13560 {
13561 if (!TYPE_BINFO (t))
13562 ;
13563 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13564 {
13565 error ("TYPE_BINFO is not TREE_BINFO");
13566 debug_tree (TYPE_BINFO (t));
13567 error_found = true;
13568 }
13569 /* FIXME: Java builds invalid empty binfos that do not have
13570 TREE_TYPE set. */
13571 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13572 {
13573 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13574 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13575 error_found = true;
13576 }
13577 }
13578 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13579 {
13580 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13581 debug_tree (TYPE_LANG_SLOT_1 (t));
13582 error_found = true;
13583 }
13584
13585 /* Check various uses of TYPE_VALUES_RAW. */
13586 if (TREE_CODE (t) == ENUMERAL_TYPE)
13587 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13588 {
13589 tree value = TREE_VALUE (l);
13590 tree name = TREE_PURPOSE (l);
13591
13592 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE
13593 uses a CONST_DECL of ENUMERAL_TYPE. */
13594 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13595 {
13596 error ("Enum value is not CONST_DECL or INTEGER_CST");
13597 debug_tree (value);
13598 debug_tree (name);
13599 error_found = true;
13600 }
13601 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13602 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13603 {
13604 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13605 debug_tree (value);
13606 debug_tree (name);
13607 error_found = true;
13608 }
13609 if (TREE_CODE (name) != IDENTIFIER_NODE)
13610 {
13611 error ("Enum value name is not IDENTIFIER_NODE");
13612 debug_tree (value);
13613 debug_tree (name);
13614 error_found = true;
13615 }
13616 }
13617 else if (TREE_CODE (t) == ARRAY_TYPE)
13618 {
13619 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13620 {
13621 error ("Array TYPE_DOMAIN is not integer type");
13622 debug_tree (TYPE_DOMAIN (t));
13623 error_found = true;
13624 }
13625 }
13626 else if (RECORD_OR_UNION_TYPE_P (t))
13627 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13628 {
13629 /* TODO: verify properties of decls. */
13630 if (TREE_CODE (fld) == FIELD_DECL)
13631 ;
13632 else if (TREE_CODE (fld) == TYPE_DECL)
13633 ;
13634 else if (TREE_CODE (fld) == CONST_DECL)
13635 ;
13636 else if (TREE_CODE (fld) == VAR_DECL)
13637 ;
13638 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13639 ;
13640 else if (TREE_CODE (fld) == USING_DECL)
13641 ;
13642 else
13643 {
13644 error ("Wrong tree in TYPE_FIELDS list");
13645 debug_tree (fld);
13646 error_found = true;
13647 }
13648 }
13649 else if (TREE_CODE (t) == INTEGER_TYPE
13650 || TREE_CODE (t) == BOOLEAN_TYPE
13651 || TREE_CODE (t) == OFFSET_TYPE
13652 || TREE_CODE (t) == REFERENCE_TYPE
13653 || TREE_CODE (t) == NULLPTR_TYPE
13654 || TREE_CODE (t) == POINTER_TYPE)
13655 {
13656 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13657 {
13658 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13659 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13660 error_found = true;
13661 }
13662 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13663 {
13664 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13665 debug_tree (TYPE_CACHED_VALUES (t));
13666 error_found = true;
13667 }
13668 /* Verify just enough of the cache to ensure that no one copied it to a new
13669 type. All copying should go through copy_node, which should clear it. */
13670 else if (TYPE_CACHED_VALUES_P (t))
13671 {
13672 int i;
13673 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13674 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13675 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13676 {
13677 error ("wrong TYPE_CACHED_VALUES entry");
13678 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13679 error_found = true;
13680 break;
13681 }
13682 }
13683 }
13684 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13685 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13686 {
13687 /* C++ FE uses TREE_PURPOSE to store initial values. */
13688 if (TREE_PURPOSE (l) && in_lto_p)
13689 {
13690 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13691 debug_tree (l);
13692 error_found = true;
13693 }
13694 if (!TYPE_P (TREE_VALUE (l)))
13695 {
13696 error ("Wrong entry in TYPE_ARG_TYPES list");
13697 debug_tree (l);
13698 error_found = true;
13699 }
13700 }
13701 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13702 {
13703 error ("TYPE_VALUES_RAW field is non-NULL");
13704 debug_tree (TYPE_VALUES_RAW (t));
13705 error_found = true;
13706 }
13707 if (TREE_CODE (t) != INTEGER_TYPE
13708 && TREE_CODE (t) != BOOLEAN_TYPE
13709 && TREE_CODE (t) != OFFSET_TYPE
13710 && TREE_CODE (t) != REFERENCE_TYPE
13711 && TREE_CODE (t) != NULLPTR_TYPE
13712 && TREE_CODE (t) != POINTER_TYPE
13713 && TYPE_CACHED_VALUES_P (t))
13714 {
13715 error ("TYPE_CACHED_VALUES_P is set while it should not");
13716 error_found = true;
13717 }
13718 if (TYPE_STRING_FLAG (t)
13719 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13720 {
13721 error ("TYPE_STRING_FLAG is set on wrong type code");
13722 error_found = true;
13723 }
13724 else if (TYPE_STRING_FLAG (t))
13725 {
13726 const_tree b = t;
13727 if (TREE_CODE (b) == ARRAY_TYPE)
13728 b = TREE_TYPE (t);
13729 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
13730 which is 32 bits. */
13731 if (TREE_CODE (b) != INTEGER_TYPE)
13732 {
13733 error ("TYPE_STRING_FLAG is set on type that does not look like "
13734 "char nor array of chars");
13735 error_found = true;
13736 }
13737 }
13738
13739 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always a
13740 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13741 of a type. */
13742 if (TREE_CODE (t) == METHOD_TYPE
13743 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13744 {
13745 error ("TYPE_METHOD_BASETYPE is not main variant");
13746 error_found = true;
13747 }
13748
13749 if (error_found)
13750 {
13751 debug_tree (const_cast <tree> (t));
13752 internal_error ("verify_type failed");
13753 }
13754 }
13755
13756
13757 /* Return true if ARG is marked with the nonnull attribute in the
13758 current function signature. */
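/* For example (illustrative): when compiling the body of a function declared

     void f (void *a, void *b) __attribute__ ((nonnull (2)));

   this predicate returns true for the PARM_DECL of B, since the attribute
   names position 2, and false for A unless a blanket "nonnull" attribute
   (with no argument list) is present.  */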
13759
13760 bool
13761 nonnull_arg_p (const_tree arg)
13762 {
13763 tree t, attrs, fntype;
13764 unsigned HOST_WIDE_INT arg_num;
13765
13766 gcc_assert (TREE_CODE (arg) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (arg)));
13767
13768 /* The static chain decl is always non-null. */
13769 if (arg == cfun->static_chain_decl)
13770 return true;
13771
13772 /* The THIS argument of a method is always non-NULL. */
13773 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13774 && arg == DECL_ARGUMENTS (cfun->decl)
13775 && flag_delete_null_pointer_checks)
13776 return true;
13777
13778 /* Values passed by reference are always non-NULL. */
13779 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13780 && flag_delete_null_pointer_checks)
13781 return true;
13782
13783 fntype = TREE_TYPE (cfun->decl);
13784 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13785 {
13786 attrs = lookup_attribute ("nonnull", attrs);
13787
13788 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13789 if (attrs == NULL_TREE)
13790 return false;
13791
13792 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13793 if (TREE_VALUE (attrs) == NULL_TREE)
13794 return true;
13795
13796 /* Get the position number for ARG in the function signature. */
13797 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13798 t;
13799 t = DECL_CHAIN (t), arg_num++)
13800 {
13801 if (t == arg)
13802 break;
13803 }
13804
13805 gcc_assert (t == arg);
13806
13807 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13808 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13809 {
13810 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13811 return true;
13812 }
13813 }
13814
13815 return false;
13816 }
13817
13818
13819 #include "gt-tree.h"