re PR c++/66243 (enum class value is allowed to be initialized by value from other...
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "attribs.h"
49 #include "varasm.h"
50 #include "tm_p.h"
51 #include "hashtab.h"
52 #include "hard-reg-set.h"
53 #include "function.h"
54 #include "obstack.h"
55 #include "toplev.h" /* get_random_seed */
56 #include "filenames.h"
57 #include "output.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "langhooks.h"
61 #include "tree-inline.h"
62 #include "tree-iterator.h"
63 #include "predict.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "gimple-expr.h"
71 #include "is-a.h"
72 #include "gimple.h"
73 #include "gimple-iterator.h"
74 #include "gimplify.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-phinodes.h"
81 #include "stringpool.h"
82 #include "tree-ssanames.h"
83 #include "rtl.h"
84 #include "statistics.h"
85 #include "real.h"
86 #include "fixed-value.h"
87 #include "insn-config.h"
88 #include "expmed.h"
89 #include "dojump.h"
90 #include "explow.h"
91 #include "emit-rtl.h"
92 #include "stmt.h"
93 #include "expr.h"
94 #include "tree-dfa.h"
95 #include "params.h"
96 #include "tree-pass.h"
97 #include "langhooks-def.h"
98 #include "diagnostic.h"
99 #include "tree-diagnostic.h"
100 #include "tree-pretty-print.h"
101 #include "except.h"
102 #include "debug.h"
103 #include "intl.h"
104 #include "builtins.h"
105 #include "print-tree.h"
106 #include "ipa-utils.h"
107
108 /* Tree code classes. */
109
110 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
111 #define END_OF_BASE_TREE_CODES tcc_exceptional,
112
113 const enum tree_code_class tree_code_type[] = {
114 #include "all-tree.def"
115 };
116
117 #undef DEFTREECODE
118 #undef END_OF_BASE_TREE_CODES
119
120 /* Table indexed by tree code giving number of expression
121 operands beyond the fixed part of the node structure.
122 Not used for types or decls. */
123
124 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
125 #define END_OF_BASE_TREE_CODES 0,
126
127 const unsigned char tree_code_length[] = {
128 #include "all-tree.def"
129 };
130
131 #undef DEFTREECODE
132 #undef END_OF_BASE_TREE_CODES
133
134 /* Names of tree components.
135 Used for printing out the tree and error messages. */
136 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
137 #define END_OF_BASE_TREE_CODES "@dummy",
138
139 static const char *const tree_code_name[] = {
140 #include "all-tree.def"
141 };
142
143 #undef DEFTREECODE
144 #undef END_OF_BASE_TREE_CODES
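/* Illustrative note (not part of the original source): the three tables
   above are produced by expanding all-tree.def several times with
   different definitions of DEFTREECODE.  For example, the tree.def entry

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes tcc_binary to tree_code_type, 2 to tree_code_length and
   "plus_expr" to tree_code_name, each at index PLUS_EXPR.  */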
145
146 /* Each tree code class has an associated string representation.
147 These must correspond to the tree_code_class entries. */
148
149 const char *const tree_code_class_strings[] =
150 {
151 "exceptional",
152 "constant",
153 "type",
154 "declaration",
155 "reference",
156 "comparison",
157 "unary",
158 "binary",
159 "statement",
160 "vl_exp",
161 "expression"
162 };
163
164 /* obstack.[ch] explicitly declined to prototype this. */
165 extern int _obstack_allocated_p (struct obstack *h, void *obj);
166
167 /* Statistics-gathering stuff. */
168
169 static int tree_code_counts[MAX_TREE_CODES];
170 int tree_node_counts[(int) all_kinds];
171 int tree_node_sizes[(int) all_kinds];
172
173 /* Keep in sync with tree.h:enum tree_node_kind. */
174 static const char * const tree_node_kind_names[] = {
175 "decls",
176 "types",
177 "blocks",
178 "stmts",
179 "refs",
180 "exprs",
181 "constants",
182 "identifiers",
183 "vecs",
184 "binfos",
185 "ssa names",
186 "constructors",
187 "random kinds",
188 "lang_decl kinds",
189 "lang_type kinds",
190 "omp clauses",
191 };
192
193 /* Unique id for next decl created. */
194 static GTY(()) int next_decl_uid;
195 /* Unique id for next type created. */
196 static GTY(()) int next_type_uid = 1;
197 /* Unique id for next debug decl created. Use negative numbers,
198 to catch erroneous uses. */
199 static GTY(()) int next_debug_decl_uid;
200
201 /* Since we cannot rehash a type after it is in the table, we have to
202 keep the hash code. */
203
204 struct GTY((for_user)) type_hash {
205 unsigned long hash;
206 tree type;
207 };
208
209 /* Initial size of the hash table (rounded to next prime). */
210 #define TYPE_HASH_INITIAL_SIZE 1000
211
212 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
213 {
214 static hashval_t hash (type_hash *t) { return t->hash; }
215 static bool equal (type_hash *a, type_hash *b);
216
217 static void
218 handle_cache_entry (type_hash *&t)
219 {
220 extern void gt_ggc_mx (type_hash *&);
221 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
222 return;
223 else if (ggc_marked_p (t->type))
224 gt_ggc_mx (t);
225 else
226 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
227 }
228 };
229
230 /* Now here is the hash table. When recording a type, it is added to
231 the slot whose index is the hash code. Note that the hash table is
232 used for several kinds of types (function types, array types and
233 array index range types, for now). While all these live in the
234 same table, they are completely independent, and the hash code is
235 computed differently for each of these. */
236
237 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
238
239 /* Hash table and temporary node for larger integer const values. */
240 static GTY (()) tree int_cst_node;
241
242 struct int_cst_hasher : ggc_cache_hasher<tree>
243 {
244 static hashval_t hash (tree t);
245 static bool equal (tree x, tree y);
246 };
247
248 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
249
250 /* Hash table for optimization flags and target option flags. Use the same
251 hash table for both sets of options. Nodes for building the current
252 optimization and target option nodes. The assumption is most of the time
253 the options created will already be in the hash table, so we avoid
254 allocating and freeing up a node repeatedly. */
255 static GTY (()) tree cl_optimization_node;
256 static GTY (()) tree cl_target_option_node;
257
258 struct cl_option_hasher : ggc_cache_hasher<tree>
259 {
260 static hashval_t hash (tree t);
261 static bool equal (tree x, tree y);
262 };
263
264 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
265
266 /* General tree->tree mapping structure for use in hash tables. */
267
268
269 static GTY ((cache))
270 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
271
272 static GTY ((cache))
273 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
274
275 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
276 {
277 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
278
279 static bool
280 equal (tree_vec_map *a, tree_vec_map *b)
281 {
282 return a->base.from == b->base.from;
283 }
284
285 static void
286 handle_cache_entry (tree_vec_map *&m)
287 {
288 extern void gt_ggc_mx (tree_vec_map *&);
289 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
290 return;
291 else if (ggc_marked_p (m->base.from))
292 gt_ggc_mx (m);
293 else
294 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
295 }
296 };
297
298 static GTY ((cache))
299 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
300
301 static void set_type_quals (tree, int);
302 static void print_type_hash_statistics (void);
303 static void print_debug_expr_statistics (void);
304 static void print_value_expr_statistics (void);
305 static void type_hash_list (const_tree, inchash::hash &);
306 static void attribute_hash_list (const_tree, inchash::hash &);
307
308 tree global_trees[TI_MAX];
309 tree integer_types[itk_none];
310
311 bool int_n_enabled_p[NUM_INT_N_ENTS];
312 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
313
314 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
315
316 /* Number of operands for each OpenMP clause. */
317 unsigned const char omp_clause_num_ops[] =
318 {
319 0, /* OMP_CLAUSE_ERROR */
320 1, /* OMP_CLAUSE_PRIVATE */
321 1, /* OMP_CLAUSE_SHARED */
322 1, /* OMP_CLAUSE_FIRSTPRIVATE */
323 2, /* OMP_CLAUSE_LASTPRIVATE */
324 4, /* OMP_CLAUSE_REDUCTION */
325 1, /* OMP_CLAUSE_COPYIN */
326 1, /* OMP_CLAUSE_COPYPRIVATE */
327 3, /* OMP_CLAUSE_LINEAR */
328 2, /* OMP_CLAUSE_ALIGNED */
329 1, /* OMP_CLAUSE_DEPEND */
330 1, /* OMP_CLAUSE_UNIFORM */
331 2, /* OMP_CLAUSE_FROM */
332 2, /* OMP_CLAUSE_TO */
333 2, /* OMP_CLAUSE_MAP */
334 2, /* OMP_CLAUSE__CACHE_ */
335 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
336 1, /* OMP_CLAUSE_USE_DEVICE */
337 2, /* OMP_CLAUSE_GANG */
338 1, /* OMP_CLAUSE_ASYNC */
339 1, /* OMP_CLAUSE_WAIT */
340 0, /* OMP_CLAUSE_AUTO */
341 0, /* OMP_CLAUSE_SEQ */
342 1, /* OMP_CLAUSE__LOOPTEMP_ */
343 1, /* OMP_CLAUSE_IF */
344 1, /* OMP_CLAUSE_NUM_THREADS */
345 1, /* OMP_CLAUSE_SCHEDULE */
346 0, /* OMP_CLAUSE_NOWAIT */
347 0, /* OMP_CLAUSE_ORDERED */
348 0, /* OMP_CLAUSE_DEFAULT */
349 3, /* OMP_CLAUSE_COLLAPSE */
350 0, /* OMP_CLAUSE_UNTIED */
351 1, /* OMP_CLAUSE_FINAL */
352 0, /* OMP_CLAUSE_MERGEABLE */
353 1, /* OMP_CLAUSE_DEVICE */
354 1, /* OMP_CLAUSE_DIST_SCHEDULE */
355 0, /* OMP_CLAUSE_INBRANCH */
356 0, /* OMP_CLAUSE_NOTINBRANCH */
357 1, /* OMP_CLAUSE_NUM_TEAMS */
358 1, /* OMP_CLAUSE_THREAD_LIMIT */
359 0, /* OMP_CLAUSE_PROC_BIND */
360 1, /* OMP_CLAUSE_SAFELEN */
361 1, /* OMP_CLAUSE_SIMDLEN */
362 0, /* OMP_CLAUSE_FOR */
363 0, /* OMP_CLAUSE_PARALLEL */
364 0, /* OMP_CLAUSE_SECTIONS */
365 0, /* OMP_CLAUSE_TASKGROUP */
366 1, /* OMP_CLAUSE__SIMDUID_ */
367 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
368 0, /* OMP_CLAUSE_INDEPENDENT */
369 1, /* OMP_CLAUSE_WORKER */
370 1, /* OMP_CLAUSE_VECTOR */
371 1, /* OMP_CLAUSE_NUM_GANGS */
372 1, /* OMP_CLAUSE_NUM_WORKERS */
373 1, /* OMP_CLAUSE_VECTOR_LENGTH */
374 };
375
376 const char * const omp_clause_code_name[] =
377 {
378 "error_clause",
379 "private",
380 "shared",
381 "firstprivate",
382 "lastprivate",
383 "reduction",
384 "copyin",
385 "copyprivate",
386 "linear",
387 "aligned",
388 "depend",
389 "uniform",
390 "from",
391 "to",
392 "map",
393 "_cache_",
394 "device_resident",
395 "use_device",
396 "gang",
397 "async",
398 "wait",
399 "auto",
400 "seq",
401 "_looptemp_",
402 "if",
403 "num_threads",
404 "schedule",
405 "nowait",
406 "ordered",
407 "default",
408 "collapse",
409 "untied",
410 "final",
411 "mergeable",
412 "device",
413 "dist_schedule",
414 "inbranch",
415 "notinbranch",
416 "num_teams",
417 "thread_limit",
418 "proc_bind",
419 "safelen",
420 "simdlen",
421 "for",
422 "parallel",
423 "sections",
424 "taskgroup",
425 "_simduid_",
426 "_Cilk_for_count_",
427 "independent",
428 "worker",
429 "vector",
430 "num_gangs",
431 "num_workers",
432 "vector_length"
433 };
434
435
436 /* Return the tree node structure used by tree code CODE. */
437
438 static inline enum tree_node_structure_enum
439 tree_node_structure_for_code (enum tree_code code)
440 {
441 switch (TREE_CODE_CLASS (code))
442 {
443 case tcc_declaration:
444 {
445 switch (code)
446 {
447 case FIELD_DECL:
448 return TS_FIELD_DECL;
449 case PARM_DECL:
450 return TS_PARM_DECL;
451 case VAR_DECL:
452 return TS_VAR_DECL;
453 case LABEL_DECL:
454 return TS_LABEL_DECL;
455 case RESULT_DECL:
456 return TS_RESULT_DECL;
457 case DEBUG_EXPR_DECL:
458 return TS_DECL_WRTL;
459 case CONST_DECL:
460 return TS_CONST_DECL;
461 case TYPE_DECL:
462 return TS_TYPE_DECL;
463 case FUNCTION_DECL:
464 return TS_FUNCTION_DECL;
465 case TRANSLATION_UNIT_DECL:
466 return TS_TRANSLATION_UNIT_DECL;
467 default:
468 return TS_DECL_NON_COMMON;
469 }
470 }
471 case tcc_type:
472 return TS_TYPE_NON_COMMON;
473 case tcc_reference:
474 case tcc_comparison:
475 case tcc_unary:
476 case tcc_binary:
477 case tcc_expression:
478 case tcc_statement:
479 case tcc_vl_exp:
480 return TS_EXP;
481 default: /* tcc_constant and tcc_exceptional */
482 break;
483 }
484 switch (code)
485 {
486 /* tcc_constant cases. */
487 case VOID_CST: return TS_TYPED;
488 case INTEGER_CST: return TS_INT_CST;
489 case REAL_CST: return TS_REAL_CST;
490 case FIXED_CST: return TS_FIXED_CST;
491 case COMPLEX_CST: return TS_COMPLEX;
492 case VECTOR_CST: return TS_VECTOR;
493 case STRING_CST: return TS_STRING;
494 /* tcc_exceptional cases. */
495 case ERROR_MARK: return TS_COMMON;
496 case IDENTIFIER_NODE: return TS_IDENTIFIER;
497 case TREE_LIST: return TS_LIST;
498 case TREE_VEC: return TS_VEC;
499 case SSA_NAME: return TS_SSA_NAME;
500 case PLACEHOLDER_EXPR: return TS_COMMON;
501 case STATEMENT_LIST: return TS_STATEMENT_LIST;
502 case BLOCK: return TS_BLOCK;
503 case CONSTRUCTOR: return TS_CONSTRUCTOR;
504 case TREE_BINFO: return TS_BINFO;
505 case OMP_CLAUSE: return TS_OMP_CLAUSE;
506 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
507 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
508
509 default:
510 gcc_unreachable ();
511 }
512 }
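/* Illustrative example (not part of the original source, compiled out):
   a few sample mappings performed by tree_node_structure_for_code.
   The example_* name below is a placeholder.  */
#if 0
static void
example_node_structure_mappings (void)
{
  /* Declarations map to their specific TS_* structures.  */
  gcc_checking_assert (tree_node_structure_for_code (VAR_DECL)
		       == TS_VAR_DECL);
  /* All expression classes share TS_EXP.  */
  gcc_checking_assert (tree_node_structure_for_code (PLUS_EXPR) == TS_EXP);
  /* Constants have per-code structures.  */
  gcc_checking_assert (tree_node_structure_for_code (INTEGER_CST)
		       == TS_INT_CST);
}
#endif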
513
514
515 /* Initialize tree_contains_struct to describe the hierarchy of tree
516 nodes. */
517
518 static void
519 initialize_tree_contains_struct (void)
520 {
521 unsigned i;
522
523 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
524 {
525 enum tree_code code;
526 enum tree_node_structure_enum ts_code;
527
528 code = (enum tree_code) i;
529 ts_code = tree_node_structure_for_code (code);
530
531 /* Mark the TS structure itself. */
532 tree_contains_struct[code][ts_code] = 1;
533
534 /* Mark all the structures that TS is derived from. */
535 switch (ts_code)
536 {
537 case TS_TYPED:
538 case TS_BLOCK:
539 MARK_TS_BASE (code);
540 break;
541
542 case TS_COMMON:
543 case TS_INT_CST:
544 case TS_REAL_CST:
545 case TS_FIXED_CST:
546 case TS_VECTOR:
547 case TS_STRING:
548 case TS_COMPLEX:
549 case TS_SSA_NAME:
550 case TS_CONSTRUCTOR:
551 case TS_EXP:
552 case TS_STATEMENT_LIST:
553 MARK_TS_TYPED (code);
554 break;
555
556 case TS_IDENTIFIER:
557 case TS_DECL_MINIMAL:
558 case TS_TYPE_COMMON:
559 case TS_LIST:
560 case TS_VEC:
561 case TS_BINFO:
562 case TS_OMP_CLAUSE:
563 case TS_OPTIMIZATION:
564 case TS_TARGET_OPTION:
565 MARK_TS_COMMON (code);
566 break;
567
568 case TS_TYPE_WITH_LANG_SPECIFIC:
569 MARK_TS_TYPE_COMMON (code);
570 break;
571
572 case TS_TYPE_NON_COMMON:
573 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
574 break;
575
576 case TS_DECL_COMMON:
577 MARK_TS_DECL_MINIMAL (code);
578 break;
579
580 case TS_DECL_WRTL:
581 case TS_CONST_DECL:
582 MARK_TS_DECL_COMMON (code);
583 break;
584
585 case TS_DECL_NON_COMMON:
586 MARK_TS_DECL_WITH_VIS (code);
587 break;
588
589 case TS_DECL_WITH_VIS:
590 case TS_PARM_DECL:
591 case TS_LABEL_DECL:
592 case TS_RESULT_DECL:
593 MARK_TS_DECL_WRTL (code);
594 break;
595
596 case TS_FIELD_DECL:
597 MARK_TS_DECL_COMMON (code);
598 break;
599
600 case TS_VAR_DECL:
601 MARK_TS_DECL_WITH_VIS (code);
602 break;
603
604 case TS_TYPE_DECL:
605 case TS_FUNCTION_DECL:
606 MARK_TS_DECL_NON_COMMON (code);
607 break;
608
609 case TS_TRANSLATION_UNIT_DECL:
610 MARK_TS_DECL_COMMON (code);
611 break;
612
613 default:
614 gcc_unreachable ();
615 }
616 }
617
618 /* Basic consistency checks for attributes used in fold. */
619 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
620 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
621 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
622 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
623 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
624 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
625 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
626 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
629 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
631 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
632 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
634 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
645 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
646 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
647 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
648 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
649 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
650 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
651 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
652 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
653 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
654 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
655 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
657 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
659 }
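/* Illustrative example (not part of the original source, compiled out):
   after the marking above, a derived structure implies all of its base
   structures, which is what CODE_CONTAINS_STRUCT queries rely on
   throughout GCC.  The example_* name below is a placeholder.  */
#if 0
static void
example_contains_struct_queries (void)
{
  /* A VAR_DECL has the TS_VAR_DECL structure and every structure it
     is derived from.  */
  gcc_checking_assert (CODE_CONTAINS_STRUCT (VAR_DECL, TS_VAR_DECL));
  gcc_checking_assert (CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_MINIMAL));
  /* An INTEGER_CST is not a declaration at all.  */
  gcc_checking_assert (!CODE_CONTAINS_STRUCT (INTEGER_CST, TS_DECL_COMMON));
}
#endif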
660
661
662 /* Init tree.c. */
663
664 void
665 init_ttree (void)
666 {
667 /* Initialize the hash table of types. */
668 type_hash_table
669 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
670
671 debug_expr_for_decl
672 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
673
674 value_expr_for_decl
675 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
676
677 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
678
679 int_cst_node = make_int_cst (1, 1);
680
681 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
682
683 cl_optimization_node = make_node (OPTIMIZATION_NODE);
684 cl_target_option_node = make_node (TARGET_OPTION_NODE);
685
686 /* Initialize the tree_contains_struct array. */
687 initialize_tree_contains_struct ();
688 lang_hooks.init_ts ();
689 }
690
691 \f
692 /* The name of the object as the assembler will see it (but before any
693 translations made by ASM_OUTPUT_LABELREF). Often this is the same
694 as DECL_NAME. It is an IDENTIFIER_NODE. */
695 tree
696 decl_assembler_name (tree decl)
697 {
698 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
699 lang_hooks.set_decl_assembler_name (decl);
700 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
701 }
702
703 /* When the target supports COMDAT groups, this indicates which group the
704 DECL is associated with. This can be either an IDENTIFIER_NODE or a
705 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
706 tree
707 decl_comdat_group (const_tree node)
708 {
709 struct symtab_node *snode = symtab_node::get (node);
710 if (!snode)
711 return NULL;
712 return snode->get_comdat_group ();
713 }
714
715 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
716 tree
717 decl_comdat_group_id (const_tree node)
718 {
719 struct symtab_node *snode = symtab_node::get (node);
720 if (!snode)
721 return NULL;
722 return snode->get_comdat_group_id ();
723 }
724
725 /* When the target supports named sections, return the name of the section
726 NODE is placed in as a string, or NULL if it is in no section. */
727 const char *
728 decl_section_name (const_tree node)
729 {
730 struct symtab_node *snode = symtab_node::get (node);
731 if (!snode)
732 return NULL;
733 return snode->get_section ();
734 }
735
736 /* Set the section name of NODE to the string VALUE, or clear it when
737 VALUE is NULL. */
738 void
739 set_decl_section_name (tree node, const char *value)
740 {
741 struct symtab_node *snode;
742
743 if (value == NULL)
744 {
745 snode = symtab_node::get (node);
746 if (!snode)
747 return;
748 }
749 else if (TREE_CODE (node) == VAR_DECL)
750 snode = varpool_node::get_create (node);
751 else
752 snode = cgraph_node::get_create (node);
753 snode->set_section (value);
754 }
755
756 /* Return TLS model of a variable NODE. */
757 enum tls_model
758 decl_tls_model (const_tree node)
759 {
760 struct varpool_node *snode = varpool_node::get (node);
761 if (!snode)
762 return TLS_MODEL_NONE;
763 return snode->tls_model;
764 }
765
766 /* Set TLS model of variable NODE to MODEL. */
767 void
768 set_decl_tls_model (tree node, enum tls_model model)
769 {
770 struct varpool_node *vnode;
771
772 if (model == TLS_MODEL_NONE)
773 {
774 vnode = varpool_node::get (node);
775 if (!vnode)
776 return;
777 }
778 else
779 vnode = varpool_node::get_create (node);
780 vnode->tls_model = model;
781 }
782
783 /* Compute the number of bytes occupied by a tree with code CODE.
784 This function cannot be used for nodes that have variable sizes,
785 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
786 size_t
787 tree_code_size (enum tree_code code)
788 {
789 switch (TREE_CODE_CLASS (code))
790 {
791 case tcc_declaration: /* A decl node */
792 {
793 switch (code)
794 {
795 case FIELD_DECL:
796 return sizeof (struct tree_field_decl);
797 case PARM_DECL:
798 return sizeof (struct tree_parm_decl);
799 case VAR_DECL:
800 return sizeof (struct tree_var_decl);
801 case LABEL_DECL:
802 return sizeof (struct tree_label_decl);
803 case RESULT_DECL:
804 return sizeof (struct tree_result_decl);
805 case CONST_DECL:
806 return sizeof (struct tree_const_decl);
807 case TYPE_DECL:
808 return sizeof (struct tree_type_decl);
809 case FUNCTION_DECL:
810 return sizeof (struct tree_function_decl);
811 case DEBUG_EXPR_DECL:
812 return sizeof (struct tree_decl_with_rtl);
813 case TRANSLATION_UNIT_DECL:
814 return sizeof (struct tree_translation_unit_decl);
815 case NAMESPACE_DECL:
816 case IMPORTED_DECL:
817 case NAMELIST_DECL:
818 return sizeof (struct tree_decl_non_common);
819 default:
820 return lang_hooks.tree_size (code);
821 }
822 }
823
824 case tcc_type: /* a type node */
825 return sizeof (struct tree_type_non_common);
826
827 case tcc_reference: /* a reference */
828 case tcc_expression: /* an expression */
829 case tcc_statement: /* an expression with side effects */
830 case tcc_comparison: /* a comparison expression */
831 case tcc_unary: /* a unary arithmetic expression */
832 case tcc_binary: /* a binary arithmetic expression */
833 return (sizeof (struct tree_exp)
834 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
835
836 case tcc_constant: /* a constant */
837 switch (code)
838 {
839 case VOID_CST: return sizeof (struct tree_typed);
840 case INTEGER_CST: gcc_unreachable ();
841 case REAL_CST: return sizeof (struct tree_real_cst);
842 case FIXED_CST: return sizeof (struct tree_fixed_cst);
843 case COMPLEX_CST: return sizeof (struct tree_complex);
844 case VECTOR_CST: return sizeof (struct tree_vector);
845 case STRING_CST: gcc_unreachable ();
846 default:
847 return lang_hooks.tree_size (code);
848 }
849
850 case tcc_exceptional: /* something random, like an identifier. */
851 switch (code)
852 {
853 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
854 case TREE_LIST: return sizeof (struct tree_list);
855
856 case ERROR_MARK:
857 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
858
859 case TREE_VEC:
860 case OMP_CLAUSE: gcc_unreachable ();
861
862 case SSA_NAME: return sizeof (struct tree_ssa_name);
863
864 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
865 case BLOCK: return sizeof (struct tree_block);
866 case CONSTRUCTOR: return sizeof (struct tree_constructor);
867 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
868 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
869
870 default:
871 return lang_hooks.tree_size (code);
872 }
873
874 default:
875 gcc_unreachable ();
876 }
877 }
878
879 /* Compute the number of bytes occupied by NODE. This routine only
880 looks at TREE_CODE, except for those nodes that have variable sizes. */
881 size_t
882 tree_size (const_tree node)
883 {
884 const enum tree_code code = TREE_CODE (node);
885 switch (code)
886 {
887 case INTEGER_CST:
888 return (sizeof (struct tree_int_cst)
889 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
890
891 case TREE_BINFO:
892 return (offsetof (struct tree_binfo, base_binfos)
893 + vec<tree, va_gc>
894 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
895
896 case TREE_VEC:
897 return (sizeof (struct tree_vec)
898 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
899
900 case VECTOR_CST:
901 return (sizeof (struct tree_vector)
902 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
903
904 case STRING_CST:
905 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
906
907 case OMP_CLAUSE:
908 return (sizeof (struct tree_omp_clause)
909 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
910 * sizeof (tree));
911
912 default:
913 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
914 return (sizeof (struct tree_exp)
915 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
916 else
917 return tree_code_size (code);
918 }
919 }
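/* Illustrative example (not part of the original source, compiled out):
   how the two size routines differ for variable-sized nodes.  The
   example_* name below is a placeholder.  */
#if 0
static void
example_tree_sizes (void)
{
  /* Fixed-size nodes can be sized from the code alone.  */
  size_t decl_bytes = tree_code_size (VAR_DECL);
  /* A TREE_VEC needs the node itself, since its length varies.  */
  tree vec = make_tree_vec (3);
  gcc_checking_assert (tree_size (vec)
		       == sizeof (struct tree_vec) + 2 * sizeof (tree));
  (void) decl_bytes;
}
#endif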
920
921 /* Record interesting allocation statistics for a tree node with CODE
922 and LENGTH. */
923
924 static void
925 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
926 size_t length ATTRIBUTE_UNUSED)
927 {
928 enum tree_code_class type = TREE_CODE_CLASS (code);
929 tree_node_kind kind;
930
931 if (!GATHER_STATISTICS)
932 return;
933
934 switch (type)
935 {
936 case tcc_declaration: /* A decl node */
937 kind = d_kind;
938 break;
939
940 case tcc_type: /* a type node */
941 kind = t_kind;
942 break;
943
944 case tcc_statement: /* an expression with side effects */
945 kind = s_kind;
946 break;
947
948 case tcc_reference: /* a reference */
949 kind = r_kind;
950 break;
951
952 case tcc_expression: /* an expression */
953 case tcc_comparison: /* a comparison expression */
954 case tcc_unary: /* a unary arithmetic expression */
955 case tcc_binary: /* a binary arithmetic expression */
956 kind = e_kind;
957 break;
958
959 case tcc_constant: /* a constant */
960 kind = c_kind;
961 break;
962
963 case tcc_exceptional: /* something random, like an identifier. */
964 switch (code)
965 {
966 case IDENTIFIER_NODE:
967 kind = id_kind;
968 break;
969
970 case TREE_VEC:
971 kind = vec_kind;
972 break;
973
974 case TREE_BINFO:
975 kind = binfo_kind;
976 break;
977
978 case SSA_NAME:
979 kind = ssa_name_kind;
980 break;
981
982 case BLOCK:
983 kind = b_kind;
984 break;
985
986 case CONSTRUCTOR:
987 kind = constr_kind;
988 break;
989
990 case OMP_CLAUSE:
991 kind = omp_clause_kind;
992 break;
993
994 default:
995 kind = x_kind;
996 break;
997 }
998 break;
999
1000 case tcc_vl_exp:
1001 kind = e_kind;
1002 break;
1003
1004 default:
1005 gcc_unreachable ();
1006 }
1007
1008 tree_code_counts[(int) code]++;
1009 tree_node_counts[(int) kind]++;
1010 tree_node_sizes[(int) kind] += length;
1011 }
1012
1013 /* Allocate and return a new UID from the DECL_UID namespace. */
1014
1015 int
1016 allocate_decl_uid (void)
1017 {
1018 return next_decl_uid++;
1019 }
1020
1021 /* Return a newly allocated node of code CODE. For decl and type
1022 nodes, some other fields are initialized. The rest of the node is
1023 initialized to zero. This function cannot be used for TREE_VEC,
1024 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1025 tree_code_size.
1026
1027 Achoo! I got a code in the node. */
1028
1029 tree
1030 make_node_stat (enum tree_code code MEM_STAT_DECL)
1031 {
1032 tree t;
1033 enum tree_code_class type = TREE_CODE_CLASS (code);
1034 size_t length = tree_code_size (code);
1035
1036 record_node_allocation_statistics (code, length);
1037
1038 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1039 TREE_SET_CODE (t, code);
1040
1041 switch (type)
1042 {
1043 case tcc_statement:
1044 TREE_SIDE_EFFECTS (t) = 1;
1045 break;
1046
1047 case tcc_declaration:
1048 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1049 {
1050 if (code == FUNCTION_DECL)
1051 {
1052 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1053 DECL_MODE (t) = FUNCTION_MODE;
1054 }
1055 else
1056 DECL_ALIGN (t) = 1;
1057 }
1058 DECL_SOURCE_LOCATION (t) = input_location;
1059 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1060 DECL_UID (t) = --next_debug_decl_uid;
1061 else
1062 {
1063 DECL_UID (t) = allocate_decl_uid ();
1064 SET_DECL_PT_UID (t, -1);
1065 }
1066 if (TREE_CODE (t) == LABEL_DECL)
1067 LABEL_DECL_UID (t) = -1;
1068
1069 break;
1070
1071 case tcc_type:
1072 TYPE_UID (t) = next_type_uid++;
1073 TYPE_ALIGN (t) = BITS_PER_UNIT;
1074 TYPE_USER_ALIGN (t) = 0;
1075 TYPE_MAIN_VARIANT (t) = t;
1076 TYPE_CANONICAL (t) = t;
1077
1078 /* Default to no attributes for type, but let target change that. */
1079 TYPE_ATTRIBUTES (t) = NULL_TREE;
1080 targetm.set_default_type_attributes (t);
1081
1082 /* We have not yet computed the alias set for this type. */
1083 TYPE_ALIAS_SET (t) = -1;
1084 break;
1085
1086 case tcc_constant:
1087 TREE_CONSTANT (t) = 1;
1088 break;
1089
1090 case tcc_expression:
1091 switch (code)
1092 {
1093 case INIT_EXPR:
1094 case MODIFY_EXPR:
1095 case VA_ARG_EXPR:
1096 case PREDECREMENT_EXPR:
1097 case PREINCREMENT_EXPR:
1098 case POSTDECREMENT_EXPR:
1099 case POSTINCREMENT_EXPR:
1100 /* All of these have side-effects, no matter what their
1101 operands are. */
1102 TREE_SIDE_EFFECTS (t) = 1;
1103 break;
1104
1105 default:
1106 break;
1107 }
1108 break;
1109
1110 default:
1111 /* Other classes need no special treatment. */
1112 break;
1113 }
1114
1115 return t;
1116 }
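/* Illustrative example (not part of the original source, compiled out):
   typical make_node uses and the per-class initialization done above.
   The example_* name below is a placeholder.  */
#if 0
static void
example_make_node (void)
{
  /* A freshly made type is its own main variant and has no alias set
     computed yet.  */
  tree t = make_node (INTEGER_TYPE);
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t
		       && TYPE_ALIAS_SET (t) == -1);

  /* A fresh decl picks up the current input_location and a new UID.  */
  tree d = make_node (VAR_DECL);
  gcc_checking_assert (DECL_SOURCE_LOCATION (d) == input_location);
}
#endif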
1117 \f
1118 /* Return a new node with the same contents as NODE except that its
1119 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1120
1121 tree
1122 copy_node_stat (tree node MEM_STAT_DECL)
1123 {
1124 tree t;
1125 enum tree_code code = TREE_CODE (node);
1126 size_t length;
1127
1128 gcc_assert (code != STATEMENT_LIST);
1129
1130 length = tree_size (node);
1131 record_node_allocation_statistics (code, length);
1132 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1133 memcpy (t, node, length);
1134
1135 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1136 TREE_CHAIN (t) = 0;
1137 TREE_ASM_WRITTEN (t) = 0;
1138 TREE_VISITED (t) = 0;
1139
1140 if (TREE_CODE_CLASS (code) == tcc_declaration)
1141 {
1142 if (code == DEBUG_EXPR_DECL)
1143 DECL_UID (t) = --next_debug_decl_uid;
1144 else
1145 {
1146 DECL_UID (t) = allocate_decl_uid ();
1147 if (DECL_PT_UID_SET_P (node))
1148 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1149 }
1150 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1151 && DECL_HAS_VALUE_EXPR_P (node))
1152 {
1153 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1154 DECL_HAS_VALUE_EXPR_P (t) = 1;
1155 }
1156 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1157 if (TREE_CODE (node) == VAR_DECL)
1158 {
1159 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1160 t->decl_with_vis.symtab_node = NULL;
1161 }
1162 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1163 {
1164 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1165 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1166 }
1167 if (TREE_CODE (node) == FUNCTION_DECL)
1168 {
1169 DECL_STRUCT_FUNCTION (t) = NULL;
1170 t->decl_with_vis.symtab_node = NULL;
1171 }
1172 }
1173 else if (TREE_CODE_CLASS (code) == tcc_type)
1174 {
1175 TYPE_UID (t) = next_type_uid++;
1176 /* The following is so that the debug code for
1177 the copy is different from the original type.
1178 The two statements usually duplicate each other
1179 (because they clear fields of the same union),
1180 but the optimizer should catch that. */
1181 TYPE_SYMTAB_POINTER (t) = 0;
1182 TYPE_SYMTAB_ADDRESS (t) = 0;
1183
1184 /* Do not copy the values cache. */
1185 if (TYPE_CACHED_VALUES_P (t))
1186 {
1187 TYPE_CACHED_VALUES_P (t) = 0;
1188 TYPE_CACHED_VALUES (t) = NULL_TREE;
1189 }
1190 }
1191
1192 return t;
1193 }
1194
1195 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1196 For example, this can copy a list made of TREE_LIST nodes. */
1197
1198 tree
1199 copy_list (tree list)
1200 {
1201 tree head;
1202 tree prev, next;
1203
1204 if (list == 0)
1205 return 0;
1206
1207 head = prev = copy_node (list);
1208 next = TREE_CHAIN (list);
1209 while (next)
1210 {
1211 TREE_CHAIN (prev) = copy_node (next);
1212 prev = TREE_CHAIN (prev);
1213 next = TREE_CHAIN (next);
1214 }
1215 return head;
1216 }
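/* Illustrative example (not part of the original source, compiled out):
   copy_list duplicates every TREE_LIST cell but shares the values.
   The example_* name below is a placeholder.  */
#if 0
static void
example_copy_list (void)
{
  tree orig = tree_cons (NULL_TREE, integer_zero_node,
			 build_tree_list (NULL_TREE, integer_one_node));
  tree copy = copy_list (orig);
  /* New list cells...  */
  gcc_checking_assert (copy != orig
		       && TREE_CHAIN (copy) != TREE_CHAIN (orig));
  /* ...but the listed values themselves are shared, not copied.  */
  gcc_checking_assert (TREE_VALUE (copy) == TREE_VALUE (orig));
}
#endif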
1217
1218 \f
1219 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1220 INTEGER_CST with value CST and type TYPE. */
1221
1222 static unsigned int
1223 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1224 {
1225 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1226 /* We need an extra zero HWI if CST is an unsigned integer with its
1227 upper bit set, and if CST occupies a whole number of HWIs. */
1228 if (TYPE_UNSIGNED (type)
1229 && wi::neg_p (cst)
1230 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1231 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1232 return cst.get_len ();
1233 }
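/* Illustrative example (not part of the original source, compiled out):
   why the extra zero HWI is needed.  For a 64-bit unsigned type the
   value 2^64 - 1 is stored as the single HWI -1; without a trailing
   zero element the extended form would read back as a negative number.
   The example_* name below is a placeholder.  */
#if 0
static void
example_ext_nunits (void)
{
  wide_int allones = wi::minus_one (TYPE_PRECISION (uint64_type_node));
  gcc_checking_assert (allones.get_len () == 1);
  gcc_checking_assert (get_int_cst_ext_nunits (uint64_type_node, allones)
		       == 2);
}
#endif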
1234
1235 /* Return a new INTEGER_CST with value CST and type TYPE. */
1236
1237 static tree
1238 build_new_int_cst (tree type, const wide_int &cst)
1239 {
1240 unsigned int len = cst.get_len ();
1241 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1242 tree nt = make_int_cst (len, ext_len);
1243
1244 if (len < ext_len)
1245 {
1246 --ext_len;
1247 TREE_INT_CST_ELT (nt, ext_len) = 0;
1248 for (unsigned int i = len; i < ext_len; ++i)
1249 TREE_INT_CST_ELT (nt, i) = -1;
1250 }
1251 else if (TYPE_UNSIGNED (type)
1252 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1253 {
1254 len--;
1255 TREE_INT_CST_ELT (nt, len)
1256 = zext_hwi (cst.elt (len),
1257 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1258 }
1259
1260 for (unsigned int i = 0; i < len; i++)
1261 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1262 TREE_TYPE (nt) = type;
1263 return nt;
1264 }
1265
1266 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1267
1268 tree
1269 build_int_cst (tree type, HOST_WIDE_INT low)
1270 {
1271 /* Support legacy code. */
1272 if (!type)
1273 type = integer_type_node;
1274
1275 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1276 }
1277
1278 tree
1279 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1280 {
1281 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1282 }
1283
1284 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1285
1286 tree
1287 build_int_cst_type (tree type, HOST_WIDE_INT low)
1288 {
1289 gcc_assert (type);
1290 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1291 }
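/* Illustrative example (not part of the original source, compiled out):
   the three entry points above differ only in how LOW is extended and
   in whether a NULL type is tolerated.  The example_* name below is a
   placeholder.  */
#if 0
static void
example_int_cst_builders (void)
{
  /* Sign-extended; a NULL type falls back to integer_type_node.  */
  tree a = build_int_cst (NULL_TREE, -1);
  /* Zero-extended from an unsigned HOST_WIDE_INT.  */
  tree b = build_int_cstu (size_type_node, 16);
  /* Like build_int_cst, but the type is mandatory.  */
  tree c = build_int_cst_type (char_type_node, 'x');
  gcc_checking_assert (TREE_TYPE (a) == integer_type_node
		       && tree_to_uhwi (b) == 16
		       && tree_to_shwi (c) == 'x');
}
#endif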
1292
1293 /* Construct a tree of type TYPE with the value given by CST. The
1294 signedness of CST is assumed to be the same as the signedness of TYPE. */
1295
1296 tree
1297 double_int_to_tree (tree type, double_int cst)
1298 {
1299 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1300 }
1301
1302 /* We force the wide_int CST to the range of the type TYPE by sign or
1303 zero extending it. OVERFLOWABLE indicates whether we are interested
1304 in overflow of the value: when >0 we are only interested in signed
1305 overflow, when <0 we are interested in any overflow. OVERFLOWED
1306 indicates whether overflow has already occurred.
1307 The value is forced into the range of TYPE by discarding the bits
1308 outside the type's precision and sign or zero extending the rest.
1309 We set TREE_OVERFLOW on the returned constant if
1310 OVERFLOWED is nonzero,
1311 or OVERFLOWABLE is >0 and signed overflow occurs,
1312 or OVERFLOWABLE is <0 and any overflow occurs.
1313 We return a tree node for the extended wide_int; the node is
1314 shared if no overflow flag is set. */
1315
1316
1317 tree
1318 force_fit_type (tree type, const wide_int_ref &cst,
1319 int overflowable, bool overflowed)
1320 {
1321 signop sign = TYPE_SIGN (type);
1322
1323 /* If we need to set overflow flags, return a new unshared node. */
1324 if (overflowed || !wi::fits_to_tree_p (cst, type))
1325 {
1326 if (overflowed
1327 || overflowable < 0
1328 || (overflowable > 0 && sign == SIGNED))
1329 {
1330 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1331 tree t = build_new_int_cst (type, tmp);
1332 TREE_OVERFLOW (t) = 1;
1333 return t;
1334 }
1335 }
1336
1337 /* Else build a shared node. */
1338 return wide_int_to_tree (type, cst);
1339 }
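/* Illustrative example (not part of the original source, compiled out):
   forcing the value 256 into an unsigned 8-bit type wraps it to 0 and,
   because the caller asked to track any overflow, sets TREE_OVERFLOW on
   the result.  Assumes the usual 8-bit unsigned char; the example_*
   name below is a placeholder.  */
#if 0
static void
example_force_fit_type (void)
{
  wide_int w = wi::shwi (256, TYPE_PRECISION (integer_type_node));
  tree t = force_fit_type (unsigned_char_type_node, w, -1, false);
  gcc_checking_assert (integer_zerop (t) && TREE_OVERFLOW (t));
}
#endif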
1340
1341 /* These are the hash table functions for the hash table of INTEGER_CST
1342 nodes of a sizetype. */
1343
1344 /* Return the hash code of X, an INTEGER_CST. */
1345
1346 hashval_t
1347 int_cst_hasher::hash (tree x)
1348 {
1349 const_tree const t = x;
1350 hashval_t code = TYPE_UID (TREE_TYPE (t));
1351 int i;
1352
1353 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1354 code ^= TREE_INT_CST_ELT (t, i);
1355
1356 return code;
1357 }
1358
1359 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1360 is the same as that represented by Y, also an INTEGER_CST tree node. */
1361
1362 bool
1363 int_cst_hasher::equal (tree x, tree y)
1364 {
1365 const_tree const xt = x;
1366 const_tree const yt = y;
1367
1368 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1369 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1370 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1371 return false;
1372
1373 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1374 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1375 return false;
1376
1377 return true;
1378 }
1379
1380 /* Create an INT_CST node of TYPE and value CST.
1381 The returned node is always shared. For small integers we use a
1382 per-type vector cache, for larger ones we use a single hash table.
1383 The value is extended from its precision according to the sign of
1384 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1385 the upper bits and ensures that hashing and value equality based
1386 upon the underlying HOST_WIDE_INTs works without masking. */
1387
1388 tree
1389 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1390 {
1391 tree t;
1392 int ix = -1;
1393 int limit = 0;
1394
1395 gcc_assert (type);
1396 unsigned int prec = TYPE_PRECISION (type);
1397 signop sgn = TYPE_SIGN (type);
1398
1399 /* Verify that everything is canonical. */
1400 int l = pcst.get_len ();
1401 if (l > 1)
1402 {
1403 if (pcst.elt (l - 1) == 0)
1404 gcc_checking_assert (pcst.elt (l - 2) < 0);
1405 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1406 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1407 }
1408
1409 wide_int cst = wide_int::from (pcst, prec, sgn);
1410 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1411
1412 if (ext_len == 1)
1413 {
1414 /* We just need to store a single HOST_WIDE_INT. */
1415 HOST_WIDE_INT hwi;
1416 if (TYPE_UNSIGNED (type))
1417 hwi = cst.to_uhwi ();
1418 else
1419 hwi = cst.to_shwi ();
1420
1421 switch (TREE_CODE (type))
1422 {
1423 case NULLPTR_TYPE:
1424 gcc_assert (hwi == 0);
1425 /* Fallthru. */
1426
1427 case POINTER_TYPE:
1428 case REFERENCE_TYPE:
1429 case POINTER_BOUNDS_TYPE:
1430 /* Cache NULL pointer and zero bounds. */
1431 if (hwi == 0)
1432 {
1433 limit = 1;
1434 ix = 0;
1435 }
1436 break;
1437
1438 case BOOLEAN_TYPE:
1439 /* Cache false or true. */
1440 limit = 2;
1441 if (hwi < 2)
1442 ix = hwi;
1443 break;
1444
1445 case INTEGER_TYPE:
1446 case OFFSET_TYPE:
1447 if (TYPE_SIGN (type) == UNSIGNED)
1448 {
1449 /* Cache [0, N). */
1450 limit = INTEGER_SHARE_LIMIT;
1451 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1452 ix = hwi;
1453 }
1454 else
1455 {
1456 /* Cache [-1, N). */
1457 limit = INTEGER_SHARE_LIMIT + 1;
1458 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1459 ix = hwi + 1;
1460 }
1461 break;
1462
1463 case ENUMERAL_TYPE:
1464 break;
1465
1466 default:
1467 gcc_unreachable ();
1468 }
1469
1470 if (ix >= 0)
1471 {
1472 /* Look for it in the type's vector of small shared ints. */
1473 if (!TYPE_CACHED_VALUES_P (type))
1474 {
1475 TYPE_CACHED_VALUES_P (type) = 1;
1476 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1477 }
1478
1479 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1480 if (t)
1481 /* Make sure no one is clobbering the shared constant. */
1482 gcc_checking_assert (TREE_TYPE (t) == type
1483 && TREE_INT_CST_NUNITS (t) == 1
1484 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1485 && TREE_INT_CST_EXT_NUNITS (t) == 1
1486 && TREE_INT_CST_ELT (t, 0) == hwi);
1487 else
1488 {
1489 /* Create a new shared int. */
1490 t = build_new_int_cst (type, cst);
1491 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1492 }
1493 }
1494 else
1495 {
1496 /* Use the cache of larger shared ints, using int_cst_node as
1497 a temporary. */
1498
1499 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1500 TREE_TYPE (int_cst_node) = type;
1501
1502 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1503 t = *slot;
1504 if (!t)
1505 {
1506 /* Insert this one into the hash table. */
1507 t = int_cst_node;
1508 *slot = t;
1509 /* Make a new node for next time round. */
1510 int_cst_node = make_int_cst (1, 1);
1511 }
1512 }
1513 }
1514 else
1515 {
1516 /* The value either hashes properly or we drop it on the floor
1517 for the gc to take care of. There will not be enough of them
1518 to worry about. */
1519
1520 tree nt = build_new_int_cst (type, cst);
1521 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1522 t = *slot;
1523 if (!t)
1524 {
1525 /* Insert this one into the hash table. */
1526 t = nt;
1527 *slot = t;
1528 }
1529 }
1530
1531 return t;
1532 }
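/* Illustrative example (not part of the original source, compiled out):
   small values are shared through TYPE_CACHED_VALUES, so repeated
   requests return the identical node; this is also why nodes such as
   integer_zero_node compare pointer-equal with freshly built constants.
   The example_* name below is a placeholder.  */
#if 0
static void
example_shared_int_csts (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  gcc_checking_assert (a == b);
}
#endif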
1533
1534 void
1535 cache_integer_cst (tree t)
1536 {
1537 tree type = TREE_TYPE (t);
1538 int ix = -1;
1539 int limit = 0;
1540 int prec = TYPE_PRECISION (type);
1541
1542 gcc_assert (!TREE_OVERFLOW (t));
1543
1544 switch (TREE_CODE (type))
1545 {
1546 case NULLPTR_TYPE:
1547 gcc_assert (integer_zerop (t));
1548 /* Fallthru. */
1549
1550 case POINTER_TYPE:
1551 case REFERENCE_TYPE:
1552 /* Cache NULL pointer. */
1553 if (integer_zerop (t))
1554 {
1555 limit = 1;
1556 ix = 0;
1557 }
1558 break;
1559
1560 case BOOLEAN_TYPE:
1561 /* Cache false or true. */
1562 limit = 2;
1563 if (wi::ltu_p (t, 2))
1564 ix = TREE_INT_CST_ELT (t, 0);
1565 break;
1566
1567 case INTEGER_TYPE:
1568 case OFFSET_TYPE:
1569 if (TYPE_UNSIGNED (type))
1570 {
1571 /* Cache 0..N */
1572 limit = INTEGER_SHARE_LIMIT;
1573
1574 /* This is a little hokey, but if the prec is smaller than
1575 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1576 obvious test will not get the correct answer. */
1577 if (prec < HOST_BITS_PER_WIDE_INT)
1578 {
1579 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1580 ix = tree_to_uhwi (t);
1581 }
1582 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1583 ix = tree_to_uhwi (t);
1584 }
1585 else
1586 {
1587 /* Cache -1..N */
1588 limit = INTEGER_SHARE_LIMIT + 1;
1589
1590 if (integer_minus_onep (t))
1591 ix = 0;
1592 else if (!wi::neg_p (t))
1593 {
1594 if (prec < HOST_BITS_PER_WIDE_INT)
1595 {
1596 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1597 ix = tree_to_shwi (t) + 1;
1598 }
1599 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1600 ix = tree_to_shwi (t) + 1;
1601 }
1602 }
1603 break;
1604
1605 case ENUMERAL_TYPE:
1606 break;
1607
1608 default:
1609 gcc_unreachable ();
1610 }
1611
1612 if (ix >= 0)
1613 {
1614 /* Look for it in the type's vector of small shared ints. */
1615 if (!TYPE_CACHED_VALUES_P (type))
1616 {
1617 TYPE_CACHED_VALUES_P (type) = 1;
1618 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1619 }
1620
1621 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1622 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1623 }
1624 else
1625 {
1626 /* Use the cache of larger shared ints. */
1627 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1628 /* If there is already an entry for the number verify it's the
1629 same. */
1630 if (*slot)
1631 gcc_assert (wi::eq_p (tree (*slot), t));
1632 else
1633 /* Otherwise insert this one into the hash table. */
1634 *slot = t;
1635 }
1636 }
1637
1638
1639 /* Builds an integer constant of type TYPE such that the lowest BITS bits
1640 are ones and the rest are zeros. */
1641
1642 tree
1643 build_low_bits_mask (tree type, unsigned bits)
1644 {
1645 gcc_assert (bits <= TYPE_PRECISION (type));
1646
1647 return wide_int_to_tree (type, wi::mask (bits, false,
1648 TYPE_PRECISION (type)));
1649 }
1650
1651 /* Checks that X is an integer constant that can be expressed in an
1652 (unsigned) HOST_WIDE_INT without loss of precision. */
1653
1654 bool
1655 cst_and_fits_in_hwi (const_tree x)
1656 {
1657 if (TREE_CODE (x) != INTEGER_CST)
1658 return false;
1659
1660 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1661 return false;
1662
1663 return TREE_INT_CST_NUNITS (x) == 1;
1664 }
1665
1666 /* Build a newly constructed VECTOR_CST node with room for LEN elements. */
1667
1668 tree
1669 make_vector_stat (unsigned len MEM_STAT_DECL)
1670 {
1671 tree t;
1672 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1673
1674 record_node_allocation_statistics (VECTOR_CST, length);
1675
1676 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1677
1678 TREE_SET_CODE (t, VECTOR_CST);
1679 TREE_CONSTANT (t) = 1;
1680
1681 return t;
1682 }
1683
1684 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1685 are in a list pointed to by VALS. */
1686
1687 tree
1688 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1689 {
1690 int over = 0;
1691 unsigned cnt = 0;
1692 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1693 TREE_TYPE (v) = type;
1694
1695 /* Iterate through elements and check for overflow. */
1696 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1697 {
1698 tree value = vals[cnt];
1699
1700 VECTOR_CST_ELT (v, cnt) = value;
1701
1702 /* Don't crash if we get an address constant. */
1703 if (!CONSTANT_CLASS_P (value))
1704 continue;
1705
1706 over |= TREE_OVERFLOW (value);
1707 }
1708
1709 TREE_OVERFLOW (v) = over;
1710 return v;
1711 }
1712
1713 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1714 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1715
1716 tree
1717 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1718 {
1719 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1720 unsigned HOST_WIDE_INT idx;
1721 tree value;
1722
1723 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1724 vec[idx] = value;
1725 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1726 vec[idx] = build_zero_cst (TREE_TYPE (type));
1727
1728 return build_vector (type, vec);
1729 }
1730
1731 /* Build a vector of type VECTYPE where all the elements are SCs. */
1732 tree
1733 build_vector_from_val (tree vectype, tree sc)
1734 {
1735 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1736
1737 if (sc == error_mark_node)
1738 return sc;
1739
1740 /* Verify that the vector type is suitable for SC. Note that there
1741 is some inconsistency in the type-system with respect to restrict
1742 qualifications of pointers. Vector types always have a main-variant
1743 element type and the qualification is applied to the vector-type.
1744 So TREE_TYPE (vector-type) does not return a properly qualified
1745 vector element-type. */
1746 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1747 TREE_TYPE (vectype)));
1748
1749 if (CONSTANT_CLASS_P (sc))
1750 {
1751 tree *v = XALLOCAVEC (tree, nunits);
1752 for (i = 0; i < nunits; ++i)
1753 v[i] = sc;
1754 return build_vector (vectype, v);
1755 }
1756 else
1757 {
1758 vec<constructor_elt, va_gc> *v;
1759 vec_alloc (v, nunits);
1760 for (i = 0; i < nunits; ++i)
1761 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1762 return build_constructor (vectype, v);
1763 }
1764 }
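/* Illustrative example (not part of the original source, compiled out):
   splatting a scalar across a 4-element integer vector.  A constant
   scalar yields a VECTOR_CST; a non-constant one yields a CONSTRUCTOR.
   The example_* name below is a placeholder.  */
#if 0
static void
example_vector_splat (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  tree one = build_int_cst (integer_type_node, 1);
  tree ones = build_vector_from_val (v4si, one);
  gcc_checking_assert (TREE_CODE (ones) == VECTOR_CST
		       && integer_onep (VECTOR_CST_ELT (ones, 3)));
}
#endif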
1765
1766 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1767 are in the vec pointed to by VALS. */
1768 tree
1769 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1770 {
1771 tree c = make_node (CONSTRUCTOR);
1772 unsigned int i;
1773 constructor_elt *elt;
1774 bool constant_p = true;
1775 bool side_effects_p = false;
1776
1777 TREE_TYPE (c) = type;
1778 CONSTRUCTOR_ELTS (c) = vals;
1779
1780 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1781 {
1782 /* Mostly ctors will have elts that don't have side-effects, so
1783 the usual case is to scan all the elements. Hence a single
1784 loop for both const and side effects, rather than one loop
1785 each (with early outs). */
1786 if (!TREE_CONSTANT (elt->value))
1787 constant_p = false;
1788 if (TREE_SIDE_EFFECTS (elt->value))
1789 side_effects_p = true;
1790 }
1791
1792 TREE_SIDE_EFFECTS (c) = side_effects_p;
1793 TREE_CONSTANT (c) = constant_p;
1794
1795 return c;
1796 }
1797
1798 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1799 INDEX and VALUE. */
1800 tree
1801 build_constructor_single (tree type, tree index, tree value)
1802 {
1803 vec<constructor_elt, va_gc> *v;
1804 constructor_elt elt = {index, value};
1805
1806 vec_alloc (v, 1);
1807 v->quick_push (elt);
1808
1809 return build_constructor (type, v);
1810 }
1811
1812
1813 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1814 are in a list pointed to by VALS. */
1815 tree
1816 build_constructor_from_list (tree type, tree vals)
1817 {
1818 tree t;
1819 vec<constructor_elt, va_gc> *v = NULL;
1820
1821 if (vals)
1822 {
1823 vec_alloc (v, list_length (vals));
1824 for (t = vals; t; t = TREE_CHAIN (t))
1825 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1826 }
1827
1828 return build_constructor (type, v);
1829 }
1830
1831 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1832 of elements, provided as index/value pairs. */
1833
1834 tree
1835 build_constructor_va (tree type, int nelts, ...)
1836 {
1837 vec<constructor_elt, va_gc> *v = NULL;
1838 va_list p;
1839
1840 va_start (p, nelts);
1841 vec_alloc (v, nelts);
1842 while (nelts--)
1843 {
1844 tree index = va_arg (p, tree);
1845 tree value = va_arg (p, tree);
1846 CONSTRUCTOR_APPEND_ELT (v, index, value);
1847 }
1848 va_end (p);
1849 return build_constructor (type, v);
1850 }
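/* Illustrative example (not part of the original source, compiled out):
   building the initializer { [0] = 1, [1] = 2 } for a two-element
   integer array with the varargs helper above.  The example_* name
   below is a placeholder.  */
#if 0
static void
example_constructor_va (void)
{
  tree arr = build_array_type (integer_type_node,
			       build_index_type (size_one_node));
  tree ctor = build_constructor_va (arr, 2,
				    size_zero_node,
				    build_int_cst (integer_type_node, 1),
				    size_one_node,
				    build_int_cst (integer_type_node, 2));
  gcc_checking_assert (TREE_CONSTANT (ctor) && CONSTRUCTOR_NELTS (ctor) == 2);
}
#endif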
1851
1852 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1853
1854 tree
1855 build_fixed (tree type, FIXED_VALUE_TYPE f)
1856 {
1857 tree v;
1858 FIXED_VALUE_TYPE *fp;
1859
1860 v = make_node (FIXED_CST);
1861 fp = ggc_alloc<fixed_value> ();
1862 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1863
1864 TREE_TYPE (v) = type;
1865 TREE_FIXED_CST_PTR (v) = fp;
1866 return v;
1867 }
1868
1869 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1870
1871 tree
1872 build_real (tree type, REAL_VALUE_TYPE d)
1873 {
1874 tree v;
1875 REAL_VALUE_TYPE *dp;
1876 int overflow = 0;
1877
1878 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1879 Consider doing it via real_convert now. */
1880
1881 v = make_node (REAL_CST);
1882 dp = ggc_alloc<real_value> ();
1883 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1884
1885 TREE_TYPE (v) = type;
1886 TREE_REAL_CST_PTR (v) = dp;
1887 TREE_OVERFLOW (v) = overflow;
1888 return v;
1889 }
1890
1891 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1892 node I, converted to the mode of TYPE (VOIDmode if TYPE is NULL). */
1893
1894 REAL_VALUE_TYPE
1895 real_value_from_int_cst (const_tree type, const_tree i)
1896 {
1897 REAL_VALUE_TYPE d;
1898
1899 /* Clear all bits of the real value type so that we can later do
1900 bitwise comparisons to see if two values are the same. */
1901 memset (&d, 0, sizeof d);
1902
1903 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1904 TYPE_SIGN (TREE_TYPE (i)));
1905 return d;
1906 }
1907
1908 /* Given a tree representing an integer constant I, return a tree
1909 representing the same value as a floating-point constant of type TYPE. */
1910
1911 tree
1912 build_real_from_int_cst (tree type, const_tree i)
1913 {
1914 tree v;
1915 int overflow = TREE_OVERFLOW (i);
1916
1917 v = build_real (type, real_value_from_int_cst (type, i));
1918
1919 TREE_OVERFLOW (v) |= overflow;
1920 return v;
1921 }
1922
1923 /* Return a newly constructed STRING_CST node whose value is
1924 the LEN characters at STR.
1925 Note that for a C string literal, LEN should include the trailing NUL.
1926 The TREE_TYPE is not initialized. */
1927
1928 tree
1929 build_string (int len, const char *str)
1930 {
1931 tree s;
1932 size_t length;
1933
1934 /* Do not waste bytes provided by padding of struct tree_string. */
1935 length = len + offsetof (struct tree_string, str) + 1;
1936
1937 record_node_allocation_statistics (STRING_CST, length);
1938
1939 s = (tree) ggc_internal_alloc (length);
1940
1941 memset (s, 0, sizeof (struct tree_typed));
1942 TREE_SET_CODE (s, STRING_CST);
1943 TREE_CONSTANT (s) = 1;
1944 TREE_STRING_LENGTH (s) = len;
1945 memcpy (s->string.str, str, len);
1946 s->string.str[len] = '\0';
1947
1948 return s;
1949 }
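/* Illustrative example (not part of the original source, compiled out):
   for the C literal "hi" the length passed in includes the trailing
   NUL, and the caller is responsible for giving the node a type.  The
   example_* name below is a placeholder.  */
#if 0
static void
example_build_string (void)
{
  tree s = build_string (3, "hi");
  gcc_checking_assert (TREE_STRING_LENGTH (s) == 3
		       && TREE_STRING_POINTER (s)[2] == '\0');
  /* One plausible type: an array of three chars.  */
  TREE_TYPE (s) = build_array_type (char_type_node,
				    build_index_type (size_int (2)));
}
#endif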
1950
1951 /* Return a newly constructed COMPLEX_CST node whose value is
1952 specified by the real and imaginary parts REAL and IMAG.
1953 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1954 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1955
1956 tree
1957 build_complex (tree type, tree real, tree imag)
1958 {
1959 tree t = make_node (COMPLEX_CST);
1960
1961 TREE_REALPART (t) = real;
1962 TREE_IMAGPART (t) = imag;
1963 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1964 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1965 return t;
1966 }
1967
1968 /* Return a constant of arithmetic type TYPE which is the
1969 multiplicative identity of the set TYPE. */
1970
1971 tree
1972 build_one_cst (tree type)
1973 {
1974 switch (TREE_CODE (type))
1975 {
1976 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1977 case POINTER_TYPE: case REFERENCE_TYPE:
1978 case OFFSET_TYPE:
1979 return build_int_cst (type, 1);
1980
1981 case REAL_TYPE:
1982 return build_real (type, dconst1);
1983
1984 case FIXED_POINT_TYPE:
1985 /* We can only generate 1 for accum types. */
1986 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1987 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1988
1989 case VECTOR_TYPE:
1990 {
1991 tree scalar = build_one_cst (TREE_TYPE (type));
1992
1993 return build_vector_from_val (type, scalar);
1994 }
1995
1996 case COMPLEX_TYPE:
1997 return build_complex (type,
1998 build_one_cst (TREE_TYPE (type)),
1999 build_zero_cst (TREE_TYPE (type)));
2000
2001 default:
2002 gcc_unreachable ();
2003 }
2004 }
2005
2006 /* Return an integer constant of type TYPE with all bits set across its full
2007 precision, or a complex or vector constant whose elements are such integers. */
2008
2009 tree
2010 build_all_ones_cst (tree type)
2011 {
2012 if (TREE_CODE (type) == COMPLEX_TYPE)
2013 {
2014 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2015 return build_complex (type, scalar, scalar);
2016 }
2017 else
2018 return build_minus_one_cst (type);
2019 }
2020
2021 /* Return a constant of arithmetic type TYPE which is the
2022 opposite of the multiplicative identity of the set TYPE. */
2023
2024 tree
2025 build_minus_one_cst (tree type)
2026 {
2027 switch (TREE_CODE (type))
2028 {
2029 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2030 case POINTER_TYPE: case REFERENCE_TYPE:
2031 case OFFSET_TYPE:
2032 return build_int_cst (type, -1);
2033
2034 case REAL_TYPE:
2035 return build_real (type, dconstm1);
2036
2037 case FIXED_POINT_TYPE:
2038 /* We can only generate -1 for accum types. */
2039 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2040 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2041 TYPE_MODE (type)));
2042
2043 case VECTOR_TYPE:
2044 {
2045 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2046
2047 return build_vector_from_val (type, scalar);
2048 }
2049
2050 case COMPLEX_TYPE:
2051 return build_complex (type,
2052 build_minus_one_cst (TREE_TYPE (type)),
2053 build_zero_cst (TREE_TYPE (type)));
2054
2055 default:
2056 gcc_unreachable ();
2057 }
2058 }
2059
2060 /* Build a zero constant of type TYPE. This is used by constructor folding
2061 and thus the constant must be representable in memory by
2062 all-zero bytes. */
2063
2064 tree
2065 build_zero_cst (tree type)
2066 {
2067 switch (TREE_CODE (type))
2068 {
2069 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2070 case POINTER_TYPE: case REFERENCE_TYPE:
2071 case OFFSET_TYPE: case NULLPTR_TYPE:
2072 return build_int_cst (type, 0);
2073
2074 case REAL_TYPE:
2075 return build_real (type, dconst0);
2076
2077 case FIXED_POINT_TYPE:
2078 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2079
2080 case VECTOR_TYPE:
2081 {
2082 tree scalar = build_zero_cst (TREE_TYPE (type));
2083
2084 return build_vector_from_val (type, scalar);
2085 }
2086
2087 case COMPLEX_TYPE:
2088 {
2089 tree zero = build_zero_cst (TREE_TYPE (type));
2090
2091 return build_complex (type, zero, zero);
2092 }
2093
2094 default:
2095 if (!AGGREGATE_TYPE_P (type))
2096 return fold_convert (type, integer_zero_node);
2097 return build_constructor (type, NULL);
2098 }
2099 }
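
/* Illustrative sketch, not part of GCC: the build_zero_cst, build_one_cst
   and build_minus_one_cst constructors above in action.  The example_*
   helper name is hypothetical; integer_type_node and double_type_node are
   assumed to be initialized as usual.  */

static void
example_identity_constants (void)
{
  /* 0 and 1 of type int.  */
  tree izero = build_zero_cst (integer_type_node);
  tree ione = build_one_cst (integer_type_node);
  gcc_assert (integer_zerop (izero) && integer_onep (ione));

  /* -1 of type double, i.e. a REAL_CST holding dconstm1.  */
  tree dm1 = build_minus_one_cst (double_type_node);
  gcc_assert (real_minus_onep (dm1));

  /* For a COMPLEX_TYPE, build_one_cst produces 1 + 0i.  */
  tree ctype = build_complex_type (double_type_node);
  tree cone = build_one_cst (ctype);
  gcc_assert (TREE_CODE (cone) == COMPLEX_CST
	      && real_onep (TREE_REALPART (cone))
	      && real_zerop (TREE_IMAGPART (cone)));
}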
2100
2101
2102 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2103
2104 tree
2105 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2106 {
2107 tree t;
2108 size_t length = (offsetof (struct tree_binfo, base_binfos)
2109 + vec<tree, va_gc>::embedded_size (base_binfos));
2110
2111 record_node_allocation_statistics (TREE_BINFO, length);
2112
2113 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2114
2115 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2116
2117 TREE_SET_CODE (t, TREE_BINFO);
2118
2119 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2120
2121 return t;
2122 }
2123
2124 /* Create a CASE_LABEL_EXPR tree node and return it. */
2125
2126 tree
2127 build_case_label (tree low_value, tree high_value, tree label_decl)
2128 {
2129 tree t = make_node (CASE_LABEL_EXPR);
2130
2131 TREE_TYPE (t) = void_type_node;
2132 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2133
2134 CASE_LOW (t) = low_value;
2135 CASE_HIGH (t) = high_value;
2136 CASE_LABEL (t) = label_decl;
2137 CASE_CHAIN (t) = NULL_TREE;
2138
2139 return t;
2140 }
2141
2142 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2143 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2144 The latter determines the length of the HOST_WIDE_INT vector. */
2145
2146 tree
2147 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2148 {
2149 tree t;
2150 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2151 + sizeof (struct tree_int_cst));
2152
2153 gcc_assert (len);
2154 record_node_allocation_statistics (INTEGER_CST, length);
2155
2156 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2157
2158 TREE_SET_CODE (t, INTEGER_CST);
2159 TREE_INT_CST_NUNITS (t) = len;
2160 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2161 /* to_offset can only be applied to trees that are offset_int-sized
2162 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2163 must be exactly the precision of offset_int and so LEN is correct. */
2164 if (ext_len <= OFFSET_INT_ELTS)
2165 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2166 else
2167 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2168
2169 TREE_CONSTANT (t) = 1;
2170
2171 return t;
2172 }
2173
2174 /* Build a newly constructed TREE_VEC node of length LEN. */
2175
2176 tree
2177 make_tree_vec_stat (int len MEM_STAT_DECL)
2178 {
2179 tree t;
2180 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2181
2182 record_node_allocation_statistics (TREE_VEC, length);
2183
2184 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2185
2186 TREE_SET_CODE (t, TREE_VEC);
2187 TREE_VEC_LENGTH (t) = len;
2188
2189 return t;
2190 }
2191
2192 /* Grow a TREE_VEC node to new length LEN. */
2193
2194 tree
2195 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2196 {
2197 gcc_assert (TREE_CODE (v) == TREE_VEC);
2198
2199 int oldlen = TREE_VEC_LENGTH (v);
2200 gcc_assert (len > oldlen);
2201
2202 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2203 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2204
2205 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2206
2207 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2208
2209 TREE_VEC_LENGTH (v) = len;
2210
2211 return v;
2212 }
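
/* Illustrative sketch, not part of GCC: allocating a TREE_VEC and growing
   it.  make_tree_vec and grow_tree_vec are the MEM_STAT-wrapping macros
   around the *_stat functions above; the example_* helper name is
   hypothetical and integer_type_node is assumed to be available.  */

static tree
example_tree_vec_usage (void)
{
  /* A vector of two elements; the allocator clears them to NULL_TREE.  */
  tree v = make_tree_vec (2);
  TREE_VEC_ELT (v, 0) = build_int_cst (integer_type_node, 10);
  TREE_VEC_ELT (v, 1) = build_int_cst (integer_type_node, 20);

  /* Growing reallocates on the GC heap; existing elements are kept and
     the new slot must be filled by the caller.  */
  v = grow_tree_vec (v, 3);
  TREE_VEC_ELT (v, 2) = build_int_cst (integer_type_node, 30);
  gcc_assert (TREE_VEC_LENGTH (v) == 3);
  return v;
}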
2213 \f
2214 /* Return 1 if EXPR is the integer constant zero or a complex constant
2215 of zero. */
2216
2217 int
2218 integer_zerop (const_tree expr)
2219 {
2220 STRIP_NOPS (expr);
2221
2222 switch (TREE_CODE (expr))
2223 {
2224 case INTEGER_CST:
2225 return wi::eq_p (expr, 0);
2226 case COMPLEX_CST:
2227 return (integer_zerop (TREE_REALPART (expr))
2228 && integer_zerop (TREE_IMAGPART (expr)));
2229 case VECTOR_CST:
2230 {
2231 unsigned i;
2232 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2233 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2234 return false;
2235 return true;
2236 }
2237 default:
2238 return false;
2239 }
2240 }
2241
2242 /* Return 1 if EXPR is the integer constant one or the corresponding
2243 complex constant. */
2244
2245 int
2246 integer_onep (const_tree expr)
2247 {
2248 STRIP_NOPS (expr);
2249
2250 switch (TREE_CODE (expr))
2251 {
2252 case INTEGER_CST:
2253 return wi::eq_p (wi::to_widest (expr), 1);
2254 case COMPLEX_CST:
2255 return (integer_onep (TREE_REALPART (expr))
2256 && integer_zerop (TREE_IMAGPART (expr)));
2257 case VECTOR_CST:
2258 {
2259 unsigned i;
2260 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2261 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2262 return false;
2263 return true;
2264 }
2265 default:
2266 return false;
2267 }
2268 }
2269
2270 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2271 return 1 if every piece is the integer constant one. */
2272
2273 int
2274 integer_each_onep (const_tree expr)
2275 {
2276 STRIP_NOPS (expr);
2277
2278 if (TREE_CODE (expr) == COMPLEX_CST)
2279 return (integer_onep (TREE_REALPART (expr))
2280 && integer_onep (TREE_IMAGPART (expr)));
2281 else
2282 return integer_onep (expr);
2283 }
2284
2285 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2286 it contains, or a complex or vector whose subparts are such integers. */
2287
2288 int
2289 integer_all_onesp (const_tree expr)
2290 {
2291 STRIP_NOPS (expr);
2292
2293 if (TREE_CODE (expr) == COMPLEX_CST
2294 && integer_all_onesp (TREE_REALPART (expr))
2295 && integer_all_onesp (TREE_IMAGPART (expr)))
2296 return 1;
2297
2298 else if (TREE_CODE (expr) == VECTOR_CST)
2299 {
2300 unsigned i;
2301 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2302 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2303 return 0;
2304 return 1;
2305 }
2306
2307 else if (TREE_CODE (expr) != INTEGER_CST)
2308 return 0;
2309
2310 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2311 }
2312
2313 /* Return 1 if EXPR is the integer constant minus one. */
2314
2315 int
2316 integer_minus_onep (const_tree expr)
2317 {
2318 STRIP_NOPS (expr);
2319
2320 if (TREE_CODE (expr) == COMPLEX_CST)
2321 return (integer_all_onesp (TREE_REALPART (expr))
2322 && integer_zerop (TREE_IMAGPART (expr)));
2323 else
2324 return integer_all_onesp (expr);
2325 }
2326
2327 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2328 one bit on). */
2329
2330 int
2331 integer_pow2p (const_tree expr)
2332 {
2333 STRIP_NOPS (expr);
2334
2335 if (TREE_CODE (expr) == COMPLEX_CST
2336 && integer_pow2p (TREE_REALPART (expr))
2337 && integer_zerop (TREE_IMAGPART (expr)))
2338 return 1;
2339
2340 if (TREE_CODE (expr) != INTEGER_CST)
2341 return 0;
2342
2343 return wi::popcount (expr) == 1;
2344 }
2345
2346 /* Return 1 if EXPR is an integer constant other than zero or a
2347 complex constant other than zero. */
2348
2349 int
2350 integer_nonzerop (const_tree expr)
2351 {
2352 STRIP_NOPS (expr);
2353
2354 return ((TREE_CODE (expr) == INTEGER_CST
2355 && !wi::eq_p (expr, 0))
2356 || (TREE_CODE (expr) == COMPLEX_CST
2357 && (integer_nonzerop (TREE_REALPART (expr))
2358 || integer_nonzerop (TREE_IMAGPART (expr)))));
2359 }
2360
2361 /* Return 1 if EXPR is the integer constant one. For vector,
2362 return 1 if every piece is the integer constant minus one
2363 (representing the value TRUE). */
2364
2365 int
2366 integer_truep (const_tree expr)
2367 {
2368 STRIP_NOPS (expr);
2369
2370 if (TREE_CODE (expr) == VECTOR_CST)
2371 return integer_all_onesp (expr);
2372 return integer_onep (expr);
2373 }
2374
2375 /* Return 1 if EXPR is the fixed-point constant zero. */
2376
2377 int
2378 fixed_zerop (const_tree expr)
2379 {
2380 return (TREE_CODE (expr) == FIXED_CST
2381 && TREE_FIXED_CST (expr).data.is_zero ());
2382 }
2383
2384 /* Return the exponent of the power of two represented by a tree node
2385 known to be a power of two. */
2386
2387 int
2388 tree_log2 (const_tree expr)
2389 {
2390 STRIP_NOPS (expr);
2391
2392 if (TREE_CODE (expr) == COMPLEX_CST)
2393 return tree_log2 (TREE_REALPART (expr));
2394
2395 return wi::exact_log2 (expr);
2396 }
2397
2398 /* Similar, but return the largest integer Y such that 2 ** Y is less
2399 than or equal to EXPR. */
2400
2401 int
2402 tree_floor_log2 (const_tree expr)
2403 {
2404 STRIP_NOPS (expr);
2405
2406 if (TREE_CODE (expr) == COMPLEX_CST)
2407 return tree_log2 (TREE_REALPART (expr));
2408
2409 return wi::floor_log2 (expr);
2410 }
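
/* Illustrative sketch, not part of GCC: how integer_pow2p, tree_log2 and
   tree_floor_log2 relate on small INTEGER_CSTs.  The example_* helper name
   is hypothetical; integer_type_node is assumed to be available.  */

static void
example_log2_predicates (void)
{
  tree c8 = build_int_cst (integer_type_node, 8);
  tree c10 = build_int_cst (integer_type_node, 10);

  /* 8 has a single bit set, so it is a power of two with exponent 3.  */
  gcc_assert (integer_pow2p (c8) && tree_log2 (c8) == 3);

  /* 10 is not a power of two, but floor_log2 still yields the largest Y
     with 2**Y <= 10, namely 3.  */
  gcc_assert (!integer_pow2p (c10) && tree_floor_log2 (c10) == 3);
}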
2411
2412 /* Return number of known trailing zero bits in EXPR, or, if the value of
2413 EXPR is known to be zero, the precision of its type. */
2414
2415 unsigned int
2416 tree_ctz (const_tree expr)
2417 {
2418 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2419 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2420 return 0;
2421
2422 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2423 switch (TREE_CODE (expr))
2424 {
2425 case INTEGER_CST:
2426 ret1 = wi::ctz (expr);
2427 return MIN (ret1, prec);
2428 case SSA_NAME:
2429 ret1 = wi::ctz (get_nonzero_bits (expr));
2430 return MIN (ret1, prec);
2431 case PLUS_EXPR:
2432 case MINUS_EXPR:
2433 case BIT_IOR_EXPR:
2434 case BIT_XOR_EXPR:
2435 case MIN_EXPR:
2436 case MAX_EXPR:
2437 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2438 if (ret1 == 0)
2439 return ret1;
2440 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2441 return MIN (ret1, ret2);
2442 case POINTER_PLUS_EXPR:
2443 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2444 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2445 /* The second operand is sizetype, which could in theory be
2446 wider than the pointer's precision. Make sure we never
2447 return more than prec. */
2448 ret2 = MIN (ret2, prec);
2449 return MIN (ret1, ret2);
2450 case BIT_AND_EXPR:
2451 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2452 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2453 return MAX (ret1, ret2);
2454 case MULT_EXPR:
2455 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2456 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2457 return MIN (ret1 + ret2, prec);
2458 case LSHIFT_EXPR:
2459 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2460 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2461 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2462 {
2463 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2464 return MIN (ret1 + ret2, prec);
2465 }
2466 return ret1;
2467 case RSHIFT_EXPR:
2468 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2469 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2470 {
2471 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2472 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2473 if (ret1 > ret2)
2474 return ret1 - ret2;
2475 }
2476 return 0;
2477 case TRUNC_DIV_EXPR:
2478 case CEIL_DIV_EXPR:
2479 case FLOOR_DIV_EXPR:
2480 case ROUND_DIV_EXPR:
2481 case EXACT_DIV_EXPR:
2482 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2483 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2484 {
2485 int l = tree_log2 (TREE_OPERAND (expr, 1));
2486 if (l >= 0)
2487 {
2488 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2489 ret2 = l;
2490 if (ret1 > ret2)
2491 return ret1 - ret2;
2492 }
2493 }
2494 return 0;
2495 CASE_CONVERT:
2496 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2497 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2498 ret1 = prec;
2499 return MIN (ret1, prec);
2500 case SAVE_EXPR:
2501 return tree_ctz (TREE_OPERAND (expr, 0));
2502 case COND_EXPR:
2503 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2504 if (ret1 == 0)
2505 return 0;
2506 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2507 return MIN (ret1, ret2);
2508 case COMPOUND_EXPR:
2509 return tree_ctz (TREE_OPERAND (expr, 1));
2510 case ADDR_EXPR:
2511 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2512 if (ret1 > BITS_PER_UNIT)
2513 {
2514 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2515 return MIN (ret1, prec);
2516 }
2517 return 0;
2518 default:
2519 return 0;
2520 }
2521 }
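
/* Illustrative sketch, not part of GCC: what the case analysis above
   computes for two simple trees.  The example_* helper name is
   hypothetical; integer_type_node is assumed to be available.  */

static void
example_tree_ctz (void)
{
  /* 48 = 0b110000 has four trailing zero bits.  */
  tree c48 = build_int_cst (integer_type_node, 48);
  gcc_assert (tree_ctz (c48) == 4);

  /* For 48 * 20, the MULT_EXPR case adds the operand counts (4 + 2),
     capped at the precision of the type.  */
  tree c20 = build_int_cst (integer_type_node, 20);
  tree prod = build2 (MULT_EXPR, integer_type_node, c48, c20);
  gcc_assert (tree_ctz (prod) == 6);
}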
2522
2523 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2524 decimal float constants, so don't return 1 for them. */
2525
2526 int
2527 real_zerop (const_tree expr)
2528 {
2529 STRIP_NOPS (expr);
2530
2531 switch (TREE_CODE (expr))
2532 {
2533 case REAL_CST:
2534 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2535 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2536 case COMPLEX_CST:
2537 return real_zerop (TREE_REALPART (expr))
2538 && real_zerop (TREE_IMAGPART (expr));
2539 case VECTOR_CST:
2540 {
2541 unsigned i;
2542 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2543 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2544 return false;
2545 return true;
2546 }
2547 default:
2548 return false;
2549 }
2550 }
2551
2552 /* Return 1 if EXPR is the real constant one in real or complex form.
2553 Trailing zeroes matter for decimal float constants, so don't return
2554 1 for them. */
2555
2556 int
2557 real_onep (const_tree expr)
2558 {
2559 STRIP_NOPS (expr);
2560
2561 switch (TREE_CODE (expr))
2562 {
2563 case REAL_CST:
2564 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2565 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2566 case COMPLEX_CST:
2567 return real_onep (TREE_REALPART (expr))
2568 && real_zerop (TREE_IMAGPART (expr));
2569 case VECTOR_CST:
2570 {
2571 unsigned i;
2572 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2573 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2574 return false;
2575 return true;
2576 }
2577 default:
2578 return false;
2579 }
2580 }
2581
2582 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2583 matter for decimal float constants, so don't return 1 for them. */
2584
2585 int
2586 real_minus_onep (const_tree expr)
2587 {
2588 STRIP_NOPS (expr);
2589
2590 switch (TREE_CODE (expr))
2591 {
2592 case REAL_CST:
2593 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2594 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2595 case COMPLEX_CST:
2596 return real_minus_onep (TREE_REALPART (expr))
2597 && real_zerop (TREE_IMAGPART (expr));
2598 case VECTOR_CST:
2599 {
2600 unsigned i;
2601 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2602 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2603 return false;
2604 return true;
2605 }
2606 default:
2607 return false;
2608 }
2609 }
2610
2611 /* Nonzero if EXP is a constant or a cast of a constant. */
2612
2613 int
2614 really_constant_p (const_tree exp)
2615 {
2616 /* This is not quite the same as STRIP_NOPS. It does more. */
2617 while (CONVERT_EXPR_P (exp)
2618 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2619 exp = TREE_OPERAND (exp, 0);
2620 return TREE_CONSTANT (exp);
2621 }
2622 \f
2623 /* Return first list element whose TREE_VALUE is ELEM.
2624 Return 0 if ELEM is not in LIST. */
2625
2626 tree
2627 value_member (tree elem, tree list)
2628 {
2629 while (list)
2630 {
2631 if (elem == TREE_VALUE (list))
2632 return list;
2633 list = TREE_CHAIN (list);
2634 }
2635 return NULL_TREE;
2636 }
2637
2638 /* Return first list element whose TREE_PURPOSE is ELEM.
2639 Return 0 if ELEM is not in LIST. */
2640
2641 tree
2642 purpose_member (const_tree elem, tree list)
2643 {
2644 while (list)
2645 {
2646 if (elem == TREE_PURPOSE (list))
2647 return list;
2648 list = TREE_CHAIN (list);
2649 }
2650 return NULL_TREE;
2651 }
2652
2653 /* Return true if ELEM is in V. */
2654
2655 bool
2656 vec_member (const_tree elem, vec<tree, va_gc> *v)
2657 {
2658 unsigned ix;
2659 tree t;
2660 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2661 if (elem == t)
2662 return true;
2663 return false;
2664 }
2665
2666 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2667 NULL_TREE. */
2668
2669 tree
2670 chain_index (int idx, tree chain)
2671 {
2672 for (; chain && idx > 0; --idx)
2673 chain = TREE_CHAIN (chain);
2674 return chain;
2675 }
2676
2677 /* Return nonzero if ELEM is part of the chain CHAIN. */
2678
2679 int
2680 chain_member (const_tree elem, const_tree chain)
2681 {
2682 while (chain)
2683 {
2684 if (elem == chain)
2685 return 1;
2686 chain = DECL_CHAIN (chain);
2687 }
2688
2689 return 0;
2690 }
2691
2692 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2693 We expect a null pointer to mark the end of the chain.
2694 This is the Lisp primitive `length'. */
2695
2696 int
2697 list_length (const_tree t)
2698 {
2699 const_tree p = t;
2700 #ifdef ENABLE_TREE_CHECKING
2701 const_tree q = t;
2702 #endif
2703 int len = 0;
2704
2705 while (p)
2706 {
2707 p = TREE_CHAIN (p);
2708 #ifdef ENABLE_TREE_CHECKING
2709 if (len % 2)
2710 q = TREE_CHAIN (q);
2711 gcc_assert (p != q);
2712 #endif
2713 len++;
2714 }
2715
2716 return len;
2717 }
2718
2719 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2720 UNION_TYPE TYPE, or NULL_TREE if none. */
2721
2722 tree
2723 first_field (const_tree type)
2724 {
2725 tree t = TYPE_FIELDS (type);
2726 while (t && TREE_CODE (t) != FIELD_DECL)
2727 t = TREE_CHAIN (t);
2728 return t;
2729 }
2730
2731 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2732 by modifying the last node in chain 1 to point to chain 2.
2733 This is the Lisp primitive `nconc'. */
2734
2735 tree
2736 chainon (tree op1, tree op2)
2737 {
2738 tree t1;
2739
2740 if (!op1)
2741 return op2;
2742 if (!op2)
2743 return op1;
2744
2745 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2746 continue;
2747 TREE_CHAIN (t1) = op2;
2748
2749 #ifdef ENABLE_TREE_CHECKING
2750 {
2751 tree t2;
2752 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2753 gcc_assert (t2 != t1);
2754 }
2755 #endif
2756
2757 return op1;
2758 }
2759
2760 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2761
2762 tree
2763 tree_last (tree chain)
2764 {
2765 tree next;
2766 if (chain)
2767 while ((next = TREE_CHAIN (chain)))
2768 chain = next;
2769 return chain;
2770 }
2771
2772 /* Reverse the order of elements in the chain T,
2773 and return the new head of the chain (old last element). */
2774
2775 tree
2776 nreverse (tree t)
2777 {
2778 tree prev = 0, decl, next;
2779 for (decl = t; decl; decl = next)
2780 {
2781 /* We shouldn't be using this function to reverse BLOCK chains; we
2782 have blocks_nreverse for that. */
2783 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2784 next = TREE_CHAIN (decl);
2785 TREE_CHAIN (decl) = prev;
2786 prev = decl;
2787 }
2788 return prev;
2789 }
2790 \f
2791 /* Return a newly created TREE_LIST node whose
2792 purpose and value fields are PARM and VALUE. */
2793
2794 tree
2795 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2796 {
2797 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2798 TREE_PURPOSE (t) = parm;
2799 TREE_VALUE (t) = value;
2800 return t;
2801 }
2802
2803 /* Build a chain of TREE_LIST nodes from a vector. */
2804
2805 tree
2806 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2807 {
2808 tree ret = NULL_TREE;
2809 tree *pp = &ret;
2810 unsigned int i;
2811 tree t;
2812 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2813 {
2814 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2815 pp = &TREE_CHAIN (*pp);
2816 }
2817 return ret;
2818 }
2819
2820 /* Return a newly created TREE_LIST node whose
2821 purpose and value fields are PURPOSE and VALUE
2822 and whose TREE_CHAIN is CHAIN. */
2823
2824 tree
2825 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2826 {
2827 tree node;
2828
2829 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2830 memset (node, 0, sizeof (struct tree_common));
2831
2832 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2833
2834 TREE_SET_CODE (node, TREE_LIST);
2835 TREE_CHAIN (node) = chain;
2836 TREE_PURPOSE (node) = purpose;
2837 TREE_VALUE (node) = value;
2838 return node;
2839 }
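
/* Illustrative sketch, not part of GCC: typical TREE_LIST chain handling
   with tree_cons, nreverse, chainon and list_length.  The example_* helper
   name is hypothetical; get_identifier is the stringpool entry point and
   integer_type_node is assumed to be available.  */

static void
example_tree_list_usage (void)
{
  /* Build the list ("a" . 1), ("b" . 2) by consing in reverse order and
     flipping it at the end, as front ends commonly do.  */
  tree list = NULL_TREE;
  list = tree_cons (get_identifier ("a"),
		    build_int_cst (integer_type_node, 1), list);
  list = tree_cons (get_identifier ("b"),
		    build_int_cst (integer_type_node, 2), list);
  list = nreverse (list);

  gcc_assert (list_length (list) == 2);
  gcc_assert (TREE_PURPOSE (list) == get_identifier ("a"));

  /* chainon splices a second chain onto the end of the first.  */
  tree more = build_tree_list (NULL_TREE,
			       build_int_cst (integer_type_node, 3));
  list = chainon (list, more);
  gcc_assert (list_length (list) == 3);
}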
2840
2841 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2842 trees. */
2843
2844 vec<tree, va_gc> *
2845 ctor_to_vec (tree ctor)
2846 {
2847 vec<tree, va_gc> *vec;
2848 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2849 unsigned int ix;
2850 tree val;
2851
2852 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2853 vec->quick_push (val);
2854
2855 return vec;
2856 }
2857 \f
2858 /* Return the size nominally occupied by an object of type TYPE
2859 when it resides in memory. The value is measured in units of bytes,
2860 and its data type is that normally used for type sizes
2861 (which is the first type created by make_signed_type or
2862 make_unsigned_type). */
2863
2864 tree
2865 size_in_bytes (const_tree type)
2866 {
2867 tree t;
2868
2869 if (type == error_mark_node)
2870 return integer_zero_node;
2871
2872 type = TYPE_MAIN_VARIANT (type);
2873 t = TYPE_SIZE_UNIT (type);
2874
2875 if (t == 0)
2876 {
2877 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2878 return size_zero_node;
2879 }
2880
2881 return t;
2882 }
2883
2884 /* Return the size of TYPE (in bytes) as a wide integer
2885 or return -1 if the size can vary or is larger than an integer. */
2886
2887 HOST_WIDE_INT
2888 int_size_in_bytes (const_tree type)
2889 {
2890 tree t;
2891
2892 if (type == error_mark_node)
2893 return 0;
2894
2895 type = TYPE_MAIN_VARIANT (type);
2896 t = TYPE_SIZE_UNIT (type);
2897
2898 if (t && tree_fits_uhwi_p (t))
2899 return TREE_INT_CST_LOW (t);
2900 else
2901 return -1;
2902 }
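
/* Illustrative sketch, not part of GCC: size_in_bytes vs int_size_in_bytes
   on a complete scalar type.  The example_* helper name is hypothetical;
   integer_type_node is assumed to be available.  */

static void
example_type_sizes (void)
{
  /* For a complete type the two interfaces agree: size_in_bytes returns
     the INTEGER_CST TYPE_SIZE_UNIT, int_size_in_bytes the same value as a
     HOST_WIDE_INT.  */
  tree sz = size_in_bytes (integer_type_node);
  HOST_WIDE_INT isz = int_size_in_bytes (integer_type_node);
  gcc_assert (tree_fits_uhwi_p (sz)
	      && (HOST_WIDE_INT) tree_to_uhwi (sz) == isz);

  /* An incomplete type makes int_size_in_bytes return -1 and sends
     size_in_bytes through the incomplete_type_error language hook;
     a variable-sized type likewise makes int_size_in_bytes return -1.  */
}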
2903
2904 /* Return the maximum size of TYPE (in bytes) as a wide integer
2905 or return -1 if the size can vary or is larger than an integer. */
2906
2907 HOST_WIDE_INT
2908 max_int_size_in_bytes (const_tree type)
2909 {
2910 HOST_WIDE_INT size = -1;
2911 tree size_tree;
2912
2913 /* If this is an array type, check for a possible MAX_SIZE attached. */
2914
2915 if (TREE_CODE (type) == ARRAY_TYPE)
2916 {
2917 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2918
2919 if (size_tree && tree_fits_uhwi_p (size_tree))
2920 size = tree_to_uhwi (size_tree);
2921 }
2922
2923 /* If we still haven't been able to get a size, see if the language
2924 can compute a maximum size. */
2925
2926 if (size == -1)
2927 {
2928 size_tree = lang_hooks.types.max_size (type);
2929
2930 if (size_tree && tree_fits_uhwi_p (size_tree))
2931 size = tree_to_uhwi (size_tree);
2932 }
2933
2934 return size;
2935 }
2936 \f
2937 /* Return the bit position of FIELD, in bits from the start of the record.
2938 This is a tree of type bitsizetype. */
2939
2940 tree
2941 bit_position (const_tree field)
2942 {
2943 return bit_from_pos (DECL_FIELD_OFFSET (field),
2944 DECL_FIELD_BIT_OFFSET (field));
2945 }
2946 \f
2947 /* Return the byte position of FIELD, in bytes from the start of the record.
2948 This is a tree of type sizetype. */
2949
2950 tree
2951 byte_position (const_tree field)
2952 {
2953 return byte_from_pos (DECL_FIELD_OFFSET (field),
2954 DECL_FIELD_BIT_OFFSET (field));
2955 }
2956
2957 /* Likewise, but return as an integer. It must be representable in
2958 that way (since it could be a signed value, we don't have the
2959 option of returning -1 like int_size_in_bytes can). */
2960
2961 HOST_WIDE_INT
2962 int_byte_position (const_tree field)
2963 {
2964 return tree_to_shwi (byte_position (field));
2965 }
2966 \f
2967 /* Return the strictest alignment, in bits, that T is known to have. */
2968
2969 unsigned int
2970 expr_align (const_tree t)
2971 {
2972 unsigned int align0, align1;
2973
2974 switch (TREE_CODE (t))
2975 {
2976 CASE_CONVERT: case NON_LVALUE_EXPR:
2977 /* If we have conversions, we know that the alignment of the
2978 object must meet each of the alignments of the types. */
2979 align0 = expr_align (TREE_OPERAND (t, 0));
2980 align1 = TYPE_ALIGN (TREE_TYPE (t));
2981 return MAX (align0, align1);
2982
2983 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2984 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2985 case CLEANUP_POINT_EXPR:
2986 /* These don't change the alignment of an object. */
2987 return expr_align (TREE_OPERAND (t, 0));
2988
2989 case COND_EXPR:
2990 /* The best we can do is say that the alignment is the least aligned
2991 of the two arms. */
2992 align0 = expr_align (TREE_OPERAND (t, 1));
2993 align1 = expr_align (TREE_OPERAND (t, 2));
2994 return MIN (align0, align1);
2995
2996 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2997 meaningfully; it's always 1. */
2998 case LABEL_DECL: case CONST_DECL:
2999 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3000 case FUNCTION_DECL:
3001 gcc_assert (DECL_ALIGN (t) != 0);
3002 return DECL_ALIGN (t);
3003
3004 default:
3005 break;
3006 }
3007
3008 /* Otherwise take the alignment from that of the type. */
3009 return TYPE_ALIGN (TREE_TYPE (t));
3010 }
3011 \f
3012 /* Return, as a tree node, the number of elements for TYPE (which is an
3013 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3014
3015 tree
3016 array_type_nelts (const_tree type)
3017 {
3018 tree index_type, min, max;
3019
3020 /* If they did it with unspecified bounds, then we should have already
3021 given an error about it before we got here. */
3022 if (! TYPE_DOMAIN (type))
3023 return error_mark_node;
3024
3025 index_type = TYPE_DOMAIN (type);
3026 min = TYPE_MIN_VALUE (index_type);
3027 max = TYPE_MAX_VALUE (index_type);
3028
3029 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3030 if (!max)
3031 return error_mark_node;
3032
3033 return (integer_zerop (min)
3034 ? max
3035 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3036 }
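
/* Illustrative sketch, not part of GCC: array_type_nelts on the type
   int[10].  The example_* helper name is hypothetical; build_index_type,
   build_array_type, size_int and integer_type_node are the standard
   interfaces and are assumed to be available.  */

static void
example_array_type_nelts (void)
{
  /* int[10] has the index domain [0, 9], so array_type_nelts returns 9,
     i.e. the number of elements minus one as documented above.  */
  tree domain = build_index_type (size_int (9));
  tree arr = build_array_type (integer_type_node, domain);
  gcc_assert (tree_to_uhwi (array_type_nelts (arr)) == 9);
}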
3037 \f
3038 /* If arg is static -- a reference to an object in static storage -- then
3039 return the object. This is not the same as the C meaning of `static'.
3040 If arg isn't static, return NULL. */
3041
3042 tree
3043 staticp (tree arg)
3044 {
3045 switch (TREE_CODE (arg))
3046 {
3047 case FUNCTION_DECL:
3048 /* Nested functions are static, even though taking their address will
3049 involve a trampoline as we unnest the nested function and create
3050 the trampoline on the tree level. */
3051 return arg;
3052
3053 case VAR_DECL:
3054 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3055 && ! DECL_THREAD_LOCAL_P (arg)
3056 && ! DECL_DLLIMPORT_P (arg)
3057 ? arg : NULL);
3058
3059 case CONST_DECL:
3060 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3061 ? arg : NULL);
3062
3063 case CONSTRUCTOR:
3064 return TREE_STATIC (arg) ? arg : NULL;
3065
3066 case LABEL_DECL:
3067 case STRING_CST:
3068 return arg;
3069
3070 case COMPONENT_REF:
3071 /* If the thing being referenced is not a field, then it is
3072 something language specific. */
3073 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3074
3075 /* If we are referencing a bitfield, we can't evaluate an
3076 ADDR_EXPR at compile time and so it isn't a constant. */
3077 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3078 return NULL;
3079
3080 return staticp (TREE_OPERAND (arg, 0));
3081
3082 case BIT_FIELD_REF:
3083 return NULL;
3084
3085 case INDIRECT_REF:
3086 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3087
3088 case ARRAY_REF:
3089 case ARRAY_RANGE_REF:
3090 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3091 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3092 return staticp (TREE_OPERAND (arg, 0));
3093 else
3094 return NULL;
3095
3096 case COMPOUND_LITERAL_EXPR:
3097 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3098
3099 default:
3100 return NULL;
3101 }
3102 }
3103
3104 \f
3105
3106
3107 /* Return whether OP is a DECL whose address is function-invariant. */
3108
3109 bool
3110 decl_address_invariant_p (const_tree op)
3111 {
3112 /* The conditions below are slightly less strict than the one in
3113 staticp. */
3114
3115 switch (TREE_CODE (op))
3116 {
3117 case PARM_DECL:
3118 case RESULT_DECL:
3119 case LABEL_DECL:
3120 case FUNCTION_DECL:
3121 return true;
3122
3123 case VAR_DECL:
3124 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3125 || DECL_THREAD_LOCAL_P (op)
3126 || DECL_CONTEXT (op) == current_function_decl
3127 || decl_function_context (op) == current_function_decl)
3128 return true;
3129 break;
3130
3131 case CONST_DECL:
3132 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3133 || decl_function_context (op) == current_function_decl)
3134 return true;
3135 break;
3136
3137 default:
3138 break;
3139 }
3140
3141 return false;
3142 }
3143
3144 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3145
3146 bool
3147 decl_address_ip_invariant_p (const_tree op)
3148 {
3149 /* The conditions below are slightly less strict than the one in
3150 staticp. */
3151
3152 switch (TREE_CODE (op))
3153 {
3154 case LABEL_DECL:
3155 case FUNCTION_DECL:
3156 case STRING_CST:
3157 return true;
3158
3159 case VAR_DECL:
3160 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3161 && !DECL_DLLIMPORT_P (op))
3162 || DECL_THREAD_LOCAL_P (op))
3163 return true;
3164 break;
3165
3166 case CONST_DECL:
3167 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3168 return true;
3169 break;
3170
3171 default:
3172 break;
3173 }
3174
3175 return false;
3176 }
3177
3178
3179 /* Return true if T is function-invariant (internal function, does
3180 not handle arithmetic; that's handled in skip_simple_arithmetic and
3181 tree_invariant_p). */
3182
3183 static bool tree_invariant_p (tree t);
3184
3185 static bool
3186 tree_invariant_p_1 (tree t)
3187 {
3188 tree op;
3189
3190 if (TREE_CONSTANT (t)
3191 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3192 return true;
3193
3194 switch (TREE_CODE (t))
3195 {
3196 case SAVE_EXPR:
3197 return true;
3198
3199 case ADDR_EXPR:
3200 op = TREE_OPERAND (t, 0);
3201 while (handled_component_p (op))
3202 {
3203 switch (TREE_CODE (op))
3204 {
3205 case ARRAY_REF:
3206 case ARRAY_RANGE_REF:
3207 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3208 || TREE_OPERAND (op, 2) != NULL_TREE
3209 || TREE_OPERAND (op, 3) != NULL_TREE)
3210 return false;
3211 break;
3212
3213 case COMPONENT_REF:
3214 if (TREE_OPERAND (op, 2) != NULL_TREE)
3215 return false;
3216 break;
3217
3218 default:;
3219 }
3220 op = TREE_OPERAND (op, 0);
3221 }
3222
3223 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3224
3225 default:
3226 break;
3227 }
3228
3229 return false;
3230 }
3231
3232 /* Return true if T is function-invariant. */
3233
3234 static bool
3235 tree_invariant_p (tree t)
3236 {
3237 tree inner = skip_simple_arithmetic (t);
3238 return tree_invariant_p_1 (inner);
3239 }
3240
3241 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3242 Do this to any expression which may be used in more than one place,
3243 but must be evaluated only once.
3244
3245 Normally, expand_expr would reevaluate the expression each time.
3246 Calling save_expr produces something that is evaluated and recorded
3247 the first time expand_expr is called on it. Subsequent calls to
3248 expand_expr just reuse the recorded value.
3249
3250 The call to expand_expr that generates code that actually computes
3251 the value is the first call *at compile time*. Subsequent calls
3252 *at compile time* generate code to use the saved value.
3253 This produces correct result provided that *at run time* control
3254 always flows through the insns made by the first expand_expr
3255 before reaching the other places where the save_expr was evaluated.
3256 You, the caller of save_expr, must make sure this is so.
3257
3258 Constants, and certain read-only nodes, are returned with no
3259 SAVE_EXPR because that is safe. Expressions containing placeholders
3260 are not touched; see tree.def for an explanation of what these
3261 are used for. */
3262
3263 tree
3264 save_expr (tree expr)
3265 {
3266 tree t = fold (expr);
3267 tree inner;
3268
3269 /* If the tree evaluates to a constant, then we don't want to hide that
3270 fact (i.e. this allows further folding, and direct checks for constants).
3271 However, a read-only object that has side effects cannot be bypassed.
3272 Since it is no problem to reevaluate literals, we just return the
3273 literal node. */
3274 inner = skip_simple_arithmetic (t);
3275 if (TREE_CODE (inner) == ERROR_MARK)
3276 return inner;
3277
3278 if (tree_invariant_p_1 (inner))
3279 return t;
3280
3281 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3282 it means that the size or offset of some field of an object depends on
3283 the value within another field.
3284
3285 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3286 and some variable since it would then need to be both evaluated once and
3287 evaluated more than once. Front-ends must assure this case cannot
3288 happen by surrounding any such subexpressions in their own SAVE_EXPR
3289 and forcing evaluation at the proper time. */
3290 if (contains_placeholder_p (inner))
3291 return t;
3292
3293 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3294 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3295
3296 /* This expression might be placed ahead of a jump to ensure that the
3297 value was computed on both sides of the jump. So make sure it isn't
3298 eliminated as dead. */
3299 TREE_SIDE_EFFECTS (t) = 1;
3300 return t;
3301 }
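
/* Illustrative sketch, not part of GCC: the intended use of save_expr.
   The example_* helper name is hypothetical, and X and Y stand for trees
   supplied by a front end (say, VAR_DECLs of integer_type_node); they are
   parameters here only because this is an illustration.  */

static tree
example_save_expr (tree x, tree y)
{
  /* Compute (x + y) * (x + y): wrap the shared subexpression in a
     SAVE_EXPR so it is evaluated only once even though it is used twice.
     If x + y folds to a constant, save_expr simply returns it unwrapped.  */
  tree sum = save_expr (fold_build2 (PLUS_EXPR, integer_type_node, x, y));
  return fold_build2 (MULT_EXPR, integer_type_node, sum, sum);
}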
3302
3303 /* Look inside EXPR into any simple arithmetic operations. Return the
3304 outermost non-arithmetic or non-invariant node. */
3305
3306 tree
3307 skip_simple_arithmetic (tree expr)
3308 {
3309 /* We don't care about whether this can be used as an lvalue in this
3310 context. */
3311 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3312 expr = TREE_OPERAND (expr, 0);
3313
3314 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3315 a constant, it will be more efficient to not make another SAVE_EXPR since
3316 it will allow better simplification and GCSE will be able to merge the
3317 computations if they actually occur. */
3318 while (true)
3319 {
3320 if (UNARY_CLASS_P (expr))
3321 expr = TREE_OPERAND (expr, 0);
3322 else if (BINARY_CLASS_P (expr))
3323 {
3324 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3325 expr = TREE_OPERAND (expr, 0);
3326 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3327 expr = TREE_OPERAND (expr, 1);
3328 else
3329 break;
3330 }
3331 else
3332 break;
3333 }
3334
3335 return expr;
3336 }
3337
3338 /* Look inside EXPR into simple arithmetic operations involving constants.
3339 Return the outermost non-arithmetic or non-constant node. */
3340
3341 tree
3342 skip_simple_constant_arithmetic (tree expr)
3343 {
3344 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3345 expr = TREE_OPERAND (expr, 0);
3346
3347 while (true)
3348 {
3349 if (UNARY_CLASS_P (expr))
3350 expr = TREE_OPERAND (expr, 0);
3351 else if (BINARY_CLASS_P (expr))
3352 {
3353 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3354 expr = TREE_OPERAND (expr, 0);
3355 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3356 expr = TREE_OPERAND (expr, 1);
3357 else
3358 break;
3359 }
3360 else
3361 break;
3362 }
3363
3364 return expr;
3365 }
3366
3367 /* Return which tree structure is used by T. */
3368
3369 enum tree_node_structure_enum
3370 tree_node_structure (const_tree t)
3371 {
3372 const enum tree_code code = TREE_CODE (t);
3373 return tree_node_structure_for_code (code);
3374 }
3375
3376 /* Set various status flags when building a CALL_EXPR object T. */
3377
3378 static void
3379 process_call_operands (tree t)
3380 {
3381 bool side_effects = TREE_SIDE_EFFECTS (t);
3382 bool read_only = false;
3383 int i = call_expr_flags (t);
3384
3385 /* Calls have side-effects, except those to const or pure functions. */
3386 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3387 side_effects = true;
3388 /* Propagate TREE_READONLY of arguments for const functions. */
3389 if (i & ECF_CONST)
3390 read_only = true;
3391
3392 if (!side_effects || read_only)
3393 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3394 {
3395 tree op = TREE_OPERAND (t, i);
3396 if (op && TREE_SIDE_EFFECTS (op))
3397 side_effects = true;
3398 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3399 read_only = false;
3400 }
3401
3402 TREE_SIDE_EFFECTS (t) = side_effects;
3403 TREE_READONLY (t) = read_only;
3404 }
3405 \f
3406 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3407 size or offset that depends on a field within a record. */
3408
3409 bool
3410 contains_placeholder_p (const_tree exp)
3411 {
3412 enum tree_code code;
3413
3414 if (!exp)
3415 return 0;
3416
3417 code = TREE_CODE (exp);
3418 if (code == PLACEHOLDER_EXPR)
3419 return 1;
3420
3421 switch (TREE_CODE_CLASS (code))
3422 {
3423 case tcc_reference:
3424 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3425 position computations since they will be converted into a
3426 WITH_RECORD_EXPR involving the reference, which we assume
3427 here will be valid. */
3428 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3429
3430 case tcc_exceptional:
3431 if (code == TREE_LIST)
3432 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3433 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3434 break;
3435
3436 case tcc_unary:
3437 case tcc_binary:
3438 case tcc_comparison:
3439 case tcc_expression:
3440 switch (code)
3441 {
3442 case COMPOUND_EXPR:
3443 /* Ignoring the first operand isn't quite right, but works best. */
3444 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3445
3446 case COND_EXPR:
3447 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3448 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3449 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3450
3451 case SAVE_EXPR:
3452 /* The save_expr function never wraps anything containing
3453 a PLACEHOLDER_EXPR. */
3454 return 0;
3455
3456 default:
3457 break;
3458 }
3459
3460 switch (TREE_CODE_LENGTH (code))
3461 {
3462 case 1:
3463 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3464 case 2:
3465 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3466 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3467 default:
3468 return 0;
3469 }
3470
3471 case tcc_vl_exp:
3472 switch (code)
3473 {
3474 case CALL_EXPR:
3475 {
3476 const_tree arg;
3477 const_call_expr_arg_iterator iter;
3478 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3479 if (CONTAINS_PLACEHOLDER_P (arg))
3480 return 1;
3481 return 0;
3482 }
3483 default:
3484 return 0;
3485 }
3486
3487 default:
3488 return 0;
3489 }
3490 return 0;
3491 }
3492
3493 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3494 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3495 field positions. */
3496
3497 static bool
3498 type_contains_placeholder_1 (const_tree type)
3499 {
3500 /* If the size contains a placeholder or the parent type (the component
3501 type in the case of arrays) involves a placeholder, this type does. */
3502 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3503 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3504 || (!POINTER_TYPE_P (type)
3505 && TREE_TYPE (type)
3506 && type_contains_placeholder_p (TREE_TYPE (type))))
3507 return true;
3508
3509 /* Now do type-specific checks. Note that the last part of the check above
3510 greatly limits what we have to do below. */
3511 switch (TREE_CODE (type))
3512 {
3513 case VOID_TYPE:
3514 case POINTER_BOUNDS_TYPE:
3515 case COMPLEX_TYPE:
3516 case ENUMERAL_TYPE:
3517 case BOOLEAN_TYPE:
3518 case POINTER_TYPE:
3519 case OFFSET_TYPE:
3520 case REFERENCE_TYPE:
3521 case METHOD_TYPE:
3522 case FUNCTION_TYPE:
3523 case VECTOR_TYPE:
3524 case NULLPTR_TYPE:
3525 return false;
3526
3527 case INTEGER_TYPE:
3528 case REAL_TYPE:
3529 case FIXED_POINT_TYPE:
3530 /* Here we just check the bounds. */
3531 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3532 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3533
3534 case ARRAY_TYPE:
3535 /* We have already checked the component type above, so just check the
3536 domain type. */
3537 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3538
3539 case RECORD_TYPE:
3540 case UNION_TYPE:
3541 case QUAL_UNION_TYPE:
3542 {
3543 tree field;
3544
3545 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3546 if (TREE_CODE (field) == FIELD_DECL
3547 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3548 || (TREE_CODE (type) == QUAL_UNION_TYPE
3549 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3550 || type_contains_placeholder_p (TREE_TYPE (field))))
3551 return true;
3552
3553 return false;
3554 }
3555
3556 default:
3557 gcc_unreachable ();
3558 }
3559 }
3560
3561 /* Wrapper around above function used to cache its result. */
3562
3563 bool
3564 type_contains_placeholder_p (tree type)
3565 {
3566 bool result;
3567
3568 /* If the contains_placeholder_bits field has been initialized,
3569 then we know the answer. */
3570 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3571 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3572
3573 /* Indicate that we've seen this type node, and the answer is false.
3574 This is what we want to return if we run into recursion via fields. */
3575 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3576
3577 /* Compute the real value. */
3578 result = type_contains_placeholder_1 (type);
3579
3580 /* Store the real value. */
3581 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3582
3583 return result;
3584 }
3585 \f
3586 /* Push tree EXP onto vector QUEUE if it is not already present. */
3587
3588 static void
3589 push_without_duplicates (tree exp, vec<tree> *queue)
3590 {
3591 unsigned int i;
3592 tree iter;
3593
3594 FOR_EACH_VEC_ELT (*queue, i, iter)
3595 if (simple_cst_equal (iter, exp) == 1)
3596 break;
3597
3598 if (!iter)
3599 queue->safe_push (exp);
3600 }
3601
3602 /* Given a tree EXP, find all occurrences of references to fields
3603 in a PLACEHOLDER_EXPR and place them in vector REFS without
3604 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3605 we assume here that EXP contains only arithmetic expressions
3606 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3607 argument list. */
3608
3609 void
3610 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3611 {
3612 enum tree_code code = TREE_CODE (exp);
3613 tree inner;
3614 int i;
3615
3616 /* We handle TREE_LIST and COMPONENT_REF separately. */
3617 if (code == TREE_LIST)
3618 {
3619 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3620 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3621 }
3622 else if (code == COMPONENT_REF)
3623 {
3624 for (inner = TREE_OPERAND (exp, 0);
3625 REFERENCE_CLASS_P (inner);
3626 inner = TREE_OPERAND (inner, 0))
3627 ;
3628
3629 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3630 push_without_duplicates (exp, refs);
3631 else
3632 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3633 }
3634 else
3635 switch (TREE_CODE_CLASS (code))
3636 {
3637 case tcc_constant:
3638 break;
3639
3640 case tcc_declaration:
3641 /* Variables allocated to static storage can stay. */
3642 if (!TREE_STATIC (exp))
3643 push_without_duplicates (exp, refs);
3644 break;
3645
3646 case tcc_expression:
3647 /* This is the pattern built in ada/make_aligning_type. */
3648 if (code == ADDR_EXPR
3649 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3650 {
3651 push_without_duplicates (exp, refs);
3652 break;
3653 }
3654
3655 /* Fall through... */
3656
3657 case tcc_exceptional:
3658 case tcc_unary:
3659 case tcc_binary:
3660 case tcc_comparison:
3661 case tcc_reference:
3662 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3663 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3664 break;
3665
3666 case tcc_vl_exp:
3667 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3668 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3669 break;
3670
3671 default:
3672 gcc_unreachable ();
3673 }
3674 }
3675
3676 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3677 return a tree with all occurrences of references to F in a
3678 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3679 CONST_DECLs. Note that we assume here that EXP contains only
3680 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3681 occurring only in their argument list. */
3682
3683 tree
3684 substitute_in_expr (tree exp, tree f, tree r)
3685 {
3686 enum tree_code code = TREE_CODE (exp);
3687 tree op0, op1, op2, op3;
3688 tree new_tree;
3689
3690 /* We handle TREE_LIST and COMPONENT_REF separately. */
3691 if (code == TREE_LIST)
3692 {
3693 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3694 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3695 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3696 return exp;
3697
3698 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3699 }
3700 else if (code == COMPONENT_REF)
3701 {
3702 tree inner;
3703
3704 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3705 and it is the right field, replace it with R. */
3706 for (inner = TREE_OPERAND (exp, 0);
3707 REFERENCE_CLASS_P (inner);
3708 inner = TREE_OPERAND (inner, 0))
3709 ;
3710
3711 /* The field. */
3712 op1 = TREE_OPERAND (exp, 1);
3713
3714 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3715 return r;
3716
3717 /* If this expression hasn't been completed yet, leave it alone. */
3718 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3719 return exp;
3720
3721 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3722 if (op0 == TREE_OPERAND (exp, 0))
3723 return exp;
3724
3725 new_tree
3726 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3727 }
3728 else
3729 switch (TREE_CODE_CLASS (code))
3730 {
3731 case tcc_constant:
3732 return exp;
3733
3734 case tcc_declaration:
3735 if (exp == f)
3736 return r;
3737 else
3738 return exp;
3739
3740 case tcc_expression:
3741 if (exp == f)
3742 return r;
3743
3744 /* Fall through... */
3745
3746 case tcc_exceptional:
3747 case tcc_unary:
3748 case tcc_binary:
3749 case tcc_comparison:
3750 case tcc_reference:
3751 switch (TREE_CODE_LENGTH (code))
3752 {
3753 case 0:
3754 return exp;
3755
3756 case 1:
3757 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3758 if (op0 == TREE_OPERAND (exp, 0))
3759 return exp;
3760
3761 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3762 break;
3763
3764 case 2:
3765 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3766 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3767
3768 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3769 return exp;
3770
3771 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3772 break;
3773
3774 case 3:
3775 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3776 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3777 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3778
3779 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3780 && op2 == TREE_OPERAND (exp, 2))
3781 return exp;
3782
3783 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3784 break;
3785
3786 case 4:
3787 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3788 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3789 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3790 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3791
3792 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3793 && op2 == TREE_OPERAND (exp, 2)
3794 && op3 == TREE_OPERAND (exp, 3))
3795 return exp;
3796
3797 new_tree
3798 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3799 break;
3800
3801 default:
3802 gcc_unreachable ();
3803 }
3804 break;
3805
3806 case tcc_vl_exp:
3807 {
3808 int i;
3809
3810 new_tree = NULL_TREE;
3811
3812 /* If we are trying to replace F with a constant, inline back
3813 functions which do nothing other than compute a value from
3814 the arguments they are passed. This makes it possible to
3815 fold partially or entirely the replacement expression. */
3816 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3817 {
3818 tree t = maybe_inline_call_in_expr (exp);
3819 if (t)
3820 return SUBSTITUTE_IN_EXPR (t, f, r);
3821 }
3822
3823 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3824 {
3825 tree op = TREE_OPERAND (exp, i);
3826 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3827 if (new_op != op)
3828 {
3829 if (!new_tree)
3830 new_tree = copy_node (exp);
3831 TREE_OPERAND (new_tree, i) = new_op;
3832 }
3833 }
3834
3835 if (new_tree)
3836 {
3837 new_tree = fold (new_tree);
3838 if (TREE_CODE (new_tree) == CALL_EXPR)
3839 process_call_operands (new_tree);
3840 }
3841 else
3842 return exp;
3843 }
3844 break;
3845
3846 default:
3847 gcc_unreachable ();
3848 }
3849
3850 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3851
3852 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3853 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3854
3855 return new_tree;
3856 }
3857
3858 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3859 for it within OBJ, a tree that is an object or a chain of references. */
3860
3861 tree
3862 substitute_placeholder_in_expr (tree exp, tree obj)
3863 {
3864 enum tree_code code = TREE_CODE (exp);
3865 tree op0, op1, op2, op3;
3866 tree new_tree;
3867
3868 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3869 in the chain of OBJ. */
3870 if (code == PLACEHOLDER_EXPR)
3871 {
3872 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3873 tree elt;
3874
3875 for (elt = obj; elt != 0;
3876 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3877 || TREE_CODE (elt) == COND_EXPR)
3878 ? TREE_OPERAND (elt, 1)
3879 : (REFERENCE_CLASS_P (elt)
3880 || UNARY_CLASS_P (elt)
3881 || BINARY_CLASS_P (elt)
3882 || VL_EXP_CLASS_P (elt)
3883 || EXPRESSION_CLASS_P (elt))
3884 ? TREE_OPERAND (elt, 0) : 0))
3885 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3886 return elt;
3887
3888 for (elt = obj; elt != 0;
3889 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3890 || TREE_CODE (elt) == COND_EXPR)
3891 ? TREE_OPERAND (elt, 1)
3892 : (REFERENCE_CLASS_P (elt)
3893 || UNARY_CLASS_P (elt)
3894 || BINARY_CLASS_P (elt)
3895 || VL_EXP_CLASS_P (elt)
3896 || EXPRESSION_CLASS_P (elt))
3897 ? TREE_OPERAND (elt, 0) : 0))
3898 if (POINTER_TYPE_P (TREE_TYPE (elt))
3899 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3900 == need_type))
3901 return fold_build1 (INDIRECT_REF, need_type, elt);
3902
3903 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3904 survives until RTL generation, there will be an error. */
3905 return exp;
3906 }
3907
3908 /* TREE_LIST is special because we need to look at TREE_VALUE
3909 and TREE_CHAIN, not TREE_OPERANDS. */
3910 else if (code == TREE_LIST)
3911 {
3912 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3913 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3914 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3915 return exp;
3916
3917 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3918 }
3919 else
3920 switch (TREE_CODE_CLASS (code))
3921 {
3922 case tcc_constant:
3923 case tcc_declaration:
3924 return exp;
3925
3926 case tcc_exceptional:
3927 case tcc_unary:
3928 case tcc_binary:
3929 case tcc_comparison:
3930 case tcc_expression:
3931 case tcc_reference:
3932 case tcc_statement:
3933 switch (TREE_CODE_LENGTH (code))
3934 {
3935 case 0:
3936 return exp;
3937
3938 case 1:
3939 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3940 if (op0 == TREE_OPERAND (exp, 0))
3941 return exp;
3942
3943 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3944 break;
3945
3946 case 2:
3947 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3948 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3949
3950 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3951 return exp;
3952
3953 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3954 break;
3955
3956 case 3:
3957 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3958 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3959 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3960
3961 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3962 && op2 == TREE_OPERAND (exp, 2))
3963 return exp;
3964
3965 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3966 break;
3967
3968 case 4:
3969 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3970 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3971 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3972 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3973
3974 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3975 && op2 == TREE_OPERAND (exp, 2)
3976 && op3 == TREE_OPERAND (exp, 3))
3977 return exp;
3978
3979 new_tree
3980 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3981 break;
3982
3983 default:
3984 gcc_unreachable ();
3985 }
3986 break;
3987
3988 case tcc_vl_exp:
3989 {
3990 int i;
3991
3992 new_tree = NULL_TREE;
3993
3994 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3995 {
3996 tree op = TREE_OPERAND (exp, i);
3997 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3998 if (new_op != op)
3999 {
4000 if (!new_tree)
4001 new_tree = copy_node (exp);
4002 TREE_OPERAND (new_tree, i) = new_op;
4003 }
4004 }
4005
4006 if (new_tree)
4007 {
4008 new_tree = fold (new_tree);
4009 if (TREE_CODE (new_tree) == CALL_EXPR)
4010 process_call_operands (new_tree);
4011 }
4012 else
4013 return exp;
4014 }
4015 break;
4016
4017 default:
4018 gcc_unreachable ();
4019 }
4020
4021 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4022
4023 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4024 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4025
4026 return new_tree;
4027 }
4028 \f
4029
4030 /* Subroutine of stabilize_reference; this is called for subtrees of
4031 references. Any expression with side-effects must be put in a SAVE_EXPR
4032 to ensure that it is only evaluated once.
4033
4034 We don't put SAVE_EXPR nodes around everything, because assigning very
4035 simple expressions to temporaries causes us to miss good opportunities
4036 for optimizations. Among other things, the opportunity to fold in the
4037 addition of a constant into an addressing mode often gets lost, e.g.
4038 "y[i+1] += x;". In general, we take the approach that we should not make
4039 an assignment unless we are forced into it - i.e., that any non-side effect
4040 operator should be allowed, and that cse should take care of coalescing
4041 multiple utterances of the same expression should that prove fruitful. */
4042
4043 static tree
4044 stabilize_reference_1 (tree e)
4045 {
4046 tree result;
4047 enum tree_code code = TREE_CODE (e);
4048
4049 /* We cannot ignore const expressions because it might be a reference
4050 to a const array whose index contains side-effects. But we can
4051 ignore things that are actual constants or that have already been
4052 handled by this function. */
4053
4054 if (tree_invariant_p (e))
4055 return e;
4056
4057 switch (TREE_CODE_CLASS (code))
4058 {
4059 case tcc_exceptional:
4060 case tcc_type:
4061 case tcc_declaration:
4062 case tcc_comparison:
4063 case tcc_statement:
4064 case tcc_expression:
4065 case tcc_reference:
4066 case tcc_vl_exp:
4067 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4068 so that it will only be evaluated once. */
4069 /* The reference (r) and comparison (<) classes could be handled as
4070 below, but it is generally faster to only evaluate them once. */
4071 if (TREE_SIDE_EFFECTS (e))
4072 return save_expr (e);
4073 return e;
4074
4075 case tcc_constant:
4076 /* Constants need no processing. In fact, we should never reach
4077 here. */
4078 return e;
4079
4080 case tcc_binary:
4081 /* Division is slow and tends to be compiled with jumps,
4082 especially the division by powers of 2 that is often
4083 found inside of an array reference. So do it just once. */
4084 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4085 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4086 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4087 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4088 return save_expr (e);
4089 /* Recursively stabilize each operand. */
4090 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4091 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4092 break;
4093
4094 case tcc_unary:
4095 /* Recursively stabilize each operand. */
4096 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4097 break;
4098
4099 default:
4100 gcc_unreachable ();
4101 }
4102
4103 TREE_TYPE (result) = TREE_TYPE (e);
4104 TREE_READONLY (result) = TREE_READONLY (e);
4105 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4106 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4107
4108 return result;
4109 }
4110
4111 /* Stabilize a reference so that we can use it any number of times
4112 without causing its operands to be evaluated more than once.
4113 Returns the stabilized reference. This works by means of save_expr,
4114 so see the caveats in the comments about save_expr.
4115
4116 Also allows conversion expressions whose operands are references.
4117 Any other kind of expression is returned unchanged. */
4118
4119 tree
4120 stabilize_reference (tree ref)
4121 {
4122 tree result;
4123 enum tree_code code = TREE_CODE (ref);
4124
4125 switch (code)
4126 {
4127 case VAR_DECL:
4128 case PARM_DECL:
4129 case RESULT_DECL:
4130 /* No action is needed in this case. */
4131 return ref;
4132
4133 CASE_CONVERT:
4134 case FLOAT_EXPR:
4135 case FIX_TRUNC_EXPR:
4136 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4137 break;
4138
4139 case INDIRECT_REF:
4140 result = build_nt (INDIRECT_REF,
4141 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4142 break;
4143
4144 case COMPONENT_REF:
4145 result = build_nt (COMPONENT_REF,
4146 stabilize_reference (TREE_OPERAND (ref, 0)),
4147 TREE_OPERAND (ref, 1), NULL_TREE);
4148 break;
4149
4150 case BIT_FIELD_REF:
4151 result = build_nt (BIT_FIELD_REF,
4152 stabilize_reference (TREE_OPERAND (ref, 0)),
4153 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4154 break;
4155
4156 case ARRAY_REF:
4157 result = build_nt (ARRAY_REF,
4158 stabilize_reference (TREE_OPERAND (ref, 0)),
4159 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4160 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4161 break;
4162
4163 case ARRAY_RANGE_REF:
4164 result = build_nt (ARRAY_RANGE_REF,
4165 stabilize_reference (TREE_OPERAND (ref, 0)),
4166 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4167 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4168 break;
4169
4170 case COMPOUND_EXPR:
4171 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4172 it wouldn't be ignored. This matters when dealing with
4173 volatiles. */
4174 return stabilize_reference_1 (ref);
4175
4176 /* If arg isn't a kind of lvalue we recognize, make no change.
4177 Caller should recognize the error for an invalid lvalue. */
4178 default:
4179 return ref;
4180
4181 case ERROR_MARK:
4182 return error_mark_node;
4183 }
4184
4185 TREE_TYPE (result) = TREE_TYPE (ref);
4186 TREE_READONLY (result) = TREE_READONLY (ref);
4187 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4188 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4189
4190 return result;
4191 }
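/* A minimal usage sketch, assuming REF is a GENERIC lvalue built by a
   front end; the helper name and variables are hypothetical and the block
   is guarded with #if 0 because it is illustrative only.  */
#if 0
static tree
example_use_lvalue_twice (tree ref)
{
  /* Wrap any side-effecting subexpressions of REF in SAVE_EXPRs so that
     reading and then writing through it evaluates them only once.  */
  tree stable = stabilize_reference (ref);

  /* STABLE can now appear on both sides of a MODIFY_EXPR, as needed for
     a compound assignment such as "ref += 1".  */
  tree one = build_int_cst (TREE_TYPE (stable), 1);
  tree rhs = build2 (PLUS_EXPR, TREE_TYPE (stable), stable, one);
  return build2 (MODIFY_EXPR, TREE_TYPE (stable), stable, rhs);
}
#endif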
4192 \f
4193 /* Low-level constructors for expressions. */
4194
4195 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4196 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4197
4198 void
4199 recompute_tree_invariant_for_addr_expr (tree t)
4200 {
4201 tree node;
4202 bool tc = true, se = false;
4203
4204 /* We start out assuming this address is both invariant and constant and
4205 that it has no side effects. Now go down any handled components and see if
4206 any of them involve offsets that are either non-constant or non-invariant.
4207 Also check for side-effects.
4208
4209 ??? Note that this code makes no attempt to deal with the case where
4210 taking the address of something causes a copy due to misalignment. */
4211
4212 #define UPDATE_FLAGS(NODE) \
4213 do { tree _node = (NODE); \
4214 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4215 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4216
4217 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4218 node = TREE_OPERAND (node, 0))
4219 {
4220 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4221 array reference (probably made temporarily by the G++ front end),
4222 so ignore all the operands. */
4223 if ((TREE_CODE (node) == ARRAY_REF
4224 || TREE_CODE (node) == ARRAY_RANGE_REF)
4225 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4226 {
4227 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4228 if (TREE_OPERAND (node, 2))
4229 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4230 if (TREE_OPERAND (node, 3))
4231 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4232 }
4233 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4234 FIELD_DECL, apparently. The G++ front end can put something else
4235 there, at least temporarily. */
4236 else if (TREE_CODE (node) == COMPONENT_REF
4237 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4238 {
4239 if (TREE_OPERAND (node, 2))
4240 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4241 }
4242 }
4243
4244 node = lang_hooks.expr_to_decl (node, &tc, &se);
4245
4246 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4247 the address, since &(*a)->b is a form of addition. If it's a constant, the
4248 address is constant too. If it's a decl, its address is constant if the
4249 decl is static. Everything else is not constant and, furthermore,
4250 taking the address of a volatile variable is not volatile. */
4251 if (TREE_CODE (node) == INDIRECT_REF
4252 || TREE_CODE (node) == MEM_REF)
4253 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4254 else if (CONSTANT_CLASS_P (node))
4255 ;
4256 else if (DECL_P (node))
4257 tc &= (staticp (node) != NULL_TREE);
4258 else
4259 {
4260 tc = false;
4261 se |= TREE_SIDE_EFFECTS (node);
4262 }
4263
4264
4265 TREE_CONSTANT (t) = tc;
4266 TREE_SIDE_EFFECTS (t) = se;
4267 #undef UPDATE_FLAGS
4268 }
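/* A minimal sketch of why this function exists: if an ADDR_EXPR operand is
   rewritten in place (the helper below is hypothetical), TREE_CONSTANT and
   TREE_SIDE_EFFECTS must be recomputed by hand.  Guarded with #if 0; this
   is illustration only.  */
#if 0
static void
example_retarget_addr_expr (tree addr, tree new_object)
{
  gcc_assert (TREE_CODE (addr) == ADDR_EXPR);
  TREE_OPERAND (addr, 0) = new_object;
  /* build1 would have done this for a freshly built ADDR_EXPR; after an
     in-place change we have to redo it ourselves.  */
  recompute_tree_invariant_for_addr_expr (addr);
}
#endif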
4269
4270 /* Build an expression of code CODE, data type TYPE, and operands as
4271 specified. Expressions and reference nodes can be created this way.
4272 Constants, decls, types and misc nodes cannot be.
4273
4274 We define six non-variadic functions, from 0 to 5 arguments. This is
4275 enough for all extant tree codes. */
4276
4277 tree
4278 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4279 {
4280 tree t;
4281
4282 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4283
4284 t = make_node_stat (code PASS_MEM_STAT);
4285 TREE_TYPE (t) = tt;
4286
4287 return t;
4288 }
4289
4290 tree
4291 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4292 {
4293 int length = sizeof (struct tree_exp);
4294 tree t;
4295
4296 record_node_allocation_statistics (code, length);
4297
4298 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4299
4300 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4301
4302 memset (t, 0, sizeof (struct tree_common));
4303
4304 TREE_SET_CODE (t, code);
4305
4306 TREE_TYPE (t) = type;
4307 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4308 TREE_OPERAND (t, 0) = node;
4309 if (node && !TYPE_P (node))
4310 {
4311 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4312 TREE_READONLY (t) = TREE_READONLY (node);
4313 }
4314
4315 if (TREE_CODE_CLASS (code) == tcc_statement)
4316 TREE_SIDE_EFFECTS (t) = 1;
4317 else switch (code)
4318 {
4319 case VA_ARG_EXPR:
4320 /* All of these have side-effects, no matter what their
4321 operands are. */
4322 TREE_SIDE_EFFECTS (t) = 1;
4323 TREE_READONLY (t) = 0;
4324 break;
4325
4326 case INDIRECT_REF:
4327 /* Whether a dereference is readonly has nothing to do with whether
4328 its operand is readonly. */
4329 TREE_READONLY (t) = 0;
4330 break;
4331
4332 case ADDR_EXPR:
4333 if (node)
4334 recompute_tree_invariant_for_addr_expr (t);
4335 break;
4336
4337 default:
4338 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4339 && node && !TYPE_P (node)
4340 && TREE_CONSTANT (node))
4341 TREE_CONSTANT (t) = 1;
4342 if (TREE_CODE_CLASS (code) == tcc_reference
4343 && node && TREE_THIS_VOLATILE (node))
4344 TREE_THIS_VOLATILE (t) = 1;
4345 break;
4346 }
4347
4348 return t;
4349 }
4350
4351 #define PROCESS_ARG(N) \
4352 do { \
4353 TREE_OPERAND (t, N) = arg##N; \
4354 if (arg##N &&!TYPE_P (arg##N)) \
4355 { \
4356 if (TREE_SIDE_EFFECTS (arg##N)) \
4357 side_effects = 1; \
4358 if (!TREE_READONLY (arg##N) \
4359 && !CONSTANT_CLASS_P (arg##N)) \
4360 (void) (read_only = 0); \
4361 if (!TREE_CONSTANT (arg##N)) \
4362 (void) (constant = 0); \
4363 } \
4364 } while (0)
4365
4366 tree
4367 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4368 {
4369 bool constant, read_only, side_effects;
4370 tree t;
4371
4372 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4373
4374 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4375 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4376 /* When sizetype precision doesn't match that of pointers
4377 we need to be able to build explicit extensions or truncations
4378 of the offset argument. */
4379 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4380 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4381 && TREE_CODE (arg1) == INTEGER_CST);
4382
4383 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4384 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4385 && ptrofftype_p (TREE_TYPE (arg1)));
4386
4387 t = make_node_stat (code PASS_MEM_STAT);
4388 TREE_TYPE (t) = tt;
4389
4390 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4391 result based on those same flags for the arguments. But if the
4392 arguments aren't really even `tree' expressions, we shouldn't be trying
4393 to do this. */
4394
4395 /* Expressions without side effects may be constant if their
4396 arguments are as well. */
4397 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4398 || TREE_CODE_CLASS (code) == tcc_binary);
4399 read_only = 1;
4400 side_effects = TREE_SIDE_EFFECTS (t);
4401
4402 PROCESS_ARG (0);
4403 PROCESS_ARG (1);
4404
4405 TREE_SIDE_EFFECTS (t) = side_effects;
4406 if (code == MEM_REF)
4407 {
4408 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4409 {
4410 tree o = TREE_OPERAND (arg0, 0);
4411 TREE_READONLY (t) = TREE_READONLY (o);
4412 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4413 }
4414 }
4415 else
4416 {
4417 TREE_READONLY (t) = read_only;
4418 TREE_CONSTANT (t) = constant;
4419 TREE_THIS_VOLATILE (t)
4420 = (TREE_CODE_CLASS (code) == tcc_reference
4421 && arg0 && TREE_THIS_VOLATILE (arg0));
4422 }
4423
4424 return t;
4425 }
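/* A minimal sketch of a typical build2 call, assuming PTR is a
   pointer-typed tree and IDX an integral offset (both hypothetical).
   Guarded with #if 0; illustration only.  */
#if 0
static tree
example_build_pointer_offset (tree ptr, tree idx)
{
  /* The assertion above requires POINTER_PLUS_EXPR to get a pointer-typed
     result and first operand and a ptrofftype offset, so convert the
     offset to sizetype first.  */
  tree off = fold_convert (sizetype, idx);
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
}
#endif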
4426
4427
4428 tree
4429 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4430 tree arg2 MEM_STAT_DECL)
4431 {
4432 bool constant, read_only, side_effects;
4433 tree t;
4434
4435 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4436 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4437
4438 t = make_node_stat (code PASS_MEM_STAT);
4439 TREE_TYPE (t) = tt;
4440
4441 read_only = 1;
4442
4443 /* As a special exception, if COND_EXPR has NULL branches, we
4444 assume that it is a gimple statement and always consider
4445 it to have side effects. */
4446 if (code == COND_EXPR
4447 && tt == void_type_node
4448 && arg1 == NULL_TREE
4449 && arg2 == NULL_TREE)
4450 side_effects = true;
4451 else
4452 side_effects = TREE_SIDE_EFFECTS (t);
4453
4454 PROCESS_ARG (0);
4455 PROCESS_ARG (1);
4456 PROCESS_ARG (2);
4457
4458 if (code == COND_EXPR)
4459 TREE_READONLY (t) = read_only;
4460
4461 TREE_SIDE_EFFECTS (t) = side_effects;
4462 TREE_THIS_VOLATILE (t)
4463 = (TREE_CODE_CLASS (code) == tcc_reference
4464 && arg0 && TREE_THIS_VOLATILE (arg0));
4465
4466 return t;
4467 }
4468
4469 tree
4470 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4471 tree arg2, tree arg3 MEM_STAT_DECL)
4472 {
4473 bool constant, read_only, side_effects;
4474 tree t;
4475
4476 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4477
4478 t = make_node_stat (code PASS_MEM_STAT);
4479 TREE_TYPE (t) = tt;
4480
4481 side_effects = TREE_SIDE_EFFECTS (t);
4482
4483 PROCESS_ARG (0);
4484 PROCESS_ARG (1);
4485 PROCESS_ARG (2);
4486 PROCESS_ARG (3);
4487
4488 TREE_SIDE_EFFECTS (t) = side_effects;
4489 TREE_THIS_VOLATILE (t)
4490 = (TREE_CODE_CLASS (code) == tcc_reference
4491 && arg0 && TREE_THIS_VOLATILE (arg0));
4492
4493 return t;
4494 }
4495
4496 tree
4497 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4498 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4499 {
4500 bool constant, read_only, side_effects;
4501 tree t;
4502
4503 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4504
4505 t = make_node_stat (code PASS_MEM_STAT);
4506 TREE_TYPE (t) = tt;
4507
4508 side_effects = TREE_SIDE_EFFECTS (t);
4509
4510 PROCESS_ARG (0);
4511 PROCESS_ARG (1);
4512 PROCESS_ARG (2);
4513 PROCESS_ARG (3);
4514 PROCESS_ARG (4);
4515
4516 TREE_SIDE_EFFECTS (t) = side_effects;
4517 if (code == TARGET_MEM_REF)
4518 {
4519 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4520 {
4521 tree o = TREE_OPERAND (arg0, 0);
4522 TREE_READONLY (t) = TREE_READONLY (o);
4523 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4524 }
4525 }
4526 else
4527 TREE_THIS_VOLATILE (t)
4528 = (TREE_CODE_CLASS (code) == tcc_reference
4529 && arg0 && TREE_THIS_VOLATILE (arg0));
4530
4531 return t;
4532 }
4533
4534 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4535 on the pointer PTR. */
4536
4537 tree
4538 build_simple_mem_ref_loc (location_t loc, tree ptr)
4539 {
4540 HOST_WIDE_INT offset = 0;
4541 tree ptype = TREE_TYPE (ptr);
4542 tree tem;
4543 /* For convenience allow addresses that collapse to a simple base
4544 and offset. */
4545 if (TREE_CODE (ptr) == ADDR_EXPR
4546 && (handled_component_p (TREE_OPERAND (ptr, 0))
4547 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4548 {
4549 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4550 gcc_assert (ptr);
4551 ptr = build_fold_addr_expr (ptr);
4552 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4553 }
4554 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4555 ptr, build_int_cst (ptype, offset));
4556 SET_EXPR_LOCATION (tem, loc);
4557 return tem;
4558 }
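/* A minimal sketch, assuming PTR is a pointer-typed value; the
   build_simple_mem_ref wrapper in tree.h supplies UNKNOWN_LOCATION.
   Guarded with #if 0; illustration only.  */
#if 0
static tree
example_load_through_pointer (location_t loc, tree ptr)
{
  /* The GENERIC equivalent of "*ptr": a MEM_REF with a zero offset of
     the pointer's type.  */
  return build_simple_mem_ref_loc (loc, ptr);
}
#endif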
4559
4560 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4561
4562 offset_int
4563 mem_ref_offset (const_tree t)
4564 {
4565 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4566 }
4567
4568 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4569 offsetted by OFFSET units. */
4570
4571 tree
4572 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4573 {
4574 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4575 build_fold_addr_expr (base),
4576 build_int_cst (ptr_type_node, offset));
4577 tree addr = build1 (ADDR_EXPR, type, ref);
4578 recompute_tree_invariant_for_addr_expr (addr);
4579 return addr;
4580 }
4581
4582 /* Similar except don't specify the TREE_TYPE
4583 and leave the TREE_SIDE_EFFECTS as 0.
4584 It is permissible for arguments to be null,
4585 or even garbage if their values do not matter. */
4586
4587 tree
4588 build_nt (enum tree_code code, ...)
4589 {
4590 tree t;
4591 int length;
4592 int i;
4593 va_list p;
4594
4595 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4596
4597 va_start (p, code);
4598
4599 t = make_node (code);
4600 length = TREE_CODE_LENGTH (code);
4601
4602 for (i = 0; i < length; i++)
4603 TREE_OPERAND (t, i) = va_arg (p, tree);
4604
4605 va_end (p);
4606 return t;
4607 }
4608
4609 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4610 tree vec. */
4611
4612 tree
4613 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4614 {
4615 tree ret, t;
4616 unsigned int ix;
4617
4618 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4619 CALL_EXPR_FN (ret) = fn;
4620 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4621 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4622 CALL_EXPR_ARG (ret, ix) = t;
4623 return ret;
4624 }
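/* A minimal sketch, assuming FNADDR is an ADDR_EXPR of a FUNCTION_DECL and
   ARG0/ARG1 are argument trees (all hypothetical).  Guarded with #if 0;
   illustration only.  */
#if 0
static tree
example_build_unfolded_call (tree fnaddr, tree arg0, tree arg1)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, arg0);
  vec_safe_push (args, arg1);
  /* Unlike build_call_vec, no folding or operand processing happens here.  */
  return build_nt_call_vec (fnaddr, args);
}
#endif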
4625 \f
4626 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4627 We do NOT enter this node in any sort of symbol table.
4628
4629 LOC is the location of the decl.
4630
4631 layout_decl is used to set up the decl's storage layout.
4632 Other slots are initialized to 0 or null pointers. */
4633
4634 tree
4635 build_decl_stat (location_t loc, enum tree_code code, tree name,
4636 tree type MEM_STAT_DECL)
4637 {
4638 tree t;
4639
4640 t = make_node_stat (code PASS_MEM_STAT);
4641 DECL_SOURCE_LOCATION (t) = loc;
4642
4643 /* if (type == error_mark_node)
4644 type = integer_type_node; */
4645 /* That is not done, deliberately, so that having error_mark_node
4646 as the type can suppress useless errors in the use of this variable. */
4647
4648 DECL_NAME (t) = name;
4649 TREE_TYPE (t) = type;
4650
4651 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4652 layout_decl (t, 0);
4653
4654 return t;
4655 }
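/* A minimal sketch of creating an artificial variable with the build_decl
   wrapper from tree.h; the name "__example_tmp" is hypothetical.  Guarded
   with #if 0; illustration only.  */
#if 0
static tree
example_make_artificial_var (tree type)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("__example_tmp"), type);
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  return var;
}
#endif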
4656
4657 /* Builds and returns function declaration with NAME and TYPE. */
4658
4659 tree
4660 build_fn_decl (const char *name, tree type)
4661 {
4662 tree id = get_identifier (name);
4663 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4664
4665 DECL_EXTERNAL (decl) = 1;
4666 TREE_PUBLIC (decl) = 1;
4667 DECL_ARTIFICIAL (decl) = 1;
4668 TREE_NOTHROW (decl) = 1;
4669
4670 return decl;
4671 }
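/* A minimal sketch of declaring an external helper with build_fn_decl; the
   function name is hypothetical.  Guarded with #if 0; illustration only.  */
#if 0
static tree
example_declare_runtime_hook (void)
{
  /* Prototype: void __example_runtime_hook (void).  */
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  return build_fn_decl ("__example_runtime_hook", fntype);
}
#endif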
4672
4673 vec<tree, va_gc> *all_translation_units;
4674
4675 /* Builds a new translation-unit decl with name NAME, queues it in the
4676 global list of translation-unit decls and returns it. */
4677
4678 tree
4679 build_translation_unit_decl (tree name)
4680 {
4681 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4682 name, NULL_TREE);
4683 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4684 vec_safe_push (all_translation_units, tu);
4685 return tu;
4686 }
4687
4688 \f
4689 /* BLOCK nodes are used to represent the structure of binding contours
4690 and declarations, once those contours have been exited and their contents
4691 compiled. This information is used for outputting debugging info. */
4692
4693 tree
4694 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4695 {
4696 tree block = make_node (BLOCK);
4697
4698 BLOCK_VARS (block) = vars;
4699 BLOCK_SUBBLOCKS (block) = subblocks;
4700 BLOCK_SUPERCONTEXT (block) = supercontext;
4701 BLOCK_CHAIN (block) = chain;
4702 return block;
4703 }
4704
4705 \f
4706 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4707
4708 LOC is the location to use in tree T. */
4709
4710 void
4711 protected_set_expr_location (tree t, location_t loc)
4712 {
4713 if (CAN_HAVE_LOCATION_P (t))
4714 SET_EXPR_LOCATION (t, loc);
4715 }
4716 \f
4717 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4718 is ATTRIBUTE. */
4719
4720 tree
4721 build_decl_attribute_variant (tree ddecl, tree attribute)
4722 {
4723 DECL_ATTRIBUTES (ddecl) = attribute;
4724 return ddecl;
4725 }
4726
4727 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4728 is ATTRIBUTE and its qualifiers are QUALS.
4729
4730 Record such modified types already made so we don't make duplicates. */
4731
4732 tree
4733 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4734 {
4735 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4736 {
4737 inchash::hash hstate;
4738 tree ntype;
4739 int i;
4740 tree t;
4741 enum tree_code code = TREE_CODE (ttype);
4742
4743 /* Building a distinct copy of a tagged type is inappropriate; it
4744 causes breakage in code that expects there to be a one-to-one
4745 relationship between a struct and its fields.
4746 build_duplicate_type is another solution (as used in
4747 handle_transparent_union_attribute), but that doesn't play well
4748 with the stronger C++ type identity model. */
4749 if (TREE_CODE (ttype) == RECORD_TYPE
4750 || TREE_CODE (ttype) == UNION_TYPE
4751 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4752 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4753 {
4754 warning (OPT_Wattributes,
4755 "ignoring attributes applied to %qT after definition",
4756 TYPE_MAIN_VARIANT (ttype));
4757 return build_qualified_type (ttype, quals);
4758 }
4759
4760 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4761 ntype = build_distinct_type_copy (ttype);
4762
4763 TYPE_ATTRIBUTES (ntype) = attribute;
4764
4765 hstate.add_int (code);
4766 if (TREE_TYPE (ntype))
4767 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4768 attribute_hash_list (attribute, hstate);
4769
4770 switch (TREE_CODE (ntype))
4771 {
4772 case FUNCTION_TYPE:
4773 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4774 break;
4775 case ARRAY_TYPE:
4776 if (TYPE_DOMAIN (ntype))
4777 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4778 break;
4779 case INTEGER_TYPE:
4780 t = TYPE_MAX_VALUE (ntype);
4781 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4782 hstate.add_object (TREE_INT_CST_ELT (t, i));
4783 break;
4784 case REAL_TYPE:
4785 case FIXED_POINT_TYPE:
4786 {
4787 unsigned int precision = TYPE_PRECISION (ntype);
4788 hstate.add_object (precision);
4789 }
4790 break;
4791 default:
4792 break;
4793 }
4794
4795 ntype = type_hash_canon (hstate.end(), ntype);
4796
4797 /* If the target-dependent attributes make NTYPE different from
4798 its canonical type, we will need to use structural equality
4799 checks for this type. */
4800 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4801 || !comp_type_attributes (ntype, ttype))
4802 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4803 else if (TYPE_CANONICAL (ntype) == ntype)
4804 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4805
4806 ttype = build_qualified_type (ntype, quals);
4807 }
4808 else if (TYPE_QUALS (ttype) != quals)
4809 ttype = build_qualified_type (ttype, quals);
4810
4811 return ttype;
4812 }
4813
4814 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4815 the same. */
4816
4817 static bool
4818 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4819 {
4820 tree cl1, cl2;
4821 for (cl1 = clauses1, cl2 = clauses2;
4822 cl1 && cl2;
4823 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4824 {
4825 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4826 return false;
4827 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4828 {
4829 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4830 OMP_CLAUSE_DECL (cl2)) != 1)
4831 return false;
4832 }
4833 switch (OMP_CLAUSE_CODE (cl1))
4834 {
4835 case OMP_CLAUSE_ALIGNED:
4836 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4837 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4838 return false;
4839 break;
4840 case OMP_CLAUSE_LINEAR:
4841 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4842 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4843 return false;
4844 break;
4845 case OMP_CLAUSE_SIMDLEN:
4846 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4847 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4848 return false;
4849 default:
4850 break;
4851 }
4852 }
4853 return true;
4854 }
4855
4856 /* Compare two constructor-element-type constants. Return true if the lists
4857 are known to be equal; otherwise return false. */
4858
4859 static bool
4860 simple_cst_list_equal (const_tree l1, const_tree l2)
4861 {
4862 while (l1 != NULL_TREE && l2 != NULL_TREE)
4863 {
4864 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4865 return false;
4866
4867 l1 = TREE_CHAIN (l1);
4868 l2 = TREE_CHAIN (l2);
4869 }
4870
4871 return l1 == l2;
4872 }
4873
4874 /* Compare two attributes for their value identity. Return true if the
4875 attribute values are known to be equal; otherwise return false.
4876 */
4877
4878 bool
4879 attribute_value_equal (const_tree attr1, const_tree attr2)
4880 {
4881 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4882 return true;
4883
4884 if (TREE_VALUE (attr1) != NULL_TREE
4885 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4886 && TREE_VALUE (attr2) != NULL
4887 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4888 return (simple_cst_list_equal (TREE_VALUE (attr1),
4889 TREE_VALUE (attr2)) == 1);
4890
4891 if ((flag_openmp || flag_openmp_simd)
4892 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4893 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4894 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4895 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4896 TREE_VALUE (attr2));
4897
4898 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4899 }
4900
4901 /* Return 0 if the attributes for two types are incompatible, 1 if they
4902 are compatible, and 2 if they are nearly compatible (which causes a
4903 warning to be generated). */
4904 int
4905 comp_type_attributes (const_tree type1, const_tree type2)
4906 {
4907 const_tree a1 = TYPE_ATTRIBUTES (type1);
4908 const_tree a2 = TYPE_ATTRIBUTES (type2);
4909 const_tree a;
4910
4911 if (a1 == a2)
4912 return 1;
4913 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4914 {
4915 const struct attribute_spec *as;
4916 const_tree attr;
4917
4918 as = lookup_attribute_spec (get_attribute_name (a));
4919 if (!as || as->affects_type_identity == false)
4920 continue;
4921
4922 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4923 if (!attr || !attribute_value_equal (a, attr))
4924 break;
4925 }
4926 if (!a)
4927 {
4928 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4929 {
4930 const struct attribute_spec *as;
4931
4932 as = lookup_attribute_spec (get_attribute_name (a));
4933 if (!as || as->affects_type_identity == false)
4934 continue;
4935
4936 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4937 break;
4938 /* We don't need to compare trees again, as we did this
4939 already in first loop. */
4940 }
4941 /* All types - affecting identity - are equal, so
4942 there is no need to call target hook for comparison. */
4943 if (!a)
4944 return 1;
4945 }
4946 /* As some type combinations - like default calling-convention - might
4947 be compatible, we have to call the target hook to get the final result. */
4948 return targetm.comp_type_attributes (type1, type2);
4949 }
4950
4951 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4952 is ATTRIBUTE.
4953
4954 Record such modified types already made so we don't make duplicates. */
4955
4956 tree
4957 build_type_attribute_variant (tree ttype, tree attribute)
4958 {
4959 return build_type_attribute_qual_variant (ttype, attribute,
4960 TYPE_QUALS (ttype));
4961 }
4962
4963
4964 /* Reset the expression *EXPR_P, a size or position.
4965
4966 ??? We could reset all non-constant sizes or positions. But it's cheap
4967 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4968
4969 We need to reset self-referential sizes or positions because they cannot
4970 be gimplified and thus can contain a CALL_EXPR after the gimplification
4971 is finished, which will run afoul of LTO streaming. And they need to be
4972 reset to something essentially dummy but not constant, so as to preserve
4973 the properties of the object they are attached to. */
4974
4975 static inline void
4976 free_lang_data_in_one_sizepos (tree *expr_p)
4977 {
4978 tree expr = *expr_p;
4979 if (CONTAINS_PLACEHOLDER_P (expr))
4980 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4981 }
4982
4983
4984 /* Reset all the fields in a binfo node BINFO. We only keep
4985 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4986
4987 static void
4988 free_lang_data_in_binfo (tree binfo)
4989 {
4990 unsigned i;
4991 tree t;
4992
4993 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4994
4995 BINFO_VIRTUALS (binfo) = NULL_TREE;
4996 BINFO_BASE_ACCESSES (binfo) = NULL;
4997 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4998 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4999
5000 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5001 free_lang_data_in_binfo (t);
5002 }
5003
5004
5005 /* Reset all language specific information still present in TYPE. */
5006
5007 static void
5008 free_lang_data_in_type (tree type)
5009 {
5010 gcc_assert (TYPE_P (type));
5011
5012 /* Give the FE a chance to remove its own data first. */
5013 lang_hooks.free_lang_data (type);
5014
5015 TREE_LANG_FLAG_0 (type) = 0;
5016 TREE_LANG_FLAG_1 (type) = 0;
5017 TREE_LANG_FLAG_2 (type) = 0;
5018 TREE_LANG_FLAG_3 (type) = 0;
5019 TREE_LANG_FLAG_4 (type) = 0;
5020 TREE_LANG_FLAG_5 (type) = 0;
5021 TREE_LANG_FLAG_6 (type) = 0;
5022
5023 if (TREE_CODE (type) == FUNCTION_TYPE)
5024 {
5025 /* Remove the const and volatile qualifiers from arguments. The
5026 C++ front end removes them, but the C front end does not,
5027 leading to false ODR violation errors when merging two
5028 instances of the same function signature compiled by
5029 different front ends. */
5030 tree p;
5031
5032 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5033 {
5034 tree arg_type = TREE_VALUE (p);
5035
5036 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5037 {
5038 int quals = TYPE_QUALS (arg_type)
5039 & ~TYPE_QUAL_CONST
5040 & ~TYPE_QUAL_VOLATILE;
5041 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5042 free_lang_data_in_type (TREE_VALUE (p));
5043 }
5044 /* C++ FE uses TREE_PURPOSE to store initial values. */
5045 TREE_PURPOSE (p) = NULL;
5046 }
5047 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5048 TYPE_MINVAL (type) = NULL;
5049 }
5050 if (TREE_CODE (type) == METHOD_TYPE)
5051 {
5052 tree p;
5053
5054 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5055 {
5056 /* C++ FE uses TREE_PURPOSE to store initial values. */
5057 TREE_PURPOSE (p) = NULL;
5058 }
5059 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5060 TYPE_MINVAL (type) = NULL;
5061 }
5062
5063 /* Remove members that are not actually FIELD_DECLs from the field
5064 list of an aggregate. These occur in C++. */
5065 if (RECORD_OR_UNION_TYPE_P (type))
5066 {
5067 tree prev, member;
5068
5069 /* Note that TYPE_FIELDS can be shared across distinct
5070 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5071 to be removed, we cannot set its TREE_CHAIN to NULL.
5072 Otherwise, we would not be able to find all the other fields
5073 in the other instances of this TREE_TYPE.
5074
5075 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5076 prev = NULL_TREE;
5077 member = TYPE_FIELDS (type);
5078 while (member)
5079 {
5080 if (TREE_CODE (member) == FIELD_DECL
5081 || TREE_CODE (member) == TYPE_DECL)
5082 {
5083 if (prev)
5084 TREE_CHAIN (prev) = member;
5085 else
5086 TYPE_FIELDS (type) = member;
5087 prev = member;
5088 }
5089
5090 member = TREE_CHAIN (member);
5091 }
5092
5093 if (prev)
5094 TREE_CHAIN (prev) = NULL_TREE;
5095 else
5096 TYPE_FIELDS (type) = NULL_TREE;
5097
5098 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5099 and dangles the pointer from time to time. */
5100 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5101 TYPE_VFIELD (type) = NULL_TREE;
5102
5103 /* Remove the TYPE_METHODS list. While it would be nice to keep it
5104 to enable ODR warnings about differing method lists, doing so
5105 seems to impractically increase the size of the LTO data streamed.
5106 Keep the information about whether TYPE_METHODS was non-NULL; this
5107 is used by function.c and the pretty printers. */
5108 if (TYPE_METHODS (type))
5109 TYPE_METHODS (type) = error_mark_node;
5110 if (TYPE_BINFO (type))
5111 {
5112 free_lang_data_in_binfo (TYPE_BINFO (type));
5113 /* We need to preserve the link to the bases and the virtual table for
5114 all polymorphic types to keep the devirtualization machinery working.
5115 Debug output cares only about the bases, but we also output the
5116 virtual table pointers so that merging -fdevirtualize and
5117 -fno-devirtualize units is easier. */
5118 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5119 || !flag_devirtualize)
5120 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5121 && !BINFO_VTABLE (TYPE_BINFO (type)))
5122 || debug_info_level != DINFO_LEVEL_NONE))
5123 TYPE_BINFO (type) = NULL;
5124 }
5125 }
5126 else
5127 {
5128 /* For non-aggregate types, clear out the language slot (which
5129 overloads TYPE_BINFO). */
5130 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5131
5132 if (INTEGRAL_TYPE_P (type)
5133 || SCALAR_FLOAT_TYPE_P (type)
5134 || FIXED_POINT_TYPE_P (type))
5135 {
5136 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5137 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5138 }
5139 }
5140
5141 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5142 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5143
5144 if (TYPE_CONTEXT (type)
5145 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5146 {
5147 tree ctx = TYPE_CONTEXT (type);
5148 do
5149 {
5150 ctx = BLOCK_SUPERCONTEXT (ctx);
5151 }
5152 while (ctx && TREE_CODE (ctx) == BLOCK);
5153 TYPE_CONTEXT (type) = ctx;
5154 }
5155 }
5156
5157
5158 /* Return true if DECL may need an assembler name to be set. */
5159
5160 static inline bool
5161 need_assembler_name_p (tree decl)
5162 {
5163 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5164 Rule merging. This makes type_odr_p return true on those types during
5165 LTO, and by comparing the mangled names we can tell which types are
5166 intended to be equivalent across compilation units.
5167
5168 We do not store names of type_in_anonymous_namespace_p.
5169
5170 Record, union and enumeration types have linkage that allows us
5171 to check type_in_anonymous_namespace_p. We do not mangle compound types
5172 that can always be compared structurally.
5173
5174 Similarly for builtin types, we compare properties of their main variant.
5175 A special case is integer types, where mangling does distinguish
5176 between char/signed char/unsigned char etc. Storing names for these lets
5177 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5178 See cp/mangle.c:write_builtin_type for details. */
5179
5180 if (flag_lto_odr_type_mering
5181 && TREE_CODE (decl) == TYPE_DECL
5182 && DECL_NAME (decl)
5183 && decl == TYPE_NAME (TREE_TYPE (decl))
5184 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5185 && (type_with_linkage_p (TREE_TYPE (decl))
5186 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5187 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5188 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5189 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5190 if (TREE_CODE (decl) != FUNCTION_DECL
5191 && TREE_CODE (decl) != VAR_DECL)
5192 return false;
5193
5194 /* If DECL already has its assembler name set, it does not need a
5195 new one. */
5196 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5197 || DECL_ASSEMBLER_NAME_SET_P (decl))
5198 return false;
5199
5200 /* Abstract decls do not need an assembler name. */
5201 if (DECL_ABSTRACT_P (decl))
5202 return false;
5203
5204 /* For VAR_DECLs, only static, public and external symbols need an
5205 assembler name. */
5206 if (TREE_CODE (decl) == VAR_DECL
5207 && !TREE_STATIC (decl)
5208 && !TREE_PUBLIC (decl)
5209 && !DECL_EXTERNAL (decl))
5210 return false;
5211
5212 if (TREE_CODE (decl) == FUNCTION_DECL)
5213 {
5214 /* Do not set assembler name on builtins. Allow RTL expansion to
5215 decide whether to expand inline or via a regular call. */
5216 if (DECL_BUILT_IN (decl)
5217 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5218 return false;
5219
5220 /* Functions represented in the callgraph need an assembler name. */
5221 if (cgraph_node::get (decl) != NULL)
5222 return true;
5223
5224 /* Unused and not public functions don't need an assembler name. */
5225 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5226 return false;
5227 }
5228
5229 return true;
5230 }
5231
5232
5233 /* Reset all language specific information still present in symbol
5234 DECL. */
5235
5236 static void
5237 free_lang_data_in_decl (tree decl)
5238 {
5239 gcc_assert (DECL_P (decl));
5240
5241 /* Give the FE a chance to remove its own data first. */
5242 lang_hooks.free_lang_data (decl);
5243
5244 TREE_LANG_FLAG_0 (decl) = 0;
5245 TREE_LANG_FLAG_1 (decl) = 0;
5246 TREE_LANG_FLAG_2 (decl) = 0;
5247 TREE_LANG_FLAG_3 (decl) = 0;
5248 TREE_LANG_FLAG_4 (decl) = 0;
5249 TREE_LANG_FLAG_5 (decl) = 0;
5250 TREE_LANG_FLAG_6 (decl) = 0;
5251
5252 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5253 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5254 if (TREE_CODE (decl) == FIELD_DECL)
5255 {
5256 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5257 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5258 DECL_QUALIFIER (decl) = NULL_TREE;
5259 }
5260
5261 if (TREE_CODE (decl) == FUNCTION_DECL)
5262 {
5263 struct cgraph_node *node;
5264 if (!(node = cgraph_node::get (decl))
5265 || (!node->definition && !node->clones))
5266 {
5267 if (node)
5268 node->release_body ();
5269 else
5270 {
5271 release_function_body (decl);
5272 DECL_ARGUMENTS (decl) = NULL;
5273 DECL_RESULT (decl) = NULL;
5274 DECL_INITIAL (decl) = error_mark_node;
5275 }
5276 }
5277 if (gimple_has_body_p (decl))
5278 {
5279 tree t;
5280
5281 /* If DECL has a gimple body, then the context for its
5282 arguments must be DECL. Otherwise, it doesn't really
5283 matter, as we will not be emitting any code for DECL. In
5284 general, there may be other instances of DECL created by
5285 the front end and since PARM_DECLs are generally shared,
5286 their DECL_CONTEXT changes as the replicas of DECL are
5287 created. The only time where DECL_CONTEXT is important
5288 is for the FUNCTION_DECLs that have a gimple body (since
5289 the PARM_DECL will be used in the function's body). */
5290 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5291 DECL_CONTEXT (t) = decl;
5292 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5293 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5294 = target_option_default_node;
5295 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5296 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5297 = optimization_default_node;
5298 }
5299
5300 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5301 At this point, it is not needed anymore. */
5302 DECL_SAVED_TREE (decl) = NULL_TREE;
5303
5304 /* Clear the abstract origin if it refers to a method. Otherwise
5305 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5306 origin will not be output correctly. */
5307 if (DECL_ABSTRACT_ORIGIN (decl)
5308 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5309 && RECORD_OR_UNION_TYPE_P
5310 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5311 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5312
5313 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5314 DECL_VINDEX referring to itself into a vtable slot number as it
5315 should. Happens with functions that are copied and then forgotten
5316 about. Just clear it, it won't matter anymore. */
5317 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5318 DECL_VINDEX (decl) = NULL_TREE;
5319 }
5320 else if (TREE_CODE (decl) == VAR_DECL)
5321 {
5322 if ((DECL_EXTERNAL (decl)
5323 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5324 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5325 DECL_INITIAL (decl) = NULL_TREE;
5326 }
5327 else if (TREE_CODE (decl) == TYPE_DECL
5328 || TREE_CODE (decl) == FIELD_DECL)
5329 DECL_INITIAL (decl) = NULL_TREE;
5330 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5331 && DECL_INITIAL (decl)
5332 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5333 {
5334 /* Strip builtins from the translation-unit BLOCK. We still have targets
5335 without builtin_decl_explicit support and also builtins are shared
5336 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5337 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5338 while (*nextp)
5339 {
5340 tree var = *nextp;
5341 if (TREE_CODE (var) == FUNCTION_DECL
5342 && DECL_BUILT_IN (var))
5343 *nextp = TREE_CHAIN (var);
5344 else
5345 nextp = &TREE_CHAIN (var);
5346 }
5347 }
5348 }
5349
5350
5351 /* Data used when collecting DECLs and TYPEs for language data removal. */
5352
5353 struct free_lang_data_d
5354 {
5355 /* Worklist to avoid excessive recursion. */
5356 vec<tree> worklist;
5357
5358 /* Set of traversed objects. Used to avoid duplicate visits. */
5359 hash_set<tree> *pset;
5360
5361 /* Array of symbols to process with free_lang_data_in_decl. */
5362 vec<tree> decls;
5363
5364 /* Array of types to process with free_lang_data_in_type. */
5365 vec<tree> types;
5366 };
5367
5368
5369 /* Save all language fields needed to generate proper debug information
5370 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5371
5372 static void
5373 save_debug_info_for_decl (tree t)
5374 {
5375 /*struct saved_debug_info_d *sdi;*/
5376
5377 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5378
5379 /* FIXME. Partial implementation for saving debug info removed. */
5380 }
5381
5382
5383 /* Save all language fields needed to generate proper debug information
5384 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5385
5386 static void
5387 save_debug_info_for_type (tree t)
5388 {
5389 /*struct saved_debug_info_d *sdi;*/
5390
5391 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5392
5393 /* FIXME. Partial implementation for saving debug info removed. */
5394 }
5395
5396
5397 /* Add type or decl T to one of the list of tree nodes that need their
5398 language data removed. The lists are held inside FLD. */
5399
5400 static void
5401 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5402 {
5403 if (DECL_P (t))
5404 {
5405 fld->decls.safe_push (t);
5406 if (debug_info_level > DINFO_LEVEL_TERSE)
5407 save_debug_info_for_decl (t);
5408 }
5409 else if (TYPE_P (t))
5410 {
5411 fld->types.safe_push (t);
5412 if (debug_info_level > DINFO_LEVEL_TERSE)
5413 save_debug_info_for_type (t);
5414 }
5415 else
5416 gcc_unreachable ();
5417 }
5418
5419 /* Push tree node T into FLD->WORKLIST. */
5420
5421 static inline void
5422 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5423 {
5424 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5425 fld->worklist.safe_push ((t));
5426 }
5427
5428
5429 /* Operand callback helper for free_lang_data_in_node. *TP is the
5430 subtree operand being considered. */
5431
5432 static tree
5433 find_decls_types_r (tree *tp, int *ws, void *data)
5434 {
5435 tree t = *tp;
5436 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5437
5438 if (TREE_CODE (t) == TREE_LIST)
5439 return NULL_TREE;
5440
5441 /* Language specific nodes will be removed, so there is no need
5442 to gather anything under them. */
5443 if (is_lang_specific (t))
5444 {
5445 *ws = 0;
5446 return NULL_TREE;
5447 }
5448
5449 if (DECL_P (t))
5450 {
5451 /* Note that walk_tree does not traverse every possible field in
5452 decls, so we have to do our own traversals here. */
5453 add_tree_to_fld_list (t, fld);
5454
5455 fld_worklist_push (DECL_NAME (t), fld);
5456 fld_worklist_push (DECL_CONTEXT (t), fld);
5457 fld_worklist_push (DECL_SIZE (t), fld);
5458 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5459
5460 /* We are going to remove everything under DECL_INITIAL for
5461 TYPE_DECLs. No point walking them. */
5462 if (TREE_CODE (t) != TYPE_DECL)
5463 fld_worklist_push (DECL_INITIAL (t), fld);
5464
5465 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5466 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5467
5468 if (TREE_CODE (t) == FUNCTION_DECL)
5469 {
5470 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5471 fld_worklist_push (DECL_RESULT (t), fld);
5472 }
5473 else if (TREE_CODE (t) == TYPE_DECL)
5474 {
5475 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5476 }
5477 else if (TREE_CODE (t) == FIELD_DECL)
5478 {
5479 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5480 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5481 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5482 fld_worklist_push (DECL_FCONTEXT (t), fld);
5483 }
5484
5485 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5486 && DECL_HAS_VALUE_EXPR_P (t))
5487 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5488
5489 if (TREE_CODE (t) != FIELD_DECL
5490 && TREE_CODE (t) != TYPE_DECL)
5491 fld_worklist_push (TREE_CHAIN (t), fld);
5492 *ws = 0;
5493 }
5494 else if (TYPE_P (t))
5495 {
5496 /* Note that walk_tree does not traverse every possible field in
5497 types, so we have to do our own traversals here. */
5498 add_tree_to_fld_list (t, fld);
5499
5500 if (!RECORD_OR_UNION_TYPE_P (t))
5501 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5502 fld_worklist_push (TYPE_SIZE (t), fld);
5503 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5504 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5505 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5506 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5507 fld_worklist_push (TYPE_NAME (t), fld);
5508 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5509 them and thus do not want to reach unused pointer types
5510 this way. */
5511 if (!POINTER_TYPE_P (t))
5512 fld_worklist_push (TYPE_MINVAL (t), fld);
5513 if (!RECORD_OR_UNION_TYPE_P (t))
5514 fld_worklist_push (TYPE_MAXVAL (t), fld);
5515 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5516 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5517 do not want to reach unused variants this way. */
5518 if (TYPE_CONTEXT (t))
5519 {
5520 tree ctx = TYPE_CONTEXT (t);
5521 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5522 So push that instead. */
5523 while (ctx && TREE_CODE (ctx) == BLOCK)
5524 ctx = BLOCK_SUPERCONTEXT (ctx);
5525 fld_worklist_push (ctx, fld);
5526 }
5527 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5528 want to reach unused types this way. */
5529
5530 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5531 {
5532 unsigned i;
5533 tree tem;
5534 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5535 fld_worklist_push (TREE_TYPE (tem), fld);
5536 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5537 if (tem
5538 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5539 && TREE_CODE (tem) == TREE_LIST)
5540 do
5541 {
5542 fld_worklist_push (TREE_VALUE (tem), fld);
5543 tem = TREE_CHAIN (tem);
5544 }
5545 while (tem);
5546 }
5547 if (RECORD_OR_UNION_TYPE_P (t))
5548 {
5549 tree tem;
5550 /* Push all TYPE_FIELDS - interesting and non-interesting
5551 entries can be interleaved. */
5552 tem = TYPE_FIELDS (t);
5553 while (tem)
5554 {
5555 if (TREE_CODE (tem) == FIELD_DECL
5556 || TREE_CODE (tem) == TYPE_DECL)
5557 fld_worklist_push (tem, fld);
5558 tem = TREE_CHAIN (tem);
5559 }
5560 }
5561
5562 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5563 *ws = 0;
5564 }
5565 else if (TREE_CODE (t) == BLOCK)
5566 {
5567 tree tem;
5568 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5569 fld_worklist_push (tem, fld);
5570 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5571 fld_worklist_push (tem, fld);
5572 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5573 }
5574
5575 if (TREE_CODE (t) != IDENTIFIER_NODE
5576 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5577 fld_worklist_push (TREE_TYPE (t), fld);
5578
5579 return NULL_TREE;
5580 }
5581
5582
5583 /* Find decls and types in T. */
5584
5585 static void
5586 find_decls_types (tree t, struct free_lang_data_d *fld)
5587 {
5588 while (1)
5589 {
5590 if (!fld->pset->contains (t))
5591 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5592 if (fld->worklist.is_empty ())
5593 break;
5594 t = fld->worklist.pop ();
5595 }
5596 }
5597
5598 /* Translate all the types in LIST with the corresponding runtime
5599 types. */
5600
5601 static tree
5602 get_eh_types_for_runtime (tree list)
5603 {
5604 tree head, prev;
5605
5606 if (list == NULL_TREE)
5607 return NULL_TREE;
5608
5609 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5610 prev = head;
5611 list = TREE_CHAIN (list);
5612 while (list)
5613 {
5614 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5615 TREE_CHAIN (prev) = n;
5616 prev = TREE_CHAIN (prev);
5617 list = TREE_CHAIN (list);
5618 }
5619
5620 return head;
5621 }
5622
5623
5624 /* Find decls and types referenced in EH region R and store them in
5625 FLD->DECLS and FLD->TYPES. */
5626
5627 static void
5628 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5629 {
5630 switch (r->type)
5631 {
5632 case ERT_CLEANUP:
5633 break;
5634
5635 case ERT_TRY:
5636 {
5637 eh_catch c;
5638
5639 /* The types referenced in each catch must first be changed to the
5640 EH types used at runtime. This removes references to FE types
5641 in the region. */
5642 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5643 {
5644 c->type_list = get_eh_types_for_runtime (c->type_list);
5645 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5646 }
5647 }
5648 break;
5649
5650 case ERT_ALLOWED_EXCEPTIONS:
5651 r->u.allowed.type_list
5652 = get_eh_types_for_runtime (r->u.allowed.type_list);
5653 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5654 break;
5655
5656 case ERT_MUST_NOT_THROW:
5657 walk_tree (&r->u.must_not_throw.failure_decl,
5658 find_decls_types_r, fld, fld->pset);
5659 break;
5660 }
5661 }
5662
5663
5664 /* Find decls and types referenced in cgraph node N and store them in
5665 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5666 look for *every* kind of DECL and TYPE node reachable from N,
5667 including those embedded inside types and decls (i.e., TYPE_DECLs,
5668 NAMESPACE_DECLs, etc). */
5669
5670 static void
5671 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5672 {
5673 basic_block bb;
5674 struct function *fn;
5675 unsigned ix;
5676 tree t;
5677
5678 find_decls_types (n->decl, fld);
5679
5680 if (!gimple_has_body_p (n->decl))
5681 return;
5682
5683 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5684
5685 fn = DECL_STRUCT_FUNCTION (n->decl);
5686
5687 /* Traverse locals. */
5688 FOR_EACH_LOCAL_DECL (fn, ix, t)
5689 find_decls_types (t, fld);
5690
5691 /* Traverse EH regions in FN. */
5692 {
5693 eh_region r;
5694 FOR_ALL_EH_REGION_FN (r, fn)
5695 find_decls_types_in_eh_region (r, fld);
5696 }
5697
5698 /* Traverse every statement in FN. */
5699 FOR_EACH_BB_FN (bb, fn)
5700 {
5701 gphi_iterator psi;
5702 gimple_stmt_iterator si;
5703 unsigned i;
5704
5705 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5706 {
5707 gphi *phi = psi.phi ();
5708
5709 for (i = 0; i < gimple_phi_num_args (phi); i++)
5710 {
5711 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5712 find_decls_types (*arg_p, fld);
5713 }
5714 }
5715
5716 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5717 {
5718 gimple stmt = gsi_stmt (si);
5719
5720 if (is_gimple_call (stmt))
5721 find_decls_types (gimple_call_fntype (stmt), fld);
5722
5723 for (i = 0; i < gimple_num_ops (stmt); i++)
5724 {
5725 tree arg = gimple_op (stmt, i);
5726 find_decls_types (arg, fld);
5727 }
5728 }
5729 }
5730 }
5731
5732
5733 /* Find decls and types referenced in varpool node N and store them in
5734 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5735 look for *every* kind of DECL and TYPE node reachable from N,
5736 including those embedded inside types and decls (i.e., TYPE_DECLs,
5737 NAMESPACE_DECLs, etc). */
5738
5739 static void
5740 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5741 {
5742 find_decls_types (v->decl, fld);
5743 }
5744
5745 /* If T needs an assembler name, have one created for it. */
5746
5747 void
5748 assign_assembler_name_if_neeeded (tree t)
5749 {
5750 if (need_assembler_name_p (t))
5751 {
5752 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5753 diagnostics that use input_location to show locus
5754 information. The problem here is that, at this point,
5755 input_location is generally anchored to the end of the file
5756 (since the parser is long gone), so we don't have a good
5757 position to pin it to.
5758
5759 To alleviate this problem, this uses the location of T's
5760 declaration. Examples of this are
5761 testsuite/g++.dg/template/cond2.C and
5762 testsuite/g++.dg/template/pr35240.C. */
5763 location_t saved_location = input_location;
5764 input_location = DECL_SOURCE_LOCATION (t);
5765
5766 decl_assembler_name (t);
5767
5768 input_location = saved_location;
5769 }
5770 }
5771
5772
5773 /* Free language specific information for every operand and expression
5774 in every node of the call graph. This process operates in three stages:
5775
5776 1- Every callgraph node and varpool node is traversed looking for
5777 decls and types embedded in them. This is a more exhaustive
5778 search than that done by find_referenced_vars, because it will
5779 also collect individual fields, decls embedded in types, etc.
5780
5781 2- All the decls found are sent to free_lang_data_in_decl.
5782
5783 3- All the types found are sent to free_lang_data_in_type.
5784
5785 The ordering between decls and types is important because
5786 free_lang_data_in_decl sets assembler names, which includes
5787 mangling. So types cannot be freed up until assembler names have
5788 been set up. */
5789
5790 static void
5791 free_lang_data_in_cgraph (void)
5792 {
5793 struct cgraph_node *n;
5794 varpool_node *v;
5795 struct free_lang_data_d fld;
5796 tree t;
5797 unsigned i;
5798 alias_pair *p;
5799
5800 /* Initialize sets and arrays to store referenced decls and types. */
5801 fld.pset = new hash_set<tree>;
5802 fld.worklist.create (0);
5803 fld.decls.create (100);
5804 fld.types.create (100);
5805
5806 /* Find decls and types in the body of every function in the callgraph. */
5807 FOR_EACH_FUNCTION (n)
5808 find_decls_types_in_node (n, &fld);
5809
5810 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5811 find_decls_types (p->decl, &fld);
5812
5813 /* Find decls and types in every varpool symbol. */
5814 FOR_EACH_VARIABLE (v)
5815 find_decls_types_in_var (v, &fld);
5816
5817 /* Set the assembler name on every decl found. We need to do this
5818 now because free_lang_data_in_decl will invalidate data needed
5819 for mangling. This breaks mangling on interdependent decls. */
5820 FOR_EACH_VEC_ELT (fld.decls, i, t)
5821 assign_assembler_name_if_neeeded (t);
5822
5823 /* Traverse every decl found freeing its language data. */
5824 FOR_EACH_VEC_ELT (fld.decls, i, t)
5825 free_lang_data_in_decl (t);
5826
5827 /* Traverse every type found freeing its language data. */
5828 FOR_EACH_VEC_ELT (fld.types, i, t)
5829 free_lang_data_in_type (t);
5830 #ifdef ENABLE_CHECKING
5831 FOR_EACH_VEC_ELT (fld.types, i, t)
5832 verify_type (t);
5833 #endif
5834
5835 delete fld.pset;
5836 fld.worklist.release ();
5837 fld.decls.release ();
5838 fld.types.release ();
5839 }
5840
5841
5842 /* Free resources that are used by FE but are not needed once they are done. */
5843
5844 static unsigned
5845 free_lang_data (void)
5846 {
5847 unsigned i;
5848
5849 /* If we are the LTO frontend we have freed lang-specific data already. */
5850 if (in_lto_p
5851 || (!flag_generate_lto && !flag_generate_offload))
5852 return 0;
5853
5854 /* Allocate and assign alias sets to the standard integer types
5855 while the slots still hold the types the way the front ends generated them. */
5856 for (i = 0; i < itk_none; ++i)
5857 if (integer_types[i])
5858 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5859
5860 /* Traverse the IL resetting language specific information for
5861 operands, expressions, etc. */
5862 free_lang_data_in_cgraph ();
5863
5864 /* Create gimple variants for common types. */
5865 ptrdiff_type_node = integer_type_node;
5866 fileptr_type_node = ptr_type_node;
5867
5868 /* Reset some langhooks. Do not reset types_compatible_p, it may
5869 still be used indirectly via the get_alias_set langhook. */
5870 lang_hooks.dwarf_name = lhd_dwarf_name;
5871 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5872 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5873
5874 /* We do not want the default decl_assembler_name implementation;
5875 rather, once we have fixed everything, we want a wrapper around it
5876 asserting that all non-local symbols already got their assembler
5877 name and producing assembler names only for local symbols. Or, better,
5878 make sure we never call decl_assembler_name on local symbols and
5879 devise a separate, middle-end-private scheme for it. */
5880
5881 /* Reset diagnostic machinery. */
5882 tree_diagnostics_defaults (global_dc);
5883
5884 return 0;
5885 }
5886
5887
5888 namespace {
5889
5890 const pass_data pass_data_ipa_free_lang_data =
5891 {
5892 SIMPLE_IPA_PASS, /* type */
5893 "*free_lang_data", /* name */
5894 OPTGROUP_NONE, /* optinfo_flags */
5895 TV_IPA_FREE_LANG_DATA, /* tv_id */
5896 0, /* properties_required */
5897 0, /* properties_provided */
5898 0, /* properties_destroyed */
5899 0, /* todo_flags_start */
5900 0, /* todo_flags_finish */
5901 };
5902
5903 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5904 {
5905 public:
5906 pass_ipa_free_lang_data (gcc::context *ctxt)
5907 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5908 {}
5909
5910 /* opt_pass methods: */
5911 virtual unsigned int execute (function *) { return free_lang_data (); }
5912
5913 }; // class pass_ipa_free_lang_data
5914
5915 } // anon namespace
5916
5917 simple_ipa_opt_pass *
5918 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5919 {
5920 return new pass_ipa_free_lang_data (ctxt);
5921 }
5922
5923 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5924 ATTR_NAME. Also used internally by remove_attribute(). */
5925 bool
5926 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5927 {
5928 size_t ident_len = IDENTIFIER_LENGTH (ident);
5929
5930 if (ident_len == attr_len)
5931 {
5932 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5933 return true;
5934 }
5935 else if (ident_len == attr_len + 4)
5936 {
5937 /* There is the possibility that ATTR is 'text' and IDENT is
5938 '__text__'. */
5939 const char *p = IDENTIFIER_POINTER (ident);
5940 if (p[0] == '_' && p[1] == '_'
5941 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5942 && strncmp (attr_name, p + 2, attr_len) == 0)
5943 return true;
5944 }
5945
5946 return false;
5947 }
5948
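/* Illustrative sketch, not part of the original source: both the plain and
   the underscore-decorated spelling of an identifier match the undecorated
   ATTR_NAME.  The helper below is hypothetical and kept out of the build.  */
#if 0
static bool
example_matches_packed (void)
{
  tree plain = get_identifier ("packed");
  tree decorated = get_identifier ("__packed__");
  /* Both calls return true; the first argument is always undecorated.  */
  return (private_is_attribute_p ("packed", strlen ("packed"), plain)
	  && private_is_attribute_p ("packed", strlen ("packed"), decorated));
}
#endif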
5949 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5950 of ATTR_NAME, and LIST is not NULL_TREE. */
5951 tree
5952 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5953 {
5954 while (list)
5955 {
5956 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5957
5958 if (ident_len == attr_len)
5959 {
5960 if (!strcmp (attr_name,
5961 IDENTIFIER_POINTER (get_attribute_name (list))))
5962 break;
5963 }
5964 /* TODO: If we made sure that attributes were stored in the
5965 canonical form without '__...__' (ie, as in 'text' as opposed
5966 to '__text__') then we could avoid the following case. */
5967 else if (ident_len == attr_len + 4)
5968 {
5969 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5970 if (p[0] == '_' && p[1] == '_'
5971 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5972 && strncmp (attr_name, p + 2, attr_len) == 0)
5973 break;
5974 }
5975 list = TREE_CHAIN (list);
5976 }
5977
5978 return list;
5979 }
5980
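/* Illustrative sketch, not part of the original source: callers normally go
   through the lookup_attribute wrapper in tree.h, which supplies ATTR_LEN
   and skips empty lists.  FNDECL is a hypothetical function decl.  */
#if 0
static bool
example_is_always_inline (tree fndecl)
{
  return lookup_attribute ("always_inline",
			   DECL_ATTRIBUTES (fndecl)) != NULL_TREE;
}
#endif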
5981 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
 5982    return a pointer to the first element of LIST whose attribute name
5983 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5984 '__text__'). */
5985
5986 tree
5987 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5988 tree list)
5989 {
5990 while (list)
5991 {
5992 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5993
5994 if (attr_len > ident_len)
5995 {
5996 list = TREE_CHAIN (list);
5997 continue;
5998 }
5999
6000 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6001
6002 if (strncmp (attr_name, p, attr_len) == 0)
6003 break;
6004
6005 /* TODO: If we made sure that attributes were stored in the
6006 canonical form without '__...__' (ie, as in 'text' as opposed
6007 to '__text__') then we could avoid the following case. */
 6008       if (p[0] == '_' && p[1] == '_'
 6009 	  && strncmp (attr_name, p + 2, attr_len) == 0)
6010 break;
6011
6012 list = TREE_CHAIN (list);
6013 }
6014
6015 return list;
6016 }
6017
6018
6019 /* A variant of lookup_attribute() that can be used with an identifier
6020 as the first argument, and where the identifier can be either
6021 'text' or '__text__'.
6022
6023 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6024 return a pointer to the attribute's list element if the attribute
6025 is part of the list, or NULL_TREE if not found. If the attribute
6026 appears more than once, this only returns the first occurrence; the
6027 TREE_CHAIN of the return value should be passed back in if further
6028 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6029 can be in the form 'text' or '__text__'. */
6030 static tree
6031 lookup_ident_attribute (tree attr_identifier, tree list)
6032 {
6033 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6034
6035 while (list)
6036 {
6037 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6038 == IDENTIFIER_NODE);
6039
6040 /* Identifiers can be compared directly for equality. */
6041 if (attr_identifier == get_attribute_name (list))
6042 break;
6043
6044 /* If they are not equal, they may still be one in the form
6045 'text' while the other one is in the form '__text__'. TODO:
6046 If we were storing attributes in normalized 'text' form, then
6047 this could all go away and we could take full advantage of
6048 the fact that we're comparing identifiers. :-) */
6049 {
6050 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
6051 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6052
6053 if (ident_len == attr_len + 4)
6054 {
6055 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6056 const char *q = IDENTIFIER_POINTER (attr_identifier);
6057 if (p[0] == '_' && p[1] == '_'
6058 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6059 && strncmp (q, p + 2, attr_len) == 0)
6060 break;
6061 }
6062 else if (ident_len + 4 == attr_len)
6063 {
6064 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6065 const char *q = IDENTIFIER_POINTER (attr_identifier);
6066 if (q[0] == '_' && q[1] == '_'
6067 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
6068 && strncmp (q + 2, p, ident_len) == 0)
6069 break;
6070 }
6071 }
6072 list = TREE_CHAIN (list);
6073 }
6074
6075 return list;
6076 }
6077
6078 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6079 modified list. */
6080
6081 tree
6082 remove_attribute (const char *attr_name, tree list)
6083 {
6084 tree *p;
6085 size_t attr_len = strlen (attr_name);
6086
6087 gcc_checking_assert (attr_name[0] != '_');
6088
6089 for (p = &list; *p; )
6090 {
6091 tree l = *p;
6092 /* TODO: If we were storing attributes in normalized form, here
6093 we could use a simple strcmp(). */
6094 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6095 *p = TREE_CHAIN (l);
6096 else
6097 p = &TREE_CHAIN (l);
6098 }
6099
6100 return list;
6101 }
6102
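/* Illustrative sketch, not part of the original source: strip every
   "deprecated" attribute, in either spelling, from a decl's attribute
   list.  DECL is a hypothetical declaration.  */
#if 0
static void
example_strip_deprecated (tree decl)
{
  DECL_ATTRIBUTES (decl)
    = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));
}
#endif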
6103 /* Return an attribute list that is the union of a1 and a2. */
6104
6105 tree
6106 merge_attributes (tree a1, tree a2)
6107 {
6108 tree attributes;
6109
6110 /* Either one unset? Take the set one. */
6111
6112 if ((attributes = a1) == 0)
6113 attributes = a2;
6114
6115 /* One that completely contains the other? Take it. */
6116
6117 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6118 {
6119 if (attribute_list_contained (a2, a1))
6120 attributes = a2;
6121 else
6122 {
6123 /* Pick the longest list, and hang on the other list. */
6124
6125 if (list_length (a1) < list_length (a2))
6126 attributes = a2, a2 = a1;
6127
6128 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6129 {
6130 tree a;
6131 for (a = lookup_ident_attribute (get_attribute_name (a2),
6132 attributes);
6133 a != NULL_TREE && !attribute_value_equal (a, a2);
6134 a = lookup_ident_attribute (get_attribute_name (a2),
6135 TREE_CHAIN (a)))
6136 ;
6137 if (a == NULL_TREE)
6138 {
6139 a1 = copy_node (a2);
6140 TREE_CHAIN (a1) = attributes;
6141 attributes = a1;
6142 }
6143 }
6144 }
6145 }
6146 return attributes;
6147 }
6148
6149 /* Given types T1 and T2, merge their attributes and return
6150 the result. */
6151
6152 tree
6153 merge_type_attributes (tree t1, tree t2)
6154 {
6155 return merge_attributes (TYPE_ATTRIBUTES (t1),
6156 TYPE_ATTRIBUTES (t2));
6157 }
6158
6159 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6160 the result. */
6161
6162 tree
6163 merge_decl_attributes (tree olddecl, tree newdecl)
6164 {
6165 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6166 DECL_ATTRIBUTES (newdecl));
6167 }
6168
6169 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6170
6171 /* Specialization of merge_decl_attributes for various Windows targets.
6172
6173 This handles the following situation:
6174
6175 __declspec (dllimport) int foo;
6176 int foo;
6177
6178 The second instance of `foo' nullifies the dllimport. */
6179
6180 tree
6181 merge_dllimport_decl_attributes (tree old, tree new_tree)
6182 {
6183 tree a;
6184 int delete_dllimport_p = 1;
6185
6186 /* What we need to do here is remove from `old' dllimport if it doesn't
6187 appear in `new'. dllimport behaves like extern: if a declaration is
6188 marked dllimport and a definition appears later, then the object
6189 is not dllimport'd. We also remove a `new' dllimport if the old list
6190 contains dllexport: dllexport always overrides dllimport, regardless
6191 of the order of declaration. */
6192 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6193 delete_dllimport_p = 0;
6194 else if (DECL_DLLIMPORT_P (new_tree)
6195 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6196 {
6197 DECL_DLLIMPORT_P (new_tree) = 0;
6198 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6199 "dllimport ignored", new_tree);
6200 }
6201 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6202 {
6203 /* Warn about overriding a symbol that has already been used, e.g.:
6204 extern int __attribute__ ((dllimport)) foo;
6205 int* bar () {return &foo;}
6206 int foo;
6207 */
6208 if (TREE_USED (old))
6209 {
6210 warning (0, "%q+D redeclared without dllimport attribute "
6211 "after being referenced with dll linkage", new_tree);
6212 /* If we have used a variable's address with dllimport linkage,
6213 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6214 decl may already have had TREE_CONSTANT computed.
6215 We still remove the attribute so that assembler code refers
 6216 	     to '&foo' rather than '_imp__foo'.  */
6217 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6218 DECL_DLLIMPORT_P (new_tree) = 1;
6219 }
6220
6221 /* Let an inline definition silently override the external reference,
6222 but otherwise warn about attribute inconsistency. */
6223 else if (TREE_CODE (new_tree) == VAR_DECL
6224 || !DECL_DECLARED_INLINE_P (new_tree))
6225 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6226 "previous dllimport ignored", new_tree);
6227 }
6228 else
6229 delete_dllimport_p = 0;
6230
6231 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6232
6233 if (delete_dllimport_p)
6234 a = remove_attribute ("dllimport", a);
6235
6236 return a;
6237 }
6238
6239 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6240 struct attribute_spec.handler. */
6241
6242 tree
6243 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6244 bool *no_add_attrs)
6245 {
6246 tree node = *pnode;
6247 bool is_dllimport;
6248
6249 /* These attributes may apply to structure and union types being created,
6250 but otherwise should pass to the declaration involved. */
6251 if (!DECL_P (node))
6252 {
6253 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6254 | (int) ATTR_FLAG_ARRAY_NEXT))
6255 {
6256 *no_add_attrs = true;
6257 return tree_cons (name, args, NULL_TREE);
6258 }
6259 if (TREE_CODE (node) == RECORD_TYPE
6260 || TREE_CODE (node) == UNION_TYPE)
6261 {
6262 node = TYPE_NAME (node);
6263 if (!node)
6264 return NULL_TREE;
6265 }
6266 else
6267 {
6268 warning (OPT_Wattributes, "%qE attribute ignored",
6269 name);
6270 *no_add_attrs = true;
6271 return NULL_TREE;
6272 }
6273 }
6274
6275 if (TREE_CODE (node) != FUNCTION_DECL
6276 && TREE_CODE (node) != VAR_DECL
6277 && TREE_CODE (node) != TYPE_DECL)
6278 {
6279 *no_add_attrs = true;
6280 warning (OPT_Wattributes, "%qE attribute ignored",
6281 name);
6282 return NULL_TREE;
6283 }
6284
6285 if (TREE_CODE (node) == TYPE_DECL
6286 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6287 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6288 {
6289 *no_add_attrs = true;
6290 warning (OPT_Wattributes, "%qE attribute ignored",
6291 name);
6292 return NULL_TREE;
6293 }
6294
6295 is_dllimport = is_attribute_p ("dllimport", name);
6296
6297 /* Report error on dllimport ambiguities seen now before they cause
6298 any damage. */
6299 if (is_dllimport)
6300 {
6301 /* Honor any target-specific overrides. */
6302 if (!targetm.valid_dllimport_attribute_p (node))
6303 *no_add_attrs = true;
6304
6305 else if (TREE_CODE (node) == FUNCTION_DECL
6306 && DECL_DECLARED_INLINE_P (node))
6307 {
6308 warning (OPT_Wattributes, "inline function %q+D declared as "
6309 " dllimport: attribute ignored", node);
6310 *no_add_attrs = true;
6311 }
6312 /* Like MS, treat definition of dllimported variables and
6313 non-inlined functions on declaration as syntax errors. */
6314 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6315 {
6316 error ("function %q+D definition is marked dllimport", node);
6317 *no_add_attrs = true;
6318 }
6319
6320 else if (TREE_CODE (node) == VAR_DECL)
6321 {
6322 if (DECL_INITIAL (node))
6323 {
6324 error ("variable %q+D definition is marked dllimport",
6325 node);
6326 *no_add_attrs = true;
6327 }
6328
6329 /* `extern' needn't be specified with dllimport.
6330 Specify `extern' now and hope for the best. Sigh. */
6331 DECL_EXTERNAL (node) = 1;
 6332 	  /* Also, implicitly give global scope to dllimport'd variables
 6333 	     declared within a function, unless they are declared static.  */
6334 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6335 TREE_PUBLIC (node) = 1;
6336 }
6337
 6338       if (!*no_add_attrs)
6339 DECL_DLLIMPORT_P (node) = 1;
6340 }
6341 else if (TREE_CODE (node) == FUNCTION_DECL
6342 && DECL_DECLARED_INLINE_P (node)
6343 && flag_keep_inline_dllexport)
6344 /* An exported function, even if inline, must be emitted. */
6345 DECL_EXTERNAL (node) = 0;
6346
6347 /* Report error if symbol is not accessible at global scope. */
6348 if (!TREE_PUBLIC (node)
6349 && (TREE_CODE (node) == VAR_DECL
6350 || TREE_CODE (node) == FUNCTION_DECL))
6351 {
6352 error ("external linkage required for symbol %q+D because of "
6353 "%qE attribute", node, name);
6354 *no_add_attrs = true;
6355 }
6356
6357 /* A dllexport'd entity must have default visibility so that other
6358 program units (shared libraries or the main executable) can see
6359 it. A dllimport'd entity must have default visibility so that
6360 the linker knows that undefined references within this program
6361 unit can be resolved by the dynamic linker. */
6362 if (!*no_add_attrs)
6363 {
6364 if (DECL_VISIBILITY_SPECIFIED (node)
6365 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6366 error ("%qE implies default visibility, but %qD has already "
6367 "been declared with a different visibility",
6368 name, node);
6369 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6370 DECL_VISIBILITY_SPECIFIED (node) = 1;
6371 }
6372
6373 return NULL_TREE;
6374 }
6375
6376 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6377 \f
6378 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6379 of the various TYPE_QUAL values. */
6380
6381 static void
6382 set_type_quals (tree type, int type_quals)
6383 {
6384 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6385 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6386 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6387 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6388 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6389 }
6390
6391 /* Returns true iff unqualified CAND and BASE are equivalent. */
6392
6393 bool
6394 check_base_type (const_tree cand, const_tree base)
6395 {
6396 return (TYPE_NAME (cand) == TYPE_NAME (base)
6397 /* Apparently this is needed for Objective-C. */
6398 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6399 /* Check alignment. */
6400 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6401 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6402 TYPE_ATTRIBUTES (base)));
6403 }
6404
6405 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6406
6407 bool
6408 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6409 {
6410 return (TYPE_QUALS (cand) == type_quals
6411 && check_base_type (cand, base));
6412 }
6413
6414 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6415
6416 static bool
6417 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6418 {
6419 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6420 && TYPE_NAME (cand) == TYPE_NAME (base)
6421 /* Apparently this is needed for Objective-C. */
6422 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6423 /* Check alignment. */
6424 && TYPE_ALIGN (cand) == align
6425 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6426 TYPE_ATTRIBUTES (base)));
6427 }
6428
 6429 /* This function checks to see if TYPE matches the size of one of the
 6430    built-in atomic types, and returns that core atomic type.  */
6431
6432 static tree
6433 find_atomic_core_type (tree type)
6434 {
6435 tree base_atomic_type;
6436
6437 /* Only handle complete types. */
6438 if (TYPE_SIZE (type) == NULL_TREE)
6439 return NULL_TREE;
6440
6441 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6442 switch (type_size)
6443 {
6444 case 8:
6445 base_atomic_type = atomicQI_type_node;
6446 break;
6447
6448 case 16:
6449 base_atomic_type = atomicHI_type_node;
6450 break;
6451
6452 case 32:
6453 base_atomic_type = atomicSI_type_node;
6454 break;
6455
6456 case 64:
6457 base_atomic_type = atomicDI_type_node;
6458 break;
6459
6460 case 128:
6461 base_atomic_type = atomicTI_type_node;
6462 break;
6463
6464 default:
6465 base_atomic_type = NULL_TREE;
6466 }
6467
6468 return base_atomic_type;
6469 }
6470
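/* Illustrative sketch, not part of the original source: on a target where
   int is 32 bits wide this returns atomicSI_type_node; for an incomplete
   type or an unusual size the result is NULL_TREE.  */
#if 0
static tree
example_atomic_core_of_int (void)
{
  return find_atomic_core_type (integer_type_node);
}
#endif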
6471 /* Return a version of the TYPE, qualified as indicated by the
6472 TYPE_QUALS, if one exists. If no qualified version exists yet,
6473 return NULL_TREE. */
6474
6475 tree
6476 get_qualified_type (tree type, int type_quals)
6477 {
6478 tree t;
6479
6480 if (TYPE_QUALS (type) == type_quals)
6481 return type;
6482
6483 /* Search the chain of variants to see if there is already one there just
6484 like the one we need to have. If so, use that existing one. We must
6485 preserve the TYPE_NAME, since there is code that depends on this. */
6486 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6487 if (check_qualified_type (t, type, type_quals))
6488 return t;
6489
6490 return NULL_TREE;
6491 }
6492
6493 /* Like get_qualified_type, but creates the type if it does not
6494 exist. This function never returns NULL_TREE. */
6495
6496 tree
6497 build_qualified_type (tree type, int type_quals)
6498 {
6499 tree t;
6500
6501 /* See if we already have the appropriate qualified variant. */
6502 t = get_qualified_type (type, type_quals);
6503
6504 /* If not, build it. */
6505 if (!t)
6506 {
6507 t = build_variant_type_copy (type);
6508 set_type_quals (t, type_quals);
6509
 6510       if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6511 {
6512 /* See if this object can map to a basic atomic type. */
6513 tree atomic_type = find_atomic_core_type (type);
6514 if (atomic_type)
6515 {
6516 /* Ensure the alignment of this type is compatible with
6517 the required alignment of the atomic type. */
6518 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6519 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6520 }
6521 }
6522
6523 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6524 /* Propagate structural equality. */
6525 SET_TYPE_STRUCTURAL_EQUALITY (t);
6526 else if (TYPE_CANONICAL (type) != type)
6527 /* Build the underlying canonical type, since it is different
6528 from TYPE. */
6529 {
6530 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6531 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6532 }
6533 else
6534 /* T is its own canonical type. */
6535 TYPE_CANONICAL (t) = t;
6536
6537 }
6538
6539 return t;
6540 }
6541
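/* Illustrative sketch, not part of the original source: the first request
   for a const-qualified int builds a variant, and a second identical
   request is satisfied from the variant chain.  */
#if 0
static void
example_qualified_variants (void)
{
  tree cint = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  /* get_qualified_type finds the variant built above.  */
  gcc_assert (build_qualified_type (integer_type_node, TYPE_QUAL_CONST)
	      == cint);
}
#endif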
 6542 /* Create a variant of TYPE with alignment ALIGN.  */
6543
6544 tree
6545 build_aligned_type (tree type, unsigned int align)
6546 {
6547 tree t;
6548
6549 if (TYPE_PACKED (type)
6550 || TYPE_ALIGN (type) == align)
6551 return type;
6552
6553 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6554 if (check_aligned_type (t, type, align))
6555 return t;
6556
6557 t = build_variant_type_copy (type);
6558 TYPE_ALIGN (t) = align;
6559
6560 return t;
6561 }
6562
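/* Illustrative sketch, not part of the original source: request an
   over-aligned variant of int.  ALIGN is given in bits, and packed types
   are returned unchanged.  */
#if 0
static tree
example_overaligned_int (void)
{
  return build_aligned_type (integer_type_node, 128);
}
#endif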
6563 /* Create a new distinct copy of TYPE. The new type is made its own
6564 MAIN_VARIANT. If TYPE requires structural equality checks, the
6565 resulting type requires structural equality checks; otherwise, its
6566 TYPE_CANONICAL points to itself. */
6567
6568 tree
6569 build_distinct_type_copy (tree type)
6570 {
6571 tree t = copy_node (type);
6572
6573 TYPE_POINTER_TO (t) = 0;
6574 TYPE_REFERENCE_TO (t) = 0;
6575
6576 /* Set the canonical type either to a new equivalence class, or
6577 propagate the need for structural equality checks. */
6578 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6579 SET_TYPE_STRUCTURAL_EQUALITY (t);
6580 else
6581 TYPE_CANONICAL (t) = t;
6582
6583 /* Make it its own variant. */
6584 TYPE_MAIN_VARIANT (t) = t;
6585 TYPE_NEXT_VARIANT (t) = 0;
6586
 6587   /* We do not record methods in type copies or variants,
 6588      so we do not need to keep them up to date when a new
 6589      method is inserted.  */
6590 if (RECORD_OR_UNION_TYPE_P (t))
6591 TYPE_METHODS (t) = NULL_TREE;
6592
6593 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6594 whose TREE_TYPE is not t. This can also happen in the Ada
6595 frontend when using subtypes. */
6596
6597 return t;
6598 }
6599
6600 /* Create a new variant of TYPE, equivalent but distinct. This is so
6601 the caller can modify it. TYPE_CANONICAL for the return type will
6602 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6603 are considered equal by the language itself (or that both types
6604 require structural equality checks). */
6605
6606 tree
6607 build_variant_type_copy (tree type)
6608 {
6609 tree t, m = TYPE_MAIN_VARIANT (type);
6610
6611 t = build_distinct_type_copy (type);
6612
6613 /* Since we're building a variant, assume that it is a non-semantic
6614 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6615 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6616
6617 /* Add the new type to the chain of variants of TYPE. */
6618 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6619 TYPE_NEXT_VARIANT (m) = t;
6620 TYPE_MAIN_VARIANT (t) = m;
6621
6622 return t;
6623 }
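/* Illustrative sketch, not part of the original source: a variant copy
   stays on the original main-variant chain, whereas a distinct copy
   becomes its own main variant.  */
#if 0
static void
example_type_copies (tree type)
{
  tree variant = build_variant_type_copy (type);
  tree distinct = build_distinct_type_copy (type);
  gcc_assert (TYPE_MAIN_VARIANT (variant) == TYPE_MAIN_VARIANT (type));
  gcc_assert (TYPE_MAIN_VARIANT (distinct) == distinct);
}
#endif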
6624 \f
 6625 /* Return true if the from trees in both tree maps are equal.  */
6626
6627 int
6628 tree_map_base_eq (const void *va, const void *vb)
6629 {
6630 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6631 *const b = (const struct tree_map_base *) vb;
6632 return (a->from == b->from);
6633 }
6634
 6635 /* Hash a from tree in a tree_map_base.  */
6636
6637 unsigned int
6638 tree_map_base_hash (const void *item)
6639 {
6640 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6641 }
6642
6643 /* Return true if this tree map structure is marked for garbage collection
6644 purposes. We simply return true if the from tree is marked, so that this
6645 structure goes away when the from tree goes away. */
6646
6647 int
6648 tree_map_base_marked_p (const void *p)
6649 {
6650 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6651 }
6652
6653 /* Hash a from tree in a tree_map. */
6654
6655 unsigned int
6656 tree_map_hash (const void *item)
6657 {
6658 return (((const struct tree_map *) item)->hash);
6659 }
6660
6661 /* Hash a from tree in a tree_decl_map. */
6662
6663 unsigned int
6664 tree_decl_map_hash (const void *item)
6665 {
6666 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6667 }
6668
6669 /* Return the initialization priority for DECL. */
6670
6671 priority_type
6672 decl_init_priority_lookup (tree decl)
6673 {
6674 symtab_node *snode = symtab_node::get (decl);
6675
6676 if (!snode)
6677 return DEFAULT_INIT_PRIORITY;
6678 return
6679 snode->get_init_priority ();
6680 }
6681
6682 /* Return the finalization priority for DECL. */
6683
6684 priority_type
6685 decl_fini_priority_lookup (tree decl)
6686 {
6687 cgraph_node *node = cgraph_node::get (decl);
6688
6689 if (!node)
6690 return DEFAULT_INIT_PRIORITY;
6691 return
6692 node->get_fini_priority ();
6693 }
6694
6695 /* Set the initialization priority for DECL to PRIORITY. */
6696
6697 void
6698 decl_init_priority_insert (tree decl, priority_type priority)
6699 {
6700 struct symtab_node *snode;
6701
6702 if (priority == DEFAULT_INIT_PRIORITY)
6703 {
6704 snode = symtab_node::get (decl);
6705 if (!snode)
6706 return;
6707 }
6708 else if (TREE_CODE (decl) == VAR_DECL)
6709 snode = varpool_node::get_create (decl);
6710 else
6711 snode = cgraph_node::get_create (decl);
6712 snode->set_init_priority (priority);
6713 }
6714
6715 /* Set the finalization priority for DECL to PRIORITY. */
6716
6717 void
6718 decl_fini_priority_insert (tree decl, priority_type priority)
6719 {
6720 struct cgraph_node *node;
6721
6722 if (priority == DEFAULT_INIT_PRIORITY)
6723 {
6724 node = cgraph_node::get (decl);
6725 if (!node)
6726 return;
6727 }
6728 else
6729 node = cgraph_node::get_create (decl);
6730 node->set_fini_priority (priority);
6731 }
6732
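/* Illustrative sketch, not part of the original source: record a
   non-default constructor priority for DECL and read it back.  The value
   101 is hypothetical.  */
#if 0
static void
example_set_ctor_priority (tree decl)
{
  decl_init_priority_insert (decl, 101);
  gcc_assert (decl_init_priority_lookup (decl) == 101);
}
#endif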
6733 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6734
6735 static void
6736 print_debug_expr_statistics (void)
6737 {
6738 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6739 (long) debug_expr_for_decl->size (),
6740 (long) debug_expr_for_decl->elements (),
6741 debug_expr_for_decl->collisions ());
6742 }
6743
6744 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6745
6746 static void
6747 print_value_expr_statistics (void)
6748 {
6749 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6750 (long) value_expr_for_decl->size (),
6751 (long) value_expr_for_decl->elements (),
6752 value_expr_for_decl->collisions ());
6753 }
6754
6755 /* Lookup a debug expression for FROM, and return it if we find one. */
6756
6757 tree
6758 decl_debug_expr_lookup (tree from)
6759 {
6760 struct tree_decl_map *h, in;
6761 in.base.from = from;
6762
6763 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6764 if (h)
6765 return h->to;
6766 return NULL_TREE;
6767 }
6768
6769 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6770
6771 void
6772 decl_debug_expr_insert (tree from, tree to)
6773 {
6774 struct tree_decl_map *h;
6775
6776 h = ggc_alloc<tree_decl_map> ();
6777 h->base.from = from;
6778 h->to = to;
6779 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6780 }
6781
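/* Illustrative sketch, not part of the original source: the mapping is
   keyed on DECL_UID, so an insert followed by a lookup of the same decl
   returns the expression just stored.  */
#if 0
static void
example_debug_expr_round_trip (tree decl, tree expr)
{
  decl_debug_expr_insert (decl, expr);
  gcc_assert (decl_debug_expr_lookup (decl) == expr);
}
#endif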
6782 /* Lookup a value expression for FROM, and return it if we find one. */
6783
6784 tree
6785 decl_value_expr_lookup (tree from)
6786 {
6787 struct tree_decl_map *h, in;
6788 in.base.from = from;
6789
6790 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6791 if (h)
6792 return h->to;
6793 return NULL_TREE;
6794 }
6795
6796 /* Insert a mapping FROM->TO in the value expression hashtable. */
6797
6798 void
6799 decl_value_expr_insert (tree from, tree to)
6800 {
6801 struct tree_decl_map *h;
6802
6803 h = ggc_alloc<tree_decl_map> ();
6804 h->base.from = from;
6805 h->to = to;
6806 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6807 }
6808
6809 /* Lookup a vector of debug arguments for FROM, and return it if we
6810 find one. */
6811
6812 vec<tree, va_gc> **
6813 decl_debug_args_lookup (tree from)
6814 {
6815 struct tree_vec_map *h, in;
6816
6817 if (!DECL_HAS_DEBUG_ARGS_P (from))
6818 return NULL;
6819 gcc_checking_assert (debug_args_for_decl != NULL);
6820 in.base.from = from;
6821 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6822 if (h)
6823 return &h->to;
6824 return NULL;
6825 }
6826
 6827 /* Insert a mapping FROM->empty vector of debug arguments in the debug
 6828    arguments hashtable.  */
6829
6830 vec<tree, va_gc> **
6831 decl_debug_args_insert (tree from)
6832 {
6833 struct tree_vec_map *h;
6834 tree_vec_map **loc;
6835
6836 if (DECL_HAS_DEBUG_ARGS_P (from))
6837 return decl_debug_args_lookup (from);
6838 if (debug_args_for_decl == NULL)
6839 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6840 h = ggc_alloc<tree_vec_map> ();
6841 h->base.from = from;
6842 h->to = NULL;
6843 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6844 *loc = h;
6845 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6846 return &h->to;
6847 }
6848
6849 /* Hashing of types so that we don't make duplicates.
6850 The entry point is `type_hash_canon'. */
6851
6852 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6853 with types in the TREE_VALUE slots), by adding the hash codes
6854 of the individual types. */
6855
6856 static void
6857 type_hash_list (const_tree list, inchash::hash &hstate)
6858 {
6859 const_tree tail;
6860
6861 for (tail = list; tail; tail = TREE_CHAIN (tail))
6862 if (TREE_VALUE (tail) != error_mark_node)
6863 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6864 }
6865
6866 /* These are the Hashtable callback functions. */
6867
6868 /* Returns true iff the types are equivalent. */
6869
6870 bool
6871 type_cache_hasher::equal (type_hash *a, type_hash *b)
6872 {
6873 /* First test the things that are the same for all types. */
6874 if (a->hash != b->hash
6875 || TREE_CODE (a->type) != TREE_CODE (b->type)
6876 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6877 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6878 TYPE_ATTRIBUTES (b->type))
6879 || (TREE_CODE (a->type) != COMPLEX_TYPE
6880 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6881 return 0;
6882
6883 /* Be careful about comparing arrays before and after the element type
6884 has been completed; don't compare TYPE_ALIGN unless both types are
6885 complete. */
6886 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6887 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6888 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6889 return 0;
6890
6891 switch (TREE_CODE (a->type))
6892 {
6893 case VOID_TYPE:
6894 case COMPLEX_TYPE:
6895 case POINTER_TYPE:
6896 case REFERENCE_TYPE:
6897 case NULLPTR_TYPE:
6898 return 1;
6899
6900 case VECTOR_TYPE:
6901 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6902
6903 case ENUMERAL_TYPE:
6904 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6905 && !(TYPE_VALUES (a->type)
6906 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6907 && TYPE_VALUES (b->type)
6908 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6909 && type_list_equal (TYPE_VALUES (a->type),
6910 TYPE_VALUES (b->type))))
6911 return 0;
6912
6913 /* ... fall through ... */
6914
6915 case INTEGER_TYPE:
6916 case REAL_TYPE:
6917 case BOOLEAN_TYPE:
6918 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6919 return false;
6920 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6921 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6922 TYPE_MAX_VALUE (b->type)))
6923 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6924 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6925 TYPE_MIN_VALUE (b->type))));
6926
6927 case FIXED_POINT_TYPE:
6928 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6929
6930 case OFFSET_TYPE:
6931 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6932
6933 case METHOD_TYPE:
6934 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6935 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6936 || (TYPE_ARG_TYPES (a->type)
6937 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6938 && TYPE_ARG_TYPES (b->type)
6939 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6940 && type_list_equal (TYPE_ARG_TYPES (a->type),
6941 TYPE_ARG_TYPES (b->type)))))
6942 break;
6943 return 0;
6944 case ARRAY_TYPE:
6945 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6946
6947 case RECORD_TYPE:
6948 case UNION_TYPE:
6949 case QUAL_UNION_TYPE:
6950 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6951 || (TYPE_FIELDS (a->type)
6952 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6953 && TYPE_FIELDS (b->type)
6954 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6955 && type_list_equal (TYPE_FIELDS (a->type),
6956 TYPE_FIELDS (b->type))));
6957
6958 case FUNCTION_TYPE:
6959 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6960 || (TYPE_ARG_TYPES (a->type)
6961 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6962 && TYPE_ARG_TYPES (b->type)
6963 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6964 && type_list_equal (TYPE_ARG_TYPES (a->type),
6965 TYPE_ARG_TYPES (b->type))))
6966 break;
6967 return 0;
6968
6969 default:
6970 return 0;
6971 }
6972
6973 if (lang_hooks.types.type_hash_eq != NULL)
6974 return lang_hooks.types.type_hash_eq (a->type, b->type);
6975
6976 return 1;
6977 }
6978
6979 /* Given TYPE, and HASHCODE its hash code, return the canonical
6980 object for an identical type if one already exists.
6981 Otherwise, return TYPE, and record it as the canonical object.
6982
6983 To use this function, first create a type of the sort you want.
6984 Then compute its hash code from the fields of the type that
6985 make it different from other similar types.
6986 Then call this function and use the value. */
6987
6988 tree
6989 type_hash_canon (unsigned int hashcode, tree type)
6990 {
6991 type_hash in;
6992 type_hash **loc;
6993
6994 /* The hash table only contains main variants, so ensure that's what we're
6995 being passed. */
6996 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6997
6998 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6999 must call that routine before comparing TYPE_ALIGNs. */
7000 layout_type (type);
7001
7002 in.hash = hashcode;
7003 in.type = type;
7004
7005 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7006 if (*loc)
7007 {
7008 tree t1 = ((type_hash *) *loc)->type;
7009 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7010 if (GATHER_STATISTICS)
7011 {
7012 tree_code_counts[(int) TREE_CODE (type)]--;
7013 tree_node_counts[(int) t_kind]--;
7014 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
7015 }
7016 return t1;
7017 }
7018 else
7019 {
7020 struct type_hash *h;
7021
7022 h = ggc_alloc<type_hash> ();
7023 h->hash = hashcode;
7024 h->type = type;
7025 *loc = h;
7026
7027 return type;
7028 }
7029 }
7030
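/* Illustrative sketch, not part of the original source: the usual calling
   pattern is to build a candidate main-variant type, hash the fields that
   distinguish it, and let type_hash_canon either return an existing
   identical type or record the new one.  The fields hashed here are only
   an example.  */
#if 0
static tree
example_canonicalize (tree candidate)
{
  inchash::hash hstate;
  enum tree_code code = TREE_CODE (candidate);
  hstate.add_object (code);
  inchash::add_expr (TYPE_SIZE (candidate), hstate);
  return type_hash_canon (hstate.end (), candidate);
}
#endif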
7031 static void
7032 print_type_hash_statistics (void)
7033 {
7034 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7035 (long) type_hash_table->size (),
7036 (long) type_hash_table->elements (),
7037 type_hash_table->collisions ());
7038 }
7039
7040 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7041 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7042 by adding the hash codes of the individual attributes. */
7043
7044 static void
7045 attribute_hash_list (const_tree list, inchash::hash &hstate)
7046 {
7047 const_tree tail;
7048
7049 for (tail = list; tail; tail = TREE_CHAIN (tail))
7050 /* ??? Do we want to add in TREE_VALUE too? */
7051 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7052 }
7053
 7054 /* Given two lists of attributes, return true if list L2 is
 7055    equivalent to L1.  */
7056
7057 int
7058 attribute_list_equal (const_tree l1, const_tree l2)
7059 {
7060 if (l1 == l2)
7061 return 1;
7062
7063 return attribute_list_contained (l1, l2)
7064 && attribute_list_contained (l2, l1);
7065 }
7066
7067 /* Given two lists of attributes, return true if list L2 is
7068 completely contained within L1. */
7069 /* ??? This would be faster if attribute names were stored in a canonicalized
7070 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7071 must be used to show these elements are equivalent (which they are). */
7072 /* ??? It's not clear that attributes with arguments will always be handled
7073 correctly. */
7074
7075 int
7076 attribute_list_contained (const_tree l1, const_tree l2)
7077 {
7078 const_tree t1, t2;
7079
7080 /* First check the obvious, maybe the lists are identical. */
7081 if (l1 == l2)
7082 return 1;
7083
7084 /* Maybe the lists are similar. */
7085 for (t1 = l1, t2 = l2;
7086 t1 != 0 && t2 != 0
7087 && get_attribute_name (t1) == get_attribute_name (t2)
7088 && TREE_VALUE (t1) == TREE_VALUE (t2);
7089 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7090 ;
7091
7092 /* Maybe the lists are equal. */
7093 if (t1 == 0 && t2 == 0)
7094 return 1;
7095
7096 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7097 {
7098 const_tree attr;
7099 /* This CONST_CAST is okay because lookup_attribute does not
7100 modify its argument and the return value is assigned to a
7101 const_tree. */
7102 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7103 CONST_CAST_TREE (l1));
7104 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7105 attr = lookup_ident_attribute (get_attribute_name (t2),
7106 TREE_CHAIN (attr)))
7107 ;
7108
7109 if (attr == NULL_TREE)
7110 return 0;
7111 }
7112
7113 return 1;
7114 }
7115
7116 /* Given two lists of types
7117 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7118 return 1 if the lists contain the same types in the same order.
7119 Also, the TREE_PURPOSEs must match. */
7120
7121 int
7122 type_list_equal (const_tree l1, const_tree l2)
7123 {
7124 const_tree t1, t2;
7125
7126 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7127 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7128 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7129 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7130 && (TREE_TYPE (TREE_PURPOSE (t1))
7131 == TREE_TYPE (TREE_PURPOSE (t2))))))
7132 return 0;
7133
7134 return t1 == t2;
7135 }
7136
7137 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7138 given by TYPE. If the argument list accepts variable arguments,
7139 then this function counts only the ordinary arguments. */
7140
7141 int
7142 type_num_arguments (const_tree type)
7143 {
7144 int i = 0;
7145 tree t;
7146
7147 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7148 /* If the function does not take a variable number of arguments,
7149 the last element in the list will have type `void'. */
7150 if (VOID_TYPE_P (TREE_VALUE (t)))
7151 break;
7152 else
7153 ++i;
7154
7155 return i;
7156 }
7157
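/* Illustrative sketch, not part of the original source: for the type of
   int f (int, double) the count is 2; the terminating void marker added
   by build_function_type_list is not an argument.  */
#if 0
static void
example_count_args (void)
{
  tree fntype
    = build_function_type_list (integer_type_node, integer_type_node,
				double_type_node, NULL_TREE);
  gcc_assert (type_num_arguments (fntype) == 2);
}
#endif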
7158 /* Nonzero if integer constants T1 and T2
7159 represent the same constant value. */
7160
7161 int
7162 tree_int_cst_equal (const_tree t1, const_tree t2)
7163 {
7164 if (t1 == t2)
7165 return 1;
7166
7167 if (t1 == 0 || t2 == 0)
7168 return 0;
7169
7170 if (TREE_CODE (t1) == INTEGER_CST
7171 && TREE_CODE (t2) == INTEGER_CST
7172 && wi::to_widest (t1) == wi::to_widest (t2))
7173 return 1;
7174
7175 return 0;
7176 }
7177
7178 /* Return true if T is an INTEGER_CST whose numerical value (extended
7179 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7180
7181 bool
7182 tree_fits_shwi_p (const_tree t)
7183 {
7184 return (t != NULL_TREE
7185 && TREE_CODE (t) == INTEGER_CST
7186 && wi::fits_shwi_p (wi::to_widest (t)));
7187 }
7188
7189 /* Return true if T is an INTEGER_CST whose numerical value (extended
7190 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7191
7192 bool
7193 tree_fits_uhwi_p (const_tree t)
7194 {
7195 return (t != NULL_TREE
7196 && TREE_CODE (t) == INTEGER_CST
7197 && wi::fits_uhwi_p (wi::to_widest (t)));
7198 }
7199
7200 /* T is an INTEGER_CST whose numerical value (extended according to
7201 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7202 HOST_WIDE_INT. */
7203
7204 HOST_WIDE_INT
7205 tree_to_shwi (const_tree t)
7206 {
7207 gcc_assert (tree_fits_shwi_p (t));
7208 return TREE_INT_CST_LOW (t);
7209 }
7210
7211 /* T is an INTEGER_CST whose numerical value (extended according to
7212 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7213 HOST_WIDE_INT. */
7214
7215 unsigned HOST_WIDE_INT
7216 tree_to_uhwi (const_tree t)
7217 {
7218 gcc_assert (tree_fits_uhwi_p (t));
7219 return TREE_INT_CST_LOW (t);
7220 }
7221
7222 /* Return the most significant (sign) bit of T. */
7223
7224 int
7225 tree_int_cst_sign_bit (const_tree t)
7226 {
7227 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7228
7229 return wi::extract_uhwi (t, bitno, 1);
7230 }
7231
7232 /* Return an indication of the sign of the integer constant T.
7233 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7234 Note that -1 will never be returned if T's type is unsigned. */
7235
7236 int
7237 tree_int_cst_sgn (const_tree t)
7238 {
7239 if (wi::eq_p (t, 0))
7240 return 0;
7241 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7242 return 1;
7243 else if (wi::neg_p (t))
7244 return -1;
7245 else
7246 return 1;
7247 }
7248
7249 /* Return the minimum number of bits needed to represent VALUE in a
 7250    signed or unsigned type; SGN says which.  */
7251
7252 unsigned int
7253 tree_int_cst_min_precision (tree value, signop sgn)
7254 {
7255 /* If the value is negative, compute its negative minus 1. The latter
7256 adjustment is because the absolute value of the largest negative value
7257 is one larger than the largest positive value. This is equivalent to
7258 a bit-wise negation, so use that operation instead. */
7259
7260 if (tree_int_cst_sgn (value) < 0)
7261 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7262
7263 /* Return the number of bits needed, taking into account the fact
7264 that we need one more bit for a signed than unsigned type.
7265 If value is 0 or -1, the minimum precision is 1 no matter
 7266      whether SGN is SIGNED or UNSIGNED.  */
7267
7268 if (integer_zerop (value))
7269 return 1;
7270 else
 7271     return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7272 }
7273
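/* Illustrative sketch, not part of the original source: 5 needs three bits
   as an unsigned value and four as a signed one, while -3 needs three
   signed bits because ~(-3) == 2 and floor_log2 (2) + 1 + 1 == 3.  */
#if 0
static void
example_min_precision (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree minus_three = build_int_cst (integer_type_node, -3);
  gcc_assert (tree_int_cst_min_precision (five, UNSIGNED) == 3);
  gcc_assert (tree_int_cst_min_precision (five, SIGNED) == 4);
  gcc_assert (tree_int_cst_min_precision (minus_three, SIGNED) == 3);
}
#endif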
7274 /* Return truthvalue of whether T1 is the same tree structure as T2.
7275 Return 1 if they are the same.
7276 Return 0 if they are understandably different.
7277 Return -1 if either contains tree structure not understood by
7278 this function. */
7279
7280 int
7281 simple_cst_equal (const_tree t1, const_tree t2)
7282 {
7283 enum tree_code code1, code2;
7284 int cmp;
7285 int i;
7286
7287 if (t1 == t2)
7288 return 1;
7289 if (t1 == 0 || t2 == 0)
7290 return 0;
7291
7292 code1 = TREE_CODE (t1);
7293 code2 = TREE_CODE (t2);
7294
7295 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7296 {
7297 if (CONVERT_EXPR_CODE_P (code2)
7298 || code2 == NON_LVALUE_EXPR)
7299 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7300 else
7301 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7302 }
7303
7304 else if (CONVERT_EXPR_CODE_P (code2)
7305 || code2 == NON_LVALUE_EXPR)
7306 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7307
7308 if (code1 != code2)
7309 return 0;
7310
7311 switch (code1)
7312 {
7313 case INTEGER_CST:
7314 return wi::to_widest (t1) == wi::to_widest (t2);
7315
7316 case REAL_CST:
7317 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7318
7319 case FIXED_CST:
7320 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7321
7322 case STRING_CST:
7323 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7324 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7325 TREE_STRING_LENGTH (t1)));
7326
7327 case CONSTRUCTOR:
7328 {
7329 unsigned HOST_WIDE_INT idx;
7330 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7331 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7332
7333 if (vec_safe_length (v1) != vec_safe_length (v2))
7334 return false;
7335
7336 for (idx = 0; idx < vec_safe_length (v1); ++idx)
 7337 	  /* ??? Should we also handle fields here?  */
7338 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7339 return false;
7340 return true;
7341 }
7342
7343 case SAVE_EXPR:
7344 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7345
7346 case CALL_EXPR:
7347 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7348 if (cmp <= 0)
7349 return cmp;
7350 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7351 return 0;
7352 {
7353 const_tree arg1, arg2;
7354 const_call_expr_arg_iterator iter1, iter2;
7355 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7356 arg2 = first_const_call_expr_arg (t2, &iter2);
7357 arg1 && arg2;
7358 arg1 = next_const_call_expr_arg (&iter1),
7359 arg2 = next_const_call_expr_arg (&iter2))
7360 {
7361 cmp = simple_cst_equal (arg1, arg2);
7362 if (cmp <= 0)
7363 return cmp;
7364 }
7365 return arg1 == arg2;
7366 }
7367
7368 case TARGET_EXPR:
7369 /* Special case: if either target is an unallocated VAR_DECL,
7370 it means that it's going to be unified with whatever the
7371 TARGET_EXPR is really supposed to initialize, so treat it
7372 as being equivalent to anything. */
7373 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7374 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7375 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7376 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7377 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7378 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7379 cmp = 1;
7380 else
7381 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7382
7383 if (cmp <= 0)
7384 return cmp;
7385
7386 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7387
7388 case WITH_CLEANUP_EXPR:
7389 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7390 if (cmp <= 0)
7391 return cmp;
7392
 7393       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7394
7395 case COMPONENT_REF:
7396 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7397 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7398
7399 return 0;
7400
7401 case VAR_DECL:
7402 case PARM_DECL:
7403 case CONST_DECL:
7404 case FUNCTION_DECL:
7405 return 0;
7406
7407 default:
7408 break;
7409 }
7410
7411 /* This general rule works for most tree codes. All exceptions should be
7412 handled above. If this is a language-specific tree code, we can't
7413 trust what might be in the operand, so say we don't know
7414 the situation. */
7415 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7416 return -1;
7417
7418 switch (TREE_CODE_CLASS (code1))
7419 {
7420 case tcc_unary:
7421 case tcc_binary:
7422 case tcc_comparison:
7423 case tcc_expression:
7424 case tcc_reference:
7425 case tcc_statement:
7426 cmp = 1;
7427 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7428 {
7429 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7430 if (cmp <= 0)
7431 return cmp;
7432 }
7433
7434 return cmp;
7435
7436 default:
7437 return -1;
7438 }
7439 }
7440
7441 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7442 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7443 than U, respectively. */
7444
7445 int
7446 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7447 {
7448 if (tree_int_cst_sgn (t) < 0)
7449 return -1;
7450 else if (!tree_fits_uhwi_p (t))
7451 return 1;
7452 else if (TREE_INT_CST_LOW (t) == u)
7453 return 0;
7454 else if (TREE_INT_CST_LOW (t) < u)
7455 return -1;
7456 else
7457 return 1;
7458 }
7459
7460 /* Return true if SIZE represents a constant size that is in bounds of
 7461    what the middle-end and the backend accept (covering not more than
7462 half of the address-space). */
7463
7464 bool
7465 valid_constant_size_p (const_tree size)
7466 {
7467 if (! tree_fits_uhwi_p (size)
7468 || TREE_OVERFLOW (size)
7469 || tree_int_cst_sign_bit (size) != 0)
7470 return false;
7471 return true;
7472 }
7473
7474 /* Return the precision of the type, or for a complex or vector type the
7475 precision of the type of its elements. */
7476
7477 unsigned int
7478 element_precision (const_tree type)
7479 {
7480 enum tree_code code = TREE_CODE (type);
7481 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7482 type = TREE_TYPE (type);
7483
7484 return TYPE_PRECISION (type);
7485 }
7486
7487 /* Return true if CODE represents an associative tree code. Otherwise
7488 return false. */
7489 bool
7490 associative_tree_code (enum tree_code code)
7491 {
7492 switch (code)
7493 {
7494 case BIT_IOR_EXPR:
7495 case BIT_AND_EXPR:
7496 case BIT_XOR_EXPR:
7497 case PLUS_EXPR:
7498 case MULT_EXPR:
7499 case MIN_EXPR:
7500 case MAX_EXPR:
7501 return true;
7502
7503 default:
7504 break;
7505 }
7506 return false;
7507 }
7508
7509 /* Return true if CODE represents a commutative tree code. Otherwise
7510 return false. */
7511 bool
7512 commutative_tree_code (enum tree_code code)
7513 {
7514 switch (code)
7515 {
7516 case PLUS_EXPR:
7517 case MULT_EXPR:
7518 case MULT_HIGHPART_EXPR:
7519 case MIN_EXPR:
7520 case MAX_EXPR:
7521 case BIT_IOR_EXPR:
7522 case BIT_XOR_EXPR:
7523 case BIT_AND_EXPR:
7524 case NE_EXPR:
7525 case EQ_EXPR:
7526 case UNORDERED_EXPR:
7527 case ORDERED_EXPR:
7528 case UNEQ_EXPR:
7529 case LTGT_EXPR:
7530 case TRUTH_AND_EXPR:
7531 case TRUTH_XOR_EXPR:
7532 case TRUTH_OR_EXPR:
7533 case WIDEN_MULT_EXPR:
7534 case VEC_WIDEN_MULT_HI_EXPR:
7535 case VEC_WIDEN_MULT_LO_EXPR:
7536 case VEC_WIDEN_MULT_EVEN_EXPR:
7537 case VEC_WIDEN_MULT_ODD_EXPR:
7538 return true;
7539
7540 default:
7541 break;
7542 }
7543 return false;
7544 }
7545
7546 /* Return true if CODE represents a ternary tree code for which the
7547 first two operands are commutative. Otherwise return false. */
7548 bool
7549 commutative_ternary_tree_code (enum tree_code code)
7550 {
7551 switch (code)
7552 {
7553 case WIDEN_MULT_PLUS_EXPR:
7554 case WIDEN_MULT_MINUS_EXPR:
7555 case DOT_PROD_EXPR:
7556 case FMA_EXPR:
7557 return true;
7558
7559 default:
7560 break;
7561 }
7562 return false;
7563 }
7564
7565 namespace inchash
7566 {
7567
7568 /* Generate a hash value for an expression. This can be used iteratively
7569 by passing a previous result as the HSTATE argument.
7570
7571 This function is intended to produce the same hash for expressions which
7572 would compare equal using operand_equal_p. */
7573 void
7574 add_expr (const_tree t, inchash::hash &hstate)
7575 {
7576 int i;
7577 enum tree_code code;
7578 enum tree_code_class tclass;
7579
7580 if (t == NULL_TREE)
7581 {
7582 hstate.merge_hash (0);
7583 return;
7584 }
7585
7586 code = TREE_CODE (t);
7587
7588 switch (code)
7589 {
7590 /* Alas, constants aren't shared, so we can't rely on pointer
7591 identity. */
7592 case VOID_CST:
7593 hstate.merge_hash (0);
7594 return;
7595 case INTEGER_CST:
7596 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7597 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7598 return;
7599 case REAL_CST:
7600 {
7601 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7602 hstate.merge_hash (val2);
7603 return;
7604 }
7605 case FIXED_CST:
7606 {
7607 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7608 hstate.merge_hash (val2);
7609 return;
7610 }
7611 case STRING_CST:
7612 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7613 return;
7614 case COMPLEX_CST:
7615 inchash::add_expr (TREE_REALPART (t), hstate);
7616 inchash::add_expr (TREE_IMAGPART (t), hstate);
7617 return;
7618 case VECTOR_CST:
7619 {
7620 unsigned i;
7621 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7622 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7623 return;
7624 }
7625 case SSA_NAME:
7626 /* We can just compare by pointer. */
7627 hstate.add_wide_int (SSA_NAME_VERSION (t));
7628 return;
7629 case PLACEHOLDER_EXPR:
7630 /* The node itself doesn't matter. */
7631 return;
7632 case TREE_LIST:
7633 /* A list of expressions, for a CALL_EXPR or as the elements of a
7634 VECTOR_CST. */
7635 for (; t; t = TREE_CHAIN (t))
7636 inchash::add_expr (TREE_VALUE (t), hstate);
7637 return;
7638 case CONSTRUCTOR:
7639 {
7640 unsigned HOST_WIDE_INT idx;
7641 tree field, value;
7642 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7643 {
7644 inchash::add_expr (field, hstate);
7645 inchash::add_expr (value, hstate);
7646 }
7647 return;
7648 }
7649 case FUNCTION_DECL:
7650 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7651 Otherwise nodes that compare equal according to operand_equal_p might
7652 get different hash codes. However, don't do this for machine specific
7653 or front end builtins, since the function code is overloaded in those
7654 cases. */
7655 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7656 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7657 {
7658 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7659 code = TREE_CODE (t);
7660 }
7661 /* FALL THROUGH */
7662 default:
7663 tclass = TREE_CODE_CLASS (code);
7664
7665 if (tclass == tcc_declaration)
7666 {
7667 /* DECL's have a unique ID */
7668 hstate.add_wide_int (DECL_UID (t));
7669 }
7670 else
7671 {
7672 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7673
7674 hstate.add_object (code);
7675
7676 /* Don't hash the type, that can lead to having nodes which
7677 compare equal according to operand_equal_p, but which
7678 have different hash codes. */
7679 if (CONVERT_EXPR_CODE_P (code)
7680 || code == NON_LVALUE_EXPR)
7681 {
 7682 	    /* Make sure to include signedness in the hash computation.  */
7683 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7684 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7685 }
7686
7687 else if (commutative_tree_code (code))
7688 {
7689 /* It's a commutative expression. We want to hash it the same
7690 however it appears. We do this by first hashing both operands
7691 and then rehashing based on the order of their independent
7692 hashes. */
7693 inchash::hash one, two;
7694 inchash::add_expr (TREE_OPERAND (t, 0), one);
7695 inchash::add_expr (TREE_OPERAND (t, 1), two);
7696 hstate.add_commutative (one, two);
7697 }
7698 else
7699 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7700 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7701 }
7702 return;
7703 }
7704 }
7705
7706 }
7707
7708 /* Constructors for pointer, array and function types.
7709 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7710 constructed by language-dependent code, not here.) */
7711
7712 /* Construct, lay out and return the type of pointers to TO_TYPE with
7713 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7714 reference all of memory. If such a type has already been
7715 constructed, reuse it. */
7716
7717 tree
7718 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7719 bool can_alias_all)
7720 {
7721 tree t;
7722
7723 if (to_type == error_mark_node)
7724 return error_mark_node;
7725
7726 /* If the pointed-to type has the may_alias attribute set, force
7727 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7728 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7729 can_alias_all = true;
7730
7731 /* In some cases, languages will have things that aren't a POINTER_TYPE
7732 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7733 In that case, return that type without regard to the rest of our
7734 operands.
7735
7736 ??? This is a kludge, but consistent with the way this function has
7737 always operated and there doesn't seem to be a good way to avoid this
7738 at the moment. */
7739 if (TYPE_POINTER_TO (to_type) != 0
7740 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7741 return TYPE_POINTER_TO (to_type);
7742
7743 /* First, if we already have a type for pointers to TO_TYPE and it's
7744 the proper mode, use it. */
7745 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7746 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7747 return t;
7748
7749 t = make_node (POINTER_TYPE);
7750
7751 TREE_TYPE (t) = to_type;
7752 SET_TYPE_MODE (t, mode);
7753 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7754 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7755 TYPE_POINTER_TO (to_type) = t;
7756
7757 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7758 SET_TYPE_STRUCTURAL_EQUALITY (t);
7759 else if (TYPE_CANONICAL (to_type) != to_type)
7760 TYPE_CANONICAL (t)
7761 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7762 mode, false);
7763
7764 /* Lay out the type. This function has many callers that are concerned
7765 with expression-construction, and this simplifies them all. */
7766 layout_type (t);
7767
7768 return t;
7769 }
7770
7771 /* By default build pointers in ptr_mode. */
7772
7773 tree
7774 build_pointer_type (tree to_type)
7775 {
7776 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7777 : TYPE_ADDR_SPACE (to_type);
7778 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7779 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7780 }
7781
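/* Example (editorial sketch, not part of the original source): building
   the C type "int *" in the default pointer mode is a single call, and a
   second identical call returns the node cached on TYPE_POINTER_TO:

     tree int_ptr = build_pointer_type (integer_type_node);
     tree again   = build_pointer_type (integer_type_node);
     gcc_assert (int_ptr == again);

   build_pointer_type_for_mode can be called directly when a non-default
   pointer mode or a ref-all pointer is needed.  */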
7782 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7783
7784 tree
7785 build_reference_type_for_mode (tree to_type, machine_mode mode,
7786 bool can_alias_all)
7787 {
7788 tree t;
7789
7790 if (to_type == error_mark_node)
7791 return error_mark_node;
7792
7793 /* If the pointed-to type has the may_alias attribute set, force
7794 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7795 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7796 can_alias_all = true;
7797
7798 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7799 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7800 In that case, return that type without regard to the rest of our
7801 operands.
7802
7803 ??? This is a kludge, but consistent with the way this function has
7804 always operated and there doesn't seem to be a good way to avoid this
7805 at the moment. */
7806 if (TYPE_REFERENCE_TO (to_type) != 0
7807 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7808 return TYPE_REFERENCE_TO (to_type);
7809
7810 /* First, if we already have a type for references to TO_TYPE and it's
7811 the proper mode, use it. */
7812 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7813 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7814 return t;
7815
7816 t = make_node (REFERENCE_TYPE);
7817
7818 TREE_TYPE (t) = to_type;
7819 SET_TYPE_MODE (t, mode);
7820 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7821 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7822 TYPE_REFERENCE_TO (to_type) = t;
7823
7824 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7825 SET_TYPE_STRUCTURAL_EQUALITY (t);
7826 else if (TYPE_CANONICAL (to_type) != to_type)
7827 TYPE_CANONICAL (t)
7828 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7829 mode, false);
7830
7831 layout_type (t);
7832
7833 return t;
7834 }
7835
7836
7837 /* Build the node for the type of references-to-TO_TYPE by default
7838 in ptr_mode. */
7839
7840 tree
7841 build_reference_type (tree to_type)
7842 {
7843 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7844 : TYPE_ADDR_SPACE (to_type);
7845 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7846 return build_reference_type_for_mode (to_type, pointer_mode, false);
7847 }
7848
7849 #define MAX_INT_CACHED_PREC \
7850 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7851 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7852
7853 /* Builds a signed or unsigned integer type of precision PRECISION.
7854 Used for C bitfields whose precision does not match that of
7855 built-in target types. */
7856 tree
7857 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7858 int unsignedp)
7859 {
7860 tree itype, ret;
7861
7862 if (unsignedp)
7863 unsignedp = MAX_INT_CACHED_PREC + 1;
7864
7865 if (precision <= MAX_INT_CACHED_PREC)
7866 {
7867 itype = nonstandard_integer_type_cache[precision + unsignedp];
7868 if (itype)
7869 return itype;
7870 }
7871
7872 itype = make_node (INTEGER_TYPE);
7873 TYPE_PRECISION (itype) = precision;
7874
7875 if (unsignedp)
7876 fixup_unsigned_type (itype);
7877 else
7878 fixup_signed_type (itype);
7879
7880 ret = itype;
7881 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7882 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7883 if (precision <= MAX_INT_CACHED_PREC)
7884 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7885
7886 return ret;
7887 }
7888
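/* Example (editorial sketch, not part of the original source): a front
   end laying out the bit-field "unsigned x : 24;" can obtain a matching
   24-bit unsigned type with

     tree t = build_nonstandard_integer_type (24, 1);

   Repeating the call hands back the same node, either from
   nonstandard_integer_type_cache or from type_hash_canon.  */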
7889 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7890 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7891 is true, reuse such a type that has already been constructed. */
7892
7893 static tree
7894 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7895 {
7896 tree itype = make_node (INTEGER_TYPE);
7897 inchash::hash hstate;
7898
7899 TREE_TYPE (itype) = type;
7900
7901 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7902 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7903
7904 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7905 SET_TYPE_MODE (itype, TYPE_MODE (type));
7906 TYPE_SIZE (itype) = TYPE_SIZE (type);
7907 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7908 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7909 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7910
7911 if (!shared)
7912 return itype;
7913
7914 if ((TYPE_MIN_VALUE (itype)
7915 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7916 || (TYPE_MAX_VALUE (itype)
7917 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7918 {
7919 /* Since we cannot reliably merge this type, we need to compare it using
7920 structural equality checks. */
7921 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7922 return itype;
7923 }
7924
7925 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7926 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7927 hstate.merge_hash (TYPE_HASH (type));
7928 itype = type_hash_canon (hstate.end (), itype);
7929
7930 return itype;
7931 }
7932
7933 /* Wrapper around build_range_type_1 with SHARED set to true. */
7934
7935 tree
7936 build_range_type (tree type, tree lowval, tree highval)
7937 {
7938 return build_range_type_1 (type, lowval, highval, true);
7939 }
7940
7941 /* Wrapper around build_range_type_1 with SHARED set to false. */
7942
7943 tree
7944 build_nonshared_range_type (tree type, tree lowval, tree highval)
7945 {
7946 return build_range_type_1 (type, lowval, highval, false);
7947 }
7948
7949 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7950 MAXVAL should be the maximum value in the domain
7951 (one less than the length of the array).
7952
7953 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7954 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7955 The limit exists because the result is a signed type and we don't handle
7956 sizes that use more than one HOST_WIDE_INT. */
7957
7958 tree
7959 build_index_type (tree maxval)
7960 {
7961 return build_range_type (sizetype, size_zero_node, maxval);
7962 }
7963
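/* Example (editorial sketch, not part of the original source): the
   domain of a ten-element array is the sizetype range [0, 9]:

     tree domain = build_index_type (size_int (9));

   which is shorthand for
   build_range_type (sizetype, size_zero_node, size_int (9)).  */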
7964 /* Return true if the debug information for TYPE, a subtype, should be emitted
7965 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7966 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7967 debug info and doesn't reflect the source code. */
7968
7969 bool
7970 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7971 {
7972 tree base_type = TREE_TYPE (type), low, high;
7973
7974 /* Subrange types have a base type which is an integral type. */
7975 if (!INTEGRAL_TYPE_P (base_type))
7976 return false;
7977
7978 /* Get the real bounds of the subtype. */
7979 if (lang_hooks.types.get_subrange_bounds)
7980 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7981 else
7982 {
7983 low = TYPE_MIN_VALUE (type);
7984 high = TYPE_MAX_VALUE (type);
7985 }
7986
7987 /* If the type and its base type have the same representation and the same
7988 name, then the type is not a subrange but a copy of the base type. */
7989 if ((TREE_CODE (base_type) == INTEGER_TYPE
7990 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7991 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7992 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7993 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7994 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7995 return false;
7996
7997 if (lowval)
7998 *lowval = low;
7999 if (highval)
8000 *highval = high;
8001 return true;
8002 }
8003
8004 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8005 and number of elements specified by the range of values of INDEX_TYPE.
8006 If SHARED is true, reuse such a type that has already been constructed. */
8007
8008 static tree
8009 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8010 {
8011 tree t;
8012
8013 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8014 {
8015 error ("arrays of functions are not meaningful");
8016 elt_type = integer_type_node;
8017 }
8018
8019 t = make_node (ARRAY_TYPE);
8020 TREE_TYPE (t) = elt_type;
8021 TYPE_DOMAIN (t) = index_type;
8022 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8023 layout_type (t);
8024
8025 /* If the element type is incomplete at this point we get marked for
8026 structural equality. Do not record these types in the canonical
8027 type hashtable. */
8028 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8029 return t;
8030
8031 if (shared)
8032 {
8033 inchash::hash hstate;
8034 hstate.add_object (TYPE_HASH (elt_type));
8035 if (index_type)
8036 hstate.add_object (TYPE_HASH (index_type));
8037 t = type_hash_canon (hstate.end (), t);
8038 }
8039
8040 if (TYPE_CANONICAL (t) == t)
8041 {
8042 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8043 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
8044 SET_TYPE_STRUCTURAL_EQUALITY (t);
8045 else if (TYPE_CANONICAL (elt_type) != elt_type
8046 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8047 TYPE_CANONICAL (t)
8048 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8049 index_type
8050 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8051 shared);
8052 }
8053
8054 return t;
8055 }
8056
8057 /* Wrapper around build_array_type_1 with SHARED set to true. */
8058
8059 tree
8060 build_array_type (tree elt_type, tree index_type)
8061 {
8062 return build_array_type_1 (elt_type, index_type, true);
8063 }
8064
8065 /* Wrapper around build_array_type_1 with SHARED set to false. */
8066
8067 tree
8068 build_nonshared_array_type (tree elt_type, tree index_type)
8069 {
8070 return build_array_type_1 (elt_type, index_type, false);
8071 }
8072
8073 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8074 sizetype. */
8075
8076 tree
8077 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8078 {
8079 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8080 }
8081
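/* Example (editorial sketch, not part of the original source): the C
   type "int[10]" can be built from an explicit domain or through the
   convenience wrapper; both normally end up as the same shared node via
   type_hash_canon:

     tree a1 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));
     tree a2 = build_array_type_nelts (integer_type_node, 10);
*/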
8082 /* Strip ARRAY_TYPE wrappers from TYPE until a non-array element type
8083 is found, and return that element type. */
8084
8085 tree
8086 strip_array_types (tree type)
8087 {
8088 while (TREE_CODE (type) == ARRAY_TYPE)
8089 type = TREE_TYPE (type);
8090
8091 return type;
8092 }
8093
8094 /* Computes the canonical argument types from the argument type list
8095 ARGTYPES.
8096
8097 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8098 on entry to this function, or if any of the ARGTYPES are
8099 structural.
8100
8101 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8102 true on entry to this function, or if any of the ARGTYPES are
8103 non-canonical.
8104
8105 Returns a canonical argument list, which may be ARGTYPES when the
8106 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8107 true) or would not differ from ARGTYPES. */
8108
8109 static tree
8110 maybe_canonicalize_argtypes (tree argtypes,
8111 bool *any_structural_p,
8112 bool *any_noncanonical_p)
8113 {
8114 tree arg;
8115 bool any_noncanonical_argtypes_p = false;
8116
8117 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8118 {
8119 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8120 /* Fail gracefully by stating that the type is structural. */
8121 *any_structural_p = true;
8122 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8123 *any_structural_p = true;
8124 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8125 || TREE_PURPOSE (arg))
8126 /* If the argument has a default argument, we consider it
8127 non-canonical even though the type itself is canonical.
8128 That way, different variants of function and method types
8129 with default arguments will all point to the variant with
8130 no defaults as their canonical type. */
8131 any_noncanonical_argtypes_p = true;
8132 }
8133
8134 if (*any_structural_p)
8135 return argtypes;
8136
8137 if (any_noncanonical_argtypes_p)
8138 {
8139 /* Build the canonical list of argument types. */
8140 tree canon_argtypes = NULL_TREE;
8141 bool is_void = false;
8142
8143 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8144 {
8145 if (arg == void_list_node)
8146 is_void = true;
8147 else
8148 canon_argtypes = tree_cons (NULL_TREE,
8149 TYPE_CANONICAL (TREE_VALUE (arg)),
8150 canon_argtypes);
8151 }
8152
8153 canon_argtypes = nreverse (canon_argtypes);
8154 if (is_void)
8155 canon_argtypes = chainon (canon_argtypes, void_list_node);
8156
8157 /* There is a non-canonical type. */
8158 *any_noncanonical_p = true;
8159 return canon_argtypes;
8160 }
8161
8162 /* The canonical argument types are the same as ARGTYPES. */
8163 return argtypes;
8164 }
8165
8166 /* Construct, lay out and return
8167 the type of functions returning type VALUE_TYPE
8168 given arguments of types ARG_TYPES.
8169 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8170 are data type nodes for the arguments of the function.
8171 If such a type has already been constructed, reuse it. */
8172
8173 tree
8174 build_function_type (tree value_type, tree arg_types)
8175 {
8176 tree t;
8177 inchash::hash hstate;
8178 bool any_structural_p, any_noncanonical_p;
8179 tree canon_argtypes;
8180
8181 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8182 {
8183 error ("function return type cannot be function");
8184 value_type = integer_type_node;
8185 }
8186
8187 /* Make a node of the sort we want. */
8188 t = make_node (FUNCTION_TYPE);
8189 TREE_TYPE (t) = value_type;
8190 TYPE_ARG_TYPES (t) = arg_types;
8191
8192 /* If we already have such a type, use the old one. */
8193 hstate.add_object (TYPE_HASH (value_type));
8194 type_hash_list (arg_types, hstate);
8195 t = type_hash_canon (hstate.end (), t);
8196
8197 /* Set up the canonical type. */
8198 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8199 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8200 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8201 &any_structural_p,
8202 &any_noncanonical_p);
8203 if (any_structural_p)
8204 SET_TYPE_STRUCTURAL_EQUALITY (t);
8205 else if (any_noncanonical_p)
8206 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8207 canon_argtypes);
8208
8209 if (!COMPLETE_TYPE_P (t))
8210 layout_type (t);
8211 return t;
8212 }
8213
8214 /* Build a function type. The RETURN_TYPE is the type returned by the
8215 function. If VAARGS is set, no void_type_node is appended to
8216 the list. ARGP must always be terminated by a NULL_TREE. */
8217
8218 static tree
8219 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8220 {
8221 tree t, args, last;
8222
8223 t = va_arg (argp, tree);
8224 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8225 args = tree_cons (NULL_TREE, t, args);
8226
8227 if (vaargs)
8228 {
8229 last = args;
8230 if (args != NULL_TREE)
8231 args = nreverse (args);
8232 gcc_assert (last != void_list_node);
8233 }
8234 else if (args == NULL_TREE)
8235 args = void_list_node;
8236 else
8237 {
8238 last = args;
8239 args = nreverse (args);
8240 TREE_CHAIN (last) = void_list_node;
8241 }
8242 args = build_function_type (return_type, args);
8243
8244 return args;
8245 }
8246
8247 /* Build a function type. The RETURN_TYPE is the type returned by the
8248 function. If additional arguments are provided, they are
8249 additional argument types. The list of argument types must always
8250 be terminated by NULL_TREE. */
8251
8252 tree
8253 build_function_type_list (tree return_type, ...)
8254 {
8255 tree args;
8256 va_list p;
8257
8258 va_start (p, return_type);
8259 args = build_function_type_list_1 (false, return_type, p);
8260 va_end (p);
8261 return args;
8262 }
8263
8264 /* Build a variable argument function type. The RETURN_TYPE is the
8265 type returned by the function. If additional arguments are provided,
8266 they are additional argument types. The list of argument types must
8267 always be terminated by NULL_TREE. */
8268
8269 tree
8270 build_varargs_function_type_list (tree return_type, ...)
8271 {
8272 tree args;
8273 va_list p;
8274
8275 va_start (p, return_type);
8276 args = build_function_type_list_1 (true, return_type, p);
8277 va_end (p);
8278
8279 return args;
8280 }
8281
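/* Example (editorial sketch, not part of the original source): the
   types of "int f (int, double)" and of the variadic "int g (int, ...)"
   can be built as

     tree f_type = build_function_type_list (integer_type_node,
                                             integer_type_node,
                                             double_type_node, NULL_TREE);
     tree g_type = build_varargs_function_type_list (integer_type_node,
                                                     integer_type_node,
                                                     NULL_TREE);
*/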
8282 /* Build a function type. RETURN_TYPE is the type returned by the
8283 function; VAARGS indicates whether the function takes varargs. The
8284 function takes N named arguments, the types of which are provided in
8285 ARG_TYPES. */
8286
8287 static tree
8288 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8289 tree *arg_types)
8290 {
8291 int i;
8292 tree t = vaargs ? NULL_TREE : void_list_node;
8293
8294 for (i = n - 1; i >= 0; i--)
8295 t = tree_cons (NULL_TREE, arg_types[i], t);
8296
8297 return build_function_type (return_type, t);
8298 }
8299
8300 /* Build a function type. RETURN_TYPE is the type returned by the
8301 function. The function takes N named arguments, the types of which
8302 are provided in ARG_TYPES. */
8303
8304 tree
8305 build_function_type_array (tree return_type, int n, tree *arg_types)
8306 {
8307 return build_function_type_array_1 (false, return_type, n, arg_types);
8308 }
8309
8310 /* Build a variable argument function type. RETURN_TYPE is the type
8311 returned by the function. The function takes N named arguments, the
8312 types of which are provided in ARG_TYPES. */
8313
8314 tree
8315 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8316 {
8317 return build_function_type_array_1 (true, return_type, n, arg_types);
8318 }
8319
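/* Example (editorial sketch, not part of the original source): the
   array variants are handy when the argument types already sit in a
   flat buffer, e.g. for "int h (int, void *)":

     tree argtypes[2] = { integer_type_node, ptr_type_node };
     tree h_type = build_function_type_array (integer_type_node,
                                              2, argtypes);
*/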
8320 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8321 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8322 for the method. An implicit additional parameter (of type
8323 pointer-to-BASETYPE) is added to the ARGTYPES. */
8324
8325 tree
8326 build_method_type_directly (tree basetype,
8327 tree rettype,
8328 tree argtypes)
8329 {
8330 tree t;
8331 tree ptype;
8332 inchash::hash hstate;
8333 bool any_structural_p, any_noncanonical_p;
8334 tree canon_argtypes;
8335
8336 /* Make a node of the sort we want. */
8337 t = make_node (METHOD_TYPE);
8338
8339 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8340 TREE_TYPE (t) = rettype;
8341 ptype = build_pointer_type (basetype);
8342
8343 /* The actual arglist for this function includes a "hidden" argument
8344 which is "this". Put it into the list of argument types. */
8345 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8346 TYPE_ARG_TYPES (t) = argtypes;
8347
8348 /* If we already have such a type, use the old one. */
8349 hstate.add_object (TYPE_HASH (basetype));
8350 hstate.add_object (TYPE_HASH (rettype));
8351 type_hash_list (argtypes, hstate);
8352 t = type_hash_canon (hstate.end (), t);
8353
8354 /* Set up the canonical type. */
8355 any_structural_p
8356 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8357 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8358 any_noncanonical_p
8359 = (TYPE_CANONICAL (basetype) != basetype
8360 || TYPE_CANONICAL (rettype) != rettype);
8361 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8362 &any_structural_p,
8363 &any_noncanonical_p);
8364 if (any_structural_p)
8365 SET_TYPE_STRUCTURAL_EQUALITY (t);
8366 else if (any_noncanonical_p)
8367 TYPE_CANONICAL (t)
8368 = build_method_type_directly (TYPE_CANONICAL (basetype),
8369 TYPE_CANONICAL (rettype),
8370 canon_argtypes);
8371 if (!COMPLETE_TYPE_P (t))
8372 layout_type (t);
8373
8374 return t;
8375 }
8376
8377 /* Construct, lay out and return the type of methods belonging to class
8378 BASETYPE and whose arguments and values are described by TYPE.
8379 If that type exists already, reuse it.
8380 TYPE must be a FUNCTION_TYPE node. */
8381
8382 tree
8383 build_method_type (tree basetype, tree type)
8384 {
8385 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8386
8387 return build_method_type_directly (basetype,
8388 TREE_TYPE (type),
8389 TYPE_ARG_TYPES (type));
8390 }
8391
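/* Example (editorial sketch, not part of the original source): given
   some RECORD_TYPE CLASS_TYPE supplied by a front end (the name is a
   placeholder here) and the FUNCTION_TYPE of "int (int)", the resulting
   METHOD_TYPE gains the implicit "this" argument of type
   pointer-to-CLASS_TYPE:

     tree fn_type = build_function_type_list (integer_type_node,
                                              integer_type_node,
                                              NULL_TREE);
     tree m_type  = build_method_type (class_type, fn_type);
*/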
8392 /* Construct, lay out and return the type of offsets to a value
8393 of type TYPE, within an object of type BASETYPE.
8394 If a suitable offset type exists already, reuse it. */
8395
8396 tree
8397 build_offset_type (tree basetype, tree type)
8398 {
8399 tree t;
8400 inchash::hash hstate;
8401
8402 /* Make a node of the sort we want. */
8403 t = make_node (OFFSET_TYPE);
8404
8405 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8406 TREE_TYPE (t) = type;
8407
8408 /* If we already have such a type, use the old one. */
8409 hstate.add_object (TYPE_HASH (basetype));
8410 hstate.add_object (TYPE_HASH (type));
8411 t = type_hash_canon (hstate.end (), t);
8412
8413 if (!COMPLETE_TYPE_P (t))
8414 layout_type (t);
8415
8416 if (TYPE_CANONICAL (t) == t)
8417 {
8418 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8419 || TYPE_STRUCTURAL_EQUALITY_P (type))
8420 SET_TYPE_STRUCTURAL_EQUALITY (t);
8421 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8422 || TYPE_CANONICAL (type) != type)
8423 TYPE_CANONICAL (t)
8424 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8425 TYPE_CANONICAL (type));
8426 }
8427
8428 return t;
8429 }
8430
8431 /* Create a complex type whose components are COMPONENT_TYPE. */
8432
8433 tree
8434 build_complex_type (tree component_type)
8435 {
8436 tree t;
8437 inchash::hash hstate;
8438
8439 gcc_assert (INTEGRAL_TYPE_P (component_type)
8440 || SCALAR_FLOAT_TYPE_P (component_type)
8441 || FIXED_POINT_TYPE_P (component_type));
8442
8443 /* Make a node of the sort we want. */
8444 t = make_node (COMPLEX_TYPE);
8445
8446 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8447
8448 /* If we already have such a type, use the old one. */
8449 hstate.add_object (TYPE_HASH (component_type));
8450 t = type_hash_canon (hstate.end (), t);
8451
8452 if (!COMPLETE_TYPE_P (t))
8453 layout_type (t);
8454
8455 if (TYPE_CANONICAL (t) == t)
8456 {
8457 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8458 SET_TYPE_STRUCTURAL_EQUALITY (t);
8459 else if (TYPE_CANONICAL (component_type) != component_type)
8460 TYPE_CANONICAL (t)
8461 = build_complex_type (TYPE_CANONICAL (component_type));
8462 }
8463
8464 /* We need to create a name, since complex is a fundamental type. */
8465 if (! TYPE_NAME (t))
8466 {
8467 const char *name;
8468 if (component_type == char_type_node)
8469 name = "complex char";
8470 else if (component_type == signed_char_type_node)
8471 name = "complex signed char";
8472 else if (component_type == unsigned_char_type_node)
8473 name = "complex unsigned char";
8474 else if (component_type == short_integer_type_node)
8475 name = "complex short int";
8476 else if (component_type == short_unsigned_type_node)
8477 name = "complex short unsigned int";
8478 else if (component_type == integer_type_node)
8479 name = "complex int";
8480 else if (component_type == unsigned_type_node)
8481 name = "complex unsigned int";
8482 else if (component_type == long_integer_type_node)
8483 name = "complex long int";
8484 else if (component_type == long_unsigned_type_node)
8485 name = "complex long unsigned int";
8486 else if (component_type == long_long_integer_type_node)
8487 name = "complex long long int";
8488 else if (component_type == long_long_unsigned_type_node)
8489 name = "complex long long unsigned int";
8490 else
8491 name = 0;
8492
8493 if (name != 0)
8494 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8495 get_identifier (name), t);
8496 }
8497
8498 return build_qualified_type (t, TYPE_QUALS (component_type));
8499 }
8500
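/* Example (editorial sketch, not part of the original source): the C
   type "_Complex int" corresponds to

     tree c_type = build_complex_type (integer_type_node);

   and, since the component is one of the well-known integer nodes, the
   code above also gives the result the TYPE_DECL name "complex int".  */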
8501 /* If TYPE is a real or complex floating-point type and the target
8502 does not directly support arithmetic on TYPE then return the wider
8503 type to be used for arithmetic on TYPE. Otherwise, return
8504 NULL_TREE. */
8505
8506 tree
8507 excess_precision_type (tree type)
8508 {
8509 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8510 {
8511 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8512 switch (TREE_CODE (type))
8513 {
8514 case REAL_TYPE:
8515 switch (flt_eval_method)
8516 {
8517 case 1:
8518 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8519 return double_type_node;
8520 break;
8521 case 2:
8522 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8523 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8524 return long_double_type_node;
8525 break;
8526 default:
8527 gcc_unreachable ();
8528 }
8529 break;
8530 case COMPLEX_TYPE:
8531 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8532 return NULL_TREE;
8533 switch (flt_eval_method)
8534 {
8535 case 1:
8536 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8537 return complex_double_type_node;
8538 break;
8539 case 2:
8540 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8541 || (TYPE_MODE (TREE_TYPE (type))
8542 == TYPE_MODE (double_type_node)))
8543 return complex_long_double_type_node;
8544 break;
8545 default:
8546 gcc_unreachable ();
8547 }
8548 break;
8549 default:
8550 break;
8551 }
8552 }
8553 return NULL_TREE;
8554 }
8555 \f
8556 /* Return OP, stripped of any conversions to wider types as much as is safe.
8557 Converting the value back to OP's type makes a value equivalent to OP.
8558
8559 If FOR_TYPE is nonzero, we return a value which, if converted to
8560 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8561
8562 OP must have integer, real or enumeral type. Pointers are not allowed!
8563
8564 There are some cases where the obvious value we could return
8565 would regenerate to OP if converted to OP's type,
8566 but would not extend like OP to wider types.
8567 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8568 For example, if OP is (unsigned short)(signed char)-1,
8569 we avoid returning (signed char)-1 if FOR_TYPE is int,
8570 even though extending that to an unsigned short would regenerate OP,
8571 since the result of extending (signed char)-1 to (int)
8572 is different from (int) OP. */
8573
8574 tree
8575 get_unwidened (tree op, tree for_type)
8576 {
8577 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8578 tree type = TREE_TYPE (op);
8579 unsigned final_prec
8580 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8581 int uns
8582 = (for_type != 0 && for_type != type
8583 && final_prec > TYPE_PRECISION (type)
8584 && TYPE_UNSIGNED (type));
8585 tree win = op;
8586
8587 while (CONVERT_EXPR_P (op))
8588 {
8589 int bitschange;
8590
8591 /* TYPE_PRECISION on vector types has different meaning
8592 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8593 so avoid them here. */
8594 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8595 break;
8596
8597 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8598 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8599
8600 /* Truncations are many-one so cannot be removed, unless we are
8601 later going to truncate down even further. */
8602 if (bitschange < 0
8603 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8604 break;
8605
8606 /* See what's inside this conversion. If we decide to strip it,
8607 we will set WIN. */
8608 op = TREE_OPERAND (op, 0);
8609
8610 /* If we have not stripped any zero-extensions (uns is 0),
8611 we can strip any kind of extension.
8612 If we have previously stripped a zero-extension,
8613 only zero-extensions can safely be stripped.
8614 Any extension can be stripped if the bits it would produce
8615 are all going to be discarded later by truncating to FOR_TYPE. */
8616
8617 if (bitschange > 0)
8618 {
8619 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8620 win = op;
8621 /* TYPE_UNSIGNED says whether this is a zero-extension.
8622 Let's avoid computing it if it does not affect WIN
8623 and if UNS will not be needed again. */
8624 if ((uns
8625 || CONVERT_EXPR_P (op))
8626 && TYPE_UNSIGNED (TREE_TYPE (op)))
8627 {
8628 uns = 1;
8629 win = op;
8630 }
8631 }
8632 }
8633
8634 /* If we finally reach a constant see if it fits in for_type and
8635 in that case convert it. */
8636 if (for_type
8637 && TREE_CODE (win) == INTEGER_CST
8638 && TREE_TYPE (win) != for_type
8639 && int_fits_type_p (win, for_type))
8640 win = fold_convert (for_type, win);
8641
8642 return win;
8643 }
8644 \f
8645 /* Return OP or a simpler expression for a narrower value
8646 which can be sign-extended or zero-extended to give back OP.
8647 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8648 or 0 if the value should be sign-extended. */
8649
8650 tree
8651 get_narrower (tree op, int *unsignedp_ptr)
8652 {
8653 int uns = 0;
8654 int first = 1;
8655 tree win = op;
8656 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8657
8658 while (TREE_CODE (op) == NOP_EXPR)
8659 {
8660 int bitschange
8661 = (TYPE_PRECISION (TREE_TYPE (op))
8662 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8663
8664 /* Truncations are many-one so cannot be removed. */
8665 if (bitschange < 0)
8666 break;
8667
8668 /* See what's inside this conversion. If we decide to strip it,
8669 we will set WIN. */
8670
8671 if (bitschange > 0)
8672 {
8673 op = TREE_OPERAND (op, 0);
8674 /* An extension: the outermost one can be stripped,
8675 but remember whether it is zero or sign extension. */
8676 if (first)
8677 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8678 /* Otherwise, if a sign extension has been stripped,
8679 only sign extensions can now be stripped;
8680 if a zero extension has been stripped, only zero-extensions. */
8681 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8682 break;
8683 first = 0;
8684 }
8685 else /* bitschange == 0 */
8686 {
8687 /* A change in nominal type can always be stripped, but we must
8688 preserve the unsignedness. */
8689 if (first)
8690 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8691 first = 0;
8692 op = TREE_OPERAND (op, 0);
8693 /* Keep trying to narrow, but don't assign op to win if it
8694 would turn an integral type into something else. */
8695 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8696 continue;
8697 }
8698
8699 win = op;
8700 }
8701
8702 if (TREE_CODE (op) == COMPONENT_REF
8703 /* Since type_for_size always gives an integer type. */
8704 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8705 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8706 /* Ensure field is laid out already. */
8707 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8708 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8709 {
8710 unsigned HOST_WIDE_INT innerprec
8711 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8712 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8713 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8714 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8715
8716 /* We can get this structure field in a narrower type that fits it,
8717 but the resulting extension to its nominal type (a fullword type)
8718 must satisfy the same conditions as for other extensions.
8719
8720 Do this only for fields that are aligned (not bit-fields),
8721 because when bit-field insns would be used anyway there is no
8722 advantage in doing this. */
8723
8724 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8725 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8726 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8727 && type != 0)
8728 {
8729 if (first)
8730 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8731 win = fold_convert (type, op);
8732 }
8733 }
8734
8735 *unsignedp_ptr = uns;
8736 return win;
8737 }
8738 \f
8739 /* Returns true if integer constant C has a value that is permissible
8740 for type TYPE (an INTEGER_TYPE). */
8741
8742 bool
8743 int_fits_type_p (const_tree c, const_tree type)
8744 {
8745 tree type_low_bound, type_high_bound;
8746 bool ok_for_low_bound, ok_for_high_bound;
8747 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8748
8749 retry:
8750 type_low_bound = TYPE_MIN_VALUE (type);
8751 type_high_bound = TYPE_MAX_VALUE (type);
8752
8753 /* If at least one bound of the type is a constant integer, we can check
8754 ourselves and maybe make a decision. If no such decision is possible, but
8755 this type is a subtype, try checking against that. Otherwise, use
8756 fits_to_tree_p, which checks against the precision.
8757
8758 Compute the status for each possibly constant bound, and return if we see
8759 one does not match. Use ok_for_xxx_bound for this purpose: it is true
8760 when the corresponding bound is a known constant and C satisfies it, and
8761 false when no decision could be made from that bound. */
8762
8763 /* Check if c >= type_low_bound. */
8764 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8765 {
8766 if (tree_int_cst_lt (c, type_low_bound))
8767 return false;
8768 ok_for_low_bound = true;
8769 }
8770 else
8771 ok_for_low_bound = false;
8772
8773 /* Check if c <= type_high_bound. */
8774 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8775 {
8776 if (tree_int_cst_lt (type_high_bound, c))
8777 return false;
8778 ok_for_high_bound = true;
8779 }
8780 else
8781 ok_for_high_bound = false;
8782
8783 /* If the constant fits both bounds, the result is known. */
8784 if (ok_for_low_bound && ok_for_high_bound)
8785 return true;
8786
8787 /* Perform some generic filtering which may allow making a decision
8788 even if the bounds are not constant. First, negative integers
8789 never fit in unsigned types. */
8790 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8791 return false;
8792
8793 /* Second, narrower types always fit in wider ones. */
8794 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8795 return true;
8796
8797 /* Third, unsigned integers with top bit set never fit signed types. */
8798 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8799 {
8800 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8801 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8802 {
8803 /* When a tree_cst is converted to a wide-int, the precision
8804 is taken from the type. However, if the precision of the
8805 mode underneath the type is smaller than that, it is
8806 possible that the value will not fit. The test below
8807 fails if any bit is set between the sign bit of the
8808 underlying mode and the top bit of the type. */
8809 if (wi::ne_p (wi::zext (c, prec - 1), c))
8810 return false;
8811 }
8812 else if (wi::neg_p (c))
8813 return false;
8814 }
8815
8816 /* If we haven't been able to decide at this point, there is nothing more we
8817 can check ourselves here. Look at the base type if we have one and it
8818 has the same precision. */
8819 if (TREE_CODE (type) == INTEGER_TYPE
8820 && TREE_TYPE (type) != 0
8821 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8822 {
8823 type = TREE_TYPE (type);
8824 goto retry;
8825 }
8826
8827 /* Or to fits_to_tree_p, if nothing else. */
8828 return wi::fits_to_tree_p (c, type);
8829 }
8830
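/* Example (editorial sketch, not part of the original source): testing
   whether the constant 300 is representable in "unsigned char" and in
   "int":

     tree c = build_int_cst (integer_type_node, 300);

   int_fits_type_p (c, unsigned_char_type_node) is false (255 is that
   type's maximum), while int_fits_type_p (c, integer_type_node) is
   true.  */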
8831 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8832 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8833 represented (assuming two's-complement arithmetic) within the bit
8834 precision of the type are returned instead. */
8835
8836 void
8837 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8838 {
8839 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8840 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8841 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8842 else
8843 {
8844 if (TYPE_UNSIGNED (type))
8845 mpz_set_ui (min, 0);
8846 else
8847 {
8848 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8849 wi::to_mpz (mn, min, SIGNED);
8850 }
8851 }
8852
8853 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8854 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8855 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8856 else
8857 {
8858 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8859 wi::to_mpz (mn, max, TYPE_SIGN (type));
8860 }
8861 }
8862
8863 /* Return true if VAR is an automatic variable defined in function FN. */
8864
8865 bool
8866 auto_var_in_fn_p (const_tree var, const_tree fn)
8867 {
8868 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8869 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8870 || TREE_CODE (var) == PARM_DECL)
8871 && ! TREE_STATIC (var))
8872 || TREE_CODE (var) == LABEL_DECL
8873 || TREE_CODE (var) == RESULT_DECL));
8874 }
8875
8876 /* Subprogram of following function. Called by walk_tree.
8877
8878 Return *TP if it is an automatic variable or parameter of the
8879 function passed in as DATA. */
8880
8881 static tree
8882 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8883 {
8884 tree fn = (tree) data;
8885
8886 if (TYPE_P (*tp))
8887 *walk_subtrees = 0;
8888
8889 else if (DECL_P (*tp)
8890 && auto_var_in_fn_p (*tp, fn))
8891 return *tp;
8892
8893 return NULL_TREE;
8894 }
8895
8896 /* Returns true if T is, contains, or refers to a type with variable
8897 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8898 arguments, but not the return type. If FN is nonzero, only return
8899 true if a modifier of the type or position of FN is a variable or
8900 parameter inside FN.
8901
8902 This concept is more general than that of C99 'variably modified types':
8903 in C99, a struct type is never variably modified because a VLA may not
8904 appear as a structure member. However, in GNU C, code like:
8905
8906 struct S { int i[f()]; };
8907
8908 is valid, and other languages may define similar constructs. */
8909
8910 bool
8911 variably_modified_type_p (tree type, tree fn)
8912 {
8913 tree t;
8914
8915 /* Test if T is either variable (if FN is zero) or an expression containing
8916 a variable in FN. If TYPE isn't gimplified, return true also if
8917 gimplify_one_sizepos would gimplify the expression into a local
8918 variable. */
8919 #define RETURN_TRUE_IF_VAR(T) \
8920 do { tree _t = (T); \
8921 if (_t != NULL_TREE \
8922 && _t != error_mark_node \
8923 && TREE_CODE (_t) != INTEGER_CST \
8924 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8925 && (!fn \
8926 || (!TYPE_SIZES_GIMPLIFIED (type) \
8927 && !is_gimple_sizepos (_t)) \
8928 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8929 return true; } while (0)
8930
8931 if (type == error_mark_node)
8932 return false;
8933
8934 /* If TYPE itself has variable size, it is variably modified. */
8935 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8936 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8937
8938 switch (TREE_CODE (type))
8939 {
8940 case POINTER_TYPE:
8941 case REFERENCE_TYPE:
8942 case VECTOR_TYPE:
8943 if (variably_modified_type_p (TREE_TYPE (type), fn))
8944 return true;
8945 break;
8946
8947 case FUNCTION_TYPE:
8948 case METHOD_TYPE:
8949 /* If TYPE is a function type, it is variably modified if the
8950 return type is variably modified. */
8951 if (variably_modified_type_p (TREE_TYPE (type), fn))
8952 return true;
8953 break;
8954
8955 case INTEGER_TYPE:
8956 case REAL_TYPE:
8957 case FIXED_POINT_TYPE:
8958 case ENUMERAL_TYPE:
8959 case BOOLEAN_TYPE:
8960 /* Scalar types are variably modified if their end points
8961 aren't constant. */
8962 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8963 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8964 break;
8965
8966 case RECORD_TYPE:
8967 case UNION_TYPE:
8968 case QUAL_UNION_TYPE:
8969 /* We can't see if any of the fields are variably-modified by the
8970 definition we normally use, since that would produce infinite
8971 recursion via pointers. */
8972 /* This is variably modified if some field's type is. */
8973 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8974 if (TREE_CODE (t) == FIELD_DECL)
8975 {
8976 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8977 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8978 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8979
8980 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8981 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8982 }
8983 break;
8984
8985 case ARRAY_TYPE:
8986 /* Do not call ourselves to avoid infinite recursion. This is
8987 variably modified if the element type is. */
8988 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8989 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8990 break;
8991
8992 default:
8993 break;
8994 }
8995
8996 /* The current language may have other cases to check, but in general,
8997 all other types are not variably modified. */
8998 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8999
9000 #undef RETURN_TRUE_IF_VAR
9001 }
9002
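/* Example (editorial sketch, not part of the original source): for the
   GNU C function

     void f (int n) { int a[n]; }

   the ARRAY_TYPE of "a" has a non-constant TYPE_SIZE, so calling
   variably_modified_type_p on it (with FN being f's FUNCTION_DECL)
   returns true, whereas it returns false for a fixed type such as
   "int[10]".  */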
9003 /* Given a DECL or TYPE, return the scope in which it was declared, or
9004 NULL_TREE if there is no containing scope. */
9005
9006 tree
9007 get_containing_scope (const_tree t)
9008 {
9009 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9010 }
9011
9012 /* Return the innermost context enclosing DECL that is
9013 a FUNCTION_DECL, or zero if none. */
9014
9015 tree
9016 decl_function_context (const_tree decl)
9017 {
9018 tree context;
9019
9020 if (TREE_CODE (decl) == ERROR_MARK)
9021 return 0;
9022
9023 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9024 where we look up the function at runtime. Such functions always take
9025 a first argument of type 'pointer to real context'.
9026
9027 C++ should really be fixed to use DECL_CONTEXT for the real context,
9028 and use something else for the "virtual context". */
9029 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9030 context
9031 = TYPE_MAIN_VARIANT
9032 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9033 else
9034 context = DECL_CONTEXT (decl);
9035
9036 while (context && TREE_CODE (context) != FUNCTION_DECL)
9037 {
9038 if (TREE_CODE (context) == BLOCK)
9039 context = BLOCK_SUPERCONTEXT (context);
9040 else
9041 context = get_containing_scope (context);
9042 }
9043
9044 return context;
9045 }
9046
9047 /* Return the innermost context enclosing DECL that is
9048 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9049 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9050
9051 tree
9052 decl_type_context (const_tree decl)
9053 {
9054 tree context = DECL_CONTEXT (decl);
9055
9056 while (context)
9057 switch (TREE_CODE (context))
9058 {
9059 case NAMESPACE_DECL:
9060 case TRANSLATION_UNIT_DECL:
9061 return NULL_TREE;
9062
9063 case RECORD_TYPE:
9064 case UNION_TYPE:
9065 case QUAL_UNION_TYPE:
9066 return context;
9067
9068 case TYPE_DECL:
9069 case FUNCTION_DECL:
9070 context = DECL_CONTEXT (context);
9071 break;
9072
9073 case BLOCK:
9074 context = BLOCK_SUPERCONTEXT (context);
9075 break;
9076
9077 default:
9078 gcc_unreachable ();
9079 }
9080
9081 return NULL_TREE;
9082 }
9083
9084 /* CALL is a CALL_EXPR. Return the declaration for the function
9085 called, or NULL_TREE if the called function cannot be
9086 determined. */
9087
9088 tree
9089 get_callee_fndecl (const_tree call)
9090 {
9091 tree addr;
9092
9093 if (call == error_mark_node)
9094 return error_mark_node;
9095
9096 /* It's invalid to call this function with anything but a
9097 CALL_EXPR. */
9098 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9099
9100 /* The first operand to the CALL is the address of the function
9101 called. */
9102 addr = CALL_EXPR_FN (call);
9103
9104 /* If there is no function, return early. */
9105 if (addr == NULL_TREE)
9106 return NULL_TREE;
9107
9108 STRIP_NOPS (addr);
9109
9110 /* If this is a readonly function pointer, extract its initial value. */
9111 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9112 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9113 && DECL_INITIAL (addr))
9114 addr = DECL_INITIAL (addr);
9115
9116 /* If the address is just `&f' for some function `f', then we know
9117 that `f' is being called. */
9118 if (TREE_CODE (addr) == ADDR_EXPR
9119 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9120 return TREE_OPERAND (addr, 0);
9121
9122 /* We couldn't figure out what was being called. */
9123 return NULL_TREE;
9124 }
9125
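/* Example (editorial sketch, not part of the original source): for a
   direct call such as "foo (x)" the CALL_EXPR's CALL_EXPR_FN is the
   ADDR_EXPR "&foo", so

     tree fndecl = get_callee_fndecl (call);

   yields foo's FUNCTION_DECL; for an indirect call through a
   non-constant function pointer it yields NULL_TREE.  CALL here stands
   for the CALL_EXPR at hand.  */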
9126 /* Print debugging information about tree nodes generated during the compile,
9127 and any language-specific information. */
9128
9129 void
9130 dump_tree_statistics (void)
9131 {
9132 if (GATHER_STATISTICS)
9133 {
9134 int i;
9135 int total_nodes, total_bytes;
9136 fprintf (stderr, "Kind Nodes Bytes\n");
9137 fprintf (stderr, "---------------------------------------\n");
9138 total_nodes = total_bytes = 0;
9139 for (i = 0; i < (int) all_kinds; i++)
9140 {
9141 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9142 tree_node_counts[i], tree_node_sizes[i]);
9143 total_nodes += tree_node_counts[i];
9144 total_bytes += tree_node_sizes[i];
9145 }
9146 fprintf (stderr, "---------------------------------------\n");
9147 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9148 fprintf (stderr, "---------------------------------------\n");
9149 fprintf (stderr, "Code Nodes\n");
9150 fprintf (stderr, "----------------------------\n");
9151 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9152 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9153 tree_code_counts[i]);
9154 fprintf (stderr, "----------------------------\n");
9155 ssanames_print_statistics ();
9156 phinodes_print_statistics ();
9157 }
9158 else
9159 fprintf (stderr, "(No per-node statistics)\n");
9160
9161 print_type_hash_statistics ();
9162 print_debug_expr_statistics ();
9163 print_value_expr_statistics ();
9164 lang_hooks.print_statistics ();
9165 }
9166 \f
9167 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9168
9169 /* Generate a crc32 over the most significant BITS bits of VALUE. */
9170
9171 static unsigned
9172 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9173 {
9174 unsigned ix;
9175
9176 for (ix = bits; ix--; value <<= 1)
9177 {
9178 unsigned feedback;
9179
9180 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9181 chksum <<= 1;
9182 chksum ^= feedback;
9183 }
9184 return chksum;
9185 }
9186
9187 /* Generate a crc32 of a 32-bit unsigned. */
9188
9189 unsigned
9190 crc32_unsigned (unsigned chksum, unsigned value)
9191 {
9192 return crc32_unsigned_bits (chksum, value, 32);
9193 }
9194
9195 /* Generate a crc32 of a byte. */
9196
9197 unsigned
9198 crc32_byte (unsigned chksum, char byte)
9199 {
9200 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9201 }
9202
9203 /* Generate a crc32 of a string. */
9204
9205 unsigned
9206 crc32_string (unsigned chksum, const char *string)
9207 {
9208 do
9209 {
9210 chksum = crc32_byte (chksum, *string);
9211 }
9212 while (*string++);
9213 return chksum;
9214 }
9215
9216 /* P is a string that will be used in a symbol. Mask out any characters
9217 that are not valid in that context. */
9218
9219 void
9220 clean_symbol_name (char *p)
9221 {
9222 for (; *p; p++)
9223 if (! (ISALNUM (*p)
9224 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9225 || *p == '$'
9226 #endif
9227 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9228 || *p == '.'
9229 #endif
9230 ))
9231 *p = '_';
9232 }
9233
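/* Example (editorial sketch, not part of the original source):

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);

   leaves "foo_bar.c" in BUF on targets that allow '.' in labels, and
   "foo_bar_c" elsewhere; '$' is likewise kept only when
   NO_DOLLAR_IN_LABEL is not defined.  */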
9234 /* Generate a name for a special-purpose function.
9235 The generated name may need to be unique across the whole link.
9236 Changes to this function may also require corresponding changes to
9237 xstrdup_mask_random.
9238 TYPE is some string to identify the purpose of this function to the
9239 linker or collect2; it must start with an uppercase letter,
9240 one of:
9241 I - for constructors
9242 D - for destructors
9243 N - for C++ anonymous namespaces
9244 F - for DWARF unwind frame information. */
9245
9246 tree
9247 get_file_function_name (const char *type)
9248 {
9249 char *buf;
9250 const char *p;
9251 char *q;
9252
9253 /* If we already have a name we know to be unique, just use that. */
9254 if (first_global_object_name)
9255 p = q = ASTRDUP (first_global_object_name);
9256 /* If the target is handling the constructors/destructors, they
9257 will be local to this file and the name is only necessary for
9258 debugging purposes.
9259 We also assign sub_I and sub_D suffixes to constructors called from
9260 the global static constructors. These are always local. */
9261 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9262 || (strncmp (type, "sub_", 4) == 0
9263 && (type[4] == 'I' || type[4] == 'D')))
9264 {
9265 const char *file = main_input_filename;
9266 if (! file)
9267 file = LOCATION_FILE (input_location);
9268 /* Just use the file's basename, because the full pathname
9269 might be quite long. */
9270 p = q = ASTRDUP (lbasename (file));
9271 }
9272 else
9273 {
9274 /* Otherwise, the name must be unique across the entire link.
9275 We don't have anything that we know to be unique to this translation
9276 unit, so use what we do have and throw in some randomness. */
9277 unsigned len;
9278 const char *name = weak_global_object_name;
9279 const char *file = main_input_filename;
9280
9281 if (! name)
9282 name = "";
9283 if (! file)
9284 file = LOCATION_FILE (input_location);
9285
9286 len = strlen (file);
9287 q = (char *) alloca (9 + 17 + len + 1);
9288 memcpy (q, file, len + 1);
9289
9290 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9291 crc32_string (0, name), get_random_seed (false));
9292
9293 p = q;
9294 }
9295
9296 clean_symbol_name (q);
9297 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9298 + strlen (type));
9299
9300 /* Set up the name of the file-level functions we may need.
9301 Use a global object (which is already required to be unique over
9302 the program) rather than the file name (which imposes extra
9303 constraints). */
9304 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9305
9306 return get_identifier (buf);
9307 }
9308 \f
9309 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9310
9311 /* Complain that the tree code of NODE does not match the expected 0
9312 terminated list of trailing codes. The trailing code list can be
9313 empty, for a more vague error message. FILE, LINE, and FUNCTION
9314 are of the caller. */
9315
9316 void
9317 tree_check_failed (const_tree node, const char *file,
9318 int line, const char *function, ...)
9319 {
9320 va_list args;
9321 const char *buffer;
9322 unsigned length = 0;
9323 enum tree_code code;
9324
9325 va_start (args, function);
9326 while ((code = (enum tree_code) va_arg (args, int)))
9327 length += 4 + strlen (get_tree_code_name (code));
9328 va_end (args);
9329 if (length)
9330 {
9331 char *tmp;
9332 va_start (args, function);
9333 length += strlen ("expected ");
9334 buffer = tmp = (char *) alloca (length);
9335 length = 0;
9336 while ((code = (enum tree_code) va_arg (args, int)))
9337 {
9338 const char *prefix = length ? " or " : "expected ";
9339
9340 strcpy (tmp + length, prefix);
9341 length += strlen (prefix);
9342 strcpy (tmp + length, get_tree_code_name (code));
9343 length += strlen (get_tree_code_name (code));
9344 }
9345 va_end (args);
9346 }
9347 else
9348 buffer = "unexpected node";
9349
9350 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9351 buffer, get_tree_code_name (TREE_CODE (node)),
9352 function, trim_filename (file), line);
9353 }
9354
9355 /* Complain that the tree code of NODE matches one of the disallowed codes
9356 in the 0 terminated trailing list. FILE, LINE, and FUNCTION are of
9357 the caller. */
9358
9359 void
9360 tree_not_check_failed (const_tree node, const char *file,
9361 int line, const char *function, ...)
9362 {
9363 va_list args;
9364 char *buffer;
9365 unsigned length = 0;
9366 enum tree_code code;
9367
9368 va_start (args, function);
9369 while ((code = (enum tree_code) va_arg (args, int)))
9370 length += 4 + strlen (get_tree_code_name (code));
9371 va_end (args);
9372 va_start (args, function);
9373 buffer = (char *) alloca (length);
9374 length = 0;
9375 while ((code = (enum tree_code) va_arg (args, int)))
9376 {
9377 if (length)
9378 {
9379 strcpy (buffer + length, " or ");
9380 length += 4;
9381 }
9382 strcpy (buffer + length, get_tree_code_name (code));
9383 length += strlen (get_tree_code_name (code));
9384 }
9385 va_end (args);
9386
9387 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9388 buffer, get_tree_code_name (TREE_CODE (node)),
9389 function, trim_filename (file), line);
9390 }
9391
9392 /* Similar to tree_check_failed, except that we check for a class of tree
9393 code, given in CL. */
9394
9395 void
9396 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9397 const char *file, int line, const char *function)
9398 {
9399 internal_error
9400 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9401 TREE_CODE_CLASS_STRING (cl),
9402 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9403 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9404 }
9405
9406 /* Similar to tree_check_failed, except that instead of specifying a
9407 dozen codes, use the knowledge that they're all sequential. */
9408
9409 void
9410 tree_range_check_failed (const_tree node, const char *file, int line,
9411 const char *function, enum tree_code c1,
9412 enum tree_code c2)
9413 {
9414 char *buffer;
9415 unsigned length = 0;
9416 unsigned int c;
9417
9418 for (c = c1; c <= c2; ++c)
9419 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9420
9421 length += strlen ("expected ");
9422 buffer = (char *) alloca (length);
9423 length = 0;
9424
9425 for (c = c1; c <= c2; ++c)
9426 {
9427 const char *prefix = length ? " or " : "expected ";
9428
9429 strcpy (buffer + length, prefix);
9430 length += strlen (prefix);
9431 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9432 length += strlen (get_tree_code_name ((enum tree_code) c));
9433 }
9434
9435 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9436 buffer, get_tree_code_name (TREE_CODE (node)),
9437 function, trim_filename (file), line);
9438 }
9439
9440
9441 /* Similar to tree_class_check_failed, except that we check that a tree does
9442 not belong to the specified class, given in CL. */
9443
9444 void
9445 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9446 const char *file, int line, const char *function)
9447 {
9448 internal_error
9449 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9450 TREE_CODE_CLASS_STRING (cl),
9451 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9452 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9453 }
9454
9455
9456 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9457
9458 void
9459 omp_clause_check_failed (const_tree node, const char *file, int line,
9460 const char *function, enum omp_clause_code code)
9461 {
9462 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9463 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9464 function, trim_filename (file), line);
9465 }
9466
9467
9468 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9469
9470 void
9471 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9472 const char *function, enum omp_clause_code c1,
9473 enum omp_clause_code c2)
9474 {
9475 char *buffer;
9476 unsigned length = 0;
9477 unsigned int c;
9478
9479 for (c = c1; c <= c2; ++c)
9480 length += 4 + strlen (omp_clause_code_name[c]);
9481
9482 length += strlen ("expected ");
9483 buffer = (char *) alloca (length);
9484 length = 0;
9485
9486 for (c = c1; c <= c2; ++c)
9487 {
9488 const char *prefix = length ? " or " : "expected ";
9489
9490 strcpy (buffer + length, prefix);
9491 length += strlen (prefix);
9492 strcpy (buffer + length, omp_clause_code_name[c]);
9493 length += strlen (omp_clause_code_name[c]);
9494 }
9495
9496 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9497 buffer, omp_clause_code_name[TREE_CODE (node)],
9498 function, trim_filename (file), line);
9499 }
9500
9501
9502 #undef DEFTREESTRUCT
9503 #define DEFTREESTRUCT(VAL, NAME) NAME,
9504
9505 static const char *ts_enum_names[] = {
9506 #include "treestruct.def"
9507 };
9508 #undef DEFTREESTRUCT
9509
9510 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9511
9512 /* Similar to tree_class_check_failed, except that we check for
9513 whether CODE contains the tree structure identified by EN. */
9514
9515 void
9516 tree_contains_struct_check_failed (const_tree node,
9517 const enum tree_node_structure_enum en,
9518 const char *file, int line,
9519 const char *function)
9520 {
9521 internal_error
9522 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9523 TS_ENUM_NAME (en),
9524 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9525 }
9526
9527
9528 /* Similar to above, except that the check is for the bounds of a
9529 tree_int_cst's (dynamically sized) vector of elements. */
9530
9531 void
9532 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9533 const char *function)
9534 {
9535 internal_error
9536 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9537 idx + 1, len, function, trim_filename (file), line);
9538 }
9539
9540 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9541 (dynamically sized) vector. */
9542
9543 void
9544 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9545 const char *function)
9546 {
9547 internal_error
9548 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9549 idx + 1, len, function, trim_filename (file), line);
9550 }
9551
9552 /* Similar to above, except that the check is for the bounds of the operand
9553 vector of an expression node EXP. */
9554
9555 void
9556 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9557 int line, const char *function)
9558 {
9559 enum tree_code code = TREE_CODE (exp);
9560 internal_error
9561 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9562 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9563 function, trim_filename (file), line);
9564 }
9565
9566 /* Similar to above, except that the check is for the number of
9567 operands of an OMP_CLAUSE node. */
9568
9569 void
9570 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9571 int line, const char *function)
9572 {
9573 internal_error
9574 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9575 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9576 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9577 trim_filename (file), line);
9578 }
9579 #endif /* ENABLE_TREE_CHECKING */
9580 \f
9581 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9582 and mapped to the machine mode MODE. Initialize its fields and build
9583 the information necessary for debugging output. */
9584
9585 static tree
9586 make_vector_type (tree innertype, int nunits, machine_mode mode)
9587 {
9588 tree t;
9589 inchash::hash hstate;
9590
9591 t = make_node (VECTOR_TYPE);
9592 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9593 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9594 SET_TYPE_MODE (t, mode);
9595
9596 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9597 SET_TYPE_STRUCTURAL_EQUALITY (t);
9598 else if (TYPE_CANONICAL (innertype) != innertype
9599 || mode != VOIDmode)
9600 TYPE_CANONICAL (t)
9601 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9602
9603 layout_type (t);
9604
9605 hstate.add_wide_int (VECTOR_TYPE);
9606 hstate.add_wide_int (nunits);
9607 hstate.add_wide_int (mode);
9608 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9609 t = type_hash_canon (hstate.end (), t);
9610
9611 /* We have built a main variant, based on the main variant of the
9612 inner type. Use it to build the variant we return. */
9613 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9614 && TREE_TYPE (t) != innertype)
9615 return build_type_attribute_qual_variant (t,
9616 TYPE_ATTRIBUTES (innertype),
9617 TYPE_QUALS (innertype));
9618
9619 return t;
9620 }
9621
9622 static tree
9623 make_or_reuse_type (unsigned size, int unsignedp)
9624 {
9625 int i;
9626
9627 if (size == INT_TYPE_SIZE)
9628 return unsignedp ? unsigned_type_node : integer_type_node;
9629 if (size == CHAR_TYPE_SIZE)
9630 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9631 if (size == SHORT_TYPE_SIZE)
9632 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9633 if (size == LONG_TYPE_SIZE)
9634 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9635 if (size == LONG_LONG_TYPE_SIZE)
9636 return (unsignedp ? long_long_unsigned_type_node
9637 : long_long_integer_type_node);
9638
9639 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9640 if (size == int_n_data[i].bitsize
9641 && int_n_enabled_p[i])
9642 return (unsignedp ? int_n_trees[i].unsigned_type
9643 : int_n_trees[i].signed_type);
9644
9645 if (unsignedp)
9646 return make_unsigned_type (size);
9647 else
9648 return make_signed_type (size);
9649 }
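
/* Illustrative sketch only (not part of the implementation): on a target
   where INT_TYPE_SIZE is 32, a call such as

     tree t = make_or_reuse_type (32, 1);

   simply returns the existing unsigned_type_node, whereas an unusual size
   such as 24 (with no matching __intN entry) falls through to
   make_unsigned_type (24) and builds a fresh type node.  */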
9650
9651 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9652
9653 static tree
9654 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9655 {
9656 if (satp)
9657 {
9658 if (size == SHORT_FRACT_TYPE_SIZE)
9659 return unsignedp ? sat_unsigned_short_fract_type_node
9660 : sat_short_fract_type_node;
9661 if (size == FRACT_TYPE_SIZE)
9662 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9663 if (size == LONG_FRACT_TYPE_SIZE)
9664 return unsignedp ? sat_unsigned_long_fract_type_node
9665 : sat_long_fract_type_node;
9666 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9667 return unsignedp ? sat_unsigned_long_long_fract_type_node
9668 : sat_long_long_fract_type_node;
9669 }
9670 else
9671 {
9672 if (size == SHORT_FRACT_TYPE_SIZE)
9673 return unsignedp ? unsigned_short_fract_type_node
9674 : short_fract_type_node;
9675 if (size == FRACT_TYPE_SIZE)
9676 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9677 if (size == LONG_FRACT_TYPE_SIZE)
9678 return unsignedp ? unsigned_long_fract_type_node
9679 : long_fract_type_node;
9680 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9681 return unsignedp ? unsigned_long_long_fract_type_node
9682 : long_long_fract_type_node;
9683 }
9684
9685 return make_fract_type (size, unsignedp, satp);
9686 }
9687
9688 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9689
9690 static tree
9691 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9692 {
9693 if (satp)
9694 {
9695 if (size == SHORT_ACCUM_TYPE_SIZE)
9696 return unsignedp ? sat_unsigned_short_accum_type_node
9697 : sat_short_accum_type_node;
9698 if (size == ACCUM_TYPE_SIZE)
9699 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9700 if (size == LONG_ACCUM_TYPE_SIZE)
9701 return unsignedp ? sat_unsigned_long_accum_type_node
9702 : sat_long_accum_type_node;
9703 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9704 return unsignedp ? sat_unsigned_long_long_accum_type_node
9705 : sat_long_long_accum_type_node;
9706 }
9707 else
9708 {
9709 if (size == SHORT_ACCUM_TYPE_SIZE)
9710 return unsignedp ? unsigned_short_accum_type_node
9711 : short_accum_type_node;
9712 if (size == ACCUM_TYPE_SIZE)
9713 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9714 if (size == LONG_ACCUM_TYPE_SIZE)
9715 return unsignedp ? unsigned_long_accum_type_node
9716 : long_accum_type_node;
9717 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9718 return unsignedp ? unsigned_long_long_accum_type_node
9719 : long_long_accum_type_node;
9720 }
9721
9722 return make_accum_type (size, unsignedp, satp);
9723 }
9724
9725
9726 /* Create an atomic variant node for TYPE. This routine is called
9727 during initialization of data types to create the 5 basic atomic
9728 types. The generic build_variant_type function requires these to
9729 already be set up in order to function properly, so cannot be
9730 called from there. If ALIGN is non-zero, then ensure alignment is
9731 overridden to this value. */
9732
9733 static tree
9734 build_atomic_base (tree type, unsigned int align)
9735 {
9736 tree t;
9737
9738 /* Make sure it's not already registered. */
9739 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9740 return t;
9741
9742 t = build_variant_type_copy (type);
9743 set_type_quals (t, TYPE_QUAL_ATOMIC);
9744
9745 if (align)
9746 TYPE_ALIGN (t) = align;
9747
9748 return t;
9749 }
9750
9751 /* Create nodes for all integer types (and error_mark_node) using the sizes
9752 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9753 SHORT_DOUBLE specifies whether double should be of the same precision
9754 as float. */
9755
9756 void
9757 build_common_tree_nodes (bool signed_char, bool short_double)
9758 {
9759 int i;
9760
9761 error_mark_node = make_node (ERROR_MARK);
9762 TREE_TYPE (error_mark_node) = error_mark_node;
9763
9764 initialize_sizetypes ();
9765
9766 /* Define both `signed char' and `unsigned char'. */
9767 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9768 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9769 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9770 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9771
9772 /* Define `char', which is like either `signed char' or `unsigned char'
9773 but not the same as either. */
9774 char_type_node
9775 = (signed_char
9776 ? make_signed_type (CHAR_TYPE_SIZE)
9777 : make_unsigned_type (CHAR_TYPE_SIZE));
9778 TYPE_STRING_FLAG (char_type_node) = 1;
9779
9780 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9781 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9782 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9783 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9784 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9785 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9786 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9787 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9788
9789 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9790 {
9791 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9792 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9793 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9794 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9795
9796 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9797 && int_n_enabled_p[i])
9798 {
9799 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9800 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9801 }
9802 }
9803
9804 /* Define a boolean type. This type only represents boolean values but
9805 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9806 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9807 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9808 TYPE_PRECISION (boolean_type_node) = 1;
9809 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9810
9811 /* Define what type to use for size_t. */
9812 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9813 size_type_node = unsigned_type_node;
9814 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9815 size_type_node = long_unsigned_type_node;
9816 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9817 size_type_node = long_long_unsigned_type_node;
9818 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9819 size_type_node = short_unsigned_type_node;
9820 else
9821 {
9822 int i;
9823
9824 size_type_node = NULL_TREE;
9825 for (i = 0; i < NUM_INT_N_ENTS; i++)
9826 if (int_n_enabled_p[i])
9827 {
9828 char name[50];
9829 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9830
9831 if (strcmp (name, SIZE_TYPE) == 0)
9832 {
9833 size_type_node = int_n_trees[i].unsigned_type;
9834 }
9835 }
9836 if (size_type_node == NULL_TREE)
9837 gcc_unreachable ();
9838 }
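
/* For illustration only (target-dependent): on a typical LP64 target
   SIZE_TYPE is "long unsigned int", so the strcmp chain above selects
   long_unsigned_type_node for size_type_node.  */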
9839
9840 /* Fill in the rest of the sized types. Reuse existing type nodes
9841 when possible. */
9842 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9843 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9844 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9845 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9846 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9847
9848 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9849 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9850 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9851 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9852 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9853
9854 /* Don't call build_qualified_type for atomics. That routine does
9855 special processing for atomics, and until they are initialized
9856 it's better not to make that call.
9857
9858 Check to see if there is a target override for atomic types. */
9859
9860 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9861 targetm.atomic_align_for_mode (QImode));
9862 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9863 targetm.atomic_align_for_mode (HImode));
9864 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9865 targetm.atomic_align_for_mode (SImode));
9866 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9867 targetm.atomic_align_for_mode (DImode));
9868 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9869 targetm.atomic_align_for_mode (TImode));
9870
9871 access_public_node = get_identifier ("public");
9872 access_protected_node = get_identifier ("protected");
9873 access_private_node = get_identifier ("private");
9874
9875 /* Define these next since the types below may use them. */
9876 integer_zero_node = build_int_cst (integer_type_node, 0);
9877 integer_one_node = build_int_cst (integer_type_node, 1);
9878 integer_three_node = build_int_cst (integer_type_node, 3);
9879 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9880
9881 size_zero_node = size_int (0);
9882 size_one_node = size_int (1);
9883 bitsize_zero_node = bitsize_int (0);
9884 bitsize_one_node = bitsize_int (1);
9885 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9886
9887 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9888 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9889
9890 void_type_node = make_node (VOID_TYPE);
9891 layout_type (void_type_node);
9892
9893 pointer_bounds_type_node = targetm.chkp_bound_type ();
9894
9895 /* We are not going to have real types in C with less than byte alignment,
9896 so we might as well not have any types that claim to have it. */
9897 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9898 TYPE_USER_ALIGN (void_type_node) = 0;
9899
9900 void_node = make_node (VOID_CST);
9901 TREE_TYPE (void_node) = void_type_node;
9902
9903 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9904 layout_type (TREE_TYPE (null_pointer_node));
9905
9906 ptr_type_node = build_pointer_type (void_type_node);
9907 const_ptr_type_node
9908 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9909 fileptr_type_node = ptr_type_node;
9910
9911 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9912
9913 float_type_node = make_node (REAL_TYPE);
9914 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9915 layout_type (float_type_node);
9916
9917 double_type_node = make_node (REAL_TYPE);
9918 if (short_double)
9919 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9920 else
9921 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9922 layout_type (double_type_node);
9923
9924 long_double_type_node = make_node (REAL_TYPE);
9925 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9926 layout_type (long_double_type_node);
9927
9928 float_ptr_type_node = build_pointer_type (float_type_node);
9929 double_ptr_type_node = build_pointer_type (double_type_node);
9930 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9931 integer_ptr_type_node = build_pointer_type (integer_type_node);
9932
9933 /* Fixed size integer types. */
9934 uint16_type_node = make_or_reuse_type (16, 1);
9935 uint32_type_node = make_or_reuse_type (32, 1);
9936 uint64_type_node = make_or_reuse_type (64, 1);
9937
9938 /* Decimal float types. */
9939 dfloat32_type_node = make_node (REAL_TYPE);
9940 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9941 layout_type (dfloat32_type_node);
9942 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9943 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9944
9945 dfloat64_type_node = make_node (REAL_TYPE);
9946 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9947 layout_type (dfloat64_type_node);
9948 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9949 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9950
9951 dfloat128_type_node = make_node (REAL_TYPE);
9952 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9953 layout_type (dfloat128_type_node);
9954 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9955 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9956
9957 complex_integer_type_node = build_complex_type (integer_type_node);
9958 complex_float_type_node = build_complex_type (float_type_node);
9959 complex_double_type_node = build_complex_type (double_type_node);
9960 complex_long_double_type_node = build_complex_type (long_double_type_node);
9961
9962 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9963 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9964 sat_ ## KIND ## _type_node = \
9965 make_sat_signed_ ## KIND ## _type (SIZE); \
9966 sat_unsigned_ ## KIND ## _type_node = \
9967 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9968 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9969 unsigned_ ## KIND ## _type_node = \
9970 make_unsigned_ ## KIND ## _type (SIZE);
9971
9972 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9973 sat_ ## WIDTH ## KIND ## _type_node = \
9974 make_sat_signed_ ## KIND ## _type (SIZE); \
9975 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9976 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9977 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9978 unsigned_ ## WIDTH ## KIND ## _type_node = \
9979 make_unsigned_ ## KIND ## _type (SIZE);
9980
9981 /* Make fixed-point type nodes based on four different widths. */
9982 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9983 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9984 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9985 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9986 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9987
9988 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9989 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9990 NAME ## _type_node = \
9991 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9992 u ## NAME ## _type_node = \
9993 make_or_reuse_unsigned_ ## KIND ## _type \
9994 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9995 sat_ ## NAME ## _type_node = \
9996 make_or_reuse_sat_signed_ ## KIND ## _type \
9997 (GET_MODE_BITSIZE (MODE ## mode)); \
9998 sat_u ## NAME ## _type_node = \
9999 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10000 (GET_MODE_BITSIZE (U ## MODE ## mode));
10001
10002 /* Fixed-point type and mode nodes. */
10003 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10004 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10005 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10006 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10007 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10008 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10009 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10010 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10011 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10012 MAKE_FIXED_MODE_NODE (accum, da, DA)
10013 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10014
10015 {
10016 tree t = targetm.build_builtin_va_list ();
10017
10018 /* Many back-ends define record types without setting TYPE_NAME.
10019 If we copied the record type here, we'd keep the original
10020 record type without a name. This breaks name mangling. So,
10021 don't copy record types and let c_common_nodes_and_builtins()
10022 declare the type to be __builtin_va_list. */
10023 if (TREE_CODE (t) != RECORD_TYPE)
10024 t = build_variant_type_copy (t);
10025
10026 va_list_type_node = t;
10027 }
10028 }
10029
10030 /* Modify DECL for given flags.
10031 TM_PURE attribute is set only on types, so the function will modify
10032 DECL's type when ECF_TM_PURE is used. */
10033
10034 void
10035 set_call_expr_flags (tree decl, int flags)
10036 {
10037 if (flags & ECF_NOTHROW)
10038 TREE_NOTHROW (decl) = 1;
10039 if (flags & ECF_CONST)
10040 TREE_READONLY (decl) = 1;
10041 if (flags & ECF_PURE)
10042 DECL_PURE_P (decl) = 1;
10043 if (flags & ECF_LOOPING_CONST_OR_PURE)
10044 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10045 if (flags & ECF_NOVOPS)
10046 DECL_IS_NOVOPS (decl) = 1;
10047 if (flags & ECF_NORETURN)
10048 TREE_THIS_VOLATILE (decl) = 1;
10049 if (flags & ECF_MALLOC)
10050 DECL_IS_MALLOC (decl) = 1;
10051 if (flags & ECF_RETURNS_TWICE)
10052 DECL_IS_RETURNS_TWICE (decl) = 1;
10053 if (flags & ECF_LEAF)
10054 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10055 NULL, DECL_ATTRIBUTES (decl));
10056 if ((flags & ECF_TM_PURE) && flag_tm)
10057 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10058 /* Looping const or pure is implied by noreturn.
10059 There is currently no way to declare looping const or looping pure alone. */
10060 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10061 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10062 }
10063
10064
10065 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10066
10067 static void
10068 local_define_builtin (const char *name, tree type, enum built_in_function code,
10069 const char *library_name, int ecf_flags)
10070 {
10071 tree decl;
10072
10073 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10074 library_name, NULL_TREE);
10075 set_call_expr_flags (decl, ecf_flags);
10076
10077 set_builtin_decl (code, decl, true);
10078 }
10079
10080 /* Call this function after instantiating all builtins that the language
10081 front end cares about. This will build the rest of the builtins
10082 and internal functions that are relied upon by the tree optimizers and
10083 the middle-end. */
10084
10085 void
10086 build_common_builtin_nodes (void)
10087 {
10088 tree tmp, ftype;
10089 int ecf_flags;
10090
10091 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10092 {
10093 ftype = build_function_type (void_type_node, void_list_node);
10094 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10095 "__builtin_unreachable",
10096 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10097 | ECF_CONST);
10098 }
10099
10100 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10101 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10102 {
10103 ftype = build_function_type_list (ptr_type_node,
10104 ptr_type_node, const_ptr_type_node,
10105 size_type_node, NULL_TREE);
10106
10107 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10108 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10109 "memcpy", ECF_NOTHROW | ECF_LEAF);
10110 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10111 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10112 "memmove", ECF_NOTHROW | ECF_LEAF);
10113 }
10114
10115 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10116 {
10117 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10118 const_ptr_type_node, size_type_node,
10119 NULL_TREE);
10120 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10121 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10122 }
10123
10124 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10125 {
10126 ftype = build_function_type_list (ptr_type_node,
10127 ptr_type_node, integer_type_node,
10128 size_type_node, NULL_TREE);
10129 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10130 "memset", ECF_NOTHROW | ECF_LEAF);
10131 }
10132
10133 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10134 {
10135 ftype = build_function_type_list (ptr_type_node,
10136 size_type_node, NULL_TREE);
10137 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10138 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10139 }
10140
10141 ftype = build_function_type_list (ptr_type_node, size_type_node,
10142 size_type_node, NULL_TREE);
10143 local_define_builtin ("__builtin_alloca_with_align", ftype,
10144 BUILT_IN_ALLOCA_WITH_ALIGN,
10145 "__builtin_alloca_with_align",
10146 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10147
10148 /* If we're checking the stack, `alloca' can throw. */
10149 if (flag_stack_check)
10150 {
10151 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10152 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10153 }
10154
10155 ftype = build_function_type_list (void_type_node,
10156 ptr_type_node, ptr_type_node,
10157 ptr_type_node, NULL_TREE);
10158 local_define_builtin ("__builtin_init_trampoline", ftype,
10159 BUILT_IN_INIT_TRAMPOLINE,
10160 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10161 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10162 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10163 "__builtin_init_heap_trampoline",
10164 ECF_NOTHROW | ECF_LEAF);
10165
10166 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10167 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10168 BUILT_IN_ADJUST_TRAMPOLINE,
10169 "__builtin_adjust_trampoline",
10170 ECF_CONST | ECF_NOTHROW);
10171
10172 ftype = build_function_type_list (void_type_node,
10173 ptr_type_node, ptr_type_node, NULL_TREE);
10174 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10175 BUILT_IN_NONLOCAL_GOTO,
10176 "__builtin_nonlocal_goto",
10177 ECF_NORETURN | ECF_NOTHROW);
10178
10179 ftype = build_function_type_list (void_type_node,
10180 ptr_type_node, ptr_type_node, NULL_TREE);
10181 local_define_builtin ("__builtin_setjmp_setup", ftype,
10182 BUILT_IN_SETJMP_SETUP,
10183 "__builtin_setjmp_setup", ECF_NOTHROW);
10184
10185 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10186 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10187 BUILT_IN_SETJMP_RECEIVER,
10188 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10189
10190 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10191 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10192 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10193
10194 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10195 local_define_builtin ("__builtin_stack_restore", ftype,
10196 BUILT_IN_STACK_RESTORE,
10197 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10198
10199 /* If there's a possibility that we might use the ARM EABI, build the
10200 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10201 if (targetm.arm_eabi_unwinder)
10202 {
10203 ftype = build_function_type_list (void_type_node, NULL_TREE);
10204 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10205 BUILT_IN_CXA_END_CLEANUP,
10206 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10207 }
10208
10209 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10210 local_define_builtin ("__builtin_unwind_resume", ftype,
10211 BUILT_IN_UNWIND_RESUME,
10212 ((targetm_common.except_unwind_info (&global_options)
10213 == UI_SJLJ)
10214 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10215 ECF_NORETURN);
10216
10217 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10218 {
10219 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10220 NULL_TREE);
10221 local_define_builtin ("__builtin_return_address", ftype,
10222 BUILT_IN_RETURN_ADDRESS,
10223 "__builtin_return_address",
10224 ECF_NOTHROW);
10225 }
10226
10227 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10228 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10229 {
10230 ftype = build_function_type_list (void_type_node, ptr_type_node,
10231 ptr_type_node, NULL_TREE);
10232 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10233 local_define_builtin ("__cyg_profile_func_enter", ftype,
10234 BUILT_IN_PROFILE_FUNC_ENTER,
10235 "__cyg_profile_func_enter", 0);
10236 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10237 local_define_builtin ("__cyg_profile_func_exit", ftype,
10238 BUILT_IN_PROFILE_FUNC_EXIT,
10239 "__cyg_profile_func_exit", 0);
10240 }
10241
10242 /* The exception object and filter values from the runtime. The argument
10243 must be zero before exception lowering, i.e. from the front end. After
10244 exception lowering, it will be the region number for the exception
10245 landing pad. These functions are PURE instead of CONST to prevent
10246 them from being hoisted past the exception edge that will initialize
10247 its value in the landing pad. */
10248 ftype = build_function_type_list (ptr_type_node,
10249 integer_type_node, NULL_TREE);
10250 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10251 /* Only use TM_PURE if we have TM language support. */
10252 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10253 ecf_flags |= ECF_TM_PURE;
10254 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10255 "__builtin_eh_pointer", ecf_flags);
10256
10257 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10258 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10259 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10260 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10261
10262 ftype = build_function_type_list (void_type_node,
10263 integer_type_node, integer_type_node,
10264 NULL_TREE);
10265 local_define_builtin ("__builtin_eh_copy_values", ftype,
10266 BUILT_IN_EH_COPY_VALUES,
10267 "__builtin_eh_copy_values", ECF_NOTHROW);
10268
10269 /* Complex multiplication and division. These are handled as builtins
10270 rather than optabs because emit_library_call_value doesn't support
10271 complex. Further, we can do slightly better with folding these
10272 beasties if the real and imaginary parts of the arguments are separate. */
10273 {
10274 int mode;
10275
10276 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10277 {
10278 char mode_name_buf[4], *q;
10279 const char *p;
10280 enum built_in_function mcode, dcode;
10281 tree type, inner_type;
10282 const char *prefix = "__";
10283
10284 if (targetm.libfunc_gnu_prefix)
10285 prefix = "__gnu_";
10286
10287 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10288 if (type == NULL)
10289 continue;
10290 inner_type = TREE_TYPE (type);
10291
10292 ftype = build_function_type_list (type, inner_type, inner_type,
10293 inner_type, inner_type, NULL_TREE);
10294
10295 mcode = ((enum built_in_function)
10296 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10297 dcode = ((enum built_in_function)
10298 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10299
10300 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10301 *q = TOLOWER (*p);
10302 *q = '\0';
10303
10304 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10305 NULL);
10306 local_define_builtin (built_in_names[mcode], ftype, mcode,
10307 built_in_names[mcode],
10308 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10309
10310 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10311 NULL);
10312 local_define_builtin (built_in_names[dcode], ftype, dcode,
10313 built_in_names[dcode],
10314 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10315 }
10316 }
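
/* For example, with the default "__" prefix the loop above ends up
   registering names such as __mulsc3 and __divsc3 for SCmode, matching the
   complex multiply/divide helper routines provided by libgcc.  */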
10317
10318 init_internal_fns ();
10319 }
10320
10321 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10322 better way.
10323
10324 If we requested a pointer to a vector, build up the pointers that
10325 we stripped off while looking for the inner type. Similarly for
10326 return values from functions.
10327
10328 The argument TYPE is the top of the chain, and BOTTOM is the
10329 new type which we will point to. */
10330
10331 tree
10332 reconstruct_complex_type (tree type, tree bottom)
10333 {
10334 tree inner, outer;
10335
10336 if (TREE_CODE (type) == POINTER_TYPE)
10337 {
10338 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10339 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10340 TYPE_REF_CAN_ALIAS_ALL (type));
10341 }
10342 else if (TREE_CODE (type) == REFERENCE_TYPE)
10343 {
10344 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10345 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10346 TYPE_REF_CAN_ALIAS_ALL (type));
10347 }
10348 else if (TREE_CODE (type) == ARRAY_TYPE)
10349 {
10350 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10351 outer = build_array_type (inner, TYPE_DOMAIN (type));
10352 }
10353 else if (TREE_CODE (type) == FUNCTION_TYPE)
10354 {
10355 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10356 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10357 }
10358 else if (TREE_CODE (type) == METHOD_TYPE)
10359 {
10360 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10361 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10362 so we must compensate by getting rid of it. */
10363 outer
10364 = build_method_type_directly
10365 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10366 inner,
10367 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10368 }
10369 else if (TREE_CODE (type) == OFFSET_TYPE)
10370 {
10371 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10372 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10373 }
10374 else
10375 return bottom;
10376
10377 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10378 TYPE_QUALS (type));
10379 }
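
/* Illustrative sketch: given TYPE == float * and BOTTOM == a vector-of-float
   type, the routine above rebuilds and returns a pointer to that vector
   type, preserving the original pointer's mode and qualifiers.  */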
10380
10381 /* Returns a vector tree node given a mode (an integer or vector mode) and
10382 the inner type. */
10383 tree
10384 build_vector_type_for_mode (tree innertype, machine_mode mode)
10385 {
10386 int nunits;
10387
10388 switch (GET_MODE_CLASS (mode))
10389 {
10390 case MODE_VECTOR_INT:
10391 case MODE_VECTOR_FLOAT:
10392 case MODE_VECTOR_FRACT:
10393 case MODE_VECTOR_UFRACT:
10394 case MODE_VECTOR_ACCUM:
10395 case MODE_VECTOR_UACCUM:
10396 nunits = GET_MODE_NUNITS (mode);
10397 break;
10398
10399 case MODE_INT:
10400 /* Check that there are no leftover bits. */
10401 gcc_assert (GET_MODE_BITSIZE (mode)
10402 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10403
10404 nunits = GET_MODE_BITSIZE (mode)
10405 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10406 break;
10407
10408 default:
10409 gcc_unreachable ();
10410 }
10411
10412 return make_vector_type (innertype, nunits, mode);
10413 }
10414
10415 /* Similarly, but takes the inner type and number of units, which must be
10416 a power of two. */
10417
10418 tree
10419 build_vector_type (tree innertype, int nunits)
10420 {
10421 return make_vector_type (innertype, nunits, VOIDmode);
10422 }
10423
10424 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10425
10426 tree
10427 build_opaque_vector_type (tree innertype, int nunits)
10428 {
10429 tree t = make_vector_type (innertype, nunits, VOIDmode);
10430 tree cand;
10431 /* We always build the non-opaque variant before the opaque one,
10432 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10433 cand = TYPE_NEXT_VARIANT (t);
10434 if (cand
10435 && TYPE_VECTOR_OPAQUE (cand)
10436 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10437 return cand;
10438 /* Otherwise build a variant type and make sure to queue it after
10439 the non-opaque type. */
10440 cand = build_distinct_type_copy (t);
10441 TYPE_VECTOR_OPAQUE (cand) = true;
10442 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10443 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10444 TYPE_NEXT_VARIANT (t) = cand;
10445 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10446 return cand;
10447 }
10448
10449
10450 /* Given an initializer INIT, return TRUE if INIT is zero or some
10451 aggregate of zeros. Otherwise return FALSE. */
10452 bool
10453 initializer_zerop (const_tree init)
10454 {
10455 tree elt;
10456
10457 STRIP_NOPS (init);
10458
10459 switch (TREE_CODE (init))
10460 {
10461 case INTEGER_CST:
10462 return integer_zerop (init);
10463
10464 case REAL_CST:
10465 /* ??? Note that this is not correct for C4X float formats. There,
10466 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10467 negative exponent. */
10468 return real_zerop (init)
10469 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10470
10471 case FIXED_CST:
10472 return fixed_zerop (init);
10473
10474 case COMPLEX_CST:
10475 return integer_zerop (init)
10476 || (real_zerop (init)
10477 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10478 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10479
10480 case VECTOR_CST:
10481 {
10482 unsigned i;
10483 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10484 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10485 return false;
10486 return true;
10487 }
10488
10489 case CONSTRUCTOR:
10490 {
10491 unsigned HOST_WIDE_INT idx;
10492
10493 if (TREE_CLOBBER_P (init))
10494 return false;
10495 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10496 if (!initializer_zerop (elt))
10497 return false;
10498 return true;
10499 }
10500
10501 case STRING_CST:
10502 {
10503 int i;
10504
10505 /* We need to loop through all elements to handle cases like
10506 "\0" and "\0foobar". */
10507 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10508 if (TREE_STRING_POINTER (init)[i] != '\0')
10509 return false;
10510
10511 return true;
10512 }
10513
10514 default:
10515 return false;
10516 }
10517 }
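
/* Illustrative examples for the predicate above: integer 0, +0.0 (but not
   -0.0), a COMPLEX_CST or CONSTRUCTOR whose parts are all zero, and a
   STRING_CST consisting only of NUL bytes all yield true.  */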
10518
10519 /* Check if vector VEC consists entirely of equal elements and
10520 that the number of elements corresponds to the type of VEC.
10521 The function returns the first element of the vector
10522 or NULL_TREE if the vector is not uniform. */
10523 tree
10524 uniform_vector_p (const_tree vec)
10525 {
10526 tree first, t;
10527 unsigned i;
10528
10529 if (vec == NULL_TREE)
10530 return NULL_TREE;
10531
10532 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10533
10534 if (TREE_CODE (vec) == VECTOR_CST)
10535 {
10536 first = VECTOR_CST_ELT (vec, 0);
10537 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10538 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10539 return NULL_TREE;
10540
10541 return first;
10542 }
10543
10544 else if (TREE_CODE (vec) == CONSTRUCTOR)
10545 {
10546 first = error_mark_node;
10547
10548 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10549 {
10550 if (i == 0)
10551 {
10552 first = t;
10553 continue;
10554 }
10555 if (!operand_equal_p (first, t, 0))
10556 return NULL_TREE;
10557 }
10558 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10559 return NULL_TREE;
10560
10561 return first;
10562 }
10563
10564 return NULL_TREE;
10565 }
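
/* For example (sketch only): for a VECTOR_CST such as { 7, 7, 7, 7 } the
   function above returns the element 7, while { 1, 2, 3, 4 } yields
   NULL_TREE.  */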
10566
10567 /* Build an empty statement at location LOC. */
10568
10569 tree
10570 build_empty_stmt (location_t loc)
10571 {
10572 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10573 SET_EXPR_LOCATION (t, loc);
10574 return t;
10575 }
10576
10577
10578 /* Build an OpenMP clause with code CODE. LOC is the location of the
10579 clause. */
10580
10581 tree
10582 build_omp_clause (location_t loc, enum omp_clause_code code)
10583 {
10584 tree t;
10585 int size, length;
10586
10587 length = omp_clause_num_ops[code];
10588 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10589
10590 record_node_allocation_statistics (OMP_CLAUSE, size);
10591
10592 t = (tree) ggc_internal_alloc (size);
10593 memset (t, 0, size);
10594 TREE_SET_CODE (t, OMP_CLAUSE);
10595 OMP_CLAUSE_SET_CODE (t, code);
10596 OMP_CLAUSE_LOCATION (t) = loc;
10597
10598 return t;
10599 }
10600
10601 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10602 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10603 Except for the CODE and operand count field, other storage for the
10604 object is initialized to zeros. */
10605
10606 tree
10607 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10608 {
10609 tree t;
10610 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10611
10612 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10613 gcc_assert (len >= 1);
10614
10615 record_node_allocation_statistics (code, length);
10616
10617 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10618
10619 TREE_SET_CODE (t, code);
10620
10621 /* Can't use TREE_OPERAND to store the length because if checking is
10622 enabled, it will try to check the length before we store it. :-P */
10623 t->exp.operands[0] = build_int_cst (sizetype, len);
10624
10625 return t;
10626 }
10627
10628 /* Helper function for build_call_* functions; build a CALL_EXPR with
10629 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10630 the argument slots. */
10631
10632 static tree
10633 build_call_1 (tree return_type, tree fn, int nargs)
10634 {
10635 tree t;
10636
10637 t = build_vl_exp (CALL_EXPR, nargs + 3);
10638 TREE_TYPE (t) = return_type;
10639 CALL_EXPR_FN (t) = fn;
10640 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10641
10642 return t;
10643 }
10644
10645 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10646 FN and a null static chain slot. NARGS is the number of call arguments
10647 which are specified as "..." arguments. */
10648
10649 tree
10650 build_call_nary (tree return_type, tree fn, int nargs, ...)
10651 {
10652 tree ret;
10653 va_list args;
10654 va_start (args, nargs);
10655 ret = build_call_valist (return_type, fn, nargs, args);
10656 va_end (args);
10657 return ret;
10658 }
10659
10660 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10661 FN and a null static chain slot. NARGS is the number of call arguments
10662 which are specified as a va_list ARGS. */
10663
10664 tree
10665 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10666 {
10667 tree t;
10668 int i;
10669
10670 t = build_call_1 (return_type, fn, nargs);
10671 for (i = 0; i < nargs; i++)
10672 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10673 process_call_operands (t);
10674 return t;
10675 }
10676
10677 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10678 FN and a null static chain slot. NARGS is the number of call arguments
10679 which are specified as a tree array ARGS. */
10680
10681 tree
10682 build_call_array_loc (location_t loc, tree return_type, tree fn,
10683 int nargs, const tree *args)
10684 {
10685 tree t;
10686 int i;
10687
10688 t = build_call_1 (return_type, fn, nargs);
10689 for (i = 0; i < nargs; i++)
10690 CALL_EXPR_ARG (t, i) = args[i];
10691 process_call_operands (t);
10692 SET_EXPR_LOCATION (t, loc);
10693 return t;
10694 }
10695
10696 /* Like build_call_array, but takes a vec. */
10697
10698 tree
10699 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10700 {
10701 tree ret, t;
10702 unsigned int ix;
10703
10704 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10705 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10706 CALL_EXPR_ARG (ret, ix) = t;
10707 process_call_operands (ret);
10708 return ret;
10709 }
10710
10711 /* Conveniently construct a function call expression. FNDECL names the
10712 function to be called and N arguments are passed in the array
10713 ARGARRAY. */
10714
10715 tree
10716 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10717 {
10718 tree fntype = TREE_TYPE (fndecl);
10719 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10720
10721 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10722 }
10723
10724 /* Conveniently construct a function call expression. FNDECL names the
10725 function to be called and the arguments are passed in the vector
10726 VEC. */
10727
10728 tree
10729 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10730 {
10731 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10732 vec_safe_address (vec));
10733 }
10734
10735
10736 /* Conveniently construct a function call expression. FNDECL names the
10737 function to be called, N is the number of arguments, and the "..."
10738 parameters are the argument expressions. */
10739
10740 tree
10741 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10742 {
10743 va_list ap;
10744 tree *argarray = XALLOCAVEC (tree, n);
10745 int i;
10746
10747 va_start (ap, n);
10748 for (i = 0; i < n; i++)
10749 argarray[i] = va_arg (ap, tree);
10750 va_end (ap);
10751 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10752 }
10753
10754 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10755 varargs macros aren't supported by all bootstrap compilers. */
10756
10757 tree
10758 build_call_expr (tree fndecl, int n, ...)
10759 {
10760 va_list ap;
10761 tree *argarray = XALLOCAVEC (tree, n);
10762 int i;
10763
10764 va_start (ap, n);
10765 for (i = 0; i < n; i++)
10766 argarray[i] = va_arg (ap, tree);
10767 va_end (ap);
10768 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10769 }
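
/* Usage sketch (DST, SRC and LEN are hypothetical argument trees):

     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMCPY),
                                  3, dst, src, len);

   produces a CALL_EXPR (possibly folded) that calls memcpy with the three
   given arguments.  */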
10770
10771 /* Build an internal call expression. This is just like a CALL_EXPR, except
10772 its CALL_EXPR_FN is NULL. It will later be gimplified into a call to an
10773 internal function. */
10774
10775 tree
10776 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10777 tree type, int n, ...)
10778 {
10779 va_list ap;
10780 int i;
10781
10782 tree fn = build_call_1 (type, NULL_TREE, n);
10783 va_start (ap, n);
10784 for (i = 0; i < n; i++)
10785 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10786 va_end (ap);
10787 SET_EXPR_LOCATION (fn, loc);
10788 CALL_EXPR_IFN (fn) = ifn;
10789 return fn;
10790 }
10791
10792 /* Create a new constant string literal and return a char* pointer to it.
10793 The STRING_CST value is the LEN characters at STR. */
10794 tree
10795 build_string_literal (int len, const char *str)
10796 {
10797 tree t, elem, index, type;
10798
10799 t = build_string (len, str);
10800 elem = build_type_variant (char_type_node, 1, 0);
10801 index = build_index_type (size_int (len - 1));
10802 type = build_array_type (elem, index);
10803 TREE_TYPE (t) = type;
10804 TREE_CONSTANT (t) = 1;
10805 TREE_READONLY (t) = 1;
10806 TREE_STATIC (t) = 1;
10807
10808 type = build_pointer_type (elem);
10809 t = build1 (ADDR_EXPR, type,
10810 build4 (ARRAY_REF, elem,
10811 t, integer_zero_node, NULL_TREE, NULL_TREE));
10812 return t;
10813 }
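
/* Usage sketch: build_string_literal (4, "abc") (the length includes the
   trailing NUL) yields an ADDR_EXPR of pointer-to-const-char type that
   refers to element 0 of the underlying STRING_CST.  */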
10814
10815
10816
10817 /* Return true if T (assumed to be a DECL) must be assigned a memory
10818 location. */
10819
10820 bool
10821 needs_to_live_in_memory (const_tree t)
10822 {
10823 return (TREE_ADDRESSABLE (t)
10824 || is_global_var (t)
10825 || (TREE_CODE (t) == RESULT_DECL
10826 && !DECL_BY_REFERENCE (t)
10827 && aggregate_value_p (t, current_function_decl)));
10828 }
10829
10830 /* Return the value of the constant X, sign-extended. */
10831
10832 HOST_WIDE_INT
10833 int_cst_value (const_tree x)
10834 {
10835 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10836 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10837
10838 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10839 gcc_assert (cst_and_fits_in_hwi (x));
10840
10841 if (bits < HOST_BITS_PER_WIDE_INT)
10842 {
10843 bool negative = ((val >> (bits - 1)) & 1) != 0;
10844 if (negative)
10845 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10846 else
10847 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10848 }
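
  /* Worked example of the sign extension above: with bits == 8 and
     val == 0xfe the top bit of the 8-bit value is set, so all the higher
     bits of VAL are filled in and the function returns -2.  */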
10849
10850 return val;
10851 }
10852
10853 /* If TYPE is an integral or pointer type, return an integer type with
10854 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10855 if TYPE is already an integer type of signedness UNSIGNEDP. */
10856
10857 tree
10858 signed_or_unsigned_type_for (int unsignedp, tree type)
10859 {
10860 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10861 return type;
10862
10863 if (TREE_CODE (type) == VECTOR_TYPE)
10864 {
10865 tree inner = TREE_TYPE (type);
10866 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10867 if (!inner2)
10868 return NULL_TREE;
10869 if (inner == inner2)
10870 return type;
10871 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10872 }
10873
10874 if (!INTEGRAL_TYPE_P (type)
10875 && !POINTER_TYPE_P (type)
10876 && TREE_CODE (type) != OFFSET_TYPE)
10877 return NULL_TREE;
10878
10879 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10880 }
10881
10882 /* If TYPE is an integral or pointer type, return an integer type with
10883 the same precision which is unsigned, or itself if TYPE is already an
10884 unsigned integer type. */
10885
10886 tree
10887 unsigned_type_for (tree type)
10888 {
10889 return signed_or_unsigned_type_for (1, type);
10890 }
10891
10892 /* If TYPE is an integral or pointer type, return an integer type with
10893 the same precision which is signed, or itself if TYPE is already a
10894 signed integer type. */
10895
10896 tree
10897 signed_type_for (tree type)
10898 {
10899 return signed_or_unsigned_type_for (0, type);
10900 }
10901
10902 /* If TYPE is a vector type, return a signed integer vector type with the
10903 same width and number of subparts. Otherwise return boolean_type_node. */
10904
10905 tree
10906 truth_type_for (tree type)
10907 {
10908 if (TREE_CODE (type) == VECTOR_TYPE)
10909 {
10910 tree elem = lang_hooks.types.type_for_size
10911 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10912 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10913 }
10914 else
10915 return boolean_type_node;
10916 }
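
/* Illustrative only: for a 4-element vector of floats this returns an
   opaque 4-element vector of 32-bit signed integers (assuming a 32-bit
   SFmode element), whereas any scalar type simply yields boolean_type_node.  */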
10917
10918 /* Returns the largest value obtainable by casting something in INNER type to
10919 OUTER type. */
10920
10921 tree
10922 upper_bound_in_type (tree outer, tree inner)
10923 {
10924 unsigned int det = 0;
10925 unsigned oprec = TYPE_PRECISION (outer);
10926 unsigned iprec = TYPE_PRECISION (inner);
10927 unsigned prec;
10928
10929 /* Compute a unique number for every combination. */
10930 det |= (oprec > iprec) ? 4 : 0;
10931 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10932 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
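
  /* Illustrative example: casting from a 32-bit signed INNER type to a
     16-bit unsigned OUTER type gives det = 0 | 2 | 0 = 2, so PREC below
     becomes OPREC and the returned bound is 2^16 - 1.  */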
10933
10934 /* Determine the exponent to use. */
10935 switch (det)
10936 {
10937 case 0:
10938 case 1:
10939 /* oprec <= iprec, outer: signed, inner: don't care. */
10940 prec = oprec - 1;
10941 break;
10942 case 2:
10943 case 3:
10944 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10945 prec = oprec;
10946 break;
10947 case 4:
10948 /* oprec > iprec, outer: signed, inner: signed. */
10949 prec = iprec - 1;
10950 break;
10951 case 5:
10952 /* oprec > iprec, outer: signed, inner: unsigned. */
10953 prec = iprec;
10954 break;
10955 case 6:
10956 /* oprec > iprec, outer: unsigned, inner: signed. */
10957 prec = oprec;
10958 break;
10959 case 7:
10960 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10961 prec = iprec;
10962 break;
10963 default:
10964 gcc_unreachable ();
10965 }
10966
10967 return wide_int_to_tree (outer,
10968 wi::mask (prec, false, TYPE_PRECISION (outer)));
10969 }
10970
10971 /* Returns the smallest value obtainable by casting something in INNER type to
10972 OUTER type. */
10973
10974 tree
10975 lower_bound_in_type (tree outer, tree inner)
10976 {
10977 unsigned oprec = TYPE_PRECISION (outer);
10978 unsigned iprec = TYPE_PRECISION (inner);
10979
10980 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10981 and obtain 0. */
10982 if (TYPE_UNSIGNED (outer)
10983 /* If we are widening something of an unsigned type, OUTER type
10984 contains all values of INNER type. In particular, both INNER
10985 and OUTER types have zero in common. */
10986 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10987 return build_int_cst (outer, 0);
10988 else
10989 {
10990 /* If we are widening a signed type to another signed type, we
10991 want to obtain -2^(iprec-1). If we are keeping the
10992 precision or narrowing to a signed type, we want to obtain
10993 -2^(oprec-1). */
10994 unsigned prec = oprec > iprec ? iprec : oprec;
10995 return wide_int_to_tree (outer,
10996 wi::mask (prec - 1, true,
10997 TYPE_PRECISION (outer)));
10998 }
10999 }
11000
11001 /* Return nonzero if two operands that are suitable for PHI nodes are
11002 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11003 SSA_NAME or invariant. Note that this is strictly an optimization.
11004 That is, callers of this function can directly call operand_equal_p
11005 and get the same result, only slower. */
11006
11007 int
11008 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11009 {
11010 if (arg0 == arg1)
11011 return 1;
11012 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11013 return 0;
11014 return operand_equal_p (arg0, arg1, 0);
11015 }
11016
11017 /* Returns the number of zeros at the end of the binary representation of X. */
11018
11019 tree
11020 num_ending_zeros (const_tree x)
11021 {
11022 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11023 }
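
/* For instance, for X == 40 (binary 101000) the result above is the
   constant 3.  */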
11024
11025
11026 #define WALK_SUBTREE(NODE) \
11027 do \
11028 { \
11029 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11030 if (result) \
11031 return result; \
11032 } \
11033 while (0)
11034
11035 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
11036 be walked whenever a type is seen in the tree. The rest of the operands and
11037 the return value are as for walk_tree. */
11038
11039 static tree
11040 walk_type_fields (tree type, walk_tree_fn func, void *data,
11041 hash_set<tree> *pset, walk_tree_lh lh)
11042 {
11043 tree result = NULL_TREE;
11044
11045 switch (TREE_CODE (type))
11046 {
11047 case POINTER_TYPE:
11048 case REFERENCE_TYPE:
11049 case VECTOR_TYPE:
11050 /* We have to worry about mutually recursive pointers. These can't
11051 be written in C. They can in Ada. It's pathological, but
11052 there's an ACATS test (c38102a) that checks it. Deal with this
11053 by checking if we're pointing to another pointer, that one
11054 points to another pointer, that one does too, and we have no htab.
11055 If so, get a hash table. We check three levels deep to avoid
11056 the cost of the hash table if we don't need one. */
11057 if (POINTER_TYPE_P (TREE_TYPE (type))
11058 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11059 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11060 && !pset)
11061 {
11062 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11063 func, data);
11064 if (result)
11065 return result;
11066
11067 break;
11068 }
11069
11070 /* ... fall through ... */
11071
11072 case COMPLEX_TYPE:
11073 WALK_SUBTREE (TREE_TYPE (type));
11074 break;
11075
11076 case METHOD_TYPE:
11077 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11078
11079 /* Fall through. */
11080
11081 case FUNCTION_TYPE:
11082 WALK_SUBTREE (TREE_TYPE (type));
11083 {
11084 tree arg;
11085
11086 /* We never want to walk into default arguments. */
11087 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11088 WALK_SUBTREE (TREE_VALUE (arg));
11089 }
11090 break;
11091
11092 case ARRAY_TYPE:
11093 /* Don't follow this node's type if it is a pointer, for fear that
11094 we'll have infinite recursion. If we have a PSET, then we
11095 need not fear. */
11096 if (pset
11097 || (!POINTER_TYPE_P (TREE_TYPE (type))
11098 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11099 WALK_SUBTREE (TREE_TYPE (type));
11100 WALK_SUBTREE (TYPE_DOMAIN (type));
11101 break;
11102
11103 case OFFSET_TYPE:
11104 WALK_SUBTREE (TREE_TYPE (type));
11105 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11106 break;
11107
11108 default:
11109 break;
11110 }
11111
11112 return NULL_TREE;
11113 }
11114
11115 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11116 called with the DATA and the address of each sub-tree. If FUNC returns a
11117 non-NULL value, the traversal is stopped, and the value returned by FUNC
11118 is returned. If PSET is non-NULL it is used to record the nodes visited,
11119 and to avoid visiting a node more than once. */
11120
11121 tree
11122 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11123 hash_set<tree> *pset, walk_tree_lh lh)
11124 {
11125 enum tree_code code;
11126 int walk_subtrees;
11127 tree result;
11128
11129 #define WALK_SUBTREE_TAIL(NODE) \
11130 do \
11131 { \
11132 tp = & (NODE); \
11133 goto tail_recurse; \
11134 } \
11135 while (0)
11136
11137 tail_recurse:
11138 /* Skip empty subtrees. */
11139 if (!*tp)
11140 return NULL_TREE;
11141
11142 /* Don't walk the same tree twice, if the user has requested
11143 that we avoid doing so. */
11144 if (pset && pset->add (*tp))
11145 return NULL_TREE;
11146
11147 /* Call the function. */
11148 walk_subtrees = 1;
11149 result = (*func) (tp, &walk_subtrees, data);
11150
11151 /* If we found something, return it. */
11152 if (result)
11153 return result;
11154
11155 code = TREE_CODE (*tp);
11156
11157 /* Even if we didn't, FUNC may have decided that there was nothing
11158 interesting below this point in the tree. */
11159 if (!walk_subtrees)
11160 {
11161 /* But we still need to check our siblings. */
11162 if (code == TREE_LIST)
11163 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11164 else if (code == OMP_CLAUSE)
11165 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11166 else
11167 return NULL_TREE;
11168 }
11169
11170 if (lh)
11171 {
11172 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11173 if (result || !walk_subtrees)
11174 return result;
11175 }
11176
11177 switch (code)
11178 {
11179 case ERROR_MARK:
11180 case IDENTIFIER_NODE:
11181 case INTEGER_CST:
11182 case REAL_CST:
11183 case FIXED_CST:
11184 case VECTOR_CST:
11185 case STRING_CST:
11186 case BLOCK:
11187 case PLACEHOLDER_EXPR:
11188 case SSA_NAME:
11189 case FIELD_DECL:
11190 case RESULT_DECL:
11191 /* None of these have subtrees other than those already walked
11192 above. */
11193 break;
11194
11195 case TREE_LIST:
11196 WALK_SUBTREE (TREE_VALUE (*tp));
11197 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11198 break;
11199
11200 case TREE_VEC:
11201 {
11202 int len = TREE_VEC_LENGTH (*tp);
11203
11204 if (len == 0)
11205 break;
11206
11207 /* Walk all elements but the first. */
11208 while (--len)
11209 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11210
11211 /* Now walk the first one as a tail call. */
11212 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11213 }
11214
11215 case COMPLEX_CST:
11216 WALK_SUBTREE (TREE_REALPART (*tp));
11217 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11218
11219 case CONSTRUCTOR:
11220 {
11221 unsigned HOST_WIDE_INT idx;
11222 constructor_elt *ce;
11223
11224 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11225 idx++)
11226 WALK_SUBTREE (ce->value);
11227 }
11228 break;
11229
11230 case SAVE_EXPR:
11231 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11232
11233 case BIND_EXPR:
11234 {
11235 tree decl;
11236 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11237 {
11238 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11239 into declarations that are just mentioned, rather than
11240 declared; they don't really belong to this part of the tree.
11241 And, we can see cycles: the initializer for a declaration
11242 can refer to the declaration itself. */
11243 WALK_SUBTREE (DECL_INITIAL (decl));
11244 WALK_SUBTREE (DECL_SIZE (decl));
11245 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11246 }
11247 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11248 }
11249
11250 case STATEMENT_LIST:
11251 {
11252 tree_stmt_iterator i;
11253 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11254 WALK_SUBTREE (*tsi_stmt_ptr (i));
11255 }
11256 break;
11257
11258 case OMP_CLAUSE:
11259 switch (OMP_CLAUSE_CODE (*tp))
11260 {
11261 case OMP_CLAUSE_GANG:
11262 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11263 /* FALLTHRU */
11264
11265 case OMP_CLAUSE_DEVICE_RESIDENT:
11266 case OMP_CLAUSE_USE_DEVICE:
11267 case OMP_CLAUSE_ASYNC:
11268 case OMP_CLAUSE_WAIT:
11269 case OMP_CLAUSE_WORKER:
11270 case OMP_CLAUSE_VECTOR:
11271 case OMP_CLAUSE_NUM_GANGS:
11272 case OMP_CLAUSE_NUM_WORKERS:
11273 case OMP_CLAUSE_VECTOR_LENGTH:
11274 case OMP_CLAUSE_PRIVATE:
11275 case OMP_CLAUSE_SHARED:
11276 case OMP_CLAUSE_FIRSTPRIVATE:
11277 case OMP_CLAUSE_COPYIN:
11278 case OMP_CLAUSE_COPYPRIVATE:
11279 case OMP_CLAUSE_FINAL:
11280 case OMP_CLAUSE_IF:
11281 case OMP_CLAUSE_NUM_THREADS:
11282 case OMP_CLAUSE_SCHEDULE:
11283 case OMP_CLAUSE_UNIFORM:
11284 case OMP_CLAUSE_DEPEND:
11285 case OMP_CLAUSE_NUM_TEAMS:
11286 case OMP_CLAUSE_THREAD_LIMIT:
11287 case OMP_CLAUSE_DEVICE:
11288 case OMP_CLAUSE_DIST_SCHEDULE:
11289 case OMP_CLAUSE_SAFELEN:
11290 case OMP_CLAUSE_SIMDLEN:
11291 case OMP_CLAUSE__LOOPTEMP_:
11292 case OMP_CLAUSE__SIMDUID_:
11293 case OMP_CLAUSE__CILK_FOR_COUNT_:
11294 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11295 /* FALLTHRU */
11296
11297 case OMP_CLAUSE_INDEPENDENT:
11298 case OMP_CLAUSE_NOWAIT:
11299 case OMP_CLAUSE_ORDERED:
11300 case OMP_CLAUSE_DEFAULT:
11301 case OMP_CLAUSE_UNTIED:
11302 case OMP_CLAUSE_MERGEABLE:
11303 case OMP_CLAUSE_PROC_BIND:
11304 case OMP_CLAUSE_INBRANCH:
11305 case OMP_CLAUSE_NOTINBRANCH:
11306 case OMP_CLAUSE_FOR:
11307 case OMP_CLAUSE_PARALLEL:
11308 case OMP_CLAUSE_SECTIONS:
11309 case OMP_CLAUSE_TASKGROUP:
11310 case OMP_CLAUSE_AUTO:
11311 case OMP_CLAUSE_SEQ:
11312 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11313
11314 case OMP_CLAUSE_LASTPRIVATE:
11315 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11316 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11317 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11318
11319 case OMP_CLAUSE_COLLAPSE:
11320 {
11321 int i;
11322 for (i = 0; i < 3; i++)
11323 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11324 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11325 }
11326
11327 case OMP_CLAUSE_LINEAR:
11328 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11329 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11330 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11331 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11332
11333 case OMP_CLAUSE_ALIGNED:
11334 case OMP_CLAUSE_FROM:
11335 case OMP_CLAUSE_TO:
11336 case OMP_CLAUSE_MAP:
11337 case OMP_CLAUSE__CACHE_:
11338 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11339 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11340 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11341
11342 case OMP_CLAUSE_REDUCTION:
11343 {
11344 int i;
11345 for (i = 0; i < 4; i++)
11346 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11347 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11348 }
11349
11350 default:
11351 gcc_unreachable ();
11352 }
11353 break;
11354
11355 case TARGET_EXPR:
11356 {
11357 int i, len;
11358
11359 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11360 But, we only want to walk once. */
11361 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11362 for (i = 0; i < len; ++i)
11363 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11364 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11365 }
11366
11367 case DECL_EXPR:
11368 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11369 defining. We only want to walk into these fields of a type in this
11370 case and not in the general case of a mere reference to the type.
11371
11372 The criterion is as follows: if the field can be an expression, it
11373 must be walked only here. This should be in keeping with the fields
11374 that are directly gimplified in gimplify_type_sizes in order for the
11375 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11376 variable-sized types.
11377
11378 Note that DECLs get walked as part of processing the BIND_EXPR. */
11379 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11380 {
11381 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11382 if (TREE_CODE (*type_p) == ERROR_MARK)
11383 return NULL_TREE;
11384
11385 /* Call the function for the type. See if it returns anything or
11386 doesn't want us to continue. If we are to continue, walk both
11387 the normal fields and those for the declaration case. */
11388 result = (*func) (type_p, &walk_subtrees, data);
11389 if (result || !walk_subtrees)
11390 return result;
11391
11392 /* But do not walk a pointed-to type since it may itself need to
11393 be walked in the declaration case if it isn't anonymous. */
11394 if (!POINTER_TYPE_P (*type_p))
11395 {
11396 result = walk_type_fields (*type_p, func, data, pset, lh);
11397 if (result)
11398 return result;
11399 }
11400
11401 /* If this is a record type, also walk the fields. */
11402 if (RECORD_OR_UNION_TYPE_P (*type_p))
11403 {
11404 tree field;
11405
11406 for (field = TYPE_FIELDS (*type_p); field;
11407 field = DECL_CHAIN (field))
11408 {
11409 /* We'd like to look at the type of the field, but we can
11410 easily get infinite recursion. So assume it's pointed
11411 to elsewhere in the tree. Also, ignore things that
11412 aren't fields. */
11413 if (TREE_CODE (field) != FIELD_DECL)
11414 continue;
11415
11416 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11417 WALK_SUBTREE (DECL_SIZE (field));
11418 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11419 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11420 WALK_SUBTREE (DECL_QUALIFIER (field));
11421 }
11422 }
11423
11424 /* Same for scalar types. */
11425 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11426 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11427 || TREE_CODE (*type_p) == INTEGER_TYPE
11428 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11429 || TREE_CODE (*type_p) == REAL_TYPE)
11430 {
11431 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11432 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11433 }
11434
11435 WALK_SUBTREE (TYPE_SIZE (*type_p));
11436 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11437 }
11438 /* FALLTHRU */
11439
11440 default:
11441 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11442 {
11443 int i, len;
11444
11445 /* Walk over all the sub-trees of this operand. */
11446 len = TREE_OPERAND_LENGTH (*tp);
11447
11448 /* Go through the subtrees. We need to do this in forward order so
11449 that the scope of a FOR_EXPR is handled properly. */
11450 if (len)
11451 {
11452 for (i = 0; i < len - 1; ++i)
11453 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11454 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11455 }
11456 }
11457 /* If this is a type, walk the needed fields in the type. */
11458 else if (TYPE_P (*tp))
11459 return walk_type_fields (*tp, func, data, pset, lh);
11460 break;
11461 }
11462
11463 /* We didn't find what we were looking for. */
11464 return NULL_TREE;
11465
11466 #undef WALK_SUBTREE_TAIL
11467 }
11468 #undef WALK_SUBTREE
11469
11470 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11471
11472 tree
11473 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11474 walk_tree_lh lh)
11475 {
11476 tree result;
11477
11478 hash_set<tree> pset;
11479 result = walk_tree_1 (tp, func, data, &pset, lh);
11480 return result;
11481 }
11482
11483
11484 tree
11485 tree_block (tree t)
11486 {
11487 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11488
11489 if (IS_EXPR_CODE_CLASS (c))
11490 return LOCATION_BLOCK (t->exp.locus);
11491 gcc_unreachable ();
11492 return NULL;
11493 }
11494
11495 void
11496 tree_set_block (tree t, tree b)
11497 {
11498 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11499
11500 if (IS_EXPR_CODE_CLASS (c))
11501 {
11502 if (b)
11503 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11504 else
11505 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11506 }
11507 else
11508 gcc_unreachable ();
11509 }
11510
11511 /* Create a nameless artificial label and put it in the current
11512 function context. The label has a location of LOC. Returns the
11513 newly created label. */
11514
11515 tree
11516 create_artificial_label (location_t loc)
11517 {
11518 tree lab = build_decl (loc,
11519 LABEL_DECL, NULL_TREE, void_type_node);
11520
11521 DECL_ARTIFICIAL (lab) = 1;
11522 DECL_IGNORED_P (lab) = 1;
11523 DECL_CONTEXT (lab) = current_function_decl;
11524 return lab;
11525 }
11526
11527 /* Given a tree, try to return a useful variable name that we can use
11528 to prefix a temporary that is being assigned the value of the tree.
11529 I.e. given <temp> = &A, return A. */
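/* An illustrative sketch (VAR is a hypothetical named VAR_DECL, not
   defined here):

     tree addr = build_fold_addr_expr (var);
     const char *name = get_name (addr);

   NAME then points to the identifier string of VAR, or is NULL when no
   useful name can be found.  */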
11530
11531 const char *
11532 get_name (tree t)
11533 {
11534 tree stripped_decl;
11535
11536 stripped_decl = t;
11537 STRIP_NOPS (stripped_decl);
11538 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11539 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11540 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11541 {
11542 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11543 if (!name)
11544 return NULL;
11545 return IDENTIFIER_POINTER (name);
11546 }
11547 else
11548 {
11549 switch (TREE_CODE (stripped_decl))
11550 {
11551 case ADDR_EXPR:
11552 return get_name (TREE_OPERAND (stripped_decl, 0));
11553 default:
11554 return NULL;
11555 }
11556 }
11557 }
11558
11559 /* Return true if TYPE has a variable argument list. */
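/* For instance (illustrative only), given the FUNCTION_TYPE of each of
   these declarations:

     int f (int, ...);       stdarg_p returns true
     int g (void);           stdarg_p returns false
     int h ();               stdarg_p returns false (unprototyped)  */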
11560
11561 bool
11562 stdarg_p (const_tree fntype)
11563 {
11564 function_args_iterator args_iter;
11565 tree n = NULL_TREE, t;
11566
11567 if (!fntype)
11568 return false;
11569
11570 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11571 {
11572 n = t;
11573 }
11574
11575 return n != NULL_TREE && n != void_type_node;
11576 }
11577
11578 /* Return true if TYPE has a prototype. */
11579
11580 bool
11581 prototype_p (const_tree fntype)
11582 {
11583 tree t;
11584
11585 gcc_assert (fntype != NULL_TREE);
11586
11587 t = TYPE_ARG_TYPES (fntype);
11588 return (t != NULL_TREE);
11589 }
11590
11591 /* If BLOCK is inlined from an __attribute__((__artificial__))
11592 routine, return a pointer to the location from which it has
11593 been called. */
11594 location_t *
11595 block_nonartificial_location (tree block)
11596 {
11597 location_t *ret = NULL;
11598
11599 while (block && TREE_CODE (block) == BLOCK
11600 && BLOCK_ABSTRACT_ORIGIN (block))
11601 {
11602 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11603
11604 while (TREE_CODE (ao) == BLOCK
11605 && BLOCK_ABSTRACT_ORIGIN (ao)
11606 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11607 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11608
11609 if (TREE_CODE (ao) == FUNCTION_DECL)
11610 {
11611 /* If AO is an artificial inline, point RET to the
11612 call site locus at which it has been inlined and continue
11613 the loop, in case AO's caller is also an artificial
11614 inline. */
11615 if (DECL_DECLARED_INLINE_P (ao)
11616 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11617 ret = &BLOCK_SOURCE_LOCATION (block);
11618 else
11619 break;
11620 }
11621 else if (TREE_CODE (ao) != BLOCK)
11622 break;
11623
11624 block = BLOCK_SUPERCONTEXT (block);
11625 }
11626 return ret;
11627 }
11628
11629
11630 /* If EXP is inlined from an __attribute__((__artificial__))
11631 function, return the location of the original call expression. */
11632
11633 location_t
11634 tree_nonartificial_location (tree exp)
11635 {
11636 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11637
11638 if (loc)
11639 return *loc;
11640 else
11641 return EXPR_LOCATION (exp);
11642 }
11643
11644
11645 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11646 nodes. */
11647
11648 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11649
11650 hashval_t
11651 cl_option_hasher::hash (tree x)
11652 {
11653 const_tree const t = x;
11654 const char *p;
11655 size_t i;
11656 size_t len = 0;
11657 hashval_t hash = 0;
11658
11659 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11660 {
11661 p = (const char *)TREE_OPTIMIZATION (t);
11662 len = sizeof (struct cl_optimization);
11663 }
11664
11665 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11666 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11667
11668 else
11669 gcc_unreachable ();
11670
11671 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11672 something else. */
11673 for (i = 0; i < len; i++)
11674 if (p[i])
11675 hash = (hash << 4) ^ ((i << 2) | p[i]);
11676
11677 return hash;
11678 }
11679
11680 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11681 TARGET_OPTION tree node) is the same as that given by *Y, a node of
11682 the same kind. */
11683
11684 bool
11685 cl_option_hasher::equal (tree x, tree y)
11686 {
11687 const_tree const xt = x;
11688 const_tree const yt = y;
11689 const char *xp;
11690 const char *yp;
11691 size_t len;
11692
11693 if (TREE_CODE (xt) != TREE_CODE (yt))
11694 return 0;
11695
11696 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11697 {
11698 xp = (const char *)TREE_OPTIMIZATION (xt);
11699 yp = (const char *)TREE_OPTIMIZATION (yt);
11700 len = sizeof (struct cl_optimization);
11701 }
11702
11703 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11704 {
11705 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11706 TREE_TARGET_OPTION (yt));
11707 }
11708
11709 else
11710 gcc_unreachable ();
11711
11712 return (memcmp (xp, yp, len) == 0);
11713 }
11714
11715 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
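/* An illustrative sketch (FNDECL is a hypothetical FUNCTION_DECL); front
   ends typically attach the resulting node to a declaration:

     tree opt = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = opt;

   Thanks to the hash table cache below, two calls made with identical
   option sets return the same OPTIMIZATION_NODE.  */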
11716
11717 tree
11718 build_optimization_node (struct gcc_options *opts)
11719 {
11720 tree t;
11721
11722 /* Use the cache of optimization nodes. */
11723
11724 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11725 opts);
11726
11727 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11728 t = *slot;
11729 if (!t)
11730 {
11731 /* Insert this one into the hash table. */
11732 t = cl_optimization_node;
11733 *slot = t;
11734
11735 /* Make a new node for next time round. */
11736 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11737 }
11738
11739 return t;
11740 }
11741
11742 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11743
11744 tree
11745 build_target_option_node (struct gcc_options *opts)
11746 {
11747 tree t;
11748
11749 /* Use the cache of optimization nodes. */
11750
11751 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11752 opts);
11753
11754 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11755 t = *slot;
11756 if (!t)
11757 {
11758 /* Insert this one into the hash table. */
11759 t = cl_target_option_node;
11760 *slot = t;
11761
11762 /* Make a new node for next time round. */
11763 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11764 }
11765
11766 return t;
11767 }
11768
11769 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11770 so that they aren't saved during PCH writing. */
11771
11772 void
11773 prepare_target_option_nodes_for_pch (void)
11774 {
11775 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11776 for (; iter != cl_option_hash_table->end (); ++iter)
11777 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11778 TREE_TARGET_GLOBALS (*iter) = NULL;
11779 }
11780
11781 /* Determine the "ultimate origin" of a block. The block may be an inlined
11782 instance of an inlined instance of a block which is local to an inline
11783 function, so we have to trace all of the way back through the origin chain
11784 to find out what sort of node actually served as the original seed for the
11785 given block. */
11786
11787 tree
11788 block_ultimate_origin (const_tree block)
11789 {
11790 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11791
11792 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11793 we're trying to output the abstract instance of this function. */
11794 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11795 return NULL_TREE;
11796
11797 if (immediate_origin == NULL_TREE)
11798 return NULL_TREE;
11799 else
11800 {
11801 tree ret_val;
11802 tree lookahead = immediate_origin;
11803
11804 do
11805 {
11806 ret_val = lookahead;
11807 lookahead = (TREE_CODE (ret_val) == BLOCK
11808 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11809 }
11810 while (lookahead != NULL && lookahead != ret_val);
11811
11812 /* The block's abstract origin chain may not be the *ultimate* origin of
11813 the block. It could lead to a DECL that has an abstract origin set.
11814 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11815 will give us if it has one). Note that DECL's abstract origins are
11816 supposed to be the most distant ancestor (or so decl_ultimate_origin
11817 claims), so we don't need to loop following the DECL origins. */
11818 if (DECL_P (ret_val))
11819 return DECL_ORIGIN (ret_val);
11820
11821 return ret_val;
11822 }
11823 }
11824
11825 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11826 no instruction. */
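/* For instance (illustrative; exact results depend on the target's type
   layout):

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)
       returns true, as both types have the same precision;
     tree_nop_conversion_p (integer_type_node, char_type_node)
       returns false on typical targets, as the precisions differ.  */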
11827
11828 bool
11829 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11830 {
11831 /* Use precision rather than machine mode when we can, which gives
11832 the correct answer even for submode (bit-field) types. */
11833 if ((INTEGRAL_TYPE_P (outer_type)
11834 || POINTER_TYPE_P (outer_type)
11835 || TREE_CODE (outer_type) == OFFSET_TYPE)
11836 && (INTEGRAL_TYPE_P (inner_type)
11837 || POINTER_TYPE_P (inner_type)
11838 || TREE_CODE (inner_type) == OFFSET_TYPE))
11839 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11840
11841 /* Otherwise fall back on comparing machine modes (e.g. for
11842 aggregate types, floats). */
11843 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11844 }
11845
11846 /* Return true iff conversion in EXP generates no instruction. Mark
11847 it inline so that we fully inline into the stripping functions even
11848 though we have two uses of this function. */
11849
11850 static inline bool
11851 tree_nop_conversion (const_tree exp)
11852 {
11853 tree outer_type, inner_type;
11854
11855 if (!CONVERT_EXPR_P (exp)
11856 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11857 return false;
11858 if (TREE_OPERAND (exp, 0) == error_mark_node)
11859 return false;
11860
11861 outer_type = TREE_TYPE (exp);
11862 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11863
11864 if (!inner_type)
11865 return false;
11866
11867 return tree_nop_conversion_p (outer_type, inner_type);
11868 }
11869
11870 /* Return true iff conversion in EXP generates no instruction. Don't
11871 consider conversions changing the signedness. */
11872
11873 static bool
11874 tree_sign_nop_conversion (const_tree exp)
11875 {
11876 tree outer_type, inner_type;
11877
11878 if (!tree_nop_conversion (exp))
11879 return false;
11880
11881 outer_type = TREE_TYPE (exp);
11882 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11883
11884 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11885 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11886 }
11887
11888 /* Strip conversions from EXP according to tree_nop_conversion and
11889 return the resulting expression. */
11890
11891 tree
11892 tree_strip_nop_conversions (tree exp)
11893 {
11894 while (tree_nop_conversion (exp))
11895 exp = TREE_OPERAND (exp, 0);
11896 return exp;
11897 }
11898
11899 /* Strip conversions from EXP according to tree_sign_nop_conversion
11900 and return the resulting expression. */
11901
11902 tree
11903 tree_strip_sign_nop_conversions (tree exp)
11904 {
11905 while (tree_sign_nop_conversion (exp))
11906 exp = TREE_OPERAND (exp, 0);
11907 return exp;
11908 }
11909
11910 /* Avoid any floating point extensions from EXP. */
11911 tree
11912 strip_float_extensions (tree exp)
11913 {
11914 tree sub, expt, subt;
11915
11916 /* For a floating point constant, look up the narrowest type that can
11917 hold it properly and handle it like (type)(narrowest_type)constant.
11918 This way we can optimize for instance a=a*2.0 where "a" is a float
11919 but 2.0 is a double constant. */
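      /* As an illustrative example, the double constant 2.0 truncates
	 exactly to float and is rebuilt below as a float REAL_CST,
	 whereas 0.1, which has no exact float representation, is
	 returned unchanged.  */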
11920 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11921 {
11922 REAL_VALUE_TYPE orig;
11923 tree type = NULL;
11924
11925 orig = TREE_REAL_CST (exp);
11926 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11927 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11928 type = float_type_node;
11929 else if (TYPE_PRECISION (TREE_TYPE (exp))
11930 > TYPE_PRECISION (double_type_node)
11931 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11932 type = double_type_node;
11933 if (type)
11934 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11935 }
11936
11937 if (!CONVERT_EXPR_P (exp))
11938 return exp;
11939
11940 sub = TREE_OPERAND (exp, 0);
11941 subt = TREE_TYPE (sub);
11942 expt = TREE_TYPE (exp);
11943
11944 if (!FLOAT_TYPE_P (subt))
11945 return exp;
11946
11947 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11948 return exp;
11949
11950 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11951 return exp;
11952
11953 return strip_float_extensions (sub);
11954 }
11955
11956 /* Strip out all handled components that produce invariant
11957 offsets. */
11958
11959 const_tree
11960 strip_invariant_refs (const_tree op)
11961 {
11962 while (handled_component_p (op))
11963 {
11964 switch (TREE_CODE (op))
11965 {
11966 case ARRAY_REF:
11967 case ARRAY_RANGE_REF:
11968 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11969 || TREE_OPERAND (op, 2) != NULL_TREE
11970 || TREE_OPERAND (op, 3) != NULL_TREE)
11971 return NULL;
11972 break;
11973
11974 case COMPONENT_REF:
11975 if (TREE_OPERAND (op, 2) != NULL_TREE)
11976 return NULL;
11977 break;
11978
11979 default:;
11980 }
11981 op = TREE_OPERAND (op, 0);
11982 }
11983
11984 return op;
11985 }
11986
11987 static GTY(()) tree gcc_eh_personality_decl;
11988
11989 /* Return the GCC personality function decl. */
11990
11991 tree
11992 lhd_gcc_personality (void)
11993 {
11994 if (!gcc_eh_personality_decl)
11995 gcc_eh_personality_decl = build_personality_function ("gcc");
11996 return gcc_eh_personality_decl;
11997 }
11998
11999 /* TARGET is the call target of a GIMPLE call statement
12000 (obtained by gimple_call_fn). Return true if it is an
12001 OBJ_TYPE_REF representing a virtual call to a C++ method.
12002 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12003 through a cast, where the middle-end devirtualization machinery
12004 can't apply.) */
12005
12006 bool
12007 virtual_method_call_p (const_tree target)
12008 {
12009 if (TREE_CODE (target) != OBJ_TYPE_REF)
12010 return false;
12011 tree t = TREE_TYPE (target);
12012 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12013 t = TREE_TYPE (t);
12014 if (TREE_CODE (t) == FUNCTION_TYPE)
12015 return false;
12016 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12017 /* If we do not have BINFO associated, it means that type was built
12018 without devirtualization enabled. Do not consider this a virtual
12019 call. */
12020 if (!TYPE_BINFO (obj_type_ref_class (target)))
12021 return false;
12022 return true;
12023 }
12024
12025 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12026
12027 tree
12028 obj_type_ref_class (const_tree ref)
12029 {
12030 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12031 ref = TREE_TYPE (ref);
12032 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12033 ref = TREE_TYPE (ref);
12034 /* We look for the type THIS points to. ObjC also builds
12035 OBJ_TYPE_REF with non-method calls; their first parameter
12036 ID, however, also corresponds to the class type. */
12037 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12038 || TREE_CODE (ref) == FUNCTION_TYPE);
12039 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12040 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12041 return TREE_TYPE (ref);
12042 }
12043
12044 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12045
12046 static tree
12047 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12048 {
12049 unsigned int i;
12050 tree base_binfo, b;
12051
12052 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12053 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12054 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12055 return base_binfo;
12056 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12057 return b;
12058 return NULL;
12059 }
12060
12061 /* Try to find a base info of BINFO that would have its field decl at offset
12062 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12063 found, return it; otherwise return NULL_TREE. */
12064
12065 tree
12066 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12067 {
12068 tree type = BINFO_TYPE (binfo);
12069
12070 while (true)
12071 {
12072 HOST_WIDE_INT pos, size;
12073 tree fld;
12074 int i;
12075
12076 if (types_same_for_odr (type, expected_type))
12077 return binfo;
12078 if (offset < 0)
12079 return NULL_TREE;
12080
12081 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12082 {
12083 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12084 continue;
12085
12086 pos = int_bit_position (fld);
12087 size = tree_to_uhwi (DECL_SIZE (fld));
12088 if (pos <= offset && (pos + size) > offset)
12089 break;
12090 }
12091 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12092 return NULL_TREE;
12093
12094 /* Offset 0 indicates the primary base, whose vtable contents are
12095 represented in the binfo for the derived class. */
12096 else if (offset != 0)
12097 {
12098 tree found_binfo = NULL, base_binfo;
12099 /* Offsets in BINFO are in bytes relative to the whole structure
12100 while POS is in bits relative to the containing field. */
12101 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12102 / BITS_PER_UNIT);
12103
12104 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12105 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12106 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12107 {
12108 found_binfo = base_binfo;
12109 break;
12110 }
12111 if (found_binfo)
12112 binfo = found_binfo;
12113 else
12114 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12115 binfo_offset);
12116 }
12117
12118 type = TREE_TYPE (fld);
12119 offset -= pos;
12120 }
12121 }
12122
12123 /* Returns true if X is a typedef decl. */
12124
12125 bool
12126 is_typedef_decl (const_tree x)
12127 {
12128 return (x && TREE_CODE (x) == TYPE_DECL
12129 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12130 }
12131
12132 /* Returns true iff TYPE is a type variant created for a typedef. */
12133
12134 bool
12135 typedef_variant_p (const_tree type)
12136 {
12137 return is_typedef_decl (TYPE_NAME (type));
12138 }
12139
12140 /* Warn about a use of an identifier which was marked deprecated. */
12141 void
12142 warn_deprecated_use (tree node, tree attr)
12143 {
12144 const char *msg;
12145
12146 if (node == 0 || !warn_deprecated_decl)
12147 return;
12148
12149 if (!attr)
12150 {
12151 if (DECL_P (node))
12152 attr = DECL_ATTRIBUTES (node);
12153 else if (TYPE_P (node))
12154 {
12155 tree decl = TYPE_STUB_DECL (node);
12156 if (decl)
12157 attr = lookup_attribute ("deprecated",
12158 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12159 }
12160 }
12161
12162 if (attr)
12163 attr = lookup_attribute ("deprecated", attr);
12164
12165 if (attr)
12166 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12167 else
12168 msg = NULL;
12169
12170 bool w;
12171 if (DECL_P (node))
12172 {
12173 if (msg)
12174 w = warning (OPT_Wdeprecated_declarations,
12175 "%qD is deprecated: %s", node, msg);
12176 else
12177 w = warning (OPT_Wdeprecated_declarations,
12178 "%qD is deprecated", node);
12179 if (w)
12180 inform (DECL_SOURCE_LOCATION (node), "declared here");
12181 }
12182 else if (TYPE_P (node))
12183 {
12184 tree what = NULL_TREE;
12185 tree decl = TYPE_STUB_DECL (node);
12186
12187 if (TYPE_NAME (node))
12188 {
12189 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12190 what = TYPE_NAME (node);
12191 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12192 && DECL_NAME (TYPE_NAME (node)))
12193 what = DECL_NAME (TYPE_NAME (node));
12194 }
12195
12196 if (decl)
12197 {
12198 if (what)
12199 {
12200 if (msg)
12201 w = warning (OPT_Wdeprecated_declarations,
12202 "%qE is deprecated: %s", what, msg);
12203 else
12204 w = warning (OPT_Wdeprecated_declarations,
12205 "%qE is deprecated", what);
12206 }
12207 else
12208 {
12209 if (msg)
12210 w = warning (OPT_Wdeprecated_declarations,
12211 "type is deprecated: %s", msg);
12212 else
12213 w = warning (OPT_Wdeprecated_declarations,
12214 "type is deprecated");
12215 }
12216 if (w)
12217 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12218 }
12219 else
12220 {
12221 if (what)
12222 {
12223 if (msg)
12224 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12225 what, msg);
12226 else
12227 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12228 }
12229 else
12230 {
12231 if (msg)
12232 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12233 msg);
12234 else
12235 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12236 }
12237 }
12238 }
12239 }
12240
12241 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12242 somewhere in it. */
12243
12244 bool
12245 contains_bitfld_component_ref_p (const_tree ref)
12246 {
12247 while (handled_component_p (ref))
12248 {
12249 if (TREE_CODE (ref) == COMPONENT_REF
12250 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12251 return true;
12252 ref = TREE_OPERAND (ref, 0);
12253 }
12254
12255 return false;
12256 }
12257
12258 /* Try to determine whether a TRY_CATCH expression can fall through.
12259 This is a subroutine of block_may_fallthru. */
12260
12261 static bool
12262 try_catch_may_fallthru (const_tree stmt)
12263 {
12264 tree_stmt_iterator i;
12265
12266 /* If the TRY block can fall through, the whole TRY_CATCH can
12267 fall through. */
12268 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12269 return true;
12270
12271 i = tsi_start (TREE_OPERAND (stmt, 1));
12272 switch (TREE_CODE (tsi_stmt (i)))
12273 {
12274 case CATCH_EXPR:
12275 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12276 catch expression and a body. The whole TRY_CATCH may fall
12277 through iff any of the catch bodies falls through. */
12278 for (; !tsi_end_p (i); tsi_next (&i))
12279 {
12280 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12281 return true;
12282 }
12283 return false;
12284
12285 case EH_FILTER_EXPR:
12286 /* The exception filter expression only matters if there is an
12287 exception. If the exception does not match EH_FILTER_TYPES,
12288 we will execute EH_FILTER_FAILURE, and we will fall through
12289 if that falls through. If the exception does match
12290 EH_FILTER_TYPES, the stack unwinder will continue up the
12291 stack, so we will not fall through. We don't know whether we
12292 will throw an exception which matches EH_FILTER_TYPES or not,
12293 so we just ignore EH_FILTER_TYPES and assume that we might
12294 throw an exception which doesn't match. */
12295 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12296
12297 default:
12298 /* This case represents statements to be executed when an
12299 exception occurs. Those statements are implicitly followed
12300 by a RESX statement to resume execution after the exception.
12301 So in this case the TRY_CATCH never falls through. */
12302 return false;
12303 }
12304 }
12305
12306 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12307 need not be 100% accurate; simply be conservative and return true if we
12308 don't know. This is used only to avoid stupidly generating extra code.
12309 If we're wrong, we'll just delete the extra code later. */
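/* For instance (illustrative), a statement list ending in a RETURN_EXPR
   or GOTO_EXPR never falls through, while one ending in a plain
   CALL_EXPR does unless the callee is noreturn:

     block_may_fallthru of "{ ...; return x; }"  -> false
     block_may_fallthru of "{ ...; foo (); }"    -> true, unless FOO's
       call has ECF_NORETURN set.  */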
12310
12311 bool
12312 block_may_fallthru (const_tree block)
12313 {
12314 /* This CONST_CAST is okay because expr_last returns its argument
12315 unmodified and we assign it to a const_tree. */
12316 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12317
12318 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12319 {
12320 case GOTO_EXPR:
12321 case RETURN_EXPR:
12322 /* Easy cases. If the last statement of the block implies
12323 control transfer, then we can't fall through. */
12324 return false;
12325
12326 case SWITCH_EXPR:
12327 /* If SWITCH_LABELS is set, this is lowered, and represents a
12328 branch to a selected label and hence can not fall through.
12329 Otherwise SWITCH_BODY is set, and the switch can fall
12330 through. */
12331 return SWITCH_LABELS (stmt) == NULL_TREE;
12332
12333 case COND_EXPR:
12334 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12335 return true;
12336 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12337
12338 case BIND_EXPR:
12339 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12340
12341 case TRY_CATCH_EXPR:
12342 return try_catch_may_fallthru (stmt);
12343
12344 case TRY_FINALLY_EXPR:
12345 /* The finally clause is always executed after the try clause,
12346 so if it does not fall through, then the try-finally will not
12347 fall through. Otherwise, if the try clause does not fall
12348 through, then when the finally clause falls through it will
12349 resume execution wherever the try clause was going. So the
12350 whole try-finally will only fall through if both the try
12351 clause and the finally clause fall through. */
12352 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12353 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12354
12355 case MODIFY_EXPR:
12356 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12357 stmt = TREE_OPERAND (stmt, 1);
12358 else
12359 return true;
12360 /* FALLTHRU */
12361
12362 case CALL_EXPR:
12363 /* Functions that do not return do not fall through. */
12364 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12365
12366 case CLEANUP_POINT_EXPR:
12367 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12368
12369 case TARGET_EXPR:
12370 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12371
12372 case ERROR_MARK:
12373 return true;
12374
12375 default:
12376 return lang_hooks.block_may_fallthru (stmt);
12377 }
12378 }
12379
12380 /* True if we are using EH to handle cleanups. */
12381 static bool using_eh_for_cleanups_flag = false;
12382
12383 /* This routine is called from front ends to indicate eh should be used for
12384 cleanups. */
12385 void
12386 using_eh_for_cleanups (void)
12387 {
12388 using_eh_for_cleanups_flag = true;
12389 }
12390
12391 /* Query whether EH is used for cleanups. */
12392 bool
12393 using_eh_for_cleanups_p (void)
12394 {
12395 return using_eh_for_cleanups_flag;
12396 }
12397
12398 /* Wrapper for tree_code_name to ensure that tree code is valid */
12399 const char *
12400 get_tree_code_name (enum tree_code code)
12401 {
12402 const char *invalid = "<invalid tree code>";
12403
12404 if (code >= MAX_TREE_CODES)
12405 return invalid;
12406
12407 return tree_code_name[code];
12408 }
12409
12410 /* Drops the TREE_OVERFLOW flag from T. */
12411
12412 tree
12413 drop_tree_overflow (tree t)
12414 {
12415 gcc_checking_assert (TREE_OVERFLOW (t));
12416
12417 /* For tree codes with a sharing machinery re-build the result. */
12418 if (TREE_CODE (t) == INTEGER_CST)
12419 return wide_int_to_tree (TREE_TYPE (t), t);
12420
12421 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12422 and drop the flag. */
12423 t = copy_node (t);
12424 TREE_OVERFLOW (t) = 0;
12425 return t;
12426 }
12427
12428 /* Given a memory reference expression T, return its base address.
12429 The base address of a memory reference expression is the main
12430 object being referenced. For instance, the base address for
12431 'array[i].fld[j]' is 'array'. You can think of this as stripping
12432 away the offset part from a memory address.
12433
12434 This function calls handled_component_p to strip away all the inner
12435 parts of the memory reference until it reaches the base object. */
12436
12437 tree
12438 get_base_address (tree t)
12439 {
12440 while (handled_component_p (t))
12441 t = TREE_OPERAND (t, 0);
12442
12443 if ((TREE_CODE (t) == MEM_REF
12444 || TREE_CODE (t) == TARGET_MEM_REF)
12445 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12446 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12447
12448 /* ??? Either the alias oracle or all callers need to properly deal
12449 with WITH_SIZE_EXPRs before we can look through those. */
12450 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12451 return NULL_TREE;
12452
12453 return t;
12454 }
12455
12456 /* Return the machine mode of T. For vectors, returns the mode of the
12457 inner type. The main use case is to feed the result to HONOR_NANS,
12458 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
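/* For instance (illustrative; modes are target-dependent):

     element_mode (build_vector_type (float_type_node, 4))

   returns the scalar SFmode on typical targets, whereas TYPE_MODE of the
   vector type itself would be V4SFmode (or BLKmode if unsupported).  */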
12459
12460 machine_mode
12461 element_mode (const_tree t)
12462 {
12463 if (!TYPE_P (t))
12464 t = TREE_TYPE (t);
12465 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12466 t = TREE_TYPE (t);
12467 return TYPE_MODE (t);
12468 }
12469
12470
12471 /* Verify that the basic properties of T match TV and thus T can be a variant
12472 of TV. TV should be the more specified variant (i.e. the main variant). */
12473
12474 static bool
12475 verify_type_variant (const_tree t, tree tv)
12476 {
12477 /* Type variant can differ by:
12478
12479 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12480 ENCODE_QUAL_ADDR_SPACE.
12481 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12482 in this case some values may not be set in the variant types
12483 (see TYPE_COMPLETE_P checks).
12484 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12485 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12486 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12487 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12488 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12489 this is necessary to make it possible to merge types from different TUs
12490 - arrays, pointers and references may have TREE_TYPE that is a variant
12491 of TREE_TYPE of their main variants.
12492 - aggregates may have new TYPE_FIELDS list that list variants of
12493 the main variant TYPE_FIELDS.
12494 - vector types may differ by TYPE_VECTOR_OPAQUE
12495 - TYPE_METHODS is always NULL for variant types and maintained for
12496 the main variant only.
12497 */
12498
12499 /* Convenience macro for matching individual fields. */
12500 #define verify_variant_match(flag) \
12501 do { \
12502 if (flag (tv) != flag (t)) \
12503 { \
12504 error ("type variant differs by " #flag "."); \
12505 debug_tree (tv); \
12506 return false; \
12507 } \
12508 } while (false)
12509
12510 /* tree_base checks. */
12511
12512 verify_variant_match (TREE_CODE);
12513 /* FIXME: Ada builds non-artificial variants of artificial types. */
12514 if (TYPE_ARTIFICIAL (tv) && 0)
12515 verify_variant_match (TYPE_ARTIFICIAL);
12516 if (POINTER_TYPE_P (tv))
12517 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12518 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12519 verify_variant_match (TYPE_UNSIGNED);
12520 verify_variant_match (TYPE_ALIGN_OK);
12521 verify_variant_match (TYPE_PACKED);
12522 if (TREE_CODE (t) == REFERENCE_TYPE)
12523 verify_variant_match (TYPE_REF_IS_RVALUE);
12524 verify_variant_match (TYPE_SATURATING);
12525 /* FIXME: This check triggers during the libstdc++ build. */
12526 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12527 verify_variant_match (TYPE_FINAL_P);
12528
12529 /* tree_type_common checks. */
12530
12531 if (COMPLETE_TYPE_P (t))
12532 {
12533 verify_variant_match (TYPE_SIZE);
12534 verify_variant_match (TYPE_MODE);
12535 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
12536 /* FIXME: ideally we should compare pointer equality, but the Java FE
12537 produces variants where the size is an INTEGER_CST of a different type
12538 (int wrt size_type) during the libjava build. */
12539 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
12540 {
12541 error ("type variant has different TYPE_SIZE_UNIT");
12542 debug_tree (tv);
12543 error ("type variant's TYPE_SIZE_UNIT");
12544 debug_tree (TYPE_SIZE_UNIT (tv));
12545 error ("type's TYPE_SIZE_UNIT");
12546 debug_tree (TYPE_SIZE_UNIT (t));
12547 return false;
12548 }
12549 }
12550 verify_variant_match (TYPE_PRECISION);
12551 verify_variant_match (TYPE_NO_FORCE_BLK);
12552 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12553 if (RECORD_OR_UNION_TYPE_P (t))
12554 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12555 else if (TREE_CODE (t) == ARRAY_TYPE)
12556 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12557 /* During LTO we merge variant lists from different translation units
12558 that may differ by TYPE_CONTEXT, which in turn may point
12559 to TRANSLATION_UNIT_DECL.
12560 Ada also builds variants of types with different TYPE_CONTEXT. */
12561 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12562 verify_variant_match (TYPE_CONTEXT);
12563 verify_variant_match (TYPE_STRING_FLAG);
12564 if (TYPE_ALIAS_SET_KNOWN_P (t) && TYPE_ALIAS_SET_KNOWN_P (tv))
12565 verify_variant_match (TYPE_ALIAS_SET);
12566
12567 /* tree_type_non_common checks. */
12568
12569 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12570 and dangles the pointer from time to time. */
12571 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12572 && (in_lto_p || !TYPE_VFIELD (tv)
12573 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12574 {
12575 error ("type variant has different TYPE_VFIELD");
12576 debug_tree (tv);
12577 return false;
12578 }
12579 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12580 || TREE_CODE (t) == INTEGER_TYPE
12581 || TREE_CODE (t) == BOOLEAN_TYPE
12582 || TREE_CODE (t) == REAL_TYPE
12583 || TREE_CODE (t) == FIXED_POINT_TYPE)
12584 {
12585 verify_variant_match (TYPE_MAX_VALUE);
12586 verify_variant_match (TYPE_MIN_VALUE);
12587 }
12588 if (TREE_CODE (t) == METHOD_TYPE)
12589 verify_variant_match (TYPE_METHOD_BASETYPE);
12590 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
12591 {
12592 error ("type variant has TYPE_METHODS");
12593 debug_tree (tv);
12594 return false;
12595 }
12596 if (TREE_CODE (t) == OFFSET_TYPE)
12597 verify_variant_match (TYPE_OFFSET_BASETYPE);
12598 if (TREE_CODE (t) == ARRAY_TYPE)
12599 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12600 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12601 or even the type's main variant. This is needed to make bootstrap pass
12602 and the bug seems new in GCC 5.
12603 The C++ FE should be updated to make this consistent and we should check
12604 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12605 is a match with the main variant.
12606
12607 Also disable the check for Java for now because of a parser hack that
12608 first builds a dummy BINFO and then sometimes replaces it with the real
12609 BINFO in some of the copies. */
12610 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12611 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12612 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12613 Since there is no cheap way to tell a C++ type from a Java type
12614 without LTO, do the checking at LTO time only. */
12615 && (in_lto_p && odr_type_p (t)))
12616 {
12617 error ("type variant has different TYPE_BINFO");
12618 debug_tree (tv);
12619 error ("type variant's TYPE_BINFO");
12620 debug_tree (TYPE_BINFO (tv));
12621 error ("type's TYPE_BINFO");
12622 debug_tree (TYPE_BINFO (t));
12623 return false;
12624 }
12625
12626 /* Check various uses of TYPE_VALUES_RAW. */
12627 if (TREE_CODE (t) == ENUMERAL_TYPE)
12628 verify_variant_match (TYPE_VALUES);
12629 else if (TREE_CODE (t) == ARRAY_TYPE)
12630 verify_variant_match (TYPE_DOMAIN);
12631 /* Permit incomplete variants of complete type. While FEs may complete
12632 all variants, this does not happen for C++ templates in all cases. */
12633 else if (RECORD_OR_UNION_TYPE_P (t)
12634 && COMPLETE_TYPE_P (t)
12635 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12636 {
12637 tree f1, f2;
12638
12639 /* Fortran builds qualified variants as new records with items of
12640 qualified type. Verify that they look the same. */
12641 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12642 f1 && f2;
12643 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12644 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12645 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12646 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12647 /* FIXME: gfc_nonrestricted_type builds all types as variants
12648 with the exception of pointer types. It deeply copies the type,
12649 which means that we may end up with a variant type
12650 referring to a non-variant pointer. We may change it to
12651 produce types as variants, too, like
12652 objc_get_protocol_qualified_type does. */
12653 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12654 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12655 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12656 break;
12657 if (f1 || f2)
12658 {
12659 error ("type variant has different TYPE_FIELDS");
12660 debug_tree (tv);
12661 error ("first mismatch is field");
12662 debug_tree (f1);
12663 error ("and field");
12664 debug_tree (f2);
12665 return false;
12666 }
12667 }
12668 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12669 verify_variant_match (TYPE_ARG_TYPES);
12670 /* For C++ the qualified variant of an array type is really an array type
12671 of the qualified TREE_TYPE.
12672 ObjC builds variants of pointers where the pointed-to type is a variant,
12673 too, in objc_get_protocol_qualified_type. */
12674 if (TREE_TYPE (t) != TREE_TYPE (tv)
12675 && ((TREE_CODE (t) != ARRAY_TYPE
12676 && !POINTER_TYPE_P (t))
12677 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12678 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12679 {
12680 error ("type variant has different TREE_TYPE");
12681 debug_tree (tv);
12682 error ("type variant's TREE_TYPE");
12683 debug_tree (TREE_TYPE (tv));
12684 error ("type's TREE_TYPE");
12685 debug_tree (TREE_TYPE (t));
12686 return false;
12687 }
12688 return true;
12689 #undef verify_variant_match
12690 }
12691
12692
12693 /* The TYPE_CANONICAL merging machinery. It should closely resemble
12694 the middle-end types_compatible_p function. It needs to avoid
12695 claiming types are different for types that should be treated
12696 the same with respect to TBAA. Canonical types are also used
12697 for IL consistency checks via the useless_type_conversion_p
12698 predicate which does not handle all type kinds itself but falls
12699 back to pointer-comparison of TYPE_CANONICAL for aggregates
12700 for example. */
12701
12702 /* Return true iff T1 and T2 are structurally identical as far as
12703 TBAA is concerned.
12704 This function is used both by lto.c canonical type merging and by the
12705 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
12706 types that have TYPE_CANONICAL defined and assume them equivalent. */
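/* For instance (illustrative):

     gimple_canonical_types_compatible_p (integer_type_node,
					  unsigned_type_node, false)
       returns false, since TYPE_UNSIGNED differs, while

     gimple_canonical_types_compatible_p
       (build_pointer_type (integer_type_node),
	build_pointer_type (unsigned_type_node), false)
       returns true, since for pointers only the address space and the
       TREE_CODE of the pointed-to type are compared.  */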
12707
12708 bool
12709 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
12710 bool trust_type_canonical)
12711 {
12712 /* Before starting to set up the SCC machinery handle simple cases. */
12713
12714 /* Check first for the obvious case of pointer identity. */
12715 if (t1 == t2)
12716 return true;
12717
12718 /* Check that we have two types to compare. */
12719 if (t1 == NULL_TREE || t2 == NULL_TREE)
12720 return false;
12721
12722 /* We consider complete types always compatible with incomplete types.
12723 This does not make sense for canonical type calculation and thus we
12724 need to ensure that we are never called on them.
12725
12726 FIXME: For more correctness the function probably should have three modes
12727 1) mode assuming that types are complete, matching their structure
12728 2) mode allowing incomplete types but producing equivalence classes
12729 and thus ignoring all info from complete types
12730 3) mode allowing incomplete types to match complete but checking
12731 compatibility between complete types.
12732
12733 1 and 2 can be used for canonical type calculation. 3 is the real
12734 definition of type compatibility that can be used e.g. for warnings during
12735 declaration merging. */
12736
12737 gcc_assert (!trust_type_canonical
12738 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
12739 /* If the types have been previously registered and found equal
12740 they still are. */
12741 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
12742 && trust_type_canonical)
12743 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
12744
12745 /* Can't be the same type if the types don't have the same code. */
12746 if (TREE_CODE (t1) != TREE_CODE (t2))
12747 return false;
12748
12749 /* Qualifiers do not matter for canonical type comparison purposes. */
12750
12751 /* Void types and nullptr types are always the same. */
12752 if (TREE_CODE (t1) == VOID_TYPE
12753 || TREE_CODE (t1) == NULLPTR_TYPE)
12754 return true;
12755
12756 /* Can't be the same type if they have different mode. */
12757 if (TYPE_MODE (t1) != TYPE_MODE (t2))
12758 return false;
12759
12760 /* Non-aggregate types can be handled cheaply. */
12761 if (INTEGRAL_TYPE_P (t1)
12762 || SCALAR_FLOAT_TYPE_P (t1)
12763 || FIXED_POINT_TYPE_P (t1)
12764 || TREE_CODE (t1) == VECTOR_TYPE
12765 || TREE_CODE (t1) == COMPLEX_TYPE
12766 || TREE_CODE (t1) == OFFSET_TYPE
12767 || POINTER_TYPE_P (t1))
12768 {
12769 /* Can't be the same type if they have different sign or precision. */
12770 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
12771 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
12772 return false;
12773
12774 if (TREE_CODE (t1) == INTEGER_TYPE
12775 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
12776 return false;
12777
12778 /* For canonical type comparisons we do not want to build SCCs
12779 so we cannot compare pointed-to types. But we can, for now,
12780 require the same pointed-to type kind and match what
12781 useless_type_conversion_p would do. */
12782 if (POINTER_TYPE_P (t1))
12783 {
12784 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
12785 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
12786 return false;
12787
12788 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
12789 return false;
12790 }
12791
12792 /* Tail-recurse to components. */
12793 if (TREE_CODE (t1) == VECTOR_TYPE
12794 || TREE_CODE (t1) == COMPLEX_TYPE)
12795 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
12796 TREE_TYPE (t2),
12797 trust_type_canonical);
12798
12799 return true;
12800 }
12801
12802 /* Do type-specific comparisons. */
12803 switch (TREE_CODE (t1))
12804 {
12805 case ARRAY_TYPE:
12806 /* Array types are the same if the element types are the same and
12807 the number of elements are the same. */
12808 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
12809 trust_type_canonical)
12810 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
12811 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
12812 return false;
12813 else
12814 {
12815 tree i1 = TYPE_DOMAIN (t1);
12816 tree i2 = TYPE_DOMAIN (t2);
12817
12818 /* For an incomplete external array, the type domain can be
12819 NULL_TREE. Check this condition also. */
12820 if (i1 == NULL_TREE && i2 == NULL_TREE)
12821 return true;
12822 else if (i1 == NULL_TREE || i2 == NULL_TREE)
12823 return false;
12824 else
12825 {
12826 tree min1 = TYPE_MIN_VALUE (i1);
12827 tree min2 = TYPE_MIN_VALUE (i2);
12828 tree max1 = TYPE_MAX_VALUE (i1);
12829 tree max2 = TYPE_MAX_VALUE (i2);
12830
12831 /* The minimum/maximum values have to be the same. */
12832 if ((min1 == min2
12833 || (min1 && min2
12834 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
12835 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
12836 || operand_equal_p (min1, min2, 0))))
12837 && (max1 == max2
12838 || (max1 && max2
12839 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
12840 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
12841 || operand_equal_p (max1, max2, 0)))))
12842 return true;
12843 else
12844 return false;
12845 }
12846 }
12847
12848 case METHOD_TYPE:
12849 case FUNCTION_TYPE:
12850 /* Function types are the same if the return type and arguments types
12851 are the same. */
12852 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
12853 trust_type_canonical))
12854 return false;
12855
12856 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
12857 return true;
12858 else
12859 {
12860 tree parms1, parms2;
12861
12862 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
12863 parms1 && parms2;
12864 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
12865 {
12866 if (!gimple_canonical_types_compatible_p
12867 (TREE_VALUE (parms1), TREE_VALUE (parms2),
12868 trust_type_canonical))
12869 return false;
12870 }
12871
12872 if (parms1 || parms2)
12873 return false;
12874
12875 return true;
12876 }
12877
12878 case RECORD_TYPE:
12879 case UNION_TYPE:
12880 case QUAL_UNION_TYPE:
12881 {
12882 tree f1, f2;
12883
12884 /* For aggregate types, all the fields must be the same. */
12885 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
12886 f1 || f2;
12887 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12888 {
12889 /* Skip non-fields. */
12890 while (f1 && TREE_CODE (f1) != FIELD_DECL)
12891 f1 = TREE_CHAIN (f1);
12892 while (f2 && TREE_CODE (f2) != FIELD_DECL)
12893 f2 = TREE_CHAIN (f2);
12894 if (!f1 || !f2)
12895 break;
12896 /* The fields must have the same name, offset and type. */
12897 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
12898 || !gimple_compare_field_offset (f1, f2)
12899 || !gimple_canonical_types_compatible_p
12900 (TREE_TYPE (f1), TREE_TYPE (f2),
12901 trust_type_canonical))
12902 return false;
12903 }
12904
12905 /* If one aggregate has more fields than the other, they
12906 are not the same. */
12907 if (f1 || f2)
12908 return false;
12909
12910 return true;
12911 }
12912
12913 default:
12914 /* Consider all types with language specific trees in them mutually
12915 compatible. This is executed only from verify_type and false
12916 positives can be tolerated. */
12917 gcc_assert (!in_lto_p);
12918 return true;
12919 }
12920 }
12921
12922 /* Verify type T. */
12923
12924 void
12925 verify_type (const_tree t)
12926 {
12927 bool error_found = false;
12928 tree mv = TYPE_MAIN_VARIANT (t);
12929 if (!mv)
12930 {
12931 error ("Main variant is not defined");
12932 error_found = true;
12933 }
12934 else if (mv != TYPE_MAIN_VARIANT (mv))
12935 {
12936 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
12937 debug_tree (mv);
12938 error_found = true;
12939 }
12940 else if (t != mv && !verify_type_variant (t, mv))
12941 error_found = true;
12942
12943 tree ct = TYPE_CANONICAL (t);
12944 if (!ct)
12945 ;
12946 else if (TYPE_CANONICAL (t) != ct)
12947 {
12948 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
12949 debug_tree (ct);
12950 error_found = true;
12951 }
12952 /* Method and function types cannot be used to address memory and thus
12953 TYPE_CANONICAL really matters only for determining useless conversions.
12954
12955 FIXME: The C++ FE produces declarations of builtin functions that are not
12956 compatible with main variants. */
12957 else if (TREE_CODE (t) == FUNCTION_TYPE)
12958 ;
12959 else if (t != ct
12960 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
12961 with variably sized arrays because their sizes may have been
12962 gimplified to different variables. */
12963 && !variably_modified_type_p (ct, NULL)
12964 && !gimple_canonical_types_compatible_p (t, ct, false))
12965 {
12966 error ("TYPE_CANONICAL is not compatible");
12967 debug_tree (ct);
12968 error_found = true;
12969 }
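   /* A small example of what the check above guards (a sketch, not an
      exhaustive rule): a variant such as "typedef int my_int;" shares
      TYPE_CANONICAL with plain int, and the structural walk with
      trust_type_canonical == false confirms the two really are compatible;
      FUNCTION_TYPEs and variably modified types are skipped per the FIXMEs
      above.  */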
12970
12971
12972 /* Check various uses of TYPE_MINVAL. */
12973 if (RECORD_OR_UNION_TYPE_P (t))
12974 {
12975 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12976 and dangles the pointer from time to time. */
12977 if (TYPE_VFIELD (t)
12978 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
12979 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
12980 {
12981 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
12982 debug_tree (TYPE_VFIELD (t));
12983 error_found = true;
12984 }
12985 }
12986 else if (TREE_CODE (t) == POINTER_TYPE)
12987 {
12988 if (TYPE_NEXT_PTR_TO (t)
12989 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
12990 {
12991 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
12992 debug_tree (TYPE_NEXT_PTR_TO (t));
12993 error_found = true;
12994 }
12995 }
12996 else if (TREE_CODE (t) == REFERENCE_TYPE)
12997 {
12998 if (TYPE_NEXT_REF_TO (t)
12999 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13000 {
13001 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13002 debug_tree (TYPE_NEXT_REF_TO (t));
13003 error_found = true;
13004 }
13005 }
13006 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13007 || TREE_CODE (t) == FIXED_POINT_TYPE)
13008 {
13009 /* FIXME: The following check should pass:
13010 useless_type_conversion_p (const_cast <tree> (t),
13011 TREE_TYPE (TYPE_MIN_VALUE (t)))
13012 but does not for C sizetypes in LTO. */
13013 }
13014 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13015 else if (TYPE_MINVAL (t)
13016 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13017 || in_lto_p))
13018 {
13019 error ("TYPE_MINVAL non-NULL");
13020 debug_tree (TYPE_MINVAL (t));
13021 error_found = true;
13022 }
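   /* For orientation, the accessors checked above are all views of the
      same TYPE_MINVAL slot; a rough map based only on the checks in this
      function (see tree.h for the authoritative definitions):

	RECORD/UNION/QUAL_UNION  -> TYPE_VFIELD
	POINTER_TYPE             -> TYPE_NEXT_PTR_TO
	REFERENCE_TYPE           -> TYPE_NEXT_REF_TO
	integral/real/fixed      -> TYPE_MIN_VALUE
	Java method/function     -> TYPE_ARGUMENT_SIGNATURE

      Any other type is expected to leave the slot NULL.  */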
13023
13024 /* Check various uses of TYPE_MAXVAL. */
13025 if (RECORD_OR_UNION_TYPE_P (t))
13026 {
13027 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13028 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13029 && TYPE_METHODS (t) != error_mark_node)
13030 {
13031 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13032 debug_tree (TYPE_METHODS (t));
13033 error_found = true;
13034 }
13035 }
13036 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13037 {
13038 if (TYPE_METHOD_BASETYPE (t)
13039 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13040 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13041 {
13042 error ("TYPE_METHOD_BASETYPE is not record nor union");
13043 debug_tree (TYPE_METHOD_BASETYPE (t));
13044 error_found = true;
13045 }
13046 }
13047 else if (TREE_CODE (t) == OFFSET_TYPE)
13048 {
13049 if (TYPE_OFFSET_BASETYPE (t)
13050 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13051 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13052 {
13053 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13054 debug_tree (TYPE_OFFSET_BASETYPE (t));
13055 error_found = true;
13056 }
13057 }
13058 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13059 || TREE_CODE (t) == FIXED_POINT_TYPE)
13060 {
13061 /* FIXME: The following check should pass:
13062 useless_type_conversion_p (const_cast <tree> (t),
13063 TREE_TYPE (TYPE_MAX_VALUE (t)))
13064 but does not for C sizetypes in LTO. */
13065 }
13066 else if (TREE_CODE (t) == ARRAY_TYPE)
13067 {
13068 if (TYPE_ARRAY_MAX_SIZE (t)
13069 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13070 {
13071 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13072 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13073 error_found = true;
13074 }
13075 }
13076 else if (TYPE_MAXVAL (t))
13077 {
13078 error ("TYPE_MAXVAL non-NULL");
13079 debug_tree (TYPE_MAXVAL (t));
13080 error_found = true;
13081 }
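   /* Similarly, the checks above cover the aliases of the TYPE_MAXVAL
      slot; a rough map derived from this function (tree.h is
      authoritative):

	RECORD/UNION/QUAL_UNION  -> TYPE_METHODS
	FUNCTION/METHOD_TYPE     -> TYPE_METHOD_BASETYPE
	OFFSET_TYPE              -> TYPE_OFFSET_BASETYPE
	integral/real/fixed      -> TYPE_MAX_VALUE
	ARRAY_TYPE               -> TYPE_ARRAY_MAX_SIZE

      Any other type is expected to leave the slot NULL.  */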
13082
13083 /* Check various uses of TYPE_BINFO. */
13084 if (RECORD_OR_UNION_TYPE_P (t))
13085 {
13086 if (!TYPE_BINFO (t))
13087 ;
13088 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13089 {
13090 error ("TYPE_BINFO is not TREE_BINFO");
13091 debug_tree (TYPE_BINFO (t));
13092 error_found = true;
13093 }
13094 /* FIXME: Java builds invalid empty binfos that do not have
13095 TREE_TYPE set. */
13096 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13097 {
13098 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13099 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13100 error_found = true;
13101 }
13102 }
13103 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13104 {
13105 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13106 debug_tree (TYPE_LANG_SLOT_1 (t));
13107 error_found = true;
13108 }
13109
13110 /* Check various uses of TYPE_VALUES_RAW. */
13111 if (TREE_CODE (t) == ENUMERAL_TYPE)
13112 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13113 {
13114 tree value = TREE_VALUE (l);
13115 tree name = TREE_PURPOSE (l);
13116
13117 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13118 a CONST_DECL of ENUMERAL_TYPE. */
13119 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13120 {
13121 error ("Enum value is not CONST_DECL or INTEGER_CST");
13122 debug_tree (value);
13123 debug_tree (name);
13124 error_found = true;
13125 }
13126 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13127 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13128 {
13129 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13130 debug_tree (value);
13131 debug_tree (name);
13132 error_found = true;
13133 }
13134 if (TREE_CODE (name) != IDENTIFIER_NODE)
13135 {
13136 error ("Enum value name is not IDENTIFIER_NODE");
13137 debug_tree (value);
13138 debug_tree (name);
13139 error_found = true;
13140 }
13141 }
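  /* Illustrative sketch: for "enum e { RED = 1 };" the C FE records
     TREE_VALUE (l) as the INTEGER_CST 1 of an integer type, while the C++
     FE records the CONST_DECL for RED whose type is the enum itself; both
     shapes pass the checks above.  TREE_PURPOSE (l) is the IDENTIFIER_NODE
     "RED" in either case.  */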
13142 else if (TREE_CODE (t) == ARRAY_TYPE)
13143 {
13144 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13145 {
13146 error ("Array TYPE_DOMAIN is not integer type");
13147 debug_tree (TYPE_DOMAIN (t));
13148 error_found = true;
13149 }
13150 }
13151 else if (RECORD_OR_UNION_TYPE_P (t))
13152 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13153 {
13154 /* TODO: verify properties of decls. */
13155 if (TREE_CODE (fld) == FIELD_DECL)
13156 ;
13157 else if (TREE_CODE (fld) == TYPE_DECL)
13158 ;
13159 else if (TREE_CODE (fld) == CONST_DECL)
13160 ;
13161 else if (TREE_CODE (fld) == VAR_DECL)
13162 ;
13163 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13164 ;
13165 else if (TREE_CODE (fld) == USING_DECL)
13166 ;
13167 else
13168 {
13169 error ("Wrong tree in TYPE_FIELDS list");
13170 debug_tree (fld);
13171 error_found = true;
13172 }
13173 }
13174 else if (TREE_CODE (t) == INTEGER_TYPE
13175 || TREE_CODE (t) == BOOLEAN_TYPE
13176 || TREE_CODE (t) == OFFSET_TYPE
13177 || TREE_CODE (t) == REFERENCE_TYPE
13178 || TREE_CODE (t) == NULLPTR_TYPE
13179 || TREE_CODE (t) == POINTER_TYPE)
13180 {
13181 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13182 {
13183 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13184 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13185 error_found = true;
13186 }
13187 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13188 {
13189 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13190 debug_tree (TYPE_CACHED_VALUES (t));
13191 error_found = true;
13192 }
13193 /* Verify just enough of the cache to ensure that no one copied it to a new
13194 type. All copying should go through copy_node, which should clear it. */
13195 else if (TYPE_CACHED_VALUES_P (t))
13196 {
13197 int i;
13198 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13199 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13200 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13201 {
13202 error ("wrong TYPE_CACHED_VALUES entry");
13203 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13204 error_found = true;
13205 break;
13206 }
13207 }
13208 }
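  /* The cache verified above holds the small integer constants that
     build_int_cst and cache_integer_cst reuse per type, which is why every
     entry must have TREE_TYPE equal to T.  A minimal sketch of how an
     entry appears (assuming the usual small-constant path):

	tree five = build_int_cst (integer_type_node, 5);

     A later build_int_cst (integer_type_node, 5) returns the same node,
     found through TYPE_CACHED_VALUES.  */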
13209 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13210 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13211 {
13212 /* The C++ FE uses TREE_PURPOSE to store default argument values. */
13213 if (TREE_PURPOSE (l) && in_lto_p)
13214 {
13215 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13216 debug_tree (l);
13217 error_found = true;
13218 }
13219 if (!TYPE_P (TREE_VALUE (l)))
13220 {
13221 error ("Wrong entry in TYPE_ARG_TYPES list");
13222 debug_tree (l);
13223 error_found = true;
13224 }
13225 }
13226 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13227 {
13228 error ("TYPE_VALUES_RAW field is non-NULL");
13229 debug_tree (TYPE_VALUES_RAW (t));
13230 error_found = true;
13231 }
13232 if (TREE_CODE (t) != INTEGER_TYPE
13233 && TREE_CODE (t) != BOOLEAN_TYPE
13234 && TREE_CODE (t) != OFFSET_TYPE
13235 && TREE_CODE (t) != REFERENCE_TYPE
13236 && TREE_CODE (t) != NULLPTR_TYPE
13237 && TREE_CODE (t) != POINTER_TYPE
13238 && TYPE_CACHED_VALUES_P (t))
13239 {
13240 error ("TYPE_CACHED_VALUES_P is set while it should not");
13241 error_found = true;
13242 }
13243 if (TYPE_STRING_FLAG (t)
13244 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13245 {
13246 error ("TYPE_STRING_FLAG is set on wrong type code");
13247 error_found = true;
13248 }
13249 else if (TYPE_STRING_FLAG (t))
13250 {
13251 const_tree b = t;
13252 if (TREE_CODE (b) == ARRAY_TYPE)
13253 b = TREE_TYPE (t);
13254 /* Java builds arrays with TYPE_STRING_FLAG set whose element type is
13255 promoted_char_type, which is 32 bits wide. */
13256 if (TREE_CODE (b) != INTEGER_TYPE)
13257 {
13258 error ("TYPE_STRING_FLAG is set on type that does not look like "
13259 "char nor array of chars");
13260 error_found = true;
13261 }
13262 }
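  /* A hedged example of the shapes accepted above: the Fortran FE sets
     TYPE_STRING_FLAG on CHARACTER types and arrays thereof, whose element
     type is an INTEGER_TYPE character kind; Java's 32-bit
     promoted_char_type is why only "some INTEGER_TYPE" is required rather
     than a plain char type.  */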
13263
13264 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always the
13265 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13266 of a type. */
13267 if (TREE_CODE (t) == METHOD_TYPE
13268 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13269 {
13270 error ("TYPE_METHOD_BASETYPE is not main variant");
13271 error_found = true;
13272 }
13273
13274 if (error_found)
13275 {
13276 debug_tree (const_cast <tree> (t));
13277 internal_error ("verify_type failed");
13278 }
13279 }
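/* A minimal usage sketch (illustration only, not part of the checkers):
   with internal checking enabled, a front end or a debugging session can
   validate a freshly built type directly:

     tree t = build_array_type (char_type_node,
				build_index_type (size_int (9)));
     verify_type (t);

   verify_type either returns silently or aborts through internal_error
   after dumping the offending tree.  */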
13280
13281 #include "gt-tree.h"