dojump.h: New header file.
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "attribs.h"
49 #include "varasm.h"
50 #include "tm_p.h"
51 #include "hashtab.h"
52 #include "hard-reg-set.h"
53 #include "function.h"
54 #include "obstack.h"
55 #include "toplev.h" /* get_random_seed */
56 #include "filenames.h"
57 #include "output.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "langhooks.h"
61 #include "tree-inline.h"
62 #include "tree-iterator.h"
63 #include "predict.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "gimple-expr.h"
71 #include "is-a.h"
72 #include "gimple.h"
73 #include "gimple-iterator.h"
74 #include "gimplify.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-phinodes.h"
81 #include "stringpool.h"
82 #include "tree-ssanames.h"
83 #include "rtl.h"
84 #include "statistics.h"
85 #include "real.h"
86 #include "fixed-value.h"
87 #include "insn-config.h"
88 #include "expmed.h"
89 #include "dojump.h"
90 #include "explow.h"
91 #include "emit-rtl.h"
92 #include "stmt.h"
93 #include "expr.h"
94 #include "tree-dfa.h"
95 #include "params.h"
96 #include "tree-pass.h"
97 #include "langhooks-def.h"
98 #include "diagnostic.h"
99 #include "tree-diagnostic.h"
100 #include "tree-pretty-print.h"
101 #include "except.h"
102 #include "debug.h"
103 #include "intl.h"
104 #include "builtins.h"
105
106 /* Tree code classes. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
109 #define END_OF_BASE_TREE_CODES tcc_exceptional,
110
111 const enum tree_code_class tree_code_type[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Table indexed by tree code giving number of expression
119 operands beyond the fixed part of the node structure.
120 Not used for types or decls. */
121
122 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
123 #define END_OF_BASE_TREE_CODES 0,
124
125 const unsigned char tree_code_length[] = {
126 #include "all-tree.def"
127 };
128
129 #undef DEFTREECODE
130 #undef END_OF_BASE_TREE_CODES
131
132 /* Names of tree components.
133 Used for printing out the tree and error messages. */
134 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
135 #define END_OF_BASE_TREE_CODES "@dummy",
136
137 static const char *const tree_code_name[] = {
138 #include "all-tree.def"
139 };
140
141 #undef DEFTREECODE
142 #undef END_OF_BASE_TREE_CODES
143
144 /* Each tree code class has an associated string representation.
145 These must correspond to the tree_code_class entries. */
146
147 const char *const tree_code_class_strings[] =
148 {
149 "exceptional",
150 "constant",
151 "type",
152 "declaration",
153 "reference",
154 "comparison",
155 "unary",
156 "binary",
157 "statement",
158 "vl_exp",
159 "expression"
160 };
161
162 /* obstack.[ch] explicitly declined to prototype this. */
163 extern int _obstack_allocated_p (struct obstack *h, void *obj);
164
165 /* Statistics-gathering stuff. */
166
167 static int tree_code_counts[MAX_TREE_CODES];
168 int tree_node_counts[(int) all_kinds];
169 int tree_node_sizes[(int) all_kinds];
170
171 /* Keep in sync with tree.h:enum tree_node_kind. */
172 static const char * const tree_node_kind_names[] = {
173 "decls",
174 "types",
175 "blocks",
176 "stmts",
177 "refs",
178 "exprs",
179 "constants",
180 "identifiers",
181 "vecs",
182 "binfos",
183 "ssa names",
184 "constructors",
185 "random kinds",
186 "lang_decl kinds",
187 "lang_type kinds",
188 "omp clauses",
189 };
190
191 /* Unique id for next decl created. */
192 static GTY(()) int next_decl_uid;
193 /* Unique id for next type created. */
194 static GTY(()) int next_type_uid = 1;
195 /* Unique id for next debug decl created. Use negative numbers,
196 to catch erroneous uses. */
197 static GTY(()) int next_debug_decl_uid;
198
199 /* Since we cannot rehash a type after it is in the table, we have to
200 keep the hash code. */
201
202 struct GTY((for_user)) type_hash {
203 unsigned long hash;
204 tree type;
205 };
206
207 /* Initial size of the hash table (rounded to next prime). */
208 #define TYPE_HASH_INITIAL_SIZE 1000
209
210 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
211 {
212 static hashval_t hash (type_hash *t) { return t->hash; }
213 static bool equal (type_hash *a, type_hash *b);
214
215 static void
216 handle_cache_entry (type_hash *&t)
217 {
218 extern void gt_ggc_mx (type_hash *&);
219 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
220 return;
221 else if (ggc_marked_p (t->type))
222 gt_ggc_mx (t);
223 else
224 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
225 }
226 };
227
228 /* Now here is the hash table. When recording a type, it is added to
229 the slot whose index is the hash code. Note that the hash table is
230 used for several kinds of types (function types, array types and
231 array index range types, for now). While all these live in the
232 same table, they are completely independent, and the hash code is
233 computed differently for each of these. */
234
235 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
236
237 /* Hash table and temporary node for larger integer const values. */
238 static GTY (()) tree int_cst_node;
239
240 struct int_cst_hasher : ggc_cache_hasher<tree>
241 {
242 static hashval_t hash (tree t);
243 static bool equal (tree x, tree y);
244 };
245
246 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
247
248 /* Hash table for optimization flags and target option flags. Use the same
249 hash table for both sets of options. Nodes for building the current
250 optimization and target option nodes. The assumption is most of the time
251 the options created will already be in the hash table, so we avoid
252 allocating and freeing up a node repeatably. */
253 static GTY (()) tree cl_optimization_node;
254 static GTY (()) tree cl_target_option_node;
255
256 struct cl_option_hasher : ggc_cache_hasher<tree>
257 {
258 static hashval_t hash (tree t);
259 static bool equal (tree x, tree y);
260 };
261
262 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
263
264 /* General tree->tree mapping structure for use in hash tables. */
265
266
267 static GTY ((cache))
268 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
269
270 static GTY ((cache))
271 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
272
273 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
274 {
275 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
276
277 static bool
278 equal (tree_vec_map *a, tree_vec_map *b)
279 {
280 return a->base.from == b->base.from;
281 }
282
283 static void
284 handle_cache_entry (tree_vec_map *&m)
285 {
286 extern void gt_ggc_mx (tree_vec_map *&);
287 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
288 return;
289 else if (ggc_marked_p (m->base.from))
290 gt_ggc_mx (m);
291 else
292 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
293 }
294 };
295
296 static GTY ((cache))
297 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
298
299 static void set_type_quals (tree, int);
300 static void print_type_hash_statistics (void);
301 static void print_debug_expr_statistics (void);
302 static void print_value_expr_statistics (void);
303 static void type_hash_list (const_tree, inchash::hash &);
304 static void attribute_hash_list (const_tree, inchash::hash &);
305
306 tree global_trees[TI_MAX];
307 tree integer_types[itk_none];
308
309 bool int_n_enabled_p[NUM_INT_N_ENTS];
310 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
311
312 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
313
314 /* Number of operands for each OpenMP clause. */
315 unsigned const char omp_clause_num_ops[] =
316 {
317 0, /* OMP_CLAUSE_ERROR */
318 1, /* OMP_CLAUSE_PRIVATE */
319 1, /* OMP_CLAUSE_SHARED */
320 1, /* OMP_CLAUSE_FIRSTPRIVATE */
321 2, /* OMP_CLAUSE_LASTPRIVATE */
322 4, /* OMP_CLAUSE_REDUCTION */
323 1, /* OMP_CLAUSE_COPYIN */
324 1, /* OMP_CLAUSE_COPYPRIVATE */
325 3, /* OMP_CLAUSE_LINEAR */
326 2, /* OMP_CLAUSE_ALIGNED */
327 1, /* OMP_CLAUSE_DEPEND */
328 1, /* OMP_CLAUSE_UNIFORM */
329 2, /* OMP_CLAUSE_FROM */
330 2, /* OMP_CLAUSE_TO */
331 2, /* OMP_CLAUSE_MAP */
332 1, /* OMP_CLAUSE__LOOPTEMP_ */
333 1, /* OMP_CLAUSE_IF */
334 1, /* OMP_CLAUSE_NUM_THREADS */
335 1, /* OMP_CLAUSE_SCHEDULE */
336 0, /* OMP_CLAUSE_NOWAIT */
337 0, /* OMP_CLAUSE_ORDERED */
338 0, /* OMP_CLAUSE_DEFAULT */
339 3, /* OMP_CLAUSE_COLLAPSE */
340 0, /* OMP_CLAUSE_UNTIED */
341 1, /* OMP_CLAUSE_FINAL */
342 0, /* OMP_CLAUSE_MERGEABLE */
343 1, /* OMP_CLAUSE_DEVICE */
344 1, /* OMP_CLAUSE_DIST_SCHEDULE */
345 0, /* OMP_CLAUSE_INBRANCH */
346 0, /* OMP_CLAUSE_NOTINBRANCH */
347 1, /* OMP_CLAUSE_NUM_TEAMS */
348 1, /* OMP_CLAUSE_THREAD_LIMIT */
349 0, /* OMP_CLAUSE_PROC_BIND */
350 1, /* OMP_CLAUSE_SAFELEN */
351 1, /* OMP_CLAUSE_SIMDLEN */
352 0, /* OMP_CLAUSE_FOR */
353 0, /* OMP_CLAUSE_PARALLEL */
354 0, /* OMP_CLAUSE_SECTIONS */
355 0, /* OMP_CLAUSE_TASKGROUP */
356 1, /* OMP_CLAUSE__SIMDUID_ */
357 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
358 };
359
360 const char * const omp_clause_code_name[] =
361 {
362 "error_clause",
363 "private",
364 "shared",
365 "firstprivate",
366 "lastprivate",
367 "reduction",
368 "copyin",
369 "copyprivate",
370 "linear",
371 "aligned",
372 "depend",
373 "uniform",
374 "from",
375 "to",
376 "map",
377 "_looptemp_",
378 "if",
379 "num_threads",
380 "schedule",
381 "nowait",
382 "ordered",
383 "default",
384 "collapse",
385 "untied",
386 "final",
387 "mergeable",
388 "device",
389 "dist_schedule",
390 "inbranch",
391 "notinbranch",
392 "num_teams",
393 "thread_limit",
394 "proc_bind",
395 "safelen",
396 "simdlen",
397 "for",
398 "parallel",
399 "sections",
400 "taskgroup",
401 "_simduid_",
402 "_Cilk_for_count_"
403 };
404
405
406 /* Return the tree node structure used by tree code CODE. */
407
408 static inline enum tree_node_structure_enum
409 tree_node_structure_for_code (enum tree_code code)
410 {
411 switch (TREE_CODE_CLASS (code))
412 {
413 case tcc_declaration:
414 {
415 switch (code)
416 {
417 case FIELD_DECL:
418 return TS_FIELD_DECL;
419 case PARM_DECL:
420 return TS_PARM_DECL;
421 case VAR_DECL:
422 return TS_VAR_DECL;
423 case LABEL_DECL:
424 return TS_LABEL_DECL;
425 case RESULT_DECL:
426 return TS_RESULT_DECL;
427 case DEBUG_EXPR_DECL:
428 return TS_DECL_WRTL;
429 case CONST_DECL:
430 return TS_CONST_DECL;
431 case TYPE_DECL:
432 return TS_TYPE_DECL;
433 case FUNCTION_DECL:
434 return TS_FUNCTION_DECL;
435 case TRANSLATION_UNIT_DECL:
436 return TS_TRANSLATION_UNIT_DECL;
437 default:
438 return TS_DECL_NON_COMMON;
439 }
440 }
441 case tcc_type:
442 return TS_TYPE_NON_COMMON;
443 case tcc_reference:
444 case tcc_comparison:
445 case tcc_unary:
446 case tcc_binary:
447 case tcc_expression:
448 case tcc_statement:
449 case tcc_vl_exp:
450 return TS_EXP;
451 default: /* tcc_constant and tcc_exceptional */
452 break;
453 }
454 switch (code)
455 {
456 /* tcc_constant cases. */
457 case VOID_CST: return TS_TYPED;
458 case INTEGER_CST: return TS_INT_CST;
459 case REAL_CST: return TS_REAL_CST;
460 case FIXED_CST: return TS_FIXED_CST;
461 case COMPLEX_CST: return TS_COMPLEX;
462 case VECTOR_CST: return TS_VECTOR;
463 case STRING_CST: return TS_STRING;
464 /* tcc_exceptional cases. */
465 case ERROR_MARK: return TS_COMMON;
466 case IDENTIFIER_NODE: return TS_IDENTIFIER;
467 case TREE_LIST: return TS_LIST;
468 case TREE_VEC: return TS_VEC;
469 case SSA_NAME: return TS_SSA_NAME;
470 case PLACEHOLDER_EXPR: return TS_COMMON;
471 case STATEMENT_LIST: return TS_STATEMENT_LIST;
472 case BLOCK: return TS_BLOCK;
473 case CONSTRUCTOR: return TS_CONSTRUCTOR;
474 case TREE_BINFO: return TS_BINFO;
475 case OMP_CLAUSE: return TS_OMP_CLAUSE;
476 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
477 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
478
479 default:
480 gcc_unreachable ();
481 }
482 }
483
484
485 /* Initialize tree_contains_struct to describe the hierarchy of tree
486 nodes. */
487
488 static void
489 initialize_tree_contains_struct (void)
490 {
491 unsigned i;
492
493 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
494 {
495 enum tree_code code;
496 enum tree_node_structure_enum ts_code;
497
498 code = (enum tree_code) i;
499 ts_code = tree_node_structure_for_code (code);
500
501 /* Mark the TS structure itself. */
502 tree_contains_struct[code][ts_code] = 1;
503
504 /* Mark all the structures that TS is derived from. */
505 switch (ts_code)
506 {
507 case TS_TYPED:
508 case TS_BLOCK:
509 MARK_TS_BASE (code);
510 break;
511
512 case TS_COMMON:
513 case TS_INT_CST:
514 case TS_REAL_CST:
515 case TS_FIXED_CST:
516 case TS_VECTOR:
517 case TS_STRING:
518 case TS_COMPLEX:
519 case TS_SSA_NAME:
520 case TS_CONSTRUCTOR:
521 case TS_EXP:
522 case TS_STATEMENT_LIST:
523 MARK_TS_TYPED (code);
524 break;
525
526 case TS_IDENTIFIER:
527 case TS_DECL_MINIMAL:
528 case TS_TYPE_COMMON:
529 case TS_LIST:
530 case TS_VEC:
531 case TS_BINFO:
532 case TS_OMP_CLAUSE:
533 case TS_OPTIMIZATION:
534 case TS_TARGET_OPTION:
535 MARK_TS_COMMON (code);
536 break;
537
538 case TS_TYPE_WITH_LANG_SPECIFIC:
539 MARK_TS_TYPE_COMMON (code);
540 break;
541
542 case TS_TYPE_NON_COMMON:
543 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
544 break;
545
546 case TS_DECL_COMMON:
547 MARK_TS_DECL_MINIMAL (code);
548 break;
549
550 case TS_DECL_WRTL:
551 case TS_CONST_DECL:
552 MARK_TS_DECL_COMMON (code);
553 break;
554
555 case TS_DECL_NON_COMMON:
556 MARK_TS_DECL_WITH_VIS (code);
557 break;
558
559 case TS_DECL_WITH_VIS:
560 case TS_PARM_DECL:
561 case TS_LABEL_DECL:
562 case TS_RESULT_DECL:
563 MARK_TS_DECL_WRTL (code);
564 break;
565
566 case TS_FIELD_DECL:
567 MARK_TS_DECL_COMMON (code);
568 break;
569
570 case TS_VAR_DECL:
571 MARK_TS_DECL_WITH_VIS (code);
572 break;
573
574 case TS_TYPE_DECL:
575 case TS_FUNCTION_DECL:
576 MARK_TS_DECL_NON_COMMON (code);
577 break;
578
579 case TS_TRANSLATION_UNIT_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
582
583 default:
584 gcc_unreachable ();
585 }
586 }
587
588 /* Basic consistency checks for attributes used in fold. */
589 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
590 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
591 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
592 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
593 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
594 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
601 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
602 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
603 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
606 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
607 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
608 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
615 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
616 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
617 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
618 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
619 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
620 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
621 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
622 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
623 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
624 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
625 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
629 }
630
631
632 /* Init tree.c. */
633
634 void
635 init_ttree (void)
636 {
637 /* Initialize the hash table of types. */
638 type_hash_table
639 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
640
641 debug_expr_for_decl
642 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
643
644 value_expr_for_decl
645 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
646
647 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
648
649 int_cst_node = make_int_cst (1, 1);
650
651 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
652
653 cl_optimization_node = make_node (OPTIMIZATION_NODE);
654 cl_target_option_node = make_node (TARGET_OPTION_NODE);
655
656 /* Initialize the tree_contains_struct array. */
657 initialize_tree_contains_struct ();
658 lang_hooks.init_ts ();
659 }
660
661 \f
662 /* The name of the object as the assembler will see it (but before any
663 translations made by ASM_OUTPUT_LABELREF). Often this is the same
664 as DECL_NAME. It is an IDENTIFIER_NODE. */
665 tree
666 decl_assembler_name (tree decl)
667 {
668 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
669 lang_hooks.set_decl_assembler_name (decl);
670 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
671 }
672
673 /* When the target supports COMDAT groups, this indicates which group the
674 DECL is associated with. This can be either an IDENTIFIER_NODE or a
675 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
676 tree
677 decl_comdat_group (const_tree node)
678 {
679 struct symtab_node *snode = symtab_node::get (node);
680 if (!snode)
681 return NULL;
682 return snode->get_comdat_group ();
683 }
684
685 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
686 tree
687 decl_comdat_group_id (const_tree node)
688 {
689 struct symtab_node *snode = symtab_node::get (node);
690 if (!snode)
691 return NULL;
692 return snode->get_comdat_group_id ();
693 }
694
695 /* When the target supports named section, return its name as IDENTIFIER_NODE
696 or NULL if it is in no section. */
697 const char *
698 decl_section_name (const_tree node)
699 {
700 struct symtab_node *snode = symtab_node::get (node);
701 if (!snode)
702 return NULL;
703 return snode->get_section ();
704 }
705
706 /* Set section section name of NODE to VALUE (that is expected to
707 be identifier node) */
708 void
709 set_decl_section_name (tree node, const char *value)
710 {
711 struct symtab_node *snode;
712
713 if (value == NULL)
714 {
715 snode = symtab_node::get (node);
716 if (!snode)
717 return;
718 }
719 else if (TREE_CODE (node) == VAR_DECL)
720 snode = varpool_node::get_create (node);
721 else
722 snode = cgraph_node::get_create (node);
723 snode->set_section (value);
724 }
725
726 /* Return TLS model of a variable NODE. */
727 enum tls_model
728 decl_tls_model (const_tree node)
729 {
730 struct varpool_node *snode = varpool_node::get (node);
731 if (!snode)
732 return TLS_MODEL_NONE;
733 return snode->tls_model;
734 }
735
736 /* Set TLS model of variable NODE to MODEL. */
737 void
738 set_decl_tls_model (tree node, enum tls_model model)
739 {
740 struct varpool_node *vnode;
741
742 if (model == TLS_MODEL_NONE)
743 {
744 vnode = varpool_node::get (node);
745 if (!vnode)
746 return;
747 }
748 else
749 vnode = varpool_node::get_create (node);
750 vnode->tls_model = model;
751 }
752
753 /* Compute the number of bytes occupied by a tree with code CODE.
754 This function cannot be used for nodes that have variable sizes,
755 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
756 size_t
757 tree_code_size (enum tree_code code)
758 {
759 switch (TREE_CODE_CLASS (code))
760 {
761 case tcc_declaration: /* A decl node */
762 {
763 switch (code)
764 {
765 case FIELD_DECL:
766 return sizeof (struct tree_field_decl);
767 case PARM_DECL:
768 return sizeof (struct tree_parm_decl);
769 case VAR_DECL:
770 return sizeof (struct tree_var_decl);
771 case LABEL_DECL:
772 return sizeof (struct tree_label_decl);
773 case RESULT_DECL:
774 return sizeof (struct tree_result_decl);
775 case CONST_DECL:
776 return sizeof (struct tree_const_decl);
777 case TYPE_DECL:
778 return sizeof (struct tree_type_decl);
779 case FUNCTION_DECL:
780 return sizeof (struct tree_function_decl);
781 case DEBUG_EXPR_DECL:
782 return sizeof (struct tree_decl_with_rtl);
783 case TRANSLATION_UNIT_DECL:
784 return sizeof (struct tree_translation_unit_decl);
785 case NAMESPACE_DECL:
786 case IMPORTED_DECL:
787 case NAMELIST_DECL:
788 return sizeof (struct tree_decl_non_common);
789 default:
790 return lang_hooks.tree_size (code);
791 }
792 }
793
794 case tcc_type: /* a type node */
795 return sizeof (struct tree_type_non_common);
796
797 case tcc_reference: /* a reference */
798 case tcc_expression: /* an expression */
799 case tcc_statement: /* an expression with side effects */
800 case tcc_comparison: /* a comparison expression */
801 case tcc_unary: /* a unary arithmetic expression */
802 case tcc_binary: /* a binary arithmetic expression */
803 return (sizeof (struct tree_exp)
804 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
805
806 case tcc_constant: /* a constant */
807 switch (code)
808 {
809 case VOID_CST: return sizeof (struct tree_typed);
810 case INTEGER_CST: gcc_unreachable ();
811 case REAL_CST: return sizeof (struct tree_real_cst);
812 case FIXED_CST: return sizeof (struct tree_fixed_cst);
813 case COMPLEX_CST: return sizeof (struct tree_complex);
814 case VECTOR_CST: return sizeof (struct tree_vector);
815 case STRING_CST: gcc_unreachable ();
816 default:
817 return lang_hooks.tree_size (code);
818 }
819
820 case tcc_exceptional: /* something random, like an identifier. */
821 switch (code)
822 {
823 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
824 case TREE_LIST: return sizeof (struct tree_list);
825
826 case ERROR_MARK:
827 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
828
829 case TREE_VEC:
830 case OMP_CLAUSE: gcc_unreachable ();
831
832 case SSA_NAME: return sizeof (struct tree_ssa_name);
833
834 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
835 case BLOCK: return sizeof (struct tree_block);
836 case CONSTRUCTOR: return sizeof (struct tree_constructor);
837 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
838 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
839
840 default:
841 return lang_hooks.tree_size (code);
842 }
843
844 default:
845 gcc_unreachable ();
846 }
847 }
848
849 /* Compute the number of bytes occupied by NODE. This routine only
850 looks at TREE_CODE, except for those nodes that have variable sizes. */
851 size_t
852 tree_size (const_tree node)
853 {
854 const enum tree_code code = TREE_CODE (node);
855 switch (code)
856 {
857 case INTEGER_CST:
858 return (sizeof (struct tree_int_cst)
859 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
860
861 case TREE_BINFO:
862 return (offsetof (struct tree_binfo, base_binfos)
863 + vec<tree, va_gc>
864 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
865
866 case TREE_VEC:
867 return (sizeof (struct tree_vec)
868 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
869
870 case VECTOR_CST:
871 return (sizeof (struct tree_vector)
872 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
873
874 case STRING_CST:
875 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
876
877 case OMP_CLAUSE:
878 return (sizeof (struct tree_omp_clause)
879 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
880 * sizeof (tree));
881
882 default:
883 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
884 return (sizeof (struct tree_exp)
885 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
886 else
887 return tree_code_size (code);
888 }
889 }
890
891 /* Record interesting allocation statistics for a tree node with CODE
892 and LENGTH. */
893
894 static void
895 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
896 size_t length ATTRIBUTE_UNUSED)
897 {
898 enum tree_code_class type = TREE_CODE_CLASS (code);
899 tree_node_kind kind;
900
901 if (!GATHER_STATISTICS)
902 return;
903
904 switch (type)
905 {
906 case tcc_declaration: /* A decl node */
907 kind = d_kind;
908 break;
909
910 case tcc_type: /* a type node */
911 kind = t_kind;
912 break;
913
914 case tcc_statement: /* an expression with side effects */
915 kind = s_kind;
916 break;
917
918 case tcc_reference: /* a reference */
919 kind = r_kind;
920 break;
921
922 case tcc_expression: /* an expression */
923 case tcc_comparison: /* a comparison expression */
924 case tcc_unary: /* a unary arithmetic expression */
925 case tcc_binary: /* a binary arithmetic expression */
926 kind = e_kind;
927 break;
928
929 case tcc_constant: /* a constant */
930 kind = c_kind;
931 break;
932
933 case tcc_exceptional: /* something random, like an identifier. */
934 switch (code)
935 {
936 case IDENTIFIER_NODE:
937 kind = id_kind;
938 break;
939
940 case TREE_VEC:
941 kind = vec_kind;
942 break;
943
944 case TREE_BINFO:
945 kind = binfo_kind;
946 break;
947
948 case SSA_NAME:
949 kind = ssa_name_kind;
950 break;
951
952 case BLOCK:
953 kind = b_kind;
954 break;
955
956 case CONSTRUCTOR:
957 kind = constr_kind;
958 break;
959
960 case OMP_CLAUSE:
961 kind = omp_clause_kind;
962 break;
963
964 default:
965 kind = x_kind;
966 break;
967 }
968 break;
969
970 case tcc_vl_exp:
971 kind = e_kind;
972 break;
973
974 default:
975 gcc_unreachable ();
976 }
977
978 tree_code_counts[(int) code]++;
979 tree_node_counts[(int) kind]++;
980 tree_node_sizes[(int) kind] += length;
981 }
982
983 /* Allocate and return a new UID from the DECL_UID namespace. */
984
985 int
986 allocate_decl_uid (void)
987 {
988 return next_decl_uid++;
989 }
990
991 /* Return a newly allocated node of code CODE. For decl and type
992 nodes, some other fields are initialized. The rest of the node is
993 initialized to zero. This function cannot be used for TREE_VEC,
994 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
995 tree_code_size.
996
997 Achoo! I got a code in the node. */
998
999 tree
1000 make_node_stat (enum tree_code code MEM_STAT_DECL)
1001 {
1002 tree t;
1003 enum tree_code_class type = TREE_CODE_CLASS (code);
1004 size_t length = tree_code_size (code);
1005
1006 record_node_allocation_statistics (code, length);
1007
1008 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1009 TREE_SET_CODE (t, code);
1010
1011 switch (type)
1012 {
1013 case tcc_statement:
1014 TREE_SIDE_EFFECTS (t) = 1;
1015 break;
1016
1017 case tcc_declaration:
1018 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1019 {
1020 if (code == FUNCTION_DECL)
1021 {
1022 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1023 DECL_MODE (t) = FUNCTION_MODE;
1024 }
1025 else
1026 DECL_ALIGN (t) = 1;
1027 }
1028 DECL_SOURCE_LOCATION (t) = input_location;
1029 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1030 DECL_UID (t) = --next_debug_decl_uid;
1031 else
1032 {
1033 DECL_UID (t) = allocate_decl_uid ();
1034 SET_DECL_PT_UID (t, -1);
1035 }
1036 if (TREE_CODE (t) == LABEL_DECL)
1037 LABEL_DECL_UID (t) = -1;
1038
1039 break;
1040
1041 case tcc_type:
1042 TYPE_UID (t) = next_type_uid++;
1043 TYPE_ALIGN (t) = BITS_PER_UNIT;
1044 TYPE_USER_ALIGN (t) = 0;
1045 TYPE_MAIN_VARIANT (t) = t;
1046 TYPE_CANONICAL (t) = t;
1047
1048 /* Default to no attributes for type, but let target change that. */
1049 TYPE_ATTRIBUTES (t) = NULL_TREE;
1050 targetm.set_default_type_attributes (t);
1051
1052 /* We have not yet computed the alias set for this type. */
1053 TYPE_ALIAS_SET (t) = -1;
1054 break;
1055
1056 case tcc_constant:
1057 TREE_CONSTANT (t) = 1;
1058 break;
1059
1060 case tcc_expression:
1061 switch (code)
1062 {
1063 case INIT_EXPR:
1064 case MODIFY_EXPR:
1065 case VA_ARG_EXPR:
1066 case PREDECREMENT_EXPR:
1067 case PREINCREMENT_EXPR:
1068 case POSTDECREMENT_EXPR:
1069 case POSTINCREMENT_EXPR:
1070 /* All of these have side-effects, no matter what their
1071 operands are. */
1072 TREE_SIDE_EFFECTS (t) = 1;
1073 break;
1074
1075 default:
1076 break;
1077 }
1078 break;
1079
1080 default:
1081 /* Other classes need no special treatment. */
1082 break;
1083 }
1084
1085 return t;
1086 }
1087 \f
1088 /* Return a new node with the same contents as NODE except that its
1089 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1090
1091 tree
1092 copy_node_stat (tree node MEM_STAT_DECL)
1093 {
1094 tree t;
1095 enum tree_code code = TREE_CODE (node);
1096 size_t length;
1097
1098 gcc_assert (code != STATEMENT_LIST);
1099
1100 length = tree_size (node);
1101 record_node_allocation_statistics (code, length);
1102 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1103 memcpy (t, node, length);
1104
1105 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1106 TREE_CHAIN (t) = 0;
1107 TREE_ASM_WRITTEN (t) = 0;
1108 TREE_VISITED (t) = 0;
1109
1110 if (TREE_CODE_CLASS (code) == tcc_declaration)
1111 {
1112 if (code == DEBUG_EXPR_DECL)
1113 DECL_UID (t) = --next_debug_decl_uid;
1114 else
1115 {
1116 DECL_UID (t) = allocate_decl_uid ();
1117 if (DECL_PT_UID_SET_P (node))
1118 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1119 }
1120 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1121 && DECL_HAS_VALUE_EXPR_P (node))
1122 {
1123 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1124 DECL_HAS_VALUE_EXPR_P (t) = 1;
1125 }
1126 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1127 if (TREE_CODE (node) == VAR_DECL)
1128 {
1129 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1130 t->decl_with_vis.symtab_node = NULL;
1131 }
1132 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1133 {
1134 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1135 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1136 }
1137 if (TREE_CODE (node) == FUNCTION_DECL)
1138 {
1139 DECL_STRUCT_FUNCTION (t) = NULL;
1140 t->decl_with_vis.symtab_node = NULL;
1141 }
1142 }
1143 else if (TREE_CODE_CLASS (code) == tcc_type)
1144 {
1145 TYPE_UID (t) = next_type_uid++;
1146 /* The following is so that the debug code for
1147 the copy is different from the original type.
1148 The two statements usually duplicate each other
1149 (because they clear fields of the same union),
1150 but the optimizer should catch that. */
1151 TYPE_SYMTAB_POINTER (t) = 0;
1152 TYPE_SYMTAB_ADDRESS (t) = 0;
1153
1154 /* Do not copy the values cache. */
1155 if (TYPE_CACHED_VALUES_P (t))
1156 {
1157 TYPE_CACHED_VALUES_P (t) = 0;
1158 TYPE_CACHED_VALUES (t) = NULL_TREE;
1159 }
1160 }
1161
1162 return t;
1163 }
1164
1165 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1166 For example, this can copy a list made of TREE_LIST nodes. */
1167
1168 tree
1169 copy_list (tree list)
1170 {
1171 tree head;
1172 tree prev, next;
1173
1174 if (list == 0)
1175 return 0;
1176
1177 head = prev = copy_node (list);
1178 next = TREE_CHAIN (list);
1179 while (next)
1180 {
1181 TREE_CHAIN (prev) = copy_node (next);
1182 prev = TREE_CHAIN (prev);
1183 next = TREE_CHAIN (next);
1184 }
1185 return head;
1186 }
1187
1188 \f
1189 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1190 INTEGER_CST with value CST and type TYPE. */
1191
1192 static unsigned int
1193 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1194 {
1195 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1196 /* We need an extra zero HWI if CST is an unsigned integer with its
1197 upper bit set, and if CST occupies a whole number of HWIs. */
1198 if (TYPE_UNSIGNED (type)
1199 && wi::neg_p (cst)
1200 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1201 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1202 return cst.get_len ();
1203 }
1204
1205 /* Return a new INTEGER_CST with value CST and type TYPE. */
1206
1207 static tree
1208 build_new_int_cst (tree type, const wide_int &cst)
1209 {
1210 unsigned int len = cst.get_len ();
1211 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1212 tree nt = make_int_cst (len, ext_len);
1213
1214 if (len < ext_len)
1215 {
1216 --ext_len;
1217 TREE_INT_CST_ELT (nt, ext_len) = 0;
1218 for (unsigned int i = len; i < ext_len; ++i)
1219 TREE_INT_CST_ELT (nt, i) = -1;
1220 }
1221 else if (TYPE_UNSIGNED (type)
1222 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1223 {
1224 len--;
1225 TREE_INT_CST_ELT (nt, len)
1226 = zext_hwi (cst.elt (len),
1227 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1228 }
1229
1230 for (unsigned int i = 0; i < len; i++)
1231 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1232 TREE_TYPE (nt) = type;
1233 return nt;
1234 }
1235
1236 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1237
1238 tree
1239 build_int_cst (tree type, HOST_WIDE_INT low)
1240 {
1241 /* Support legacy code. */
1242 if (!type)
1243 type = integer_type_node;
1244
1245 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1246 }
1247
1248 tree
1249 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1250 {
1251 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1252 }
1253
1254 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1255
1256 tree
1257 build_int_cst_type (tree type, HOST_WIDE_INT low)
1258 {
1259 gcc_assert (type);
1260 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1261 }
1262
1263 /* Constructs tree in type TYPE from with value given by CST. Signedness
1264 of CST is assumed to be the same as the signedness of TYPE. */
1265
1266 tree
1267 double_int_to_tree (tree type, double_int cst)
1268 {
1269 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1270 }
1271
1272 /* We force the wide_int CST to the range of the type TYPE by sign or
1273 zero extending it. OVERFLOWABLE indicates if we are interested in
1274 overflow of the value, when >0 we are only interested in signed
1275 overflow, for <0 we are interested in any overflow. OVERFLOWED
1276 indicates whether overflow has already occurred. CONST_OVERFLOWED
1277 indicates whether constant overflow has already occurred. We force
1278 T's value to be within range of T's type (by setting to 0 or 1 all
1279 the bits outside the type's range). We set TREE_OVERFLOWED if,
1280 OVERFLOWED is nonzero,
1281 or OVERFLOWABLE is >0 and signed overflow occurs
1282 or OVERFLOWABLE is <0 and any overflow occurs
1283 We return a new tree node for the extended wide_int. The node
1284 is shared if no overflow flags are set. */
1285
1286
1287 tree
1288 force_fit_type (tree type, const wide_int_ref &cst,
1289 int overflowable, bool overflowed)
1290 {
1291 signop sign = TYPE_SIGN (type);
1292
1293 /* If we need to set overflow flags, return a new unshared node. */
1294 if (overflowed || !wi::fits_to_tree_p (cst, type))
1295 {
1296 if (overflowed
1297 || overflowable < 0
1298 || (overflowable > 0 && sign == SIGNED))
1299 {
1300 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1301 tree t = build_new_int_cst (type, tmp);
1302 TREE_OVERFLOW (t) = 1;
1303 return t;
1304 }
1305 }
1306
1307 /* Else build a shared node. */
1308 return wide_int_to_tree (type, cst);
1309 }
1310
1311 /* These are the hash table functions for the hash table of INTEGER_CST
1312 nodes of a sizetype. */
1313
1314 /* Return the hash code code X, an INTEGER_CST. */
1315
1316 hashval_t
1317 int_cst_hasher::hash (tree x)
1318 {
1319 const_tree const t = x;
1320 hashval_t code = TYPE_UID (TREE_TYPE (t));
1321 int i;
1322
1323 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1324 code ^= TREE_INT_CST_ELT (t, i);
1325
1326 return code;
1327 }
1328
1329 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1330 is the same as that given by *Y, which is the same. */
1331
1332 bool
1333 int_cst_hasher::equal (tree x, tree y)
1334 {
1335 const_tree const xt = x;
1336 const_tree const yt = y;
1337
1338 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1339 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1340 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1341 return false;
1342
1343 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1344 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1345 return false;
1346
1347 return true;
1348 }
1349
1350 /* Create an INT_CST node of TYPE and value CST.
1351 The returned node is always shared. For small integers we use a
1352 per-type vector cache, for larger ones we use a single hash table.
1353 The value is extended from its precision according to the sign of
1354 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1355 the upper bits and ensures that hashing and value equality based
1356 upon the underlying HOST_WIDE_INTs works without masking. */
1357
1358 tree
1359 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1360 {
1361 tree t;
1362 int ix = -1;
1363 int limit = 0;
1364
1365 gcc_assert (type);
1366 unsigned int prec = TYPE_PRECISION (type);
1367 signop sgn = TYPE_SIGN (type);
1368
1369 /* Verify that everything is canonical. */
1370 int l = pcst.get_len ();
1371 if (l > 1)
1372 {
1373 if (pcst.elt (l - 1) == 0)
1374 gcc_checking_assert (pcst.elt (l - 2) < 0);
1375 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1376 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1377 }
1378
1379 wide_int cst = wide_int::from (pcst, prec, sgn);
1380 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1381
1382 if (ext_len == 1)
1383 {
1384 /* We just need to store a single HOST_WIDE_INT. */
1385 HOST_WIDE_INT hwi;
1386 if (TYPE_UNSIGNED (type))
1387 hwi = cst.to_uhwi ();
1388 else
1389 hwi = cst.to_shwi ();
1390
1391 switch (TREE_CODE (type))
1392 {
1393 case NULLPTR_TYPE:
1394 gcc_assert (hwi == 0);
1395 /* Fallthru. */
1396
1397 case POINTER_TYPE:
1398 case REFERENCE_TYPE:
1399 case POINTER_BOUNDS_TYPE:
1400 /* Cache NULL pointer and zero bounds. */
1401 if (hwi == 0)
1402 {
1403 limit = 1;
1404 ix = 0;
1405 }
1406 break;
1407
1408 case BOOLEAN_TYPE:
1409 /* Cache false or true. */
1410 limit = 2;
1411 if (hwi < 2)
1412 ix = hwi;
1413 break;
1414
1415 case INTEGER_TYPE:
1416 case OFFSET_TYPE:
1417 if (TYPE_SIGN (type) == UNSIGNED)
1418 {
1419 /* Cache [0, N). */
1420 limit = INTEGER_SHARE_LIMIT;
1421 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1422 ix = hwi;
1423 }
1424 else
1425 {
1426 /* Cache [-1, N). */
1427 limit = INTEGER_SHARE_LIMIT + 1;
1428 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1429 ix = hwi + 1;
1430 }
1431 break;
1432
1433 case ENUMERAL_TYPE:
1434 break;
1435
1436 default:
1437 gcc_unreachable ();
1438 }
1439
1440 if (ix >= 0)
1441 {
1442 /* Look for it in the type's vector of small shared ints. */
1443 if (!TYPE_CACHED_VALUES_P (type))
1444 {
1445 TYPE_CACHED_VALUES_P (type) = 1;
1446 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1447 }
1448
1449 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1450 if (t)
1451 /* Make sure no one is clobbering the shared constant. */
1452 gcc_checking_assert (TREE_TYPE (t) == type
1453 && TREE_INT_CST_NUNITS (t) == 1
1454 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1455 && TREE_INT_CST_EXT_NUNITS (t) == 1
1456 && TREE_INT_CST_ELT (t, 0) == hwi);
1457 else
1458 {
1459 /* Create a new shared int. */
1460 t = build_new_int_cst (type, cst);
1461 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1462 }
1463 }
1464 else
1465 {
1466 /* Use the cache of larger shared ints, using int_cst_node as
1467 a temporary. */
1468
1469 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1470 TREE_TYPE (int_cst_node) = type;
1471
1472 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1473 t = *slot;
1474 if (!t)
1475 {
1476 /* Insert this one into the hash table. */
1477 t = int_cst_node;
1478 *slot = t;
1479 /* Make a new node for next time round. */
1480 int_cst_node = make_int_cst (1, 1);
1481 }
1482 }
1483 }
1484 else
1485 {
1486 /* The value either hashes properly or we drop it on the floor
1487 for the gc to take care of. There will not be enough of them
1488 to worry about. */
1489
1490 tree nt = build_new_int_cst (type, cst);
1491 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1492 t = *slot;
1493 if (!t)
1494 {
1495 /* Insert this one into the hash table. */
1496 t = nt;
1497 *slot = t;
1498 }
1499 }
1500
1501 return t;
1502 }
1503
1504 void
1505 cache_integer_cst (tree t)
1506 {
1507 tree type = TREE_TYPE (t);
1508 int ix = -1;
1509 int limit = 0;
1510 int prec = TYPE_PRECISION (type);
1511
1512 gcc_assert (!TREE_OVERFLOW (t));
1513
1514 switch (TREE_CODE (type))
1515 {
1516 case NULLPTR_TYPE:
1517 gcc_assert (integer_zerop (t));
1518 /* Fallthru. */
1519
1520 case POINTER_TYPE:
1521 case REFERENCE_TYPE:
1522 /* Cache NULL pointer. */
1523 if (integer_zerop (t))
1524 {
1525 limit = 1;
1526 ix = 0;
1527 }
1528 break;
1529
1530 case BOOLEAN_TYPE:
1531 /* Cache false or true. */
1532 limit = 2;
1533 if (wi::ltu_p (t, 2))
1534 ix = TREE_INT_CST_ELT (t, 0);
1535 break;
1536
1537 case INTEGER_TYPE:
1538 case OFFSET_TYPE:
1539 if (TYPE_UNSIGNED (type))
1540 {
1541 /* Cache 0..N */
1542 limit = INTEGER_SHARE_LIMIT;
1543
1544 /* This is a little hokie, but if the prec is smaller than
1545 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1546 obvious test will not get the correct answer. */
1547 if (prec < HOST_BITS_PER_WIDE_INT)
1548 {
1549 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1550 ix = tree_to_uhwi (t);
1551 }
1552 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1553 ix = tree_to_uhwi (t);
1554 }
1555 else
1556 {
1557 /* Cache -1..N */
1558 limit = INTEGER_SHARE_LIMIT + 1;
1559
1560 if (integer_minus_onep (t))
1561 ix = 0;
1562 else if (!wi::neg_p (t))
1563 {
1564 if (prec < HOST_BITS_PER_WIDE_INT)
1565 {
1566 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1567 ix = tree_to_shwi (t) + 1;
1568 }
1569 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1570 ix = tree_to_shwi (t) + 1;
1571 }
1572 }
1573 break;
1574
1575 case ENUMERAL_TYPE:
1576 break;
1577
1578 default:
1579 gcc_unreachable ();
1580 }
1581
1582 if (ix >= 0)
1583 {
1584 /* Look for it in the type's vector of small shared ints. */
1585 if (!TYPE_CACHED_VALUES_P (type))
1586 {
1587 TYPE_CACHED_VALUES_P (type) = 1;
1588 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1589 }
1590
1591 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1592 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1593 }
1594 else
1595 {
1596 /* Use the cache of larger shared ints. */
1597 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1598 /* If there is already an entry for the number verify it's the
1599 same. */
1600 if (*slot)
1601 gcc_assert (wi::eq_p (tree (*slot), t));
1602 else
1603 /* Otherwise insert this one into the hash table. */
1604 *slot = t;
1605 }
1606 }
1607
1608
1609 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1610 and the rest are zeros. */
1611
1612 tree
1613 build_low_bits_mask (tree type, unsigned bits)
1614 {
1615 gcc_assert (bits <= TYPE_PRECISION (type));
1616
1617 return wide_int_to_tree (type, wi::mask (bits, false,
1618 TYPE_PRECISION (type)));
1619 }
1620
1621 /* Checks that X is integer constant that can be expressed in (unsigned)
1622 HOST_WIDE_INT without loss of precision. */
1623
1624 bool
1625 cst_and_fits_in_hwi (const_tree x)
1626 {
1627 if (TREE_CODE (x) != INTEGER_CST)
1628 return false;
1629
1630 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1631 return false;
1632
1633 return TREE_INT_CST_NUNITS (x) == 1;
1634 }
1635
1636 /* Build a newly constructed TREE_VEC node of length LEN. */
1637
1638 tree
1639 make_vector_stat (unsigned len MEM_STAT_DECL)
1640 {
1641 tree t;
1642 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1643
1644 record_node_allocation_statistics (VECTOR_CST, length);
1645
1646 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1647
1648 TREE_SET_CODE (t, VECTOR_CST);
1649 TREE_CONSTANT (t) = 1;
1650
1651 return t;
1652 }
1653
1654 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1655 are in a list pointed to by VALS. */
1656
1657 tree
1658 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1659 {
1660 int over = 0;
1661 unsigned cnt = 0;
1662 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1663 TREE_TYPE (v) = type;
1664
1665 /* Iterate through elements and check for overflow. */
1666 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1667 {
1668 tree value = vals[cnt];
1669
1670 VECTOR_CST_ELT (v, cnt) = value;
1671
1672 /* Don't crash if we get an address constant. */
1673 if (!CONSTANT_CLASS_P (value))
1674 continue;
1675
1676 over |= TREE_OVERFLOW (value);
1677 }
1678
1679 TREE_OVERFLOW (v) = over;
1680 return v;
1681 }
1682
1683 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1684 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1685
1686 tree
1687 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1688 {
1689 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1690 unsigned HOST_WIDE_INT idx;
1691 tree value;
1692
1693 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1694 vec[idx] = value;
1695 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1696 vec[idx] = build_zero_cst (TREE_TYPE (type));
1697
1698 return build_vector (type, vec);
1699 }
1700
1701 /* Build a vector of type VECTYPE where all the elements are SCs. */
1702 tree
1703 build_vector_from_val (tree vectype, tree sc)
1704 {
1705 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1706
1707 if (sc == error_mark_node)
1708 return sc;
1709
1710 /* Verify that the vector type is suitable for SC. Note that there
1711 is some inconsistency in the type-system with respect to restrict
1712 qualifications of pointers. Vector types always have a main-variant
1713 element type and the qualification is applied to the vector-type.
1714 So TREE_TYPE (vector-type) does not return a properly qualified
1715 vector element-type. */
1716 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1717 TREE_TYPE (vectype)));
1718
1719 if (CONSTANT_CLASS_P (sc))
1720 {
1721 tree *v = XALLOCAVEC (tree, nunits);
1722 for (i = 0; i < nunits; ++i)
1723 v[i] = sc;
1724 return build_vector (vectype, v);
1725 }
1726 else
1727 {
1728 vec<constructor_elt, va_gc> *v;
1729 vec_alloc (v, nunits);
1730 for (i = 0; i < nunits; ++i)
1731 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1732 return build_constructor (vectype, v);
1733 }
1734 }
1735
1736 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1737 are in the vec pointed to by VALS. */
1738 tree
1739 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1740 {
1741 tree c = make_node (CONSTRUCTOR);
1742 unsigned int i;
1743 constructor_elt *elt;
1744 bool constant_p = true;
1745 bool side_effects_p = false;
1746
1747 TREE_TYPE (c) = type;
1748 CONSTRUCTOR_ELTS (c) = vals;
1749
1750 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1751 {
1752 /* Mostly ctors will have elts that don't have side-effects, so
1753 the usual case is to scan all the elements. Hence a single
1754 loop for both const and side effects, rather than one loop
1755 each (with early outs). */
1756 if (!TREE_CONSTANT (elt->value))
1757 constant_p = false;
1758 if (TREE_SIDE_EFFECTS (elt->value))
1759 side_effects_p = true;
1760 }
1761
1762 TREE_SIDE_EFFECTS (c) = side_effects_p;
1763 TREE_CONSTANT (c) = constant_p;
1764
1765 return c;
1766 }
1767
1768 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1769 INDEX and VALUE. */
1770 tree
1771 build_constructor_single (tree type, tree index, tree value)
1772 {
1773 vec<constructor_elt, va_gc> *v;
1774 constructor_elt elt = {index, value};
1775
1776 vec_alloc (v, 1);
1777 v->quick_push (elt);
1778
1779 return build_constructor (type, v);
1780 }
1781
1782
1783 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1784 are in a list pointed to by VALS. */
1785 tree
1786 build_constructor_from_list (tree type, tree vals)
1787 {
1788 tree t;
1789 vec<constructor_elt, va_gc> *v = NULL;
1790
1791 if (vals)
1792 {
1793 vec_alloc (v, list_length (vals));
1794 for (t = vals; t; t = TREE_CHAIN (t))
1795 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1796 }
1797
1798 return build_constructor (type, v);
1799 }
1800
1801 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1802 of elements, provided as index/value pairs. */
1803
1804 tree
1805 build_constructor_va (tree type, int nelts, ...)
1806 {
1807 vec<constructor_elt, va_gc> *v = NULL;
1808 va_list p;
1809
1810 va_start (p, nelts);
1811 vec_alloc (v, nelts);
1812 while (nelts--)
1813 {
1814 tree index = va_arg (p, tree);
1815 tree value = va_arg (p, tree);
1816 CONSTRUCTOR_APPEND_ELT (v, index, value);
1817 }
1818 va_end (p);
1819 return build_constructor (type, v);
1820 }
1821
1822 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1823
1824 tree
1825 build_fixed (tree type, FIXED_VALUE_TYPE f)
1826 {
1827 tree v;
1828 FIXED_VALUE_TYPE *fp;
1829
1830 v = make_node (FIXED_CST);
1831 fp = ggc_alloc<fixed_value> ();
1832 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1833
1834 TREE_TYPE (v) = type;
1835 TREE_FIXED_CST_PTR (v) = fp;
1836 return v;
1837 }
1838
1839 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1840
1841 tree
1842 build_real (tree type, REAL_VALUE_TYPE d)
1843 {
1844 tree v;
1845 REAL_VALUE_TYPE *dp;
1846 int overflow = 0;
1847
1848 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1849 Consider doing it via real_convert now. */
1850
1851 v = make_node (REAL_CST);
1852 dp = ggc_alloc<real_value> ();
1853 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1854
1855 TREE_TYPE (v) = type;
1856 TREE_REAL_CST_PTR (v) = dp;
1857 TREE_OVERFLOW (v) = overflow;
1858 return v;
1859 }
1860
1861 /* Return a new REAL_CST node whose type is TYPE
1862 and whose value is the integer value of the INTEGER_CST node I. */
1863
1864 REAL_VALUE_TYPE
1865 real_value_from_int_cst (const_tree type, const_tree i)
1866 {
1867 REAL_VALUE_TYPE d;
1868
1869 /* Clear all bits of the real value type so that we can later do
1870 bitwise comparisons to see if two values are the same. */
1871 memset (&d, 0, sizeof d);
1872
1873 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1874 TYPE_SIGN (TREE_TYPE (i)));
1875 return d;
1876 }
1877
1878 /* Given a tree representing an integer constant I, return a tree
1879 representing the same value as a floating-point constant of type TYPE. */
1880
1881 tree
1882 build_real_from_int_cst (tree type, const_tree i)
1883 {
1884 tree v;
1885 int overflow = TREE_OVERFLOW (i);
1886
1887 v = build_real (type, real_value_from_int_cst (type, i));
1888
1889 TREE_OVERFLOW (v) |= overflow;
1890 return v;
1891 }
1892
1893 /* Return a newly constructed STRING_CST node whose value is
1894 the LEN characters at STR.
1895 Note that for a C string literal, LEN should include the trailing NUL.
1896 The TREE_TYPE is not initialized. */
1897
1898 tree
1899 build_string (int len, const char *str)
1900 {
1901 tree s;
1902 size_t length;
1903
1904 /* Do not waste bytes provided by padding of struct tree_string. */
1905 length = len + offsetof (struct tree_string, str) + 1;
1906
1907 record_node_allocation_statistics (STRING_CST, length);
1908
1909 s = (tree) ggc_internal_alloc (length);
1910
1911 memset (s, 0, sizeof (struct tree_typed));
1912 TREE_SET_CODE (s, STRING_CST);
1913 TREE_CONSTANT (s) = 1;
1914 TREE_STRING_LENGTH (s) = len;
1915 memcpy (s->string.str, str, len);
1916 s->string.str[len] = '\0';
1917
1918 return s;
1919 }
1920
1921 /* Return a newly constructed COMPLEX_CST node whose value is
1922 specified by the real and imaginary parts REAL and IMAG.
1923 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1924 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1925
1926 tree
1927 build_complex (tree type, tree real, tree imag)
1928 {
1929 tree t = make_node (COMPLEX_CST);
1930
1931 TREE_REALPART (t) = real;
1932 TREE_IMAGPART (t) = imag;
1933 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1934 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1935 return t;
1936 }
1937
1938 /* Return a constant of arithmetic type TYPE which is the
1939 multiplicative identity of the set TYPE. */
1940
1941 tree
1942 build_one_cst (tree type)
1943 {
1944 switch (TREE_CODE (type))
1945 {
1946 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1947 case POINTER_TYPE: case REFERENCE_TYPE:
1948 case OFFSET_TYPE:
1949 return build_int_cst (type, 1);
1950
1951 case REAL_TYPE:
1952 return build_real (type, dconst1);
1953
1954 case FIXED_POINT_TYPE:
1955 /* We can only generate 1 for accum types. */
1956 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1957 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1958
1959 case VECTOR_TYPE:
1960 {
1961 tree scalar = build_one_cst (TREE_TYPE (type));
1962
1963 return build_vector_from_val (type, scalar);
1964 }
1965
1966 case COMPLEX_TYPE:
1967 return build_complex (type,
1968 build_one_cst (TREE_TYPE (type)),
1969 build_zero_cst (TREE_TYPE (type)));
1970
1971 default:
1972 gcc_unreachable ();
1973 }
1974 }
1975
1976 /* Return an integer of type TYPE containing all 1's in as much precision as
1977 it contains, or a complex or vector whose subparts are such integers. */
1978
1979 tree
1980 build_all_ones_cst (tree type)
1981 {
1982 if (TREE_CODE (type) == COMPLEX_TYPE)
1983 {
1984 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1985 return build_complex (type, scalar, scalar);
1986 }
1987 else
1988 return build_minus_one_cst (type);
1989 }
1990
1991 /* Return a constant of arithmetic type TYPE which is the
1992 opposite of the multiplicative identity of the set TYPE. */
1993
1994 tree
1995 build_minus_one_cst (tree type)
1996 {
1997 switch (TREE_CODE (type))
1998 {
1999 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2000 case POINTER_TYPE: case REFERENCE_TYPE:
2001 case OFFSET_TYPE:
2002 return build_int_cst (type, -1);
2003
2004 case REAL_TYPE:
2005 return build_real (type, dconstm1);
2006
2007 case FIXED_POINT_TYPE:
2008 /* We can only generate 1 for accum types. */
2009 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2010 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2011 TYPE_MODE (type)));
2012
2013 case VECTOR_TYPE:
2014 {
2015 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2016
2017 return build_vector_from_val (type, scalar);
2018 }
2019
2020 case COMPLEX_TYPE:
2021 return build_complex (type,
2022 build_minus_one_cst (TREE_TYPE (type)),
2023 build_zero_cst (TREE_TYPE (type)));
2024
2025 default:
2026 gcc_unreachable ();
2027 }
2028 }
2029
2030 /* Build a zero constant of type TYPE. This is used by constructor folding
2031 and thus the constant should be represented in memory by
2032 zero(es). */
2033
2034 tree
2035 build_zero_cst (tree type)
2036 {
2037 switch (TREE_CODE (type))
2038 {
2039 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2040 case POINTER_TYPE: case REFERENCE_TYPE:
2041 case OFFSET_TYPE: case NULLPTR_TYPE:
2042 return build_int_cst (type, 0);
2043
2044 case REAL_TYPE:
2045 return build_real (type, dconst0);
2046
2047 case FIXED_POINT_TYPE:
2048 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2049
2050 case VECTOR_TYPE:
2051 {
2052 tree scalar = build_zero_cst (TREE_TYPE (type));
2053
2054 return build_vector_from_val (type, scalar);
2055 }
2056
2057 case COMPLEX_TYPE:
2058 {
2059 tree zero = build_zero_cst (TREE_TYPE (type));
2060
2061 return build_complex (type, zero, zero);
2062 }
2063
2064 default:
2065 if (!AGGREGATE_TYPE_P (type))
2066 return fold_convert (type, integer_zero_node);
2067 return build_constructor (type, NULL);
2068 }
2069 }
2070
2071
2072 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2073
2074 tree
2075 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2076 {
2077 tree t;
2078 size_t length = (offsetof (struct tree_binfo, base_binfos)
2079 + vec<tree, va_gc>::embedded_size (base_binfos));
2080
2081 record_node_allocation_statistics (TREE_BINFO, length);
2082
2083 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2084
2085 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2086
2087 TREE_SET_CODE (t, TREE_BINFO);
2088
2089 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2090
2091 return t;
2092 }
2093
2094 /* Create a CASE_LABEL_EXPR tree node and return it. */
2095
2096 tree
2097 build_case_label (tree low_value, tree high_value, tree label_decl)
2098 {
2099 tree t = make_node (CASE_LABEL_EXPR);
2100
2101 TREE_TYPE (t) = void_type_node;
2102 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2103
2104 CASE_LOW (t) = low_value;
2105 CASE_HIGH (t) = high_value;
2106 CASE_LABEL (t) = label_decl;
2107 CASE_CHAIN (t) = NULL_TREE;
2108
2109 return t;
2110 }
2111
2112 /* Return a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2113 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2114 The latter determines the length of the HOST_WIDE_INT vector. */
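/* Rough illustration (not normative): on a 64-bit host a value that fits in
   one HOST_WIDE_INT has LEN 1, while an unsigned constant whose uppermost
   bit is set may need an extra zero element once extended to a wider
   precision, so EXT_LEN can be LEN + 1.  */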
2115
2116 tree
2117 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2118 {
2119 tree t;
2120 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2121 + sizeof (struct tree_int_cst));
2122
2123 gcc_assert (len);
2124 record_node_allocation_statistics (INTEGER_CST, length);
2125
2126 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2127
2128 TREE_SET_CODE (t, INTEGER_CST);
2129 TREE_INT_CST_NUNITS (t) = len;
2130 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2131 /* to_offset can only be applied to trees that are offset_int-sized
2132 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2133 must be exactly the precision of offset_int and so LEN is correct. */
2134 if (ext_len <= OFFSET_INT_ELTS)
2135 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2136 else
2137 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2138
2139 TREE_CONSTANT (t) = 1;
2140
2141 return t;
2142 }
2143
2144 /* Return a newly constructed TREE_VEC node of length LEN. */
2145
2146 tree
2147 make_tree_vec_stat (int len MEM_STAT_DECL)
2148 {
2149 tree t;
2150 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2151
2152 record_node_allocation_statistics (TREE_VEC, length);
2153
2154 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2155
2156 TREE_SET_CODE (t, TREE_VEC);
2157 TREE_VEC_LENGTH (t) = len;
2158
2159 return t;
2160 }
2161
2162 /* Grow a TREE_VEC node to new length LEN. */
2163
2164 tree
2165 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2166 {
2167 gcc_assert (TREE_CODE (v) == TREE_VEC);
2168
2169 int oldlen = TREE_VEC_LENGTH (v);
2170 gcc_assert (len > oldlen);
2171
2172 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2173 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2174
2175 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2176
2177 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2178
2179 TREE_VEC_LENGTH (v) = len;
2180
2181 return v;
2182 }
2183 \f
2184 /* Return 1 if EXPR is the integer constant zero or a complex constant
2185 of zero. */
2186
2187 int
2188 integer_zerop (const_tree expr)
2189 {
2190 STRIP_NOPS (expr);
2191
2192 switch (TREE_CODE (expr))
2193 {
2194 case INTEGER_CST:
2195 return wi::eq_p (expr, 0);
2196 case COMPLEX_CST:
2197 return (integer_zerop (TREE_REALPART (expr))
2198 && integer_zerop (TREE_IMAGPART (expr)));
2199 case VECTOR_CST:
2200 {
2201 unsigned i;
2202 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2203 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2204 return false;
2205 return true;
2206 }
2207 default:
2208 return false;
2209 }
2210 }
2211
2212 /* Return 1 if EXPR is the integer constant one or the corresponding
2213 complex constant. */
2214
2215 int
2216 integer_onep (const_tree expr)
2217 {
2218 STRIP_NOPS (expr);
2219
2220 switch (TREE_CODE (expr))
2221 {
2222 case INTEGER_CST:
2223 return wi::eq_p (wi::to_widest (expr), 1);
2224 case COMPLEX_CST:
2225 return (integer_onep (TREE_REALPART (expr))
2226 && integer_zerop (TREE_IMAGPART (expr)));
2227 case VECTOR_CST:
2228 {
2229 unsigned i;
2230 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2231 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2232 return false;
2233 return true;
2234 }
2235 default:
2236 return false;
2237 }
2238 }
2239
2240 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2241 return 1 if every piece is the integer constant one. */
2242
2243 int
2244 integer_each_onep (const_tree expr)
2245 {
2246 STRIP_NOPS (expr);
2247
2248 if (TREE_CODE (expr) == COMPLEX_CST)
2249 return (integer_onep (TREE_REALPART (expr))
2250 && integer_onep (TREE_IMAGPART (expr)));
2251 else
2252 return integer_onep (expr);
2253 }
2254
2255 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2256 it contains, or a complex or vector whose subparts are such integers. */
2257
2258 int
2259 integer_all_onesp (const_tree expr)
2260 {
2261 STRIP_NOPS (expr);
2262
2263 if (TREE_CODE (expr) == COMPLEX_CST
2264 && integer_all_onesp (TREE_REALPART (expr))
2265 && integer_all_onesp (TREE_IMAGPART (expr)))
2266 return 1;
2267
2268 else if (TREE_CODE (expr) == VECTOR_CST)
2269 {
2270 unsigned i;
2271 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2272 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2273 return 0;
2274 return 1;
2275 }
2276
2277 else if (TREE_CODE (expr) != INTEGER_CST)
2278 return 0;
2279
2280 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2281 }
2282
2283 /* Return 1 if EXPR is the integer constant minus one. */
2284
2285 int
2286 integer_minus_onep (const_tree expr)
2287 {
2288 STRIP_NOPS (expr);
2289
2290 if (TREE_CODE (expr) == COMPLEX_CST)
2291 return (integer_all_onesp (TREE_REALPART (expr))
2292 && integer_zerop (TREE_IMAGPART (expr)));
2293 else
2294 return integer_all_onesp (expr);
2295 }
2296
2297 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2298 one bit on). */
2299
2300 int
2301 integer_pow2p (const_tree expr)
2302 {
2303 STRIP_NOPS (expr);
2304
2305 if (TREE_CODE (expr) == COMPLEX_CST
2306 && integer_pow2p (TREE_REALPART (expr))
2307 && integer_zerop (TREE_IMAGPART (expr)))
2308 return 1;
2309
2310 if (TREE_CODE (expr) != INTEGER_CST)
2311 return 0;
2312
2313 return wi::popcount (expr) == 1;
2314 }
2315
2316 /* Return 1 if EXPR is an integer constant other than zero or a
2317 complex constant other than zero. */
2318
2319 int
2320 integer_nonzerop (const_tree expr)
2321 {
2322 STRIP_NOPS (expr);
2323
2324 return ((TREE_CODE (expr) == INTEGER_CST
2325 && !wi::eq_p (expr, 0))
2326 || (TREE_CODE (expr) == COMPLEX_CST
2327 && (integer_nonzerop (TREE_REALPART (expr))
2328 || integer_nonzerop (TREE_IMAGPART (expr)))));
2329 }
2330
2331 /* Return 1 if EXPR is the integer constant one. For vector,
2332 return 1 if every piece is the integer constant minus one
2333 (representing the value TRUE). */
2334
2335 int
2336 integer_truep (const_tree expr)
2337 {
2338 STRIP_NOPS (expr);
2339
2340 if (TREE_CODE (expr) == VECTOR_CST)
2341 return integer_all_onesp (expr);
2342 return integer_onep (expr);
2343 }
2344
2345 /* Return 1 if EXPR is the fixed-point constant zero. */
2346
2347 int
2348 fixed_zerop (const_tree expr)
2349 {
2350 return (TREE_CODE (expr) == FIXED_CST
2351 && TREE_FIXED_CST (expr).data.is_zero ());
2352 }
2353
2354 /* Return the power of two represented by a tree node known to be a
2355 power of two. */
2356
2357 int
2358 tree_log2 (const_tree expr)
2359 {
2360 STRIP_NOPS (expr);
2361
2362 if (TREE_CODE (expr) == COMPLEX_CST)
2363 return tree_log2 (TREE_REALPART (expr));
2364
2365 return wi::exact_log2 (expr);
2366 }
2367
2368 /* Similar, but return the largest integer Y such that 2 ** Y is less
2369 than or equal to EXPR. */
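/* Illustrative values for both functions: for the constant 8 each returns 3;
   for 10, tree_log2 has no exact answer (wi::exact_log2 yields -1) while
   tree_floor_log2 returns 3.  */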
2370
2371 int
2372 tree_floor_log2 (const_tree expr)
2373 {
2374 STRIP_NOPS (expr);
2375
2376 if (TREE_CODE (expr) == COMPLEX_CST)
2377 return tree_log2 (TREE_REALPART (expr));
2378
2379 return wi::floor_log2 (expr);
2380 }
2381
2382 /* Return number of known trailing zero bits in EXPR, or, if the value of
2383 EXPR is known to be zero, the precision of its type. */
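/* A sketch of the idea (illustrative only): for an expression such as x * 8
   the MULT_EXPR case adds the trailing-zero counts of the operands, so at
   least 3 low bits are known to be zero; for x & -16 the BIT_AND_EXPR case
   takes the maximum, giving at least 4.  */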
2384
2385 unsigned int
2386 tree_ctz (const_tree expr)
2387 {
2388 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2389 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2390 return 0;
2391
2392 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2393 switch (TREE_CODE (expr))
2394 {
2395 case INTEGER_CST:
2396 ret1 = wi::ctz (expr);
2397 return MIN (ret1, prec);
2398 case SSA_NAME:
2399 ret1 = wi::ctz (get_nonzero_bits (expr));
2400 return MIN (ret1, prec);
2401 case PLUS_EXPR:
2402 case MINUS_EXPR:
2403 case BIT_IOR_EXPR:
2404 case BIT_XOR_EXPR:
2405 case MIN_EXPR:
2406 case MAX_EXPR:
2407 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2408 if (ret1 == 0)
2409 return ret1;
2410 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2411 return MIN (ret1, ret2);
2412 case POINTER_PLUS_EXPR:
2413 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2414 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2415 /* The second operand is sizetype, which could in theory be
2416 wider than the pointer's precision. Make sure we never
2417 return more than prec. */
2418 ret2 = MIN (ret2, prec);
2419 return MIN (ret1, ret2);
2420 case BIT_AND_EXPR:
2421 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2422 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2423 return MAX (ret1, ret2);
2424 case MULT_EXPR:
2425 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2426 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2427 return MIN (ret1 + ret2, prec);
2428 case LSHIFT_EXPR:
2429 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2430 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2431 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2432 {
2433 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2434 return MIN (ret1 + ret2, prec);
2435 }
2436 return ret1;
2437 case RSHIFT_EXPR:
2438 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2439 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2440 {
2441 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2442 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2443 if (ret1 > ret2)
2444 return ret1 - ret2;
2445 }
2446 return 0;
2447 case TRUNC_DIV_EXPR:
2448 case CEIL_DIV_EXPR:
2449 case FLOOR_DIV_EXPR:
2450 case ROUND_DIV_EXPR:
2451 case EXACT_DIV_EXPR:
2452 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2453 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2454 {
2455 int l = tree_log2 (TREE_OPERAND (expr, 1));
2456 if (l >= 0)
2457 {
2458 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2459 ret2 = l;
2460 if (ret1 > ret2)
2461 return ret1 - ret2;
2462 }
2463 }
2464 return 0;
2465 CASE_CONVERT:
2466 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2467 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2468 ret1 = prec;
2469 return MIN (ret1, prec);
2470 case SAVE_EXPR:
2471 return tree_ctz (TREE_OPERAND (expr, 0));
2472 case COND_EXPR:
2473 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2474 if (ret1 == 0)
2475 return 0;
2476 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2477 return MIN (ret1, ret2);
2478 case COMPOUND_EXPR:
2479 return tree_ctz (TREE_OPERAND (expr, 1));
2480 case ADDR_EXPR:
2481 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2482 if (ret1 > BITS_PER_UNIT)
2483 {
2484 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2485 return MIN (ret1, prec);
2486 }
2487 return 0;
2488 default:
2489 return 0;
2490 }
2491 }
2492
2493 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2494 decimal float constants, so don't return 1 for them. */
2495
2496 int
2497 real_zerop (const_tree expr)
2498 {
2499 STRIP_NOPS (expr);
2500
2501 switch (TREE_CODE (expr))
2502 {
2503 case REAL_CST:
2504 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2505 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2506 case COMPLEX_CST:
2507 return real_zerop (TREE_REALPART (expr))
2508 && real_zerop (TREE_IMAGPART (expr));
2509 case VECTOR_CST:
2510 {
2511 unsigned i;
2512 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2513 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2514 return false;
2515 return true;
2516 }
2517 default:
2518 return false;
2519 }
2520 }
2521
2522 /* Return 1 if EXPR is the real constant one in real or complex form.
2523 Trailing zeroes matter for decimal float constants, so don't return
2524 1 for them. */
2525
2526 int
2527 real_onep (const_tree expr)
2528 {
2529 STRIP_NOPS (expr);
2530
2531 switch (TREE_CODE (expr))
2532 {
2533 case REAL_CST:
2534 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2535 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2536 case COMPLEX_CST:
2537 return real_onep (TREE_REALPART (expr))
2538 && real_zerop (TREE_IMAGPART (expr));
2539 case VECTOR_CST:
2540 {
2541 unsigned i;
2542 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2543 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2544 return false;
2545 return true;
2546 }
2547 default:
2548 return false;
2549 }
2550 }
2551
2552 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2553 matter for decimal float constants, so don't return 1 for them. */
2554
2555 int
2556 real_minus_onep (const_tree expr)
2557 {
2558 STRIP_NOPS (expr);
2559
2560 switch (TREE_CODE (expr))
2561 {
2562 case REAL_CST:
2563 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2564 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2565 case COMPLEX_CST:
2566 return real_minus_onep (TREE_REALPART (expr))
2567 && real_zerop (TREE_IMAGPART (expr));
2568 case VECTOR_CST:
2569 {
2570 unsigned i;
2571 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2572 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2573 return false;
2574 return true;
2575 }
2576 default:
2577 return false;
2578 }
2579 }
2580
2581 /* Nonzero if EXP is a constant or a cast of a constant. */
2582
2583 int
2584 really_constant_p (const_tree exp)
2585 {
2586 /* This is not quite the same as STRIP_NOPS. It does more. */
2587 while (CONVERT_EXPR_P (exp)
2588 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2589 exp = TREE_OPERAND (exp, 0);
2590 return TREE_CONSTANT (exp);
2591 }
2592 \f
2593 /* Return first list element whose TREE_VALUE is ELEM.
2594 Return 0 if ELEM is not in LIST. */
2595
2596 tree
2597 value_member (tree elem, tree list)
2598 {
2599 while (list)
2600 {
2601 if (elem == TREE_VALUE (list))
2602 return list;
2603 list = TREE_CHAIN (list);
2604 }
2605 return NULL_TREE;
2606 }
2607
2608 /* Return first list element whose TREE_PURPOSE is ELEM.
2609 Return 0 if ELEM is not in LIST. */
2610
2611 tree
2612 purpose_member (const_tree elem, tree list)
2613 {
2614 while (list)
2615 {
2616 if (elem == TREE_PURPOSE (list))
2617 return list;
2618 list = TREE_CHAIN (list);
2619 }
2620 return NULL_TREE;
2621 }
2622
2623 /* Return true if ELEM is in V. */
2624
2625 bool
2626 vec_member (const_tree elem, vec<tree, va_gc> *v)
2627 {
2628 unsigned ix;
2629 tree t;
2630 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2631 if (elem == t)
2632 return true;
2633 return false;
2634 }
2635
2636 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2637 NULL_TREE. */
2638
2639 tree
2640 chain_index (int idx, tree chain)
2641 {
2642 for (; chain && idx > 0; --idx)
2643 chain = TREE_CHAIN (chain);
2644 return chain;
2645 }
2646
2647 /* Return nonzero if ELEM is part of the chain CHAIN. */
2648
2649 int
2650 chain_member (const_tree elem, const_tree chain)
2651 {
2652 while (chain)
2653 {
2654 if (elem == chain)
2655 return 1;
2656 chain = DECL_CHAIN (chain);
2657 }
2658
2659 return 0;
2660 }
2661
2662 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2663 We expect a null pointer to mark the end of the chain.
2664 This is the Lisp primitive `length'. */
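/* When tree checking is enabled, the loop below also advances a second
   pointer Q at half the speed of P; if the chain is circular, P eventually
   catches up with Q and the assertion fires instead of looping forever.  */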
2665
2666 int
2667 list_length (const_tree t)
2668 {
2669 const_tree p = t;
2670 #ifdef ENABLE_TREE_CHECKING
2671 const_tree q = t;
2672 #endif
2673 int len = 0;
2674
2675 while (p)
2676 {
2677 p = TREE_CHAIN (p);
2678 #ifdef ENABLE_TREE_CHECKING
2679 if (len % 2)
2680 q = TREE_CHAIN (q);
2681 gcc_assert (p != q);
2682 #endif
2683 len++;
2684 }
2685
2686 return len;
2687 }
2688
2689 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2690 UNION_TYPE TYPE, or NULL_TREE if none. */
2691
2692 tree
2693 first_field (const_tree type)
2694 {
2695 tree t = TYPE_FIELDS (type);
2696 while (t && TREE_CODE (t) != FIELD_DECL)
2697 t = TREE_CHAIN (t);
2698 return t;
2699 }
2700
2701 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2702 by modifying the last node in chain 1 to point to chain 2.
2703 This is the Lisp primitive `nconc'. */
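/* For example (illustrative): chaining the list (a, b) onto (c) yields the
   single chain (a, b, c) and returns its head a; the checking code below
   helps catch the cycle that would result from chaining a list onto
   itself.  */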
2704
2705 tree
2706 chainon (tree op1, tree op2)
2707 {
2708 tree t1;
2709
2710 if (!op1)
2711 return op2;
2712 if (!op2)
2713 return op1;
2714
2715 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2716 continue;
2717 TREE_CHAIN (t1) = op2;
2718
2719 #ifdef ENABLE_TREE_CHECKING
2720 {
2721 tree t2;
2722 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2723 gcc_assert (t2 != t1);
2724 }
2725 #endif
2726
2727 return op1;
2728 }
2729
2730 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2731
2732 tree
2733 tree_last (tree chain)
2734 {
2735 tree next;
2736 if (chain)
2737 while ((next = TREE_CHAIN (chain)))
2738 chain = next;
2739 return chain;
2740 }
2741
2742 /* Reverse the order of elements in the chain T,
2743 and return the new head of the chain (old last element). */
2744
2745 tree
2746 nreverse (tree t)
2747 {
2748 tree prev = 0, decl, next;
2749 for (decl = t; decl; decl = next)
2750 {
2751 /* We shouldn't be using this function to reverse BLOCK chains; we
2752 have blocks_nreverse for that. */
2753 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2754 next = TREE_CHAIN (decl);
2755 TREE_CHAIN (decl) = prev;
2756 prev = decl;
2757 }
2758 return prev;
2759 }
2760 \f
2761 /* Return a newly created TREE_LIST node whose
2762 purpose and value fields are PARM and VALUE. */
2763
2764 tree
2765 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2766 {
2767 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2768 TREE_PURPOSE (t) = parm;
2769 TREE_VALUE (t) = value;
2770 return t;
2771 }
2772
2773 /* Build a chain of TREE_LIST nodes from a vector. */
2774
2775 tree
2776 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2777 {
2778 tree ret = NULL_TREE;
2779 tree *pp = &ret;
2780 unsigned int i;
2781 tree t;
2782 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2783 {
2784 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2785 pp = &TREE_CHAIN (*pp);
2786 }
2787 return ret;
2788 }
2789
2790 /* Return a newly created TREE_LIST node whose
2791 purpose and value fields are PURPOSE and VALUE
2792 and whose TREE_CHAIN is CHAIN. */
2793
2794 tree
2795 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2796 {
2797 tree node;
2798
2799 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2800 memset (node, 0, sizeof (struct tree_common));
2801
2802 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2803
2804 TREE_SET_CODE (node, TREE_LIST);
2805 TREE_CHAIN (node) = chain;
2806 TREE_PURPOSE (node) = purpose;
2807 TREE_VALUE (node) = value;
2808 return node;
2809 }
2810
2811 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2812 trees. */
2813
2814 vec<tree, va_gc> *
2815 ctor_to_vec (tree ctor)
2816 {
2817 vec<tree, va_gc> *vec;
2818 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2819 unsigned int ix;
2820 tree val;
2821
2822 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2823 vec->quick_push (val);
2824
2825 return vec;
2826 }
2827 \f
2828 /* Return the size nominally occupied by an object of type TYPE
2829 when it resides in memory. The value is measured in units of bytes,
2830 and its data type is that normally used for type sizes
2831 (which is the first type created by make_signed_type or
2832 make_unsigned_type). */
2833
2834 tree
2835 size_in_bytes (const_tree type)
2836 {
2837 tree t;
2838
2839 if (type == error_mark_node)
2840 return integer_zero_node;
2841
2842 type = TYPE_MAIN_VARIANT (type);
2843 t = TYPE_SIZE_UNIT (type);
2844
2845 if (t == 0)
2846 {
2847 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2848 return size_zero_node;
2849 }
2850
2851 return t;
2852 }
2853
2854 /* Return the size of TYPE (in bytes) as a wide integer
2855 or return -1 if the size can vary or is larger than an integer. */
2856
2857 HOST_WIDE_INT
2858 int_size_in_bytes (const_tree type)
2859 {
2860 tree t;
2861
2862 if (type == error_mark_node)
2863 return 0;
2864
2865 type = TYPE_MAIN_VARIANT (type);
2866 t = TYPE_SIZE_UNIT (type);
2867
2868 if (t && tree_fits_uhwi_p (t))
2869 return TREE_INT_CST_LOW (t);
2870 else
2871 return -1;
2872 }
2873
2874 /* Return the maximum size of TYPE (in bytes) as a wide integer
2875 or return -1 if the size can vary or is larger than an integer. */
2876
2877 HOST_WIDE_INT
2878 max_int_size_in_bytes (const_tree type)
2879 {
2880 HOST_WIDE_INT size = -1;
2881 tree size_tree;
2882
2883 /* If this is an array type, check for a possible MAX_SIZE attached. */
2884
2885 if (TREE_CODE (type) == ARRAY_TYPE)
2886 {
2887 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2888
2889 if (size_tree && tree_fits_uhwi_p (size_tree))
2890 size = tree_to_uhwi (size_tree);
2891 }
2892
2893 /* If we still haven't been able to get a size, see if the language
2894 can compute a maximum size. */
2895
2896 if (size == -1)
2897 {
2898 size_tree = lang_hooks.types.max_size (type);
2899
2900 if (size_tree && tree_fits_uhwi_p (size_tree))
2901 size = tree_to_uhwi (size_tree);
2902 }
2903
2904 return size;
2905 }
2906 \f
2907 /* Return the bit position of FIELD, in bits from the start of the record.
2908 This is a tree of type bitsizetype. */
2909
2910 tree
2911 bit_position (const_tree field)
2912 {
2913 return bit_from_pos (DECL_FIELD_OFFSET (field),
2914 DECL_FIELD_BIT_OFFSET (field));
2915 }
2916 \f
2917 /* Return the byte position of FIELD, in bytes from the start of the record.
2918 This is a tree of type sizetype. */
2919
2920 tree
2921 byte_position (const_tree field)
2922 {
2923 return byte_from_pos (DECL_FIELD_OFFSET (field),
2924 DECL_FIELD_BIT_OFFSET (field));
2925 }
2926
2927 /* Likewise, but return as an integer. It must be representable in
2928 that way (since it could be a signed value, we don't have the
2929 option of returning -1 like int_size_in_bytes can). */
2930
2931 HOST_WIDE_INT
2932 int_byte_position (const_tree field)
2933 {
2934 return tree_to_shwi (byte_position (field));
2935 }
2936 \f
2937 /* Return the strictest alignment, in bits, that T is known to have. */
2938
2939 unsigned int
2940 expr_align (const_tree t)
2941 {
2942 unsigned int align0, align1;
2943
2944 switch (TREE_CODE (t))
2945 {
2946 CASE_CONVERT: case NON_LVALUE_EXPR:
2947 /* If we have conversions, we know that the alignment of the
2948 object must meet each of the alignments of the types. */
2949 align0 = expr_align (TREE_OPERAND (t, 0));
2950 align1 = TYPE_ALIGN (TREE_TYPE (t));
2951 return MAX (align0, align1);
2952
2953 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2954 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2955 case CLEANUP_POINT_EXPR:
2956 /* These don't change the alignment of an object. */
2957 return expr_align (TREE_OPERAND (t, 0));
2958
2959 case COND_EXPR:
2960 /* The best we can do is say that the alignment is the least aligned
2961 of the two arms. */
2962 align0 = expr_align (TREE_OPERAND (t, 1));
2963 align1 = expr_align (TREE_OPERAND (t, 2));
2964 return MIN (align0, align1);
2965
2966 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2967 meaningfully, it's always 1. */
2968 case LABEL_DECL: case CONST_DECL:
2969 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2970 case FUNCTION_DECL:
2971 gcc_assert (DECL_ALIGN (t) != 0);
2972 return DECL_ALIGN (t);
2973
2974 default:
2975 break;
2976 }
2977
2978 /* Otherwise take the alignment from that of the type. */
2979 return TYPE_ALIGN (TREE_TYPE (t));
2980 }
2981 \f
2982 /* Return, as a tree node, the number of elements for TYPE (which is an
2983 ARRAY_TYPE) minus one. This counts only elements of the top array. */
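/* Illustrative example: for the C type int[10] the domain is [0, 9], so the
   result is the INTEGER_CST 9; callers that want the element count add one
   themselves.  */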
2984
2985 tree
2986 array_type_nelts (const_tree type)
2987 {
2988 tree index_type, min, max;
2989
2990 /* If they did it with unspecified bounds, then we should have already
2991 given an error about it before we got here. */
2992 if (! TYPE_DOMAIN (type))
2993 return error_mark_node;
2994
2995 index_type = TYPE_DOMAIN (type);
2996 min = TYPE_MIN_VALUE (index_type);
2997 max = TYPE_MAX_VALUE (index_type);
2998
2999 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3000 if (!max)
3001 return error_mark_node;
3002
3003 return (integer_zerop (min)
3004 ? max
3005 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3006 }
3007 \f
3008 /* If arg is static -- a reference to an object in static storage -- then
3009 return the object. This is not the same as the C meaning of `static'.
3010 If arg isn't static, return NULL. */
3011
3012 tree
3013 staticp (tree arg)
3014 {
3015 switch (TREE_CODE (arg))
3016 {
3017 case FUNCTION_DECL:
3018 /* Nested functions are static, even though taking their address will
3019 involve a trampoline as we unnest the nested function and create
3020 the trampoline on the tree level. */
3021 return arg;
3022
3023 case VAR_DECL:
3024 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3025 && ! DECL_THREAD_LOCAL_P (arg)
3026 && ! DECL_DLLIMPORT_P (arg)
3027 ? arg : NULL);
3028
3029 case CONST_DECL:
3030 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3031 ? arg : NULL);
3032
3033 case CONSTRUCTOR:
3034 return TREE_STATIC (arg) ? arg : NULL;
3035
3036 case LABEL_DECL:
3037 case STRING_CST:
3038 return arg;
3039
3040 case COMPONENT_REF:
3041 /* If the thing being referenced is not a field, then it is
3042 something language specific. */
3043 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3044
3045 /* If we are referencing a bitfield, we can't evaluate an
3046 ADDR_EXPR at compile time and so it isn't a constant. */
3047 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3048 return NULL;
3049
3050 return staticp (TREE_OPERAND (arg, 0));
3051
3052 case BIT_FIELD_REF:
3053 return NULL;
3054
3055 case INDIRECT_REF:
3056 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3057
3058 case ARRAY_REF:
3059 case ARRAY_RANGE_REF:
3060 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3061 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3062 return staticp (TREE_OPERAND (arg, 0));
3063 else
3064 return NULL;
3065
3066 case COMPOUND_LITERAL_EXPR:
3067 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3068
3069 default:
3070 return NULL;
3071 }
3072 }
3073
3074 \f
3075
3076
3077 /* Return whether OP is a DECL whose address is function-invariant. */
3078
3079 bool
3080 decl_address_invariant_p (const_tree op)
3081 {
3082 /* The conditions below are slightly less strict than the ones in
3083 staticp. */
3084
3085 switch (TREE_CODE (op))
3086 {
3087 case PARM_DECL:
3088 case RESULT_DECL:
3089 case LABEL_DECL:
3090 case FUNCTION_DECL:
3091 return true;
3092
3093 case VAR_DECL:
3094 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3095 || DECL_THREAD_LOCAL_P (op)
3096 || DECL_CONTEXT (op) == current_function_decl
3097 || decl_function_context (op) == current_function_decl)
3098 return true;
3099 break;
3100
3101 case CONST_DECL:
3102 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3103 || decl_function_context (op) == current_function_decl)
3104 return true;
3105 break;
3106
3107 default:
3108 break;
3109 }
3110
3111 return false;
3112 }
3113
3114 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3115
3116 bool
3117 decl_address_ip_invariant_p (const_tree op)
3118 {
3119 /* The conditions below are slightly less strict than the ones in
3120 staticp. */
3121
3122 switch (TREE_CODE (op))
3123 {
3124 case LABEL_DECL:
3125 case FUNCTION_DECL:
3126 case STRING_CST:
3127 return true;
3128
3129 case VAR_DECL:
3130 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3131 && !DECL_DLLIMPORT_P (op))
3132 || DECL_THREAD_LOCAL_P (op))
3133 return true;
3134 break;
3135
3136 case CONST_DECL:
3137 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3138 return true;
3139 break;
3140
3141 default:
3142 break;
3143 }
3144
3145 return false;
3146 }
3147
3148
3149 /* Return true if T is function-invariant (internal function, does
3150 not handle arithmetic; that's handled in skip_simple_arithmetic and
3151 tree_invariant_p). */
3152
3153 static bool tree_invariant_p (tree t);
3154
3155 static bool
3156 tree_invariant_p_1 (tree t)
3157 {
3158 tree op;
3159
3160 if (TREE_CONSTANT (t)
3161 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3162 return true;
3163
3164 switch (TREE_CODE (t))
3165 {
3166 case SAVE_EXPR:
3167 return true;
3168
3169 case ADDR_EXPR:
3170 op = TREE_OPERAND (t, 0);
3171 while (handled_component_p (op))
3172 {
3173 switch (TREE_CODE (op))
3174 {
3175 case ARRAY_REF:
3176 case ARRAY_RANGE_REF:
3177 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3178 || TREE_OPERAND (op, 2) != NULL_TREE
3179 || TREE_OPERAND (op, 3) != NULL_TREE)
3180 return false;
3181 break;
3182
3183 case COMPONENT_REF:
3184 if (TREE_OPERAND (op, 2) != NULL_TREE)
3185 return false;
3186 break;
3187
3188 default:;
3189 }
3190 op = TREE_OPERAND (op, 0);
3191 }
3192
3193 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3194
3195 default:
3196 break;
3197 }
3198
3199 return false;
3200 }
3201
3202 /* Return true if T is function-invariant. */
3203
3204 static bool
3205 tree_invariant_p (tree t)
3206 {
3207 tree inner = skip_simple_arithmetic (t);
3208 return tree_invariant_p_1 (inner);
3209 }
3210
3211 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3212 Do this to any expression which may be used in more than one place,
3213 but must be evaluated only once.
3214
3215 Normally, expand_expr would reevaluate the expression each time.
3216 Calling save_expr produces something that is evaluated and recorded
3217 the first time expand_expr is called on it. Subsequent calls to
3218 expand_expr just reuse the recorded value.
3219
3220 The call to expand_expr that generates code that actually computes
3221 the value is the first call *at compile time*. Subsequent calls
3222 *at compile time* generate code to use the saved value.
3223 This produces correct result provided that *at run time* control
3224 always flows through the insns made by the first expand_expr
3225 before reaching the other places where the save_expr was evaluated.
3226 You, the caller of save_expr, must make sure this is so.
3227
3228 Constants, and certain read-only nodes, are returned with no
3229 SAVE_EXPR because that is safe. Expressions containing placeholders
3230 are not touched; see tree.def for an explanation of what these
3231 are used for. */
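/* A hedged usage sketch: a front end computing an absolute value as a
   conditional expression can call save_expr on the operand so that both the
   comparison and the selected arm reuse one evaluation, subject to the
   control-flow caveat described above.  */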
3232
3233 tree
3234 save_expr (tree expr)
3235 {
3236 tree t = fold (expr);
3237 tree inner;
3238
3239 /* If the tree evaluates to a constant, then we don't want to hide that
3240 fact (i.e. this allows further folding, and direct checks for constants).
3241 However, a read-only object that has side effects cannot be bypassed.
3242 Since it is no problem to reevaluate literals, we just return the
3243 literal node. */
3244 inner = skip_simple_arithmetic (t);
3245 if (TREE_CODE (inner) == ERROR_MARK)
3246 return inner;
3247
3248 if (tree_invariant_p_1 (inner))
3249 return t;
3250
3251 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3252 it means that the size or offset of some field of an object depends on
3253 the value within another field.
3254
3255 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3256 and some variable since it would then need to be both evaluated once and
3257 evaluated more than once. Front ends must ensure this case cannot
3258 happen by surrounding any such subexpressions in their own SAVE_EXPR
3259 and forcing evaluation at the proper time. */
3260 if (contains_placeholder_p (inner))
3261 return t;
3262
3263 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3264 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3265
3266 /* This expression might be placed ahead of a jump to ensure that the
3267 value was computed on both sides of the jump. So make sure it isn't
3268 eliminated as dead. */
3269 TREE_SIDE_EFFECTS (t) = 1;
3270 return t;
3271 }
3272
3273 /* Look inside EXPR into any simple arithmetic operations. Return the
3274 outermost non-arithmetic or non-invariant node. */
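/* For instance (illustrative): given (SAVE_EXPR <x> + 4) * 2, this peels off
   the outer multiplication by the invariant 2 and the addition of the
   invariant 4 and returns the SAVE_EXPR itself.  */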
3275
3276 tree
3277 skip_simple_arithmetic (tree expr)
3278 {
3279 /* We don't care about whether this can be used as an lvalue in this
3280 context. */
3281 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3282 expr = TREE_OPERAND (expr, 0);
3283
3284 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3285 a constant, it will be more efficient to not make another SAVE_EXPR since
3286 it will allow better simplification and GCSE will be able to merge the
3287 computations if they actually occur. */
3288 while (true)
3289 {
3290 if (UNARY_CLASS_P (expr))
3291 expr = TREE_OPERAND (expr, 0);
3292 else if (BINARY_CLASS_P (expr))
3293 {
3294 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3295 expr = TREE_OPERAND (expr, 0);
3296 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3297 expr = TREE_OPERAND (expr, 1);
3298 else
3299 break;
3300 }
3301 else
3302 break;
3303 }
3304
3305 return expr;
3306 }
3307
3308 /* Look inside EXPR into simple arithmetic operations involving constants.
3309 Return the outermost non-arithmetic or non-constant node. */
3310
3311 tree
3312 skip_simple_constant_arithmetic (tree expr)
3313 {
3314 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3315 expr = TREE_OPERAND (expr, 0);
3316
3317 while (true)
3318 {
3319 if (UNARY_CLASS_P (expr))
3320 expr = TREE_OPERAND (expr, 0);
3321 else if (BINARY_CLASS_P (expr))
3322 {
3323 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3324 expr = TREE_OPERAND (expr, 0);
3325 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3326 expr = TREE_OPERAND (expr, 1);
3327 else
3328 break;
3329 }
3330 else
3331 break;
3332 }
3333
3334 return expr;
3335 }
3336
3337 /* Return which tree structure is used by T. */
3338
3339 enum tree_node_structure_enum
3340 tree_node_structure (const_tree t)
3341 {
3342 const enum tree_code code = TREE_CODE (t);
3343 return tree_node_structure_for_code (code);
3344 }
3345
3346 /* Set various status flags when building a CALL_EXPR object T. */
3347
3348 static void
3349 process_call_operands (tree t)
3350 {
3351 bool side_effects = TREE_SIDE_EFFECTS (t);
3352 bool read_only = false;
3353 int i = call_expr_flags (t);
3354
3355 /* Calls have side-effects, except those to const or pure functions. */
3356 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3357 side_effects = true;
3358 /* Propagate TREE_READONLY of arguments for const functions. */
3359 if (i & ECF_CONST)
3360 read_only = true;
3361
3362 if (!side_effects || read_only)
3363 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3364 {
3365 tree op = TREE_OPERAND (t, i);
3366 if (op && TREE_SIDE_EFFECTS (op))
3367 side_effects = true;
3368 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3369 read_only = false;
3370 }
3371
3372 TREE_SIDE_EFFECTS (t) = side_effects;
3373 TREE_READONLY (t) = read_only;
3374 }
3375 \f
3376 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3377 size or offset that depends on a field within a record. */
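/* Such self-referential sizes arise, for instance, in Ada records where the
   bound of an array component is given by another component of the same
   record; the PLACEHOLDER_EXPR stands for the containing object, which is
   not known at this point.  */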
3378
3379 bool
3380 contains_placeholder_p (const_tree exp)
3381 {
3382 enum tree_code code;
3383
3384 if (!exp)
3385 return 0;
3386
3387 code = TREE_CODE (exp);
3388 if (code == PLACEHOLDER_EXPR)
3389 return 1;
3390
3391 switch (TREE_CODE_CLASS (code))
3392 {
3393 case tcc_reference:
3394 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3395 position computations since they will be converted into a
3396 WITH_RECORD_EXPR involving the reference, which we assume
3397 here will be valid. */
3398 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3399
3400 case tcc_exceptional:
3401 if (code == TREE_LIST)
3402 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3403 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3404 break;
3405
3406 case tcc_unary:
3407 case tcc_binary:
3408 case tcc_comparison:
3409 case tcc_expression:
3410 switch (code)
3411 {
3412 case COMPOUND_EXPR:
3413 /* Ignoring the first operand isn't quite right, but works best. */
3414 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3415
3416 case COND_EXPR:
3417 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3418 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3419 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3420
3421 case SAVE_EXPR:
3422 /* The save_expr function never wraps anything containing
3423 a PLACEHOLDER_EXPR. */
3424 return 0;
3425
3426 default:
3427 break;
3428 }
3429
3430 switch (TREE_CODE_LENGTH (code))
3431 {
3432 case 1:
3433 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3434 case 2:
3435 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3436 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3437 default:
3438 return 0;
3439 }
3440
3441 case tcc_vl_exp:
3442 switch (code)
3443 {
3444 case CALL_EXPR:
3445 {
3446 const_tree arg;
3447 const_call_expr_arg_iterator iter;
3448 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3449 if (CONTAINS_PLACEHOLDER_P (arg))
3450 return 1;
3451 return 0;
3452 }
3453 default:
3454 return 0;
3455 }
3456
3457 default:
3458 return 0;
3459 }
3460 return 0;
3461 }
3462
3463 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3464 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3465 field positions. */
3466
3467 static bool
3468 type_contains_placeholder_1 (const_tree type)
3469 {
3470 /* If the size contains a placeholder or the parent type (the component
3471 type in the case of arrays) involves a placeholder, this type does. */
3472 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3473 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3474 || (!POINTER_TYPE_P (type)
3475 && TREE_TYPE (type)
3476 && type_contains_placeholder_p (TREE_TYPE (type))))
3477 return true;
3478
3479 /* Now do type-specific checks. Note that the last part of the check above
3480 greatly limits what we have to do below. */
3481 switch (TREE_CODE (type))
3482 {
3483 case VOID_TYPE:
3484 case POINTER_BOUNDS_TYPE:
3485 case COMPLEX_TYPE:
3486 case ENUMERAL_TYPE:
3487 case BOOLEAN_TYPE:
3488 case POINTER_TYPE:
3489 case OFFSET_TYPE:
3490 case REFERENCE_TYPE:
3491 case METHOD_TYPE:
3492 case FUNCTION_TYPE:
3493 case VECTOR_TYPE:
3494 case NULLPTR_TYPE:
3495 return false;
3496
3497 case INTEGER_TYPE:
3498 case REAL_TYPE:
3499 case FIXED_POINT_TYPE:
3500 /* Here we just check the bounds. */
3501 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3502 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3503
3504 case ARRAY_TYPE:
3505 /* We have already checked the component type above, so just check the
3506 domain type. */
3507 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3508
3509 case RECORD_TYPE:
3510 case UNION_TYPE:
3511 case QUAL_UNION_TYPE:
3512 {
3513 tree field;
3514
3515 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3516 if (TREE_CODE (field) == FIELD_DECL
3517 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3518 || (TREE_CODE (type) == QUAL_UNION_TYPE
3519 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3520 || type_contains_placeholder_p (TREE_TYPE (field))))
3521 return true;
3522
3523 return false;
3524 }
3525
3526 default:
3527 gcc_unreachable ();
3528 }
3529 }
3530
3531 /* Wrapper around above function used to cache its result. */
3532
3533 bool
3534 type_contains_placeholder_p (tree type)
3535 {
3536 bool result;
3537
3538 /* If the contains_placeholder_bits field has been initialized,
3539 then we know the answer. */
3540 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3541 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3542
3543 /* Indicate that we've seen this type node, and the answer is false.
3544 This is what we want to return if we run into recursion via fields. */
3545 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3546
3547 /* Compute the real value. */
3548 result = type_contains_placeholder_1 (type);
3549
3550 /* Store the real value. */
3551 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3552
3553 return result;
3554 }
3555 \f
3556 /* Push tree EXP onto vector QUEUE if it is not already present. */
3557
3558 static void
3559 push_without_duplicates (tree exp, vec<tree> *queue)
3560 {
3561 unsigned int i;
3562 tree iter;
3563
3564 FOR_EACH_VEC_ELT (*queue, i, iter)
3565 if (simple_cst_equal (iter, exp) == 1)
3566 break;
3567
3568 if (!iter)
3569 queue->safe_push (exp);
3570 }
3571
3572 /* Given a tree EXP, find all occurrences of references to fields
3573 in a PLACEHOLDER_EXPR and place them in vector REFS without
3574 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3575 we assume here that EXP contains only arithmetic expressions
3576 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3577 argument list. */
3578
3579 void
3580 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3581 {
3582 enum tree_code code = TREE_CODE (exp);
3583 tree inner;
3584 int i;
3585
3586 /* We handle TREE_LIST and COMPONENT_REF separately. */
3587 if (code == TREE_LIST)
3588 {
3589 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3590 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3591 }
3592 else if (code == COMPONENT_REF)
3593 {
3594 for (inner = TREE_OPERAND (exp, 0);
3595 REFERENCE_CLASS_P (inner);
3596 inner = TREE_OPERAND (inner, 0))
3597 ;
3598
3599 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3600 push_without_duplicates (exp, refs);
3601 else
3602 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3603 }
3604 else
3605 switch (TREE_CODE_CLASS (code))
3606 {
3607 case tcc_constant:
3608 break;
3609
3610 case tcc_declaration:
3611 /* Variables allocated to static storage can stay. */
3612 if (!TREE_STATIC (exp))
3613 push_without_duplicates (exp, refs);
3614 break;
3615
3616 case tcc_expression:
3617 /* This is the pattern built in ada/make_aligning_type. */
3618 if (code == ADDR_EXPR
3619 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3620 {
3621 push_without_duplicates (exp, refs);
3622 break;
3623 }
3624
3625 /* Fall through... */
3626
3627 case tcc_exceptional:
3628 case tcc_unary:
3629 case tcc_binary:
3630 case tcc_comparison:
3631 case tcc_reference:
3632 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3633 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3634 break;
3635
3636 case tcc_vl_exp:
3637 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3638 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3639 break;
3640
3641 default:
3642 gcc_unreachable ();
3643 }
3644 }
3645
3646 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3647 return a tree with all occurrences of references to F in a
3648 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3649 CONST_DECLs. Note that we assume here that EXP contains only
3650 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3651 occurring only in their argument list. */
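/* Illustrative use: if EXP is the size expression <PLACEHOLDER>.n * 4 and F
   is the FIELD_DECL for n, substituting the constant 10 for F yields the
   folded constant 40.  */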
3652
3653 tree
3654 substitute_in_expr (tree exp, tree f, tree r)
3655 {
3656 enum tree_code code = TREE_CODE (exp);
3657 tree op0, op1, op2, op3;
3658 tree new_tree;
3659
3660 /* We handle TREE_LIST and COMPONENT_REF separately. */
3661 if (code == TREE_LIST)
3662 {
3663 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3664 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3665 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3666 return exp;
3667
3668 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3669 }
3670 else if (code == COMPONENT_REF)
3671 {
3672 tree inner;
3673
3674 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3675 and it is the right field, replace it with R. */
3676 for (inner = TREE_OPERAND (exp, 0);
3677 REFERENCE_CLASS_P (inner);
3678 inner = TREE_OPERAND (inner, 0))
3679 ;
3680
3681 /* The field. */
3682 op1 = TREE_OPERAND (exp, 1);
3683
3684 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3685 return r;
3686
3687 /* If this expression hasn't been completed yet, leave it alone. */
3688 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3689 return exp;
3690
3691 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3692 if (op0 == TREE_OPERAND (exp, 0))
3693 return exp;
3694
3695 new_tree
3696 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3697 }
3698 else
3699 switch (TREE_CODE_CLASS (code))
3700 {
3701 case tcc_constant:
3702 return exp;
3703
3704 case tcc_declaration:
3705 if (exp == f)
3706 return r;
3707 else
3708 return exp;
3709
3710 case tcc_expression:
3711 if (exp == f)
3712 return r;
3713
3714 /* Fall through... */
3715
3716 case tcc_exceptional:
3717 case tcc_unary:
3718 case tcc_binary:
3719 case tcc_comparison:
3720 case tcc_reference:
3721 switch (TREE_CODE_LENGTH (code))
3722 {
3723 case 0:
3724 return exp;
3725
3726 case 1:
3727 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3728 if (op0 == TREE_OPERAND (exp, 0))
3729 return exp;
3730
3731 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3732 break;
3733
3734 case 2:
3735 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3736 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3737
3738 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3739 return exp;
3740
3741 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3742 break;
3743
3744 case 3:
3745 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3746 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3747 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3748
3749 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3750 && op2 == TREE_OPERAND (exp, 2))
3751 return exp;
3752
3753 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3754 break;
3755
3756 case 4:
3757 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3758 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3759 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3760 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3761
3762 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3763 && op2 == TREE_OPERAND (exp, 2)
3764 && op3 == TREE_OPERAND (exp, 3))
3765 return exp;
3766
3767 new_tree
3768 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3769 break;
3770
3771 default:
3772 gcc_unreachable ();
3773 }
3774 break;
3775
3776 case tcc_vl_exp:
3777 {
3778 int i;
3779
3780 new_tree = NULL_TREE;
3781
3782 /* If we are trying to replace F with a constant, inline back
3783 functions which do nothing but compute a value from the
3784 arguments they are passed. This makes it possible to partially
3785 or entirely fold the replacement expression. */
3786 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3787 {
3788 tree t = maybe_inline_call_in_expr (exp);
3789 if (t)
3790 return SUBSTITUTE_IN_EXPR (t, f, r);
3791 }
3792
3793 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3794 {
3795 tree op = TREE_OPERAND (exp, i);
3796 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3797 if (new_op != op)
3798 {
3799 if (!new_tree)
3800 new_tree = copy_node (exp);
3801 TREE_OPERAND (new_tree, i) = new_op;
3802 }
3803 }
3804
3805 if (new_tree)
3806 {
3807 new_tree = fold (new_tree);
3808 if (TREE_CODE (new_tree) == CALL_EXPR)
3809 process_call_operands (new_tree);
3810 }
3811 else
3812 return exp;
3813 }
3814 break;
3815
3816 default:
3817 gcc_unreachable ();
3818 }
3819
3820 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3821
3822 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3823 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3824
3825 return new_tree;
3826 }
3827
3828 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3829 for it within OBJ, a tree that is an object or a chain of references. */
3830
3831 tree
3832 substitute_placeholder_in_expr (tree exp, tree obj)
3833 {
3834 enum tree_code code = TREE_CODE (exp);
3835 tree op0, op1, op2, op3;
3836 tree new_tree;
3837
3838 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3839 in the chain of OBJ. */
3840 if (code == PLACEHOLDER_EXPR)
3841 {
3842 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3843 tree elt;
3844
3845 for (elt = obj; elt != 0;
3846 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3847 || TREE_CODE (elt) == COND_EXPR)
3848 ? TREE_OPERAND (elt, 1)
3849 : (REFERENCE_CLASS_P (elt)
3850 || UNARY_CLASS_P (elt)
3851 || BINARY_CLASS_P (elt)
3852 || VL_EXP_CLASS_P (elt)
3853 || EXPRESSION_CLASS_P (elt))
3854 ? TREE_OPERAND (elt, 0) : 0))
3855 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3856 return elt;
3857
3858 for (elt = obj; elt != 0;
3859 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3860 || TREE_CODE (elt) == COND_EXPR)
3861 ? TREE_OPERAND (elt, 1)
3862 : (REFERENCE_CLASS_P (elt)
3863 || UNARY_CLASS_P (elt)
3864 || BINARY_CLASS_P (elt)
3865 || VL_EXP_CLASS_P (elt)
3866 || EXPRESSION_CLASS_P (elt))
3867 ? TREE_OPERAND (elt, 0) : 0))
3868 if (POINTER_TYPE_P (TREE_TYPE (elt))
3869 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3870 == need_type))
3871 return fold_build1 (INDIRECT_REF, need_type, elt);
3872
3873 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3874 survives until RTL generation, there will be an error. */
3875 return exp;
3876 }
3877
3878 /* TREE_LIST is special because we need to look at TREE_VALUE
3879 and TREE_CHAIN, not TREE_OPERANDS. */
3880 else if (code == TREE_LIST)
3881 {
3882 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3883 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3884 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3885 return exp;
3886
3887 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3888 }
3889 else
3890 switch (TREE_CODE_CLASS (code))
3891 {
3892 case tcc_constant:
3893 case tcc_declaration:
3894 return exp;
3895
3896 case tcc_exceptional:
3897 case tcc_unary:
3898 case tcc_binary:
3899 case tcc_comparison:
3900 case tcc_expression:
3901 case tcc_reference:
3902 case tcc_statement:
3903 switch (TREE_CODE_LENGTH (code))
3904 {
3905 case 0:
3906 return exp;
3907
3908 case 1:
3909 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3910 if (op0 == TREE_OPERAND (exp, 0))
3911 return exp;
3912
3913 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3914 break;
3915
3916 case 2:
3917 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3918 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3919
3920 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3921 return exp;
3922
3923 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3924 break;
3925
3926 case 3:
3927 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3928 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3929 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3930
3931 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3932 && op2 == TREE_OPERAND (exp, 2))
3933 return exp;
3934
3935 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3936 break;
3937
3938 case 4:
3939 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3940 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3941 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3942 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3943
3944 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3945 && op2 == TREE_OPERAND (exp, 2)
3946 && op3 == TREE_OPERAND (exp, 3))
3947 return exp;
3948
3949 new_tree
3950 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3951 break;
3952
3953 default:
3954 gcc_unreachable ();
3955 }
3956 break;
3957
3958 case tcc_vl_exp:
3959 {
3960 int i;
3961
3962 new_tree = NULL_TREE;
3963
3964 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3965 {
3966 tree op = TREE_OPERAND (exp, i);
3967 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3968 if (new_op != op)
3969 {
3970 if (!new_tree)
3971 new_tree = copy_node (exp);
3972 TREE_OPERAND (new_tree, i) = new_op;
3973 }
3974 }
3975
3976 if (new_tree)
3977 {
3978 new_tree = fold (new_tree);
3979 if (TREE_CODE (new_tree) == CALL_EXPR)
3980 process_call_operands (new_tree);
3981 }
3982 else
3983 return exp;
3984 }
3985 break;
3986
3987 default:
3988 gcc_unreachable ();
3989 }
3990
3991 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3992
3993 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3994 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3995
3996 return new_tree;
3997 }
3998 \f
3999
4000 /* Subroutine of stabilize_reference; this is called for subtrees of
4001 references. Any expression with side-effects must be put in a SAVE_EXPR
4002 to ensure that it is only evaluated once.
4003
4004 We don't put SAVE_EXPR nodes around everything, because assigning very
4005 simple expressions to temporaries causes us to miss good opportunities
4006 for optimizations. Among other things, the opportunity to fold in the
4007 addition of a constant into an addressing mode often gets lost, e.g.
4008 "y[i+1] += x;". In general, we take the approach that we should not make
4009 an assignment unless we are forced into it - i.e., that any side-effect-free
4010 operator should be allowed, and that cse should take care of coalescing
4011 multiple utterances of the same expression should that prove fruitful. */
4012
4013 static tree
4014 stabilize_reference_1 (tree e)
4015 {
4016 tree result;
4017 enum tree_code code = TREE_CODE (e);
4018
4019 /* We cannot ignore const expressions because it might be a reference
4020 to a const array whose index contains side-effects. But we can
4021 ignore things that are actually constant or that have already been
4022 handled by this function. */
4023
4024 if (tree_invariant_p (e))
4025 return e;
4026
4027 switch (TREE_CODE_CLASS (code))
4028 {
4029 case tcc_exceptional:
4030 case tcc_type:
4031 case tcc_declaration:
4032 case tcc_comparison:
4033 case tcc_statement:
4034 case tcc_expression:
4035 case tcc_reference:
4036 case tcc_vl_exp:
4037 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4038 so that it will only be evaluated once. */
4039 /* The reference (r) and comparison (<) classes could be handled as
4040 below, but it is generally faster to only evaluate them once. */
4041 if (TREE_SIDE_EFFECTS (e))
4042 return save_expr (e);
4043 return e;
4044
4045 case tcc_constant:
4046 /* Constants need no processing. In fact, we should never reach
4047 here. */
4048 return e;
4049
4050 case tcc_binary:
4051 /* Division is slow and tends to be compiled with jumps,
4052 especially the division by powers of 2 that is often
4053 found inside of an array reference. So do it just once. */
4054 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4055 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4056 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4057 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4058 return save_expr (e);
4059 /* Recursively stabilize each operand. */
4060 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4061 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4062 break;
4063
4064 case tcc_unary:
4065 /* Recursively stabilize each operand. */
4066 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4067 break;
4068
4069 default:
4070 gcc_unreachable ();
4071 }
4072
4073 TREE_TYPE (result) = TREE_TYPE (e);
4074 TREE_READONLY (result) = TREE_READONLY (e);
4075 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4076 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4077
4078 return result;
4079 }
4080
4081 /* Stabilize a reference so that we can use it any number of times
4082 without causing its operands to be evaluated more than once.
4083 Returns the stabilized reference. This works by means of save_expr,
4084 so see the caveats in the comments about save_expr.
4085
4086 Also allows conversion expressions whose operands are references.
4087 Any other kind of expression is returned unchanged. */
4088
4089 tree
4090 stabilize_reference (tree ref)
4091 {
4092 tree result;
4093 enum tree_code code = TREE_CODE (ref);
4094
4095 switch (code)
4096 {
4097 case VAR_DECL:
4098 case PARM_DECL:
4099 case RESULT_DECL:
4100 /* No action is needed in this case. */
4101 return ref;
4102
4103 CASE_CONVERT:
4104 case FLOAT_EXPR:
4105 case FIX_TRUNC_EXPR:
4106 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4107 break;
4108
4109 case INDIRECT_REF:
4110 result = build_nt (INDIRECT_REF,
4111 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4112 break;
4113
4114 case COMPONENT_REF:
4115 result = build_nt (COMPONENT_REF,
4116 stabilize_reference (TREE_OPERAND (ref, 0)),
4117 TREE_OPERAND (ref, 1), NULL_TREE);
4118 break;
4119
4120 case BIT_FIELD_REF:
4121 result = build_nt (BIT_FIELD_REF,
4122 stabilize_reference (TREE_OPERAND (ref, 0)),
4123 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4124 break;
4125
4126 case ARRAY_REF:
4127 result = build_nt (ARRAY_REF,
4128 stabilize_reference (TREE_OPERAND (ref, 0)),
4129 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4130 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4131 break;
4132
4133 case ARRAY_RANGE_REF:
4134 result = build_nt (ARRAY_RANGE_REF,
4135 stabilize_reference (TREE_OPERAND (ref, 0)),
4136 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4137 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4138 break;
4139
4140 case COMPOUND_EXPR:
4141 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4142 it wouldn't be ignored. This matters when dealing with
4143 volatiles. */
4144 return stabilize_reference_1 (ref);
4145
4146 /* If arg isn't a kind of lvalue we recognize, make no change.
4147 Caller should recognize the error for an invalid lvalue. */
4148 default:
4149 return ref;
4150
4151 case ERROR_MARK:
4152 return error_mark_node;
4153 }
4154
4155 TREE_TYPE (result) = TREE_TYPE (ref);
4156 TREE_READONLY (result) = TREE_READONLY (ref);
4157 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4158 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4159
4160 return result;
4161 }
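
/* A minimal usage sketch (editorial; the helper name and its arguments are
   hypothetical): a front end lowering "a[i+1] += x" would stabilize the
   left-hand reference so it can be used both as source and destination
   without evaluating its operands more than once.  */
#if 0
static tree
sketch_compound_add (tree array_ref, tree x)
{
  tree lhs = stabilize_reference (array_ref);
  tree rhs = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, x);
  return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
}
#endif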
4162 \f
4163 /* Low-level constructors for expressions. */
4164
4165 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4166 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4167
4168 void
4169 recompute_tree_invariant_for_addr_expr (tree t)
4170 {
4171 tree node;
4172 bool tc = true, se = false;
4173
4174 /* We start out assuming this address is both invariant and constant, and
4175 that it does not have side effects. Now go down any handled components and see if
4176 any of them involve offsets that are either non-constant or non-invariant.
4177 Also check for side-effects.
4178
4179 ??? Note that this code makes no attempt to deal with the case where
4180 taking the address of something causes a copy due to misalignment. */
4181
4182 #define UPDATE_FLAGS(NODE) \
4183 do { tree _node = (NODE); \
4184 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4185 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4186
4187 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4188 node = TREE_OPERAND (node, 0))
4189 {
4190 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4191 array reference (probably made temporarily by the G++ front end),
4192 so ignore all the operands. */
4193 if ((TREE_CODE (node) == ARRAY_REF
4194 || TREE_CODE (node) == ARRAY_RANGE_REF)
4195 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4196 {
4197 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4198 if (TREE_OPERAND (node, 2))
4199 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4200 if (TREE_OPERAND (node, 3))
4201 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4202 }
4203 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4204 FIELD_DECL, apparently. The G++ front end can put something else
4205 there, at least temporarily. */
4206 else if (TREE_CODE (node) == COMPONENT_REF
4207 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4208 {
4209 if (TREE_OPERAND (node, 2))
4210 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4211 }
4212 }
4213
4214 node = lang_hooks.expr_to_decl (node, &tc, &se);
4215
4216 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4217 the address, since &(*a)->b is a form of addition. If it's a constant, the
4218 address is constant too. If it's a decl, its address is constant if the
4219 decl is static. Everything else is not constant and, furthermore,
4220 taking the address of a volatile variable is not volatile. */
4221 if (TREE_CODE (node) == INDIRECT_REF
4222 || TREE_CODE (node) == MEM_REF)
4223 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4224 else if (CONSTANT_CLASS_P (node))
4225 ;
4226 else if (DECL_P (node))
4227 tc &= (staticp (node) != NULL_TREE);
4228 else
4229 {
4230 tc = false;
4231 se |= TREE_SIDE_EFFECTS (node);
4232 }
4233
4234
4235 TREE_CONSTANT (t) = tc;
4236 TREE_SIDE_EFFECTS (t) = se;
4237 #undef UPDATE_FLAGS
4238 }
4239
4240 /* Build an expression of code CODE, data type TYPE, and operands as
4241 specified. Expressions and reference nodes can be created this way.
4242 Constants, decls, types and misc nodes cannot be.
4243
4244 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4245 enough for all extant tree codes. */
4246
4247 tree
4248 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4249 {
4250 tree t;
4251
4252 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4253
4254 t = make_node_stat (code PASS_MEM_STAT);
4255 TREE_TYPE (t) = tt;
4256
4257 return t;
4258 }
4259
4260 tree
4261 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4262 {
4263 int length = sizeof (struct tree_exp);
4264 tree t;
4265
4266 record_node_allocation_statistics (code, length);
4267
4268 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4269
4270 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4271
4272 memset (t, 0, sizeof (struct tree_common));
4273
4274 TREE_SET_CODE (t, code);
4275
4276 TREE_TYPE (t) = type;
4277 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4278 TREE_OPERAND (t, 0) = node;
4279 if (node && !TYPE_P (node))
4280 {
4281 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4282 TREE_READONLY (t) = TREE_READONLY (node);
4283 }
4284
4285 if (TREE_CODE_CLASS (code) == tcc_statement)
4286 TREE_SIDE_EFFECTS (t) = 1;
4287 else switch (code)
4288 {
4289 case VA_ARG_EXPR:
4290 /* All of these have side-effects, no matter what their
4291 operands are. */
4292 TREE_SIDE_EFFECTS (t) = 1;
4293 TREE_READONLY (t) = 0;
4294 break;
4295
4296 case INDIRECT_REF:
4297 /* Whether a dereference is readonly has nothing to do with whether
4298 its operand is readonly. */
4299 TREE_READONLY (t) = 0;
4300 break;
4301
4302 case ADDR_EXPR:
4303 if (node)
4304 recompute_tree_invariant_for_addr_expr (t);
4305 break;
4306
4307 default:
4308 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4309 && node && !TYPE_P (node)
4310 && TREE_CONSTANT (node))
4311 TREE_CONSTANT (t) = 1;
4312 if (TREE_CODE_CLASS (code) == tcc_reference
4313 && node && TREE_THIS_VOLATILE (node))
4314 TREE_THIS_VOLATILE (t) = 1;
4315 break;
4316 }
4317
4318 return t;
4319 }
4320
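/* Helper for the buildN_stat functions below: store ARG##N as operand N of T
   and, for non-type operands, fold their TREE_SIDE_EFFECTS, TREE_READONLY and
   TREE_CONSTANT flags into the local side_effects, read_only and constant
   accumulators.  */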
4321 #define PROCESS_ARG(N) \
4322 do { \
4323 TREE_OPERAND (t, N) = arg##N; \
4324 if (arg##N && !TYPE_P (arg##N)) \
4325 { \
4326 if (TREE_SIDE_EFFECTS (arg##N)) \
4327 side_effects = 1; \
4328 if (!TREE_READONLY (arg##N) \
4329 && !CONSTANT_CLASS_P (arg##N)) \
4330 (void) (read_only = 0); \
4331 if (!TREE_CONSTANT (arg##N)) \
4332 (void) (constant = 0); \
4333 } \
4334 } while (0)
4335
4336 tree
4337 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4338 {
4339 bool constant, read_only, side_effects;
4340 tree t;
4341
4342 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4343
4344 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4345 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4346 /* When sizetype precision doesn't match that of pointers
4347 we need to be able to build explicit extensions or truncations
4348 of the offset argument. */
4349 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4350 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4351 && TREE_CODE (arg1) == INTEGER_CST);
4352
4353 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4354 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4355 && ptrofftype_p (TREE_TYPE (arg1)));
4356
4357 t = make_node_stat (code PASS_MEM_STAT);
4358 TREE_TYPE (t) = tt;
4359
4360 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4361 result based on those same flags for the arguments. But if the
4362 arguments aren't really even `tree' expressions, we shouldn't be trying
4363 to do this. */
4364
4365 /* Expressions without side effects may be constant if their
4366 arguments are as well. */
4367 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4368 || TREE_CODE_CLASS (code) == tcc_binary);
4369 read_only = 1;
4370 side_effects = TREE_SIDE_EFFECTS (t);
4371
4372 PROCESS_ARG (0);
4373 PROCESS_ARG (1);
4374
4375 TREE_SIDE_EFFECTS (t) = side_effects;
4376 if (code == MEM_REF)
4377 {
4378 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4379 {
4380 tree o = TREE_OPERAND (arg0, 0);
4381 TREE_READONLY (t) = TREE_READONLY (o);
4382 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4383 }
4384 }
4385 else
4386 {
4387 TREE_READONLY (t) = read_only;
4388 TREE_CONSTANT (t) = constant;
4389 TREE_THIS_VOLATILE (t)
4390 = (TREE_CODE_CLASS (code) == tcc_reference
4391 && arg0 && TREE_THIS_VOLATILE (arg0));
4392 }
4393
4394 return t;
4395 }
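
/* Usage sketch (editorial; the helper is hypothetical): pointer offsetting
   must go through POINTER_PLUS_EXPR with a sizetype offset, per the assert
   in build2_stat above.  */
#if 0
static tree
sketch_pointer_plus_four (tree ptr)
{
  tree off = build_int_cst (sizetype, 4);	/* satisfies ptrofftype_p  */
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
}
#endif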
4396
4397
4398 tree
4399 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4400 tree arg2 MEM_STAT_DECL)
4401 {
4402 bool constant, read_only, side_effects;
4403 tree t;
4404
4405 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4406 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4407
4408 t = make_node_stat (code PASS_MEM_STAT);
4409 TREE_TYPE (t) = tt;
4410
4411 read_only = 1;
4412
4413 /* As a special exception, if COND_EXPR has NULL branches, we
4414 assume that it is a gimple statement and always consider
4415 it to have side effects. */
4416 if (code == COND_EXPR
4417 && tt == void_type_node
4418 && arg1 == NULL_TREE
4419 && arg2 == NULL_TREE)
4420 side_effects = true;
4421 else
4422 side_effects = TREE_SIDE_EFFECTS (t);
4423
4424 PROCESS_ARG (0);
4425 PROCESS_ARG (1);
4426 PROCESS_ARG (2);
4427
4428 if (code == COND_EXPR)
4429 TREE_READONLY (t) = read_only;
4430
4431 TREE_SIDE_EFFECTS (t) = side_effects;
4432 TREE_THIS_VOLATILE (t)
4433 = (TREE_CODE_CLASS (code) == tcc_reference
4434 && arg0 && TREE_THIS_VOLATILE (arg0));
4435
4436 return t;
4437 }
4438
4439 tree
4440 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4441 tree arg2, tree arg3 MEM_STAT_DECL)
4442 {
4443 bool constant, read_only, side_effects;
4444 tree t;
4445
4446 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4447
4448 t = make_node_stat (code PASS_MEM_STAT);
4449 TREE_TYPE (t) = tt;
4450
4451 side_effects = TREE_SIDE_EFFECTS (t);
4452
4453 PROCESS_ARG (0);
4454 PROCESS_ARG (1);
4455 PROCESS_ARG (2);
4456 PROCESS_ARG (3);
4457
4458 TREE_SIDE_EFFECTS (t) = side_effects;
4459 TREE_THIS_VOLATILE (t)
4460 = (TREE_CODE_CLASS (code) == tcc_reference
4461 && arg0 && TREE_THIS_VOLATILE (arg0));
4462
4463 return t;
4464 }
4465
4466 tree
4467 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4468 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4469 {
4470 bool constant, read_only, side_effects;
4471 tree t;
4472
4473 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4474
4475 t = make_node_stat (code PASS_MEM_STAT);
4476 TREE_TYPE (t) = tt;
4477
4478 side_effects = TREE_SIDE_EFFECTS (t);
4479
4480 PROCESS_ARG (0);
4481 PROCESS_ARG (1);
4482 PROCESS_ARG (2);
4483 PROCESS_ARG (3);
4484 PROCESS_ARG (4);
4485
4486 TREE_SIDE_EFFECTS (t) = side_effects;
4487 if (code == TARGET_MEM_REF)
4488 {
4489 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4490 {
4491 tree o = TREE_OPERAND (arg0, 0);
4492 TREE_READONLY (t) = TREE_READONLY (o);
4493 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4494 }
4495 }
4496 else
4497 TREE_THIS_VOLATILE (t)
4498 = (TREE_CODE_CLASS (code) == tcc_reference
4499 && arg0 && TREE_THIS_VOLATILE (arg0));
4500
4501 return t;
4502 }
4503
4504 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4505 on the pointer PTR. */
4506
4507 tree
4508 build_simple_mem_ref_loc (location_t loc, tree ptr)
4509 {
4510 HOST_WIDE_INT offset = 0;
4511 tree ptype = TREE_TYPE (ptr);
4512 tree tem;
4513 /* For convenience allow addresses that collapse to a simple base
4514 and offset. */
4515 if (TREE_CODE (ptr) == ADDR_EXPR
4516 && (handled_component_p (TREE_OPERAND (ptr, 0))
4517 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4518 {
4519 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4520 gcc_assert (ptr);
4521 ptr = build_fold_addr_expr (ptr);
4522 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4523 }
4524 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4525 ptr, build_int_cst (ptype, offset));
4526 SET_EXPR_LOCATION (tem, loc);
4527 return tem;
4528 }
4529
4530 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4531
4532 offset_int
4533 mem_ref_offset (const_tree t)
4534 {
4535 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4536 }
4537
4538 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4539 offsetted by OFFSET units. */
4540
4541 tree
4542 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4543 {
4544 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4545 build_fold_addr_expr (base),
4546 build_int_cst (ptr_type_node, offset));
4547 tree addr = build1 (ADDR_EXPR, type, ref);
4548 recompute_tree_invariant_for_addr_expr (addr);
4549 return addr;
4550 }
4551
4552 /* Similar to the buildN functions above, except that the TREE_TYPE is
4553 not specified and TREE_SIDE_EFFECTS is left as 0.
4554 It is permissible for arguments to be null,
4555 or even garbage if their values do not matter. */
4556
4557 tree
4558 build_nt (enum tree_code code, ...)
4559 {
4560 tree t;
4561 int length;
4562 int i;
4563 va_list p;
4564
4565 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4566
4567 va_start (p, code);
4568
4569 t = make_node (code);
4570 length = TREE_CODE_LENGTH (code);
4571
4572 for (i = 0; i < length; i++)
4573 TREE_OPERAND (t, i) = va_arg (p, tree);
4574
4575 va_end (p);
4576 return t;
4577 }
4578
4579 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4580 tree vec. */
4581
4582 tree
4583 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4584 {
4585 tree ret, t;
4586 unsigned int ix;
4587
4588 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4589 CALL_EXPR_FN (ret) = fn;
4590 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4591 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4592 CALL_EXPR_ARG (ret, ix) = t;
4593 return ret;
4594 }
4595 \f
4596 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4597 We do NOT enter this node in any sort of symbol table.
4598
4599 LOC is the location of the decl.
4600
4601 layout_decl is used to set up the decl's storage layout.
4602 Other slots are initialized to 0 or null pointers. */
4603
4604 tree
4605 build_decl_stat (location_t loc, enum tree_code code, tree name,
4606 tree type MEM_STAT_DECL)
4607 {
4608 tree t;
4609
4610 t = make_node_stat (code PASS_MEM_STAT);
4611 DECL_SOURCE_LOCATION (t) = loc;
4612
4613 /* if (type == error_mark_node)
4614 type = integer_type_node; */
4615 /* That is not done, deliberately, so that having error_mark_node
4616 as the type can suppress useless errors in the use of this variable. */
4617
4618 DECL_NAME (t) = name;
4619 TREE_TYPE (t) = type;
4620
4621 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4622 layout_decl (t, 0);
4623
4624 return t;
4625 }
4626
4627 /* Builds and returns function declaration with NAME and TYPE. */
4628
4629 tree
4630 build_fn_decl (const char *name, tree type)
4631 {
4632 tree id = get_identifier (name);
4633 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4634
4635 DECL_EXTERNAL (decl) = 1;
4636 TREE_PUBLIC (decl) = 1;
4637 DECL_ARTIFICIAL (decl) = 1;
4638 TREE_NOTHROW (decl) = 1;
4639
4640 return decl;
4641 }
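
/* Usage sketch (editorial; "my_hook" and its signature are made up):
   declaring an external "int my_hook (void)" the way instrumentation
   code typically does.  */
#if 0
static tree
sketch_declare_hook (void)
{
  tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
  return build_fn_decl ("my_hook", fntype);
}
#endif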
4642
4643 vec<tree, va_gc> *all_translation_units;
4644
4645 /* Builds a new translation-unit decl with name NAME, queues it in the
4646 global list of translation-unit decls and returns it. */
4647
4648 tree
4649 build_translation_unit_decl (tree name)
4650 {
4651 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4652 name, NULL_TREE);
4653 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4654 vec_safe_push (all_translation_units, tu);
4655 return tu;
4656 }
4657
4658 \f
4659 /* BLOCK nodes are used to represent the structure of binding contours
4660 and declarations, once those contours have been exited and their contents
4661 compiled. This information is used for outputting debugging info. */
4662
4663 tree
4664 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4665 {
4666 tree block = make_node (BLOCK);
4667
4668 BLOCK_VARS (block) = vars;
4669 BLOCK_SUBBLOCKS (block) = subblocks;
4670 BLOCK_SUPERCONTEXT (block) = supercontext;
4671 BLOCK_CHAIN (block) = chain;
4672 return block;
4673 }
4674
4675 \f
4676 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4677
4678 LOC is the location to use in tree T. */
4679
4680 void
4681 protected_set_expr_location (tree t, location_t loc)
4682 {
4683 if (CAN_HAVE_LOCATION_P (t))
4684 SET_EXPR_LOCATION (t, loc);
4685 }
4686 \f
4687 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4688 is ATTRIBUTE. */
4689
4690 tree
4691 build_decl_attribute_variant (tree ddecl, tree attribute)
4692 {
4693 DECL_ATTRIBUTES (ddecl) = attribute;
4694 return ddecl;
4695 }
4696
4697 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4698 is ATTRIBUTE and its qualifiers are QUALS.
4699
4700 Record such modified types already made so we don't make duplicates. */
4701
4702 tree
4703 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4704 {
4705 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4706 {
4707 inchash::hash hstate;
4708 tree ntype;
4709 int i;
4710 tree t;
4711 enum tree_code code = TREE_CODE (ttype);
4712
4713 /* Building a distinct copy of a tagged type is inappropriate; it
4714 causes breakage in code that expects there to be a one-to-one
4715 relationship between a struct and its fields.
4716 build_duplicate_type is another solution (as used in
4717 handle_transparent_union_attribute), but that doesn't play well
4718 with the stronger C++ type identity model. */
4719 if (TREE_CODE (ttype) == RECORD_TYPE
4720 || TREE_CODE (ttype) == UNION_TYPE
4721 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4722 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4723 {
4724 warning (OPT_Wattributes,
4725 "ignoring attributes applied to %qT after definition",
4726 TYPE_MAIN_VARIANT (ttype));
4727 return build_qualified_type (ttype, quals);
4728 }
4729
4730 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4731 ntype = build_distinct_type_copy (ttype);
4732
4733 TYPE_ATTRIBUTES (ntype) = attribute;
4734
4735 hstate.add_int (code);
4736 if (TREE_TYPE (ntype))
4737 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4738 attribute_hash_list (attribute, hstate);
4739
4740 switch (TREE_CODE (ntype))
4741 {
4742 case FUNCTION_TYPE:
4743 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4744 break;
4745 case ARRAY_TYPE:
4746 if (TYPE_DOMAIN (ntype))
4747 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4748 break;
4749 case INTEGER_TYPE:
4750 t = TYPE_MAX_VALUE (ntype);
4751 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4752 hstate.add_object (TREE_INT_CST_ELT (t, i));
4753 break;
4754 case REAL_TYPE:
4755 case FIXED_POINT_TYPE:
4756 {
4757 unsigned int precision = TYPE_PRECISION (ntype);
4758 hstate.add_object (precision);
4759 }
4760 break;
4761 default:
4762 break;
4763 }
4764
4765 ntype = type_hash_canon (hstate.end(), ntype);
4766
4767 /* If the target-dependent attributes make NTYPE different from
4768 its canonical type, we will need to use structural equality
4769 checks for this type. */
4770 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4771 || !comp_type_attributes (ntype, ttype))
4772 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4773 else if (TYPE_CANONICAL (ntype) == ntype)
4774 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4775
4776 ttype = build_qualified_type (ntype, quals);
4777 }
4778 else if (TYPE_QUALS (ttype) != quals)
4779 ttype = build_qualified_type (ttype, quals);
4780
4781 return ttype;
4782 }
4783
4784 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4785 the same. */
4786
4787 static bool
4788 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4789 {
4790 tree cl1, cl2;
4791 for (cl1 = clauses1, cl2 = clauses2;
4792 cl1 && cl2;
4793 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4794 {
4795 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4796 return false;
4797 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4798 {
4799 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4800 OMP_CLAUSE_DECL (cl2)) != 1)
4801 return false;
4802 }
4803 switch (OMP_CLAUSE_CODE (cl1))
4804 {
4805 case OMP_CLAUSE_ALIGNED:
4806 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4807 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4808 return false;
4809 break;
4810 case OMP_CLAUSE_LINEAR:
4811 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4812 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4813 return false;
4814 break;
4815 case OMP_CLAUSE_SIMDLEN:
4816 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4817 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4818 return false;
4819 default:
4820 break;
4821 }
4822 }
4823 return true;
4824 }
4825
4826 /* Compare two constructor-element-type constants. Return true if the lists
4827 are known to be equal; otherwise return false. */
4828
4829 static bool
4830 simple_cst_list_equal (const_tree l1, const_tree l2)
4831 {
4832 while (l1 != NULL_TREE && l2 != NULL_TREE)
4833 {
4834 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4835 return false;
4836
4837 l1 = TREE_CHAIN (l1);
4838 l2 = TREE_CHAIN (l2);
4839 }
4840
4841 return l1 == l2;
4842 }
4843
4844 /* Compare two attributes for their value identity. Return true if the
4845 attribute values are known to be equal; otherwise return false. */
4846
4847
4848 static bool
4849 attribute_value_equal (const_tree attr1, const_tree attr2)
4850 {
4851 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4852 return true;
4853
4854 if (TREE_VALUE (attr1) != NULL_TREE
4855 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4856 && TREE_VALUE (attr2) != NULL
4857 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4858 return (simple_cst_list_equal (TREE_VALUE (attr1),
4859 TREE_VALUE (attr2)) == 1);
4860
4861 if ((flag_openmp || flag_openmp_simd)
4862 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4863 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4864 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4865 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4866 TREE_VALUE (attr2));
4867
4868 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4869 }
4870
4871 /* Return 0 if the attributes for two types are incompatible, 1 if they
4872 are compatible, and 2 if they are nearly compatible (which causes a
4873 warning to be generated). */
4874 int
4875 comp_type_attributes (const_tree type1, const_tree type2)
4876 {
4877 const_tree a1 = TYPE_ATTRIBUTES (type1);
4878 const_tree a2 = TYPE_ATTRIBUTES (type2);
4879 const_tree a;
4880
4881 if (a1 == a2)
4882 return 1;
4883 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4884 {
4885 const struct attribute_spec *as;
4886 const_tree attr;
4887
4888 as = lookup_attribute_spec (get_attribute_name (a));
4889 if (!as || as->affects_type_identity == false)
4890 continue;
4891
4892 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4893 if (!attr || !attribute_value_equal (a, attr))
4894 break;
4895 }
4896 if (!a)
4897 {
4898 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4899 {
4900 const struct attribute_spec *as;
4901
4902 as = lookup_attribute_spec (get_attribute_name (a));
4903 if (!as || as->affects_type_identity == false)
4904 continue;
4905
4906 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4907 break;
4908 /* We don't need to compare trees again, as we did this
4909 already in the first loop. */
4910 }
4911 /* All attributes affecting type identity are equal, so
4912 there is no need to call the target hook for comparison. */
4913 if (!a)
4914 return 1;
4915 }
4916 /* As some type combinations - like default calling-convention - might
4917 be compatible, we have to call the target hook to get the final result. */
4918 return targetm.comp_type_attributes (type1, type2);
4919 }
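
/* Usage sketch (editorial; the wrapper is hypothetical): callers treat the
   result of comp_type_attributes as a tri-state.  */
#if 0
static bool
sketch_attrs_compatible_p (tree type1, tree type2, bool *warn)
{
  int r = comp_type_attributes (type1, type2);
  *warn = (r == 2);	/* nearly compatible: warn but accept  */
  return r != 0;
}
#endif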
4920
4921 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4922 is ATTRIBUTE.
4923
4924 Record such modified types already made so we don't make duplicates. */
4925
4926 tree
4927 build_type_attribute_variant (tree ttype, tree attribute)
4928 {
4929 return build_type_attribute_qual_variant (ttype, attribute,
4930 TYPE_QUALS (ttype));
4931 }
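
/* Usage sketch (editorial; "my_attr" is a made-up attribute name): attach an
   argument-less attribute to a type, reusing an existing variant if one was
   already built.  */
#if 0
static tree
sketch_add_type_attribute (tree type)
{
  tree attrs = tree_cons (get_identifier ("my_attr"), NULL_TREE,
			  TYPE_ATTRIBUTES (type));
  return build_type_attribute_variant (type, attrs);
}
#endif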
4932
4933
4934 /* Reset the expression *EXPR_P, a size or position.
4935
4936 ??? We could reset all non-constant sizes or positions. But it's cheap
4937 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4938
4939 We need to reset self-referential sizes or positions because they cannot
4940 be gimplified and thus can contain a CALL_EXPR after the gimplification
4941 is finished, which will run afoul of LTO streaming. And they need to be
4942 reset to something essentially dummy but not constant, so as to preserve
4943 the properties of the object they are attached to. */
4944
4945 static inline void
4946 free_lang_data_in_one_sizepos (tree *expr_p)
4947 {
4948 tree expr = *expr_p;
4949 if (CONTAINS_PLACEHOLDER_P (expr))
4950 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4951 }
4952
4953
4954 /* Reset all the fields in a binfo node BINFO. We only keep
4955 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4956
4957 static void
4958 free_lang_data_in_binfo (tree binfo)
4959 {
4960 unsigned i;
4961 tree t;
4962
4963 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4964
4965 BINFO_VIRTUALS (binfo) = NULL_TREE;
4966 BINFO_BASE_ACCESSES (binfo) = NULL;
4967 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4968 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4969
4970 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4971 free_lang_data_in_binfo (t);
4972 }
4973
4974
4975 /* Reset all language specific information still present in TYPE. */
4976
4977 static void
4978 free_lang_data_in_type (tree type)
4979 {
4980 gcc_assert (TYPE_P (type));
4981
4982 /* Give the FE a chance to remove its own data first. */
4983 lang_hooks.free_lang_data (type);
4984
4985 TREE_LANG_FLAG_0 (type) = 0;
4986 TREE_LANG_FLAG_1 (type) = 0;
4987 TREE_LANG_FLAG_2 (type) = 0;
4988 TREE_LANG_FLAG_3 (type) = 0;
4989 TREE_LANG_FLAG_4 (type) = 0;
4990 TREE_LANG_FLAG_5 (type) = 0;
4991 TREE_LANG_FLAG_6 (type) = 0;
4992
4993 if (TREE_CODE (type) == FUNCTION_TYPE)
4994 {
4995 /* Remove the const and volatile qualifiers from arguments. The
4996 C++ front end removes them, but the C front end does not,
4997 leading to false ODR violation errors when merging two
4998 instances of the same function signature compiled by
4999 different front ends. */
5000 tree p;
5001
5002 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5003 {
5004 tree arg_type = TREE_VALUE (p);
5005
5006 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5007 {
5008 int quals = TYPE_QUALS (arg_type)
5009 & ~TYPE_QUAL_CONST
5010 & ~TYPE_QUAL_VOLATILE;
5011 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5012 free_lang_data_in_type (TREE_VALUE (p));
5013 }
5014 }
5015 }
5016
5017 /* Remove members that are not actually FIELD_DECLs from the field
5018 list of an aggregate. These occur in C++. */
5019 if (RECORD_OR_UNION_TYPE_P (type))
5020 {
5021 tree prev, member;
5022
5023 /* Note that TYPE_FIELDS can be shared across distinct
5024 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5025 to be removed, we cannot set its TREE_CHAIN to NULL.
5026 Otherwise, we would not be able to find all the other fields
5027 in the other instances of this TREE_TYPE.
5028
5029 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5030 prev = NULL_TREE;
5031 member = TYPE_FIELDS (type);
5032 while (member)
5033 {
5034 if (TREE_CODE (member) == FIELD_DECL
5035 || TREE_CODE (member) == TYPE_DECL)
5036 {
5037 if (prev)
5038 TREE_CHAIN (prev) = member;
5039 else
5040 TYPE_FIELDS (type) = member;
5041 prev = member;
5042 }
5043
5044 member = TREE_CHAIN (member);
5045 }
5046
5047 if (prev)
5048 TREE_CHAIN (prev) = NULL_TREE;
5049 else
5050 TYPE_FIELDS (type) = NULL_TREE;
5051
5052 TYPE_METHODS (type) = NULL_TREE;
5053 if (TYPE_BINFO (type))
5054 {
5055 free_lang_data_in_binfo (TYPE_BINFO (type));
5056 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5057 || !flag_devirtualize)
5058 && (!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5059 || debug_info_level != DINFO_LEVEL_NONE))
5060 TYPE_BINFO (type) = NULL;
5061 }
5062 }
5063 else
5064 {
5065 /* For non-aggregate types, clear out the language slot (which
5066 overloads TYPE_BINFO). */
5067 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5068
5069 if (INTEGRAL_TYPE_P (type)
5070 || SCALAR_FLOAT_TYPE_P (type)
5071 || FIXED_POINT_TYPE_P (type))
5072 {
5073 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5074 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5075 }
5076 }
5077
5078 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5079 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5080
5081 if (TYPE_CONTEXT (type)
5082 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5083 {
5084 tree ctx = TYPE_CONTEXT (type);
5085 do
5086 {
5087 ctx = BLOCK_SUPERCONTEXT (ctx);
5088 }
5089 while (ctx && TREE_CODE (ctx) == BLOCK);
5090 TYPE_CONTEXT (type) = ctx;
5091 }
5092 }
5093
5094
5095 /* Return true if DECL may need an assembler name to be set. */
5096
5097 static inline bool
5098 need_assembler_name_p (tree decl)
5099 {
5100 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5101 merging. */
5102 if (flag_lto_odr_type_mering
5103 && TREE_CODE (decl) == TYPE_DECL
5104 && DECL_NAME (decl)
5105 && decl == TYPE_NAME (TREE_TYPE (decl))
5106 && !is_lang_specific (TREE_TYPE (decl))
5107 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5108 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5109 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5110 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5111 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5112 if (TREE_CODE (decl) != FUNCTION_DECL
5113 && TREE_CODE (decl) != VAR_DECL)
5114 return false;
5115
5116 /* If DECL already has its assembler name set, it does not need a
5117 new one. */
5118 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5119 || DECL_ASSEMBLER_NAME_SET_P (decl))
5120 return false;
5121
5122 /* Abstract decls do not need an assembler name. */
5123 if (DECL_ABSTRACT_P (decl))
5124 return false;
5125
5126 /* For VAR_DECLs, only static, public and external symbols need an
5127 assembler name. */
5128 if (TREE_CODE (decl) == VAR_DECL
5129 && !TREE_STATIC (decl)
5130 && !TREE_PUBLIC (decl)
5131 && !DECL_EXTERNAL (decl))
5132 return false;
5133
5134 if (TREE_CODE (decl) == FUNCTION_DECL)
5135 {
5136 /* Do not set assembler name on builtins. Allow RTL expansion to
5137 decide whether to expand inline or via a regular call. */
5138 if (DECL_BUILT_IN (decl)
5139 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5140 return false;
5141
5142 /* Functions represented in the callgraph need an assembler name. */
5143 if (cgraph_node::get (decl) != NULL)
5144 return true;
5145
5146 /* Functions that are unused and not public don't need an assembler name. */
5147 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5148 return false;
5149 }
5150
5151 return true;
5152 }
5153
5154
5155 /* Reset all language specific information still present in symbol
5156 DECL. */
5157
5158 static void
5159 free_lang_data_in_decl (tree decl)
5160 {
5161 gcc_assert (DECL_P (decl));
5162
5163 /* Give the FE a chance to remove its own data first. */
5164 lang_hooks.free_lang_data (decl);
5165
5166 TREE_LANG_FLAG_0 (decl) = 0;
5167 TREE_LANG_FLAG_1 (decl) = 0;
5168 TREE_LANG_FLAG_2 (decl) = 0;
5169 TREE_LANG_FLAG_3 (decl) = 0;
5170 TREE_LANG_FLAG_4 (decl) = 0;
5171 TREE_LANG_FLAG_5 (decl) = 0;
5172 TREE_LANG_FLAG_6 (decl) = 0;
5173
5174 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5175 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5176 if (TREE_CODE (decl) == FIELD_DECL)
5177 {
5178 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5179 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5180 DECL_QUALIFIER (decl) = NULL_TREE;
5181 }
5182
5183 if (TREE_CODE (decl) == FUNCTION_DECL)
5184 {
5185 struct cgraph_node *node;
5186 if (!(node = cgraph_node::get (decl))
5187 || (!node->definition && !node->clones))
5188 {
5189 if (node)
5190 node->release_body ();
5191 else
5192 {
5193 release_function_body (decl);
5194 DECL_ARGUMENTS (decl) = NULL;
5195 DECL_RESULT (decl) = NULL;
5196 DECL_INITIAL (decl) = error_mark_node;
5197 }
5198 }
5199 if (gimple_has_body_p (decl))
5200 {
5201 tree t;
5202
5203 /* If DECL has a gimple body, then the context for its
5204 arguments must be DECL. Otherwise, it doesn't really
5205 matter, as we will not be emitting any code for DECL. In
5206 general, there may be other instances of DECL created by
5207 the front end and since PARM_DECLs are generally shared,
5208 their DECL_CONTEXT changes as the replicas of DECL are
5209 created. The only time where DECL_CONTEXT is important
5210 is for the FUNCTION_DECLs that have a gimple body (since
5211 the PARM_DECL will be used in the function's body). */
5212 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5213 DECL_CONTEXT (t) = decl;
5214 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5215 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5216 = target_option_default_node;
5217 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5218 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5219 = optimization_default_node;
5220 }
5221
5222 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5223 At this point, it is not needed anymore. */
5224 DECL_SAVED_TREE (decl) = NULL_TREE;
5225
5226 /* Clear the abstract origin if it refers to a method. Otherwise
5227 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5228 origin will not be output correctly. */
5229 if (DECL_ABSTRACT_ORIGIN (decl)
5230 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5231 && RECORD_OR_UNION_TYPE_P
5232 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5233 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5234
5235 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5236 DECL_VINDEX referring to itself into a vtable slot number as it
5237 should. Happens with functions that are copied and then forgotten
5238 about. Just clear it, it won't matter anymore. */
5239 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5240 DECL_VINDEX (decl) = NULL_TREE;
5241 }
5242 else if (TREE_CODE (decl) == VAR_DECL)
5243 {
5244 if ((DECL_EXTERNAL (decl)
5245 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5246 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5247 DECL_INITIAL (decl) = NULL_TREE;
5248 }
5249 else if (TREE_CODE (decl) == TYPE_DECL
5250 || TREE_CODE (decl) == FIELD_DECL)
5251 DECL_INITIAL (decl) = NULL_TREE;
5252 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5253 && DECL_INITIAL (decl)
5254 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5255 {
5256 /* Strip builtins from the translation-unit BLOCK. We still have targets
5257 without builtin_decl_explicit support and also builtins are shared
5258 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5259 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5260 while (*nextp)
5261 {
5262 tree var = *nextp;
5263 if (TREE_CODE (var) == FUNCTION_DECL
5264 && DECL_BUILT_IN (var))
5265 *nextp = TREE_CHAIN (var);
5266 else
5267 nextp = &TREE_CHAIN (var);
5268 }
5269 }
5270 }
5271
5272
5273 /* Data used when collecting DECLs and TYPEs for language data removal. */
5274
5275 struct free_lang_data_d
5276 {
5277 /* Worklist to avoid excessive recursion. */
5278 vec<tree> worklist;
5279
5280 /* Set of traversed objects. Used to avoid duplicate visits. */
5281 hash_set<tree> *pset;
5282
5283 /* Array of symbols to process with free_lang_data_in_decl. */
5284 vec<tree> decls;
5285
5286 /* Array of types to process with free_lang_data_in_type. */
5287 vec<tree> types;
5288 };
5289
5290
5291 /* Save all language fields needed to generate proper debug information
5292 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5293
5294 static void
5295 save_debug_info_for_decl (tree t)
5296 {
5297 /*struct saved_debug_info_d *sdi;*/
5298
5299 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5300
5301 /* FIXME. Partial implementation for saving debug info removed. */
5302 }
5303
5304
5305 /* Save all language fields needed to generate proper debug information
5306 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5307
5308 static void
5309 save_debug_info_for_type (tree t)
5310 {
5311 /*struct saved_debug_info_d *sdi;*/
5312
5313 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5314
5315 /* FIXME. Partial implementation for saving debug info removed. */
5316 }
5317
5318
5319 /* Add type or decl T to one of the lists of tree nodes that need their
5320 language data removed. The lists are held inside FLD. */
5321
5322 static void
5323 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5324 {
5325 if (DECL_P (t))
5326 {
5327 fld->decls.safe_push (t);
5328 if (debug_info_level > DINFO_LEVEL_TERSE)
5329 save_debug_info_for_decl (t);
5330 }
5331 else if (TYPE_P (t))
5332 {
5333 fld->types.safe_push (t);
5334 if (debug_info_level > DINFO_LEVEL_TERSE)
5335 save_debug_info_for_type (t);
5336 }
5337 else
5338 gcc_unreachable ();
5339 }
5340
5341 /* Push tree node T into FLD->WORKLIST. */
5342
5343 static inline void
5344 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5345 {
5346 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5347 fld->worklist.safe_push ((t));
5348 }
5349
5350
5351 /* Operand callback helper for free_lang_data_in_node. *TP is the
5352 subtree operand being considered. */
5353
5354 static tree
5355 find_decls_types_r (tree *tp, int *ws, void *data)
5356 {
5357 tree t = *tp;
5358 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5359
5360 if (TREE_CODE (t) == TREE_LIST)
5361 return NULL_TREE;
5362
5363 /* Language specific nodes will be removed, so there is no need
5364 to gather anything under them. */
5365 if (is_lang_specific (t))
5366 {
5367 *ws = 0;
5368 return NULL_TREE;
5369 }
5370
5371 if (DECL_P (t))
5372 {
5373 /* Note that walk_tree does not traverse every possible field in
5374 decls, so we have to do our own traversals here. */
5375 add_tree_to_fld_list (t, fld);
5376
5377 fld_worklist_push (DECL_NAME (t), fld);
5378 fld_worklist_push (DECL_CONTEXT (t), fld);
5379 fld_worklist_push (DECL_SIZE (t), fld);
5380 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5381
5382 /* We are going to remove everything under DECL_INITIAL for
5383 TYPE_DECLs. No point walking them. */
5384 if (TREE_CODE (t) != TYPE_DECL)
5385 fld_worklist_push (DECL_INITIAL (t), fld);
5386
5387 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5388 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5389
5390 if (TREE_CODE (t) == FUNCTION_DECL)
5391 {
5392 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5393 fld_worklist_push (DECL_RESULT (t), fld);
5394 }
5395 else if (TREE_CODE (t) == TYPE_DECL)
5396 {
5397 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5398 }
5399 else if (TREE_CODE (t) == FIELD_DECL)
5400 {
5401 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5402 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5403 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5404 fld_worklist_push (DECL_FCONTEXT (t), fld);
5405 }
5406
5407 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5408 && DECL_HAS_VALUE_EXPR_P (t))
5409 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5410
5411 if (TREE_CODE (t) != FIELD_DECL
5412 && TREE_CODE (t) != TYPE_DECL)
5413 fld_worklist_push (TREE_CHAIN (t), fld);
5414 *ws = 0;
5415 }
5416 else if (TYPE_P (t))
5417 {
5418 /* Note that walk_tree does not traverse every possible field in
5419 types, so we have to do our own traversals here. */
5420 add_tree_to_fld_list (t, fld);
5421
5422 if (!RECORD_OR_UNION_TYPE_P (t))
5423 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5424 fld_worklist_push (TYPE_SIZE (t), fld);
5425 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5426 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5427 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5428 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5429 fld_worklist_push (TYPE_NAME (t), fld);
5430 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5431 them and thus do not want to reach unused pointer types
5432 this way. */
5433 if (!POINTER_TYPE_P (t))
5434 fld_worklist_push (TYPE_MINVAL (t), fld);
5435 if (!RECORD_OR_UNION_TYPE_P (t))
5436 fld_worklist_push (TYPE_MAXVAL (t), fld);
5437 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5438 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5439 do not want to reach unused variants this way. */
5440 if (TYPE_CONTEXT (t))
5441 {
5442 tree ctx = TYPE_CONTEXT (t);
5443 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5444 So push that instead. */
5445 while (ctx && TREE_CODE (ctx) == BLOCK)
5446 ctx = BLOCK_SUPERCONTEXT (ctx);
5447 fld_worklist_push (ctx, fld);
5448 }
5449 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5450 want to reach unused types this way. */
5451
5452 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5453 {
5454 unsigned i;
5455 tree tem;
5456 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5457 fld_worklist_push (TREE_TYPE (tem), fld);
5458 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5459 if (tem
5460 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5461 && TREE_CODE (tem) == TREE_LIST)
5462 do
5463 {
5464 fld_worklist_push (TREE_VALUE (tem), fld);
5465 tem = TREE_CHAIN (tem);
5466 }
5467 while (tem);
5468 }
5469 if (RECORD_OR_UNION_TYPE_P (t))
5470 {
5471 tree tem;
5472 /* Push all TYPE_FIELDS - there can be interleaving interesting
5473 and non-interesting things. */
5474 tem = TYPE_FIELDS (t);
5475 while (tem)
5476 {
5477 if (TREE_CODE (tem) == FIELD_DECL
5478 || TREE_CODE (tem) == TYPE_DECL)
5479 fld_worklist_push (tem, fld);
5480 tem = TREE_CHAIN (tem);
5481 }
5482 }
5483
5484 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5485 *ws = 0;
5486 }
5487 else if (TREE_CODE (t) == BLOCK)
5488 {
5489 tree tem;
5490 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5491 fld_worklist_push (tem, fld);
5492 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5493 fld_worklist_push (tem, fld);
5494 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5495 }
5496
5497 if (TREE_CODE (t) != IDENTIFIER_NODE
5498 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5499 fld_worklist_push (TREE_TYPE (t), fld);
5500
5501 return NULL_TREE;
5502 }
5503
5504
5505 /* Find decls and types in T. */
5506
5507 static void
5508 find_decls_types (tree t, struct free_lang_data_d *fld)
5509 {
5510 while (1)
5511 {
5512 if (!fld->pset->contains (t))
5513 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5514 if (fld->worklist.is_empty ())
5515 break;
5516 t = fld->worklist.pop ();
5517 }
5518 }
5519
5520 /* Translate all the types in LIST into the corresponding runtime
5521 types. */
5522
5523 static tree
5524 get_eh_types_for_runtime (tree list)
5525 {
5526 tree head, prev;
5527
5528 if (list == NULL_TREE)
5529 return NULL_TREE;
5530
5531 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5532 prev = head;
5533 list = TREE_CHAIN (list);
5534 while (list)
5535 {
5536 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5537 TREE_CHAIN (prev) = n;
5538 prev = TREE_CHAIN (prev);
5539 list = TREE_CHAIN (list);
5540 }
5541
5542 return head;
5543 }
5544
5545
5546 /* Find decls and types referenced in EH region R and store them in
5547 FLD->DECLS and FLD->TYPES. */
5548
5549 static void
5550 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5551 {
5552 switch (r->type)
5553 {
5554 case ERT_CLEANUP:
5555 break;
5556
5557 case ERT_TRY:
5558 {
5559 eh_catch c;
5560
5561 /* The types referenced in each catch must first be changed to the
5562 EH types used at runtime. This removes references to FE types
5563 in the region. */
5564 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5565 {
5566 c->type_list = get_eh_types_for_runtime (c->type_list);
5567 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5568 }
5569 }
5570 break;
5571
5572 case ERT_ALLOWED_EXCEPTIONS:
5573 r->u.allowed.type_list
5574 = get_eh_types_for_runtime (r->u.allowed.type_list);
5575 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5576 break;
5577
5578 case ERT_MUST_NOT_THROW:
5579 walk_tree (&r->u.must_not_throw.failure_decl,
5580 find_decls_types_r, fld, fld->pset);
5581 break;
5582 }
5583 }
5584
5585
5586 /* Find decls and types referenced in cgraph node N and store them in
5587 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5588 look for *every* kind of DECL and TYPE node reachable from N,
5589 including those embedded inside types and decls (i.e., TYPE_DECLs,
5590 NAMESPACE_DECLs, etc). */
5591
5592 static void
5593 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5594 {
5595 basic_block bb;
5596 struct function *fn;
5597 unsigned ix;
5598 tree t;
5599
5600 find_decls_types (n->decl, fld);
5601
5602 if (!gimple_has_body_p (n->decl))
5603 return;
5604
5605 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5606
5607 fn = DECL_STRUCT_FUNCTION (n->decl);
5608
5609 /* Traverse locals. */
5610 FOR_EACH_LOCAL_DECL (fn, ix, t)
5611 find_decls_types (t, fld);
5612
5613 /* Traverse EH regions in FN. */
5614 {
5615 eh_region r;
5616 FOR_ALL_EH_REGION_FN (r, fn)
5617 find_decls_types_in_eh_region (r, fld);
5618 }
5619
5620 /* Traverse every statement in FN. */
5621 FOR_EACH_BB_FN (bb, fn)
5622 {
5623 gphi_iterator psi;
5624 gimple_stmt_iterator si;
5625 unsigned i;
5626
5627 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5628 {
5629 gphi *phi = psi.phi ();
5630
5631 for (i = 0; i < gimple_phi_num_args (phi); i++)
5632 {
5633 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5634 find_decls_types (*arg_p, fld);
5635 }
5636 }
5637
5638 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5639 {
5640 gimple stmt = gsi_stmt (si);
5641
5642 if (is_gimple_call (stmt))
5643 find_decls_types (gimple_call_fntype (stmt), fld);
5644
5645 for (i = 0; i < gimple_num_ops (stmt); i++)
5646 {
5647 tree arg = gimple_op (stmt, i);
5648 find_decls_types (arg, fld);
5649 }
5650 }
5651 }
5652 }
5653
5654
5655 /* Find decls and types referenced in varpool node N and store them in
5656 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5657 look for *every* kind of DECL and TYPE node reachable from N,
5658 including those embedded inside types and decls (i.e., TYPE_DECLs,
5659 NAMESPACE_DECLs, etc). */
5660
5661 static void
5662 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5663 {
5664 find_decls_types (v->decl, fld);
5665 }
5666
5667 /* If T needs an assembler name, have one created for it. */
5668
5669 void
5670 assign_assembler_name_if_neeeded (tree t)
5671 {
5672 if (need_assembler_name_p (t))
5673 {
5674 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5675 diagnostics that use input_location to show locus
5676 information. The problem here is that, at this point,
5677 input_location is generally anchored to the end of the file
5678 (since the parser is long gone), so we don't have a good
5679 position to pin it to.
5680
5681 To alleviate this problem, this uses the location of T's
5682 declaration. Examples of this are
5683 testsuite/g++.dg/template/cond2.C and
5684 testsuite/g++.dg/template/pr35240.C. */
5685 location_t saved_location = input_location;
5686 input_location = DECL_SOURCE_LOCATION (t);
5687
5688 decl_assembler_name (t);
5689
5690 input_location = saved_location;
5691 }
5692 }
5693
5694
5695 /* Free language specific information for every operand and expression
5696 in every node of the call graph. This process operates in three stages:
5697
5698 1- Every callgraph node and varpool node is traversed looking for
5699 decls and types embedded in them. This is a more exhaustive
5700 search than that done by find_referenced_vars, because it will
5701 also collect individual fields, decls embedded in types, etc.
5702
5703 2- All the decls found are sent to free_lang_data_in_decl.
5704
5705 3- All the types found are sent to free_lang_data_in_type.
5706
5707 The ordering between decls and types is important because
5708 free_lang_data_in_decl sets assembler names, which includes
5709 mangling. So types cannot be freed up until assembler names have
5710 been set up. */
5711
5712 static void
5713 free_lang_data_in_cgraph (void)
5714 {
5715 struct cgraph_node *n;
5716 varpool_node *v;
5717 struct free_lang_data_d fld;
5718 tree t;
5719 unsigned i;
5720 alias_pair *p;
5721
5722 /* Initialize sets and arrays to store referenced decls and types. */
5723 fld.pset = new hash_set<tree>;
5724 fld.worklist.create (0);
5725 fld.decls.create (100);
5726 fld.types.create (100);
5727
5728 /* Find decls and types in the body of every function in the callgraph. */
5729 FOR_EACH_FUNCTION (n)
5730 find_decls_types_in_node (n, &fld);
5731
5732 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5733 find_decls_types (p->decl, &fld);
5734
5735 /* Find decls and types in every varpool symbol. */
5736 FOR_EACH_VARIABLE (v)
5737 find_decls_types_in_var (v, &fld);
5738
5739 /* Set the assembler name on every decl found. We need to do this
5740 now because free_lang_data_in_decl will invalidate data needed
5741 for mangling. This breaks mangling on interdependent decls. */
5742 FOR_EACH_VEC_ELT (fld.decls, i, t)
5743 assign_assembler_name_if_neeeded (t);
5744
5745 /* Traverse every decl found freeing its language data. */
5746 FOR_EACH_VEC_ELT (fld.decls, i, t)
5747 free_lang_data_in_decl (t);
5748
5749 /* Traverse every type found freeing its language data. */
5750 FOR_EACH_VEC_ELT (fld.types, i, t)
5751 free_lang_data_in_type (t);
5752
5753 delete fld.pset;
5754 fld.worklist.release ();
5755 fld.decls.release ();
5756 fld.types.release ();
5757 }
5758
5759
5760 /* Free resources that are used by the front end but are not needed once it is done. */
5761
5762 static unsigned
5763 free_lang_data (void)
5764 {
5765 unsigned i;
5766
5767 /* If we are the LTO frontend we have freed lang-specific data already. */
5768 if (in_lto_p
5769 || (!flag_generate_lto && !flag_generate_offload))
5770 return 0;
5771
5772 /* Allocate and assign alias sets to the standard integer types
5773 while the slots are still set up the way the front ends generated them. */
5774 for (i = 0; i < itk_none; ++i)
5775 if (integer_types[i])
5776 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5777
5778 /* Traverse the IL resetting language specific information for
5779 operands, expressions, etc. */
5780 free_lang_data_in_cgraph ();
5781
5782 /* Create gimple variants for common types. */
5783 ptrdiff_type_node = integer_type_node;
5784 fileptr_type_node = ptr_type_node;
5785
5786 /* Reset some langhooks. Do not reset types_compatible_p, it may
5787 still be used indirectly via the get_alias_set langhook. */
5788 lang_hooks.dwarf_name = lhd_dwarf_name;
5789 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5790 /* We do not want the default decl_assembler_name implementation.
5791 Rather, once everything is fixed, we want a wrapper around it that
5792 asserts all non-local symbols already got their assembler name and
5793 that only produces assembler names for local symbols. Or, better,
5794 make sure we never call decl_assembler_name on local symbols and
5795 devise a separate, middle-end-private scheme for it. */
5796
5797 /* Reset diagnostic machinery. */
5798 tree_diagnostics_defaults (global_dc);
5799
5800 return 0;
5801 }
5802
5803
5804 namespace {
5805
5806 const pass_data pass_data_ipa_free_lang_data =
5807 {
5808 SIMPLE_IPA_PASS, /* type */
5809 "*free_lang_data", /* name */
5810 OPTGROUP_NONE, /* optinfo_flags */
5811 TV_IPA_FREE_LANG_DATA, /* tv_id */
5812 0, /* properties_required */
5813 0, /* properties_provided */
5814 0, /* properties_destroyed */
5815 0, /* todo_flags_start */
5816 0, /* todo_flags_finish */
5817 };
5818
5819 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5820 {
5821 public:
5822 pass_ipa_free_lang_data (gcc::context *ctxt)
5823 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5824 {}
5825
5826 /* opt_pass methods: */
5827 virtual unsigned int execute (function *) { return free_lang_data (); }
5828
5829 }; // class pass_ipa_free_lang_data
5830
5831 } // anon namespace
5832
5833 simple_ipa_opt_pass *
5834 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5835 {
5836 return new pass_ipa_free_lang_data (ctxt);
5837 }
5838
5839 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5840 ATTR_NAME. Also used internally by remove_attribute(). */
5841 bool
5842 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5843 {
5844 size_t ident_len = IDENTIFIER_LENGTH (ident);
5845
5846 if (ident_len == attr_len)
5847 {
5848 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5849 return true;
5850 }
5851 else if (ident_len == attr_len + 4)
5852 {
5853 /* There is the possibility that ATTR is 'text' and IDENT is
5854 '__text__'. */
5855 const char *p = IDENTIFIER_POINTER (ident);
5856 if (p[0] == '_' && p[1] == '_'
5857 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5858 && strncmp (attr_name, p + 2, attr_len) == 0)
5859 return true;
5860 }
5861
5862 return false;
5863 }
5864
5865 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5866 of ATTR_NAME, and LIST is not NULL_TREE. */
5867 tree
5868 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5869 {
5870 while (list)
5871 {
5872 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5873
5874 if (ident_len == attr_len)
5875 {
5876 if (!strcmp (attr_name,
5877 IDENTIFIER_POINTER (get_attribute_name (list))))
5878 break;
5879 }
5880 /* TODO: If we made sure that attributes were stored in the
5881 canonical form without '__...__' (i.e., as in 'text' as opposed
5882 to '__text__') then we could avoid the following case. */
5883 else if (ident_len == attr_len + 4)
5884 {
5885 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5886 if (p[0] == '_' && p[1] == '_'
5887 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5888 && strncmp (attr_name, p + 2, attr_len) == 0)
5889 break;
5890 }
5891 list = TREE_CHAIN (list);
5892 }
5893
5894 return list;
5895 }
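
/* Usage sketch (editorial; the predicate is hypothetical): the public
   lookup_attribute wrapper in tree.h passes the strlen of the canonical
   'text' spelling down to private_lookup_attribute above, so both "used"
   and "__used__" on the decl match.  */
#if 0
static bool
sketch_decl_has_used_attribute_p (tree decl)
{
  return lookup_attribute ("used", DECL_ATTRIBUTES (decl)) != NULL_TREE;
}
#endif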
5896
5897 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5898 return a pointer to the first list element whose attribute name
5899 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5900 '__text__'). */
5901
5902 tree
5903 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5904 tree list)
5905 {
5906 while (list)
5907 {
5908 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5909
5910 if (attr_len > ident_len)
5911 {
5912 list = TREE_CHAIN (list);
5913 continue;
5914 }
5915
5916 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5917
5918 if (strncmp (attr_name, p, attr_len) == 0)
5919 break;
5920
5921	    /* TODO: If we made sure that attributes were stored in the
5922	       canonical form without '__...__' (i.e., as in 'text' as opposed
5923	       to '__text__') then we could avoid the following case.  */
5924	    if (p[0] == '_' && p[1] == '_'
5925		&& strncmp (attr_name, p + 2, attr_len) == 0)
5926 break;
5927
5928 list = TREE_CHAIN (list);
5929 }
5930
5931 return list;
5932 }
5933
5934
5935 /* A variant of lookup_attribute() that can be used with an identifier
5936 as the first argument, and where the identifier can be either
5937 'text' or '__text__'.
5938
5939 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5940 return a pointer to the attribute's list element if the attribute
5941 is part of the list, or NULL_TREE if not found. If the attribute
5942 appears more than once, this only returns the first occurrence; the
5943 TREE_CHAIN of the return value should be passed back in if further
5944 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5945 can be in the form 'text' or '__text__'. */
5946 static tree
5947 lookup_ident_attribute (tree attr_identifier, tree list)
5948 {
5949 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5950
5951 while (list)
5952 {
5953 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5954 == IDENTIFIER_NODE);
5955
5956 /* Identifiers can be compared directly for equality. */
5957 if (attr_identifier == get_attribute_name (list))
5958 break;
5959
5960 /* If they are not equal, they may still be one in the form
5961 'text' while the other one is in the form '__text__'. TODO:
5962 If we were storing attributes in normalized 'text' form, then
5963 this could all go away and we could take full advantage of
5964 the fact that we're comparing identifiers. :-) */
5965 {
5966 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5967 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5968
5969 if (ident_len == attr_len + 4)
5970 {
5971 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5972 const char *q = IDENTIFIER_POINTER (attr_identifier);
5973 if (p[0] == '_' && p[1] == '_'
5974 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5975 && strncmp (q, p + 2, attr_len) == 0)
5976 break;
5977 }
5978 else if (ident_len + 4 == attr_len)
5979 {
5980 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5981 const char *q = IDENTIFIER_POINTER (attr_identifier);
5982 if (q[0] == '_' && q[1] == '_'
5983 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5984 && strncmp (q + 2, p, ident_len) == 0)
5985 break;
5986 }
5987 }
5988 list = TREE_CHAIN (list);
5989 }
5990
5991 return list;
5992 }
5993
5994 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5995 modified list. */
5996
5997 tree
5998 remove_attribute (const char *attr_name, tree list)
5999 {
6000 tree *p;
6001 size_t attr_len = strlen (attr_name);
6002
6003 gcc_checking_assert (attr_name[0] != '_');
6004
6005 for (p = &list; *p; )
6006 {
6007 tree l = *p;
6008 /* TODO: If we were storing attributes in normalized form, here
6009 we could use a simple strcmp(). */
6010 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6011 *p = TREE_CHAIN (l);
6012 else
6013 p = &TREE_CHAIN (l);
6014 }
6015
6016 return list;
6017 }
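
/* Usage sketch (DECL here stands for a hypothetical declaration, not one
   of the trees above):

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("dllimport", DECL_ATTRIBUTES (decl));

   The list is rewritten in place and the (possibly new) list head is
   returned, so the result must be stored back.  */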
6018
6019 /* Return an attribute list that is the union of a1 and a2. */
6020
6021 tree
6022 merge_attributes (tree a1, tree a2)
6023 {
6024 tree attributes;
6025
6026 /* Either one unset? Take the set one. */
6027
6028 if ((attributes = a1) == 0)
6029 attributes = a2;
6030
6031 /* One that completely contains the other? Take it. */
6032
6033 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6034 {
6035 if (attribute_list_contained (a2, a1))
6036 attributes = a2;
6037 else
6038 {
6039 /* Pick the longest list, and hang on the other list. */
6040
6041 if (list_length (a1) < list_length (a2))
6042 attributes = a2, a2 = a1;
6043
6044 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6045 {
6046 tree a;
6047 for (a = lookup_ident_attribute (get_attribute_name (a2),
6048 attributes);
6049 a != NULL_TREE && !attribute_value_equal (a, a2);
6050 a = lookup_ident_attribute (get_attribute_name (a2),
6051 TREE_CHAIN (a)))
6052 ;
6053 if (a == NULL_TREE)
6054 {
6055 a1 = copy_node (a2);
6056 TREE_CHAIN (a1) = attributes;
6057 attributes = a1;
6058 }
6059 }
6060 }
6061 }
6062 return attributes;
6063 }
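
/* For example (hypothetical lists): if A1 is ((noreturn)) and A2 is
   ((noreturn) (malloc)), the result is A2, since A2 already contains A1.
   When neither list contains the other, entries of the shorter list that
   have no equal-valued counterpart in the longer one are copied onto its
   front, so no distinct attribute/value pair is lost.  */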
6064
6065 /* Given types T1 and T2, merge their attributes and return
6066 the result. */
6067
6068 tree
6069 merge_type_attributes (tree t1, tree t2)
6070 {
6071 return merge_attributes (TYPE_ATTRIBUTES (t1),
6072 TYPE_ATTRIBUTES (t2));
6073 }
6074
6075 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6076 the result. */
6077
6078 tree
6079 merge_decl_attributes (tree olddecl, tree newdecl)
6080 {
6081 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6082 DECL_ATTRIBUTES (newdecl));
6083 }
6084
6085 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6086
6087 /* Specialization of merge_decl_attributes for various Windows targets.
6088
6089 This handles the following situation:
6090
6091 __declspec (dllimport) int foo;
6092 int foo;
6093
6094 The second instance of `foo' nullifies the dllimport. */
6095
6096 tree
6097 merge_dllimport_decl_attributes (tree old, tree new_tree)
6098 {
6099 tree a;
6100 int delete_dllimport_p = 1;
6101
6102	  /* What we need to do here is remove dllimport from `old' if it doesn't
6103	     appear in `new'.  dllimport behaves like extern: if a declaration is
6104	     marked dllimport and a definition appears later, then the object
6105	     is not dllimport'd.  We also remove dllimport from `new' if the old
6106	     list contains dllexport: dllexport always overrides dllimport,
6107	     regardless of the order of declaration.  */
6108 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6109 delete_dllimport_p = 0;
6110 else if (DECL_DLLIMPORT_P (new_tree)
6111 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6112 {
6113 DECL_DLLIMPORT_P (new_tree) = 0;
6114 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6115 "dllimport ignored", new_tree);
6116 }
6117 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6118 {
6119 /* Warn about overriding a symbol that has already been used, e.g.:
6120 extern int __attribute__ ((dllimport)) foo;
6121 int* bar () {return &foo;}
6122 int foo;
6123 */
6124 if (TREE_USED (old))
6125 {
6126 warning (0, "%q+D redeclared without dllimport attribute "
6127 "after being referenced with dll linkage", new_tree);
6128 /* If we have used a variable's address with dllimport linkage,
6129 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6130 decl may already have had TREE_CONSTANT computed.
6131	     We still remove the attribute so that assembler code refers
6132	     to '&foo' rather than '_imp__foo'.  */
6133 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6134 DECL_DLLIMPORT_P (new_tree) = 1;
6135 }
6136
6137 /* Let an inline definition silently override the external reference,
6138 but otherwise warn about attribute inconsistency. */
6139 else if (TREE_CODE (new_tree) == VAR_DECL
6140 || !DECL_DECLARED_INLINE_P (new_tree))
6141 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6142 "previous dllimport ignored", new_tree);
6143 }
6144 else
6145 delete_dllimport_p = 0;
6146
6147 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6148
6149 if (delete_dllimport_p)
6150 a = remove_attribute ("dllimport", a);
6151
6152 return a;
6153 }
6154
6155 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6156 struct attribute_spec.handler. */
6157
6158 tree
6159 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6160 bool *no_add_attrs)
6161 {
6162 tree node = *pnode;
6163 bool is_dllimport;
6164
6165 /* These attributes may apply to structure and union types being created,
6166 but otherwise should pass to the declaration involved. */
6167 if (!DECL_P (node))
6168 {
6169 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6170 | (int) ATTR_FLAG_ARRAY_NEXT))
6171 {
6172 *no_add_attrs = true;
6173 return tree_cons (name, args, NULL_TREE);
6174 }
6175 if (TREE_CODE (node) == RECORD_TYPE
6176 || TREE_CODE (node) == UNION_TYPE)
6177 {
6178 node = TYPE_NAME (node);
6179 if (!node)
6180 return NULL_TREE;
6181 }
6182 else
6183 {
6184 warning (OPT_Wattributes, "%qE attribute ignored",
6185 name);
6186 *no_add_attrs = true;
6187 return NULL_TREE;
6188 }
6189 }
6190
6191 if (TREE_CODE (node) != FUNCTION_DECL
6192 && TREE_CODE (node) != VAR_DECL
6193 && TREE_CODE (node) != TYPE_DECL)
6194 {
6195 *no_add_attrs = true;
6196 warning (OPT_Wattributes, "%qE attribute ignored",
6197 name);
6198 return NULL_TREE;
6199 }
6200
6201 if (TREE_CODE (node) == TYPE_DECL
6202 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6203 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6204 {
6205 *no_add_attrs = true;
6206 warning (OPT_Wattributes, "%qE attribute ignored",
6207 name);
6208 return NULL_TREE;
6209 }
6210
6211 is_dllimport = is_attribute_p ("dllimport", name);
6212
6213 /* Report error on dllimport ambiguities seen now before they cause
6214 any damage. */
6215 if (is_dllimport)
6216 {
6217 /* Honor any target-specific overrides. */
6218 if (!targetm.valid_dllimport_attribute_p (node))
6219 *no_add_attrs = true;
6220
6221 else if (TREE_CODE (node) == FUNCTION_DECL
6222 && DECL_DECLARED_INLINE_P (node))
6223 {
6224	  warning (OPT_Wattributes, "inline function %q+D declared as "
6225		   "dllimport: attribute ignored", node);
6226 *no_add_attrs = true;
6227 }
6228 /* Like MS, treat definition of dllimported variables and
6229 non-inlined functions on declaration as syntax errors. */
6230 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6231 {
6232 error ("function %q+D definition is marked dllimport", node);
6233 *no_add_attrs = true;
6234 }
6235
6236 else if (TREE_CODE (node) == VAR_DECL)
6237 {
6238 if (DECL_INITIAL (node))
6239 {
6240 error ("variable %q+D definition is marked dllimport",
6241 node);
6242 *no_add_attrs = true;
6243 }
6244
6245 /* `extern' needn't be specified with dllimport.
6246 Specify `extern' now and hope for the best. Sigh. */
6247 DECL_EXTERNAL (node) = 1;
6248 /* Also, implicitly give dllimport'd variables declared within
6249 a function global scope, unless declared static. */
6250 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6251 TREE_PUBLIC (node) = 1;
6252 }
6253
6254 if (*no_add_attrs == false)
6255 DECL_DLLIMPORT_P (node) = 1;
6256 }
6257 else if (TREE_CODE (node) == FUNCTION_DECL
6258 && DECL_DECLARED_INLINE_P (node)
6259 && flag_keep_inline_dllexport)
6260 /* An exported function, even if inline, must be emitted. */
6261 DECL_EXTERNAL (node) = 0;
6262
6263 /* Report error if symbol is not accessible at global scope. */
6264 if (!TREE_PUBLIC (node)
6265 && (TREE_CODE (node) == VAR_DECL
6266 || TREE_CODE (node) == FUNCTION_DECL))
6267 {
6268 error ("external linkage required for symbol %q+D because of "
6269 "%qE attribute", node, name);
6270 *no_add_attrs = true;
6271 }
6272
6273 /* A dllexport'd entity must have default visibility so that other
6274 program units (shared libraries or the main executable) can see
6275 it. A dllimport'd entity must have default visibility so that
6276 the linker knows that undefined references within this program
6277 unit can be resolved by the dynamic linker. */
6278 if (!*no_add_attrs)
6279 {
6280 if (DECL_VISIBILITY_SPECIFIED (node)
6281 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6282 error ("%qE implies default visibility, but %qD has already "
6283 "been declared with a different visibility",
6284 name, node);
6285 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6286 DECL_VISIBILITY_SPECIFIED (node) = 1;
6287 }
6288
6289 return NULL_TREE;
6290 }
6291
6292 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6293 \f
6294 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6295 of the various TYPE_QUAL values. */
6296
6297 static void
6298 set_type_quals (tree type, int type_quals)
6299 {
6300 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6301 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6302 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6303 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6304 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6305 }
6306
6307 /* Returns true iff unqualified CAND and BASE are equivalent. */
6308
6309 bool
6310 check_base_type (const_tree cand, const_tree base)
6311 {
6312 return (TYPE_NAME (cand) == TYPE_NAME (base)
6313 /* Apparently this is needed for Objective-C. */
6314 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6315 /* Check alignment. */
6316 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6317 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6318 TYPE_ATTRIBUTES (base)));
6319 }
6320
6321 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6322
6323 bool
6324 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6325 {
6326 return (TYPE_QUALS (cand) == type_quals
6327 && check_base_type (cand, base));
6328 }
6329
6330 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6331
6332 static bool
6333 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6334 {
6335 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6336 && TYPE_NAME (cand) == TYPE_NAME (base)
6337 /* Apparently this is needed for Objective-C. */
6338 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6339 /* Check alignment. */
6340 && TYPE_ALIGN (cand) == align
6341 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6342 TYPE_ATTRIBUTES (base)));
6343 }
6344
6345	/* Check whether TYPE matches the size of one of the built-in atomic
6346	   types, and return that core atomic type if so.  */
6347
6348 static tree
6349 find_atomic_core_type (tree type)
6350 {
6351 tree base_atomic_type;
6352
6353 /* Only handle complete types. */
6354 if (TYPE_SIZE (type) == NULL_TREE)
6355 return NULL_TREE;
6356
6357 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6358 switch (type_size)
6359 {
6360 case 8:
6361 base_atomic_type = atomicQI_type_node;
6362 break;
6363
6364 case 16:
6365 base_atomic_type = atomicHI_type_node;
6366 break;
6367
6368 case 32:
6369 base_atomic_type = atomicSI_type_node;
6370 break;
6371
6372 case 64:
6373 base_atomic_type = atomicDI_type_node;
6374 break;
6375
6376 case 128:
6377 base_atomic_type = atomicTI_type_node;
6378 break;
6379
6380 default:
6381 base_atomic_type = NULL_TREE;
6382 }
6383
6384 return base_atomic_type;
6385 }
6386
6387 /* Return a version of the TYPE, qualified as indicated by the
6388 TYPE_QUALS, if one exists. If no qualified version exists yet,
6389 return NULL_TREE. */
6390
6391 tree
6392 get_qualified_type (tree type, int type_quals)
6393 {
6394 tree t;
6395
6396 if (TYPE_QUALS (type) == type_quals)
6397 return type;
6398
6399 /* Search the chain of variants to see if there is already one there just
6400 like the one we need to have. If so, use that existing one. We must
6401 preserve the TYPE_NAME, since there is code that depends on this. */
6402 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6403 if (check_qualified_type (t, type, type_quals))
6404 return t;
6405
6406 return NULL_TREE;
6407 }
6408
6409 /* Like get_qualified_type, but creates the type if it does not
6410 exist. This function never returns NULL_TREE. */
6411
6412 tree
6413 build_qualified_type (tree type, int type_quals)
6414 {
6415 tree t;
6416
6417 /* See if we already have the appropriate qualified variant. */
6418 t = get_qualified_type (type, type_quals);
6419
6420 /* If not, build it. */
6421 if (!t)
6422 {
6423 t = build_variant_type_copy (type);
6424 set_type_quals (t, type_quals);
6425
6426	      if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6427 {
6428 /* See if this object can map to a basic atomic type. */
6429 tree atomic_type = find_atomic_core_type (type);
6430 if (atomic_type)
6431 {
6432 /* Ensure the alignment of this type is compatible with
6433 the required alignment of the atomic type. */
6434 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6435 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6436 }
6437 }
6438
6439 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6440 /* Propagate structural equality. */
6441 SET_TYPE_STRUCTURAL_EQUALITY (t);
6442 else if (TYPE_CANONICAL (type) != type)
6443 /* Build the underlying canonical type, since it is different
6444 from TYPE. */
6445 {
6446 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6447 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6448 }
6449 else
6450 /* T is its own canonical type. */
6451 TYPE_CANONICAL (t) = t;
6452
6453 }
6454
6455 return t;
6456 }
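
/* Usage sketch: a "const volatile" qualified variant of a hypothetical
   TYPE would be obtained with

     tree cv = build_qualified_type (type,
				     TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   An existing variant on TYPE's variant chain is reused when possible;
   otherwise build_variant_type_copy creates and links in a new one.  */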
6457
6458	/* Create a variant of TYPE with alignment ALIGN.  */
6459
6460 tree
6461 build_aligned_type (tree type, unsigned int align)
6462 {
6463 tree t;
6464
6465 if (TYPE_PACKED (type)
6466 || TYPE_ALIGN (type) == align)
6467 return type;
6468
6469 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6470 if (check_aligned_type (t, type, align))
6471 return t;
6472
6473 t = build_variant_type_copy (type);
6474 TYPE_ALIGN (t) = align;
6475
6476 return t;
6477 }
6478
6479 /* Create a new distinct copy of TYPE. The new type is made its own
6480 MAIN_VARIANT. If TYPE requires structural equality checks, the
6481 resulting type requires structural equality checks; otherwise, its
6482 TYPE_CANONICAL points to itself. */
6483
6484 tree
6485 build_distinct_type_copy (tree type)
6486 {
6487 tree t = copy_node (type);
6488
6489 TYPE_POINTER_TO (t) = 0;
6490 TYPE_REFERENCE_TO (t) = 0;
6491
6492 /* Set the canonical type either to a new equivalence class, or
6493 propagate the need for structural equality checks. */
6494 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6495 SET_TYPE_STRUCTURAL_EQUALITY (t);
6496 else
6497 TYPE_CANONICAL (t) = t;
6498
6499 /* Make it its own variant. */
6500 TYPE_MAIN_VARIANT (t) = t;
6501 TYPE_NEXT_VARIANT (t) = 0;
6502
6503 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6504 whose TREE_TYPE is not t. This can also happen in the Ada
6505 frontend when using subtypes. */
6506
6507 return t;
6508 }
6509
6510 /* Create a new variant of TYPE, equivalent but distinct. This is so
6511 the caller can modify it. TYPE_CANONICAL for the return type will
6512 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6513 are considered equal by the language itself (or that both types
6514 require structural equality checks). */
6515
6516 tree
6517 build_variant_type_copy (tree type)
6518 {
6519 tree t, m = TYPE_MAIN_VARIANT (type);
6520
6521 t = build_distinct_type_copy (type);
6522
6523 /* Since we're building a variant, assume that it is a non-semantic
6524 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6525 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6526
6527 /* Add the new type to the chain of variants of TYPE. */
6528 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6529 TYPE_NEXT_VARIANT (m) = t;
6530 TYPE_MAIN_VARIANT (t) = m;
6531
6532 return t;
6533 }
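
/* To contrast the two copy routines above (for a hypothetical TYPE):

     tree d = build_distinct_type_copy (type);
     tree v = build_variant_type_copy (type);

   D becomes its own main variant and its own canonical type (a new
   equivalence class), whereas V is appended to TYPE's variant chain and
   shares TYPE's TYPE_CANONICAL.  */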
6534 \f
6535	/* Return true if the FROM trees of the two tree maps are equal.  */
6536
6537 int
6538 tree_map_base_eq (const void *va, const void *vb)
6539 {
6540 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6541 *const b = (const struct tree_map_base *) vb;
6542 return (a->from == b->from);
6543 }
6544
6545	/* Hash a FROM tree in a tree_map_base.  */
6546
6547 unsigned int
6548 tree_map_base_hash (const void *item)
6549 {
6550 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6551 }
6552
6553 /* Return true if this tree map structure is marked for garbage collection
6554 purposes. We simply return true if the from tree is marked, so that this
6555 structure goes away when the from tree goes away. */
6556
6557 int
6558 tree_map_base_marked_p (const void *p)
6559 {
6560 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6561 }
6562
6563 /* Hash a from tree in a tree_map. */
6564
6565 unsigned int
6566 tree_map_hash (const void *item)
6567 {
6568 return (((const struct tree_map *) item)->hash);
6569 }
6570
6571 /* Hash a from tree in a tree_decl_map. */
6572
6573 unsigned int
6574 tree_decl_map_hash (const void *item)
6575 {
6576 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6577 }
6578
6579 /* Return the initialization priority for DECL. */
6580
6581 priority_type
6582 decl_init_priority_lookup (tree decl)
6583 {
6584 symtab_node *snode = symtab_node::get (decl);
6585
6586 if (!snode)
6587 return DEFAULT_INIT_PRIORITY;
6588	  return snode->get_init_priority ();
6590 }
6591
6592 /* Return the finalization priority for DECL. */
6593
6594 priority_type
6595 decl_fini_priority_lookup (tree decl)
6596 {
6597 cgraph_node *node = cgraph_node::get (decl);
6598
6599 if (!node)
6600 return DEFAULT_INIT_PRIORITY;
6601	  return node->get_fini_priority ();
6603 }
6604
6605 /* Set the initialization priority for DECL to PRIORITY. */
6606
6607 void
6608 decl_init_priority_insert (tree decl, priority_type priority)
6609 {
6610 struct symtab_node *snode;
6611
6612 if (priority == DEFAULT_INIT_PRIORITY)
6613 {
6614 snode = symtab_node::get (decl);
6615 if (!snode)
6616 return;
6617 }
6618 else if (TREE_CODE (decl) == VAR_DECL)
6619 snode = varpool_node::get_create (decl);
6620 else
6621 snode = cgraph_node::get_create (decl);
6622 snode->set_init_priority (priority);
6623 }
6624
6625 /* Set the finalization priority for DECL to PRIORITY. */
6626
6627 void
6628 decl_fini_priority_insert (tree decl, priority_type priority)
6629 {
6630 struct cgraph_node *node;
6631
6632 if (priority == DEFAULT_INIT_PRIORITY)
6633 {
6634 node = cgraph_node::get (decl);
6635 if (!node)
6636 return;
6637 }
6638 else
6639 node = cgraph_node::get_create (decl);
6640 node->set_fini_priority (priority);
6641 }
6642
6643 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6644
6645 static void
6646 print_debug_expr_statistics (void)
6647 {
6648 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6649 (long) debug_expr_for_decl->size (),
6650 (long) debug_expr_for_decl->elements (),
6651 debug_expr_for_decl->collisions ());
6652 }
6653
6654 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6655
6656 static void
6657 print_value_expr_statistics (void)
6658 {
6659 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6660 (long) value_expr_for_decl->size (),
6661 (long) value_expr_for_decl->elements (),
6662 value_expr_for_decl->collisions ());
6663 }
6664
6665 /* Lookup a debug expression for FROM, and return it if we find one. */
6666
6667 tree
6668 decl_debug_expr_lookup (tree from)
6669 {
6670 struct tree_decl_map *h, in;
6671 in.base.from = from;
6672
6673 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6674 if (h)
6675 return h->to;
6676 return NULL_TREE;
6677 }
6678
6679 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6680
6681 void
6682 decl_debug_expr_insert (tree from, tree to)
6683 {
6684 struct tree_decl_map *h;
6685
6686 h = ggc_alloc<tree_decl_map> ();
6687 h->base.from = from;
6688 h->to = to;
6689 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6690 }
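
/* The lookup/insert pair above implements a simple map keyed on DECL_UID,
   e.g. for hypothetical trees VAR and EXPR:

     decl_debug_expr_insert (var, expr);
     gcc_assert (decl_debug_expr_lookup (var) == expr);  */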
6691
6692 /* Lookup a value expression for FROM, and return it if we find one. */
6693
6694 tree
6695 decl_value_expr_lookup (tree from)
6696 {
6697 struct tree_decl_map *h, in;
6698 in.base.from = from;
6699
6700 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6701 if (h)
6702 return h->to;
6703 return NULL_TREE;
6704 }
6705
6706 /* Insert a mapping FROM->TO in the value expression hashtable. */
6707
6708 void
6709 decl_value_expr_insert (tree from, tree to)
6710 {
6711 struct tree_decl_map *h;
6712
6713 h = ggc_alloc<tree_decl_map> ();
6714 h->base.from = from;
6715 h->to = to;
6716 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6717 }
6718
6719 /* Lookup a vector of debug arguments for FROM, and return it if we
6720 find one. */
6721
6722 vec<tree, va_gc> **
6723 decl_debug_args_lookup (tree from)
6724 {
6725 struct tree_vec_map *h, in;
6726
6727 if (!DECL_HAS_DEBUG_ARGS_P (from))
6728 return NULL;
6729 gcc_checking_assert (debug_args_for_decl != NULL);
6730 in.base.from = from;
6731 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6732 if (h)
6733 return &h->to;
6734 return NULL;
6735 }
6736
6737	/* Insert a mapping FROM->empty vector of debug arguments in the debug
6738	   arguments hashtable.  */
6739
6740 vec<tree, va_gc> **
6741 decl_debug_args_insert (tree from)
6742 {
6743 struct tree_vec_map *h;
6744 tree_vec_map **loc;
6745
6746 if (DECL_HAS_DEBUG_ARGS_P (from))
6747 return decl_debug_args_lookup (from);
6748 if (debug_args_for_decl == NULL)
6749 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6750 h = ggc_alloc<tree_vec_map> ();
6751 h->base.from = from;
6752 h->to = NULL;
6753 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6754 *loc = h;
6755 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6756 return &h->to;
6757 }
6758
6759 /* Hashing of types so that we don't make duplicates.
6760 The entry point is `type_hash_canon'. */
6761
6762 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6763 with types in the TREE_VALUE slots), by adding the hash codes
6764 of the individual types. */
6765
6766 static void
6767 type_hash_list (const_tree list, inchash::hash &hstate)
6768 {
6769 const_tree tail;
6770
6771 for (tail = list; tail; tail = TREE_CHAIN (tail))
6772 if (TREE_VALUE (tail) != error_mark_node)
6773 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6774 }
6775
6776 /* These are the Hashtable callback functions. */
6777
6778 /* Returns true iff the types are equivalent. */
6779
6780 bool
6781 type_cache_hasher::equal (type_hash *a, type_hash *b)
6782 {
6783 /* First test the things that are the same for all types. */
6784 if (a->hash != b->hash
6785 || TREE_CODE (a->type) != TREE_CODE (b->type)
6786 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6787 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6788 TYPE_ATTRIBUTES (b->type))
6789 || (TREE_CODE (a->type) != COMPLEX_TYPE
6790 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6791 return 0;
6792
6793 /* Be careful about comparing arrays before and after the element type
6794 has been completed; don't compare TYPE_ALIGN unless both types are
6795 complete. */
6796 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6797 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6798 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6799 return 0;
6800
6801 switch (TREE_CODE (a->type))
6802 {
6803 case VOID_TYPE:
6804 case COMPLEX_TYPE:
6805 case POINTER_TYPE:
6806 case REFERENCE_TYPE:
6807 case NULLPTR_TYPE:
6808 return 1;
6809
6810 case VECTOR_TYPE:
6811 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6812
6813 case ENUMERAL_TYPE:
6814 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6815 && !(TYPE_VALUES (a->type)
6816 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6817 && TYPE_VALUES (b->type)
6818 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6819 && type_list_equal (TYPE_VALUES (a->type),
6820 TYPE_VALUES (b->type))))
6821 return 0;
6822
6823 /* ... fall through ... */
6824
6825 case INTEGER_TYPE:
6826 case REAL_TYPE:
6827 case BOOLEAN_TYPE:
6828 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6829 return false;
6830 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6831 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6832 TYPE_MAX_VALUE (b->type)))
6833 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6834 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6835 TYPE_MIN_VALUE (b->type))));
6836
6837 case FIXED_POINT_TYPE:
6838 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6839
6840 case OFFSET_TYPE:
6841 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6842
6843 case METHOD_TYPE:
6844 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6845 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6846 || (TYPE_ARG_TYPES (a->type)
6847 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6848 && TYPE_ARG_TYPES (b->type)
6849 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6850 && type_list_equal (TYPE_ARG_TYPES (a->type),
6851 TYPE_ARG_TYPES (b->type)))))
6852 break;
6853 return 0;
6854 case ARRAY_TYPE:
6855 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6856
6857 case RECORD_TYPE:
6858 case UNION_TYPE:
6859 case QUAL_UNION_TYPE:
6860 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6861 || (TYPE_FIELDS (a->type)
6862 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6863 && TYPE_FIELDS (b->type)
6864 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6865 && type_list_equal (TYPE_FIELDS (a->type),
6866 TYPE_FIELDS (b->type))));
6867
6868 case FUNCTION_TYPE:
6869 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6870 || (TYPE_ARG_TYPES (a->type)
6871 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6872 && TYPE_ARG_TYPES (b->type)
6873 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6874 && type_list_equal (TYPE_ARG_TYPES (a->type),
6875 TYPE_ARG_TYPES (b->type))))
6876 break;
6877 return 0;
6878
6879 default:
6880 return 0;
6881 }
6882
6883 if (lang_hooks.types.type_hash_eq != NULL)
6884 return lang_hooks.types.type_hash_eq (a->type, b->type);
6885
6886 return 1;
6887 }
6888
6889 /* Given TYPE, and HASHCODE its hash code, return the canonical
6890 object for an identical type if one already exists.
6891 Otherwise, return TYPE, and record it as the canonical object.
6892
6893 To use this function, first create a type of the sort you want.
6894 Then compute its hash code from the fields of the type that
6895 make it different from other similar types.
6896 Then call this function and use the value. */
6897
6898 tree
6899 type_hash_canon (unsigned int hashcode, tree type)
6900 {
6901 type_hash in;
6902 type_hash **loc;
6903
6904 /* The hash table only contains main variants, so ensure that's what we're
6905 being passed. */
6906 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6907
6908 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6909 must call that routine before comparing TYPE_ALIGNs. */
6910 layout_type (type);
6911
6912 in.hash = hashcode;
6913 in.type = type;
6914
6915 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6916 if (*loc)
6917 {
6918 tree t1 = ((type_hash *) *loc)->type;
6919 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6920 if (GATHER_STATISTICS)
6921 {
6922 tree_code_counts[(int) TREE_CODE (type)]--;
6923 tree_node_counts[(int) t_kind]--;
6924 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6925 }
6926 return t1;
6927 }
6928 else
6929 {
6930 struct type_hash *h;
6931
6932 h = ggc_alloc<type_hash> ();
6933 h->hash = hashcode;
6934 h->type = type;
6935 *loc = h;
6936
6937 return type;
6938 }
6939 }
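
/* Callers hash the distinguishing fields and then canonicalize, along the
   lines of build_range_type_1 further below (NTYPE being a hypothetical,
   freshly built main variant):

     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
     ntype = type_hash_canon (hstate.end (), ntype);

   If an identical type already sits in the table, that node is returned
   and the fresh one is abandoned.  */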
6940
6941 static void
6942 print_type_hash_statistics (void)
6943 {
6944 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6945 (long) type_hash_table->size (),
6946 (long) type_hash_table->elements (),
6947 type_hash_table->collisions ());
6948 }
6949
6950 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6951 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6952 by adding the hash codes of the individual attributes. */
6953
6954 static void
6955 attribute_hash_list (const_tree list, inchash::hash &hstate)
6956 {
6957 const_tree tail;
6958
6959 for (tail = list; tail; tail = TREE_CHAIN (tail))
6960 /* ??? Do we want to add in TREE_VALUE too? */
6961 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6962 }
6963
6964 /* Given two lists of attributes, return true if list l2 is
6965 equivalent to l1. */
6966
6967 int
6968 attribute_list_equal (const_tree l1, const_tree l2)
6969 {
6970 if (l1 == l2)
6971 return 1;
6972
6973 return attribute_list_contained (l1, l2)
6974 && attribute_list_contained (l2, l1);
6975 }
6976
6977 /* Given two lists of attributes, return true if list L2 is
6978 completely contained within L1. */
6979 /* ??? This would be faster if attribute names were stored in a canonicalized
6980 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6981 must be used to show these elements are equivalent (which they are). */
6982 /* ??? It's not clear that attributes with arguments will always be handled
6983 correctly. */
6984
6985 int
6986 attribute_list_contained (const_tree l1, const_tree l2)
6987 {
6988 const_tree t1, t2;
6989
6990 /* First check the obvious, maybe the lists are identical. */
6991 if (l1 == l2)
6992 return 1;
6993
6994 /* Maybe the lists are similar. */
6995 for (t1 = l1, t2 = l2;
6996 t1 != 0 && t2 != 0
6997 && get_attribute_name (t1) == get_attribute_name (t2)
6998 && TREE_VALUE (t1) == TREE_VALUE (t2);
6999 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7000 ;
7001
7002 /* Maybe the lists are equal. */
7003 if (t1 == 0 && t2 == 0)
7004 return 1;
7005
7006 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7007 {
7008 const_tree attr;
7009	      /* This CONST_CAST is okay because lookup_ident_attribute does not
7010		 modify its argument and the return value is assigned to a
7011		 const_tree.  */
7012 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7013 CONST_CAST_TREE (l1));
7014 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7015 attr = lookup_ident_attribute (get_attribute_name (t2),
7016 TREE_CHAIN (attr)))
7017 ;
7018
7019 if (attr == NULL_TREE)
7020 return 0;
7021 }
7022
7023 return 1;
7024 }
7025
7026 /* Given two lists of types
7027 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7028 return 1 if the lists contain the same types in the same order.
7029 Also, the TREE_PURPOSEs must match. */
7030
7031 int
7032 type_list_equal (const_tree l1, const_tree l2)
7033 {
7034 const_tree t1, t2;
7035
7036 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7037 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7038 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7039 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7040 && (TREE_TYPE (TREE_PURPOSE (t1))
7041 == TREE_TYPE (TREE_PURPOSE (t2))))))
7042 return 0;
7043
7044 return t1 == t2;
7045 }
7046
7047 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7048 given by TYPE. If the argument list accepts variable arguments,
7049 then this function counts only the ordinary arguments. */
7050
7051 int
7052 type_num_arguments (const_tree type)
7053 {
7054 int i = 0;
7055 tree t;
7056
7057 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7058 /* If the function does not take a variable number of arguments,
7059 the last element in the list will have type `void'. */
7060 if (VOID_TYPE_P (TREE_VALUE (t)))
7061 break;
7062 else
7063 ++i;
7064
7065 return i;
7066 }
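
/* For example, for the FUNCTION_TYPE of 'int f (int, char *, ...)' this
   returns 2: in a non-variadic list the terminating 'void' is not
   counted, and in a variadic list the chain simply ends without one.  */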
7067
7068 /* Nonzero if integer constants T1 and T2
7069 represent the same constant value. */
7070
7071 int
7072 tree_int_cst_equal (const_tree t1, const_tree t2)
7073 {
7074 if (t1 == t2)
7075 return 1;
7076
7077 if (t1 == 0 || t2 == 0)
7078 return 0;
7079
7080 if (TREE_CODE (t1) == INTEGER_CST
7081 && TREE_CODE (t2) == INTEGER_CST
7082 && wi::to_widest (t1) == wi::to_widest (t2))
7083 return 1;
7084
7085 return 0;
7086 }
7087
7088 /* Return true if T is an INTEGER_CST whose numerical value (extended
7089 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7090
7091 bool
7092 tree_fits_shwi_p (const_tree t)
7093 {
7094 return (t != NULL_TREE
7095 && TREE_CODE (t) == INTEGER_CST
7096 && wi::fits_shwi_p (wi::to_widest (t)));
7097 }
7098
7099 /* Return true if T is an INTEGER_CST whose numerical value (extended
7100 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7101
7102 bool
7103 tree_fits_uhwi_p (const_tree t)
7104 {
7105 return (t != NULL_TREE
7106 && TREE_CODE (t) == INTEGER_CST
7107 && wi::fits_uhwi_p (wi::to_widest (t)));
7108 }
7109
7110 /* T is an INTEGER_CST whose numerical value (extended according to
7111 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7112 HOST_WIDE_INT. */
7113
7114 HOST_WIDE_INT
7115 tree_to_shwi (const_tree t)
7116 {
7117 gcc_assert (tree_fits_shwi_p (t));
7118 return TREE_INT_CST_LOW (t);
7119 }
7120
7121 /* T is an INTEGER_CST whose numerical value (extended according to
7122 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7123 HOST_WIDE_INT. */
7124
7125 unsigned HOST_WIDE_INT
7126 tree_to_uhwi (const_tree t)
7127 {
7128 gcc_assert (tree_fits_uhwi_p (t));
7129 return TREE_INT_CST_LOW (t);
7130 }
7131
7132 /* Return the most significant (sign) bit of T. */
7133
7134 int
7135 tree_int_cst_sign_bit (const_tree t)
7136 {
7137 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7138
7139 return wi::extract_uhwi (t, bitno, 1);
7140 }
7141
7142 /* Return an indication of the sign of the integer constant T.
7143 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7144 Note that -1 will never be returned if T's type is unsigned. */
7145
7146 int
7147 tree_int_cst_sgn (const_tree t)
7148 {
7149 if (wi::eq_p (t, 0))
7150 return 0;
7151 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7152 return 1;
7153 else if (wi::neg_p (t))
7154 return -1;
7155 else
7156 return 1;
7157 }
7158
7159	/* Return the minimum number of bits needed to represent VALUE in a
7160	   signed or unsigned type; SGN says which.  */
7161
7162 unsigned int
7163 tree_int_cst_min_precision (tree value, signop sgn)
7164 {
7165 /* If the value is negative, compute its negative minus 1. The latter
7166 adjustment is because the absolute value of the largest negative value
7167 is one larger than the largest positive value. This is equivalent to
7168 a bit-wise negation, so use that operation instead. */
7169
7170 if (tree_int_cst_sgn (value) < 0)
7171 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7172
7173	  /* Return the number of bits needed, taking into account the fact
7174	     that we need one more bit for a signed type than for an unsigned
7175	     one.  If VALUE is 0 or -1, the minimum precision is 1 regardless
7176	     of SGN.  */
7177
7178 if (integer_zerop (value))
7179 return 1;
7180 else
7181 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7182 }
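
/* Worked example: for VALUE 5, tree_floor_log2 gives 2, so the result is
   3 bits for SGN == UNSIGNED and 4 bits for SGN == SIGNED; for VALUE -3
   the bit-wise negation yields 2, giving 3 bits for SGN == SIGNED, which
   indeed covers the signed range [-4, 3].  */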
7183
7184 /* Return truthvalue of whether T1 is the same tree structure as T2.
7185 Return 1 if they are the same.
7186 Return 0 if they are understandably different.
7187 Return -1 if either contains tree structure not understood by
7188 this function. */
7189
7190 int
7191 simple_cst_equal (const_tree t1, const_tree t2)
7192 {
7193 enum tree_code code1, code2;
7194 int cmp;
7195 int i;
7196
7197 if (t1 == t2)
7198 return 1;
7199 if (t1 == 0 || t2 == 0)
7200 return 0;
7201
7202 code1 = TREE_CODE (t1);
7203 code2 = TREE_CODE (t2);
7204
7205 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7206 {
7207 if (CONVERT_EXPR_CODE_P (code2)
7208 || code2 == NON_LVALUE_EXPR)
7209 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7210 else
7211 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7212 }
7213
7214 else if (CONVERT_EXPR_CODE_P (code2)
7215 || code2 == NON_LVALUE_EXPR)
7216 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7217
7218 if (code1 != code2)
7219 return 0;
7220
7221 switch (code1)
7222 {
7223 case INTEGER_CST:
7224 return wi::to_widest (t1) == wi::to_widest (t2);
7225
7226 case REAL_CST:
7227 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7228
7229 case FIXED_CST:
7230 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7231
7232 case STRING_CST:
7233 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7234 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7235 TREE_STRING_LENGTH (t1)));
7236
7237 case CONSTRUCTOR:
7238 {
7239 unsigned HOST_WIDE_INT idx;
7240 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7241 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7242
7243 if (vec_safe_length (v1) != vec_safe_length (v2))
7244 return false;
7245
7246 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7247	    /* ??? Should we also handle fields here?  */
7248 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7249 return false;
7250 return true;
7251 }
7252
7253 case SAVE_EXPR:
7254 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7255
7256 case CALL_EXPR:
7257 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7258 if (cmp <= 0)
7259 return cmp;
7260 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7261 return 0;
7262 {
7263 const_tree arg1, arg2;
7264 const_call_expr_arg_iterator iter1, iter2;
7265 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7266 arg2 = first_const_call_expr_arg (t2, &iter2);
7267 arg1 && arg2;
7268 arg1 = next_const_call_expr_arg (&iter1),
7269 arg2 = next_const_call_expr_arg (&iter2))
7270 {
7271 cmp = simple_cst_equal (arg1, arg2);
7272 if (cmp <= 0)
7273 return cmp;
7274 }
7275 return arg1 == arg2;
7276 }
7277
7278 case TARGET_EXPR:
7279 /* Special case: if either target is an unallocated VAR_DECL,
7280 it means that it's going to be unified with whatever the
7281 TARGET_EXPR is really supposed to initialize, so treat it
7282 as being equivalent to anything. */
7283 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7284 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7285 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7286 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7287 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7288 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7289 cmp = 1;
7290 else
7291 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7292
7293 if (cmp <= 0)
7294 return cmp;
7295
7296 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7297
7298 case WITH_CLEANUP_EXPR:
7299 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7300 if (cmp <= 0)
7301 return cmp;
7302
7303	      return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7304
7305 case COMPONENT_REF:
7306 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7307 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7308
7309 return 0;
7310
7311 case VAR_DECL:
7312 case PARM_DECL:
7313 case CONST_DECL:
7314 case FUNCTION_DECL:
7315 return 0;
7316
7317 default:
7318 break;
7319 }
7320
7321 /* This general rule works for most tree codes. All exceptions should be
7322 handled above. If this is a language-specific tree code, we can't
7323 trust what might be in the operand, so say we don't know
7324 the situation. */
7325 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7326 return -1;
7327
7328 switch (TREE_CODE_CLASS (code1))
7329 {
7330 case tcc_unary:
7331 case tcc_binary:
7332 case tcc_comparison:
7333 case tcc_expression:
7334 case tcc_reference:
7335 case tcc_statement:
7336 cmp = 1;
7337 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7338 {
7339 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7340 if (cmp <= 0)
7341 return cmp;
7342 }
7343
7344 return cmp;
7345
7346 default:
7347 return -1;
7348 }
7349 }
7350
7351 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7352 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7353 than U, respectively. */
7354
7355 int
7356 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7357 {
7358 if (tree_int_cst_sgn (t) < 0)
7359 return -1;
7360 else if (!tree_fits_uhwi_p (t))
7361 return 1;
7362 else if (TREE_INT_CST_LOW (t) == u)
7363 return 0;
7364 else if (TREE_INT_CST_LOW (t) < u)
7365 return -1;
7366 else
7367 return 1;
7368 }
7369
7370 /* Return true if SIZE represents a constant size that is in bounds of
7371 what the middle-end and the backend accepts (covering not more than
7372 half of the address-space). */
7373
7374 bool
7375 valid_constant_size_p (const_tree size)
7376 {
7377 if (! tree_fits_uhwi_p (size)
7378 || TREE_OVERFLOW (size)
7379 || tree_int_cst_sign_bit (size) != 0)
7380 return false;
7381 return true;
7382 }
7383
7384 /* Return the precision of the type, or for a complex or vector type the
7385 precision of the type of its elements. */
7386
7387 unsigned int
7388 element_precision (const_tree type)
7389 {
7390 enum tree_code code = TREE_CODE (type);
7391 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7392 type = TREE_TYPE (type);
7393
7394 return TYPE_PRECISION (type);
7395 }
7396
7397 /* Return true if CODE represents an associative tree code. Otherwise
7398 return false. */
7399 bool
7400 associative_tree_code (enum tree_code code)
7401 {
7402 switch (code)
7403 {
7404 case BIT_IOR_EXPR:
7405 case BIT_AND_EXPR:
7406 case BIT_XOR_EXPR:
7407 case PLUS_EXPR:
7408 case MULT_EXPR:
7409 case MIN_EXPR:
7410 case MAX_EXPR:
7411 return true;
7412
7413 default:
7414 break;
7415 }
7416 return false;
7417 }
7418
7419 /* Return true if CODE represents a commutative tree code. Otherwise
7420 return false. */
7421 bool
7422 commutative_tree_code (enum tree_code code)
7423 {
7424 switch (code)
7425 {
7426 case PLUS_EXPR:
7427 case MULT_EXPR:
7428 case MULT_HIGHPART_EXPR:
7429 case MIN_EXPR:
7430 case MAX_EXPR:
7431 case BIT_IOR_EXPR:
7432 case BIT_XOR_EXPR:
7433 case BIT_AND_EXPR:
7434 case NE_EXPR:
7435 case EQ_EXPR:
7436 case UNORDERED_EXPR:
7437 case ORDERED_EXPR:
7438 case UNEQ_EXPR:
7439 case LTGT_EXPR:
7440 case TRUTH_AND_EXPR:
7441 case TRUTH_XOR_EXPR:
7442 case TRUTH_OR_EXPR:
7443 case WIDEN_MULT_EXPR:
7444 case VEC_WIDEN_MULT_HI_EXPR:
7445 case VEC_WIDEN_MULT_LO_EXPR:
7446 case VEC_WIDEN_MULT_EVEN_EXPR:
7447 case VEC_WIDEN_MULT_ODD_EXPR:
7448 return true;
7449
7450 default:
7451 break;
7452 }
7453 return false;
7454 }
7455
7456 /* Return true if CODE represents a ternary tree code for which the
7457 first two operands are commutative. Otherwise return false. */
7458 bool
7459 commutative_ternary_tree_code (enum tree_code code)
7460 {
7461 switch (code)
7462 {
7463 case WIDEN_MULT_PLUS_EXPR:
7464 case WIDEN_MULT_MINUS_EXPR:
7465 case DOT_PROD_EXPR:
7466 case FMA_EXPR:
7467 return true;
7468
7469 default:
7470 break;
7471 }
7472 return false;
7473 }
7474
7475 namespace inchash
7476 {
7477
7478 /* Generate a hash value for an expression. This can be used iteratively
7479 by passing a previous result as the HSTATE argument.
7480
7481 This function is intended to produce the same hash for expressions which
7482 would compare equal using operand_equal_p. */
7483 void
7484 add_expr (const_tree t, inchash::hash &hstate)
7485 {
7486 int i;
7487 enum tree_code code;
7488 enum tree_code_class tclass;
7489
7490 if (t == NULL_TREE)
7491 {
7492 hstate.merge_hash (0);
7493 return;
7494 }
7495
7496 code = TREE_CODE (t);
7497
7498 switch (code)
7499 {
7500 /* Alas, constants aren't shared, so we can't rely on pointer
7501 identity. */
7502 case VOID_CST:
7503 hstate.merge_hash (0);
7504 return;
7505 case INTEGER_CST:
7506 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7507 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7508 return;
7509 case REAL_CST:
7510 {
7511 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7512 hstate.merge_hash (val2);
7513 return;
7514 }
7515 case FIXED_CST:
7516 {
7517 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7518 hstate.merge_hash (val2);
7519 return;
7520 }
7521 case STRING_CST:
7522 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7523 return;
7524 case COMPLEX_CST:
7525 inchash::add_expr (TREE_REALPART (t), hstate);
7526 inchash::add_expr (TREE_IMAGPART (t), hstate);
7527 return;
7528 case VECTOR_CST:
7529 {
7530 unsigned i;
7531 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7532 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7533 return;
7534 }
7535 case SSA_NAME:
7536	      /* SSA names can be compared by pointer; hash the version number.  */
7537 hstate.add_wide_int (SSA_NAME_VERSION (t));
7538 return;
7539 case PLACEHOLDER_EXPR:
7540 /* The node itself doesn't matter. */
7541 return;
7542 case TREE_LIST:
7543 /* A list of expressions, for a CALL_EXPR or as the elements of a
7544 VECTOR_CST. */
7545 for (; t; t = TREE_CHAIN (t))
7546 inchash::add_expr (TREE_VALUE (t), hstate);
7547 return;
7548 case CONSTRUCTOR:
7549 {
7550 unsigned HOST_WIDE_INT idx;
7551 tree field, value;
7552 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7553 {
7554 inchash::add_expr (field, hstate);
7555 inchash::add_expr (value, hstate);
7556 }
7557 return;
7558 }
7559 case FUNCTION_DECL:
7560 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7561 Otherwise nodes that compare equal according to operand_equal_p might
7562 get different hash codes. However, don't do this for machine specific
7563 or front end builtins, since the function code is overloaded in those
7564 cases. */
7565 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7566 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7567 {
7568 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7569 code = TREE_CODE (t);
7570 }
7571 /* FALL THROUGH */
7572 default:
7573 tclass = TREE_CODE_CLASS (code);
7574
7575 if (tclass == tcc_declaration)
7576 {
7577 /* DECL's have a unique ID */
7578 hstate.add_wide_int (DECL_UID (t));
7579 }
7580 else
7581 {
7582 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7583
7584 hstate.add_object (code);
7585
7586 /* Don't hash the type, that can lead to having nodes which
7587 compare equal according to operand_equal_p, but which
7588 have different hash codes. */
7589 if (CONVERT_EXPR_CODE_P (code)
7590 || code == NON_LVALUE_EXPR)
7591 {
7592	      /* Make sure to include signedness in the hash computation.  */
7593 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7594 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7595 }
7596
7597 else if (commutative_tree_code (code))
7598 {
7599 /* It's a commutative expression. We want to hash it the same
7600 however it appears. We do this by first hashing both operands
7601 and then rehashing based on the order of their independent
7602 hashes. */
7603 inchash::hash one, two;
7604 inchash::add_expr (TREE_OPERAND (t, 0), one);
7605 inchash::add_expr (TREE_OPERAND (t, 1), two);
7606 hstate.add_commutative (one, two);
7607 }
7608 else
7609 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7610 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7611 }
7612 return;
7613 }
7614 }
7615
7616 }
7617
7618 /* Constructors for pointer, array and function types.
7619 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7620 constructed by language-dependent code, not here.) */
7621
7622 /* Construct, lay out and return the type of pointers to TO_TYPE with
7623 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7624 reference all of memory. If such a type has already been
7625 constructed, reuse it. */
7626
7627 tree
7628 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7629 bool can_alias_all)
7630 {
7631 tree t;
7632
7633 if (to_type == error_mark_node)
7634 return error_mark_node;
7635
7636 /* If the pointed-to type has the may_alias attribute set, force
7637 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7638 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7639 can_alias_all = true;
7640
7641 /* In some cases, languages will have things that aren't a POINTER_TYPE
7642 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7643 In that case, return that type without regard to the rest of our
7644 operands.
7645
7646 ??? This is a kludge, but consistent with the way this function has
7647 always operated and there doesn't seem to be a good way to avoid this
7648 at the moment. */
7649 if (TYPE_POINTER_TO (to_type) != 0
7650 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7651 return TYPE_POINTER_TO (to_type);
7652
7653 /* First, if we already have a type for pointers to TO_TYPE and it's
7654 the proper mode, use it. */
7655 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7656 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7657 return t;
7658
7659 t = make_node (POINTER_TYPE);
7660
7661 TREE_TYPE (t) = to_type;
7662 SET_TYPE_MODE (t, mode);
7663 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7664 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7665 TYPE_POINTER_TO (to_type) = t;
7666
7667 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7668 SET_TYPE_STRUCTURAL_EQUALITY (t);
7669 else if (TYPE_CANONICAL (to_type) != to_type)
7670 TYPE_CANONICAL (t)
7671 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7672 mode, can_alias_all);
7673
7674 /* Lay out the type. This function has many callers that are concerned
7675 with expression-construction, and this simplifies them all. */
7676 layout_type (t);
7677
7678 return t;
7679 }
7680
7681 /* By default build pointers in ptr_mode. */
7682
7683 tree
7684 build_pointer_type (tree to_type)
7685 {
7686	  addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7687 : TYPE_ADDR_SPACE (to_type);
7688 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7689 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7690 }
7691
7692 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7693
7694 tree
7695 build_reference_type_for_mode (tree to_type, machine_mode mode,
7696 bool can_alias_all)
7697 {
7698 tree t;
7699
7700 if (to_type == error_mark_node)
7701 return error_mark_node;
7702
7703 /* If the pointed-to type has the may_alias attribute set, force
7704 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7705 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7706 can_alias_all = true;
7707
7708 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7709 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7710 In that case, return that type without regard to the rest of our
7711 operands.
7712
7713 ??? This is a kludge, but consistent with the way this function has
7714 always operated and there doesn't seem to be a good way to avoid this
7715 at the moment. */
7716 if (TYPE_REFERENCE_TO (to_type) != 0
7717 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7718 return TYPE_REFERENCE_TO (to_type);
7719
7720 /* First, if we already have a type for pointers to TO_TYPE and it's
7721 the proper mode, use it. */
7722 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7723 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7724 return t;
7725
7726 t = make_node (REFERENCE_TYPE);
7727
7728 TREE_TYPE (t) = to_type;
7729 SET_TYPE_MODE (t, mode);
7730 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7731 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7732 TYPE_REFERENCE_TO (to_type) = t;
7733
7734 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7735 SET_TYPE_STRUCTURAL_EQUALITY (t);
7736 else if (TYPE_CANONICAL (to_type) != to_type)
7737 TYPE_CANONICAL (t)
7738 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7739 mode, can_alias_all);
7740
7741 layout_type (t);
7742
7743 return t;
7744 }
7745
7746
7747 /* Build the node for the type of references-to-TO_TYPE by default
7748 in ptr_mode. */
7749
7750 tree
7751 build_reference_type (tree to_type)
7752 {
7753	  addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7754 : TYPE_ADDR_SPACE (to_type);
7755 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7756 return build_reference_type_for_mode (to_type, pointer_mode, false);
7757 }
7758
7759 #define MAX_INT_CACHED_PREC \
7760 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7761 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7762
7763 /* Builds a signed or unsigned integer type of precision PRECISION.
7764 Used for C bitfields whose precision does not match that of
7765 built-in target types. */
7766 tree
7767 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7768 int unsignedp)
7769 {
7770 tree itype, ret;
7771
7772 if (unsignedp)
7773 unsignedp = MAX_INT_CACHED_PREC + 1;
7774
7775 if (precision <= MAX_INT_CACHED_PREC)
7776 {
7777 itype = nonstandard_integer_type_cache[precision + unsignedp];
7778 if (itype)
7779 return itype;
7780 }
7781
7782 itype = make_node (INTEGER_TYPE);
7783 TYPE_PRECISION (itype) = precision;
7784
7785 if (unsignedp)
7786 fixup_unsigned_type (itype);
7787 else
7788 fixup_signed_type (itype);
7789
7790 ret = itype;
7791 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7792 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7793 if (precision <= MAX_INT_CACHED_PREC)
7794 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7795
7796 return ret;
7797 }
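
/* A minimal usage sketch, for illustration only: a front end handling the
   bit-field "unsigned x : 24;" could request a 24-bit unsigned type as
   below; a second request with the same precision and signedness is served
   from nonstandard_integer_type_cache.

     tree u24 = build_nonstandard_integer_type (24, 1);
     gcc_assert (TYPE_PRECISION (u24) == 24 && TYPE_UNSIGNED (u24));  */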
7798
7799 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7800 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7801 is true, reuse such a type that has already been constructed. */
7802
7803 static tree
7804 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7805 {
7806 tree itype = make_node (INTEGER_TYPE);
7807 inchash::hash hstate;
7808
7809 TREE_TYPE (itype) = type;
7810
7811 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7812 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7813
7814 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7815 SET_TYPE_MODE (itype, TYPE_MODE (type));
7816 TYPE_SIZE (itype) = TYPE_SIZE (type);
7817 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7818 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7819 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7820
7821 if (!shared)
7822 return itype;
7823
7824 if ((TYPE_MIN_VALUE (itype)
7825 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7826 || (TYPE_MAX_VALUE (itype)
7827 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7828 {
7829 /* Since we cannot reliably merge this type, we need to compare it using
7830 structural equality checks. */
7831 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7832 return itype;
7833 }
7834
7835 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7836 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7837 hstate.merge_hash (TYPE_HASH (type));
7838 itype = type_hash_canon (hstate.end (), itype);
7839
7840 return itype;
7841 }
7842
7843 /* Wrapper around build_range_type_1 with SHARED set to true. */
7844
7845 tree
7846 build_range_type (tree type, tree lowval, tree highval)
7847 {
7848 return build_range_type_1 (type, lowval, highval, true);
7849 }
7850
7851 /* Wrapper around build_range_type_1 with SHARED set to false. */
7852
7853 tree
7854 build_nonshared_range_type (tree type, tree lowval, tree highval)
7855 {
7856 return build_range_type_1 (type, lowval, highval, false);
7857 }
7858
7859 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7860 MAXVAL should be the maximum value in the domain
7861 (one less than the length of the array).
7862
7863 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7864 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7865 The limit exists because the result is a signed type and we don't handle
7866 sizes that use more than one HOST_WIDE_INT. */
7867
7868 tree
7869 build_index_type (tree maxval)
7870 {
7871 return build_range_type (sizetype, size_zero_node, maxval);
7872 }
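
/* A minimal usage sketch, for illustration only: the domain of a
   10-element array runs from 0 to 9, so MAXVAL is the length minus one.
   The call below is equivalent to
   build_range_type (sizetype, size_zero_node, size_int (9)).

     tree domain = build_index_type (size_int (9));  */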
7873
7874 /* Return true if the debug information for TYPE, a subtype, should be emitted
7875 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7876 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7877 debug info and doesn't reflect the source code. */
7878
7879 bool
7880 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7881 {
7882 tree base_type = TREE_TYPE (type), low, high;
7883
7884 /* Subrange types have a base type which is an integral type. */
7885 if (!INTEGRAL_TYPE_P (base_type))
7886 return false;
7887
7888 /* Get the real bounds of the subtype. */
7889 if (lang_hooks.types.get_subrange_bounds)
7890 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7891 else
7892 {
7893 low = TYPE_MIN_VALUE (type);
7894 high = TYPE_MAX_VALUE (type);
7895 }
7896
7897 /* If the type and its base type have the same representation and the same
7898 name, then the type is not a subrange but a copy of the base type. */
7899 if ((TREE_CODE (base_type) == INTEGER_TYPE
7900 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7901 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7902 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7903 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7904 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7905 return false;
7906
7907 if (lowval)
7908 *lowval = low;
7909 if (highval)
7910 *highval = high;
7911 return true;
7912 }
7913
7914 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7915 and number of elements specified by the range of values of INDEX_TYPE.
7916 If SHARED is true, reuse such a type that has already been constructed. */
7917
7918 static tree
7919 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7920 {
7921 tree t;
7922
7923 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7924 {
7925 error ("arrays of functions are not meaningful");
7926 elt_type = integer_type_node;
7927 }
7928
7929 t = make_node (ARRAY_TYPE);
7930 TREE_TYPE (t) = elt_type;
7931 TYPE_DOMAIN (t) = index_type;
7932 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7933 layout_type (t);
7934
7935 /* If the element type is incomplete at this point we get marked for
7936 structural equality. Do not record these types in the canonical
7937 type hashtable. */
7938 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7939 return t;
7940
7941 if (shared)
7942 {
7943 inchash::hash hstate;
7944 hstate.add_object (TYPE_HASH (elt_type));
7945 if (index_type)
7946 hstate.add_object (TYPE_HASH (index_type));
7947 t = type_hash_canon (hstate.end (), t);
7948 }
7949
7950 if (TYPE_CANONICAL (t) == t)
7951 {
7952 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7953 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7954 SET_TYPE_STRUCTURAL_EQUALITY (t);
7955 else if (TYPE_CANONICAL (elt_type) != elt_type
7956 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7957 TYPE_CANONICAL (t)
7958 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7959 index_type
7960 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7961 shared);
7962 }
7963
7964 return t;
7965 }
7966
7967 /* Wrapper around build_array_type_1 with SHARED set to true. */
7968
7969 tree
7970 build_array_type (tree elt_type, tree index_type)
7971 {
7972 return build_array_type_1 (elt_type, index_type, true);
7973 }
7974
7975 /* Wrapper around build_array_type_1 with SHARED set to false. */
7976
7977 tree
7978 build_nonshared_array_type (tree elt_type, tree index_type)
7979 {
7980 return build_array_type_1 (elt_type, index_type, false);
7981 }
7982
7983 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7984 sizetype. */
7985
7986 tree
7987 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7988 {
7989 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7990 }
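
/* A minimal usage sketch, for illustration only: a two-dimensional
   "int [5][10]" is an array of arrays, built from the innermost dimension
   outwards.  The names row and mat are hypothetical.

     tree row = build_array_type_nelts (integer_type_node, 10);
     tree mat = build_array_type_nelts (row, 5);  */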
7991
7992 /* Strip ARRAY_TYPEs from TYPE until a non-array element type is
7993 found, and return that type. */
7994
7995 tree
7996 strip_array_types (tree type)
7997 {
7998 while (TREE_CODE (type) == ARRAY_TYPE)
7999 type = TREE_TYPE (type);
8000
8001 return type;
8002 }
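
/* A minimal usage sketch, for illustration only: applied to an
   "int [5][10]" type, strip_array_types peels off both ARRAY_TYPE layers
   and yields the element type.

     tree mat = build_array_type_nelts (build_array_type_nelts
                                          (integer_type_node, 10), 5);
     gcc_assert (strip_array_types (mat) == integer_type_node);  */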
8003
8004 /* Computes the canonical argument types from the argument type list
8005 ARGTYPES.
8006
8007 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8008 on entry to this function, or if any of the ARGTYPES are
8009 structural.
8010
8011 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8012 true on entry to this function, or if any of the ARGTYPES are
8013 non-canonical.
8014
8015 Returns a canonical argument list, which may be ARGTYPES when the
8016 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8017 true) or would not differ from ARGTYPES. */
8018
8019 static tree
8020 maybe_canonicalize_argtypes (tree argtypes,
8021 bool *any_structural_p,
8022 bool *any_noncanonical_p)
8023 {
8024 tree arg;
8025 bool any_noncanonical_argtypes_p = false;
8026
8027 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8028 {
8029 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8030 /* Fail gracefully by stating that the type is structural. */
8031 *any_structural_p = true;
8032 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8033 *any_structural_p = true;
8034 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8035 || TREE_PURPOSE (arg))
8036 /* If the argument has a default argument, we consider it
8037 non-canonical even though the type itself is canonical.
8038 That way, different variants of function and method types
8039 with default arguments will all point to the variant with
8040 no defaults as their canonical type. */
8041 any_noncanonical_argtypes_p = true;
8042 }
8043
8044 if (*any_structural_p)
8045 return argtypes;
8046
8047 if (any_noncanonical_argtypes_p)
8048 {
8049 /* Build the canonical list of argument types. */
8050 tree canon_argtypes = NULL_TREE;
8051 bool is_void = false;
8052
8053 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8054 {
8055 if (arg == void_list_node)
8056 is_void = true;
8057 else
8058 canon_argtypes = tree_cons (NULL_TREE,
8059 TYPE_CANONICAL (TREE_VALUE (arg)),
8060 canon_argtypes);
8061 }
8062
8063 canon_argtypes = nreverse (canon_argtypes);
8064 if (is_void)
8065 canon_argtypes = chainon (canon_argtypes, void_list_node);
8066
8067 /* There is a non-canonical type. */
8068 *any_noncanonical_p = true;
8069 return canon_argtypes;
8070 }
8071
8072 /* The canonical argument types are the same as ARGTYPES. */
8073 return argtypes;
8074 }
8075
8076 /* Construct, lay out and return
8077 the type of functions returning type VALUE_TYPE
8078 given arguments of types ARG_TYPES.
8079 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8080 are data type nodes for the arguments of the function.
8081 If such a type has already been constructed, reuse it. */
8082
8083 tree
8084 build_function_type (tree value_type, tree arg_types)
8085 {
8086 tree t;
8087 inchash::hash hstate;
8088 bool any_structural_p, any_noncanonical_p;
8089 tree canon_argtypes;
8090
8091 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8092 {
8093 error ("function return type cannot be function");
8094 value_type = integer_type_node;
8095 }
8096
8097 /* Make a node of the sort we want. */
8098 t = make_node (FUNCTION_TYPE);
8099 TREE_TYPE (t) = value_type;
8100 TYPE_ARG_TYPES (t) = arg_types;
8101
8102 /* If we already have such a type, use the old one. */
8103 hstate.add_object (TYPE_HASH (value_type));
8104 type_hash_list (arg_types, hstate);
8105 t = type_hash_canon (hstate.end (), t);
8106
8107 /* Set up the canonical type. */
8108 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8109 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8110 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8111 &any_structural_p,
8112 &any_noncanonical_p);
8113 if (any_structural_p)
8114 SET_TYPE_STRUCTURAL_EQUALITY (t);
8115 else if (any_noncanonical_p)
8116 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8117 canon_argtypes);
8118
8119 if (!COMPLETE_TYPE_P (t))
8120 layout_type (t);
8121 return t;
8122 }
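
/* A minimal usage sketch, for illustration only: the argument chain is an
   ordinary TREE_LIST terminated by void_list_node for a prototyped,
   non-variadic function, so the type of "int (char)" can be built by hand
   as shown here; build_function_type_list, defined below, is the usual
   convenience.

     tree args = tree_cons (NULL_TREE, char_type_node, void_list_node);
     tree fntype = build_function_type (integer_type_node, args);  */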
8123
8124 /* Build a function type. The RETURN_TYPE is the type returned by the
8125 function. If VAARGS is set, no void_type_node is appended to the
8126 list. ARGP must always be terminated by a NULL_TREE. */
8127
8128 static tree
8129 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8130 {
8131 tree t, args, last;
8132
8133 t = va_arg (argp, tree);
8134 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8135 args = tree_cons (NULL_TREE, t, args);
8136
8137 if (vaargs)
8138 {
8139 last = args;
8140 if (args != NULL_TREE)
8141 args = nreverse (args);
8142 gcc_assert (last != void_list_node);
8143 }
8144 else if (args == NULL_TREE)
8145 args = void_list_node;
8146 else
8147 {
8148 last = args;
8149 args = nreverse (args);
8150 TREE_CHAIN (last) = void_list_node;
8151 }
8152 args = build_function_type (return_type, args);
8153
8154 return args;
8155 }
8156
8157 /* Build a function type. The RETURN_TYPE is the type returned by the
8158 function. If additional arguments are provided, they are
8159 additional argument types. The list of argument types must always
8160 be terminated by NULL_TREE. */
8161
8162 tree
8163 build_function_type_list (tree return_type, ...)
8164 {
8165 tree args;
8166 va_list p;
8167
8168 va_start (p, return_type);
8169 args = build_function_type_list_1 (false, return_type, p);
8170 va_end (p);
8171 return args;
8172 }
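
/* A minimal usage sketch, for illustration only: the type of
   "double (int, double)" in a single call, with NULL_TREE closing the
   argument list.

     tree fntype = build_function_type_list (double_type_node,
                                             integer_type_node,
                                             double_type_node,
                                             NULL_TREE);  */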
8173
8174 /* Build a variable argument function type. The RETURN_TYPE is the
8175 type returned by the function. If additional arguments are provided,
8176 they are additional argument types. The list of argument types must
8177 always be terminated by NULL_TREE. */
8178
8179 tree
8180 build_varargs_function_type_list (tree return_type, ...)
8181 {
8182 tree args;
8183 va_list p;
8184
8185 va_start (p, return_type);
8186 args = build_function_type_list_1 (true, return_type, p);
8187 va_end (p);
8188
8189 return args;
8190 }
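
/* A minimal usage sketch, for illustration only: a printf-like type,
   "int (const char *, ...)".  Because the varargs variant is used, no
   void_list_node terminates the argument list, which is what marks the
   type as variadic.

     tree cst_char_ptr
       = build_pointer_type (build_qualified_type (char_type_node,
                                                   TYPE_QUAL_CONST));
     tree fntype = build_varargs_function_type_list (integer_type_node,
                                                     cst_char_ptr,
                                                     NULL_TREE);  */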
8191
8192 /* Build a function type. RETURN_TYPE is the type returned by the
8193 function; VAARGS indicates whether the function takes varargs. The
8194 function takes N named arguments, the types of which are provided in
8195 ARG_TYPES. */
8196
8197 static tree
8198 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8199 tree *arg_types)
8200 {
8201 int i;
8202 tree t = vaargs ? NULL_TREE : void_list_node;
8203
8204 for (i = n - 1; i >= 0; i--)
8205 t = tree_cons (NULL_TREE, arg_types[i], t);
8206
8207 return build_function_type (return_type, t);
8208 }
8209
8210 /* Build a function type. RETURN_TYPE is the type returned by the
8211 function. The function takes N named arguments, the types of which
8212 are provided in ARG_TYPES. */
8213
8214 tree
8215 build_function_type_array (tree return_type, int n, tree *arg_types)
8216 {
8217 return build_function_type_array_1 (false, return_type, n, arg_types);
8218 }
8219
8220 /* Build a variable argument function type. RETURN_TYPE is the type
8221 returned by the function. The function takes N named arguments, the
8222 types of which are provided in ARG_TYPES. */
8223
8224 tree
8225 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8226 {
8227 return build_function_type_array_1 (true, return_type, n, arg_types);
8228 }
8229
8230 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8231 and ARGTYPES (a TREE_LIST) are the return type and argument types
8232 for the method. An implicit additional parameter (of type
8233 pointer-to-BASETYPE) is added to the ARGTYPES. */
8234
8235 tree
8236 build_method_type_directly (tree basetype,
8237 tree rettype,
8238 tree argtypes)
8239 {
8240 tree t;
8241 tree ptype;
8242 inchash::hash hstate;
8243 bool any_structural_p, any_noncanonical_p;
8244 tree canon_argtypes;
8245
8246 /* Make a node of the sort we want. */
8247 t = make_node (METHOD_TYPE);
8248
8249 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8250 TREE_TYPE (t) = rettype;
8251 ptype = build_pointer_type (basetype);
8252
8253 /* The actual arglist for this function includes a "hidden" argument
8254 which is "this". Put it into the list of argument types. */
8255 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8256 TYPE_ARG_TYPES (t) = argtypes;
8257
8258 /* If we already have such a type, use the old one. */
8259 hstate.add_object (TYPE_HASH (basetype));
8260 hstate.add_object (TYPE_HASH (rettype));
8261 type_hash_list (argtypes, hstate);
8262 t = type_hash_canon (hstate.end (), t);
8263
8264 /* Set up the canonical type. */
8265 any_structural_p
8266 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8267 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8268 any_noncanonical_p
8269 = (TYPE_CANONICAL (basetype) != basetype
8270 || TYPE_CANONICAL (rettype) != rettype);
8271 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8272 &any_structural_p,
8273 &any_noncanonical_p);
8274 if (any_structural_p)
8275 SET_TYPE_STRUCTURAL_EQUALITY (t);
8276 else if (any_noncanonical_p)
8277 TYPE_CANONICAL (t)
8278 = build_method_type_directly (TYPE_CANONICAL (basetype),
8279 TYPE_CANONICAL (rettype),
8280 canon_argtypes);
8281 if (!COMPLETE_TYPE_P (t))
8282 layout_type (t);
8283
8284 return t;
8285 }
8286
8287 /* Construct, lay out and return the type of methods belonging to class
8288 BASETYPE and whose arguments and values are described by TYPE.
8289 If that type exists already, reuse it.
8290 TYPE must be a FUNCTION_TYPE node. */
8291
8292 tree
8293 build_method_type (tree basetype, tree type)
8294 {
8295 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8296
8297 return build_method_type_directly (basetype,
8298 TREE_TYPE (type),
8299 TYPE_ARG_TYPES (type));
8300 }
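
/* A minimal usage sketch, for illustration only: given a RECORD_TYPE for
   some class (the hypothetical class_type below) and the FUNCTION_TYPE of
   "int (double)", the resulting METHOD_TYPE gains an implicit leading
   "this" parameter of type pointer-to-class.

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);
     tree mtype = build_method_type (class_type, fntype);  */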
8301
8302 /* Construct, lay out and return the type of offsets to a value
8303 of type TYPE, within an object of type BASETYPE.
8304 If a suitable offset type exists already, reuse it. */
8305
8306 tree
8307 build_offset_type (tree basetype, tree type)
8308 {
8309 tree t;
8310 inchash::hash hstate;
8311
8312 /* Make a node of the sort we want. */
8313 t = make_node (OFFSET_TYPE);
8314
8315 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8316 TREE_TYPE (t) = type;
8317
8318 /* If we already have such a type, use the old one. */
8319 hstate.add_object (TYPE_HASH (basetype));
8320 hstate.add_object (TYPE_HASH (type));
8321 t = type_hash_canon (hstate.end (), t);
8322
8323 if (!COMPLETE_TYPE_P (t))
8324 layout_type (t);
8325
8326 if (TYPE_CANONICAL (t) == t)
8327 {
8328 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8329 || TYPE_STRUCTURAL_EQUALITY_P (type))
8330 SET_TYPE_STRUCTURAL_EQUALITY (t);
8331 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8332 || TYPE_CANONICAL (type) != type)
8333 TYPE_CANONICAL (t)
8334 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8335 TYPE_CANONICAL (type));
8336 }
8337
8338 return t;
8339 }
8340
8341 /* Create a complex type whose components are COMPONENT_TYPE. */
8342
8343 tree
8344 build_complex_type (tree component_type)
8345 {
8346 tree t;
8347 inchash::hash hstate;
8348
8349 gcc_assert (INTEGRAL_TYPE_P (component_type)
8350 || SCALAR_FLOAT_TYPE_P (component_type)
8351 || FIXED_POINT_TYPE_P (component_type));
8352
8353 /* Make a node of the sort we want. */
8354 t = make_node (COMPLEX_TYPE);
8355
8356 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8357
8358 /* If we already have such a type, use the old one. */
8359 hstate.add_object (TYPE_HASH (component_type));
8360 t = type_hash_canon (hstate.end (), t);
8361
8362 if (!COMPLETE_TYPE_P (t))
8363 layout_type (t);
8364
8365 if (TYPE_CANONICAL (t) == t)
8366 {
8367 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8368 SET_TYPE_STRUCTURAL_EQUALITY (t);
8369 else if (TYPE_CANONICAL (component_type) != component_type)
8370 TYPE_CANONICAL (t)
8371 = build_complex_type (TYPE_CANONICAL (component_type));
8372 }
8373
8374 /* We need to create a name, since complex is a fundamental type. */
8375 if (! TYPE_NAME (t))
8376 {
8377 const char *name;
8378 if (component_type == char_type_node)
8379 name = "complex char";
8380 else if (component_type == signed_char_type_node)
8381 name = "complex signed char";
8382 else if (component_type == unsigned_char_type_node)
8383 name = "complex unsigned char";
8384 else if (component_type == short_integer_type_node)
8385 name = "complex short int";
8386 else if (component_type == short_unsigned_type_node)
8387 name = "complex short unsigned int";
8388 else if (component_type == integer_type_node)
8389 name = "complex int";
8390 else if (component_type == unsigned_type_node)
8391 name = "complex unsigned int";
8392 else if (component_type == long_integer_type_node)
8393 name = "complex long int";
8394 else if (component_type == long_unsigned_type_node)
8395 name = "complex long unsigned int";
8396 else if (component_type == long_long_integer_type_node)
8397 name = "complex long long int";
8398 else if (component_type == long_long_unsigned_type_node)
8399 name = "complex long long unsigned int";
8400 else
8401 name = 0;
8402
8403 if (name != 0)
8404 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8405 get_identifier (name), t);
8406 }
8407
8408 return build_qualified_type (t, TYPE_QUALS (component_type));
8409 }
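
/* A minimal usage sketch, for illustration only: COMPLEX_TYPEs are shared
   through the type hash table, so repeated requests for the same component
   type return the same node.

     tree c1 = build_complex_type (double_type_node);
     tree c2 = build_complex_type (double_type_node);
     gcc_assert (c1 == c2 && TREE_CODE (c1) == COMPLEX_TYPE);  */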
8410
8411 /* If TYPE is a real or complex floating-point type and the target
8412 does not directly support arithmetic on TYPE then return the wider
8413 type to be used for arithmetic on TYPE. Otherwise, return
8414 NULL_TREE. */
8415
8416 tree
8417 excess_precision_type (tree type)
8418 {
8419 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8420 {
8421 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8422 switch (TREE_CODE (type))
8423 {
8424 case REAL_TYPE:
8425 switch (flt_eval_method)
8426 {
8427 case 1:
8428 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8429 return double_type_node;
8430 break;
8431 case 2:
8432 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8433 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8434 return long_double_type_node;
8435 break;
8436 default:
8437 gcc_unreachable ();
8438 }
8439 break;
8440 case COMPLEX_TYPE:
8441 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8442 return NULL_TREE;
8443 switch (flt_eval_method)
8444 {
8445 case 1:
8446 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8447 return complex_double_type_node;
8448 break;
8449 case 2:
8450 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8451 || (TYPE_MODE (TREE_TYPE (type))
8452 == TYPE_MODE (double_type_node)))
8453 return complex_long_double_type_node;
8454 break;
8455 default:
8456 gcc_unreachable ();
8457 }
8458 break;
8459 default:
8460 break;
8461 }
8462 }
8463 return NULL_TREE;
8464 }
8465 \f
8466 /* Return OP, stripped of any conversions to wider types as much as is safe.
8467 Converting the value back to OP's type makes a value equivalent to OP.
8468
8469 If FOR_TYPE is nonzero, we return a value which, if converted to
8470 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8471
8472 OP must have integer, real or enumeral type. Pointers are not allowed!
8473
8474 There are some cases where the obvious value we could return
8475 would regenerate to OP if converted to OP's type,
8476 but would not extend like OP to wider types.
8477 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8478 For example, if OP is (unsigned short)(signed char)-1,
8479 we avoid returning (signed char)-1 if FOR_TYPE is int,
8480 even though extending that to an unsigned short would regenerate OP,
8481 since the result of extending (signed char)-1 to (int)
8482 is different from (int) OP. */
8483
8484 tree
8485 get_unwidened (tree op, tree for_type)
8486 {
8487 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8488 tree type = TREE_TYPE (op);
8489 unsigned final_prec
8490 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8491 int uns
8492 = (for_type != 0 && for_type != type
8493 && final_prec > TYPE_PRECISION (type)
8494 && TYPE_UNSIGNED (type));
8495 tree win = op;
8496
8497 while (CONVERT_EXPR_P (op))
8498 {
8499 int bitschange;
8500
8501 /* TYPE_PRECISION on vector types has different meaning
8502 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8503 so avoid them here. */
8504 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8505 break;
8506
8507 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8508 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8509
8510 /* Truncations are many-one so cannot be removed, unless we are
8511 later going to truncate down even further. */
8512 if (bitschange < 0
8513 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8514 break;
8515
8516 /* See what's inside this conversion. If we decide to strip it,
8517 we will set WIN. */
8518 op = TREE_OPERAND (op, 0);
8519
8520 /* If we have not stripped any zero-extensions (uns is 0),
8521 we can strip any kind of extension.
8522 If we have previously stripped a zero-extension,
8523 only zero-extensions can safely be stripped.
8524 Any extension can be stripped if the bits it would produce
8525 are all going to be discarded later by truncating to FOR_TYPE. */
8526
8527 if (bitschange > 0)
8528 {
8529 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8530 win = op;
8531 /* TYPE_UNSIGNED says whether this is a zero-extension.
8532 Let's avoid computing it if it does not affect WIN
8533 and if UNS will not be needed again. */
8534 if ((uns
8535 || CONVERT_EXPR_P (op))
8536 && TYPE_UNSIGNED (TREE_TYPE (op)))
8537 {
8538 uns = 1;
8539 win = op;
8540 }
8541 }
8542 }
8543
8544 /* If we finally reach a constant, see if it fits in for_type and
8545 in that case convert it. */
8546 if (for_type
8547 && TREE_CODE (win) == INTEGER_CST
8548 && TREE_TYPE (win) != for_type
8549 && int_fits_type_p (win, for_type))
8550 win = fold_convert (for_type, win);
8551
8552 return win;
8553 }
8554 \f
8555 /* Return OP or a simpler expression for a narrower value
8556 which can be sign-extended or zero-extended to give back OP.
8557 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8558 or 0 if the value should be sign-extended. */
8559
8560 tree
8561 get_narrower (tree op, int *unsignedp_ptr)
8562 {
8563 int uns = 0;
8564 int first = 1;
8565 tree win = op;
8566 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8567
8568 while (TREE_CODE (op) == NOP_EXPR)
8569 {
8570 int bitschange
8571 = (TYPE_PRECISION (TREE_TYPE (op))
8572 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8573
8574 /* Truncations are many-one so cannot be removed. */
8575 if (bitschange < 0)
8576 break;
8577
8578 /* See what's inside this conversion. If we decide to strip it,
8579 we will set WIN. */
8580
8581 if (bitschange > 0)
8582 {
8583 op = TREE_OPERAND (op, 0);
8584 /* An extension: the outermost one can be stripped,
8585 but remember whether it is zero or sign extension. */
8586 if (first)
8587 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8588 /* Otherwise, if a sign extension has been stripped,
8589 only sign extensions can now be stripped;
8590 if a zero extension has been stripped, only zero-extensions. */
8591 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8592 break;
8593 first = 0;
8594 }
8595 else /* bitschange == 0 */
8596 {
8597 /* A change in nominal type can always be stripped, but we must
8598 preserve the unsignedness. */
8599 if (first)
8600 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8601 first = 0;
8602 op = TREE_OPERAND (op, 0);
8603 /* Keep trying to narrow, but don't assign op to win if it
8604 would turn an integral type into something else. */
8605 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8606 continue;
8607 }
8608
8609 win = op;
8610 }
8611
8612 if (TREE_CODE (op) == COMPONENT_REF
8613 /* Since type_for_size always gives an integer type. */
8614 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8615 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8616 /* Ensure field is laid out already. */
8617 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8618 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8619 {
8620 unsigned HOST_WIDE_INT innerprec
8621 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8622 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8623 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8624 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8625
8626 /* We can get this structure field in a narrower type that fits it,
8627 but the resulting extension to its nominal type (a fullword type)
8628 must satisfy the same conditions as for other extensions.
8629
8630 Do this only for fields that are aligned (not bit-fields),
8631 because when bit-field insns are used there is no
8632 advantage in doing this. */
8633
8634 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8635 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8636 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8637 && type != 0)
8638 {
8639 if (first)
8640 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8641 win = fold_convert (type, op);
8642 }
8643 }
8644
8645 *unsignedp_ptr = uns;
8646 return win;
8647 }
8648 \f
8649 /* Returns true if integer constant C has a value that is permissible
8650 for type TYPE (an INTEGER_TYPE). */
8651
8652 bool
8653 int_fits_type_p (const_tree c, const_tree type)
8654 {
8655 tree type_low_bound, type_high_bound;
8656 bool ok_for_low_bound, ok_for_high_bound;
8657 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8658
8659 retry:
8660 type_low_bound = TYPE_MIN_VALUE (type);
8661 type_high_bound = TYPE_MAX_VALUE (type);
8662
8663 /* If at least one bound of the type is a constant integer, we can check
8664 ourselves and maybe make a decision. If no such decision is possible, but
8665 this type is a subtype, try checking against that. Otherwise, use
8666 fits_to_tree_p, which checks against the precision.
8667
8668 Compute the status for each possibly constant bound, and return if we see
8669 one does not match. Use ok_for_xxx_bound for this purpose: it is set
8670 to true when the corresponding bound is a constant that C is known to
8671 satisfy, and left false when that bound is not constant. */
8672
8673 /* Check if c >= type_low_bound. */
8674 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8675 {
8676 if (tree_int_cst_lt (c, type_low_bound))
8677 return false;
8678 ok_for_low_bound = true;
8679 }
8680 else
8681 ok_for_low_bound = false;
8682
8683 /* Check if c <= type_high_bound. */
8684 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8685 {
8686 if (tree_int_cst_lt (type_high_bound, c))
8687 return false;
8688 ok_for_high_bound = true;
8689 }
8690 else
8691 ok_for_high_bound = false;
8692
8693 /* If the constant fits both bounds, the result is known. */
8694 if (ok_for_low_bound && ok_for_high_bound)
8695 return true;
8696
8697 /* Perform some generic filtering which may allow making a decision
8698 even if the bounds are not constant. First, negative integers
8699 never fit in unsigned types. */
8700 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8701 return false;
8702
8703 /* Second, narrower types always fit in wider ones. */
8704 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8705 return true;
8706
8707 /* Third, unsigned integers with top bit set never fit signed types. */
8708 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8709 {
8710 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8711 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8712 {
8713 /* When a tree_cst is converted to a wide-int, the precision
8714 is taken from the type. However, if the precision of the
8715 mode underneath the type is smaller than that, it is
8716 possible that the value will not fit. The test below
8717 fails if any bit is set between the sign bit of the
8718 underlying mode and the top bit of the type. */
8719 if (wi::ne_p (wi::zext (c, prec - 1), c))
8720 return false;
8721 }
8722 else if (wi::neg_p (c))
8723 return false;
8724 }
8725
8726 /* If we haven't been able to decide at this point, there is nothing more we
8727 can check ourselves here. Look at the base type if we have one and it
8728 has the same precision. */
8729 if (TREE_CODE (type) == INTEGER_TYPE
8730 && TREE_TYPE (type) != 0
8731 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8732 {
8733 type = TREE_TYPE (type);
8734 goto retry;
8735 }
8736
8737 /* Or to fits_to_tree_p, if nothing else. */
8738 return wi::fits_to_tree_p (c, type);
8739 }
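
/* A minimal usage sketch, for illustration only: 300 fits in a 16-bit
   signed type but not in an 8-bit one.

     tree c = build_int_cst (integer_type_node, 300);
     gcc_assert (int_fits_type_p (c, build_nonstandard_integer_type (16, 0)));
     gcc_assert (!int_fits_type_p (c, build_nonstandard_integer_type (8, 0)));  */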
8740
8741 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8742 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8743 represented (assuming two's-complement arithmetic) within the bit
8744 precision of the type are returned instead. */
8745
8746 void
8747 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8748 {
8749 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8750 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8751 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8752 else
8753 {
8754 if (TYPE_UNSIGNED (type))
8755 mpz_set_ui (min, 0);
8756 else
8757 {
8758 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8759 wi::to_mpz (mn, min, SIGNED);
8760 }
8761 }
8762
8763 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8764 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8765 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8766 else
8767 {
8768 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8769 wi::to_mpz (mn, max, TYPE_SIGN (type));
8770 }
8771 }
8772
8773 /* Return true if VAR is an automatic variable defined in function FN. */
8774
8775 bool
8776 auto_var_in_fn_p (const_tree var, const_tree fn)
8777 {
8778 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8779 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8780 || TREE_CODE (var) == PARM_DECL)
8781 && ! TREE_STATIC (var))
8782 || TREE_CODE (var) == LABEL_DECL
8783 || TREE_CODE (var) == RESULT_DECL));
8784 }
8785
8786 /* Subprogram of following function. Called by walk_tree.
8787
8788 Return *TP if it is an automatic variable or parameter of the
8789 function passed in as DATA. */
8790
8791 static tree
8792 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8793 {
8794 tree fn = (tree) data;
8795
8796 if (TYPE_P (*tp))
8797 *walk_subtrees = 0;
8798
8799 else if (DECL_P (*tp)
8800 && auto_var_in_fn_p (*tp, fn))
8801 return *tp;
8802
8803 return NULL_TREE;
8804 }
8805
8806 /* Returns true if T is, contains, or refers to a type with variable
8807 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8808 arguments, but not the return type. If FN is nonzero, only return
8809 true if a modifier of the type or position of FN is a variable or
8810 parameter inside FN.
8811
8812 This concept is more general than that of C99 'variably modified types':
8813 in C99, a struct type is never variably modified because a VLA may not
8814 appear as a structure member. In GNU C, however, code like:
8815
8816 struct S { int i[f()]; };
8817
8818 is valid, and other languages may define similar constructs. */
8819
8820 bool
8821 variably_modified_type_p (tree type, tree fn)
8822 {
8823 tree t;
8824
8825 /* Test if T is either variable (if FN is zero) or an expression containing
8826 a variable in FN. If TYPE isn't gimplified, return true also if
8827 gimplify_one_sizepos would gimplify the expression into a local
8828 variable. */
8829 #define RETURN_TRUE_IF_VAR(T) \
8830 do { tree _t = (T); \
8831 if (_t != NULL_TREE \
8832 && _t != error_mark_node \
8833 && TREE_CODE (_t) != INTEGER_CST \
8834 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8835 && (!fn \
8836 || (!TYPE_SIZES_GIMPLIFIED (type) \
8837 && !is_gimple_sizepos (_t)) \
8838 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8839 return true; } while (0)
8840
8841 if (type == error_mark_node)
8842 return false;
8843
8844 /* If TYPE itself has variable size, it is variably modified. */
8845 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8846 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8847
8848 switch (TREE_CODE (type))
8849 {
8850 case POINTER_TYPE:
8851 case REFERENCE_TYPE:
8852 case VECTOR_TYPE:
8853 if (variably_modified_type_p (TREE_TYPE (type), fn))
8854 return true;
8855 break;
8856
8857 case FUNCTION_TYPE:
8858 case METHOD_TYPE:
8859 /* If TYPE is a function type, it is variably modified if the
8860 return type is variably modified. */
8861 if (variably_modified_type_p (TREE_TYPE (type), fn))
8862 return true;
8863 break;
8864
8865 case INTEGER_TYPE:
8866 case REAL_TYPE:
8867 case FIXED_POINT_TYPE:
8868 case ENUMERAL_TYPE:
8869 case BOOLEAN_TYPE:
8870 /* Scalar types are variably modified if their end points
8871 aren't constant. */
8872 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8873 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8874 break;
8875
8876 case RECORD_TYPE:
8877 case UNION_TYPE:
8878 case QUAL_UNION_TYPE:
8879 /* We can't see if any of the fields are variably-modified by the
8880 definition we normally use, since that would produce infinite
8881 recursion via pointers. */
8882 /* This is variably modified if some field's type is. */
8883 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8884 if (TREE_CODE (t) == FIELD_DECL)
8885 {
8886 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8887 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8888 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8889
8890 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8891 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8892 }
8893 break;
8894
8895 case ARRAY_TYPE:
8896 /* Do not call ourselves to avoid infinite recursion. This is
8897 variably modified if the element type is. */
8898 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8899 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8900 break;
8901
8902 default:
8903 break;
8904 }
8905
8906 /* The current language may have other cases to check, but in general,
8907 all other types are not variably modified. */
8908 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8909
8910 #undef RETURN_TRUE_IF_VAR
8911 }
8912
8913 /* Given a DECL or TYPE, return the scope in which it was declared, or
8914 NULL_TREE if there is no containing scope. */
8915
8916 tree
8917 get_containing_scope (const_tree t)
8918 {
8919 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8920 }
8921
8922 /* Return the innermost context enclosing DECL that is
8923 a FUNCTION_DECL, or zero if none. */
8924
8925 tree
8926 decl_function_context (const_tree decl)
8927 {
8928 tree context;
8929
8930 if (TREE_CODE (decl) == ERROR_MARK)
8931 return 0;
8932
8933 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8934 where we look up the function at runtime. Such functions always take
8935 a first argument of type 'pointer to real context'.
8936
8937 C++ should really be fixed to use DECL_CONTEXT for the real context,
8938 and use something else for the "virtual context". */
8939 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8940 context
8941 = TYPE_MAIN_VARIANT
8942 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8943 else
8944 context = DECL_CONTEXT (decl);
8945
8946 while (context && TREE_CODE (context) != FUNCTION_DECL)
8947 {
8948 if (TREE_CODE (context) == BLOCK)
8949 context = BLOCK_SUPERCONTEXT (context);
8950 else
8951 context = get_containing_scope (context);
8952 }
8953
8954 return context;
8955 }
8956
8957 /* Return the innermost context enclosing DECL that is
8958 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8959 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8960
8961 tree
8962 decl_type_context (const_tree decl)
8963 {
8964 tree context = DECL_CONTEXT (decl);
8965
8966 while (context)
8967 switch (TREE_CODE (context))
8968 {
8969 case NAMESPACE_DECL:
8970 case TRANSLATION_UNIT_DECL:
8971 return NULL_TREE;
8972
8973 case RECORD_TYPE:
8974 case UNION_TYPE:
8975 case QUAL_UNION_TYPE:
8976 return context;
8977
8978 case TYPE_DECL:
8979 case FUNCTION_DECL:
8980 context = DECL_CONTEXT (context);
8981 break;
8982
8983 case BLOCK:
8984 context = BLOCK_SUPERCONTEXT (context);
8985 break;
8986
8987 default:
8988 gcc_unreachable ();
8989 }
8990
8991 return NULL_TREE;
8992 }
8993
8994 /* CALL is a CALL_EXPR. Return the declaration for the function
8995 called, or NULL_TREE if the called function cannot be
8996 determined. */
8997
8998 tree
8999 get_callee_fndecl (const_tree call)
9000 {
9001 tree addr;
9002
9003 if (call == error_mark_node)
9004 return error_mark_node;
9005
9006 /* It's invalid to call this function with anything but a
9007 CALL_EXPR. */
9008 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9009
9010 /* The first operand to the CALL is the address of the function
9011 called. */
9012 addr = CALL_EXPR_FN (call);
9013
9014 /* If there is no function, return early. */
9015 if (addr == NULL_TREE)
9016 return NULL_TREE;
9017
9018 STRIP_NOPS (addr);
9019
9020 /* If this is a readonly function pointer, extract its initial value. */
9021 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9022 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9023 && DECL_INITIAL (addr))
9024 addr = DECL_INITIAL (addr);
9025
9026 /* If the address is just `&f' for some function `f', then we know
9027 that `f' is being called. */
9028 if (TREE_CODE (addr) == ADDR_EXPR
9029 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9030 return TREE_OPERAND (addr, 0);
9031
9032 /* We couldn't figure out what was being called. */
9033 return NULL_TREE;
9034 }
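
/* A minimal usage sketch, for illustration only: a typical caller checks
   whether a CALL_EXPR is a direct call to a known built-in.  The variable
   call is hypothetical here.

     tree fndecl = get_callee_fndecl (call);
     if (fndecl
         && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
         && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMCPY)
       ...  */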
9035
9036 /* Print debugging information about tree nodes generated during the compile,
9037 and any language-specific information. */
9038
9039 void
9040 dump_tree_statistics (void)
9041 {
9042 if (GATHER_STATISTICS)
9043 {
9044 int i;
9045 int total_nodes, total_bytes;
9046 fprintf (stderr, "Kind Nodes Bytes\n");
9047 fprintf (stderr, "---------------------------------------\n");
9048 total_nodes = total_bytes = 0;
9049 for (i = 0; i < (int) all_kinds; i++)
9050 {
9051 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9052 tree_node_counts[i], tree_node_sizes[i]);
9053 total_nodes += tree_node_counts[i];
9054 total_bytes += tree_node_sizes[i];
9055 }
9056 fprintf (stderr, "---------------------------------------\n");
9057 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9058 fprintf (stderr, "---------------------------------------\n");
9059 fprintf (stderr, "Code Nodes\n");
9060 fprintf (stderr, "----------------------------\n");
9061 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9062 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9063 tree_code_counts[i]);
9064 fprintf (stderr, "----------------------------\n");
9065 ssanames_print_statistics ();
9066 phinodes_print_statistics ();
9067 }
9068 else
9069 fprintf (stderr, "(No per-node statistics)\n");
9070
9071 print_type_hash_statistics ();
9072 print_debug_expr_statistics ();
9073 print_value_expr_statistics ();
9074 lang_hooks.print_statistics ();
9075 }
9076 \f
9077 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9078
9079 /* Generate a crc32 of the low BITS bits of VALUE. */
9080
9081 static unsigned
9082 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9083 {
9084 unsigned ix;
9085
9086 for (ix = bits; ix--; value <<= 1)
9087 {
9088 unsigned feedback;
9089
9090 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9091 chksum <<= 1;
9092 chksum ^= feedback;
9093 }
9094 return chksum;
9095 }
9096
9097 /* Generate a crc32 of a 32-bit unsigned. */
9098
9099 unsigned
9100 crc32_unsigned (unsigned chksum, unsigned value)
9101 {
9102 return crc32_unsigned_bits (chksum, value, 32);
9103 }
9104
9105 /* Generate a crc32 of a byte. */
9106
9107 unsigned
9108 crc32_byte (unsigned chksum, char byte)
9109 {
9110 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9111 }
9112
9113 /* Generate a crc32 of a string. */
9114
9115 unsigned
9116 crc32_string (unsigned chksum, const char *string)
9117 {
9118 do
9119 {
9120 chksum = crc32_byte (chksum, *string);
9121 }
9122 while (*string++);
9123 return chksum;
9124 }
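
/* A minimal usage sketch, for illustration only: checksums are chained by
   feeding the running value back in, which is how several strings are
   combined into one crc32.

     unsigned chk = crc32_string (0, "foo");
     chk = crc32_string (chk, "bar");  */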
9125
9126 /* P is a string that will be used in a symbol. Mask out any characters
9127 that are not valid in that context. */
9128
9129 void
9130 clean_symbol_name (char *p)
9131 {
9132 for (; *p; p++)
9133 if (! (ISALNUM (*p)
9134 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9135 || *p == '$'
9136 #endif
9137 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9138 || *p == '.'
9139 #endif
9140 ))
9141 *p = '_';
9142 }
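
/* A minimal usage sketch, for illustration only: invalid characters are
   rewritten to underscores in place, so '-' always becomes '_', and '.'
   does too on targets that define NO_DOT_IN_LABEL.

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);  */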
9143
9144 /* Generate a name for a special-purpose function.
9145 The generated name may need to be unique across the whole link.
9146 Changes to this function may also require corresponding changes to
9147 xstrdup_mask_random.
9148 TYPE is some string to identify the purpose of this function to the
9149 linker or collect2; it must start with an uppercase letter,
9150 one of:
9151 I - for constructors
9152 D - for destructors
9153 N - for C++ anonymous namespaces
9154 F - for DWARF unwind frame information. */
9155
9156 tree
9157 get_file_function_name (const char *type)
9158 {
9159 char *buf;
9160 const char *p;
9161 char *q;
9162
9163 /* If we already have a name we know to be unique, just use that. */
9164 if (first_global_object_name)
9165 p = q = ASTRDUP (first_global_object_name);
9166 /* If the target is handling the constructors/destructors, they
9167 will be local to this file and the name is only necessary for
9168 debugging purposes.
9169 We also assign sub_I and sub_D suffixes to constructors called from
9170 the global static constructors. These are always local. */
9171 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9172 || (strncmp (type, "sub_", 4) == 0
9173 && (type[4] == 'I' || type[4] == 'D')))
9174 {
9175 const char *file = main_input_filename;
9176 if (! file)
9177 file = LOCATION_FILE (input_location);
9178 /* Just use the file's basename, because the full pathname
9179 might be quite long. */
9180 p = q = ASTRDUP (lbasename (file));
9181 }
9182 else
9183 {
9184 /* Otherwise, the name must be unique across the entire link.
9185 We don't have anything that we know to be unique to this translation
9186 unit, so use what we do have and throw in some randomness. */
9187 unsigned len;
9188 const char *name = weak_global_object_name;
9189 const char *file = main_input_filename;
9190
9191 if (! name)
9192 name = "";
9193 if (! file)
9194 file = LOCATION_FILE (input_location);
9195
9196 len = strlen (file);
9197 q = (char *) alloca (9 + 17 + len + 1);
9198 memcpy (q, file, len + 1);
9199
9200 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9201 crc32_string (0, name), get_random_seed (false));
9202
9203 p = q;
9204 }
9205
9206 clean_symbol_name (q);
9207 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9208 + strlen (type));
9209
9210 /* Set up the name of the file-level functions we may need.
9211 Use a global object (which is already required to be unique over
9212 the program) rather than the file name (which imposes extra
9213 constraints). */
9214 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9215
9216 return get_identifier (buf);
9217 }
9218 \f
9219 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9220
9221 /* Complain that the tree code of NODE does not match the expected 0
9222 terminated list of trailing codes. The trailing code list can be
9223 empty, for a more vague error message. FILE, LINE, and FUNCTION
9224 are of the caller. */
9225
9226 void
9227 tree_check_failed (const_tree node, const char *file,
9228 int line, const char *function, ...)
9229 {
9230 va_list args;
9231 const char *buffer;
9232 unsigned length = 0;
9233 enum tree_code code;
9234
9235 va_start (args, function);
9236 while ((code = (enum tree_code) va_arg (args, int)))
9237 length += 4 + strlen (get_tree_code_name (code));
9238 va_end (args);
9239 if (length)
9240 {
9241 char *tmp;
9242 va_start (args, function);
9243 length += strlen ("expected ");
9244 buffer = tmp = (char *) alloca (length);
9245 length = 0;
9246 while ((code = (enum tree_code) va_arg (args, int)))
9247 {
9248 const char *prefix = length ? " or " : "expected ";
9249
9250 strcpy (tmp + length, prefix);
9251 length += strlen (prefix);
9252 strcpy (tmp + length, get_tree_code_name (code));
9253 length += strlen (get_tree_code_name (code));
9254 }
9255 va_end (args);
9256 }
9257 else
9258 buffer = "unexpected node";
9259
9260 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9261 buffer, get_tree_code_name (TREE_CODE (node)),
9262 function, trim_filename (file), line);
9263 }
9264
9265 /* Complain that the tree code of NODE does match the expected 0
9266 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9267 the caller. */
9268
9269 void
9270 tree_not_check_failed (const_tree node, const char *file,
9271 int line, const char *function, ...)
9272 {
9273 va_list args;
9274 char *buffer;
9275 unsigned length = 0;
9276 enum tree_code code;
9277
9278 va_start (args, function);
9279 while ((code = (enum tree_code) va_arg (args, int)))
9280 length += 4 + strlen (get_tree_code_name (code));
9281 va_end (args);
9282 va_start (args, function);
9283 buffer = (char *) alloca (length);
9284 length = 0;
9285 while ((code = (enum tree_code) va_arg (args, int)))
9286 {
9287 if (length)
9288 {
9289 strcpy (buffer + length, " or ");
9290 length += 4;
9291 }
9292 strcpy (buffer + length, get_tree_code_name (code));
9293 length += strlen (get_tree_code_name (code));
9294 }
9295 va_end (args);
9296
9297 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9298 buffer, get_tree_code_name (TREE_CODE (node)),
9299 function, trim_filename (file), line);
9300 }
9301
9302 /* Similar to tree_check_failed, except that we check for a class of tree
9303 code, given in CL. */
9304
9305 void
9306 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9307 const char *file, int line, const char *function)
9308 {
9309 internal_error
9310 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9311 TREE_CODE_CLASS_STRING (cl),
9312 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9313 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9314 }
9315
9316 /* Similar to tree_check_failed, except that instead of specifying a
9317 dozen codes, use the knowledge that they're all sequential. */
9318
9319 void
9320 tree_range_check_failed (const_tree node, const char *file, int line,
9321 const char *function, enum tree_code c1,
9322 enum tree_code c2)
9323 {
9324 char *buffer;
9325 unsigned length = 0;
9326 unsigned int c;
9327
9328 for (c = c1; c <= c2; ++c)
9329 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9330
9331 length += strlen ("expected ");
9332 buffer = (char *) alloca (length);
9333 length = 0;
9334
9335 for (c = c1; c <= c2; ++c)
9336 {
9337 const char *prefix = length ? " or " : "expected ";
9338
9339 strcpy (buffer + length, prefix);
9340 length += strlen (prefix);
9341 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9342 length += strlen (get_tree_code_name ((enum tree_code) c));
9343 }
9344
9345 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9346 buffer, get_tree_code_name (TREE_CODE (node)),
9347 function, trim_filename (file), line);
9348 }
9349
9350
9351 /* Similar to tree_check_failed, except that we check that a tree does
9352 not belong to the specified class of tree codes, given in CL. */
9353
9354 void
9355 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9356 const char *file, int line, const char *function)
9357 {
9358 internal_error
9359 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9360 TREE_CODE_CLASS_STRING (cl),
9361 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9362 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9363 }
9364
9365
9366 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9367
9368 void
9369 omp_clause_check_failed (const_tree node, const char *file, int line,
9370 const char *function, enum omp_clause_code code)
9371 {
9372 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9373 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9374 function, trim_filename (file), line);
9375 }
9376
9377
9378 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9379
9380 void
9381 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9382 const char *function, enum omp_clause_code c1,
9383 enum omp_clause_code c2)
9384 {
9385 char *buffer;
9386 unsigned length = 0;
9387 unsigned int c;
9388
9389 for (c = c1; c <= c2; ++c)
9390 length += 4 + strlen (omp_clause_code_name[c]);
9391
9392 length += strlen ("expected ");
9393 buffer = (char *) alloca (length);
9394 length = 0;
9395
9396 for (c = c1; c <= c2; ++c)
9397 {
9398 const char *prefix = length ? " or " : "expected ";
9399
9400 strcpy (buffer + length, prefix);
9401 length += strlen (prefix);
9402 strcpy (buffer + length, omp_clause_code_name[c]);
9403 length += strlen (omp_clause_code_name[c]);
9404 }
9405
9406 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9407 buffer, omp_clause_code_name[TREE_CODE (node)],
9408 function, trim_filename (file), line);
9409 }
9410
9411
9412 #undef DEFTREESTRUCT
9413 #define DEFTREESTRUCT(VAL, NAME) NAME,
9414
9415 static const char *ts_enum_names[] = {
9416 #include "treestruct.def"
9417 };
9418 #undef DEFTREESTRUCT
9419
9420 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9421
9422 /* Similar to tree_class_check_failed, except that we check for
9423 whether CODE contains the tree structure identified by EN. */
9424
9425 void
9426 tree_contains_struct_check_failed (const_tree node,
9427 const enum tree_node_structure_enum en,
9428 const char *file, int line,
9429 const char *function)
9430 {
9431 internal_error
9432 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9433 TS_ENUM_NAME (en),
9434 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9435 }
9436
9437
9438 /* Similar to above, except that the check is for the bounds of a
9439 tree_int_cst's (dynamically sized) element vector. */
9440
9441 void
9442 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9443 const char *function)
9444 {
9445 internal_error
9446 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9447 idx + 1, len, function, trim_filename (file), line);
9448 }
9449
9450 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9451 (dynamically sized) vector. */
9452
9453 void
9454 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9455 const char *function)
9456 {
9457 internal_error
9458 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9459 idx + 1, len, function, trim_filename (file), line);
9460 }
9461
9462 /* Similar to above, except that the check is for the bounds of the operand
9463 vector of an expression node EXP. */
9464
9465 void
9466 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9467 int line, const char *function)
9468 {
9469 enum tree_code code = TREE_CODE (exp);
9470 internal_error
9471 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9472 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9473 function, trim_filename (file), line);
9474 }
9475
9476 /* Similar to above, except that the check is for the number of
9477 operands of an OMP_CLAUSE node. */
9478
9479 void
9480 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9481 int line, const char *function)
9482 {
9483 internal_error
9484 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9485 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9486 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9487 trim_filename (file), line);
9488 }
9489 #endif /* ENABLE_TREE_CHECKING */
9490 \f
9491 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9492 and mapped to the machine mode MODE. Initialize its fields and build
9493 the information necessary for debugging output. */
9494
9495 static tree
9496 make_vector_type (tree innertype, int nunits, machine_mode mode)
9497 {
9498 tree t;
9499 inchash::hash hstate;
9500
9501 t = make_node (VECTOR_TYPE);
9502 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9503 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9504 SET_TYPE_MODE (t, mode);
9505
9506 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9507 SET_TYPE_STRUCTURAL_EQUALITY (t);
9508 else if (TYPE_CANONICAL (innertype) != innertype
9509 || mode != VOIDmode)
9510 TYPE_CANONICAL (t)
9511 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9512
9513 layout_type (t);
9514
9515 hstate.add_wide_int (VECTOR_TYPE);
9516 hstate.add_wide_int (nunits);
9517 hstate.add_wide_int (mode);
9518 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9519 t = type_hash_canon (hstate.end (), t);
9520
9521 /* We have built a main variant, based on the main variant of the
9522 inner type. Use it to build the variant we return. */
9523 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9524 && TREE_TYPE (t) != innertype)
9525 return build_type_attribute_qual_variant (t,
9526 TYPE_ATTRIBUTES (innertype),
9527 TYPE_QUALS (innertype));
9528
9529 return t;
9530 }
9531
9532 static tree
9533 make_or_reuse_type (unsigned size, int unsignedp)
9534 {
9535 int i;
9536
9537 if (size == INT_TYPE_SIZE)
9538 return unsignedp ? unsigned_type_node : integer_type_node;
9539 if (size == CHAR_TYPE_SIZE)
9540 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9541 if (size == SHORT_TYPE_SIZE)
9542 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9543 if (size == LONG_TYPE_SIZE)
9544 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9545 if (size == LONG_LONG_TYPE_SIZE)
9546 return (unsignedp ? long_long_unsigned_type_node
9547 : long_long_integer_type_node);
9548
9549 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9550 if (size == int_n_data[i].bitsize
9551 && int_n_enabled_p[i])
9552 return (unsignedp ? int_n_trees[i].unsigned_type
9553 : int_n_trees[i].signed_type);
9554
9555 if (unsignedp)
9556 return make_unsigned_type (size);
9557 else
9558 return make_signed_type (size);
9559 }
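/* make_or_reuse_type maps a bit SIZE and signedness onto one of the
   standard C integer type nodes when one matches, and only builds a fresh
   type otherwise.  Illustrative sketch (assuming a target with 32-bit int
   and 64-bit long): make_or_reuse_type (32, 1) returns unsigned_type_node,
   make_or_reuse_type (64, 0) returns long_integer_type_node, and a request
   for an unusual width such as 24 falls through to make_signed_type and
   builds a new node.  */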
9560
9561 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9562
9563 static tree
9564 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9565 {
9566 if (satp)
9567 {
9568 if (size == SHORT_FRACT_TYPE_SIZE)
9569 return unsignedp ? sat_unsigned_short_fract_type_node
9570 : sat_short_fract_type_node;
9571 if (size == FRACT_TYPE_SIZE)
9572 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9573 if (size == LONG_FRACT_TYPE_SIZE)
9574 return unsignedp ? sat_unsigned_long_fract_type_node
9575 : sat_long_fract_type_node;
9576 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9577 return unsignedp ? sat_unsigned_long_long_fract_type_node
9578 : sat_long_long_fract_type_node;
9579 }
9580 else
9581 {
9582 if (size == SHORT_FRACT_TYPE_SIZE)
9583 return unsignedp ? unsigned_short_fract_type_node
9584 : short_fract_type_node;
9585 if (size == FRACT_TYPE_SIZE)
9586 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9587 if (size == LONG_FRACT_TYPE_SIZE)
9588 return unsignedp ? unsigned_long_fract_type_node
9589 : long_fract_type_node;
9590 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9591 return unsignedp ? unsigned_long_long_fract_type_node
9592 : long_long_fract_type_node;
9593 }
9594
9595 return make_fract_type (size, unsignedp, satp);
9596 }
9597
9598 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9599
9600 static tree
9601 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9602 {
9603 if (satp)
9604 {
9605 if (size == SHORT_ACCUM_TYPE_SIZE)
9606 return unsignedp ? sat_unsigned_short_accum_type_node
9607 : sat_short_accum_type_node;
9608 if (size == ACCUM_TYPE_SIZE)
9609 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9610 if (size == LONG_ACCUM_TYPE_SIZE)
9611 return unsignedp ? sat_unsigned_long_accum_type_node
9612 : sat_long_accum_type_node;
9613 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9614 return unsignedp ? sat_unsigned_long_long_accum_type_node
9615 : sat_long_long_accum_type_node;
9616 }
9617 else
9618 {
9619 if (size == SHORT_ACCUM_TYPE_SIZE)
9620 return unsignedp ? unsigned_short_accum_type_node
9621 : short_accum_type_node;
9622 if (size == ACCUM_TYPE_SIZE)
9623 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9624 if (size == LONG_ACCUM_TYPE_SIZE)
9625 return unsignedp ? unsigned_long_accum_type_node
9626 : long_accum_type_node;
9627 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9628 return unsignedp ? unsigned_long_long_accum_type_node
9629 : long_long_accum_type_node;
9630 }
9631
9632 return make_accum_type (size, unsignedp, satp);
9633 }
9634
9635
9636 /* Create an atomic variant node for TYPE. This routine is called
9637 during initialization of data types to create the 5 basic atomic
9638 types. The generic build_variant_type function requires these to
9639    already be set up in order to function properly, so it cannot be
9640 called from there. If ALIGN is non-zero, then ensure alignment is
9641 overridden to this value. */
9642
9643 static tree
9644 build_atomic_base (tree type, unsigned int align)
9645 {
9646 tree t;
9647
9648   /* Make sure it's not already registered.  */
9649 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9650 return t;
9651
9652 t = build_variant_type_copy (type);
9653 set_type_quals (t, TYPE_QUAL_ATOMIC);
9654
9655 if (align)
9656 TYPE_ALIGN (t) = align;
9657
9658 return t;
9659 }
9660
9661 /* Create nodes for all integer types (and error_mark_node) using the sizes
9662 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9663 SHORT_DOUBLE specifies whether double should be of the same precision
9664 as float. */
9665
9666 void
9667 build_common_tree_nodes (bool signed_char, bool short_double)
9668 {
9669 int i;
9670
9671 error_mark_node = make_node (ERROR_MARK);
9672 TREE_TYPE (error_mark_node) = error_mark_node;
9673
9674 initialize_sizetypes ();
9675
9676 /* Define both `signed char' and `unsigned char'. */
9677 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9678 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9679 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9680 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9681
9682 /* Define `char', which is like either `signed char' or `unsigned char'
9683 but not the same as either. */
9684 char_type_node
9685 = (signed_char
9686 ? make_signed_type (CHAR_TYPE_SIZE)
9687 : make_unsigned_type (CHAR_TYPE_SIZE));
9688 TYPE_STRING_FLAG (char_type_node) = 1;
9689
9690 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9691 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9692 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9693 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9694 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9695 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9696 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9697 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9698
9699 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9700 {
9701 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9702 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9703 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9704 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9705
9706 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9707 && int_n_enabled_p[i])
9708 {
9709 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9710 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9711 }
9712 }
9713
9714 /* Define a boolean type. This type only represents boolean values but
9715 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9716 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9717 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9718 TYPE_PRECISION (boolean_type_node) = 1;
9719 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9720
9721 /* Define what type to use for size_t. */
9722 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9723 size_type_node = unsigned_type_node;
9724 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9725 size_type_node = long_unsigned_type_node;
9726 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9727 size_type_node = long_long_unsigned_type_node;
9728 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9729 size_type_node = short_unsigned_type_node;
9730 else
9731 {
9732 int i;
9733
9734 size_type_node = NULL_TREE;
9735 for (i = 0; i < NUM_INT_N_ENTS; i++)
9736 if (int_n_enabled_p[i])
9737 {
9738 char name[50];
9739 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9740
9741 if (strcmp (name, SIZE_TYPE) == 0)
9742 {
9743 size_type_node = int_n_trees[i].unsigned_type;
9744 }
9745 }
9746 if (size_type_node == NULL_TREE)
9747 gcc_unreachable ();
9748 }
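  /* Illustrative note: most targets define SIZE_TYPE as one of the strings
     matched above.  The fallback loop is for targets whose size_t is an
     extended __intN type; for example, a hypothetical port defining
     SIZE_TYPE as "__int20 unsigned" would match the int_n entry with
     bitsize 20, provided that entry is enabled.  */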
9749
9750 /* Fill in the rest of the sized types. Reuse existing type nodes
9751 when possible. */
9752 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9753 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9754 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9755 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9756 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9757
9758 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9759 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9760 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9761 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9762 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9763
9764   /* Don't call build_qualified_type for atomics.  That routine does
9765 special processing for atomics, and until they are initialized
9766 it's better not to make that call.
9767
9768 Check to see if there is a target override for atomic types. */
9769
9770 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9771 targetm.atomic_align_for_mode (QImode));
9772 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9773 targetm.atomic_align_for_mode (HImode));
9774 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9775 targetm.atomic_align_for_mode (SImode));
9776 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9777 targetm.atomic_align_for_mode (DImode));
9778 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9779 targetm.atomic_align_for_mode (TImode));
9780
9781 access_public_node = get_identifier ("public");
9782 access_protected_node = get_identifier ("protected");
9783 access_private_node = get_identifier ("private");
9784
9785   /* Define these next since types below may use them.  */
9786 integer_zero_node = build_int_cst (integer_type_node, 0);
9787 integer_one_node = build_int_cst (integer_type_node, 1);
9788 integer_three_node = build_int_cst (integer_type_node, 3);
9789 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9790
9791 size_zero_node = size_int (0);
9792 size_one_node = size_int (1);
9793 bitsize_zero_node = bitsize_int (0);
9794 bitsize_one_node = bitsize_int (1);
9795 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9796
9797 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9798 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9799
9800 void_type_node = make_node (VOID_TYPE);
9801 layout_type (void_type_node);
9802
9803 pointer_bounds_type_node = targetm.chkp_bound_type ();
9804
9805 /* We are not going to have real types in C with less than byte alignment,
9806 so we might as well not have any types that claim to have it. */
9807 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9808 TYPE_USER_ALIGN (void_type_node) = 0;
9809
9810 void_node = make_node (VOID_CST);
9811 TREE_TYPE (void_node) = void_type_node;
9812
9813 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9814 layout_type (TREE_TYPE (null_pointer_node));
9815
9816 ptr_type_node = build_pointer_type (void_type_node);
9817 const_ptr_type_node
9818 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9819 fileptr_type_node = ptr_type_node;
9820
9821 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9822
9823 float_type_node = make_node (REAL_TYPE);
9824 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9825 layout_type (float_type_node);
9826
9827 double_type_node = make_node (REAL_TYPE);
9828 if (short_double)
9829 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9830 else
9831 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9832 layout_type (double_type_node);
9833
9834 long_double_type_node = make_node (REAL_TYPE);
9835 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9836 layout_type (long_double_type_node);
9837
9838 float_ptr_type_node = build_pointer_type (float_type_node);
9839 double_ptr_type_node = build_pointer_type (double_type_node);
9840 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9841 integer_ptr_type_node = build_pointer_type (integer_type_node);
9842
9843 /* Fixed size integer types. */
9844 uint16_type_node = make_or_reuse_type (16, 1);
9845 uint32_type_node = make_or_reuse_type (32, 1);
9846 uint64_type_node = make_or_reuse_type (64, 1);
9847
9848 /* Decimal float types. */
9849 dfloat32_type_node = make_node (REAL_TYPE);
9850 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9851 layout_type (dfloat32_type_node);
9852 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9853 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9854
9855 dfloat64_type_node = make_node (REAL_TYPE);
9856 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9857 layout_type (dfloat64_type_node);
9858 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9859 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9860
9861 dfloat128_type_node = make_node (REAL_TYPE);
9862 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9863 layout_type (dfloat128_type_node);
9864 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9865 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9866
9867 complex_integer_type_node = build_complex_type (integer_type_node);
9868 complex_float_type_node = build_complex_type (float_type_node);
9869 complex_double_type_node = build_complex_type (double_type_node);
9870 complex_long_double_type_node = build_complex_type (long_double_type_node);
9871
9872 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9873 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9874 sat_ ## KIND ## _type_node = \
9875 make_sat_signed_ ## KIND ## _type (SIZE); \
9876 sat_unsigned_ ## KIND ## _type_node = \
9877 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9878 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9879 unsigned_ ## KIND ## _type_node = \
9880 make_unsigned_ ## KIND ## _type (SIZE);
9881
9882 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9883 sat_ ## WIDTH ## KIND ## _type_node = \
9884 make_sat_signed_ ## KIND ## _type (SIZE); \
9885 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9886 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9887 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9888 unsigned_ ## WIDTH ## KIND ## _type_node = \
9889 make_unsigned_ ## KIND ## _type (SIZE);
9890
9891 /* Make fixed-point type nodes based on four different widths. */
9892 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9893 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9894 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9895 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9896 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9897
9898 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9899 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9900 NAME ## _type_node = \
9901 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9902 u ## NAME ## _type_node = \
9903 make_or_reuse_unsigned_ ## KIND ## _type \
9904 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9905 sat_ ## NAME ## _type_node = \
9906 make_or_reuse_sat_signed_ ## KIND ## _type \
9907 (GET_MODE_BITSIZE (MODE ## mode)); \
9908 sat_u ## NAME ## _type_node = \
9909 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9910 (GET_MODE_BITSIZE (U ## MODE ## mode));
9911
9912 /* Fixed-point type and mode nodes. */
9913 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9914 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9915 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9916 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9917 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9918 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9919 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9920 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9921 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9922 MAKE_FIXED_MODE_NODE (accum, da, DA)
9923 MAKE_FIXED_MODE_NODE (accum, ta, TA)
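  /* Illustrative note: as an example of the expansions above,
     MAKE_FIXED_MODE_NODE (fract, sq, SQ) assigns sq_type_node,
     usq_type_node, sat_sq_type_node and sat_usq_type_node from the bit
     sizes of SQmode and USQmode.  */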
9924
9925 {
9926 tree t = targetm.build_builtin_va_list ();
9927
9928 /* Many back-ends define record types without setting TYPE_NAME.
9929 If we copied the record type here, we'd keep the original
9930 record type without a name. This breaks name mangling. So,
9931 don't copy record types and let c_common_nodes_and_builtins()
9932 declare the type to be __builtin_va_list. */
9933 if (TREE_CODE (t) != RECORD_TYPE)
9934 t = build_variant_type_copy (t);
9935
9936 va_list_type_node = t;
9937 }
9938 }
9939
9940 /* Modify DECL for given flags.
9941 TM_PURE attribute is set only on types, so the function will modify
9942 DECL's type when ECF_TM_PURE is used. */
9943
9944 void
9945 set_call_expr_flags (tree decl, int flags)
9946 {
9947 if (flags & ECF_NOTHROW)
9948 TREE_NOTHROW (decl) = 1;
9949 if (flags & ECF_CONST)
9950 TREE_READONLY (decl) = 1;
9951 if (flags & ECF_PURE)
9952 DECL_PURE_P (decl) = 1;
9953 if (flags & ECF_LOOPING_CONST_OR_PURE)
9954 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9955 if (flags & ECF_NOVOPS)
9956 DECL_IS_NOVOPS (decl) = 1;
9957 if (flags & ECF_NORETURN)
9958 TREE_THIS_VOLATILE (decl) = 1;
9959 if (flags & ECF_MALLOC)
9960 DECL_IS_MALLOC (decl) = 1;
9961 if (flags & ECF_RETURNS_TWICE)
9962 DECL_IS_RETURNS_TWICE (decl) = 1;
9963 if (flags & ECF_LEAF)
9964 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9965 NULL, DECL_ATTRIBUTES (decl));
9966 if ((flags & ECF_TM_PURE) && flag_tm)
9967 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9968 /* Looping const or pure is implied by noreturn.
9969 There is currently no way to declare looping const or looping pure alone. */
9970 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9971 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9972 }
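/* Illustrative sketch (not part of the original sources): a front end that
   wants a declaration treated as a const, nothrow leaf function would call

       set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   which is exactly how local_define_builtin below applies the ECF_* mask
   it is given.  */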
9973
9974
9975 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9976
9977 static void
9978 local_define_builtin (const char *name, tree type, enum built_in_function code,
9979 const char *library_name, int ecf_flags)
9980 {
9981 tree decl;
9982
9983 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9984 library_name, NULL_TREE);
9985 set_call_expr_flags (decl, ecf_flags);
9986
9987 set_builtin_decl (code, decl, true);
9988 }
9989
9990 /* Call this function after instantiating all builtins that the language
9991 front end cares about. This will build the rest of the builtins
9992 and internal functions that are relied upon by the tree optimizers and
9993 the middle-end. */
9994
9995 void
9996 build_common_builtin_nodes (void)
9997 {
9998 tree tmp, ftype;
9999 int ecf_flags;
10000
10001 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10002 {
10003 ftype = build_function_type (void_type_node, void_list_node);
10004 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10005 "__builtin_unreachable",
10006 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10007 | ECF_CONST);
10008 }
10009
10010 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10011 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10012 {
10013 ftype = build_function_type_list (ptr_type_node,
10014 ptr_type_node, const_ptr_type_node,
10015 size_type_node, NULL_TREE);
10016
10017 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10018 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10019 "memcpy", ECF_NOTHROW | ECF_LEAF);
10020 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10021 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10022 "memmove", ECF_NOTHROW | ECF_LEAF);
10023 }
10024
10025 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10026 {
10027 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10028 const_ptr_type_node, size_type_node,
10029 NULL_TREE);
10030 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10031 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10032 }
10033
10034 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10035 {
10036 ftype = build_function_type_list (ptr_type_node,
10037 ptr_type_node, integer_type_node,
10038 size_type_node, NULL_TREE);
10039 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10040 "memset", ECF_NOTHROW | ECF_LEAF);
10041 }
10042
10043 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10044 {
10045 ftype = build_function_type_list (ptr_type_node,
10046 size_type_node, NULL_TREE);
10047 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10048 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10049 }
10050
10051 ftype = build_function_type_list (ptr_type_node, size_type_node,
10052 size_type_node, NULL_TREE);
10053 local_define_builtin ("__builtin_alloca_with_align", ftype,
10054 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
10055 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10056
10057 /* If we're checking the stack, `alloca' can throw. */
10058 if (flag_stack_check)
10059 {
10060 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10061 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10062 }
10063
10064 ftype = build_function_type_list (void_type_node,
10065 ptr_type_node, ptr_type_node,
10066 ptr_type_node, NULL_TREE);
10067 local_define_builtin ("__builtin_init_trampoline", ftype,
10068 BUILT_IN_INIT_TRAMPOLINE,
10069 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10070 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10071 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10072 "__builtin_init_heap_trampoline",
10073 ECF_NOTHROW | ECF_LEAF);
10074
10075 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10076 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10077 BUILT_IN_ADJUST_TRAMPOLINE,
10078 "__builtin_adjust_trampoline",
10079 ECF_CONST | ECF_NOTHROW);
10080
10081 ftype = build_function_type_list (void_type_node,
10082 ptr_type_node, ptr_type_node, NULL_TREE);
10083 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10084 BUILT_IN_NONLOCAL_GOTO,
10085 "__builtin_nonlocal_goto",
10086 ECF_NORETURN | ECF_NOTHROW);
10087
10088 ftype = build_function_type_list (void_type_node,
10089 ptr_type_node, ptr_type_node, NULL_TREE);
10090 local_define_builtin ("__builtin_setjmp_setup", ftype,
10091 BUILT_IN_SETJMP_SETUP,
10092 "__builtin_setjmp_setup", ECF_NOTHROW);
10093
10094 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10095 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10096 BUILT_IN_SETJMP_RECEIVER,
10097 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10098
10099 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10100 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10101 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10102
10103 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10104 local_define_builtin ("__builtin_stack_restore", ftype,
10105 BUILT_IN_STACK_RESTORE,
10106 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10107
10108 /* If there's a possibility that we might use the ARM EABI, build the
10109 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10110 if (targetm.arm_eabi_unwinder)
10111 {
10112 ftype = build_function_type_list (void_type_node, NULL_TREE);
10113 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10114 BUILT_IN_CXA_END_CLEANUP,
10115 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10116 }
10117
10118 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10119 local_define_builtin ("__builtin_unwind_resume", ftype,
10120 BUILT_IN_UNWIND_RESUME,
10121 ((targetm_common.except_unwind_info (&global_options)
10122 == UI_SJLJ)
10123 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10124 ECF_NORETURN);
10125
10126 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10127 {
10128 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10129 NULL_TREE);
10130 local_define_builtin ("__builtin_return_address", ftype,
10131 BUILT_IN_RETURN_ADDRESS,
10132 "__builtin_return_address",
10133 ECF_NOTHROW);
10134 }
10135
10136 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10137 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10138 {
10139 ftype = build_function_type_list (void_type_node, ptr_type_node,
10140 ptr_type_node, NULL_TREE);
10141 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10142 local_define_builtin ("__cyg_profile_func_enter", ftype,
10143 BUILT_IN_PROFILE_FUNC_ENTER,
10144 "__cyg_profile_func_enter", 0);
10145 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10146 local_define_builtin ("__cyg_profile_func_exit", ftype,
10147 BUILT_IN_PROFILE_FUNC_EXIT,
10148 "__cyg_profile_func_exit", 0);
10149 }
10150
10151 /* The exception object and filter values from the runtime. The argument
10152 must be zero before exception lowering, i.e. from the front end. After
10153 exception lowering, it will be the region number for the exception
10154 landing pad. These functions are PURE instead of CONST to prevent
10155 them from being hoisted past the exception edge that will initialize
10156 its value in the landing pad. */
10157 ftype = build_function_type_list (ptr_type_node,
10158 integer_type_node, NULL_TREE);
10159 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10160   /* Only use TM_PURE if we have TM language support.  */
10161 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10162 ecf_flags |= ECF_TM_PURE;
10163 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10164 "__builtin_eh_pointer", ecf_flags);
10165
10166 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10167 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10168 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10169 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10170
10171 ftype = build_function_type_list (void_type_node,
10172 integer_type_node, integer_type_node,
10173 NULL_TREE);
10174 local_define_builtin ("__builtin_eh_copy_values", ftype,
10175 BUILT_IN_EH_COPY_VALUES,
10176 "__builtin_eh_copy_values", ECF_NOTHROW);
10177
10178 /* Complex multiplication and division. These are handled as builtins
10179 rather than optabs because emit_library_call_value doesn't support
10180 complex. Further, we can do slightly better with folding these
10181      beasties if the real and imaginary parts of the arguments are separate.  */
10182 {
10183 int mode;
10184
10185 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10186 {
10187 char mode_name_buf[4], *q;
10188 const char *p;
10189 enum built_in_function mcode, dcode;
10190 tree type, inner_type;
10191 const char *prefix = "__";
10192
10193 if (targetm.libfunc_gnu_prefix)
10194 prefix = "__gnu_";
10195
10196 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10197 if (type == NULL)
10198 continue;
10199 inner_type = TREE_TYPE (type);
10200
10201 ftype = build_function_type_list (type, inner_type, inner_type,
10202 inner_type, inner_type, NULL_TREE);
10203
10204 mcode = ((enum built_in_function)
10205 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10206 dcode = ((enum built_in_function)
10207 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10208
10209 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10210 *q = TOLOWER (*p);
10211 *q = '\0';
10212
10213 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10214 NULL);
10215 local_define_builtin (built_in_names[mcode], ftype, mcode,
10216 built_in_names[mcode],
10217 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10218
10219 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10220 NULL);
10221 local_define_builtin (built_in_names[dcode], ftype, dcode,
10222 built_in_names[dcode],
10223 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10224 }
10225 }
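  /* Illustrative note: for SCmode the loop above produces the names
     "__mulsc3" and "__divsc3" (or "__gnu_mulsc3"/"__gnu_divsc3" when the
     target requests the __gnu_ libfunc prefix), which are the libgcc entry
     points for single-precision complex multiply and divide.  */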
10226
10227 init_internal_fns ();
10228 }
10229
10230 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10231 better way.
10232
10233 If we requested a pointer to a vector, build up the pointers that
10234 we stripped off while looking for the inner type. Similarly for
10235 return values from functions.
10236
10237 The argument TYPE is the top of the chain, and BOTTOM is the
10238 new type which we will point to. */
10239
10240 tree
10241 reconstruct_complex_type (tree type, tree bottom)
10242 {
10243 tree inner, outer;
10244
10245 if (TREE_CODE (type) == POINTER_TYPE)
10246 {
10247 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10248 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10249 TYPE_REF_CAN_ALIAS_ALL (type));
10250 }
10251 else if (TREE_CODE (type) == REFERENCE_TYPE)
10252 {
10253 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10254 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10255 TYPE_REF_CAN_ALIAS_ALL (type));
10256 }
10257 else if (TREE_CODE (type) == ARRAY_TYPE)
10258 {
10259 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10260 outer = build_array_type (inner, TYPE_DOMAIN (type));
10261 }
10262 else if (TREE_CODE (type) == FUNCTION_TYPE)
10263 {
10264 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10265 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10266 }
10267 else if (TREE_CODE (type) == METHOD_TYPE)
10268 {
10269 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10270       /* The build_method_type_directly() routine prepends 'this' to the
10271	 argument list, so we must compensate by getting rid of it.  */
10272 outer
10273 = build_method_type_directly
10274 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10275 inner,
10276 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10277 }
10278 else if (TREE_CODE (type) == OFFSET_TYPE)
10279 {
10280 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10281 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10282 }
10283 else
10284 return bottom;
10285
10286 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10287 TYPE_QUALS (type));
10288 }
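/* Illustrative sketch (not part of the original sources): if TYPE is
   "float *" and BOTTOM is a vector type such as a 4-element float vector,
   reconstruct_complex_type (type, bottom) rebuilds the stripped pointer
   layer and returns a pointer to that vector type, carrying over the
   qualifiers and attributes of the original pointer type.  */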
10289
10290 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10291 the inner type. */
10292 tree
10293 build_vector_type_for_mode (tree innertype, machine_mode mode)
10294 {
10295 int nunits;
10296
10297 switch (GET_MODE_CLASS (mode))
10298 {
10299 case MODE_VECTOR_INT:
10300 case MODE_VECTOR_FLOAT:
10301 case MODE_VECTOR_FRACT:
10302 case MODE_VECTOR_UFRACT:
10303 case MODE_VECTOR_ACCUM:
10304 case MODE_VECTOR_UACCUM:
10305 nunits = GET_MODE_NUNITS (mode);
10306 break;
10307
10308 case MODE_INT:
10309 /* Check that there are no leftover bits. */
10310 gcc_assert (GET_MODE_BITSIZE (mode)
10311 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10312
10313 nunits = GET_MODE_BITSIZE (mode)
10314 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10315 break;
10316
10317 default:
10318 gcc_unreachable ();
10319 }
10320
10321 return make_vector_type (innertype, nunits, mode);
10322 }
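/* Illustrative note: for a vector mode such as V4SImode the unit count
   above is simply GET_MODE_NUNITS, i.e. 4; for an integer mode such as
   TImode with a 32-bit INNERTYPE, the MODE_INT case computes 128 / 32 = 4
   units after asserting that no bits are left over.  */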
10323
10324 /* Similarly, but takes the inner type and number of units, which must be
10325 a power of two. */
10326
10327 tree
10328 build_vector_type (tree innertype, int nunits)
10329 {
10330 return make_vector_type (innertype, nunits, VOIDmode);
10331 }
10332
10333 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10334
10335 tree
10336 build_opaque_vector_type (tree innertype, int nunits)
10337 {
10338 tree t = make_vector_type (innertype, nunits, VOIDmode);
10339 tree cand;
10340 /* We always build the non-opaque variant before the opaque one,
10341 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10342 cand = TYPE_NEXT_VARIANT (t);
10343 if (cand
10344 && TYPE_VECTOR_OPAQUE (cand)
10345 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10346 return cand;
10347   /* Otherwise build a variant type and make sure to queue it after
10348 the non-opaque type. */
10349 cand = build_distinct_type_copy (t);
10350 TYPE_VECTOR_OPAQUE (cand) = true;
10351 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10352 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10353 TYPE_NEXT_VARIANT (t) = cand;
10354 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10355 return cand;
10356 }
10357
10358
10359 /* Given an initializer INIT, return TRUE if INIT is zero or some
10360 aggregate of zeros. Otherwise return FALSE. */
10361 bool
10362 initializer_zerop (const_tree init)
10363 {
10364 tree elt;
10365
10366 STRIP_NOPS (init);
10367
10368 switch (TREE_CODE (init))
10369 {
10370 case INTEGER_CST:
10371 return integer_zerop (init);
10372
10373 case REAL_CST:
10374 /* ??? Note that this is not correct for C4X float formats. There,
10375 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10376 negative exponent. */
10377 return real_zerop (init)
10378 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10379
10380 case FIXED_CST:
10381 return fixed_zerop (init);
10382
10383 case COMPLEX_CST:
10384 return integer_zerop (init)
10385 || (real_zerop (init)
10386 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10387 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10388
10389 case VECTOR_CST:
10390 {
10391 unsigned i;
10392 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10393 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10394 return false;
10395 return true;
10396 }
10397
10398 case CONSTRUCTOR:
10399 {
10400 unsigned HOST_WIDE_INT idx;
10401
10402 if (TREE_CLOBBER_P (init))
10403 return false;
10404 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10405 if (!initializer_zerop (elt))
10406 return false;
10407 return true;
10408 }
10409
10410 case STRING_CST:
10411 {
10412 int i;
10413
10414 /* We need to loop through all elements to handle cases like
10415 "\0" and "\0foobar". */
10416 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10417 if (TREE_STRING_POINTER (init)[i] != '\0')
10418 return false;
10419
10420 return true;
10421 }
10422
10423 default:
10424 return false;
10425 }
10426 }
10427
10428 /* Check whether vector VEC consists entirely of equal elements and
10429    whether the number of elements corresponds to the type of VEC.
10430    Return the first element of the vector,
10431    or NULL_TREE if the vector is not uniform.  */
10432 tree
10433 uniform_vector_p (const_tree vec)
10434 {
10435 tree first, t;
10436 unsigned i;
10437
10438 if (vec == NULL_TREE)
10439 return NULL_TREE;
10440
10441 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10442
10443 if (TREE_CODE (vec) == VECTOR_CST)
10444 {
10445 first = VECTOR_CST_ELT (vec, 0);
10446 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10447 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10448 return NULL_TREE;
10449
10450 return first;
10451 }
10452
10453 else if (TREE_CODE (vec) == CONSTRUCTOR)
10454 {
10455 first = error_mark_node;
10456
10457 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10458 {
10459 if (i == 0)
10460 {
10461 first = t;
10462 continue;
10463 }
10464 if (!operand_equal_p (first, t, 0))
10465 return NULL_TREE;
10466 }
10467 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10468 return NULL_TREE;
10469
10470 return first;
10471 }
10472
10473 return NULL_TREE;
10474 }
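/* Illustrative sketch (not part of the original sources): for a VECTOR_CST
   such as { 7, 7, 7, 7 }, uniform_vector_p returns the INTEGER_CST 7; for
   { 7, 7, 7, 0 }, or for a CONSTRUCTOR with fewer elements than
   TYPE_VECTOR_SUBPARTS, it returns NULL_TREE.  */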
10475
10476 /* Build an empty statement at location LOC. */
10477
10478 tree
10479 build_empty_stmt (location_t loc)
10480 {
10481 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10482 SET_EXPR_LOCATION (t, loc);
10483 return t;
10484 }
10485
10486
10487 /* Build an OpenMP clause with code CODE. LOC is the location of the
10488 clause. */
10489
10490 tree
10491 build_omp_clause (location_t loc, enum omp_clause_code code)
10492 {
10493 tree t;
10494 int size, length;
10495
10496 length = omp_clause_num_ops[code];
10497 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10498
10499 record_node_allocation_statistics (OMP_CLAUSE, size);
10500
10501 t = (tree) ggc_internal_alloc (size);
10502 memset (t, 0, size);
10503 TREE_SET_CODE (t, OMP_CLAUSE);
10504 OMP_CLAUSE_SET_CODE (t, code);
10505 OMP_CLAUSE_LOCATION (t) = loc;
10506
10507 return t;
10508 }
10509
10510 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10511 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10512 Except for the CODE and operand count field, other storage for the
10513 object is initialized to zeros. */
10514
10515 tree
10516 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10517 {
10518 tree t;
10519 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10520
10521 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10522 gcc_assert (len >= 1);
10523
10524 record_node_allocation_statistics (code, length);
10525
10526 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10527
10528 TREE_SET_CODE (t, code);
10529
10530 /* Can't use TREE_OPERAND to store the length because if checking is
10531 enabled, it will try to check the length before we store it. :-P */
10532 t->exp.operands[0] = build_int_cst (sizetype, len);
10533
10534 return t;
10535 }
10536
10537 /* Helper function for build_call_* functions; build a CALL_EXPR with
10538 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10539 the argument slots. */
10540
10541 static tree
10542 build_call_1 (tree return_type, tree fn, int nargs)
10543 {
10544 tree t;
10545
10546 t = build_vl_exp (CALL_EXPR, nargs + 3);
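  /* The three extra slots hold the operand count (TREE_OPERAND 0, filled
     in by build_vl_exp), the callee (CALL_EXPR_FN) and the static chain
     (CALL_EXPR_STATIC_CHAIN); the NARGS argument slots follow them.  */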
10547 TREE_TYPE (t) = return_type;
10548 CALL_EXPR_FN (t) = fn;
10549 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10550
10551 return t;
10552 }
10553
10554 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10555 FN and a null static chain slot. NARGS is the number of call arguments
10556 which are specified as "..." arguments. */
10557
10558 tree
10559 build_call_nary (tree return_type, tree fn, int nargs, ...)
10560 {
10561 tree ret;
10562 va_list args;
10563 va_start (args, nargs);
10564 ret = build_call_valist (return_type, fn, nargs, args);
10565 va_end (args);
10566 return ret;
10567 }
10568
10569 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10570 FN and a null static chain slot. NARGS is the number of call arguments
10571 which are specified as a va_list ARGS. */
10572
10573 tree
10574 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10575 {
10576 tree t;
10577 int i;
10578
10579 t = build_call_1 (return_type, fn, nargs);
10580 for (i = 0; i < nargs; i++)
10581 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10582 process_call_operands (t);
10583 return t;
10584 }
10585
10586 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10587 FN and a null static chain slot. NARGS is the number of call arguments
10588 which are specified as a tree array ARGS. */
10589
10590 tree
10591 build_call_array_loc (location_t loc, tree return_type, tree fn,
10592 int nargs, const tree *args)
10593 {
10594 tree t;
10595 int i;
10596
10597 t = build_call_1 (return_type, fn, nargs);
10598 for (i = 0; i < nargs; i++)
10599 CALL_EXPR_ARG (t, i) = args[i];
10600 process_call_operands (t);
10601 SET_EXPR_LOCATION (t, loc);
10602 return t;
10603 }
10604
10605 /* Like build_call_array, but takes a vec. */
10606
10607 tree
10608 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10609 {
10610 tree ret, t;
10611 unsigned int ix;
10612
10613 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10614 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10615 CALL_EXPR_ARG (ret, ix) = t;
10616 process_call_operands (ret);
10617 return ret;
10618 }
10619
10620 /* Conveniently construct a function call expression. FNDECL names the
10621 function to be called and N arguments are passed in the array
10622 ARGARRAY. */
10623
10624 tree
10625 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10626 {
10627 tree fntype = TREE_TYPE (fndecl);
10628 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10629
10630 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10631 }
10632
10633 /* Conveniently construct a function call expression. FNDECL names the
10634 function to be called and the arguments are passed in the vector
10635 VEC. */
10636
10637 tree
10638 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10639 {
10640 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10641 vec_safe_address (vec));
10642 }
10643
10644
10645 /* Conveniently construct a function call expression. FNDECL names the
10646 function to be called, N is the number of arguments, and the "..."
10647 parameters are the argument expressions. */
10648
10649 tree
10650 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10651 {
10652 va_list ap;
10653 tree *argarray = XALLOCAVEC (tree, n);
10654 int i;
10655
10656 va_start (ap, n);
10657 for (i = 0; i < n; i++)
10658 argarray[i] = va_arg (ap, tree);
10659 va_end (ap);
10660 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10661 }
10662
10663 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10664 varargs macros aren't supported by all bootstrap compilers. */
10665
10666 tree
10667 build_call_expr (tree fndecl, int n, ...)
10668 {
10669 va_list ap;
10670 tree *argarray = XALLOCAVEC (tree, n);
10671 int i;
10672
10673 va_start (ap, n);
10674 for (i = 0; i < n; i++)
10675 argarray[i] = va_arg (ap, tree);
10676 va_end (ap);
10677 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10678 }
10679
10680 /* Build an internal call expression.  This is just like CALL_EXPR, except
10681    its CALL_EXPR_FN is NULL.  It will get gimplified later into a call to
10682    the corresponding internal function.  */
10683
10684 tree
10685 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10686 tree type, int n, ...)
10687 {
10688 va_list ap;
10689 int i;
10690
10691 tree fn = build_call_1 (type, NULL_TREE, n);
10692 va_start (ap, n);
10693 for (i = 0; i < n; i++)
10694 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10695 va_end (ap);
10696 SET_EXPR_LOCATION (fn, loc);
10697 CALL_EXPR_IFN (fn) = ifn;
10698 return fn;
10699 }
10700
10701 /* Create a new constant string literal and return a char* pointer to it.
10702 The STRING_CST value is the LEN characters at STR. */
10703 tree
10704 build_string_literal (int len, const char *str)
10705 {
10706 tree t, elem, index, type;
10707
10708 t = build_string (len, str);
10709 elem = build_type_variant (char_type_node, 1, 0);
10710 index = build_index_type (size_int (len - 1));
10711 type = build_array_type (elem, index);
10712 TREE_TYPE (t) = type;
10713 TREE_CONSTANT (t) = 1;
10714 TREE_READONLY (t) = 1;
10715 TREE_STATIC (t) = 1;
10716
10717 type = build_pointer_type (elem);
10718 t = build1 (ADDR_EXPR, type,
10719 build4 (ARRAY_REF, elem,
10720 t, integer_zero_node, NULL_TREE, NULL_TREE));
10721 return t;
10722 }
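/* Illustrative sketch (not part of the original sources): a caller
   building the argument for a call to puts might write

       tree str = build_string_literal (strlen ("hi") + 1, "hi");

   and obtain an ADDR_EXPR of type "const char *" pointing at element 0 of
   the underlying STRING_CST.  Callers conventionally include the trailing
   NUL in LEN.  */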
10723
10724
10725
10726 /* Return true if T (assumed to be a DECL) must be assigned a memory
10727 location. */
10728
10729 bool
10730 needs_to_live_in_memory (const_tree t)
10731 {
10732 return (TREE_ADDRESSABLE (t)
10733 || is_global_var (t)
10734 || (TREE_CODE (t) == RESULT_DECL
10735 && !DECL_BY_REFERENCE (t)
10736 && aggregate_value_p (t, current_function_decl)));
10737 }
10738
10739 /* Return the value of the constant X, sign-extended to a HOST_WIDE_INT.  */
10740
10741 HOST_WIDE_INT
10742 int_cst_value (const_tree x)
10743 {
10744 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10745 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10746
10747 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10748 gcc_assert (cst_and_fits_in_hwi (x));
10749
10750 if (bits < HOST_BITS_PER_WIDE_INT)
10751 {
10752 bool negative = ((val >> (bits - 1)) & 1) != 0;
10753 if (negative)
10754 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10755 else
10756 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10757 }
10758
10759 return val;
10760 }
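/* Illustrative note: the sign extension above is driven purely by the
   precision of the type, so a constant of an 8-bit type whose low bits are
   0xff comes back as -1, while 0x7f comes back as 127.  */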
10761
10762 /* If TYPE is an integral or pointer type, return an integer type with
10763 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10764 if TYPE is already an integer type of signedness UNSIGNEDP. */
10765
10766 tree
10767 signed_or_unsigned_type_for (int unsignedp, tree type)
10768 {
10769 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10770 return type;
10771
10772 if (TREE_CODE (type) == VECTOR_TYPE)
10773 {
10774 tree inner = TREE_TYPE (type);
10775 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10776 if (!inner2)
10777 return NULL_TREE;
10778 if (inner == inner2)
10779 return type;
10780 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10781 }
10782
10783 if (!INTEGRAL_TYPE_P (type)
10784 && !POINTER_TYPE_P (type)
10785 && TREE_CODE (type) != OFFSET_TYPE)
10786 return NULL_TREE;
10787
10788 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10789 }
10790
10791 /* If TYPE is an integral or pointer type, return an integer type with
10792 the same precision which is unsigned, or itself if TYPE is already an
10793 unsigned integer type. */
10794
10795 tree
10796 unsigned_type_for (tree type)
10797 {
10798 return signed_or_unsigned_type_for (1, type);
10799 }
10800
10801 /* If TYPE is an integral or pointer type, return an integer type with
10802 the same precision which is signed, or itself if TYPE is already a
10803 signed integer type. */
10804
10805 tree
10806 signed_type_for (tree type)
10807 {
10808 return signed_or_unsigned_type_for (0, type);
10809 }
10810
10811 /* If TYPE is a vector type, return a signed integer vector type with the
10812 same width and number of subparts. Otherwise return boolean_type_node. */
10813
10814 tree
10815 truth_type_for (tree type)
10816 {
10817 if (TREE_CODE (type) == VECTOR_TYPE)
10818 {
10819 tree elem = lang_hooks.types.type_for_size
10820 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10821 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10822 }
10823 else
10824 return boolean_type_node;
10825 }
10826
10827 /* Returns the largest value obtainable by casting something in INNER type to
10828 OUTER type. */
10829
10830 tree
10831 upper_bound_in_type (tree outer, tree inner)
10832 {
10833 unsigned int det = 0;
10834 unsigned oprec = TYPE_PRECISION (outer);
10835 unsigned iprec = TYPE_PRECISION (inner);
10836 unsigned prec;
10837
10838 /* Compute a unique number for every combination. */
10839 det |= (oprec > iprec) ? 4 : 0;
10840 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10841 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10842
10843 /* Determine the exponent to use. */
10844 switch (det)
10845 {
10846 case 0:
10847 case 1:
10848 /* oprec <= iprec, outer: signed, inner: don't care. */
10849 prec = oprec - 1;
10850 break;
10851 case 2:
10852 case 3:
10853 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10854 prec = oprec;
10855 break;
10856 case 4:
10857 /* oprec > iprec, outer: signed, inner: signed. */
10858 prec = iprec - 1;
10859 break;
10860 case 5:
10861 /* oprec > iprec, outer: signed, inner: unsigned. */
10862 prec = iprec;
10863 break;
10864 case 6:
10865 /* oprec > iprec, outer: unsigned, inner: signed. */
10866 prec = oprec;
10867 break;
10868 case 7:
10869 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10870 prec = iprec;
10871 break;
10872 default:
10873 gcc_unreachable ();
10874 }
10875
10876 return wide_int_to_tree (outer,
10877 wi::mask (prec, false, TYPE_PRECISION (outer)));
10878 }
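/* Illustrative example: casting a 32-bit signed int to an 8-bit unsigned
   type is case 2 above (oprec <= iprec, OUTER unsigned), giving prec = 8
   and a bound of 255; casting an 8-bit unsigned type to a 32-bit signed
   int is case 5, giving prec = iprec = 8 and again a bound of 255.  */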
10879
10880 /* Returns the smallest value obtainable by casting something in INNER type to
10881 OUTER type. */
10882
10883 tree
10884 lower_bound_in_type (tree outer, tree inner)
10885 {
10886 unsigned oprec = TYPE_PRECISION (outer);
10887 unsigned iprec = TYPE_PRECISION (inner);
10888
10889 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10890 and obtain 0. */
10891 if (TYPE_UNSIGNED (outer)
10892 /* If we are widening something of an unsigned type, OUTER type
10893 contains all values of INNER type. In particular, both INNER
10894 and OUTER types have zero in common. */
10895 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10896 return build_int_cst (outer, 0);
10897 else
10898 {
10899 /* If we are widening a signed type to another signed type, we
10900	 want to obtain -2^(iprec-1).  If we are keeping the
10901 precision or narrowing to a signed type, we want to obtain
10902 -2^(oprec-1). */
10903 unsigned prec = oprec > iprec ? iprec : oprec;
10904 return wide_int_to_tree (outer,
10905 wi::mask (prec - 1, true,
10906 TYPE_PRECISION (outer)));
10907 }
10908 }
10909
10910 /* Return nonzero if two operands that are suitable for PHI nodes are
10911 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10912 SSA_NAME or invariant. Note that this is strictly an optimization.
10913 That is, callers of this function can directly call operand_equal_p
10914 and get the same result, only slower. */
10915
10916 int
10917 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10918 {
10919 if (arg0 == arg1)
10920 return 1;
10921 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10922 return 0;
10923 return operand_equal_p (arg0, arg1, 0);
10924 }
10925
10926 /* Returns the number of zeros at the end of the binary representation of X.  */
10927
10928 tree
10929 num_ending_zeros (const_tree x)
10930 {
10931 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10932 }
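/* Illustrative example: for X equal to 24 (binary 11000), num_ending_zeros
   returns build_int_cst (TREE_TYPE (x), 3), since wi::ctz counts the three
   trailing zero bits.  */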
10933
10934
10935 #define WALK_SUBTREE(NODE) \
10936 do \
10937 { \
10938 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10939 if (result) \
10940 return result; \
10941 } \
10942 while (0)
10943
10944 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10945    to be walked whenever a type is seen in the tree.  The rest of the
10946    operands and the return value are as for walk_tree.  */
10947
10948 static tree
10949 walk_type_fields (tree type, walk_tree_fn func, void *data,
10950 hash_set<tree> *pset, walk_tree_lh lh)
10951 {
10952 tree result = NULL_TREE;
10953
10954 switch (TREE_CODE (type))
10955 {
10956 case POINTER_TYPE:
10957 case REFERENCE_TYPE:
10958 case VECTOR_TYPE:
10959 /* We have to worry about mutually recursive pointers. These can't
10960 be written in C. They can in Ada. It's pathological, but
10961 there's an ACATS test (c38102a) that checks it. Deal with this
10962 by checking if we're pointing to another pointer, that one
10963 points to another pointer, that one does too, and we have no htab.
10964 If so, get a hash table. We check three levels deep to avoid
10965 the cost of the hash table if we don't need one. */
10966 if (POINTER_TYPE_P (TREE_TYPE (type))
10967 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10968 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10969 && !pset)
10970 {
10971 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10972 func, data);
10973 if (result)
10974 return result;
10975
10976 break;
10977 }
10978
10979 /* ... fall through ... */
10980
10981 case COMPLEX_TYPE:
10982 WALK_SUBTREE (TREE_TYPE (type));
10983 break;
10984
10985 case METHOD_TYPE:
10986 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10987
10988 /* Fall through. */
10989
10990 case FUNCTION_TYPE:
10991 WALK_SUBTREE (TREE_TYPE (type));
10992 {
10993 tree arg;
10994
10995 /* We never want to walk into default arguments. */
10996 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10997 WALK_SUBTREE (TREE_VALUE (arg));
10998 }
10999 break;
11000
11001 case ARRAY_TYPE:
11002       /* Don't follow this node's type if it is a pointer, for fear that
11003 we'll have infinite recursion. If we have a PSET, then we
11004 need not fear. */
11005 if (pset
11006 || (!POINTER_TYPE_P (TREE_TYPE (type))
11007 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11008 WALK_SUBTREE (TREE_TYPE (type));
11009 WALK_SUBTREE (TYPE_DOMAIN (type));
11010 break;
11011
11012 case OFFSET_TYPE:
11013 WALK_SUBTREE (TREE_TYPE (type));
11014 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11015 break;
11016
11017 default:
11018 break;
11019 }
11020
11021 return NULL_TREE;
11022 }
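/* Illustrative sketch (not part of the original sources) of the callback
   protocol used by walk_tree_1 below: a walk_tree_fn receives the address
   of each sub-tree, may clear *WALK_SUBTREES to prune the walk, and stops
   the traversal by returning a non-NULL tree.  A hypothetical helper that
   looks for any SSA_NAME could be written as

       static tree
       find_ssa_name_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data ATTRIBUTE_UNUSED)
       {
         return TREE_CODE (*tp) == SSA_NAME ? *tp : NULL_TREE;
       }

   and invoked as walk_tree (&expr, find_ssa_name_r, NULL, NULL).  */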
11023
11024 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11025 called with the DATA and the address of each sub-tree. If FUNC returns a
11026 non-NULL value, the traversal is stopped, and the value returned by FUNC
11027 is returned. If PSET is non-NULL it is used to record the nodes visited,
11028 and to avoid visiting a node more than once. */
11029
11030 tree
11031 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11032 hash_set<tree> *pset, walk_tree_lh lh)
11033 {
11034 enum tree_code code;
11035 int walk_subtrees;
11036 tree result;
11037
11038 #define WALK_SUBTREE_TAIL(NODE) \
11039 do \
11040 { \
11041 tp = & (NODE); \
11042 goto tail_recurse; \
11043 } \
11044 while (0)
11045
11046 tail_recurse:
11047 /* Skip empty subtrees. */
11048 if (!*tp)
11049 return NULL_TREE;
11050
11051 /* Don't walk the same tree twice, if the user has requested
11052 that we avoid doing so. */
11053 if (pset && pset->add (*tp))
11054 return NULL_TREE;
11055
11056 /* Call the function. */
11057 walk_subtrees = 1;
11058 result = (*func) (tp, &walk_subtrees, data);
11059
11060 /* If we found something, return it. */
11061 if (result)
11062 return result;
11063
11064 code = TREE_CODE (*tp);
11065
11066 /* Even if we didn't, FUNC may have decided that there was nothing
11067 interesting below this point in the tree. */
11068 if (!walk_subtrees)
11069 {
11070 /* But we still need to check our siblings. */
11071 if (code == TREE_LIST)
11072 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11073 else if (code == OMP_CLAUSE)
11074 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11075 else
11076 return NULL_TREE;
11077 }
11078
11079 if (lh)
11080 {
11081 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11082 if (result || !walk_subtrees)
11083 return result;
11084 }
11085
11086 switch (code)
11087 {
11088 case ERROR_MARK:
11089 case IDENTIFIER_NODE:
11090 case INTEGER_CST:
11091 case REAL_CST:
11092 case FIXED_CST:
11093 case VECTOR_CST:
11094 case STRING_CST:
11095 case BLOCK:
11096 case PLACEHOLDER_EXPR:
11097 case SSA_NAME:
11098 case FIELD_DECL:
11099 case RESULT_DECL:
11100 /* None of these have subtrees other than those already walked
11101 above. */
11102 break;
11103
11104 case TREE_LIST:
11105 WALK_SUBTREE (TREE_VALUE (*tp));
11106 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11107 break;
11108
11109 case TREE_VEC:
11110 {
11111 int len = TREE_VEC_LENGTH (*tp);
11112
11113 if (len == 0)
11114 break;
11115
11116 /* Walk all elements but the first. */
11117 while (--len)
11118 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11119
11120 /* Now walk the first one as a tail call. */
11121 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11122 }
11123
11124 case COMPLEX_CST:
11125 WALK_SUBTREE (TREE_REALPART (*tp));
11126 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11127
11128 case CONSTRUCTOR:
11129 {
11130 unsigned HOST_WIDE_INT idx;
11131 constructor_elt *ce;
11132
11133 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11134 idx++)
11135 WALK_SUBTREE (ce->value);
11136 }
11137 break;
11138
11139 case SAVE_EXPR:
11140 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11141
11142 case BIND_EXPR:
11143 {
11144 tree decl;
11145 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11146 {
11147 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11148 into declarations that are just mentioned, rather than
11149 declared; they don't really belong to this part of the tree.
11150 And, we can see cycles: the initializer for a declaration
11151 can refer to the declaration itself. */
11152 WALK_SUBTREE (DECL_INITIAL (decl));
11153 WALK_SUBTREE (DECL_SIZE (decl));
11154 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11155 }
11156 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11157 }
11158
11159 case STATEMENT_LIST:
11160 {
11161 tree_stmt_iterator i;
11162 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11163 WALK_SUBTREE (*tsi_stmt_ptr (i));
11164 }
11165 break;
11166
11167 case OMP_CLAUSE:
11168 switch (OMP_CLAUSE_CODE (*tp))
11169 {
11170 case OMP_CLAUSE_PRIVATE:
11171 case OMP_CLAUSE_SHARED:
11172 case OMP_CLAUSE_FIRSTPRIVATE:
11173 case OMP_CLAUSE_COPYIN:
11174 case OMP_CLAUSE_COPYPRIVATE:
11175 case OMP_CLAUSE_FINAL:
11176 case OMP_CLAUSE_IF:
11177 case OMP_CLAUSE_NUM_THREADS:
11178 case OMP_CLAUSE_SCHEDULE:
11179 case OMP_CLAUSE_UNIFORM:
11180 case OMP_CLAUSE_DEPEND:
11181 case OMP_CLAUSE_NUM_TEAMS:
11182 case OMP_CLAUSE_THREAD_LIMIT:
11183 case OMP_CLAUSE_DEVICE:
11184 case OMP_CLAUSE_DIST_SCHEDULE:
11185 case OMP_CLAUSE_SAFELEN:
11186 case OMP_CLAUSE_SIMDLEN:
11187 case OMP_CLAUSE__LOOPTEMP_:
11188 case OMP_CLAUSE__SIMDUID_:
11189 case OMP_CLAUSE__CILK_FOR_COUNT_:
11190 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11191 /* FALLTHRU */
11192
11193 case OMP_CLAUSE_NOWAIT:
11194 case OMP_CLAUSE_ORDERED:
11195 case OMP_CLAUSE_DEFAULT:
11196 case OMP_CLAUSE_UNTIED:
11197 case OMP_CLAUSE_MERGEABLE:
11198 case OMP_CLAUSE_PROC_BIND:
11199 case OMP_CLAUSE_INBRANCH:
11200 case OMP_CLAUSE_NOTINBRANCH:
11201 case OMP_CLAUSE_FOR:
11202 case OMP_CLAUSE_PARALLEL:
11203 case OMP_CLAUSE_SECTIONS:
11204 case OMP_CLAUSE_TASKGROUP:
11205 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11206
11207 case OMP_CLAUSE_LASTPRIVATE:
11208 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11209 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11210 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11211
11212 case OMP_CLAUSE_COLLAPSE:
11213 {
11214 int i;
11215 for (i = 0; i < 3; i++)
11216 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11217 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11218 }
11219
11220 case OMP_CLAUSE_LINEAR:
11221 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11222 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11223 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11224 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11225
11226 case OMP_CLAUSE_ALIGNED:
11227 case OMP_CLAUSE_FROM:
11228 case OMP_CLAUSE_TO:
11229 case OMP_CLAUSE_MAP:
11230 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11231 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11232 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11233
11234 case OMP_CLAUSE_REDUCTION:
11235 {
11236 int i;
11237 for (i = 0; i < 4; i++)
11238 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11239 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11240 }
11241
11242 default:
11243 gcc_unreachable ();
11244 }
11245 break;
11246
11247 case TARGET_EXPR:
11248 {
11249 int i, len;
11250
11251 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11252 But we only want to walk them once. */
11253 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11254 for (i = 0; i < len; ++i)
11255 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11256 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11257 }
11258
11259 case DECL_EXPR:
11260 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11261 defining. We only want to walk into these fields of a type in this
11262 case and not in the general case of a mere reference to the type.
11263
11264 The criterion is as follows: if the field can be an expression, it
11265 must be walked only here. This should be in keeping with the fields
11266 that are directly gimplified in gimplify_type_sizes in order for the
11267 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11268 variable-sized types.
11269
11270 Note that DECLs get walked as part of processing the BIND_EXPR. */
11271 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11272 {
11273 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11274 if (TREE_CODE (*type_p) == ERROR_MARK)
11275 return NULL_TREE;
11276
11277 /* Call the function for the type. See if it returns anything or
11278 doesn't want us to continue. If we are to continue, walk both
11279 the normal fields and those for the declaration case. */
11280 result = (*func) (type_p, &walk_subtrees, data);
11281 if (result || !walk_subtrees)
11282 return result;
11283
11284 /* But do not walk a pointed-to type since it may itself need to
11285 be walked in the declaration case if it isn't anonymous. */
11286 if (!POINTER_TYPE_P (*type_p))
11287 {
11288 result = walk_type_fields (*type_p, func, data, pset, lh);
11289 if (result)
11290 return result;
11291 }
11292
11293 /* If this is a record type, also walk the fields. */
11294 if (RECORD_OR_UNION_TYPE_P (*type_p))
11295 {
11296 tree field;
11297
11298 for (field = TYPE_FIELDS (*type_p); field;
11299 field = DECL_CHAIN (field))
11300 {
11301 /* We'd like to look at the type of the field, but we can
11302 easily get infinite recursion. So assume it's pointed
11303 to elsewhere in the tree. Also, ignore things that
11304 aren't fields. */
11305 if (TREE_CODE (field) != FIELD_DECL)
11306 continue;
11307
11308 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11309 WALK_SUBTREE (DECL_SIZE (field));
11310 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11311 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11312 WALK_SUBTREE (DECL_QUALIFIER (field));
11313 }
11314 }
11315
11316 /* Same for scalar types. */
11317 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11318 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11319 || TREE_CODE (*type_p) == INTEGER_TYPE
11320 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11321 || TREE_CODE (*type_p) == REAL_TYPE)
11322 {
11323 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11324 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11325 }
11326
11327 WALK_SUBTREE (TYPE_SIZE (*type_p));
11328 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11329 }
11330 /* FALLTHRU */
11331
11332 default:
11333 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11334 {
11335 int i, len;
11336
11337 /* Walk over all the sub-trees of this operand. */
11338 len = TREE_OPERAND_LENGTH (*tp);
11339
11340 /* Go through the subtrees. We need to do this in forward order so
11341 that the scope of a FOR_EXPR is handled properly. */
11342 if (len)
11343 {
11344 for (i = 0; i < len - 1; ++i)
11345 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11346 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11347 }
11348 }
11349 /* If this is a type, walk the needed fields in the type. */
11350 else if (TYPE_P (*tp))
11351 return walk_type_fields (*tp, func, data, pset, lh);
11352 break;
11353 }
11354
11355 /* We didn't find what we were looking for. */
11356 return NULL_TREE;
11357
11358 #undef WALK_SUBTREE_TAIL
11359 }
11360 #undef WALK_SUBTREE
11361
11362 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11363
11364 tree
11365 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11366 walk_tree_lh lh)
11367 {
11368 tree result;
11369
11370 hash_set<tree> pset;
11371 result = walk_tree_1 (tp, func, data, &pset, lh);
11372 return result;
11373 }
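
/* Editorial note, not part of the original sources: a typical callback
   for the tree walkers above returns NULL_TREE to keep walking, returns
   a non-NULL tree to stop the walk and propagate that value back to the
   caller, and may clear *WALK_SUBTREES to skip the children of the
   current node.  The names count_calls_r, BODY and N below are purely
   illustrative:

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                    void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree_without_duplicates_1 (&body, count_calls_r, &n, NULL);  */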
11374
11375
11376 tree
11377 tree_block (tree t)
11378 {
11379 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11380
11381 if (IS_EXPR_CODE_CLASS (c))
11382 return LOCATION_BLOCK (t->exp.locus);
11383 gcc_unreachable ();
11384 return NULL;
11385 }
11386
11387 void
11388 tree_set_block (tree t, tree b)
11389 {
11390 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11391
11392 if (IS_EXPR_CODE_CLASS (c))
11393 {
11394 if (b)
11395 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11396 else
11397 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11398 }
11399 else
11400 gcc_unreachable ();
11401 }
11402
11403 /* Create a nameless artificial label and put it in the current
11404 function context. The label has a location of LOC. Returns the
11405 newly created label. */
11406
11407 tree
11408 create_artificial_label (location_t loc)
11409 {
11410 tree lab = build_decl (loc,
11411 LABEL_DECL, NULL_TREE, void_type_node);
11412
11413 DECL_ARTIFICIAL (lab) = 1;
11414 DECL_IGNORED_P (lab) = 1;
11415 DECL_CONTEXT (lab) = current_function_decl;
11416 return lab;
11417 }
11418
11419 /* Given a tree, try to return a useful variable name that we can use
11420 to prefix a temporary that is being assigned the value of the tree.
11421 I.e., given <temp> = &A, return A. */
11422
11423 const char *
11424 get_name (tree t)
11425 {
11426 tree stripped_decl;
11427
11428 stripped_decl = t;
11429 STRIP_NOPS (stripped_decl);
11430 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11431 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11432 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11433 {
11434 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11435 if (!name)
11436 return NULL;
11437 return IDENTIFIER_POINTER (name);
11438 }
11439 else
11440 {
11441 switch (TREE_CODE (stripped_decl))
11442 {
11443 case ADDR_EXPR:
11444 return get_name (TREE_OPERAND (stripped_decl, 0));
11445 default:
11446 return NULL;
11447 }
11448 }
11449 }
11450
11451 /* Return true if FNTYPE has a variable argument list. */
11452
11453 bool
11454 stdarg_p (const_tree fntype)
11455 {
11456 function_args_iterator args_iter;
11457 tree n = NULL_TREE, t;
11458
11459 if (!fntype)
11460 return false;
11461
11462 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11463 {
11464 n = t;
11465 }
11466
11467 return n != NULL_TREE && n != void_type_node;
11468 }
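
/* Editorial illustration, not part of the original sources: for a
   prototyped, non-variadic type such as "int f (int)" the argument list
   ends with void_type_node, so stdarg_p returns false; for
   "int f (int, ...)" the terminating void_type_node is absent, so it
   returns true; and for an unprototyped "int f ()" there are no
   argument types at all, so it also returns false.  */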
11469
11470 /* Return true if FNTYPE has a prototype. */
11471
11472 bool
11473 prototype_p (tree fntype)
11474 {
11475 tree t;
11476
11477 gcc_assert (fntype != NULL_TREE);
11478
11479 t = TYPE_ARG_TYPES (fntype);
11480 return (t != NULL_TREE);
11481 }
11482
11483 /* If BLOCK is inlined from an __attribute__((__artificial__))
11484 routine, return a pointer to the location from which it has
11485 been called. */
11486 location_t *
11487 block_nonartificial_location (tree block)
11488 {
11489 location_t *ret = NULL;
11490
11491 while (block && TREE_CODE (block) == BLOCK
11492 && BLOCK_ABSTRACT_ORIGIN (block))
11493 {
11494 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11495
11496 while (TREE_CODE (ao) == BLOCK
11497 && BLOCK_ABSTRACT_ORIGIN (ao)
11498 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11499 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11500
11501 if (TREE_CODE (ao) == FUNCTION_DECL)
11502 {
11503 /* If AO is an artificial inline, point RET to the
11504 call site locus at which it has been inlined and continue
11505 the loop, in case AO's caller is also an artificial
11506 inline. */
11507 if (DECL_DECLARED_INLINE_P (ao)
11508 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11509 ret = &BLOCK_SOURCE_LOCATION (block);
11510 else
11511 break;
11512 }
11513 else if (TREE_CODE (ao) != BLOCK)
11514 break;
11515
11516 block = BLOCK_SUPERCONTEXT (block);
11517 }
11518 return ret;
11519 }
11520
11521
11522 /* If EXP is inlined from an __attribute__((__artificial__))
11523 function, return the location of the original call expression. */
11524
11525 location_t
11526 tree_nonartificial_location (tree exp)
11527 {
11528 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11529
11530 if (loc)
11531 return *loc;
11532 else
11533 return EXPR_LOCATION (exp);
11534 }
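
/* Editorial illustration, not part of the original sources: a wrapper
   declared as, say,

     static inline void *
     my_memcpy (void *d, const void *s, __SIZE_TYPE__ n)
       __attribute__ ((__always_inline__, __artificial__));

   (a hypothetical name) is treated as mere glue.  Diagnostics for code
   inlined from it can use the two routines above to point at the place
   where my_memcpy itself was called instead of at the wrapper's body.  */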
11535
11536
11537 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11538 nodes. */
11539
11540 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11541
11542 hashval_t
11543 cl_option_hasher::hash (tree x)
11544 {
11545 const_tree const t = x;
11546 const char *p;
11547 size_t i;
11548 size_t len = 0;
11549 hashval_t hash = 0;
11550
11551 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11552 {
11553 p = (const char *)TREE_OPTIMIZATION (t);
11554 len = sizeof (struct cl_optimization);
11555 }
11556
11557 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11558 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11559
11560 else
11561 gcc_unreachable ();
11562
11563 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11564 something else. */
11565 for (i = 0; i < len; i++)
11566 if (p[i])
11567 hash = (hash << 4) ^ ((i << 2) | p[i]);
11568
11569 return hash;
11570 }
11571
11572 /* Return true if the value represented by X (an OPTIMIZATION_NODE or
11573 TARGET_OPTION_NODE tree node) is the same as the value represented
11574 by Y. */
11575
11576 bool
11577 cl_option_hasher::equal (tree x, tree y)
11578 {
11579 const_tree const xt = x;
11580 const_tree const yt = y;
11581 const char *xp;
11582 const char *yp;
11583 size_t len;
11584
11585 if (TREE_CODE (xt) != TREE_CODE (yt))
11586 return 0;
11587
11588 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11589 {
11590 xp = (const char *)TREE_OPTIMIZATION (xt);
11591 yp = (const char *)TREE_OPTIMIZATION (yt);
11592 len = sizeof (struct cl_optimization);
11593 }
11594
11595 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11596 {
11597 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11598 TREE_TARGET_OPTION (yt));
11599 }
11600
11601 else
11602 gcc_unreachable ();
11603
11604 return (memcmp (xp, yp, len) == 0);
11605 }
11606
11607 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11608
11609 tree
11610 build_optimization_node (struct gcc_options *opts)
11611 {
11612 tree t;
11613
11614 /* Use the cache of optimization nodes. */
11615
11616 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11617 opts);
11618
11619 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11620 t = *slot;
11621 if (!t)
11622 {
11623 /* Insert this one into the hash table. */
11624 t = cl_optimization_node;
11625 *slot = t;
11626
11627 /* Make a new node for next time round. */
11628 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11629 }
11630
11631 return t;
11632 }
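
/* Editorial note, not part of the original sources: the function above
   (and build_target_option_node below) hash-conses its result.  A
   scratch node, cl_optimization_node, is filled in from OPTS and used
   as the hash table key; only when no equal node already exists is the
   scratch node inserted and a fresh scratch node allocated for next
   time.  Two calls with identical option settings therefore yield
   pointer-identical trees, so callers may compare OPTIMIZATION_NODEs
   with a simple pointer equality test.  */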
11633
11634 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11635
11636 tree
11637 build_target_option_node (struct gcc_options *opts)
11638 {
11639 tree t;
11640
11641 /* Use the cache of optimization nodes. */
11642
11643 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11644 opts);
11645
11646 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11647 t = *slot;
11648 if (!t)
11649 {
11650 /* Insert this one into the hash table. */
11651 t = cl_target_option_node;
11652 *slot = t;
11653
11654 /* Make a new node for next time round. */
11655 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11656 }
11657
11658 return t;
11659 }
11660
11661 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11662 so that they aren't saved during PCH writing. */
11663
11664 void
11665 prepare_target_option_nodes_for_pch (void)
11666 {
11667 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11668 for (; iter != cl_option_hash_table->end (); ++iter)
11669 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11670 TREE_TARGET_GLOBALS (*iter) = NULL;
11671 }
11672
11673 /* Determine the "ultimate origin" of a block. The block may be an inlined
11674 instance of an inlined instance of a block which is local to an inline
11675 function, so we have to trace all of the way back through the origin chain
11676 to find out what sort of node actually served as the original seed for the
11677 given block. */
11678
11679 tree
11680 block_ultimate_origin (const_tree block)
11681 {
11682 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11683
11684 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11685 we're trying to output the abstract instance of this function. */
11686 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11687 return NULL_TREE;
11688
11689 if (immediate_origin == NULL_TREE)
11690 return NULL_TREE;
11691 else
11692 {
11693 tree ret_val;
11694 tree lookahead = immediate_origin;
11695
11696 do
11697 {
11698 ret_val = lookahead;
11699 lookahead = (TREE_CODE (ret_val) == BLOCK
11700 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11701 }
11702 while (lookahead != NULL && lookahead != ret_val);
11703
11704 /* The block's abstract origin chain may not be the *ultimate* origin of
11705 the block. It could lead to a DECL that has an abstract origin set.
11706 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11707 will give us if it has one). Note that DECL's abstract origins are
11708 supposed to be the most distant ancestor (or so decl_ultimate_origin
11709 claims), so we don't need to loop following the DECL origins. */
11710 if (DECL_P (ret_val))
11711 return DECL_ORIGIN (ret_val);
11712
11713 return ret_val;
11714 }
11715 }
11716
11717 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11718 no instruction. */
11719
11720 bool
11721 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11722 {
11723 /* Use precision rather than machine mode when we can, which gives
11724 the correct answer even for submode (bit-field) types. */
11725 if ((INTEGRAL_TYPE_P (outer_type)
11726 || POINTER_TYPE_P (outer_type)
11727 || TREE_CODE (outer_type) == OFFSET_TYPE)
11728 && (INTEGRAL_TYPE_P (inner_type)
11729 || POINTER_TYPE_P (inner_type)
11730 || TREE_CODE (inner_type) == OFFSET_TYPE))
11731 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11732
11733 /* Otherwise fall back on comparing machine modes (e.g. for
11734 aggregate types, floats). */
11735 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11736 }
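
/* Editorial illustration, not part of the original sources: on a
   typical LP64 target a conversion between "int" and "unsigned int" is
   a nop because both have 32-bit precision, while a conversion from
   "int" to "long" is not, because the precisions differ.  Likewise a
   pointer/integer conversion counts as a nop only when the integer
   type is exactly as wide as a pointer.  */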
11737
11738 /* Return true iff conversion in EXP generates no instruction. Mark
11739 it inline so that we fully inline into the stripping functions even
11740 though we have two uses of this function. */
11741
11742 static inline bool
11743 tree_nop_conversion (const_tree exp)
11744 {
11745 tree outer_type, inner_type;
11746
11747 if (!CONVERT_EXPR_P (exp)
11748 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11749 return false;
11750 if (TREE_OPERAND (exp, 0) == error_mark_node)
11751 return false;
11752
11753 outer_type = TREE_TYPE (exp);
11754 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11755
11756 if (!inner_type)
11757 return false;
11758
11759 return tree_nop_conversion_p (outer_type, inner_type);
11760 }
11761
11762 /* Return true iff conversion in EXP generates no instruction. Don't
11763 consider conversions changing the signedness. */
11764
11765 static bool
11766 tree_sign_nop_conversion (const_tree exp)
11767 {
11768 tree outer_type, inner_type;
11769
11770 if (!tree_nop_conversion (exp))
11771 return false;
11772
11773 outer_type = TREE_TYPE (exp);
11774 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11775
11776 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11777 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11778 }
11779
11780 /* Strip conversions from EXP according to tree_nop_conversion and
11781 return the resulting expression. */
11782
11783 tree
11784 tree_strip_nop_conversions (tree exp)
11785 {
11786 while (tree_nop_conversion (exp))
11787 exp = TREE_OPERAND (exp, 0);
11788 return exp;
11789 }
11790
11791 /* Strip conversions from EXP according to tree_sign_nop_conversion
11792 and return the resulting expression. */
11793
11794 tree
11795 tree_strip_sign_nop_conversions (tree exp)
11796 {
11797 while (tree_sign_nop_conversion (exp))
11798 exp = TREE_OPERAND (exp, 0);
11799 return exp;
11800 }
11801
11802 /* Strip any floating point extensions from EXP and return the result. */
11803 tree
11804 strip_float_extensions (tree exp)
11805 {
11806 tree sub, expt, subt;
11807
11808 /* For a floating point constant, look up the narrowest type that can hold
11809 it properly and handle it like (type)(narrowest_type)constant.
11810 This way we can optimize, for instance, a=a*2.0 where "a" is float
11811 but 2.0 is a double constant. */
11812 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11813 {
11814 REAL_VALUE_TYPE orig;
11815 tree type = NULL;
11816
11817 orig = TREE_REAL_CST (exp);
11818 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11819 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11820 type = float_type_node;
11821 else if (TYPE_PRECISION (TREE_TYPE (exp))
11822 > TYPE_PRECISION (double_type_node)
11823 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11824 type = double_type_node;
11825 if (type)
11826 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11827 }
11828
11829 if (!CONVERT_EXPR_P (exp))
11830 return exp;
11831
11832 sub = TREE_OPERAND (exp, 0);
11833 subt = TREE_TYPE (sub);
11834 expt = TREE_TYPE (exp);
11835
11836 if (!FLOAT_TYPE_P (subt))
11837 return exp;
11838
11839 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11840 return exp;
11841
11842 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11843 return exp;
11844
11845 return strip_float_extensions (sub);
11846 }
11847
11848 /* Strip out all handled components that produce invariant
11849 offsets. */
11850
11851 const_tree
11852 strip_invariant_refs (const_tree op)
11853 {
11854 while (handled_component_p (op))
11855 {
11856 switch (TREE_CODE (op))
11857 {
11858 case ARRAY_REF:
11859 case ARRAY_RANGE_REF:
11860 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11861 || TREE_OPERAND (op, 2) != NULL_TREE
11862 || TREE_OPERAND (op, 3) != NULL_TREE)
11863 return NULL;
11864 break;
11865
11866 case COMPONENT_REF:
11867 if (TREE_OPERAND (op, 2) != NULL_TREE)
11868 return NULL;
11869 break;
11870
11871 default:;
11872 }
11873 op = TREE_OPERAND (op, 0);
11874 }
11875
11876 return op;
11877 }
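
/* Editorial illustration, not part of the original sources: for a
   reference such as a.b.c[2], with a constant index, the loop above
   strips the COMPONENT_REFs and the ARRAY_REF and returns the base
   object "a"; for a.b[i], with a variable index, it returns NULL
   because the offset of the access is not invariant.  */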
11878
11879 static GTY(()) tree gcc_eh_personality_decl;
11880
11881 /* Return the GCC personality function decl. */
11882
11883 tree
11884 lhd_gcc_personality (void)
11885 {
11886 if (!gcc_eh_personality_decl)
11887 gcc_eh_personality_decl = build_personality_function ("gcc");
11888 return gcc_eh_personality_decl;
11889 }
11890
11891 /* TARGET is a call target of a GIMPLE call statement
11892 (obtained by gimple_call_fn). Return true if it is an
11893 OBJ_TYPE_REF representing a virtual call to a C++ method.
11894 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11895 through a cast, where the middle-end devirtualization machinery
11896 can't apply.) */
11897
11898 bool
11899 virtual_method_call_p (tree target)
11900 {
11901 if (TREE_CODE (target) != OBJ_TYPE_REF)
11902 return false;
11903 tree t = TREE_TYPE (target);
11904 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11905 t = TREE_TYPE (t);
11906 if (TREE_CODE (t) == FUNCTION_TYPE)
11907 return false;
11908 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
11909 /* If we do not have BINFO associated, it means that type was built
11910 without devirtualization enabled. Do not consider this a virtual
11911 call. */
11912 if (!TYPE_BINFO (obj_type_ref_class (target)))
11913 return false;
11914 return true;
11915 }
11916
11917 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11918
11919 tree
11920 obj_type_ref_class (tree ref)
11921 {
11922 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11923 ref = TREE_TYPE (ref);
11924 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11925 ref = TREE_TYPE (ref);
11926 /* We look for the type that THIS points to. ObjC also builds
11927 OBJ_TYPE_REF with non-method calls; their first parameter
11928 ID, however, also corresponds to the class type. */
11929 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11930 || TREE_CODE (ref) == FUNCTION_TYPE);
11931 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11932 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11933 return TREE_TYPE (ref);
11934 }
11935
11936 /* Return true if T is in an anonymous namespace. */
11937
11938 bool
11939 type_in_anonymous_namespace_p (const_tree t)
11940 {
11941 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11942 builtin types; those have CONTEXT NULL. */
11943 if (!TYPE_CONTEXT (t))
11944 return false;
11945 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11946 }
11947
11948 /* Try to find a base info of BINFO that would have its field decl at offset
11949 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11950 found, return it; otherwise return NULL_TREE. */
11951
11952 tree
11953 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11954 {
11955 tree type = BINFO_TYPE (binfo);
11956
11957 while (true)
11958 {
11959 HOST_WIDE_INT pos, size;
11960 tree fld;
11961 int i;
11962
11963 if (types_same_for_odr (type, expected_type))
11964 return binfo;
11965 if (offset < 0)
11966 return NULL_TREE;
11967
11968 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11969 {
11970 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
11971 continue;
11972
11973 pos = int_bit_position (fld);
11974 size = tree_to_uhwi (DECL_SIZE (fld));
11975 if (pos <= offset && (pos + size) > offset)
11976 break;
11977 }
11978 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11979 return NULL_TREE;
11980
11981 /* Offset 0 indicates the primary base, whose vtable contents are
11982 represented in the binfo for the derived class. */
11983 else if (offset != 0)
11984 {
11985 tree base_binfo, binfo2 = binfo;
11986
11987 /* Find the BINFO corresponding to FLD. This is made a bit harder
11988 by the fact that with virtual inheritance we may need to walk
11989 down the non-virtual inheritance chain. */
11990 while (true)
11991 {
11992 tree containing_binfo = NULL, found_binfo = NULL;
11993 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11994 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11995 {
11996 found_binfo = base_binfo;
11997 break;
11998 }
11999 else
12000 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
12001 - tree_to_shwi (BINFO_OFFSET (binfo)))
12002 * BITS_PER_UNIT < pos
12003 /* Rule out types with no virtual methods, or we can get confused
12004 here by zero-sized bases. */
12005 && TYPE_BINFO (BINFO_TYPE (base_binfo))
12006 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
12007 && (!containing_binfo
12008 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
12009 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
12010 containing_binfo = base_binfo;
12011 if (found_binfo)
12012 {
12013 binfo = found_binfo;
12014 break;
12015 }
12016 if (!containing_binfo)
12017 return NULL_TREE;
12018 binfo2 = containing_binfo;
12019 }
12020 }
12021
12022 type = TREE_TYPE (fld);
12023 offset -= pos;
12024 }
12025 }
12026
12027 /* Returns true if X is a typedef decl. */
12028
12029 bool
12030 is_typedef_decl (tree x)
12031 {
12032 return (x && TREE_CODE (x) == TYPE_DECL
12033 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12034 }
12035
12036 /* Returns true iff TYPE is a type variant created for a typedef. */
12037
12038 bool
12039 typedef_variant_p (tree type)
12040 {
12041 return is_typedef_decl (TYPE_NAME (type));
12042 }
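
/* Editorial illustration, not part of the original sources: after
   "typedef int myint;" the type variant built for uses of "myint" has
   a TYPE_NAME that is a TYPE_DECL whose DECL_ORIGINAL_TYPE is "int",
   so typedef_variant_p returns true for it, whereas it returns false
   for plain "int", whose TYPE_NAME (if any) has no
   DECL_ORIGINAL_TYPE.  */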
12043
12044 /* Warn about a use of an identifier which was marked deprecated. */
12045 void
12046 warn_deprecated_use (tree node, tree attr)
12047 {
12048 const char *msg;
12049
12050 if (node == 0 || !warn_deprecated_decl)
12051 return;
12052
12053 if (!attr)
12054 {
12055 if (DECL_P (node))
12056 attr = DECL_ATTRIBUTES (node);
12057 else if (TYPE_P (node))
12058 {
12059 tree decl = TYPE_STUB_DECL (node);
12060 if (decl)
12061 attr = lookup_attribute ("deprecated",
12062 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12063 }
12064 }
12065
12066 if (attr)
12067 attr = lookup_attribute ("deprecated", attr);
12068
12069 if (attr)
12070 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12071 else
12072 msg = NULL;
12073
12074 bool w;
12075 if (DECL_P (node))
12076 {
12077 if (msg)
12078 w = warning (OPT_Wdeprecated_declarations,
12079 "%qD is deprecated: %s", node, msg);
12080 else
12081 w = warning (OPT_Wdeprecated_declarations,
12082 "%qD is deprecated", node);
12083 if (w)
12084 inform (DECL_SOURCE_LOCATION (node), "declared here");
12085 }
12086 else if (TYPE_P (node))
12087 {
12088 tree what = NULL_TREE;
12089 tree decl = TYPE_STUB_DECL (node);
12090
12091 if (TYPE_NAME (node))
12092 {
12093 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12094 what = TYPE_NAME (node);
12095 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12096 && DECL_NAME (TYPE_NAME (node)))
12097 what = DECL_NAME (TYPE_NAME (node));
12098 }
12099
12100 if (decl)
12101 {
12102 if (what)
12103 {
12104 if (msg)
12105 w = warning (OPT_Wdeprecated_declarations,
12106 "%qE is deprecated: %s", what, msg);
12107 else
12108 w = warning (OPT_Wdeprecated_declarations,
12109 "%qE is deprecated", what);
12110 }
12111 else
12112 {
12113 if (msg)
12114 w = warning (OPT_Wdeprecated_declarations,
12115 "type is deprecated: %s", msg);
12116 else
12117 w = warning (OPT_Wdeprecated_declarations,
12118 "type is deprecated");
12119 }
12120 if (w)
12121 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12122 }
12123 else
12124 {
12125 if (what)
12126 {
12127 if (msg)
12128 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12129 what, msg);
12130 else
12131 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12132 }
12133 else
12134 {
12135 if (msg)
12136 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12137 msg);
12138 else
12139 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12140 }
12141 }
12142 }
12143 }
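
/* Editorial illustration, not part of the original sources: given

     int old_fn (void) __attribute__ ((deprecated ("use new_fn")));

   (a hypothetical declaration), a use of old_fn produces, under
   -Wdeprecated-declarations, the warning
   "'old_fn' is deprecated: use new_fn" followed by a "declared here"
   note at the declaration; without the message string the shorter
   "'old_fn' is deprecated" form is emitted instead.  */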
12144
12145 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12146 somewhere in it. */
12147
12148 bool
12149 contains_bitfld_component_ref_p (const_tree ref)
12150 {
12151 while (handled_component_p (ref))
12152 {
12153 if (TREE_CODE (ref) == COMPONENT_REF
12154 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12155 return true;
12156 ref = TREE_OPERAND (ref, 0);
12157 }
12158
12159 return false;
12160 }
12161
12162 /* Try to determine whether a TRY_CATCH expression can fall through.
12163 This is a subroutine of block_may_fallthru. */
12164
12165 static bool
12166 try_catch_may_fallthru (const_tree stmt)
12167 {
12168 tree_stmt_iterator i;
12169
12170 /* If the TRY block can fall through, the whole TRY_CATCH can
12171 fall through. */
12172 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12173 return true;
12174
12175 i = tsi_start (TREE_OPERAND (stmt, 1));
12176 switch (TREE_CODE (tsi_stmt (i)))
12177 {
12178 case CATCH_EXPR:
12179 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12180 catch expression and a body. The whole TRY_CATCH may fall
12181 through iff any of the catch bodies falls through. */
12182 for (; !tsi_end_p (i); tsi_next (&i))
12183 {
12184 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12185 return true;
12186 }
12187 return false;
12188
12189 case EH_FILTER_EXPR:
12190 /* The exception filter expression only matters if there is an
12191 exception. If the exception does not match EH_FILTER_TYPES,
12192 we will execute EH_FILTER_FAILURE, and we will fall through
12193 if that falls through. If the exception does match
12194 EH_FILTER_TYPES, the stack unwinder will continue up the
12195 stack, so we will not fall through. We don't know whether we
12196 will throw an exception which matches EH_FILTER_TYPES or not,
12197 so we just ignore EH_FILTER_TYPES and assume that we might
12198 throw an exception which doesn't match. */
12199 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12200
12201 default:
12202 /* This case represents statements to be executed when an
12203 exception occurs. Those statements are implicitly followed
12204 by a RESX statement to resume execution after the exception.
12205 So in this case the TRY_CATCH never falls through. */
12206 return false;
12207 }
12208 }
12209
12210 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12211 need not be 100% accurate; simply be conservative and return true if we
12212 don't know. This is used only to avoid stupidly generating extra code.
12213 If we're wrong, we'll just delete the extra code later. */
12214
12215 bool
12216 block_may_fallthru (const_tree block)
12217 {
12218 /* This CONST_CAST is okay because expr_last returns its argument
12219 unmodified and we assign it to a const_tree. */
12220 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12221
12222 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12223 {
12224 case GOTO_EXPR:
12225 case RETURN_EXPR:
12226 /* Easy cases. If the last statement of the block implies
12227 control transfer, then we can't fall through. */
12228 return false;
12229
12230 case SWITCH_EXPR:
12231 /* If SWITCH_LABELS is set, this is lowered, and represents a
12232 branch to a selected label and hence cannot fall through.
12233 Otherwise SWITCH_BODY is set, and the switch can fall
12234 through. */
12235 return SWITCH_LABELS (stmt) == NULL_TREE;
12236
12237 case COND_EXPR:
12238 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12239 return true;
12240 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12241
12242 case BIND_EXPR:
12243 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12244
12245 case TRY_CATCH_EXPR:
12246 return try_catch_may_fallthru (stmt);
12247
12248 case TRY_FINALLY_EXPR:
12249 /* The finally clause is always executed after the try clause,
12250 so if it does not fall through, then the try-finally will not
12251 fall through. Otherwise, if the try clause does not fall
12252 through, then when the finally clause falls through it will
12253 resume execution wherever the try clause was going. So the
12254 whole try-finally will only fall through if both the try
12255 clause and the finally clause fall through. */
12256 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12257 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12258
12259 case MODIFY_EXPR:
12260 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12261 stmt = TREE_OPERAND (stmt, 1);
12262 else
12263 return true;
12264 /* FALLTHRU */
12265
12266 case CALL_EXPR:
12267 /* Functions that do not return do not fall through. */
12268 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12269
12270 case CLEANUP_POINT_EXPR:
12271 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12272
12273 case TARGET_EXPR:
12274 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12275
12276 case ERROR_MARK:
12277 return true;
12278
12279 default:
12280 return lang_hooks.block_may_fallthru (stmt);
12281 }
12282 }
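
/* Editorial illustration, not part of the original sources: a
   statement list ending in a RETURN_EXPR or GOTO_EXPR cannot fall
   through, nor can one ending in a call to a noreturn function such as
   abort, while a list ending in an ordinary assignment or call can.
   An ERROR_MARK is conservatively treated as able to fall through, and
   codes the middle end does not know about are deferred to the
   language hook.  */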
12283
12284 /* True if we are using EH to handle cleanups. */
12285 static bool using_eh_for_cleanups_flag = false;
12286
12287 /* This routine is called from front ends to indicate eh should be used for
12288 cleanups. */
12289 void
12290 using_eh_for_cleanups (void)
12291 {
12292 using_eh_for_cleanups_flag = true;
12293 }
12294
12295 /* Query whether EH is used for cleanups. */
12296 bool
12297 using_eh_for_cleanups_p (void)
12298 {
12299 return using_eh_for_cleanups_flag;
12300 }
12301
12302 /* Wrapper for tree_code_name to ensure that tree code is valid. */
12303 const char *
12304 get_tree_code_name (enum tree_code code)
12305 {
12306 const char *invalid = "<invalid tree code>";
12307
12308 if (code >= MAX_TREE_CODES)
12309 return invalid;
12310
12311 return tree_code_name[code];
12312 }
12313
12314 /* Drops the TREE_OVERFLOW flag from T. */
12315
12316 tree
12317 drop_tree_overflow (tree t)
12318 {
12319 gcc_checking_assert (TREE_OVERFLOW (t));
12320
12321 /* For tree codes with a sharing machinery re-build the result. */
12322 if (TREE_CODE (t) == INTEGER_CST)
12323 return wide_int_to_tree (TREE_TYPE (t), t);
12324
12325 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12326 and drop the flag. */
12327 t = copy_node (t);
12328 TREE_OVERFLOW (t) = 0;
12329 return t;
12330 }
12331
12332 /* Given a memory reference expression T, return its base address.
12333 The base address of a memory reference expression is the main
12334 object being referenced. For instance, the base address for
12335 'array[i].fld[j]' is 'array'. You can think of this as stripping
12336 away the offset part from a memory address.
12337
12338 This function calls handled_component_p to strip away all the inner
12339 parts of the memory reference until it reaches the base object. */
12340
12341 tree
12342 get_base_address (tree t)
12343 {
12344 while (handled_component_p (t))
12345 t = TREE_OPERAND (t, 0);
12346
12347 if ((TREE_CODE (t) == MEM_REF
12348 || TREE_CODE (t) == TARGET_MEM_REF)
12349 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12350 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12351
12352 /* ??? Either the alias oracle or all callers need to properly deal
12353 with WITH_SIZE_EXPRs before we can look through those. */
12354 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12355 return NULL_TREE;
12356
12357 return t;
12358 }
12359
12360 /* Return the machine mode of T. For vectors, returns the mode of the
12361 inner type. The main use case is to feed the result to HONOR_NANS,
12362 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12363
12364 machine_mode
12365 element_mode (const_tree t)
12366 {
12367 if (!TYPE_P (t))
12368 t = TREE_TYPE (t);
12369 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12370 t = TREE_TYPE (t);
12371 return TYPE_MODE (t);
12372 }
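
/* Editorial illustration, not part of the original sources: for a
   vector of four floats element_mode returns the scalar mode of the
   element type (typically SFmode) rather than the vector mode, and for
   _Complex double it returns the mode of double (typically DFmode);
   for a plain scalar type the result is just TYPE_MODE.  */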
12373
12374 #include "gt-tree.h"