1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24     tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "hard-reg-set.h"
46 #include "input.h"
47 #include "function.h"
48 #include "obstack.h"
49 #include "toplev.h" /* get_random_seed */
50 #include "inchash.h"
51 #include "filenames.h"
52 #include "output.h"
53 #include "target.h"
54 #include "common/common-target.h"
55 #include "langhooks.h"
56 #include "tree-inline.h"
57 #include "tree-iterator.h"
58 #include "predict.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "basic-block.h"
62 #include "bitmap.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimple-iterator.h"
69 #include "gimplify.h"
70 #include "gimple-ssa.h"
71 #include "hash-map.h"
72 #include "plugin-api.h"
73 #include "ipa-ref.h"
74 #include "cgraph.h"
75 #include "tree-phinodes.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "expr.h"
79 #include "tree-dfa.h"
80 #include "params.h"
81 #include "tree-pass.h"
82 #include "langhooks-def.h"
83 #include "diagnostic.h"
84 #include "tree-diagnostic.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "intl.h"
89 #include "wide-int.h"
90 #include "builtins.h"
91
92 /* Tree code classes. */
93
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
95 #define END_OF_BASE_TREE_CODES tcc_exceptional,
96
97 const enum tree_code_class tree_code_type[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
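
/* tree_code_type above (and tree_code_length and tree_code_name below)
   are built by the same X-macro trick: all-tree.def, generated at build
   time, pulls in tree.def plus the frontends' extra *.def files, and each
   DEFTREECODE (SYM, NAME, TYPE, LENGTH) entry contributes just the field
   selected by the current DEFTREECODE definition.  For example, the
   tree.def entry
       DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)
   expands to the single initializer "tcc_binary," in this table.  */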
103
104 /* Table indexed by tree code giving number of expression
105 operands beyond the fixed part of the node structure.
106 Not used for types or decls. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
109 #define END_OF_BASE_TREE_CODES 0,
110
111 const unsigned char tree_code_length[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Names of tree components.
119 Used for printing out the tree and error messages. */
120 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
121 #define END_OF_BASE_TREE_CODES "@dummy",
122
123 static const char *const tree_code_name[] = {
124 #include "all-tree.def"
125 };
126
127 #undef DEFTREECODE
128 #undef END_OF_BASE_TREE_CODES
129
130 /* Each tree code class has an associated string representation.
131 These must correspond to the tree_code_class entries. */
132
133 const char *const tree_code_class_strings[] =
134 {
135 "exceptional",
136 "constant",
137 "type",
138 "declaration",
139 "reference",
140 "comparison",
141 "unary",
142 "binary",
143 "statement",
144 "vl_exp",
145 "expression"
146 };
147
148 /* obstack.[ch] explicitly declined to prototype this. */
149 extern int _obstack_allocated_p (struct obstack *h, void *obj);
150
151 /* Statistics-gathering stuff. */
152
153 static int tree_code_counts[MAX_TREE_CODES];
154 int tree_node_counts[(int) all_kinds];
155 int tree_node_sizes[(int) all_kinds];
156
157 /* Keep in sync with tree.h:enum tree_node_kind. */
158 static const char * const tree_node_kind_names[] = {
159 "decls",
160 "types",
161 "blocks",
162 "stmts",
163 "refs",
164 "exprs",
165 "constants",
166 "identifiers",
167 "vecs",
168 "binfos",
169 "ssa names",
170 "constructors",
171 "random kinds",
172 "lang_decl kinds",
173 "lang_type kinds",
174 "omp clauses",
175 };
176
177 /* Unique id for next decl created. */
178 static GTY(()) int next_decl_uid;
179 /* Unique id for next type created. */
180 static GTY(()) int next_type_uid = 1;
181 /* Unique id for next debug decl created. Use negative numbers,
182 to catch erroneous uses. */
183 static GTY(()) int next_debug_decl_uid;
184
185 /* Since we cannot rehash a type after it is in the table, we have to
186 keep the hash code. */
187
188 struct GTY(()) type_hash {
189 unsigned long hash;
190 tree type;
191 };
192
193 /* Initial size of the hash table (rounded to next prime). */
194 #define TYPE_HASH_INITIAL_SIZE 1000
195
196 /* Now here is the hash table. When recording a type, it is added to
197 the slot whose index is the hash code. Note that the hash table is
198 used for several kinds of types (function types, array types and
199 array index range types, for now). While all these live in the
200 same table, they are completely independent, and the hash code is
201 computed differently for each of these. */
202
203 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
204 htab_t type_hash_table;
205
206 /* Hash table and temporary node for larger integer const values. */
207 static GTY (()) tree int_cst_node;
208
209 struct int_cst_hasher : ggc_cache_hasher<tree>
210 {
211 static hashval_t hash (tree t);
212 static bool equal (tree x, tree y);
213 };
214
215 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
216
217 /* Hash table for optimization flags and target option flags. Use the same
218 hash table for both sets of options. Nodes for building the current
219 optimization and target option nodes. The assumption is most of the time
220 the options created will already be in the hash table, so we avoid
 221    allocating and freeing up a node repeatedly.  */
222 static GTY (()) tree cl_optimization_node;
223 static GTY (()) tree cl_target_option_node;
224
225 struct cl_option_hasher : ggc_cache_hasher<tree>
226 {
227 static hashval_t hash (tree t);
228 static bool equal (tree x, tree y);
229 };
230
231 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
232
233 /* General tree->tree mapping structure for use in hash tables. */
234
235
236 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
237 htab_t debug_expr_for_decl;
238
239 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
240 htab_t value_expr_for_decl;
241
242 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
243 htab_t debug_args_for_decl;
244
245 static void set_type_quals (tree, int);
246 static int type_hash_eq (const void *, const void *);
247 static hashval_t type_hash_hash (const void *);
248 static void print_type_hash_statistics (void);
249 static void print_debug_expr_statistics (void);
250 static void print_value_expr_statistics (void);
251 static int type_hash_marked_p (const void *);
252 static void type_hash_list (const_tree, inchash::hash &);
253 static void attribute_hash_list (const_tree, inchash::hash &);
254
255 tree global_trees[TI_MAX];
256 tree integer_types[itk_none];
257
258 bool int_n_enabled_p[NUM_INT_N_ENTS];
259 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
260
261 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
262
263 /* Number of operands for each OpenMP clause. */
264 unsigned const char omp_clause_num_ops[] =
265 {
266 0, /* OMP_CLAUSE_ERROR */
267 1, /* OMP_CLAUSE_PRIVATE */
268 1, /* OMP_CLAUSE_SHARED */
269 1, /* OMP_CLAUSE_FIRSTPRIVATE */
270 2, /* OMP_CLAUSE_LASTPRIVATE */
271 4, /* OMP_CLAUSE_REDUCTION */
272 1, /* OMP_CLAUSE_COPYIN */
273 1, /* OMP_CLAUSE_COPYPRIVATE */
274 3, /* OMP_CLAUSE_LINEAR */
275 2, /* OMP_CLAUSE_ALIGNED */
276 1, /* OMP_CLAUSE_DEPEND */
277 1, /* OMP_CLAUSE_UNIFORM */
278 2, /* OMP_CLAUSE_FROM */
279 2, /* OMP_CLAUSE_TO */
280 2, /* OMP_CLAUSE_MAP */
281 1, /* OMP_CLAUSE__LOOPTEMP_ */
282 1, /* OMP_CLAUSE_IF */
283 1, /* OMP_CLAUSE_NUM_THREADS */
284 1, /* OMP_CLAUSE_SCHEDULE */
285 0, /* OMP_CLAUSE_NOWAIT */
286 0, /* OMP_CLAUSE_ORDERED */
287 0, /* OMP_CLAUSE_DEFAULT */
288 3, /* OMP_CLAUSE_COLLAPSE */
289 0, /* OMP_CLAUSE_UNTIED */
290 1, /* OMP_CLAUSE_FINAL */
291 0, /* OMP_CLAUSE_MERGEABLE */
292 1, /* OMP_CLAUSE_DEVICE */
293 1, /* OMP_CLAUSE_DIST_SCHEDULE */
294 0, /* OMP_CLAUSE_INBRANCH */
295 0, /* OMP_CLAUSE_NOTINBRANCH */
296 1, /* OMP_CLAUSE_NUM_TEAMS */
297 1, /* OMP_CLAUSE_THREAD_LIMIT */
298 0, /* OMP_CLAUSE_PROC_BIND */
299 1, /* OMP_CLAUSE_SAFELEN */
300 1, /* OMP_CLAUSE_SIMDLEN */
301 0, /* OMP_CLAUSE_FOR */
302 0, /* OMP_CLAUSE_PARALLEL */
303 0, /* OMP_CLAUSE_SECTIONS */
304 0, /* OMP_CLAUSE_TASKGROUP */
305 1, /* OMP_CLAUSE__SIMDUID_ */
306 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
307 };
308
309 const char * const omp_clause_code_name[] =
310 {
311 "error_clause",
312 "private",
313 "shared",
314 "firstprivate",
315 "lastprivate",
316 "reduction",
317 "copyin",
318 "copyprivate",
319 "linear",
320 "aligned",
321 "depend",
322 "uniform",
323 "from",
324 "to",
325 "map",
326 "_looptemp_",
327 "if",
328 "num_threads",
329 "schedule",
330 "nowait",
331 "ordered",
332 "default",
333 "collapse",
334 "untied",
335 "final",
336 "mergeable",
337 "device",
338 "dist_schedule",
339 "inbranch",
340 "notinbranch",
341 "num_teams",
342 "thread_limit",
343 "proc_bind",
344 "safelen",
345 "simdlen",
346 "for",
347 "parallel",
348 "sections",
349 "taskgroup",
350 "_simduid_",
351 "_Cilk_for_count_"
352 };
353
354
355 /* Return the tree node structure used by tree code CODE. */
356
357 static inline enum tree_node_structure_enum
358 tree_node_structure_for_code (enum tree_code code)
359 {
360 switch (TREE_CODE_CLASS (code))
361 {
362 case tcc_declaration:
363 {
364 switch (code)
365 {
366 case FIELD_DECL:
367 return TS_FIELD_DECL;
368 case PARM_DECL:
369 return TS_PARM_DECL;
370 case VAR_DECL:
371 return TS_VAR_DECL;
372 case LABEL_DECL:
373 return TS_LABEL_DECL;
374 case RESULT_DECL:
375 return TS_RESULT_DECL;
376 case DEBUG_EXPR_DECL:
377 return TS_DECL_WRTL;
378 case CONST_DECL:
379 return TS_CONST_DECL;
380 case TYPE_DECL:
381 return TS_TYPE_DECL;
382 case FUNCTION_DECL:
383 return TS_FUNCTION_DECL;
384 case TRANSLATION_UNIT_DECL:
385 return TS_TRANSLATION_UNIT_DECL;
386 default:
387 return TS_DECL_NON_COMMON;
388 }
389 }
390 case tcc_type:
391 return TS_TYPE_NON_COMMON;
392 case tcc_reference:
393 case tcc_comparison:
394 case tcc_unary:
395 case tcc_binary:
396 case tcc_expression:
397 case tcc_statement:
398 case tcc_vl_exp:
399 return TS_EXP;
400 default: /* tcc_constant and tcc_exceptional */
401 break;
402 }
403 switch (code)
404 {
405 /* tcc_constant cases. */
406 case VOID_CST: return TS_TYPED;
407 case INTEGER_CST: return TS_INT_CST;
408 case REAL_CST: return TS_REAL_CST;
409 case FIXED_CST: return TS_FIXED_CST;
410 case COMPLEX_CST: return TS_COMPLEX;
411 case VECTOR_CST: return TS_VECTOR;
412 case STRING_CST: return TS_STRING;
413 /* tcc_exceptional cases. */
414 case ERROR_MARK: return TS_COMMON;
415 case IDENTIFIER_NODE: return TS_IDENTIFIER;
416 case TREE_LIST: return TS_LIST;
417 case TREE_VEC: return TS_VEC;
418 case SSA_NAME: return TS_SSA_NAME;
419 case PLACEHOLDER_EXPR: return TS_COMMON;
420 case STATEMENT_LIST: return TS_STATEMENT_LIST;
421 case BLOCK: return TS_BLOCK;
422 case CONSTRUCTOR: return TS_CONSTRUCTOR;
423 case TREE_BINFO: return TS_BINFO;
424 case OMP_CLAUSE: return TS_OMP_CLAUSE;
425 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
426 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
427
428 default:
429 gcc_unreachable ();
430 }
431 }
432
433
434 /* Initialize tree_contains_struct to describe the hierarchy of tree
435 nodes. */
436
437 static void
438 initialize_tree_contains_struct (void)
439 {
440 unsigned i;
441
442 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
443 {
444 enum tree_code code;
445 enum tree_node_structure_enum ts_code;
446
447 code = (enum tree_code) i;
448 ts_code = tree_node_structure_for_code (code);
449
450 /* Mark the TS structure itself. */
451 tree_contains_struct[code][ts_code] = 1;
452
453 /* Mark all the structures that TS is derived from. */
454 switch (ts_code)
455 {
456 case TS_TYPED:
457 case TS_BLOCK:
458 MARK_TS_BASE (code);
459 break;
460
461 case TS_COMMON:
462 case TS_INT_CST:
463 case TS_REAL_CST:
464 case TS_FIXED_CST:
465 case TS_VECTOR:
466 case TS_STRING:
467 case TS_COMPLEX:
468 case TS_SSA_NAME:
469 case TS_CONSTRUCTOR:
470 case TS_EXP:
471 case TS_STATEMENT_LIST:
472 MARK_TS_TYPED (code);
473 break;
474
475 case TS_IDENTIFIER:
476 case TS_DECL_MINIMAL:
477 case TS_TYPE_COMMON:
478 case TS_LIST:
479 case TS_VEC:
480 case TS_BINFO:
481 case TS_OMP_CLAUSE:
482 case TS_OPTIMIZATION:
483 case TS_TARGET_OPTION:
484 MARK_TS_COMMON (code);
485 break;
486
487 case TS_TYPE_WITH_LANG_SPECIFIC:
488 MARK_TS_TYPE_COMMON (code);
489 break;
490
491 case TS_TYPE_NON_COMMON:
492 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
493 break;
494
495 case TS_DECL_COMMON:
496 MARK_TS_DECL_MINIMAL (code);
497 break;
498
499 case TS_DECL_WRTL:
500 case TS_CONST_DECL:
501 MARK_TS_DECL_COMMON (code);
502 break;
503
504 case TS_DECL_NON_COMMON:
505 MARK_TS_DECL_WITH_VIS (code);
506 break;
507
508 case TS_DECL_WITH_VIS:
509 case TS_PARM_DECL:
510 case TS_LABEL_DECL:
511 case TS_RESULT_DECL:
512 MARK_TS_DECL_WRTL (code);
513 break;
514
515 case TS_FIELD_DECL:
516 MARK_TS_DECL_COMMON (code);
517 break;
518
519 case TS_VAR_DECL:
520 MARK_TS_DECL_WITH_VIS (code);
521 break;
522
523 case TS_TYPE_DECL:
524 case TS_FUNCTION_DECL:
525 MARK_TS_DECL_NON_COMMON (code);
526 break;
527
528 case TS_TRANSLATION_UNIT_DECL:
529 MARK_TS_DECL_COMMON (code);
530 break;
531
532 default:
533 gcc_unreachable ();
534 }
535 }
536
537 /* Basic consistency checks for attributes used in fold. */
538 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
539 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
540 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
542 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
543 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
544 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
545 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
546 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
547 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
548 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
549 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
550 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
551 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
552 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
553 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
554 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
556 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
557 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
558 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
559 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
560 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
561 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
562 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
563 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
564 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
565 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
566 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
567 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
568 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
569 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
570 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
571 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
572 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
573 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
574 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
575 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
576 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
577 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
578 }
579
580
581 /* Init tree.c. */
582
583 void
584 init_ttree (void)
585 {
586 /* Initialize the hash table of types. */
587 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
588 type_hash_eq, 0);
589
590 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
591 tree_decl_map_eq, 0);
592
593 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
594 tree_decl_map_eq, 0);
595
596 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
597
598 int_cst_node = make_int_cst (1, 1);
599
600 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
601
602 cl_optimization_node = make_node (OPTIMIZATION_NODE);
603 cl_target_option_node = make_node (TARGET_OPTION_NODE);
604
605 /* Initialize the tree_contains_struct array. */
606 initialize_tree_contains_struct ();
607 lang_hooks.init_ts ();
608 }
609
610 \f
611 /* The name of the object as the assembler will see it (but before any
612 translations made by ASM_OUTPUT_LABELREF). Often this is the same
613 as DECL_NAME. It is an IDENTIFIER_NODE. */
614 tree
615 decl_assembler_name (tree decl)
616 {
617 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
618 lang_hooks.set_decl_assembler_name (decl);
619 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
620 }
621
622 /* When the target supports COMDAT groups, this indicates which group the
623 DECL is associated with. This can be either an IDENTIFIER_NODE or a
624 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
625 tree
626 decl_comdat_group (const_tree node)
627 {
628 struct symtab_node *snode = symtab_node::get (node);
629 if (!snode)
630 return NULL;
631 return snode->get_comdat_group ();
632 }
633
634 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
635 tree
636 decl_comdat_group_id (const_tree node)
637 {
638 struct symtab_node *snode = symtab_node::get (node);
639 if (!snode)
640 return NULL;
641 return snode->get_comdat_group_id ();
642 }
643
 644 /* When the target supports named sections, return the section name of
 645    NODE as a string, or NULL if it is in no section.  */
646 const char *
647 decl_section_name (const_tree node)
648 {
649 struct symtab_node *snode = symtab_node::get (node);
650 if (!snode)
651 return NULL;
652 return snode->get_section ();
653 }
654
 655 /* Set the section name of NODE to the string VALUE, or clear it if
 656    VALUE is NULL.  */
657 void
658 set_decl_section_name (tree node, const char *value)
659 {
660 struct symtab_node *snode;
661
662 if (value == NULL)
663 {
664 snode = symtab_node::get (node);
665 if (!snode)
666 return;
667 }
668 else if (TREE_CODE (node) == VAR_DECL)
669 snode = varpool_node::get_create (node);
670 else
671 snode = cgraph_node::get_create (node);
672 snode->set_section (value);
673 }
674
675 /* Return TLS model of a variable NODE. */
676 enum tls_model
677 decl_tls_model (const_tree node)
678 {
679 struct varpool_node *snode = varpool_node::get (node);
680 if (!snode)
681 return TLS_MODEL_NONE;
682 return snode->tls_model;
683 }
684
685 /* Set TLS model of variable NODE to MODEL. */
686 void
687 set_decl_tls_model (tree node, enum tls_model model)
688 {
689 struct varpool_node *vnode;
690
691 if (model == TLS_MODEL_NONE)
692 {
693 vnode = varpool_node::get (node);
694 if (!vnode)
695 return;
696 }
697 else
698 vnode = varpool_node::get_create (node);
699 vnode->tls_model = model;
700 }
701
702 /* Compute the number of bytes occupied by a tree with code CODE.
703 This function cannot be used for nodes that have variable sizes,
704 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
705 size_t
706 tree_code_size (enum tree_code code)
707 {
708 switch (TREE_CODE_CLASS (code))
709 {
710 case tcc_declaration: /* A decl node */
711 {
712 switch (code)
713 {
714 case FIELD_DECL:
715 return sizeof (struct tree_field_decl);
716 case PARM_DECL:
717 return sizeof (struct tree_parm_decl);
718 case VAR_DECL:
719 return sizeof (struct tree_var_decl);
720 case LABEL_DECL:
721 return sizeof (struct tree_label_decl);
722 case RESULT_DECL:
723 return sizeof (struct tree_result_decl);
724 case CONST_DECL:
725 return sizeof (struct tree_const_decl);
726 case TYPE_DECL:
727 return sizeof (struct tree_type_decl);
728 case FUNCTION_DECL:
729 return sizeof (struct tree_function_decl);
730 case DEBUG_EXPR_DECL:
731 return sizeof (struct tree_decl_with_rtl);
732 case TRANSLATION_UNIT_DECL:
733 return sizeof (struct tree_translation_unit_decl);
734 case NAMESPACE_DECL:
735 case IMPORTED_DECL:
736 case NAMELIST_DECL:
737 return sizeof (struct tree_decl_non_common);
738 default:
739 return lang_hooks.tree_size (code);
740 }
741 }
742
743 case tcc_type: /* a type node */
744 return sizeof (struct tree_type_non_common);
745
746 case tcc_reference: /* a reference */
747 case tcc_expression: /* an expression */
748 case tcc_statement: /* an expression with side effects */
749 case tcc_comparison: /* a comparison expression */
750 case tcc_unary: /* a unary arithmetic expression */
751 case tcc_binary: /* a binary arithmetic expression */
752 return (sizeof (struct tree_exp)
753 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
754
755 case tcc_constant: /* a constant */
756 switch (code)
757 {
758 case VOID_CST: return sizeof (struct tree_typed);
759 case INTEGER_CST: gcc_unreachable ();
760 case REAL_CST: return sizeof (struct tree_real_cst);
761 case FIXED_CST: return sizeof (struct tree_fixed_cst);
762 case COMPLEX_CST: return sizeof (struct tree_complex);
763 case VECTOR_CST: return sizeof (struct tree_vector);
764 case STRING_CST: gcc_unreachable ();
765 default:
766 return lang_hooks.tree_size (code);
767 }
768
769 case tcc_exceptional: /* something random, like an identifier. */
770 switch (code)
771 {
772 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
773 case TREE_LIST: return sizeof (struct tree_list);
774
775 case ERROR_MARK:
776 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
777
778 case TREE_VEC:
779 case OMP_CLAUSE: gcc_unreachable ();
780
781 case SSA_NAME: return sizeof (struct tree_ssa_name);
782
783 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
784 case BLOCK: return sizeof (struct tree_block);
785 case CONSTRUCTOR: return sizeof (struct tree_constructor);
786 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
787 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
788
789 default:
790 return lang_hooks.tree_size (code);
791 }
792
793 default:
794 gcc_unreachable ();
795 }
796 }
797
798 /* Compute the number of bytes occupied by NODE. This routine only
799 looks at TREE_CODE, except for those nodes that have variable sizes. */
800 size_t
801 tree_size (const_tree node)
802 {
803 const enum tree_code code = TREE_CODE (node);
804 switch (code)
805 {
806 case INTEGER_CST:
807 return (sizeof (struct tree_int_cst)
808 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
809
810 case TREE_BINFO:
811 return (offsetof (struct tree_binfo, base_binfos)
812 + vec<tree, va_gc>
813 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
814
815 case TREE_VEC:
816 return (sizeof (struct tree_vec)
817 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
818
819 case VECTOR_CST:
820 return (sizeof (struct tree_vector)
821 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
822
823 case STRING_CST:
824 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
825
826 case OMP_CLAUSE:
827 return (sizeof (struct tree_omp_clause)
828 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
829 * sizeof (tree));
830
831 default:
832 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
833 return (sizeof (struct tree_exp)
834 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
835 else
836 return tree_code_size (code);
837 }
838 }
839
840 /* Record interesting allocation statistics for a tree node with CODE
841 and LENGTH. */
842
843 static void
844 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
845 size_t length ATTRIBUTE_UNUSED)
846 {
847 enum tree_code_class type = TREE_CODE_CLASS (code);
848 tree_node_kind kind;
849
850 if (!GATHER_STATISTICS)
851 return;
852
853 switch (type)
854 {
855 case tcc_declaration: /* A decl node */
856 kind = d_kind;
857 break;
858
859 case tcc_type: /* a type node */
860 kind = t_kind;
861 break;
862
863 case tcc_statement: /* an expression with side effects */
864 kind = s_kind;
865 break;
866
867 case tcc_reference: /* a reference */
868 kind = r_kind;
869 break;
870
871 case tcc_expression: /* an expression */
872 case tcc_comparison: /* a comparison expression */
873 case tcc_unary: /* a unary arithmetic expression */
874 case tcc_binary: /* a binary arithmetic expression */
875 kind = e_kind;
876 break;
877
878 case tcc_constant: /* a constant */
879 kind = c_kind;
880 break;
881
882 case tcc_exceptional: /* something random, like an identifier. */
883 switch (code)
884 {
885 case IDENTIFIER_NODE:
886 kind = id_kind;
887 break;
888
889 case TREE_VEC:
890 kind = vec_kind;
891 break;
892
893 case TREE_BINFO:
894 kind = binfo_kind;
895 break;
896
897 case SSA_NAME:
898 kind = ssa_name_kind;
899 break;
900
901 case BLOCK:
902 kind = b_kind;
903 break;
904
905 case CONSTRUCTOR:
906 kind = constr_kind;
907 break;
908
909 case OMP_CLAUSE:
910 kind = omp_clause_kind;
911 break;
912
913 default:
914 kind = x_kind;
915 break;
916 }
917 break;
918
919 case tcc_vl_exp:
920 kind = e_kind;
921 break;
922
923 default:
924 gcc_unreachable ();
925 }
926
927 tree_code_counts[(int) code]++;
928 tree_node_counts[(int) kind]++;
929 tree_node_sizes[(int) kind] += length;
930 }
931
932 /* Allocate and return a new UID from the DECL_UID namespace. */
933
934 int
935 allocate_decl_uid (void)
936 {
937 return next_decl_uid++;
938 }
939
940 /* Return a newly allocated node of code CODE. For decl and type
941 nodes, some other fields are initialized. The rest of the node is
942 initialized to zero. This function cannot be used for TREE_VEC,
943 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
944 tree_code_size.
945
946 Achoo! I got a code in the node. */
947
948 tree
949 make_node_stat (enum tree_code code MEM_STAT_DECL)
950 {
951 tree t;
952 enum tree_code_class type = TREE_CODE_CLASS (code);
953 size_t length = tree_code_size (code);
954
955 record_node_allocation_statistics (code, length);
956
957 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
958 TREE_SET_CODE (t, code);
959
960 switch (type)
961 {
962 case tcc_statement:
963 TREE_SIDE_EFFECTS (t) = 1;
964 break;
965
966 case tcc_declaration:
967 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
968 {
969 if (code == FUNCTION_DECL)
970 {
971 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
972 DECL_MODE (t) = FUNCTION_MODE;
973 }
974 else
975 DECL_ALIGN (t) = 1;
976 }
977 DECL_SOURCE_LOCATION (t) = input_location;
978 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
979 DECL_UID (t) = --next_debug_decl_uid;
980 else
981 {
982 DECL_UID (t) = allocate_decl_uid ();
983 SET_DECL_PT_UID (t, -1);
984 }
985 if (TREE_CODE (t) == LABEL_DECL)
986 LABEL_DECL_UID (t) = -1;
987
988 break;
989
990 case tcc_type:
991 TYPE_UID (t) = next_type_uid++;
992 TYPE_ALIGN (t) = BITS_PER_UNIT;
993 TYPE_USER_ALIGN (t) = 0;
994 TYPE_MAIN_VARIANT (t) = t;
995 TYPE_CANONICAL (t) = t;
996
997 /* Default to no attributes for type, but let target change that. */
998 TYPE_ATTRIBUTES (t) = NULL_TREE;
999 targetm.set_default_type_attributes (t);
1000
1001 /* We have not yet computed the alias set for this type. */
1002 TYPE_ALIAS_SET (t) = -1;
1003 break;
1004
1005 case tcc_constant:
1006 TREE_CONSTANT (t) = 1;
1007 break;
1008
1009 case tcc_expression:
1010 switch (code)
1011 {
1012 case INIT_EXPR:
1013 case MODIFY_EXPR:
1014 case VA_ARG_EXPR:
1015 case PREDECREMENT_EXPR:
1016 case PREINCREMENT_EXPR:
1017 case POSTDECREMENT_EXPR:
1018 case POSTINCREMENT_EXPR:
1019 /* All of these have side-effects, no matter what their
1020 operands are. */
1021 TREE_SIDE_EFFECTS (t) = 1;
1022 break;
1023
1024 default:
1025 break;
1026 }
1027 break;
1028
1029 default:
1030 /* Other classes need no special treatment. */
1031 break;
1032 }
1033
1034 return t;
1035 }
1036 \f
1037 /* Return a new node with the same contents as NODE except that its
1038 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1039
1040 tree
1041 copy_node_stat (tree node MEM_STAT_DECL)
1042 {
1043 tree t;
1044 enum tree_code code = TREE_CODE (node);
1045 size_t length;
1046
1047 gcc_assert (code != STATEMENT_LIST);
1048
1049 length = tree_size (node);
1050 record_node_allocation_statistics (code, length);
1051 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1052 memcpy (t, node, length);
1053
1054 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1055 TREE_CHAIN (t) = 0;
1056 TREE_ASM_WRITTEN (t) = 0;
1057 TREE_VISITED (t) = 0;
1058
1059 if (TREE_CODE_CLASS (code) == tcc_declaration)
1060 {
1061 if (code == DEBUG_EXPR_DECL)
1062 DECL_UID (t) = --next_debug_decl_uid;
1063 else
1064 {
1065 DECL_UID (t) = allocate_decl_uid ();
1066 if (DECL_PT_UID_SET_P (node))
1067 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1068 }
1069 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1070 && DECL_HAS_VALUE_EXPR_P (node))
1071 {
1072 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1073 DECL_HAS_VALUE_EXPR_P (t) = 1;
1074 }
 1075      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1076 if (TREE_CODE (node) == VAR_DECL)
1077 {
1078 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1079 t->decl_with_vis.symtab_node = NULL;
1080 }
1081 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1082 {
1083 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1084 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1085 }
1086 if (TREE_CODE (node) == FUNCTION_DECL)
1087 {
1088 DECL_STRUCT_FUNCTION (t) = NULL;
1089 t->decl_with_vis.symtab_node = NULL;
1090 }
1091 }
1092 else if (TREE_CODE_CLASS (code) == tcc_type)
1093 {
1094 TYPE_UID (t) = next_type_uid++;
1095 /* The following is so that the debug code for
1096 the copy is different from the original type.
1097 The two statements usually duplicate each other
1098 (because they clear fields of the same union),
1099 but the optimizer should catch that. */
1100 TYPE_SYMTAB_POINTER (t) = 0;
1101 TYPE_SYMTAB_ADDRESS (t) = 0;
1102
1103 /* Do not copy the values cache. */
1104 if (TYPE_CACHED_VALUES_P (t))
1105 {
1106 TYPE_CACHED_VALUES_P (t) = 0;
1107 TYPE_CACHED_VALUES (t) = NULL_TREE;
1108 }
1109 }
1110
1111 return t;
1112 }
1113
1114 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1115 For example, this can copy a list made of TREE_LIST nodes. */
1116
1117 tree
1118 copy_list (tree list)
1119 {
1120 tree head;
1121 tree prev, next;
1122
1123 if (list == 0)
1124 return 0;
1125
1126 head = prev = copy_node (list);
1127 next = TREE_CHAIN (list);
1128 while (next)
1129 {
1130 TREE_CHAIN (prev) = copy_node (next);
1131 prev = TREE_CHAIN (prev);
1132 next = TREE_CHAIN (next);
1133 }
1134 return head;
1135 }
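
/* Note that the copy is shallow: the TREE_LIST nodes themselves are
   duplicated, but the TREE_PURPOSE and TREE_VALUE fields of the new nodes
   still point at the same operands as the original chain.  So, e.g.,
       tree args_copy = copy_list (args);
   lets a caller rechain or extend ARGS_COPY without disturbing ARGS, while
   the listed values remain shared (ARGS_COPY is an illustrative name).  */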
1136
1137 \f
1138 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1139 INTEGER_CST with value CST and type TYPE. */
1140
1141 static unsigned int
1142 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1143 {
1144 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1145 /* We need an extra zero HWI if CST is an unsigned integer with its
1146 upper bit set, and if CST occupies a whole number of HWIs. */
1147 if (TYPE_UNSIGNED (type)
1148 && wi::neg_p (cst)
1149 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1150 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1151 return cst.get_len ();
1152 }
1153
1154 /* Return a new INTEGER_CST with value CST and type TYPE. */
1155
1156 static tree
1157 build_new_int_cst (tree type, const wide_int &cst)
1158 {
1159 unsigned int len = cst.get_len ();
1160 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1161 tree nt = make_int_cst (len, ext_len);
1162
1163 if (len < ext_len)
1164 {
1165 --ext_len;
1166 TREE_INT_CST_ELT (nt, ext_len) = 0;
1167 for (unsigned int i = len; i < ext_len; ++i)
1168 TREE_INT_CST_ELT (nt, i) = -1;
1169 }
1170 else if (TYPE_UNSIGNED (type)
1171 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1172 {
1173 len--;
1174 TREE_INT_CST_ELT (nt, len)
1175 = zext_hwi (cst.elt (len),
1176 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1177 }
1178
1179 for (unsigned int i = 0; i < len; i++)
1180 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1181 TREE_TYPE (nt) = type;
1182 return nt;
1183 }
1184
1185 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1186
1187 tree
1188 build_int_cst (tree type, HOST_WIDE_INT low)
1189 {
1190 /* Support legacy code. */
1191 if (!type)
1192 type = integer_type_node;
1193
1194 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1195 }
1196
1197 tree
1198 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1199 {
1200 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1201 }
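
/* A sketch of typical uses of the two helpers above:
       tree forty_two = build_int_cst (integer_type_node, 42);
       tree seven     = build_int_cstu (size_type_node, 7);
   Both funnel into wide_int_to_tree, so small values like these come back
   as shared nodes from the per-type cache rather than fresh allocations.  */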
1202
1203 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1204
1205 tree
1206 build_int_cst_type (tree type, HOST_WIDE_INT low)
1207 {
1208 gcc_assert (type);
1209 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1210 }
1211
 1212 /* Constructs a tree of type TYPE with the value given by CST.  Signedness
1213 of CST is assumed to be the same as the signedness of TYPE. */
1214
1215 tree
1216 double_int_to_tree (tree type, double_int cst)
1217 {
1218 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1219 }
1220
1221 /* We force the wide_int CST to the range of the type TYPE by sign or
1222 zero extending it. OVERFLOWABLE indicates if we are interested in
1223 overflow of the value, when >0 we are only interested in signed
1224 overflow, for <0 we are interested in any overflow. OVERFLOWED
1225 indicates whether overflow has already occurred. CONST_OVERFLOWED
 1226    indicates whether overflow has already occurred.  We force CST's
 1227    value to be within the range of TYPE (by setting to 0 or 1 all the
 1228    bits outside the type's range).  We set TREE_OVERFLOW if
 1229    OVERFLOWED is nonzero,
 1230    or OVERFLOWABLE is >0 and signed overflow occurs,
 1231    or OVERFLOWABLE is <0 and any overflow occurs.
1233 is shared if no overflow flags are set. */
1234
1235
1236 tree
1237 force_fit_type (tree type, const wide_int_ref &cst,
1238 int overflowable, bool overflowed)
1239 {
1240 signop sign = TYPE_SIGN (type);
1241
1242 /* If we need to set overflow flags, return a new unshared node. */
1243 if (overflowed || !wi::fits_to_tree_p (cst, type))
1244 {
1245 if (overflowed
1246 || overflowable < 0
1247 || (overflowable > 0 && sign == SIGNED))
1248 {
1249 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1250 tree t = build_new_int_cst (type, tmp);
1251 TREE_OVERFLOW (t) = 1;
1252 return t;
1253 }
1254 }
1255
1256 /* Else build a shared node. */
1257 return wide_int_to_tree (type, cst);
1258 }
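
/* As an illustration of the rules above: forcing the value 300 into a
   signed 8-bit type yields the INTEGER_CST 44 (300 truncated to 8 bits),
   and since the value did not fit, TREE_OVERFLOW is set on a fresh,
   unshared node when OVERFLOWED is true, OVERFLOWABLE < 0, or
   OVERFLOWABLE > 0 and the type is signed; otherwise the truncated value
   comes back as an ordinary shared constant.  */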
1259
 1260 /* These are the hash table functions for the hash table of shared
 1261    INTEGER_CST nodes.  */
1262
 1263 /* Return the hash code of X, an INTEGER_CST.  */
1264
1265 hashval_t
1266 int_cst_hasher::hash (tree x)
1267 {
1268 const_tree const t = x;
1269 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1270 int i;
1271
1272 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1273 code ^= TREE_INT_CST_ELT (t, i);
1274
1275 return code;
1276 }
1277
 1278 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
 1279    is the same as that given by Y, another INTEGER_CST.  */
1280
1281 bool
1282 int_cst_hasher::equal (tree x, tree y)
1283 {
1284 const_tree const xt = x;
1285 const_tree const yt = y;
1286
1287 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1288 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1289 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1290 return false;
1291
1292 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1293 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1294 return false;
1295
1296 return true;
1297 }
1298
1299 /* Create an INT_CST node of TYPE and value CST.
1300 The returned node is always shared. For small integers we use a
1301 per-type vector cache, for larger ones we use a single hash table.
1302 The value is extended from its precision according to the sign of
1303 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1304 the upper bits and ensures that hashing and value equality based
1305 upon the underlying HOST_WIDE_INTs works without masking. */
1306
1307 tree
1308 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1309 {
1310 tree t;
1311 int ix = -1;
1312 int limit = 0;
1313
1314 gcc_assert (type);
1315 unsigned int prec = TYPE_PRECISION (type);
1316 signop sgn = TYPE_SIGN (type);
1317
1318 /* Verify that everything is canonical. */
1319 int l = pcst.get_len ();
1320 if (l > 1)
1321 {
1322 if (pcst.elt (l - 1) == 0)
1323 gcc_checking_assert (pcst.elt (l - 2) < 0);
1324 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1325 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1326 }
1327
1328 wide_int cst = wide_int::from (pcst, prec, sgn);
1329 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1330
1331 if (ext_len == 1)
1332 {
1333 /* We just need to store a single HOST_WIDE_INT. */
1334 HOST_WIDE_INT hwi;
1335 if (TYPE_UNSIGNED (type))
1336 hwi = cst.to_uhwi ();
1337 else
1338 hwi = cst.to_shwi ();
1339
1340 switch (TREE_CODE (type))
1341 {
1342 case NULLPTR_TYPE:
1343 gcc_assert (hwi == 0);
1344 /* Fallthru. */
1345
1346 case POINTER_TYPE:
1347 case REFERENCE_TYPE:
1348 case POINTER_BOUNDS_TYPE:
1349 /* Cache NULL pointer and zero bounds. */
1350 if (hwi == 0)
1351 {
1352 limit = 1;
1353 ix = 0;
1354 }
1355 break;
1356
1357 case BOOLEAN_TYPE:
1358 /* Cache false or true. */
1359 limit = 2;
1360 if (hwi < 2)
1361 ix = hwi;
1362 break;
1363
1364 case INTEGER_TYPE:
1365 case OFFSET_TYPE:
1366 if (TYPE_SIGN (type) == UNSIGNED)
1367 {
1368 /* Cache [0, N). */
1369 limit = INTEGER_SHARE_LIMIT;
1370 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1371 ix = hwi;
1372 }
1373 else
1374 {
1375 /* Cache [-1, N). */
1376 limit = INTEGER_SHARE_LIMIT + 1;
1377 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1378 ix = hwi + 1;
1379 }
1380 break;
1381
1382 case ENUMERAL_TYPE:
1383 break;
1384
1385 default:
1386 gcc_unreachable ();
1387 }
1388
1389 if (ix >= 0)
1390 {
1391 /* Look for it in the type's vector of small shared ints. */
1392 if (!TYPE_CACHED_VALUES_P (type))
1393 {
1394 TYPE_CACHED_VALUES_P (type) = 1;
1395 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1396 }
1397
1398 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1399 if (t)
1400 /* Make sure no one is clobbering the shared constant. */
1401 gcc_checking_assert (TREE_TYPE (t) == type
1402 && TREE_INT_CST_NUNITS (t) == 1
1403 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1404 && TREE_INT_CST_EXT_NUNITS (t) == 1
1405 && TREE_INT_CST_ELT (t, 0) == hwi);
1406 else
1407 {
1408 /* Create a new shared int. */
1409 t = build_new_int_cst (type, cst);
1410 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1411 }
1412 }
1413 else
1414 {
1415 /* Use the cache of larger shared ints, using int_cst_node as
1416 a temporary. */
1417
1418 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1419 TREE_TYPE (int_cst_node) = type;
1420
1421 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1422 t = *slot;
1423 if (!t)
1424 {
1425 /* Insert this one into the hash table. */
1426 t = int_cst_node;
1427 *slot = t;
1428 /* Make a new node for next time round. */
1429 int_cst_node = make_int_cst (1, 1);
1430 }
1431 }
1432 }
1433 else
1434 {
1435 /* The value either hashes properly or we drop it on the floor
1436 for the gc to take care of. There will not be enough of them
1437 to worry about. */
1438
1439 tree nt = build_new_int_cst (type, cst);
1440 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1441 t = *slot;
1442 if (!t)
1443 {
1444 /* Insert this one into the hash table. */
1445 t = nt;
1446 *slot = t;
1447 }
1448 }
1449
1450 return t;
1451 }
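
/* To recap the sharing scheme: null pointers, the two boolean values and
   the first few integers of each integral type live in the type's
   TYPE_CACHED_VALUES vector, while every other value is interned in
   int_cst_hash_table; either way, repeated calls with the same type and
   value hand back the same INTEGER_CST node.  */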
1452
1453 void
1454 cache_integer_cst (tree t)
1455 {
1456 tree type = TREE_TYPE (t);
1457 int ix = -1;
1458 int limit = 0;
1459 int prec = TYPE_PRECISION (type);
1460
1461 gcc_assert (!TREE_OVERFLOW (t));
1462
1463 switch (TREE_CODE (type))
1464 {
1465 case NULLPTR_TYPE:
1466 gcc_assert (integer_zerop (t));
1467 /* Fallthru. */
1468
1469 case POINTER_TYPE:
1470 case REFERENCE_TYPE:
1471 /* Cache NULL pointer. */
1472 if (integer_zerop (t))
1473 {
1474 limit = 1;
1475 ix = 0;
1476 }
1477 break;
1478
1479 case BOOLEAN_TYPE:
1480 /* Cache false or true. */
1481 limit = 2;
1482 if (wi::ltu_p (t, 2))
1483 ix = TREE_INT_CST_ELT (t, 0);
1484 break;
1485
1486 case INTEGER_TYPE:
1487 case OFFSET_TYPE:
1488 if (TYPE_UNSIGNED (type))
1489 {
1490 /* Cache 0..N */
1491 limit = INTEGER_SHARE_LIMIT;
1492
 1493      /* This is a little hokey, but if the prec is smaller than
1494 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1495 obvious test will not get the correct answer. */
1496 if (prec < HOST_BITS_PER_WIDE_INT)
1497 {
1498 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1499 ix = tree_to_uhwi (t);
1500 }
1501 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1502 ix = tree_to_uhwi (t);
1503 }
1504 else
1505 {
1506 /* Cache -1..N */
1507 limit = INTEGER_SHARE_LIMIT + 1;
1508
1509 if (integer_minus_onep (t))
1510 ix = 0;
1511 else if (!wi::neg_p (t))
1512 {
1513 if (prec < HOST_BITS_PER_WIDE_INT)
1514 {
1515 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1516 ix = tree_to_shwi (t) + 1;
1517 }
1518 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1519 ix = tree_to_shwi (t) + 1;
1520 }
1521 }
1522 break;
1523
1524 case ENUMERAL_TYPE:
1525 break;
1526
1527 default:
1528 gcc_unreachable ();
1529 }
1530
1531 if (ix >= 0)
1532 {
1533 /* Look for it in the type's vector of small shared ints. */
1534 if (!TYPE_CACHED_VALUES_P (type))
1535 {
1536 TYPE_CACHED_VALUES_P (type) = 1;
1537 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1538 }
1539
1540 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1541 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1542 }
1543 else
1544 {
1545 /* Use the cache of larger shared ints. */
1546 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1547 /* If there is already an entry for the number verify it's the
1548 same. */
1549 if (*slot)
1550 gcc_assert (wi::eq_p (tree (*slot), t));
1551 else
1552 /* Otherwise insert this one into the hash table. */
1553 *slot = t;
1554 }
1555 }
1556
1557
 1558 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1559 and the rest are zeros. */
1560
1561 tree
1562 build_low_bits_mask (tree type, unsigned bits)
1563 {
1564 gcc_assert (bits <= TYPE_PRECISION (type));
1565
1566 return wide_int_to_tree (type, wi::mask (bits, false,
1567 TYPE_PRECISION (type)));
1568 }
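
/* For instance, build_low_bits_mask (unsigned_type_node, 3) produces the
   constant 7 (binary ...000111) of type unsigned int.  */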
1569
 1570 /* Checks that X is an integer constant that can be expressed in (unsigned)
1571 HOST_WIDE_INT without loss of precision. */
1572
1573 bool
1574 cst_and_fits_in_hwi (const_tree x)
1575 {
1576 if (TREE_CODE (x) != INTEGER_CST)
1577 return false;
1578
1579 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1580 return false;
1581
1582 return TREE_INT_CST_NUNITS (x) == 1;
1583 }
1584
 1585 /* Build a newly constructed VECTOR_CST node of length LEN.  */
1586
1587 tree
1588 make_vector_stat (unsigned len MEM_STAT_DECL)
1589 {
1590 tree t;
1591 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1592
1593 record_node_allocation_statistics (VECTOR_CST, length);
1594
1595 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1596
1597 TREE_SET_CODE (t, VECTOR_CST);
1598 TREE_CONSTANT (t) = 1;
1599
1600 return t;
1601 }
1602
1603 /* Return a new VECTOR_CST node whose type is TYPE and whose values
 1604    are in the array pointed to by VALS.  */
1605
1606 tree
1607 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1608 {
1609 int over = 0;
1610 unsigned cnt = 0;
1611 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1612 TREE_TYPE (v) = type;
1613
1614 /* Iterate through elements and check for overflow. */
1615 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1616 {
1617 tree value = vals[cnt];
1618
1619 VECTOR_CST_ELT (v, cnt) = value;
1620
1621 /* Don't crash if we get an address constant. */
1622 if (!CONSTANT_CLASS_P (value))
1623 continue;
1624
1625 over |= TREE_OVERFLOW (value);
1626 }
1627
1628 TREE_OVERFLOW (v) = over;
1629 return v;
1630 }
1631
1632 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1633 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1634
1635 tree
1636 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1637 {
1638 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1639 unsigned HOST_WIDE_INT idx;
1640 tree value;
1641
1642 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1643 vec[idx] = value;
1644 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1645 vec[idx] = build_zero_cst (TREE_TYPE (type));
1646
1647 return build_vector (type, vec);
1648 }
1649
1650 /* Build a vector of type VECTYPE where all the elements are SCs. */
1651 tree
1652 build_vector_from_val (tree vectype, tree sc)
1653 {
1654 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1655
1656 if (sc == error_mark_node)
1657 return sc;
1658
1659 /* Verify that the vector type is suitable for SC. Note that there
1660 is some inconsistency in the type-system with respect to restrict
1661 qualifications of pointers. Vector types always have a main-variant
1662 element type and the qualification is applied to the vector-type.
1663 So TREE_TYPE (vector-type) does not return a properly qualified
1664 vector element-type. */
1665 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1666 TREE_TYPE (vectype)));
1667
1668 if (CONSTANT_CLASS_P (sc))
1669 {
1670 tree *v = XALLOCAVEC (tree, nunits);
1671 for (i = 0; i < nunits; ++i)
1672 v[i] = sc;
1673 return build_vector (vectype, v);
1674 }
1675 else
1676 {
1677 vec<constructor_elt, va_gc> *v;
1678 vec_alloc (v, nunits);
1679 for (i = 0; i < nunits; ++i)
1680 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1681 return build_constructor (vectype, v);
1682 }
1683 }
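
/* For example, with a four-element vector of int (call it V4SI, an
   illustrative name) and SC = build_int_cst (integer_type_node, 1),
       build_vector_from_val (v4si, sc)
   yields the VECTOR_CST {1, 1, 1, 1}; if SC were a non-constant value
   such as an SSA name, the result would instead be a CONSTRUCTOR with
   four identical elements.  */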
1684
1685 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1686 are in the vec pointed to by VALS. */
1687 tree
1688 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1689 {
1690 tree c = make_node (CONSTRUCTOR);
1691 unsigned int i;
1692 constructor_elt *elt;
1693 bool constant_p = true;
1694 bool side_effects_p = false;
1695
1696 TREE_TYPE (c) = type;
1697 CONSTRUCTOR_ELTS (c) = vals;
1698
1699 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1700 {
1701 /* Mostly ctors will have elts that don't have side-effects, so
1702 the usual case is to scan all the elements. Hence a single
1703 loop for both const and side effects, rather than one loop
1704 each (with early outs). */
1705 if (!TREE_CONSTANT (elt->value))
1706 constant_p = false;
1707 if (TREE_SIDE_EFFECTS (elt->value))
1708 side_effects_p = true;
1709 }
1710
1711 TREE_SIDE_EFFECTS (c) = side_effects_p;
1712 TREE_CONSTANT (c) = constant_p;
1713
1714 return c;
1715 }
1716
1717 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1718 INDEX and VALUE. */
1719 tree
1720 build_constructor_single (tree type, tree index, tree value)
1721 {
1722 vec<constructor_elt, va_gc> *v;
1723 constructor_elt elt = {index, value};
1724
1725 vec_alloc (v, 1);
1726 v->quick_push (elt);
1727
1728 return build_constructor (type, v);
1729 }
1730
1731
1732 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1733 are in a list pointed to by VALS. */
1734 tree
1735 build_constructor_from_list (tree type, tree vals)
1736 {
1737 tree t;
1738 vec<constructor_elt, va_gc> *v = NULL;
1739
1740 if (vals)
1741 {
1742 vec_alloc (v, list_length (vals));
1743 for (t = vals; t; t = TREE_CHAIN (t))
1744 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1745 }
1746
1747 return build_constructor (type, v);
1748 }
1749
1750 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1751 of elements, provided as index/value pairs. */
1752
1753 tree
1754 build_constructor_va (tree type, int nelts, ...)
1755 {
1756 vec<constructor_elt, va_gc> *v = NULL;
1757 va_list p;
1758
1759 va_start (p, nelts);
1760 vec_alloc (v, nelts);
1761 while (nelts--)
1762 {
1763 tree index = va_arg (p, tree);
1764 tree value = va_arg (p, tree);
1765 CONSTRUCTOR_APPEND_ELT (v, index, value);
1766 }
1767 va_end (p);
1768 return build_constructor (type, v);
1769 }
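
/* As an illustration, a two-element initializer { A, B } indexed 0 and 1
   can be built as
       build_constructor_va (type, 2, size_int (0), a, size_int (1), b);
   where TYPE, A and B are assumed to be suitable trees; the index/value
   arguments are consumed pairwise from the va_list.  */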
1770
1771 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1772
1773 tree
1774 build_fixed (tree type, FIXED_VALUE_TYPE f)
1775 {
1776 tree v;
1777 FIXED_VALUE_TYPE *fp;
1778
1779 v = make_node (FIXED_CST);
1780 fp = ggc_alloc<fixed_value> ();
1781 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1782
1783 TREE_TYPE (v) = type;
1784 TREE_FIXED_CST_PTR (v) = fp;
1785 return v;
1786 }
1787
1788 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1789
1790 tree
1791 build_real (tree type, REAL_VALUE_TYPE d)
1792 {
1793 tree v;
1794 REAL_VALUE_TYPE *dp;
1795 int overflow = 0;
1796
1797 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1798 Consider doing it via real_convert now. */
1799
1800 v = make_node (REAL_CST);
1801 dp = ggc_alloc<real_value> ();
1802 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1803
1804 TREE_TYPE (v) = type;
1805 TREE_REAL_CST_PTR (v) = dp;
1806 TREE_OVERFLOW (v) = overflow;
1807 return v;
1808 }
1809
 1810 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
 1811    node I, interpreted in the mode of TYPE (or VOIDmode if TYPE is NULL).  */
1812
1813 REAL_VALUE_TYPE
1814 real_value_from_int_cst (const_tree type, const_tree i)
1815 {
1816 REAL_VALUE_TYPE d;
1817
1818 /* Clear all bits of the real value type so that we can later do
1819 bitwise comparisons to see if two values are the same. */
1820 memset (&d, 0, sizeof d);
1821
1822 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1823 TYPE_SIGN (TREE_TYPE (i)));
1824 return d;
1825 }
1826
1827 /* Given a tree representing an integer constant I, return a tree
1828 representing the same value as a floating-point constant of type TYPE. */
1829
1830 tree
1831 build_real_from_int_cst (tree type, const_tree i)
1832 {
1833 tree v;
1834 int overflow = TREE_OVERFLOW (i);
1835
1836 v = build_real (type, real_value_from_int_cst (type, i));
1837
1838 TREE_OVERFLOW (v) |= overflow;
1839 return v;
1840 }
1841
1842 /* Return a newly constructed STRING_CST node whose value is
1843 the LEN characters at STR.
1844 Note that for a C string literal, LEN should include the trailing NUL.
1845 The TREE_TYPE is not initialized. */
1846
1847 tree
1848 build_string (int len, const char *str)
1849 {
1850 tree s;
1851 size_t length;
1852
1853 /* Do not waste bytes provided by padding of struct tree_string. */
1854 length = len + offsetof (struct tree_string, str) + 1;
1855
1856 record_node_allocation_statistics (STRING_CST, length);
1857
1858 s = (tree) ggc_internal_alloc (length);
1859
1860 memset (s, 0, sizeof (struct tree_typed));
1861 TREE_SET_CODE (s, STRING_CST);
1862 TREE_CONSTANT (s) = 1;
1863 TREE_STRING_LENGTH (s) = len;
1864 memcpy (s->string.str, str, len);
1865 s->string.str[len] = '\0';
1866
1867 return s;
1868 }
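
/* For a C string literal the conventional call is
       build_string (strlen (str) + 1, str)
   so that the trailing NUL is counted in TREE_STRING_LENGTH; the caller
   must still set TREE_TYPE, typically to an array type of matching
   length.  */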
1869
1870 /* Return a newly constructed COMPLEX_CST node whose value is
1871 specified by the real and imaginary parts REAL and IMAG.
1872 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1873 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1874
1875 tree
1876 build_complex (tree type, tree real, tree imag)
1877 {
1878 tree t = make_node (COMPLEX_CST);
1879
1880 TREE_REALPART (t) = real;
1881 TREE_IMAGPART (t) = imag;
1882 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1883 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1884 return t;
1885 }
1886
1887 /* Return a constant of arithmetic type TYPE which is the
1888 multiplicative identity of the set TYPE. */
1889
1890 tree
1891 build_one_cst (tree type)
1892 {
1893 switch (TREE_CODE (type))
1894 {
1895 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1896 case POINTER_TYPE: case REFERENCE_TYPE:
1897 case OFFSET_TYPE:
1898 return build_int_cst (type, 1);
1899
1900 case REAL_TYPE:
1901 return build_real (type, dconst1);
1902
1903 case FIXED_POINT_TYPE:
1904 /* We can only generate 1 for accum types. */
1905 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1906 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1907
1908 case VECTOR_TYPE:
1909 {
1910 tree scalar = build_one_cst (TREE_TYPE (type));
1911
1912 return build_vector_from_val (type, scalar);
1913 }
1914
1915 case COMPLEX_TYPE:
1916 return build_complex (type,
1917 build_one_cst (TREE_TYPE (type)),
1918 build_zero_cst (TREE_TYPE (type)));
1919
1920 default:
1921 gcc_unreachable ();
1922 }
1923 }
1924
1925 /* Return an integer of type TYPE containing all 1's in as much precision as
1926 it contains, or a complex or vector whose subparts are such integers. */
1927
1928 tree
1929 build_all_ones_cst (tree type)
1930 {
1931 if (TREE_CODE (type) == COMPLEX_TYPE)
1932 {
1933 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1934 return build_complex (type, scalar, scalar);
1935 }
1936 else
1937 return build_minus_one_cst (type);
1938 }
1939
1940 /* Return a constant of arithmetic type TYPE which is the
1941 opposite of the multiplicative identity of the set TYPE. */
1942
1943 tree
1944 build_minus_one_cst (tree type)
1945 {
1946 switch (TREE_CODE (type))
1947 {
1948 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1949 case POINTER_TYPE: case REFERENCE_TYPE:
1950 case OFFSET_TYPE:
1951 return build_int_cst (type, -1);
1952
1953 case REAL_TYPE:
1954 return build_real (type, dconstm1);
1955
1956 case FIXED_POINT_TYPE:
 1957      /* We can only generate -1 for accum types.  */
1958 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1959 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1960 TYPE_MODE (type)));
1961
1962 case VECTOR_TYPE:
1963 {
1964 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1965
1966 return build_vector_from_val (type, scalar);
1967 }
1968
1969 case COMPLEX_TYPE:
1970 return build_complex (type,
1971 build_minus_one_cst (TREE_TYPE (type)),
1972 build_zero_cst (TREE_TYPE (type)));
1973
1974 default:
1975 gcc_unreachable ();
1976 }
1977 }
1978
1979 /* Build 0 constant of type TYPE. This is used by constructor folding
1980 and thus the constant should be represented in memory by
1981 zero(es). */
1982
1983 tree
1984 build_zero_cst (tree type)
1985 {
1986 switch (TREE_CODE (type))
1987 {
1988 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1989 case POINTER_TYPE: case REFERENCE_TYPE:
1990 case OFFSET_TYPE: case NULLPTR_TYPE:
1991 return build_int_cst (type, 0);
1992
1993 case REAL_TYPE:
1994 return build_real (type, dconst0);
1995
1996 case FIXED_POINT_TYPE:
1997 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1998
1999 case VECTOR_TYPE:
2000 {
2001 tree scalar = build_zero_cst (TREE_TYPE (type));
2002
2003 return build_vector_from_val (type, scalar);
2004 }
2005
2006 case COMPLEX_TYPE:
2007 {
2008 tree zero = build_zero_cst (TREE_TYPE (type));
2009
2010 return build_complex (type, zero, zero);
2011 }
2012
2013 default:
2014 if (!AGGREGATE_TYPE_P (type))
2015 return fold_convert (type, integer_zero_node);
2016 return build_constructor (type, NULL);
2017 }
2018 }
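/* Usage sketch (illustrative only): build_zero_cst is the generic way
   to obtain a type-correct zero, e.g.

       tree zero = build_zero_cst (type);

   For scalar types this matches build_int_cst (type, 0) or
   build_real (type, dconst0); for aggregates it falls through to an
   empty CONSTRUCTOR, which is represented in memory as all zero
   bytes, as the comment above requires.  */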
2019
2020
2021 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2022
2023 tree
2024 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2025 {
2026 tree t;
2027 size_t length = (offsetof (struct tree_binfo, base_binfos)
2028 + vec<tree, va_gc>::embedded_size (base_binfos));
2029
2030 record_node_allocation_statistics (TREE_BINFO, length);
2031
2032 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2033
2034 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2035
2036 TREE_SET_CODE (t, TREE_BINFO);
2037
2038 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2039
2040 return t;
2041 }
2042
2043 /* Create a CASE_LABEL_EXPR tree node and return it. */
2044
2045 tree
2046 build_case_label (tree low_value, tree high_value, tree label_decl)
2047 {
2048 tree t = make_node (CASE_LABEL_EXPR);
2049
2050 TREE_TYPE (t) = void_type_node;
2051 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2052
2053 CASE_LOW (t) = low_value;
2054 CASE_HIGH (t) = high_value;
2055 CASE_LABEL (t) = label_decl;
2056 CASE_CHAIN (t) = NULL_TREE;
2057
2058 return t;
2059 }
2060
2061 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2062 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2063 The latter determines the length of the HOST_WIDE_INT vector. */
2064
2065 tree
2066 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2067 {
2068 tree t;
2069 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2070 + sizeof (struct tree_int_cst));
2071
2072 gcc_assert (len);
2073 record_node_allocation_statistics (INTEGER_CST, length);
2074
2075 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2076
2077 TREE_SET_CODE (t, INTEGER_CST);
2078 TREE_INT_CST_NUNITS (t) = len;
2079 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2080 /* to_offset can only be applied to trees that are offset_int-sized
2081 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2082 must be exactly the precision of offset_int and so LEN is correct. */
2083 if (ext_len <= OFFSET_INT_ELTS)
2084 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2085 else
2086 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2087
2088 TREE_CONSTANT (t) = 1;
2089
2090 return t;
2091 }
2092
2093 /* Build a newly constructed TREE_VEC node of length LEN. */
2094
2095 tree
2096 make_tree_vec_stat (int len MEM_STAT_DECL)
2097 {
2098 tree t;
2099 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2100
2101 record_node_allocation_statistics (TREE_VEC, length);
2102
2103 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2104
2105 TREE_SET_CODE (t, TREE_VEC);
2106 TREE_VEC_LENGTH (t) = len;
2107
2108 return t;
2109 }
2110
2111 /* Grow a TREE_VEC node to new length LEN. */
2112
2113 tree
2114 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2115 {
2116 gcc_assert (TREE_CODE (v) == TREE_VEC);
2117
2118 int oldlen = TREE_VEC_LENGTH (v);
2119 gcc_assert (len > oldlen);
2120
2121 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2122 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2123
2124 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2125
2126 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2127
2128 TREE_VEC_LENGTH (v) = len;
2129
2130 return v;
2131 }
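/* Usage sketch (illustrative only, assuming the usual non-_stat
   convenience wrappers make_tree_vec and grow_tree_vec): TREE_VEC
   nodes have a fixed length, so growing one reallocates the node and
   the caller must use the returned pointer:

       tree v = make_tree_vec (2);
       TREE_VEC_ELT (v, 0) = integer_zero_node;
       TREE_VEC_ELT (v, 1) = integer_one_node;
       v = grow_tree_vec (v, 4);

   The newly added trailing slots are not guaranteed to be cleared, so
   they should be filled before use.  */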
2132 \f
2133 /* Return 1 if EXPR is the integer constant zero or a complex constant
2134 of zero. */
2135
2136 int
2137 integer_zerop (const_tree expr)
2138 {
2139 STRIP_NOPS (expr);
2140
2141 switch (TREE_CODE (expr))
2142 {
2143 case INTEGER_CST:
2144 return wi::eq_p (expr, 0);
2145 case COMPLEX_CST:
2146 return (integer_zerop (TREE_REALPART (expr))
2147 && integer_zerop (TREE_IMAGPART (expr)));
2148 case VECTOR_CST:
2149 {
2150 unsigned i;
2151 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2152 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2153 return false;
2154 return true;
2155 }
2156 default:
2157 return false;
2158 }
2159 }
2160
2161 /* Return 1 if EXPR is the integer constant one or the corresponding
2162 complex constant. */
2163
2164 int
2165 integer_onep (const_tree expr)
2166 {
2167 STRIP_NOPS (expr);
2168
2169 switch (TREE_CODE (expr))
2170 {
2171 case INTEGER_CST:
2172 return wi::eq_p (wi::to_widest (expr), 1);
2173 case COMPLEX_CST:
2174 return (integer_onep (TREE_REALPART (expr))
2175 && integer_zerop (TREE_IMAGPART (expr)));
2176 case VECTOR_CST:
2177 {
2178 unsigned i;
2179 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2180 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2181 return false;
2182 return true;
2183 }
2184 default:
2185 return false;
2186 }
2187 }
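/* Usage sketch (illustrative only): both predicates STRIP_NOPS first,
   so casts of constants are accepted as well, e.g.

       tree one = build_int_cst (integer_type_node, 1);
       gcc_checking_assert (integer_onep (one));
       gcc_checking_assert (!integer_zerop (one));

   For COMPLEX_CST and VECTOR_CST arguments the checks recurse into
   the component constants, as shown in the cases above.  */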
2188
2189 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2190 return 1 if every piece is the integer constant one. */
2191
2192 int
2193 integer_each_onep (const_tree expr)
2194 {
2195 STRIP_NOPS (expr);
2196
2197 if (TREE_CODE (expr) == COMPLEX_CST)
2198 return (integer_onep (TREE_REALPART (expr))
2199 && integer_onep (TREE_IMAGPART (expr)));
2200 else
2201 return integer_onep (expr);
2202 }
2203
2204 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2205 it contains, or a complex or vector whose subparts are such integers. */
2206
2207 int
2208 integer_all_onesp (const_tree expr)
2209 {
2210 STRIP_NOPS (expr);
2211
2212 if (TREE_CODE (expr) == COMPLEX_CST
2213 && integer_all_onesp (TREE_REALPART (expr))
2214 && integer_all_onesp (TREE_IMAGPART (expr)))
2215 return 1;
2216
2217 else if (TREE_CODE (expr) == VECTOR_CST)
2218 {
2219 unsigned i;
2220 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2221 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2222 return 0;
2223 return 1;
2224 }
2225
2226 else if (TREE_CODE (expr) != INTEGER_CST)
2227 return 0;
2228
2229 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2230 }
2231
2232 /* Return 1 if EXPR is the integer constant minus one. */
2233
2234 int
2235 integer_minus_onep (const_tree expr)
2236 {
2237 STRIP_NOPS (expr);
2238
2239 if (TREE_CODE (expr) == COMPLEX_CST)
2240 return (integer_all_onesp (TREE_REALPART (expr))
2241 && integer_zerop (TREE_IMAGPART (expr)));
2242 else
2243 return integer_all_onesp (expr);
2244 }
2245
2246 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2247 one bit on). */
2248
2249 int
2250 integer_pow2p (const_tree expr)
2251 {
2252 STRIP_NOPS (expr);
2253
2254 if (TREE_CODE (expr) == COMPLEX_CST
2255 && integer_pow2p (TREE_REALPART (expr))
2256 && integer_zerop (TREE_IMAGPART (expr)))
2257 return 1;
2258
2259 if (TREE_CODE (expr) != INTEGER_CST)
2260 return 0;
2261
2262 return wi::popcount (expr) == 1;
2263 }
2264
2265 /* Return 1 if EXPR is an integer constant other than zero or a
2266 complex constant other than zero. */
2267
2268 int
2269 integer_nonzerop (const_tree expr)
2270 {
2271 STRIP_NOPS (expr);
2272
2273 return ((TREE_CODE (expr) == INTEGER_CST
2274 && !wi::eq_p (expr, 0))
2275 || (TREE_CODE (expr) == COMPLEX_CST
2276 && (integer_nonzerop (TREE_REALPART (expr))
2277 || integer_nonzerop (TREE_IMAGPART (expr)))));
2278 }
2279
2280 /* Return 1 if EXPR is the integer constant one. For vector,
2281 return 1 if every piece is the integer constant minus one
2282 (representing the value TRUE). */
2283
2284 int
2285 integer_truep (const_tree expr)
2286 {
2287 STRIP_NOPS (expr);
2288
2289 if (TREE_CODE (expr) == VECTOR_CST)
2290 return integer_all_onesp (expr);
2291 return integer_onep (expr);
2292 }
2293
2294 /* Return 1 if EXPR is the fixed-point constant zero. */
2295
2296 int
2297 fixed_zerop (const_tree expr)
2298 {
2299 return (TREE_CODE (expr) == FIXED_CST
2300 && TREE_FIXED_CST (expr).data.is_zero ());
2301 }
2302
2303 /* Return the base-2 logarithm of a tree node known to be a
2304 power of two. */
2305
2306 int
2307 tree_log2 (const_tree expr)
2308 {
2309 STRIP_NOPS (expr);
2310
2311 if (TREE_CODE (expr) == COMPLEX_CST)
2312 return tree_log2 (TREE_REALPART (expr));
2313
2314 return wi::exact_log2 (expr);
2315 }
2316
2317 /* Similar, but return the largest integer Y such that 2 ** Y is less
2318 than or equal to EXPR. */
2319
2320 int
2321 tree_floor_log2 (const_tree expr)
2322 {
2323 STRIP_NOPS (expr);
2324
2325 if (TREE_CODE (expr) == COMPLEX_CST)
2326 return tree_log2 (TREE_REALPART (expr));
2327
2328 return wi::floor_log2 (expr);
2329 }
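/* Worked example (illustrative only): for an exact power of two the
   two functions agree; otherwise tree_floor_log2 rounds down:

       tree c8 = build_int_cst (integer_type_node, 8);
       tree c9 = build_int_cst (integer_type_node, 9);
       tree_log2 (c8);         yields 3
       tree_floor_log2 (c9);   yields 3

   tree_log2 is only meaningful when integer_pow2p holds for its
   argument; for other values wi::exact_log2 makes it return a
   negative result.  */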
2330
2331 /* Return number of known trailing zero bits in EXPR, or, if the value of
2332 EXPR is known to be zero, the precision of its type. */
2333
2334 unsigned int
2335 tree_ctz (const_tree expr)
2336 {
2337 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2338 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2339 return 0;
2340
2341 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2342 switch (TREE_CODE (expr))
2343 {
2344 case INTEGER_CST:
2345 ret1 = wi::ctz (expr);
2346 return MIN (ret1, prec);
2347 case SSA_NAME:
2348 ret1 = wi::ctz (get_nonzero_bits (expr));
2349 return MIN (ret1, prec);
2350 case PLUS_EXPR:
2351 case MINUS_EXPR:
2352 case BIT_IOR_EXPR:
2353 case BIT_XOR_EXPR:
2354 case MIN_EXPR:
2355 case MAX_EXPR:
2356 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2357 if (ret1 == 0)
2358 return ret1;
2359 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2360 return MIN (ret1, ret2);
2361 case POINTER_PLUS_EXPR:
2362 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2363 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2364 /* The second operand is sizetype, which could in theory be
2365 wider than the pointer's precision. Make sure we never
2366 return more than prec. */
2367 ret2 = MIN (ret2, prec);
2368 return MIN (ret1, ret2);
2369 case BIT_AND_EXPR:
2370 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2371 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2372 return MAX (ret1, ret2);
2373 case MULT_EXPR:
2374 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2375 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2376 return MIN (ret1 + ret2, prec);
2377 case LSHIFT_EXPR:
2378 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2379 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2380 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2381 {
2382 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2383 return MIN (ret1 + ret2, prec);
2384 }
2385 return ret1;
2386 case RSHIFT_EXPR:
2387 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2388 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2389 {
2390 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2391 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2392 if (ret1 > ret2)
2393 return ret1 - ret2;
2394 }
2395 return 0;
2396 case TRUNC_DIV_EXPR:
2397 case CEIL_DIV_EXPR:
2398 case FLOOR_DIV_EXPR:
2399 case ROUND_DIV_EXPR:
2400 case EXACT_DIV_EXPR:
2401 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2402 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2403 {
2404 int l = tree_log2 (TREE_OPERAND (expr, 1));
2405 if (l >= 0)
2406 {
2407 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2408 ret2 = l;
2409 if (ret1 > ret2)
2410 return ret1 - ret2;
2411 }
2412 }
2413 return 0;
2414 CASE_CONVERT:
2415 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2416 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2417 ret1 = prec;
2418 return MIN (ret1, prec);
2419 case SAVE_EXPR:
2420 return tree_ctz (TREE_OPERAND (expr, 0));
2421 case COND_EXPR:
2422 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2423 if (ret1 == 0)
2424 return 0;
2425 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2426 return MIN (ret1, ret2);
2427 case COMPOUND_EXPR:
2428 return tree_ctz (TREE_OPERAND (expr, 1));
2429 case ADDR_EXPR:
2430 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2431 if (ret1 > BITS_PER_UNIT)
2432 {
2433 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2434 return MIN (ret1, prec);
2435 }
2436 return 0;
2437 default:
2438 return 0;
2439 }
2440 }
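/* Worked example (illustrative only): the recursion above composes
   the trailing-zero counts of subexpressions. For an expression of
   the rough shape (x * 4) + (y * 8), with x and y of a 32-bit type
   and nothing else known about them:

       MULT_EXPR x, 4   ->  0 + 2 = 2 known trailing zeros
       MULT_EXPR y, 8   ->  0 + 3 = 3 known trailing zeros
       PLUS_EXPR        ->  MIN (2, 3) = 2

   so tree_ctz reports at least 2 trailing zero bits, always capped at
   the precision of the type.  */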
2441
2442 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2443 decimal float constants, so don't return 1 for them. */
2444
2445 int
2446 real_zerop (const_tree expr)
2447 {
2448 STRIP_NOPS (expr);
2449
2450 switch (TREE_CODE (expr))
2451 {
2452 case REAL_CST:
2453 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2454 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2455 case COMPLEX_CST:
2456 return real_zerop (TREE_REALPART (expr))
2457 && real_zerop (TREE_IMAGPART (expr));
2458 case VECTOR_CST:
2459 {
2460 unsigned i;
2461 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2462 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2463 return false;
2464 return true;
2465 }
2466 default:
2467 return false;
2468 }
2469 }
2470
2471 /* Return 1 if EXPR is the real constant one in real or complex form.
2472 Trailing zeroes matter for decimal float constants, so don't return
2473 1 for them. */
2474
2475 int
2476 real_onep (const_tree expr)
2477 {
2478 STRIP_NOPS (expr);
2479
2480 switch (TREE_CODE (expr))
2481 {
2482 case REAL_CST:
2483 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2484 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2485 case COMPLEX_CST:
2486 return real_onep (TREE_REALPART (expr))
2487 && real_zerop (TREE_IMAGPART (expr));
2488 case VECTOR_CST:
2489 {
2490 unsigned i;
2491 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2492 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2493 return false;
2494 return true;
2495 }
2496 default:
2497 return false;
2498 }
2499 }
2500
2501 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2502 matter for decimal float constants, so don't return 1 for them. */
2503
2504 int
2505 real_minus_onep (const_tree expr)
2506 {
2507 STRIP_NOPS (expr);
2508
2509 switch (TREE_CODE (expr))
2510 {
2511 case REAL_CST:
2512 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2513 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2514 case COMPLEX_CST:
2515 return real_minus_onep (TREE_REALPART (expr))
2516 && real_zerop (TREE_IMAGPART (expr));
2517 case VECTOR_CST:
2518 {
2519 unsigned i;
2520 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2521 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2522 return false;
2523 return true;
2524 }
2525 default:
2526 return false;
2527 }
2528 }
2529
2530 /* Nonzero if EXP is a constant or a cast of a constant. */
2531
2532 int
2533 really_constant_p (const_tree exp)
2534 {
2535 /* This is not quite the same as STRIP_NOPS. It does more. */
2536 while (CONVERT_EXPR_P (exp)
2537 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2538 exp = TREE_OPERAND (exp, 0);
2539 return TREE_CONSTANT (exp);
2540 }
2541 \f
2542 /* Return first list element whose TREE_VALUE is ELEM.
2543 Return 0 if ELEM is not in LIST. */
2544
2545 tree
2546 value_member (tree elem, tree list)
2547 {
2548 while (list)
2549 {
2550 if (elem == TREE_VALUE (list))
2551 return list;
2552 list = TREE_CHAIN (list);
2553 }
2554 return NULL_TREE;
2555 }
2556
2557 /* Return first list element whose TREE_PURPOSE is ELEM.
2558 Return 0 if ELEM is not in LIST. */
2559
2560 tree
2561 purpose_member (const_tree elem, tree list)
2562 {
2563 while (list)
2564 {
2565 if (elem == TREE_PURPOSE (list))
2566 return list;
2567 list = TREE_CHAIN (list);
2568 }
2569 return NULL_TREE;
2570 }
2571
2572 /* Return true if ELEM is in V. */
2573
2574 bool
2575 vec_member (const_tree elem, vec<tree, va_gc> *v)
2576 {
2577 unsigned ix;
2578 tree t;
2579 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2580 if (elem == t)
2581 return true;
2582 return false;
2583 }
2584
2585 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2586 NULL_TREE. */
2587
2588 tree
2589 chain_index (int idx, tree chain)
2590 {
2591 for (; chain && idx > 0; --idx)
2592 chain = TREE_CHAIN (chain);
2593 return chain;
2594 }
2595
2596 /* Return nonzero if ELEM is part of the chain CHAIN. */
2597
2598 int
2599 chain_member (const_tree elem, const_tree chain)
2600 {
2601 while (chain)
2602 {
2603 if (elem == chain)
2604 return 1;
2605 chain = DECL_CHAIN (chain);
2606 }
2607
2608 return 0;
2609 }
2610
2611 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2612 We expect a null pointer to mark the end of the chain.
2613 This is the Lisp primitive `length'. */
2614
2615 int
2616 list_length (const_tree t)
2617 {
2618 const_tree p = t;
2619 #ifdef ENABLE_TREE_CHECKING
2620 const_tree q = t;
2621 #endif
2622 int len = 0;
2623
2624 while (p)
2625 {
2626 p = TREE_CHAIN (p);
2627 #ifdef ENABLE_TREE_CHECKING
2628 if (len % 2)
2629 q = TREE_CHAIN (q);
2630 gcc_assert (p != q);
2631 #endif
2632 len++;
2633 }
2634
2635 return len;
2636 }
2637
2638 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2639 UNION_TYPE TYPE, or NULL_TREE if none. */
2640
2641 tree
2642 first_field (const_tree type)
2643 {
2644 tree t = TYPE_FIELDS (type);
2645 while (t && TREE_CODE (t) != FIELD_DECL)
2646 t = TREE_CHAIN (t);
2647 return t;
2648 }
2649
2650 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2651 by modifying the last node in chain 1 to point to chain 2.
2652 This is the Lisp primitive `nconc'. */
2653
2654 tree
2655 chainon (tree op1, tree op2)
2656 {
2657 tree t1;
2658
2659 if (!op1)
2660 return op2;
2661 if (!op2)
2662 return op1;
2663
2664 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2665 continue;
2666 TREE_CHAIN (t1) = op2;
2667
2668 #ifdef ENABLE_TREE_CHECKING
2669 {
2670 tree t2;
2671 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2672 gcc_assert (t2 != t1);
2673 }
2674 #endif
2675
2676 return op1;
2677 }
2678
2679 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2680
2681 tree
2682 tree_last (tree chain)
2683 {
2684 tree next;
2685 if (chain)
2686 while ((next = TREE_CHAIN (chain)))
2687 chain = next;
2688 return chain;
2689 }
2690
2691 /* Reverse the order of elements in the chain T,
2692 and return the new head of the chain (old last element). */
2693
2694 tree
2695 nreverse (tree t)
2696 {
2697 tree prev = 0, decl, next;
2698 for (decl = t; decl; decl = next)
2699 {
2700 /* We shouldn't be using this function to reverse BLOCK chains; we
2701 have blocks_nreverse for that. */
2702 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2703 next = TREE_CHAIN (decl);
2704 TREE_CHAIN (decl) = prev;
2705 prev = decl;
2706 }
2707 return prev;
2708 }
2709 \f
2710 /* Return a newly created TREE_LIST node whose
2711 purpose and value fields are PARM and VALUE. */
2712
2713 tree
2714 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2715 {
2716 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2717 TREE_PURPOSE (t) = parm;
2718 TREE_VALUE (t) = value;
2719 return t;
2720 }
2721
2722 /* Build a chain of TREE_LIST nodes from a vector. */
2723
2724 tree
2725 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2726 {
2727 tree ret = NULL_TREE;
2728 tree *pp = &ret;
2729 unsigned int i;
2730 tree t;
2731 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2732 {
2733 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2734 pp = &TREE_CHAIN (*pp);
2735 }
2736 return ret;
2737 }
2738
2739 /* Return a newly created TREE_LIST node whose
2740 purpose and value fields are PURPOSE and VALUE
2741 and whose TREE_CHAIN is CHAIN. */
2742
2743 tree
2744 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2745 {
2746 tree node;
2747
2748 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2749 memset (node, 0, sizeof (struct tree_common));
2750
2751 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2752
2753 TREE_SET_CODE (node, TREE_LIST);
2754 TREE_CHAIN (node) = chain;
2755 TREE_PURPOSE (node) = purpose;
2756 TREE_VALUE (node) = value;
2757 return node;
2758 }
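/* Usage sketch (illustrative only): tree_cons, nreverse and chainon
   are the usual idioms for assembling TREE_CHAIN lists, e.g.

       tree list = NULL_TREE;
       list = tree_cons (NULL_TREE, integer_zero_node, list);
       list = tree_cons (NULL_TREE, integer_one_node, list);
       list = nreverse (list);
       list = chainon (list, build_tree_list (NULL_TREE, size_zero_node));
       gcc_checking_assert (list_length (list) == 3);

   nreverse and chainon both rewrite TREE_CHAIN links in place, so the
   old list heads must not be treated as independent lists afterwards.  */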
2759
2760 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2761 trees. */
2762
2763 vec<tree, va_gc> *
2764 ctor_to_vec (tree ctor)
2765 {
2766 vec<tree, va_gc> *vec;
2767 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2768 unsigned int ix;
2769 tree val;
2770
2771 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2772 vec->quick_push (val);
2773
2774 return vec;
2775 }
2776 \f
2777 /* Return the size nominally occupied by an object of type TYPE
2778 when it resides in memory. The value is measured in units of bytes,
2779 and its data type is that normally used for type sizes
2780 (which is the first type created by make_signed_type or
2781 make_unsigned_type). */
2782
2783 tree
2784 size_in_bytes (const_tree type)
2785 {
2786 tree t;
2787
2788 if (type == error_mark_node)
2789 return integer_zero_node;
2790
2791 type = TYPE_MAIN_VARIANT (type);
2792 t = TYPE_SIZE_UNIT (type);
2793
2794 if (t == 0)
2795 {
2796 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2797 return size_zero_node;
2798 }
2799
2800 return t;
2801 }
2802
2803 /* Return the size of TYPE (in bytes) as a wide integer
2804 or return -1 if the size can vary or is larger than an integer. */
2805
2806 HOST_WIDE_INT
2807 int_size_in_bytes (const_tree type)
2808 {
2809 tree t;
2810
2811 if (type == error_mark_node)
2812 return 0;
2813
2814 type = TYPE_MAIN_VARIANT (type);
2815 t = TYPE_SIZE_UNIT (type);
2816
2817 if (t && tree_fits_uhwi_p (t))
2818 return TREE_INT_CST_LOW (t);
2819 else
2820 return -1;
2821 }
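/* Usage sketch (illustrative only): the two entry points differ in
   how variable-sized or very large types are reported:

       HOST_WIDE_INT s = int_size_in_bytes (type);
       if (s == -1)
         {
           tree st = size_in_bytes (type);
           ... use the (possibly non-constant) tree-valued size ...
         }

   size_in_bytes always returns a tree, while int_size_in_bytes
   collapses anything that does not satisfy tree_fits_uhwi_p to -1.  */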
2822
2823 /* Return the maximum size of TYPE (in bytes) as a wide integer
2824 or return -1 if the size can vary or is larger than an integer. */
2825
2826 HOST_WIDE_INT
2827 max_int_size_in_bytes (const_tree type)
2828 {
2829 HOST_WIDE_INT size = -1;
2830 tree size_tree;
2831
2832 /* If this is an array type, check for a possible MAX_SIZE attached. */
2833
2834 if (TREE_CODE (type) == ARRAY_TYPE)
2835 {
2836 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2837
2838 if (size_tree && tree_fits_uhwi_p (size_tree))
2839 size = tree_to_uhwi (size_tree);
2840 }
2841
2842 /* If we still haven't been able to get a size, see if the language
2843 can compute a maximum size. */
2844
2845 if (size == -1)
2846 {
2847 size_tree = lang_hooks.types.max_size (type);
2848
2849 if (size_tree && tree_fits_uhwi_p (size_tree))
2850 size = tree_to_uhwi (size_tree);
2851 }
2852
2853 return size;
2854 }
2855 \f
2856 /* Return the bit position of FIELD, in bits from the start of the record.
2857 This is a tree of type bitsizetype. */
2858
2859 tree
2860 bit_position (const_tree field)
2861 {
2862 return bit_from_pos (DECL_FIELD_OFFSET (field),
2863 DECL_FIELD_BIT_OFFSET (field));
2864 }
2865 \f
2866 /* Return the byte position of FIELD, in bytes from the start of the record.
2867 This is a tree of type sizetype. */
2868
2869 tree
2870 byte_position (const_tree field)
2871 {
2872 return byte_from_pos (DECL_FIELD_OFFSET (field),
2873 DECL_FIELD_BIT_OFFSET (field));
2874 }
2875
2876 /* Likewise, but return as an integer. It must be representable in
2877 that way (since it could be a signed value, we don't have the
2878 option of returning -1 like int_size_in_bytes can). */
2879
2880 HOST_WIDE_INT
2881 int_byte_position (const_tree field)
2882 {
2883 return tree_to_shwi (byte_position (field));
2884 }
2885 \f
2886 /* Return the strictest alignment, in bits, that T is known to have. */
2887
2888 unsigned int
2889 expr_align (const_tree t)
2890 {
2891 unsigned int align0, align1;
2892
2893 switch (TREE_CODE (t))
2894 {
2895 CASE_CONVERT: case NON_LVALUE_EXPR:
2896 /* If we have conversions, we know that the alignment of the
2897 object must meet each of the alignments of the types. */
2898 align0 = expr_align (TREE_OPERAND (t, 0));
2899 align1 = TYPE_ALIGN (TREE_TYPE (t));
2900 return MAX (align0, align1);
2901
2902 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2903 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2904 case CLEANUP_POINT_EXPR:
2905 /* These don't change the alignment of an object. */
2906 return expr_align (TREE_OPERAND (t, 0));
2907
2908 case COND_EXPR:
2909 /* The best we can do is say that the alignment is the least aligned
2910 of the two arms. */
2911 align0 = expr_align (TREE_OPERAND (t, 1));
2912 align1 = expr_align (TREE_OPERAND (t, 2));
2913 return MIN (align0, align1);
2914
2915 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2916 meaningfully; it's always 1. */
2917 case LABEL_DECL: case CONST_DECL:
2918 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2919 case FUNCTION_DECL:
2920 gcc_assert (DECL_ALIGN (t) != 0);
2921 return DECL_ALIGN (t);
2922
2923 default:
2924 break;
2925 }
2926
2927 /* Otherwise take the alignment from that of the type. */
2928 return TYPE_ALIGN (TREE_TYPE (t));
2929 }
2930 \f
2931 /* Return, as a tree node, the number of elements for TYPE (which is an
2932 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2933
2934 tree
2935 array_type_nelts (const_tree type)
2936 {
2937 tree index_type, min, max;
2938
2939 /* If they did it with unspecified bounds, then we should have already
2940 given an error about it before we got here. */
2941 if (! TYPE_DOMAIN (type))
2942 return error_mark_node;
2943
2944 index_type = TYPE_DOMAIN (type);
2945 min = TYPE_MIN_VALUE (index_type);
2946 max = TYPE_MAX_VALUE (index_type);
2947
2948 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2949 if (!max)
2950 return error_mark_node;
2951
2952 return (integer_zerop (min)
2953 ? max
2954 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2955 }
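/* Worked example (illustrative only): for a C array type such as
   int a[10], the domain runs from 0 to 9, so

       tree n = array_type_nelts (type);

   yields the INTEGER_CST 9, i.e. the element count minus one rather
   than the element count itself; callers iterate from 0 to this value
   inclusive or add one back as needed.  */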
2956 \f
2957 /* If arg is static -- a reference to an object in static storage -- then
2958 return the object. This is not the same as the C meaning of `static'.
2959 If arg isn't static, return NULL. */
2960
2961 tree
2962 staticp (tree arg)
2963 {
2964 switch (TREE_CODE (arg))
2965 {
2966 case FUNCTION_DECL:
2967 /* Nested functions are static, even though taking their address will
2968 involve a trampoline as we unnest the nested function and create
2969 the trampoline on the tree level. */
2970 return arg;
2971
2972 case VAR_DECL:
2973 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2974 && ! DECL_THREAD_LOCAL_P (arg)
2975 && ! DECL_DLLIMPORT_P (arg)
2976 ? arg : NULL);
2977
2978 case CONST_DECL:
2979 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2980 ? arg : NULL);
2981
2982 case CONSTRUCTOR:
2983 return TREE_STATIC (arg) ? arg : NULL;
2984
2985 case LABEL_DECL:
2986 case STRING_CST:
2987 return arg;
2988
2989 case COMPONENT_REF:
2990 /* If the thing being referenced is not a field, then it is
2991 something language specific. */
2992 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2993
2994 /* If we are referencing a bitfield, we can't evaluate an
2995 ADDR_EXPR at compile time and so it isn't a constant. */
2996 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2997 return NULL;
2998
2999 return staticp (TREE_OPERAND (arg, 0));
3000
3001 case BIT_FIELD_REF:
3002 return NULL;
3003
3004 case INDIRECT_REF:
3005 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3006
3007 case ARRAY_REF:
3008 case ARRAY_RANGE_REF:
3009 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3010 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3011 return staticp (TREE_OPERAND (arg, 0));
3012 else
3013 return NULL;
3014
3015 case COMPOUND_LITERAL_EXPR:
3016 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3017
3018 default:
3019 return NULL;
3020 }
3021 }
3022
3023 \f
3024
3025
3026 /* Return whether OP is a DECL whose address is function-invariant. */
3027
3028 bool
3029 decl_address_invariant_p (const_tree op)
3030 {
3031 /* The conditions below are slightly less strict than the one in
3032 staticp. */
3033
3034 switch (TREE_CODE (op))
3035 {
3036 case PARM_DECL:
3037 case RESULT_DECL:
3038 case LABEL_DECL:
3039 case FUNCTION_DECL:
3040 return true;
3041
3042 case VAR_DECL:
3043 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3044 || DECL_THREAD_LOCAL_P (op)
3045 || DECL_CONTEXT (op) == current_function_decl
3046 || decl_function_context (op) == current_function_decl)
3047 return true;
3048 break;
3049
3050 case CONST_DECL:
3051 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3052 || decl_function_context (op) == current_function_decl)
3053 return true;
3054 break;
3055
3056 default:
3057 break;
3058 }
3059
3060 return false;
3061 }
3062
3063 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3064
3065 bool
3066 decl_address_ip_invariant_p (const_tree op)
3067 {
3068 /* The conditions below are slightly less strict than the one in
3069 staticp. */
3070
3071 switch (TREE_CODE (op))
3072 {
3073 case LABEL_DECL:
3074 case FUNCTION_DECL:
3075 case STRING_CST:
3076 return true;
3077
3078 case VAR_DECL:
3079 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3080 && !DECL_DLLIMPORT_P (op))
3081 || DECL_THREAD_LOCAL_P (op))
3082 return true;
3083 break;
3084
3085 case CONST_DECL:
3086 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3087 return true;
3088 break;
3089
3090 default:
3091 break;
3092 }
3093
3094 return false;
3095 }
3096
3097
3098 /* Return true if T is function-invariant (internal function, does
3099 not handle arithmetic; that's handled in skip_simple_arithmetic and
3100 tree_invariant_p). */
3101
3102 static bool tree_invariant_p (tree t);
3103
3104 static bool
3105 tree_invariant_p_1 (tree t)
3106 {
3107 tree op;
3108
3109 if (TREE_CONSTANT (t)
3110 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3111 return true;
3112
3113 switch (TREE_CODE (t))
3114 {
3115 case SAVE_EXPR:
3116 return true;
3117
3118 case ADDR_EXPR:
3119 op = TREE_OPERAND (t, 0);
3120 while (handled_component_p (op))
3121 {
3122 switch (TREE_CODE (op))
3123 {
3124 case ARRAY_REF:
3125 case ARRAY_RANGE_REF:
3126 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3127 || TREE_OPERAND (op, 2) != NULL_TREE
3128 || TREE_OPERAND (op, 3) != NULL_TREE)
3129 return false;
3130 break;
3131
3132 case COMPONENT_REF:
3133 if (TREE_OPERAND (op, 2) != NULL_TREE)
3134 return false;
3135 break;
3136
3137 default:;
3138 }
3139 op = TREE_OPERAND (op, 0);
3140 }
3141
3142 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3143
3144 default:
3145 break;
3146 }
3147
3148 return false;
3149 }
3150
3151 /* Return true if T is function-invariant. */
3152
3153 static bool
3154 tree_invariant_p (tree t)
3155 {
3156 tree inner = skip_simple_arithmetic (t);
3157 return tree_invariant_p_1 (inner);
3158 }
3159
3160 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3161 Do this to any expression which may be used in more than one place,
3162 but must be evaluated only once.
3163
3164 Normally, expand_expr would reevaluate the expression each time.
3165 Calling save_expr produces something that is evaluated and recorded
3166 the first time expand_expr is called on it. Subsequent calls to
3167 expand_expr just reuse the recorded value.
3168
3169 The call to expand_expr that generates code that actually computes
3170 the value is the first call *at compile time*. Subsequent calls
3171 *at compile time* generate code to use the saved value.
3172 This produces the correct result provided that *at run time* control
3173 always flows through the insns made by the first expand_expr
3174 before reaching the other places where the save_expr was evaluated.
3175 You, the caller of save_expr, must make sure this is so.
3176
3177 Constants, and certain read-only nodes, are returned with no
3178 SAVE_EXPR because that is safe. Expressions containing placeholders
3179 are not touched; see tree.def for an explanation of what these
3180 are used for. */
3181
3182 tree
3183 save_expr (tree expr)
3184 {
3185 tree t = fold (expr);
3186 tree inner;
3187
3188 /* If the tree evaluates to a constant, then we don't want to hide that
3189 fact (i.e. this allows further folding, and direct checks for constants).
3190 However, a read-only object that has side effects cannot be bypassed.
3191 Since it is no problem to reevaluate literals, we just return the
3192 literal node. */
3193 inner = skip_simple_arithmetic (t);
3194 if (TREE_CODE (inner) == ERROR_MARK)
3195 return inner;
3196
3197 if (tree_invariant_p_1 (inner))
3198 return t;
3199
3200 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3201 it means that the size or offset of some field of an object depends on
3202 the value within another field.
3203
3204 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3205 and some variable since it would then need to be both evaluated once and
3206 evaluated more than once. Front-ends must assure this case cannot
3207 happen by surrounding any such subexpressions in their own SAVE_EXPR
3208 and forcing evaluation at the proper time. */
3209 if (contains_placeholder_p (inner))
3210 return t;
3211
3212 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3213 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3214
3215 /* This expression might be placed ahead of a jump to ensure that the
3216 value was computed on both sides of the jump. So make sure it isn't
3217 eliminated as dead. */
3218 TREE_SIDE_EFFECTS (t) = 1;
3219 return t;
3220 }
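/* Usage sketch (illustrative only; EXPR and its type are hypothetical
   here): a caller that needs to use an operand twice while evaluating
   it only once can write roughly

       tree op = save_expr (expr);
       tree sq = fold_build2 (MULT_EXPR, TREE_TYPE (op), op, op);

   If EXPR is already invariant (a constant, a side-effect-free
   read-only node, an existing SAVE_EXPR, ...), it is returned
   unchanged; otherwise both uses share one SAVE_EXPR node and the
   value is computed a single time.  */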
3221
3222 /* Look inside EXPR into any simple arithmetic operations. Return the
3223 outermost non-arithmetic or non-invariant node. */
3224
3225 tree
3226 skip_simple_arithmetic (tree expr)
3227 {
3228 /* We don't care about whether this can be used as an lvalue in this
3229 context. */
3230 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3231 expr = TREE_OPERAND (expr, 0);
3232
3233 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3234 a constant, it will be more efficient to not make another SAVE_EXPR since
3235 it will allow better simplification and GCSE will be able to merge the
3236 computations if they actually occur. */
3237 while (true)
3238 {
3239 if (UNARY_CLASS_P (expr))
3240 expr = TREE_OPERAND (expr, 0);
3241 else if (BINARY_CLASS_P (expr))
3242 {
3243 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3244 expr = TREE_OPERAND (expr, 0);
3245 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3246 expr = TREE_OPERAND (expr, 1);
3247 else
3248 break;
3249 }
3250 else
3251 break;
3252 }
3253
3254 return expr;
3255 }
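/* Worked example (illustrative only): for an expression of the shape
   SAVE_EXPR <x> + 4, the loop above strips the outer PLUS_EXPR
   because its other operand is invariant, so

       tree inner = skip_simple_arithmetic (expr);

   returns the SAVE_EXPR itself.  save_expr relies on this so that
   wrapping "s + 4" does not create a second, redundant SAVE_EXPR
   around an expression whose only varying part is already saved.  */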
3256
3257 /* Look inside EXPR into simple arithmetic operations involving constants.
3258 Return the outermost non-arithmetic or non-constant node. */
3259
3260 tree
3261 skip_simple_constant_arithmetic (tree expr)
3262 {
3263 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3264 expr = TREE_OPERAND (expr, 0);
3265
3266 while (true)
3267 {
3268 if (UNARY_CLASS_P (expr))
3269 expr = TREE_OPERAND (expr, 0);
3270 else if (BINARY_CLASS_P (expr))
3271 {
3272 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3273 expr = TREE_OPERAND (expr, 0);
3274 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3275 expr = TREE_OPERAND (expr, 1);
3276 else
3277 break;
3278 }
3279 else
3280 break;
3281 }
3282
3283 return expr;
3284 }
3285
3286 /* Return which tree structure is used by T. */
3287
3288 enum tree_node_structure_enum
3289 tree_node_structure (const_tree t)
3290 {
3291 const enum tree_code code = TREE_CODE (t);
3292 return tree_node_structure_for_code (code);
3293 }
3294
3295 /* Set various status flags when building a CALL_EXPR object T. */
3296
3297 static void
3298 process_call_operands (tree t)
3299 {
3300 bool side_effects = TREE_SIDE_EFFECTS (t);
3301 bool read_only = false;
3302 int i = call_expr_flags (t);
3303
3304 /* Calls have side-effects, except those to const or pure functions. */
3305 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3306 side_effects = true;
3307 /* Propagate TREE_READONLY of arguments for const functions. */
3308 if (i & ECF_CONST)
3309 read_only = true;
3310
3311 if (!side_effects || read_only)
3312 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3313 {
3314 tree op = TREE_OPERAND (t, i);
3315 if (op && TREE_SIDE_EFFECTS (op))
3316 side_effects = true;
3317 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3318 read_only = false;
3319 }
3320
3321 TREE_SIDE_EFFECTS (t) = side_effects;
3322 TREE_READONLY (t) = read_only;
3323 }
3324 \f
3325 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3326 size or offset that depends on a field within a record. */
3327
3328 bool
3329 contains_placeholder_p (const_tree exp)
3330 {
3331 enum tree_code code;
3332
3333 if (!exp)
3334 return 0;
3335
3336 code = TREE_CODE (exp);
3337 if (code == PLACEHOLDER_EXPR)
3338 return 1;
3339
3340 switch (TREE_CODE_CLASS (code))
3341 {
3342 case tcc_reference:
3343 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3344 position computations since they will be converted into a
3345 WITH_RECORD_EXPR involving the reference, which we assume
3346 here will be valid. */
3347 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3348
3349 case tcc_exceptional:
3350 if (code == TREE_LIST)
3351 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3352 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3353 break;
3354
3355 case tcc_unary:
3356 case tcc_binary:
3357 case tcc_comparison:
3358 case tcc_expression:
3359 switch (code)
3360 {
3361 case COMPOUND_EXPR:
3362 /* Ignoring the first operand isn't quite right, but works best. */
3363 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3364
3365 case COND_EXPR:
3366 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3367 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3368 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3369
3370 case SAVE_EXPR:
3371 /* The save_expr function never wraps anything containing
3372 a PLACEHOLDER_EXPR. */
3373 return 0;
3374
3375 default:
3376 break;
3377 }
3378
3379 switch (TREE_CODE_LENGTH (code))
3380 {
3381 case 1:
3382 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3383 case 2:
3384 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3385 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3386 default:
3387 return 0;
3388 }
3389
3390 case tcc_vl_exp:
3391 switch (code)
3392 {
3393 case CALL_EXPR:
3394 {
3395 const_tree arg;
3396 const_call_expr_arg_iterator iter;
3397 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3398 if (CONTAINS_PLACEHOLDER_P (arg))
3399 return 1;
3400 return 0;
3401 }
3402 default:
3403 return 0;
3404 }
3405
3406 default:
3407 return 0;
3408 }
3409 return 0;
3410 }
3411
3412 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3413 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3414 field positions. */
3415
3416 static bool
3417 type_contains_placeholder_1 (const_tree type)
3418 {
3419 /* If the size contains a placeholder or the parent type (the component type
3420 in the case of arrays) involves a placeholder, this type does too. */
3421 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3422 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3423 || (!POINTER_TYPE_P (type)
3424 && TREE_TYPE (type)
3425 && type_contains_placeholder_p (TREE_TYPE (type))))
3426 return true;
3427
3428 /* Now do type-specific checks. Note that the last part of the check above
3429 greatly limits what we have to do below. */
3430 switch (TREE_CODE (type))
3431 {
3432 case VOID_TYPE:
3433 case POINTER_BOUNDS_TYPE:
3434 case COMPLEX_TYPE:
3435 case ENUMERAL_TYPE:
3436 case BOOLEAN_TYPE:
3437 case POINTER_TYPE:
3438 case OFFSET_TYPE:
3439 case REFERENCE_TYPE:
3440 case METHOD_TYPE:
3441 case FUNCTION_TYPE:
3442 case VECTOR_TYPE:
3443 case NULLPTR_TYPE:
3444 return false;
3445
3446 case INTEGER_TYPE:
3447 case REAL_TYPE:
3448 case FIXED_POINT_TYPE:
3449 /* Here we just check the bounds. */
3450 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3451 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3452
3453 case ARRAY_TYPE:
3454 /* We have already checked the component type above, so just check the
3455 domain type. */
3456 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3457
3458 case RECORD_TYPE:
3459 case UNION_TYPE:
3460 case QUAL_UNION_TYPE:
3461 {
3462 tree field;
3463
3464 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3465 if (TREE_CODE (field) == FIELD_DECL
3466 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3467 || (TREE_CODE (type) == QUAL_UNION_TYPE
3468 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3469 || type_contains_placeholder_p (TREE_TYPE (field))))
3470 return true;
3471
3472 return false;
3473 }
3474
3475 default:
3476 gcc_unreachable ();
3477 }
3478 }
3479
3480 /* Wrapper around above function used to cache its result. */
3481
3482 bool
3483 type_contains_placeholder_p (tree type)
3484 {
3485 bool result;
3486
3487 /* If the contains_placeholder_bits field has been initialized,
3488 then we know the answer. */
3489 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3490 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3491
3492 /* Indicate that we've seen this type node, and the answer is false.
3493 This is what we want to return if we run into recursion via fields. */
3494 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3495
3496 /* Compute the real value. */
3497 result = type_contains_placeholder_1 (type);
3498
3499 /* Store the real value. */
3500 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3501
3502 return result;
3503 }
3504 \f
3505 /* Push tree EXP onto vector QUEUE if it is not already present. */
3506
3507 static void
3508 push_without_duplicates (tree exp, vec<tree> *queue)
3509 {
3510 unsigned int i;
3511 tree iter;
3512
3513 FOR_EACH_VEC_ELT (*queue, i, iter)
3514 if (simple_cst_equal (iter, exp) == 1)
3515 break;
3516
3517 if (!iter)
3518 queue->safe_push (exp);
3519 }
3520
3521 /* Given a tree EXP, find all occurrences of references to fields
3522 in a PLACEHOLDER_EXPR and place them in vector REFS without
3523 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3524 we assume here that EXP contains only arithmetic expressions
3525 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3526 argument list. */
3527
3528 void
3529 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3530 {
3531 enum tree_code code = TREE_CODE (exp);
3532 tree inner;
3533 int i;
3534
3535 /* We handle TREE_LIST and COMPONENT_REF separately. */
3536 if (code == TREE_LIST)
3537 {
3538 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3539 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3540 }
3541 else if (code == COMPONENT_REF)
3542 {
3543 for (inner = TREE_OPERAND (exp, 0);
3544 REFERENCE_CLASS_P (inner);
3545 inner = TREE_OPERAND (inner, 0))
3546 ;
3547
3548 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3549 push_without_duplicates (exp, refs);
3550 else
3551 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3552 }
3553 else
3554 switch (TREE_CODE_CLASS (code))
3555 {
3556 case tcc_constant:
3557 break;
3558
3559 case tcc_declaration:
3560 /* Variables allocated to static storage can stay. */
3561 if (!TREE_STATIC (exp))
3562 push_without_duplicates (exp, refs);
3563 break;
3564
3565 case tcc_expression:
3566 /* This is the pattern built in ada/make_aligning_type. */
3567 if (code == ADDR_EXPR
3568 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3569 {
3570 push_without_duplicates (exp, refs);
3571 break;
3572 }
3573
3574 /* Fall through... */
3575
3576 case tcc_exceptional:
3577 case tcc_unary:
3578 case tcc_binary:
3579 case tcc_comparison:
3580 case tcc_reference:
3581 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3582 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3583 break;
3584
3585 case tcc_vl_exp:
3586 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3587 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3588 break;
3589
3590 default:
3591 gcc_unreachable ();
3592 }
3593 }
3594
3595 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3596 return a tree with all occurrences of references to F in a
3597 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3598 CONST_DECLs. Note that we assume here that EXP contains only
3599 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3600 occurring only in their argument list. */
3601
3602 tree
3603 substitute_in_expr (tree exp, tree f, tree r)
3604 {
3605 enum tree_code code = TREE_CODE (exp);
3606 tree op0, op1, op2, op3;
3607 tree new_tree;
3608
3609 /* We handle TREE_LIST and COMPONENT_REF separately. */
3610 if (code == TREE_LIST)
3611 {
3612 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3613 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3614 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3615 return exp;
3616
3617 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3618 }
3619 else if (code == COMPONENT_REF)
3620 {
3621 tree inner;
3622
3623 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3624 and it is the right field, replace it with R. */
3625 for (inner = TREE_OPERAND (exp, 0);
3626 REFERENCE_CLASS_P (inner);
3627 inner = TREE_OPERAND (inner, 0))
3628 ;
3629
3630 /* The field. */
3631 op1 = TREE_OPERAND (exp, 1);
3632
3633 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3634 return r;
3635
3636 /* If this expression hasn't been completed yet, leave it alone. */
3637 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3638 return exp;
3639
3640 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3641 if (op0 == TREE_OPERAND (exp, 0))
3642 return exp;
3643
3644 new_tree
3645 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3646 }
3647 else
3648 switch (TREE_CODE_CLASS (code))
3649 {
3650 case tcc_constant:
3651 return exp;
3652
3653 case tcc_declaration:
3654 if (exp == f)
3655 return r;
3656 else
3657 return exp;
3658
3659 case tcc_expression:
3660 if (exp == f)
3661 return r;
3662
3663 /* Fall through... */
3664
3665 case tcc_exceptional:
3666 case tcc_unary:
3667 case tcc_binary:
3668 case tcc_comparison:
3669 case tcc_reference:
3670 switch (TREE_CODE_LENGTH (code))
3671 {
3672 case 0:
3673 return exp;
3674
3675 case 1:
3676 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3677 if (op0 == TREE_OPERAND (exp, 0))
3678 return exp;
3679
3680 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3681 break;
3682
3683 case 2:
3684 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3685 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3686
3687 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3688 return exp;
3689
3690 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3691 break;
3692
3693 case 3:
3694 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3695 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3696 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3697
3698 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3699 && op2 == TREE_OPERAND (exp, 2))
3700 return exp;
3701
3702 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3703 break;
3704
3705 case 4:
3706 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3707 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3708 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3709 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3710
3711 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3712 && op2 == TREE_OPERAND (exp, 2)
3713 && op3 == TREE_OPERAND (exp, 3))
3714 return exp;
3715
3716 new_tree
3717 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3718 break;
3719
3720 default:
3721 gcc_unreachable ();
3722 }
3723 break;
3724
3725 case tcc_vl_exp:
3726 {
3727 int i;
3728
3729 new_tree = NULL_TREE;
3730
3731 /* If we are trying to replace F with a constant, inline back
3732 functions which do nothing else than computing a value from
3733 the arguments they are passed. This makes it possible to
3734 fold partially or entirely the replacement expression. */
3735 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3736 {
3737 tree t = maybe_inline_call_in_expr (exp);
3738 if (t)
3739 return SUBSTITUTE_IN_EXPR (t, f, r);
3740 }
3741
3742 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3743 {
3744 tree op = TREE_OPERAND (exp, i);
3745 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3746 if (new_op != op)
3747 {
3748 if (!new_tree)
3749 new_tree = copy_node (exp);
3750 TREE_OPERAND (new_tree, i) = new_op;
3751 }
3752 }
3753
3754 if (new_tree)
3755 {
3756 new_tree = fold (new_tree);
3757 if (TREE_CODE (new_tree) == CALL_EXPR)
3758 process_call_operands (new_tree);
3759 }
3760 else
3761 return exp;
3762 }
3763 break;
3764
3765 default:
3766 gcc_unreachable ();
3767 }
3768
3769 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3770
3771 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3772 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3773
3774 return new_tree;
3775 }
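/* Usage sketch (illustrative only; FIELD and R below are hypothetical
   locals): this function is normally reached through the
   SUBSTITUTE_IN_EXPR macro used above, e.g. to rewrite a
   self-referential size expression once the field's value is known:

       tree size = SUBSTITUTE_IN_EXPR (TYPE_SIZE (type), field, r);

   Subtrees that do not mention F are returned unchanged (pointer
   equality), so unaffected parts of EXP stay shared rather than being
   copied.  */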
3776
3777 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3778 for it within OBJ, a tree that is an object or a chain of references. */
3779
3780 tree
3781 substitute_placeholder_in_expr (tree exp, tree obj)
3782 {
3783 enum tree_code code = TREE_CODE (exp);
3784 tree op0, op1, op2, op3;
3785 tree new_tree;
3786
3787 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3788 in the chain of OBJ. */
3789 if (code == PLACEHOLDER_EXPR)
3790 {
3791 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3792 tree elt;
3793
3794 for (elt = obj; elt != 0;
3795 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3796 || TREE_CODE (elt) == COND_EXPR)
3797 ? TREE_OPERAND (elt, 1)
3798 : (REFERENCE_CLASS_P (elt)
3799 || UNARY_CLASS_P (elt)
3800 || BINARY_CLASS_P (elt)
3801 || VL_EXP_CLASS_P (elt)
3802 || EXPRESSION_CLASS_P (elt))
3803 ? TREE_OPERAND (elt, 0) : 0))
3804 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3805 return elt;
3806
3807 for (elt = obj; elt != 0;
3808 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3809 || TREE_CODE (elt) == COND_EXPR)
3810 ? TREE_OPERAND (elt, 1)
3811 : (REFERENCE_CLASS_P (elt)
3812 || UNARY_CLASS_P (elt)
3813 || BINARY_CLASS_P (elt)
3814 || VL_EXP_CLASS_P (elt)
3815 || EXPRESSION_CLASS_P (elt))
3816 ? TREE_OPERAND (elt, 0) : 0))
3817 if (POINTER_TYPE_P (TREE_TYPE (elt))
3818 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3819 == need_type))
3820 return fold_build1 (INDIRECT_REF, need_type, elt);
3821
3822 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3823 survives until RTL generation, there will be an error. */
3824 return exp;
3825 }
3826
3827 /* TREE_LIST is special because we need to look at TREE_VALUE
3828 and TREE_CHAIN, not TREE_OPERANDS. */
3829 else if (code == TREE_LIST)
3830 {
3831 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3832 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3833 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3834 return exp;
3835
3836 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3837 }
3838 else
3839 switch (TREE_CODE_CLASS (code))
3840 {
3841 case tcc_constant:
3842 case tcc_declaration:
3843 return exp;
3844
3845 case tcc_exceptional:
3846 case tcc_unary:
3847 case tcc_binary:
3848 case tcc_comparison:
3849 case tcc_expression:
3850 case tcc_reference:
3851 case tcc_statement:
3852 switch (TREE_CODE_LENGTH (code))
3853 {
3854 case 0:
3855 return exp;
3856
3857 case 1:
3858 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3859 if (op0 == TREE_OPERAND (exp, 0))
3860 return exp;
3861
3862 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3863 break;
3864
3865 case 2:
3866 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3867 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3868
3869 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3870 return exp;
3871
3872 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3873 break;
3874
3875 case 3:
3876 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3877 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3878 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3879
3880 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3881 && op2 == TREE_OPERAND (exp, 2))
3882 return exp;
3883
3884 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3885 break;
3886
3887 case 4:
3888 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3889 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3890 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3891 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3892
3893 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3894 && op2 == TREE_OPERAND (exp, 2)
3895 && op3 == TREE_OPERAND (exp, 3))
3896 return exp;
3897
3898 new_tree
3899 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3900 break;
3901
3902 default:
3903 gcc_unreachable ();
3904 }
3905 break;
3906
3907 case tcc_vl_exp:
3908 {
3909 int i;
3910
3911 new_tree = NULL_TREE;
3912
3913 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3914 {
3915 tree op = TREE_OPERAND (exp, i);
3916 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3917 if (new_op != op)
3918 {
3919 if (!new_tree)
3920 new_tree = copy_node (exp);
3921 TREE_OPERAND (new_tree, i) = new_op;
3922 }
3923 }
3924
3925 if (new_tree)
3926 {
3927 new_tree = fold (new_tree);
3928 if (TREE_CODE (new_tree) == CALL_EXPR)
3929 process_call_operands (new_tree);
3930 }
3931 else
3932 return exp;
3933 }
3934 break;
3935
3936 default:
3937 gcc_unreachable ();
3938 }
3939
3940 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3941
3942 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3943 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3944
3945 return new_tree;
3946 }
3947 \f
3948
3949 /* Subroutine of stabilize_reference; this is called for subtrees of
3950 references. Any expression with side-effects must be put in a SAVE_EXPR
3951 to ensure that it is only evaluated once.
3952
3953 We don't put SAVE_EXPR nodes around everything, because assigning very
3954 simple expressions to temporaries causes us to miss good opportunities
3955 for optimizations. Among other things, the opportunity to fold in the
3956 addition of a constant into an addressing mode often gets lost, e.g.
3957 "y[i+1] += x;". In general, we take the approach that we should not make
3958 an assignment unless we are forced into it - i.e., that any non-side effect
3959 operator should be allowed, and that cse should take care of coalescing
3960 multiple utterances of the same expression should that prove fruitful. */
3961
3962 static tree
3963 stabilize_reference_1 (tree e)
3964 {
3965 tree result;
3966 enum tree_code code = TREE_CODE (e);
3967
3968 /* We cannot ignore const expressions because the expression might be a
3969 reference to a const array whose index contains side effects. But we can
3970 ignore things that are actually constant or that have already been
3971 handled by this function. */
3972
3973 if (tree_invariant_p (e))
3974 return e;
3975
3976 switch (TREE_CODE_CLASS (code))
3977 {
3978 case tcc_exceptional:
3979 case tcc_type:
3980 case tcc_declaration:
3981 case tcc_comparison:
3982 case tcc_statement:
3983 case tcc_expression:
3984 case tcc_reference:
3985 case tcc_vl_exp:
3986 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3987 so that it will only be evaluated once. */
3988 /* The reference (r) and comparison (<) classes could be handled as
3989 below, but it is generally faster to only evaluate them once. */
3990 if (TREE_SIDE_EFFECTS (e))
3991 return save_expr (e);
3992 return e;
3993
3994 case tcc_constant:
3995 /* Constants need no processing. In fact, we should never reach
3996 here. */
3997 return e;
3998
3999 case tcc_binary:
4000 /* Division is slow and tends to be compiled with jumps,
4001 especially the division by powers of 2 that is often
4002 found inside of an array reference. So do it just once. */
4003 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4004 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4005 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4006 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4007 return save_expr (e);
4008 /* Recursively stabilize each operand. */
4009 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4010 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4011 break;
4012
4013 case tcc_unary:
4014 /* Recursively stabilize each operand. */
4015 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4016 break;
4017
4018 default:
4019 gcc_unreachable ();
4020 }
4021
4022 TREE_TYPE (result) = TREE_TYPE (e);
4023 TREE_READONLY (result) = TREE_READONLY (e);
4024 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4025 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4026
4027 return result;
4028 }
4029
4030 /* Stabilize a reference so that we can use it any number of times
4031 without causing its operands to be evaluated more than once.
4032 Returns the stabilized reference. This works by means of save_expr,
4033 so see the caveats in the comments about save_expr.
4034
4035 Also allows conversion expressions whose operands are references.
4036 Any other kind of expression is returned unchanged. */
4037
4038 tree
4039 stabilize_reference (tree ref)
4040 {
4041 tree result;
4042 enum tree_code code = TREE_CODE (ref);
4043
4044 switch (code)
4045 {
4046 case VAR_DECL:
4047 case PARM_DECL:
4048 case RESULT_DECL:
4049 /* No action is needed in this case. */
4050 return ref;
4051
4052 CASE_CONVERT:
4053 case FLOAT_EXPR:
4054 case FIX_TRUNC_EXPR:
4055 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4056 break;
4057
4058 case INDIRECT_REF:
4059 result = build_nt (INDIRECT_REF,
4060 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4061 break;
4062
4063 case COMPONENT_REF:
4064 result = build_nt (COMPONENT_REF,
4065 stabilize_reference (TREE_OPERAND (ref, 0)),
4066 TREE_OPERAND (ref, 1), NULL_TREE);
4067 break;
4068
4069 case BIT_FIELD_REF:
4070 result = build_nt (BIT_FIELD_REF,
4071 stabilize_reference (TREE_OPERAND (ref, 0)),
4072 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4073 break;
4074
4075 case ARRAY_REF:
4076 result = build_nt (ARRAY_REF,
4077 stabilize_reference (TREE_OPERAND (ref, 0)),
4078 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4079 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4080 break;
4081
4082 case ARRAY_RANGE_REF:
4083 result = build_nt (ARRAY_RANGE_REF,
4084 stabilize_reference (TREE_OPERAND (ref, 0)),
4085 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4086 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4087 break;
4088
4089 case COMPOUND_EXPR:
4090 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4091 it wouldn't be ignored. This matters when dealing with
4092 volatiles. */
4093 return stabilize_reference_1 (ref);
4094
4095 /* If arg isn't a kind of lvalue we recognize, make no change.
4096 Caller should recognize the error for an invalid lvalue. */
4097 default:
4098 return ref;
4099
4100 case ERROR_MARK:
4101 return error_mark_node;
4102 }
4103
4104 TREE_TYPE (result) = TREE_TYPE (ref);
4105 TREE_READONLY (result) = TREE_READONLY (ref);
4106 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4107 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4108
4109 return result;
4110 }
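/* Illustrative sketch (not part of GCC sources): a front end that must read
   and write the same lvalue, e.g. when expanding "x.a[i ()] += 1", would
   first stabilize the reference so the call to i () is evaluated only once:

     tree ref = stabilize_reference (lhs);
     tree one = build_int_cst (TREE_TYPE (ref), 1);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (ref), ref, one);
     tree asgn = build2 (MODIFY_EXPR, TREE_TYPE (ref), ref, sum);

   LHS here is a hypothetical reference tree supplied by the caller; any
   side-effecting index inside it ends up wrapped in a SAVE_EXPR while the
   outer reference structure is rebuilt.  */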
4111 \f
4112 /* Low-level constructors for expressions. */
4113
4114 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4115 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4116
4117 void
4118 recompute_tree_invariant_for_addr_expr (tree t)
4119 {
4120 tree node;
4121 bool tc = true, se = false;
4122
4123 /* We started out assuming this address is both invariant and constant and
4124 that it does not have side effects. Now go down any handled components and see if
4125 any of them involve offsets that are either non-constant or non-invariant.
4126 Also check for side-effects.
4127
4128 ??? Note that this code makes no attempt to deal with the case where
4129 taking the address of something causes a copy due to misalignment. */
4130
4131 #define UPDATE_FLAGS(NODE) \
4132 do { tree _node = (NODE); \
4133 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4134 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4135
4136 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4137 node = TREE_OPERAND (node, 0))
4138 {
4139 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4140 array reference (probably made temporarily by the G++ front end),
4141 so ignore all the operands. */
4142 if ((TREE_CODE (node) == ARRAY_REF
4143 || TREE_CODE (node) == ARRAY_RANGE_REF)
4144 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4145 {
4146 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4147 if (TREE_OPERAND (node, 2))
4148 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4149 if (TREE_OPERAND (node, 3))
4150 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4151 }
4152 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4153 FIELD_DECL, apparently. The G++ front end can put something else
4154 there, at least temporarily. */
4155 else if (TREE_CODE (node) == COMPONENT_REF
4156 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4157 {
4158 if (TREE_OPERAND (node, 2))
4159 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4160 }
4161 }
4162
4163 node = lang_hooks.expr_to_decl (node, &tc, &se);
4164
4165 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4166 the address, since &(*a)->b is a form of addition. If it's a constant, the
4167 address is constant too. If it's a decl, its address is constant if the
4168 decl is static. Everything else is not constant and, furthermore,
4169 taking the address of a volatile variable is not volatile. */
4170 if (TREE_CODE (node) == INDIRECT_REF
4171 || TREE_CODE (node) == MEM_REF)
4172 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4173 else if (CONSTANT_CLASS_P (node))
4174 ;
4175 else if (DECL_P (node))
4176 tc &= (staticp (node) != NULL_TREE);
4177 else
4178 {
4179 tc = false;
4180 se |= TREE_SIDE_EFFECTS (node);
4181 }
4182
4183
4184 TREE_CONSTANT (t) = tc;
4185 TREE_SIDE_EFFECTS (t) = se;
4186 #undef UPDATE_FLAGS
4187 }
4188
4189 /* Build an expression of code CODE, data type TYPE, and operands as
4190 specified. Expressions and reference nodes can be created this way.
4191 Constants, decls, types and misc nodes cannot be.
4192
4193 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4194 enough for all extant tree codes. */
4195
4196 tree
4197 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4198 {
4199 tree t;
4200
4201 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4202
4203 t = make_node_stat (code PASS_MEM_STAT);
4204 TREE_TYPE (t) = tt;
4205
4206 return t;
4207 }
4208
4209 tree
4210 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4211 {
4212 int length = sizeof (struct tree_exp);
4213 tree t;
4214
4215 record_node_allocation_statistics (code, length);
4216
4217 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4218
4219 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4220
4221 memset (t, 0, sizeof (struct tree_common));
4222
4223 TREE_SET_CODE (t, code);
4224
4225 TREE_TYPE (t) = type;
4226 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4227 TREE_OPERAND (t, 0) = node;
4228 if (node && !TYPE_P (node))
4229 {
4230 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4231 TREE_READONLY (t) = TREE_READONLY (node);
4232 }
4233
4234 if (TREE_CODE_CLASS (code) == tcc_statement)
4235 TREE_SIDE_EFFECTS (t) = 1;
4236 else switch (code)
4237 {
4238 case VA_ARG_EXPR:
4239 /* All of these have side-effects, no matter what their
4240 operands are. */
4241 TREE_SIDE_EFFECTS (t) = 1;
4242 TREE_READONLY (t) = 0;
4243 break;
4244
4245 case INDIRECT_REF:
4246 /* Whether a dereference is readonly has nothing to do with whether
4247 its operand is readonly. */
4248 TREE_READONLY (t) = 0;
4249 break;
4250
4251 case ADDR_EXPR:
4252 if (node)
4253 recompute_tree_invariant_for_addr_expr (t);
4254 break;
4255
4256 default:
4257 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4258 && node && !TYPE_P (node)
4259 && TREE_CONSTANT (node))
4260 TREE_CONSTANT (t) = 1;
4261 if (TREE_CODE_CLASS (code) == tcc_reference
4262 && node && TREE_THIS_VOLATILE (node))
4263 TREE_THIS_VOLATILE (t) = 1;
4264 break;
4265 }
4266
4267 return t;
4268 }
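/* Illustrative sketch (not part of GCC sources): building an ADDR_EXPR with
   build1 recomputes TREE_CONSTANT and TREE_SIDE_EFFECTS through
   recompute_tree_invariant_for_addr_expr above.  Assuming VAR is some
   VAR_DECL supplied by the caller:

     tree ptrtype = build_pointer_type (TREE_TYPE (var));
     tree addr = build1 (ADDR_EXPR, ptrtype, var);

   TREE_CONSTANT (addr) is set only if VAR is static (see the staticp test
   above), and taking the address of a volatile variable does not by itself
   make the ADDR_EXPR volatile.  */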
4269
4270 #define PROCESS_ARG(N) \
4271 do { \
4272 TREE_OPERAND (t, N) = arg##N; \
4273 if (arg##N &&!TYPE_P (arg##N)) \
4274 { \
4275 if (TREE_SIDE_EFFECTS (arg##N)) \
4276 side_effects = 1; \
4277 if (!TREE_READONLY (arg##N) \
4278 && !CONSTANT_CLASS_P (arg##N)) \
4279 (void) (read_only = 0); \
4280 if (!TREE_CONSTANT (arg##N)) \
4281 (void) (constant = 0); \
4282 } \
4283 } while (0)
4284
4285 tree
4286 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4287 {
4288 bool constant, read_only, side_effects;
4289 tree t;
4290
4291 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4292
4293 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4294 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4295 /* When sizetype precision doesn't match that of pointers
4296 we need to be able to build explicit extensions or truncations
4297 of the offset argument. */
4298 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4299 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4300 && TREE_CODE (arg1) == INTEGER_CST);
4301
4302 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4303 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4304 && ptrofftype_p (TREE_TYPE (arg1)));
4305
4306 t = make_node_stat (code PASS_MEM_STAT);
4307 TREE_TYPE (t) = tt;
4308
4309 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4310 result based on those same flags for the arguments. But if the
4311 arguments aren't really even `tree' expressions, we shouldn't be trying
4312 to do this. */
4313
4314 /* Expressions without side effects may be constant if their
4315 arguments are as well. */
4316 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4317 || TREE_CODE_CLASS (code) == tcc_binary);
4318 read_only = 1;
4319 side_effects = TREE_SIDE_EFFECTS (t);
4320
4321 PROCESS_ARG (0);
4322 PROCESS_ARG (1);
4323
4324 TREE_READONLY (t) = read_only;
4325 TREE_CONSTANT (t) = constant;
4326 TREE_SIDE_EFFECTS (t) = side_effects;
4327 TREE_THIS_VOLATILE (t)
4328 = (TREE_CODE_CLASS (code) == tcc_reference
4329 && arg0 && TREE_THIS_VOLATILE (arg0));
4330
4331 return t;
4332 }
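/* Illustrative sketch (not part of GCC sources): pointer arithmetic must be
   expressed as POINTER_PLUS_EXPR with a sizetype offset, which the
   assertions above enforce.  Assuming PTR is an expression of pointer type:

     tree off = size_int (4);
     tree incr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   Building a plain PLUS_EXPR of pointer type from non-constant operands
   would instead trip the first assertion (when sizetype and the pointer
   type have the same precision).  */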
4333
4334
4335 tree
4336 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4337 tree arg2 MEM_STAT_DECL)
4338 {
4339 bool constant, read_only, side_effects;
4340 tree t;
4341
4342 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4343 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4344
4345 t = make_node_stat (code PASS_MEM_STAT);
4346 TREE_TYPE (t) = tt;
4347
4348 read_only = 1;
4349
4350 /* As a special exception, if COND_EXPR has NULL branches, we
4351 assume that it is a gimple statement and always consider
4352 it to have side effects. */
4353 if (code == COND_EXPR
4354 && tt == void_type_node
4355 && arg1 == NULL_TREE
4356 && arg2 == NULL_TREE)
4357 side_effects = true;
4358 else
4359 side_effects = TREE_SIDE_EFFECTS (t);
4360
4361 PROCESS_ARG (0);
4362 PROCESS_ARG (1);
4363 PROCESS_ARG (2);
4364
4365 if (code == COND_EXPR)
4366 TREE_READONLY (t) = read_only;
4367
4368 TREE_SIDE_EFFECTS (t) = side_effects;
4369 TREE_THIS_VOLATILE (t)
4370 = (TREE_CODE_CLASS (code) == tcc_reference
4371 && arg0 && TREE_THIS_VOLATILE (arg0));
4372
4373 return t;
4374 }
4375
4376 tree
4377 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4378 tree arg2, tree arg3 MEM_STAT_DECL)
4379 {
4380 bool constant, read_only, side_effects;
4381 tree t;
4382
4383 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4384
4385 t = make_node_stat (code PASS_MEM_STAT);
4386 TREE_TYPE (t) = tt;
4387
4388 side_effects = TREE_SIDE_EFFECTS (t);
4389
4390 PROCESS_ARG (0);
4391 PROCESS_ARG (1);
4392 PROCESS_ARG (2);
4393 PROCESS_ARG (3);
4394
4395 TREE_SIDE_EFFECTS (t) = side_effects;
4396 TREE_THIS_VOLATILE (t)
4397 = (TREE_CODE_CLASS (code) == tcc_reference
4398 && arg0 && TREE_THIS_VOLATILE (arg0));
4399
4400 return t;
4401 }
4402
4403 tree
4404 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4405 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4406 {
4407 bool constant, read_only, side_effects;
4408 tree t;
4409
4410 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4411
4412 t = make_node_stat (code PASS_MEM_STAT);
4413 TREE_TYPE (t) = tt;
4414
4415 side_effects = TREE_SIDE_EFFECTS (t);
4416
4417 PROCESS_ARG (0);
4418 PROCESS_ARG (1);
4419 PROCESS_ARG (2);
4420 PROCESS_ARG (3);
4421 PROCESS_ARG (4);
4422
4423 TREE_SIDE_EFFECTS (t) = side_effects;
4424 TREE_THIS_VOLATILE (t)
4425 = (TREE_CODE_CLASS (code) == tcc_reference
4426 && arg0 && TREE_THIS_VOLATILE (arg0));
4427
4428 return t;
4429 }
4430
4431 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4432 on the pointer PTR. */
4433
4434 tree
4435 build_simple_mem_ref_loc (location_t loc, tree ptr)
4436 {
4437 HOST_WIDE_INT offset = 0;
4438 tree ptype = TREE_TYPE (ptr);
4439 tree tem;
4440 /* For convenience allow addresses that collapse to a simple base
4441 and offset. */
4442 if (TREE_CODE (ptr) == ADDR_EXPR
4443 && (handled_component_p (TREE_OPERAND (ptr, 0))
4444 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4445 {
4446 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4447 gcc_assert (ptr);
4448 ptr = build_fold_addr_expr (ptr);
4449 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4450 }
4451 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4452 ptr, build_int_cst (ptype, offset));
4453 SET_EXPR_LOCATION (tem, loc);
4454 return tem;
4455 }
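/* Illustrative sketch (not part of GCC sources): dereferencing a pointer in
   GIMPLE-era code uses a MEM_REF rather than an INDIRECT_REF.  Assuming PTR
   is an SSA name or invariant address of type "int *":

     tree deref = build_simple_mem_ref_loc (input_location, ptr);

   gives the equivalent of "*ptr"; when PTR is an ADDR_EXPR of a handled
   component, the base and unit offset are folded into the MEM_REF's two
   operands as shown above.  */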
4456
4457 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4458
4459 offset_int
4460 mem_ref_offset (const_tree t)
4461 {
4462 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4463 }
4464
4465 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4466 offsetted by OFFSET units. */
4467
4468 tree
4469 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4470 {
4471 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4472 build_fold_addr_expr (base),
4473 build_int_cst (ptr_type_node, offset));
4474 tree addr = build1 (ADDR_EXPR, type, ref);
4475 recompute_tree_invariant_for_addr_expr (addr);
4476 return addr;
4477 }
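/* Illustrative sketch (not part of GCC sources): given a static VAR_DECL in
   BASE, an invariant address pointing 8 bytes into it could be formed as

     tree addr = build_invariant_address (build_pointer_type (char_type_node),
                                          base, 8);

   The result is an ADDR_EXPR of a MEM_REF whose TREE_CONSTANT flag has been
   recomputed, so it can be used where an invariant address is required.  */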
4478
4479 /* Similar except don't specify the TREE_TYPE
4480 and leave the TREE_SIDE_EFFECTS as 0.
4481 It is permissible for arguments to be null,
4482 or even garbage if their values do not matter. */
4483
4484 tree
4485 build_nt (enum tree_code code, ...)
4486 {
4487 tree t;
4488 int length;
4489 int i;
4490 va_list p;
4491
4492 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4493
4494 va_start (p, code);
4495
4496 t = make_node (code);
4497 length = TREE_CODE_LENGTH (code);
4498
4499 for (i = 0; i < length; i++)
4500 TREE_OPERAND (t, i) = va_arg (p, tree);
4501
4502 va_end (p);
4503 return t;
4504 }
4505
4506 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4507 tree vec. */
4508
4509 tree
4510 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4511 {
4512 tree ret, t;
4513 unsigned int ix;
4514
4515 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4516 CALL_EXPR_FN (ret) = fn;
4517 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4518 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4519 CALL_EXPR_ARG (ret, ix) = t;
4520 return ret;
4521 }
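/* Illustrative sketch (not part of GCC sources): front ends can build a bare
   CALL_EXPR from a tree vector before types and flags are finalized.  FNADDR
   is assumed to be an ADDR_EXPR of some FUNCTION_DECL:

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, integer_zero_node);
     tree call = build_nt_call_vec (fnaddr, args);

   As with build_nt, TREE_TYPE and TREE_SIDE_EFFECTS are left for the caller
   to fill in later.  */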
4522 \f
4523 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4524 We do NOT enter this node in any sort of symbol table.
4525
4526 LOC is the location of the decl.
4527
4528 layout_decl is used to set up the decl's storage layout.
4529 Other slots are initialized to 0 or null pointers. */
4530
4531 tree
4532 build_decl_stat (location_t loc, enum tree_code code, tree name,
4533 tree type MEM_STAT_DECL)
4534 {
4535 tree t;
4536
4537 t = make_node_stat (code PASS_MEM_STAT);
4538 DECL_SOURCE_LOCATION (t) = loc;
4539
4540 /* if (type == error_mark_node)
4541 type = integer_type_node; */
4542 /* That is not done, deliberately, so that having error_mark_node
4543 as the type can suppress useless errors in the use of this variable. */
4544
4545 DECL_NAME (t) = name;
4546 TREE_TYPE (t) = type;
4547
4548 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4549 layout_decl (t, 0);
4550
4551 return t;
4552 }
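/* Illustrative sketch (not part of GCC sources): creating an artificial
   "int" variable named "tmp" at an unknown location:

     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);
     DECL_ARTIFICIAL (var) = 1;

   Because the code is VAR_DECL, layout_decl has already filled in DECL_SIZE
   and DECL_ALIGN from the type; the decl is not entered in any symbol
   table.  */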
4553
4554 /* Builds and returns a function declaration with NAME and TYPE. */
4555
4556 tree
4557 build_fn_decl (const char *name, tree type)
4558 {
4559 tree id = get_identifier (name);
4560 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4561
4562 DECL_EXTERNAL (decl) = 1;
4563 TREE_PUBLIC (decl) = 1;
4564 DECL_ARTIFICIAL (decl) = 1;
4565 TREE_NOTHROW (decl) = 1;
4566
4567 return decl;
4568 }
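/* Illustrative sketch (not part of GCC sources): declaring an external
   helper such as "abort" that takes no arguments and returns void:

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("abort", fntype);

   The resulting FUNCTION_DECL is external, public, artificial and nothrow,
   matching the flags set above.  */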
4569
4570 vec<tree, va_gc> *all_translation_units;
4571
4572 /* Builds a new translation-unit decl with name NAME, queues it in the
4573 global list of translation-unit decls and returns it. */
4574
4575 tree
4576 build_translation_unit_decl (tree name)
4577 {
4578 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4579 name, NULL_TREE);
4580 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4581 vec_safe_push (all_translation_units, tu);
4582 return tu;
4583 }
4584
4585 \f
4586 /* BLOCK nodes are used to represent the structure of binding contours
4587 and declarations, once those contours have been exited and their contents
4588 compiled. This information is used for outputting debugging info. */
4589
4590 tree
4591 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4592 {
4593 tree block = make_node (BLOCK);
4594
4595 BLOCK_VARS (block) = vars;
4596 BLOCK_SUBBLOCKS (block) = subblocks;
4597 BLOCK_SUPERCONTEXT (block) = supercontext;
4598 BLOCK_CHAIN (block) = chain;
4599 return block;
4600 }
4601
4602 \f
4603 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4604
4605 LOC is the location to use in tree T. */
4606
4607 void
4608 protected_set_expr_location (tree t, location_t loc)
4609 {
4610 if (CAN_HAVE_LOCATION_P (t))
4611 SET_EXPR_LOCATION (t, loc);
4612 }
4613 \f
4614 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4615 is ATTRIBUTE. */
4616
4617 tree
4618 build_decl_attribute_variant (tree ddecl, tree attribute)
4619 {
4620 DECL_ATTRIBUTES (ddecl) = attribute;
4621 return ddecl;
4622 }
4623
4624 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4625 is ATTRIBUTE and its qualifiers are QUALS.
4626
4627 Record such modified types already made so we don't make duplicates. */
4628
4629 tree
4630 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4631 {
4632 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4633 {
4634 inchash::hash hstate;
4635 tree ntype;
4636 int i;
4637 tree t;
4638 enum tree_code code = TREE_CODE (ttype);
4639
4640 /* Building a distinct copy of a tagged type is inappropriate; it
4641 causes breakage in code that expects there to be a one-to-one
4642 relationship between a struct and its fields.
4643 build_duplicate_type is another solution (as used in
4644 handle_transparent_union_attribute), but that doesn't play well
4645 with the stronger C++ type identity model. */
4646 if (TREE_CODE (ttype) == RECORD_TYPE
4647 || TREE_CODE (ttype) == UNION_TYPE
4648 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4649 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4650 {
4651 warning (OPT_Wattributes,
4652 "ignoring attributes applied to %qT after definition",
4653 TYPE_MAIN_VARIANT (ttype));
4654 return build_qualified_type (ttype, quals);
4655 }
4656
4657 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4658 ntype = build_distinct_type_copy (ttype);
4659
4660 TYPE_ATTRIBUTES (ntype) = attribute;
4661
4662 hstate.add_int (code);
4663 if (TREE_TYPE (ntype))
4664 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4665 attribute_hash_list (attribute, hstate);
4666
4667 switch (TREE_CODE (ntype))
4668 {
4669 case FUNCTION_TYPE:
4670 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4671 break;
4672 case ARRAY_TYPE:
4673 if (TYPE_DOMAIN (ntype))
4674 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4675 break;
4676 case INTEGER_TYPE:
4677 t = TYPE_MAX_VALUE (ntype);
4678 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4679 hstate.add_object (TREE_INT_CST_ELT (t, i));
4680 break;
4681 case REAL_TYPE:
4682 case FIXED_POINT_TYPE:
4683 {
4684 unsigned int precision = TYPE_PRECISION (ntype);
4685 hstate.add_object (precision);
4686 }
4687 break;
4688 default:
4689 break;
4690 }
4691
4692 ntype = type_hash_canon (hstate.end(), ntype);
4693
4694 /* If the target-dependent attributes make NTYPE different from
4695 its canonical type, we will need to use structural equality
4696 checks for this type. */
4697 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4698 || !comp_type_attributes (ntype, ttype))
4699 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4700 else if (TYPE_CANONICAL (ntype) == ntype)
4701 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4702
4703 ttype = build_qualified_type (ntype, quals);
4704 }
4705 else if (TYPE_QUALS (ttype) != quals)
4706 ttype = build_qualified_type (ttype, quals);
4707
4708 return ttype;
4709 }
4710
4711 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4712 the same. */
4713
4714 static bool
4715 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4716 {
4717 tree cl1, cl2;
4718 for (cl1 = clauses1, cl2 = clauses2;
4719 cl1 && cl2;
4720 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4721 {
4722 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4723 return false;
4724 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4725 {
4726 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4727 OMP_CLAUSE_DECL (cl2)) != 1)
4728 return false;
4729 }
4730 switch (OMP_CLAUSE_CODE (cl1))
4731 {
4732 case OMP_CLAUSE_ALIGNED:
4733 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4734 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4735 return false;
4736 break;
4737 case OMP_CLAUSE_LINEAR:
4738 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4739 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4740 return false;
4741 break;
4742 case OMP_CLAUSE_SIMDLEN:
4743 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4744 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4745 return false;
4746 default:
4747 break;
4748 }
4749 }
4750 return true;
4751 }
4752
4753 /* Compare two constructor-element-type constants. Return true if the lists
4754 are known to be equal; otherwise return false. */
4755
4756 static bool
4757 simple_cst_list_equal (const_tree l1, const_tree l2)
4758 {
4759 while (l1 != NULL_TREE && l2 != NULL_TREE)
4760 {
4761 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4762 return false;
4763
4764 l1 = TREE_CHAIN (l1);
4765 l2 = TREE_CHAIN (l2);
4766 }
4767
4768 return l1 == l2;
4769 }
4770
4771 /* Compare two attributes for their value identity. Return true if the
4772 attribute values are known to be equal; otherwise return false.
4773 */
4774
4775 static bool
4776 attribute_value_equal (const_tree attr1, const_tree attr2)
4777 {
4778 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4779 return true;
4780
4781 if (TREE_VALUE (attr1) != NULL_TREE
4782 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4783 && TREE_VALUE (attr2) != NULL
4784 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4785 return (simple_cst_list_equal (TREE_VALUE (attr1),
4786 TREE_VALUE (attr2)) == 1);
4787
4788 if ((flag_openmp || flag_openmp_simd)
4789 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4790 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4791 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4792 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4793 TREE_VALUE (attr2));
4794
4795 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4796 }
4797
4798 /* Return 0 if the attributes for two types are incompatible, 1 if they
4799 are compatible, and 2 if they are nearly compatible (which causes a
4800 warning to be generated). */
4801 int
4802 comp_type_attributes (const_tree type1, const_tree type2)
4803 {
4804 const_tree a1 = TYPE_ATTRIBUTES (type1);
4805 const_tree a2 = TYPE_ATTRIBUTES (type2);
4806 const_tree a;
4807
4808 if (a1 == a2)
4809 return 1;
4810 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4811 {
4812 const struct attribute_spec *as;
4813 const_tree attr;
4814
4815 as = lookup_attribute_spec (get_attribute_name (a));
4816 if (!as || as->affects_type_identity == false)
4817 continue;
4818
4819 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4820 if (!attr || !attribute_value_equal (a, attr))
4821 break;
4822 }
4823 if (!a)
4824 {
4825 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4826 {
4827 const struct attribute_spec *as;
4828
4829 as = lookup_attribute_spec (get_attribute_name (a));
4830 if (!as || as->affects_type_identity == false)
4831 continue;
4832
4833 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4834 break;
4835 /* We don't need to compare trees again, as we did this
4836 already in the first loop. */
4837 }
4838 /* All attributes that affect type identity are equal, so
4839 there is no need to call the target hook for comparison. */
4840 if (!a)
4841 return 1;
4842 }
4843 /* As some type combinations, such as the default calling convention, might
4844 still be compatible, we have to call the target hook to get the final result. */
4845 return targetm.comp_type_attributes (type1, type2);
4846 }
4847
4848 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4849 is ATTRIBUTE.
4850
4851 Record such modified types already made so we don't make duplicates. */
4852
4853 tree
4854 build_type_attribute_variant (tree ttype, tree attribute)
4855 {
4856 return build_type_attribute_qual_variant (ttype, attribute,
4857 TYPE_QUALS (ttype));
4858 }
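/* Illustrative sketch (not part of GCC sources): attaching a "may_alias"
   attribute with no arguments to a variant of unsigned int and asking
   whether the two types are still attribute-compatible:

     tree attr = tree_cons (get_identifier ("may_alias"), NULL_TREE,
                            TYPE_ATTRIBUTES (unsigned_type_node));
     tree variant = build_type_attribute_variant (unsigned_type_node, attr);
     int compat = comp_type_attributes (unsigned_type_node, variant);

   Whether COMPAT is 1 depends on the target hook and on whether the
   attribute affects type identity.  */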
4859
4860
4861 /* Reset the expression *EXPR_P, a size or position.
4862
4863 ??? We could reset all non-constant sizes or positions. But it's cheap
4864 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4865
4866 We need to reset self-referential sizes or positions because they cannot
4867 be gimplified and thus can contain a CALL_EXPR after the gimplification
4868 is finished, which will run afoul of LTO streaming. And they need to be
4869 reset to something essentially dummy but not constant, so as to preserve
4870 the properties of the object they are attached to. */
4871
4872 static inline void
4873 free_lang_data_in_one_sizepos (tree *expr_p)
4874 {
4875 tree expr = *expr_p;
4876 if (CONTAINS_PLACEHOLDER_P (expr))
4877 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4878 }
4879
4880
4881 /* Reset all the fields in a binfo node BINFO. We only keep
4882 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4883
4884 static void
4885 free_lang_data_in_binfo (tree binfo)
4886 {
4887 unsigned i;
4888 tree t;
4889
4890 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4891
4892 BINFO_VIRTUALS (binfo) = NULL_TREE;
4893 BINFO_BASE_ACCESSES (binfo) = NULL;
4894 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4895 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4896
4897 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4898 free_lang_data_in_binfo (t);
4899 }
4900
4901
4902 /* Reset all language specific information still present in TYPE. */
4903
4904 static void
4905 free_lang_data_in_type (tree type)
4906 {
4907 gcc_assert (TYPE_P (type));
4908
4909 /* Give the FE a chance to remove its own data first. */
4910 lang_hooks.free_lang_data (type);
4911
4912 TREE_LANG_FLAG_0 (type) = 0;
4913 TREE_LANG_FLAG_1 (type) = 0;
4914 TREE_LANG_FLAG_2 (type) = 0;
4915 TREE_LANG_FLAG_3 (type) = 0;
4916 TREE_LANG_FLAG_4 (type) = 0;
4917 TREE_LANG_FLAG_5 (type) = 0;
4918 TREE_LANG_FLAG_6 (type) = 0;
4919
4920 if (TREE_CODE (type) == FUNCTION_TYPE)
4921 {
4922 /* Remove the const and volatile qualifiers from arguments. The
4923 C++ front end removes them, but the C front end does not,
4924 leading to false ODR violation errors when merging two
4925 instances of the same function signature compiled by
4926 different front ends. */
4927 tree p;
4928
4929 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4930 {
4931 tree arg_type = TREE_VALUE (p);
4932
4933 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4934 {
4935 int quals = TYPE_QUALS (arg_type)
4936 & ~TYPE_QUAL_CONST
4937 & ~TYPE_QUAL_VOLATILE;
4938 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4939 free_lang_data_in_type (TREE_VALUE (p));
4940 }
4941 }
4942 }
4943
4944 /* Remove members that are not FIELD_DECLs or TYPE_DECLs from the field
4945 list of an aggregate. These occur in C++. */
4946 if (RECORD_OR_UNION_TYPE_P (type))
4947 {
4948 tree prev, member;
4949
4950 /* Note that TYPE_FIELDS can be shared across distinct
4951 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4952 to be removed, we cannot set its TREE_CHAIN to NULL.
4953 Otherwise, we would not be able to find all the other fields
4954 in the other instances of this TREE_TYPE.
4955
4956 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4957 prev = NULL_TREE;
4958 member = TYPE_FIELDS (type);
4959 while (member)
4960 {
4961 if (TREE_CODE (member) == FIELD_DECL
4962 || TREE_CODE (member) == TYPE_DECL)
4963 {
4964 if (prev)
4965 TREE_CHAIN (prev) = member;
4966 else
4967 TYPE_FIELDS (type) = member;
4968 prev = member;
4969 }
4970
4971 member = TREE_CHAIN (member);
4972 }
4973
4974 if (prev)
4975 TREE_CHAIN (prev) = NULL_TREE;
4976 else
4977 TYPE_FIELDS (type) = NULL_TREE;
4978
4979 TYPE_METHODS (type) = NULL_TREE;
4980 if (TYPE_BINFO (type))
4981 {
4982 free_lang_data_in_binfo (TYPE_BINFO (type));
4983 if ((!BINFO_VTABLE (TYPE_BINFO (type))
4984 || !flag_devirtualize)
4985 && (!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
4986 || debug_info_level != DINFO_LEVEL_NONE))
4987 TYPE_BINFO (type) = NULL;
4988 }
4989 }
4990 else
4991 {
4992 /* For non-aggregate types, clear out the language slot (which
4993 overloads TYPE_BINFO). */
4994 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4995
4996 if (INTEGRAL_TYPE_P (type)
4997 || SCALAR_FLOAT_TYPE_P (type)
4998 || FIXED_POINT_TYPE_P (type))
4999 {
5000 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5001 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5002 }
5003 }
5004
5005 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5006 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5007
5008 if (TYPE_CONTEXT (type)
5009 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5010 {
5011 tree ctx = TYPE_CONTEXT (type);
5012 do
5013 {
5014 ctx = BLOCK_SUPERCONTEXT (ctx);
5015 }
5016 while (ctx && TREE_CODE (ctx) == BLOCK);
5017 TYPE_CONTEXT (type) = ctx;
5018 }
5019 }
5020
5021
5022 /* Return true if DECL may need an assembler name to be set. */
5023
5024 static inline bool
5025 need_assembler_name_p (tree decl)
5026 {
5027 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5028 merging. */
5029 if (flag_lto_odr_type_mering
5030 && TREE_CODE (decl) == TYPE_DECL
5031 && DECL_NAME (decl)
5032 && decl == TYPE_NAME (TREE_TYPE (decl))
5033 && !is_lang_specific (TREE_TYPE (decl))
5034 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5035 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5036 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5037 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5038 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5039 if (TREE_CODE (decl) != FUNCTION_DECL
5040 && TREE_CODE (decl) != VAR_DECL)
5041 return false;
5042
5043 /* If DECL already has its assembler name set, it does not need a
5044 new one. */
5045 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5046 || DECL_ASSEMBLER_NAME_SET_P (decl))
5047 return false;
5048
5049 /* Abstract decls do not need an assembler name. */
5050 if (DECL_ABSTRACT_P (decl))
5051 return false;
5052
5053 /* For VAR_DECLs, only static, public and external symbols need an
5054 assembler name. */
5055 if (TREE_CODE (decl) == VAR_DECL
5056 && !TREE_STATIC (decl)
5057 && !TREE_PUBLIC (decl)
5058 && !DECL_EXTERNAL (decl))
5059 return false;
5060
5061 if (TREE_CODE (decl) == FUNCTION_DECL)
5062 {
5063 /* Do not set assembler name on builtins. Allow RTL expansion to
5064 decide whether to expand inline or via a regular call. */
5065 if (DECL_BUILT_IN (decl)
5066 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5067 return false;
5068
5069 /* Functions represented in the callgraph need an assembler name. */
5070 if (cgraph_node::get (decl) != NULL)
5071 return true;
5072
5073 /* Unused and not public functions don't need an assembler name. */
5074 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5075 return false;
5076 }
5077
5078 return true;
5079 }
5080
5081
5082 /* Reset all language specific information still present in symbol
5083 DECL. */
5084
5085 static void
5086 free_lang_data_in_decl (tree decl)
5087 {
5088 gcc_assert (DECL_P (decl));
5089
5090 /* Give the FE a chance to remove its own data first. */
5091 lang_hooks.free_lang_data (decl);
5092
5093 TREE_LANG_FLAG_0 (decl) = 0;
5094 TREE_LANG_FLAG_1 (decl) = 0;
5095 TREE_LANG_FLAG_2 (decl) = 0;
5096 TREE_LANG_FLAG_3 (decl) = 0;
5097 TREE_LANG_FLAG_4 (decl) = 0;
5098 TREE_LANG_FLAG_5 (decl) = 0;
5099 TREE_LANG_FLAG_6 (decl) = 0;
5100
5101 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5102 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5103 if (TREE_CODE (decl) == FIELD_DECL)
5104 {
5105 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5106 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5107 DECL_QUALIFIER (decl) = NULL_TREE;
5108 }
5109
5110 if (TREE_CODE (decl) == FUNCTION_DECL)
5111 {
5112 struct cgraph_node *node;
5113 if (!(node = cgraph_node::get (decl))
5114 || (!node->definition && !node->clones))
5115 {
5116 if (node)
5117 node->release_body ();
5118 else
5119 {
5120 release_function_body (decl);
5121 DECL_ARGUMENTS (decl) = NULL;
5122 DECL_RESULT (decl) = NULL;
5123 DECL_INITIAL (decl) = error_mark_node;
5124 }
5125 }
5126 if (gimple_has_body_p (decl))
5127 {
5128 tree t;
5129
5130 /* If DECL has a gimple body, then the context for its
5131 arguments must be DECL. Otherwise, it doesn't really
5132 matter, as we will not be emitting any code for DECL. In
5133 general, there may be other instances of DECL created by
5134 the front end and since PARM_DECLs are generally shared,
5135 their DECL_CONTEXT changes as the replicas of DECL are
5136 created. The only time where DECL_CONTEXT is important
5137 is for the FUNCTION_DECLs that have a gimple body (since
5138 the PARM_DECL will be used in the function's body). */
5139 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5140 DECL_CONTEXT (t) = decl;
5141 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5142 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5143 = target_option_default_node;
5144 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5145 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5146 = optimization_default_node;
5147 }
5148
5149 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5150 At this point, it is not needed anymore. */
5151 DECL_SAVED_TREE (decl) = NULL_TREE;
5152
5153 /* Clear the abstract origin if it refers to a method. Otherwise
5154 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5155 origin will not be output correctly. */
5156 if (DECL_ABSTRACT_ORIGIN (decl)
5157 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5158 && RECORD_OR_UNION_TYPE_P
5159 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5160 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5161
5162 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5163 DECL_VINDEX referring to itself into a vtable slot number as it
5164 should. Happens with functions that are copied and then forgotten
5165 about. Just clear it, it won't matter anymore. */
5166 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5167 DECL_VINDEX (decl) = NULL_TREE;
5168 }
5169 else if (TREE_CODE (decl) == VAR_DECL)
5170 {
5171 if ((DECL_EXTERNAL (decl)
5172 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5173 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5174 DECL_INITIAL (decl) = NULL_TREE;
5175 }
5176 else if (TREE_CODE (decl) == TYPE_DECL
5177 || TREE_CODE (decl) == FIELD_DECL)
5178 DECL_INITIAL (decl) = NULL_TREE;
5179 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5180 && DECL_INITIAL (decl)
5181 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5182 {
5183 /* Strip builtins from the translation-unit BLOCK. We still have targets
5184 without builtin_decl_explicit support and also builtins are shared
5185 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5186 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5187 while (*nextp)
5188 {
5189 tree var = *nextp;
5190 if (TREE_CODE (var) == FUNCTION_DECL
5191 && DECL_BUILT_IN (var))
5192 *nextp = TREE_CHAIN (var);
5193 else
5194 nextp = &TREE_CHAIN (var);
5195 }
5196 }
5197 }
5198
5199
5200 /* Data used when collecting DECLs and TYPEs for language data removal. */
5201
5202 struct free_lang_data_d
5203 {
5204 /* Worklist to avoid excessive recursion. */
5205 vec<tree> worklist;
5206
5207 /* Set of traversed objects. Used to avoid duplicate visits. */
5208 hash_set<tree> *pset;
5209
5210 /* Array of symbols to process with free_lang_data_in_decl. */
5211 vec<tree> decls;
5212
5213 /* Array of types to process with free_lang_data_in_type. */
5214 vec<tree> types;
5215 };
5216
5217
5218 /* Save all language fields needed to generate proper debug information
5219 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5220
5221 static void
5222 save_debug_info_for_decl (tree t)
5223 {
5224 /*struct saved_debug_info_d *sdi;*/
5225
5226 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5227
5228 /* FIXME. Partial implementation for saving debug info removed. */
5229 }
5230
5231
5232 /* Save all language fields needed to generate proper debug information
5233 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5234
5235 static void
5236 save_debug_info_for_type (tree t)
5237 {
5238 /*struct saved_debug_info_d *sdi;*/
5239
5240 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5241
5242 /* FIXME. Partial implementation for saving debug info removed. */
5243 }
5244
5245
5246 /* Add type or decl T to one of the list of tree nodes that need their
5247 language data removed. The lists are held inside FLD. */
5248
5249 static void
5250 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5251 {
5252 if (DECL_P (t))
5253 {
5254 fld->decls.safe_push (t);
5255 if (debug_info_level > DINFO_LEVEL_TERSE)
5256 save_debug_info_for_decl (t);
5257 }
5258 else if (TYPE_P (t))
5259 {
5260 fld->types.safe_push (t);
5261 if (debug_info_level > DINFO_LEVEL_TERSE)
5262 save_debug_info_for_type (t);
5263 }
5264 else
5265 gcc_unreachable ();
5266 }
5267
5268 /* Push tree node T into FLD->WORKLIST. */
5269
5270 static inline void
5271 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5272 {
5273 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5274 fld->worklist.safe_push ((t));
5275 }
5276
5277
5278 /* Operand callback helper for free_lang_data_in_node. *TP is the
5279 subtree operand being considered. */
5280
5281 static tree
5282 find_decls_types_r (tree *tp, int *ws, void *data)
5283 {
5284 tree t = *tp;
5285 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5286
5287 if (TREE_CODE (t) == TREE_LIST)
5288 return NULL_TREE;
5289
5290 /* Language specific nodes will be removed, so there is no need
5291 to gather anything under them. */
5292 if (is_lang_specific (t))
5293 {
5294 *ws = 0;
5295 return NULL_TREE;
5296 }
5297
5298 if (DECL_P (t))
5299 {
5300 /* Note that walk_tree does not traverse every possible field in
5301 decls, so we have to do our own traversals here. */
5302 add_tree_to_fld_list (t, fld);
5303
5304 fld_worklist_push (DECL_NAME (t), fld);
5305 fld_worklist_push (DECL_CONTEXT (t), fld);
5306 fld_worklist_push (DECL_SIZE (t), fld);
5307 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5308
5309 /* We are going to remove everything under DECL_INITIAL for
5310 TYPE_DECLs. No point walking them. */
5311 if (TREE_CODE (t) != TYPE_DECL)
5312 fld_worklist_push (DECL_INITIAL (t), fld);
5313
5314 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5315 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5316
5317 if (TREE_CODE (t) == FUNCTION_DECL)
5318 {
5319 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5320 fld_worklist_push (DECL_RESULT (t), fld);
5321 }
5322 else if (TREE_CODE (t) == TYPE_DECL)
5323 {
5324 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5325 }
5326 else if (TREE_CODE (t) == FIELD_DECL)
5327 {
5328 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5329 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5330 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5331 fld_worklist_push (DECL_FCONTEXT (t), fld);
5332 }
5333
5334 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5335 && DECL_HAS_VALUE_EXPR_P (t))
5336 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5337
5338 if (TREE_CODE (t) != FIELD_DECL
5339 && TREE_CODE (t) != TYPE_DECL)
5340 fld_worklist_push (TREE_CHAIN (t), fld);
5341 *ws = 0;
5342 }
5343 else if (TYPE_P (t))
5344 {
5345 /* Note that walk_tree does not traverse every possible field in
5346 types, so we have to do our own traversals here. */
5347 add_tree_to_fld_list (t, fld);
5348
5349 if (!RECORD_OR_UNION_TYPE_P (t))
5350 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5351 fld_worklist_push (TYPE_SIZE (t), fld);
5352 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5353 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5354 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5355 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5356 fld_worklist_push (TYPE_NAME (t), fld);
5357 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5358 them and thus do not want to reach unused pointer types
5359 this way. */
5360 if (!POINTER_TYPE_P (t))
5361 fld_worklist_push (TYPE_MINVAL (t), fld);
5362 if (!RECORD_OR_UNION_TYPE_P (t))
5363 fld_worklist_push (TYPE_MAXVAL (t), fld);
5364 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5365 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5366 do not want to reach unused variants this way. */
5367 if (TYPE_CONTEXT (t))
5368 {
5369 tree ctx = TYPE_CONTEXT (t);
5370 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5371 So push that instead. */
5372 while (ctx && TREE_CODE (ctx) == BLOCK)
5373 ctx = BLOCK_SUPERCONTEXT (ctx);
5374 fld_worklist_push (ctx, fld);
5375 }
5376 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5377 want to reach unused types this way. */
5378
5379 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5380 {
5381 unsigned i;
5382 tree tem;
5383 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5384 fld_worklist_push (TREE_TYPE (tem), fld);
5385 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5386 if (tem
5387 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5388 && TREE_CODE (tem) == TREE_LIST)
5389 do
5390 {
5391 fld_worklist_push (TREE_VALUE (tem), fld);
5392 tem = TREE_CHAIN (tem);
5393 }
5394 while (tem);
5395 }
5396 if (RECORD_OR_UNION_TYPE_P (t))
5397 {
5398 tree tem;
5399 /* Push all TYPE_FIELDS - there can be interleaving interesting
5400 and non-interesting things. */
5401 tem = TYPE_FIELDS (t);
5402 while (tem)
5403 {
5404 if (TREE_CODE (tem) == FIELD_DECL
5405 || TREE_CODE (tem) == TYPE_DECL)
5406 fld_worklist_push (tem, fld);
5407 tem = TREE_CHAIN (tem);
5408 }
5409 }
5410
5411 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5412 *ws = 0;
5413 }
5414 else if (TREE_CODE (t) == BLOCK)
5415 {
5416 tree tem;
5417 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5418 fld_worklist_push (tem, fld);
5419 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5420 fld_worklist_push (tem, fld);
5421 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5422 }
5423
5424 if (TREE_CODE (t) != IDENTIFIER_NODE
5425 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5426 fld_worklist_push (TREE_TYPE (t), fld);
5427
5428 return NULL_TREE;
5429 }
5430
5431
5432 /* Find decls and types in T. */
5433
5434 static void
5435 find_decls_types (tree t, struct free_lang_data_d *fld)
5436 {
5437 while (1)
5438 {
5439 if (!fld->pset->contains (t))
5440 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5441 if (fld->worklist.is_empty ())
5442 break;
5443 t = fld->worklist.pop ();
5444 }
5445 }
5446
5447 /* Translate all the types in LIST with the corresponding runtime
5448 types. */
5449
5450 static tree
5451 get_eh_types_for_runtime (tree list)
5452 {
5453 tree head, prev;
5454
5455 if (list == NULL_TREE)
5456 return NULL_TREE;
5457
5458 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5459 prev = head;
5460 list = TREE_CHAIN (list);
5461 while (list)
5462 {
5463 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5464 TREE_CHAIN (prev) = n;
5465 prev = TREE_CHAIN (prev);
5466 list = TREE_CHAIN (list);
5467 }
5468
5469 return head;
5470 }
5471
5472
5473 /* Find decls and types referenced in EH region R and store them in
5474 FLD->DECLS and FLD->TYPES. */
5475
5476 static void
5477 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5478 {
5479 switch (r->type)
5480 {
5481 case ERT_CLEANUP:
5482 break;
5483
5484 case ERT_TRY:
5485 {
5486 eh_catch c;
5487
5488 /* The types referenced in each catch must first be changed to the
5489 EH types used at runtime. This removes references to FE types
5490 in the region. */
5491 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5492 {
5493 c->type_list = get_eh_types_for_runtime (c->type_list);
5494 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5495 }
5496 }
5497 break;
5498
5499 case ERT_ALLOWED_EXCEPTIONS:
5500 r->u.allowed.type_list
5501 = get_eh_types_for_runtime (r->u.allowed.type_list);
5502 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5503 break;
5504
5505 case ERT_MUST_NOT_THROW:
5506 walk_tree (&r->u.must_not_throw.failure_decl,
5507 find_decls_types_r, fld, fld->pset);
5508 break;
5509 }
5510 }
5511
5512
5513 /* Find decls and types referenced in cgraph node N and store them in
5514 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5515 look for *every* kind of DECL and TYPE node reachable from N,
5516 including those embedded inside types and decls (i.e., TYPE_DECLs,
5517 NAMESPACE_DECLs, etc). */
5518
5519 static void
5520 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5521 {
5522 basic_block bb;
5523 struct function *fn;
5524 unsigned ix;
5525 tree t;
5526
5527 find_decls_types (n->decl, fld);
5528
5529 if (!gimple_has_body_p (n->decl))
5530 return;
5531
5532 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5533
5534 fn = DECL_STRUCT_FUNCTION (n->decl);
5535
5536 /* Traverse locals. */
5537 FOR_EACH_LOCAL_DECL (fn, ix, t)
5538 find_decls_types (t, fld);
5539
5540 /* Traverse EH regions in FN. */
5541 {
5542 eh_region r;
5543 FOR_ALL_EH_REGION_FN (r, fn)
5544 find_decls_types_in_eh_region (r, fld);
5545 }
5546
5547 /* Traverse every statement in FN. */
5548 FOR_EACH_BB_FN (bb, fn)
5549 {
5550 gphi_iterator psi;
5551 gimple_stmt_iterator si;
5552 unsigned i;
5553
5554 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5555 {
5556 gphi *phi = psi.phi ();
5557
5558 for (i = 0; i < gimple_phi_num_args (phi); i++)
5559 {
5560 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5561 find_decls_types (*arg_p, fld);
5562 }
5563 }
5564
5565 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5566 {
5567 gimple stmt = gsi_stmt (si);
5568
5569 if (is_gimple_call (stmt))
5570 find_decls_types (gimple_call_fntype (stmt), fld);
5571
5572 for (i = 0; i < gimple_num_ops (stmt); i++)
5573 {
5574 tree arg = gimple_op (stmt, i);
5575 find_decls_types (arg, fld);
5576 }
5577 }
5578 }
5579 }
5580
5581
5582 /* Find decls and types referenced in varpool node N and store them in
5583 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5584 look for *every* kind of DECL and TYPE node reachable from N,
5585 including those embedded inside types and decls (i.e., TYPE_DECLs,
5586 NAMESPACE_DECLs, etc). */
5587
5588 static void
5589 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5590 {
5591 find_decls_types (v->decl, fld);
5592 }
5593
5594 /* If T needs an assembler name, have one created for it. */
5595
5596 void
5597 assign_assembler_name_if_neeeded (tree t)
5598 {
5599 if (need_assembler_name_p (t))
5600 {
5601 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5602 diagnostics that use input_location to show locus
5603 information. The problem here is that, at this point,
5604 input_location is generally anchored to the end of the file
5605 (since the parser is long gone), so we don't have a good
5606 position to pin it to.
5607
5608 To alleviate this problem, this uses the location of T's
5609 declaration. Examples of this are
5610 testsuite/g++.dg/template/cond2.C and
5611 testsuite/g++.dg/template/pr35240.C. */
5612 location_t saved_location = input_location;
5613 input_location = DECL_SOURCE_LOCATION (t);
5614
5615 decl_assembler_name (t);
5616
5617 input_location = saved_location;
5618 }
5619 }
5620
5621
5622 /* Free language specific information for every operand and expression
5623 in every node of the call graph. This process operates in three stages:
5624
5625 1- Every callgraph node and varpool node is traversed looking for
5626 decls and types embedded in them. This is a more exhaustive
5627 search than that done by find_referenced_vars, because it will
5628 also collect individual fields, decls embedded in types, etc.
5629
5630 2- All the decls found are sent to free_lang_data_in_decl.
5631
5632 3- All the types found are sent to free_lang_data_in_type.
5633
5634 The ordering between decls and types is important because
5635 free_lang_data_in_decl sets assembler names, which includes
5636 mangling. So types cannot be freed up until assembler names have
5637 been set up. */
5638
5639 static void
5640 free_lang_data_in_cgraph (void)
5641 {
5642 struct cgraph_node *n;
5643 varpool_node *v;
5644 struct free_lang_data_d fld;
5645 tree t;
5646 unsigned i;
5647 alias_pair *p;
5648
5649 /* Initialize sets and arrays to store referenced decls and types. */
5650 fld.pset = new hash_set<tree>;
5651 fld.worklist.create (0);
5652 fld.decls.create (100);
5653 fld.types.create (100);
5654
5655 /* Find decls and types in the body of every function in the callgraph. */
5656 FOR_EACH_FUNCTION (n)
5657 find_decls_types_in_node (n, &fld);
5658
5659 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5660 find_decls_types (p->decl, &fld);
5661
5662 /* Find decls and types in every varpool symbol. */
5663 FOR_EACH_VARIABLE (v)
5664 find_decls_types_in_var (v, &fld);
5665
5666 /* Set the assembler name on every decl found. We need to do this
5667 now because free_lang_data_in_decl will invalidate data needed
5668 for mangling. This breaks mangling on interdependent decls. */
5669 FOR_EACH_VEC_ELT (fld.decls, i, t)
5670 assign_assembler_name_if_neeeded (t);
5671
5672 /* Traverse every decl found freeing its language data. */
5673 FOR_EACH_VEC_ELT (fld.decls, i, t)
5674 free_lang_data_in_decl (t);
5675
5676 /* Traverse every type found freeing its language data. */
5677 FOR_EACH_VEC_ELT (fld.types, i, t)
5678 free_lang_data_in_type (t);
5679
5680 delete fld.pset;
5681 fld.worklist.release ();
5682 fld.decls.release ();
5683 fld.types.release ();
5684 }
5685
5686
5687 /* Free resources used by the front end that are no longer needed once it is done. */
5688
5689 static unsigned
5690 free_lang_data (void)
5691 {
5692 unsigned i;
5693
5694 /* If we are the LTO frontend we have freed lang-specific data already. */
5695 if (in_lto_p
5696 || !flag_generate_lto)
5697 return 0;
5698
5699 /* Allocate and assign alias sets to the standard integer types
5700 while the slots are still in the way the frontends generated them. */
5701 for (i = 0; i < itk_none; ++i)
5702 if (integer_types[i])
5703 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5704
5705 /* Traverse the IL resetting language specific information for
5706 operands, expressions, etc. */
5707 free_lang_data_in_cgraph ();
5708
5709 /* Create gimple variants for common types. */
5710 ptrdiff_type_node = integer_type_node;
5711 fileptr_type_node = ptr_type_node;
5712
5713 /* Reset some langhooks. Do not reset types_compatible_p, it may
5714 still be used indirectly via the get_alias_set langhook. */
5715 lang_hooks.dwarf_name = lhd_dwarf_name;
5716 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5717 /* We do not want the default decl_assembler_name implementation,
5718 rather if we have fixed everything we want a wrapper around it
5719 asserting that all non-local symbols already got their assembler
5720 name and only produce assembler names for local symbols. Or rather
5721 make sure we never call decl_assembler_name on local symbols and
5722 devise a separate, middle-end private scheme for it. */
5723
5724 /* Reset diagnostic machinery. */
5725 tree_diagnostics_defaults (global_dc);
5726
5727 return 0;
5728 }
5729
5730
5731 namespace {
5732
5733 const pass_data pass_data_ipa_free_lang_data =
5734 {
5735 SIMPLE_IPA_PASS, /* type */
5736 "*free_lang_data", /* name */
5737 OPTGROUP_NONE, /* optinfo_flags */
5738 TV_IPA_FREE_LANG_DATA, /* tv_id */
5739 0, /* properties_required */
5740 0, /* properties_provided */
5741 0, /* properties_destroyed */
5742 0, /* todo_flags_start */
5743 0, /* todo_flags_finish */
5744 };
5745
5746 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5747 {
5748 public:
5749 pass_ipa_free_lang_data (gcc::context *ctxt)
5750 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5751 {}
5752
5753 /* opt_pass methods: */
5754 virtual unsigned int execute (function *) { return free_lang_data (); }
5755
5756 }; // class pass_ipa_free_lang_data
5757
5758 } // anon namespace
5759
5760 simple_ipa_opt_pass *
5761 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5762 {
5763 return new pass_ipa_free_lang_data (ctxt);
5764 }
5765
5766 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5767 ATTR_NAME. Also used internally by remove_attribute(). */
5768 bool
5769 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5770 {
5771 size_t ident_len = IDENTIFIER_LENGTH (ident);
5772
5773 if (ident_len == attr_len)
5774 {
5775 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5776 return true;
5777 }
5778 else if (ident_len == attr_len + 4)
5779 {
5780 /* There is the possibility that ATTR is 'text' and IDENT is
5781 '__text__'. */
5782 const char *p = IDENTIFIER_POINTER (ident);
5783 if (p[0] == '_' && p[1] == '_'
5784 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5785 && strncmp (attr_name, p + 2, attr_len) == 0)
5786 return true;
5787 }
5788
5789 return false;
5790 }
5791
5792 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5793 of ATTR_NAME, and LIST is not NULL_TREE. */
5794 tree
5795 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5796 {
5797 while (list)
5798 {
5799 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5800
5801 if (ident_len == attr_len)
5802 {
5803 if (!strcmp (attr_name,
5804 IDENTIFIER_POINTER (get_attribute_name (list))))
5805 break;
5806 }
5807 /* TODO: If we made sure that attributes were stored in the
5808 canonical form without '__...__' (i.e., as in 'text' as opposed
5809 to '__text__') then we could avoid the following case. */
5810 else if (ident_len == attr_len + 4)
5811 {
5812 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5813 if (p[0] == '_' && p[1] == '_'
5814 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5815 && strncmp (attr_name, p + 2, attr_len) == 0)
5816 break;
5817 }
5818 list = TREE_CHAIN (list);
5819 }
5820
5821 return list;
5822 }
5823
5824 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5825 return a pointer to the first list element whose attribute name
5826 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5827 '__text__'). */
5828
5829 tree
5830 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5831 tree list)
5832 {
5833 while (list)
5834 {
5835 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5836
5837 if (attr_len > ident_len)
5838 {
5839 list = TREE_CHAIN (list);
5840 continue;
5841 }
5842
5843 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5844
5845 if (strncmp (attr_name, p, attr_len) == 0)
5846 break;
5847
5848 /* TODO: If we made sure that attributes were stored in the
5849 canonical form without '__...__' (i.e., as in 'text' as opposed
5850 to '__text__') then we could avoid the following case. */
5851 if (p[0] == '_' && p[1] == '_'
5852 && strncmp (attr_name, p + 2, attr_len) == 0)
5853 break;
5854
5855 list = TREE_CHAIN (list);
5856 }
5857
5858 return list;
5859 }
5860
5861
5862 /* A variant of lookup_attribute() that can be used with an identifier
5863 as the first argument, and where the identifier can be either
5864 'text' or '__text__'.
5865
5866 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5867 return a pointer to the attribute's list element if the attribute
5868 is part of the list, or NULL_TREE if not found. If the attribute
5869 appears more than once, this only returns the first occurrence; the
5870 TREE_CHAIN of the return value should be passed back in if further
5871 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5872 can be in the form 'text' or '__text__'. */
5873 static tree
5874 lookup_ident_attribute (tree attr_identifier, tree list)
5875 {
5876 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5877
5878 while (list)
5879 {
5880 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5881 == IDENTIFIER_NODE);
5882
5883 /* Identifiers can be compared directly for equality. */
5884 if (attr_identifier == get_attribute_name (list))
5885 break;
5886
5887 /* If they are not equal, they may still be one in the form
5888 'text' while the other one is in the form '__text__'. TODO:
5889 If we were storing attributes in normalized 'text' form, then
5890 this could all go away and we could take full advantage of
5891 the fact that we're comparing identifiers. :-) */
5892 {
5893 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5894 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5895
5896 if (ident_len == attr_len + 4)
5897 {
5898 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5899 const char *q = IDENTIFIER_POINTER (attr_identifier);
5900 if (p[0] == '_' && p[1] == '_'
5901 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5902 && strncmp (q, p + 2, attr_len) == 0)
5903 break;
5904 }
5905 else if (ident_len + 4 == attr_len)
5906 {
5907 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5908 const char *q = IDENTIFIER_POINTER (attr_identifier);
5909 if (q[0] == '_' && q[1] == '_'
5910 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5911 && strncmp (q + 2, p, ident_len) == 0)
5912 break;
5913 }
5914 }
5915 list = TREE_CHAIN (list);
5916 }
5917
5918 return list;
5919 }
5920
5921 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5922 modified list. */
5923
5924 tree
5925 remove_attribute (const char *attr_name, tree list)
5926 {
5927 tree *p;
5928 size_t attr_len = strlen (attr_name);
5929
5930 gcc_checking_assert (attr_name[0] != '_');
5931
5932 for (p = &list; *p; )
5933 {
5934 tree l = *p;
5935 /* TODO: If we were storing attributes in normalized form, here
5936 we could use a simple strcmp(). */
5937 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5938 *p = TREE_CHAIN (l);
5939 else
5940 p = &TREE_CHAIN (l);
5941 }
5942
5943 return list;
5944 }
5945
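/* Illustrative sketch, not part of GCC proper: remove_attribute hands back
   the possibly shortened list, so the caller must store the result.  The
   example_drop_deprecated helper is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_drop_deprecated (tree decl)
{
  /* Strip every "deprecated" attribute from DECL's attribute list.  */
  DECL_ATTRIBUTES (decl)
    = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));
}
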
5946 /* Return an attribute list that is the union of a1 and a2. */
5947
5948 tree
5949 merge_attributes (tree a1, tree a2)
5950 {
5951 tree attributes;
5952
5953 /* Either one unset? Take the set one. */
5954
5955 if ((attributes = a1) == 0)
5956 attributes = a2;
5957
5958 /* One that completely contains the other? Take it. */
5959
5960 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5961 {
5962 if (attribute_list_contained (a2, a1))
5963 attributes = a2;
5964 else
5965 {
5966 /* Pick the longest list, and hang on the other list. */
5967
5968 if (list_length (a1) < list_length (a2))
5969 attributes = a2, a2 = a1;
5970
5971 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5972 {
5973 tree a;
5974 for (a = lookup_ident_attribute (get_attribute_name (a2),
5975 attributes);
5976 a != NULL_TREE && !attribute_value_equal (a, a2);
5977 a = lookup_ident_attribute (get_attribute_name (a2),
5978 TREE_CHAIN (a)))
5979 ;
5980 if (a == NULL_TREE)
5981 {
5982 a1 = copy_node (a2);
5983 TREE_CHAIN (a1) = attributes;
5984 attributes = a1;
5985 }
5986 }
5987 }
5988 }
5989 return attributes;
5990 }
5991
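/* Illustrative sketch, not part of GCC proper: merging two hand-built
   attribute lists.  Attribute lists are TREE_LIST chains whose TREE_PURPOSE
   is the attribute name and whose TREE_VALUE is the argument list; the
   example_merge_packed_aligned helper is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_merge_packed_aligned (void)
{
  tree a1 = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
  tree a2 = tree_cons (get_identifier ("aligned"),
                       build_tree_list (NULL_TREE, size_int (8)),
                       NULL_TREE);
  /* The result carries both "packed" and "aligned (8)"; an attribute that
     is already present with an equal value is not duplicated.  */
  return merge_attributes (a1, a2);
}
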
5992 /* Given types T1 and T2, merge their attributes and return
5993 the result. */
5994
5995 tree
5996 merge_type_attributes (tree t1, tree t2)
5997 {
5998 return merge_attributes (TYPE_ATTRIBUTES (t1),
5999 TYPE_ATTRIBUTES (t2));
6000 }
6001
6002 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6003 the result. */
6004
6005 tree
6006 merge_decl_attributes (tree olddecl, tree newdecl)
6007 {
6008 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6009 DECL_ATTRIBUTES (newdecl));
6010 }
6011
6012 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6013
6014 /* Specialization of merge_decl_attributes for various Windows targets.
6015
6016 This handles the following situation:
6017
6018 __declspec (dllimport) int foo;
6019 int foo;
6020
6021 The second instance of `foo' nullifies the dllimport. */
6022
6023 tree
6024 merge_dllimport_decl_attributes (tree old, tree new_tree)
6025 {
6026 tree a;
6027 int delete_dllimport_p = 1;
6028
6029 /* What we need to do here is remove from `old' dllimport if it doesn't
6030 appear in `new'. dllimport behaves like extern: if a declaration is
6031 marked dllimport and a definition appears later, then the object
6032 is not dllimport'd. We also remove a `new' dllimport if the old list
6033 contains dllexport: dllexport always overrides dllimport, regardless
6034 of the order of declaration. */
6035 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6036 delete_dllimport_p = 0;
6037 else if (DECL_DLLIMPORT_P (new_tree)
6038 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6039 {
6040 DECL_DLLIMPORT_P (new_tree) = 0;
6041 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6042 "dllimport ignored", new_tree);
6043 }
6044 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6045 {
6046 /* Warn about overriding a symbol that has already been used, e.g.:
6047 extern int __attribute__ ((dllimport)) foo;
6048 int* bar () {return &foo;}
6049 int foo;
6050 */
6051 if (TREE_USED (old))
6052 {
6053 warning (0, "%q+D redeclared without dllimport attribute "
6054 "after being referenced with dll linkage", new_tree);
6055 /* If we have used a variable's address with dllimport linkage,
6056 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6057 decl may already have had TREE_CONSTANT computed.
6058 We still remove the attribute so that assembler code refers
6059 to '&foo' rather than '_imp__foo'. */
6060 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6061 DECL_DLLIMPORT_P (new_tree) = 1;
6062 }
6063
6064 /* Let an inline definition silently override the external reference,
6065 but otherwise warn about attribute inconsistency. */
6066 else if (TREE_CODE (new_tree) == VAR_DECL
6067 || !DECL_DECLARED_INLINE_P (new_tree))
6068 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6069 "previous dllimport ignored", new_tree);
6070 }
6071 else
6072 delete_dllimport_p = 0;
6073
6074 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6075
6076 if (delete_dllimport_p)
6077 a = remove_attribute ("dllimport", a);
6078
6079 return a;
6080 }
6081
6082 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6083 struct attribute_spec.handler. */
6084
6085 tree
6086 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6087 bool *no_add_attrs)
6088 {
6089 tree node = *pnode;
6090 bool is_dllimport;
6091
6092 /* These attributes may apply to structure and union types being created,
6093 but otherwise should pass to the declaration involved. */
6094 if (!DECL_P (node))
6095 {
6096 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6097 | (int) ATTR_FLAG_ARRAY_NEXT))
6098 {
6099 *no_add_attrs = true;
6100 return tree_cons (name, args, NULL_TREE);
6101 }
6102 if (TREE_CODE (node) == RECORD_TYPE
6103 || TREE_CODE (node) == UNION_TYPE)
6104 {
6105 node = TYPE_NAME (node);
6106 if (!node)
6107 return NULL_TREE;
6108 }
6109 else
6110 {
6111 warning (OPT_Wattributes, "%qE attribute ignored",
6112 name);
6113 *no_add_attrs = true;
6114 return NULL_TREE;
6115 }
6116 }
6117
6118 if (TREE_CODE (node) != FUNCTION_DECL
6119 && TREE_CODE (node) != VAR_DECL
6120 && TREE_CODE (node) != TYPE_DECL)
6121 {
6122 *no_add_attrs = true;
6123 warning (OPT_Wattributes, "%qE attribute ignored",
6124 name);
6125 return NULL_TREE;
6126 }
6127
6128 if (TREE_CODE (node) == TYPE_DECL
6129 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6130 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6131 {
6132 *no_add_attrs = true;
6133 warning (OPT_Wattributes, "%qE attribute ignored",
6134 name);
6135 return NULL_TREE;
6136 }
6137
6138 is_dllimport = is_attribute_p ("dllimport", name);
6139
6140 /* Report error on dllimport ambiguities seen now before they cause
6141 any damage. */
6142 if (is_dllimport)
6143 {
6144 /* Honor any target-specific overrides. */
6145 if (!targetm.valid_dllimport_attribute_p (node))
6146 *no_add_attrs = true;
6147
6148 else if (TREE_CODE (node) == FUNCTION_DECL
6149 && DECL_DECLARED_INLINE_P (node))
6150 {
6151 warning (OPT_Wattributes, "inline function %q+D declared as "
6152 " dllimport: attribute ignored", node);
6153 *no_add_attrs = true;
6154 }
6155 /* Like MS, treat definition of dllimported variables and
6156 non-inlined functions on declaration as syntax errors. */
6157 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6158 {
6159 error ("function %q+D definition is marked dllimport", node);
6160 *no_add_attrs = true;
6161 }
6162
6163 else if (TREE_CODE (node) == VAR_DECL)
6164 {
6165 if (DECL_INITIAL (node))
6166 {
6167 error ("variable %q+D definition is marked dllimport",
6168 node);
6169 *no_add_attrs = true;
6170 }
6171
6172 /* `extern' needn't be specified with dllimport.
6173 Specify `extern' now and hope for the best. Sigh. */
6174 DECL_EXTERNAL (node) = 1;
6175 /* Also, implicitly give global scope to dllimport'd variables
6176 declared within a function, unless they are declared static. */
6177 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6178 TREE_PUBLIC (node) = 1;
6179 }
6180
6181 if (*no_add_attrs == false)
6182 DECL_DLLIMPORT_P (node) = 1;
6183 }
6184 else if (TREE_CODE (node) == FUNCTION_DECL
6185 && DECL_DECLARED_INLINE_P (node)
6186 && flag_keep_inline_dllexport)
6187 /* An exported function, even if inline, must be emitted. */
6188 DECL_EXTERNAL (node) = 0;
6189
6190 /* Report error if symbol is not accessible at global scope. */
6191 if (!TREE_PUBLIC (node)
6192 && (TREE_CODE (node) == VAR_DECL
6193 || TREE_CODE (node) == FUNCTION_DECL))
6194 {
6195 error ("external linkage required for symbol %q+D because of "
6196 "%qE attribute", node, name);
6197 *no_add_attrs = true;
6198 }
6199
6200 /* A dllexport'd entity must have default visibility so that other
6201 program units (shared libraries or the main executable) can see
6202 it. A dllimport'd entity must have default visibility so that
6203 the linker knows that undefined references within this program
6204 unit can be resolved by the dynamic linker. */
6205 if (!*no_add_attrs)
6206 {
6207 if (DECL_VISIBILITY_SPECIFIED (node)
6208 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6209 error ("%qE implies default visibility, but %qD has already "
6210 "been declared with a different visibility",
6211 name, node);
6212 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6213 DECL_VISIBILITY_SPECIFIED (node) = 1;
6214 }
6215
6216 return NULL_TREE;
6217 }
6218
6219 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6220 \f
6221 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6222 of the various TYPE_QUAL values. */
6223
6224 static void
6225 set_type_quals (tree type, int type_quals)
6226 {
6227 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6228 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6229 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6230 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6231 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6232 }
6233
6234 /* Returns true iff unqualified CAND and BASE are equivalent. */
6235
6236 bool
6237 check_base_type (const_tree cand, const_tree base)
6238 {
6239 return (TYPE_NAME (cand) == TYPE_NAME (base)
6240 /* Apparently this is needed for Objective-C. */
6241 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6242 /* Check alignment. */
6243 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6244 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6245 TYPE_ATTRIBUTES (base)));
6246 }
6247
6248 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6249
6250 bool
6251 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6252 {
6253 return (TYPE_QUALS (cand) == type_quals
6254 && check_base_type (cand, base));
6255 }
6256
6257 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6258
6259 static bool
6260 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6261 {
6262 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6263 && TYPE_NAME (cand) == TYPE_NAME (base)
6264 /* Apparently this is needed for Objective-C. */
6265 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6266 /* Check alignment. */
6267 && TYPE_ALIGN (cand) == align
6268 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6269 TYPE_ATTRIBUTES (base)));
6270 }
6271
6272 /* This function checks whether TYPE matches the size of one of the
6273 built-in atomic types, and if so returns that core atomic type. */
6274
6275 static tree
6276 find_atomic_core_type (tree type)
6277 {
6278 tree base_atomic_type;
6279
6280 /* Only handle complete types. */
6281 if (TYPE_SIZE (type) == NULL_TREE)
6282 return NULL_TREE;
6283
6284 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6285 switch (type_size)
6286 {
6287 case 8:
6288 base_atomic_type = atomicQI_type_node;
6289 break;
6290
6291 case 16:
6292 base_atomic_type = atomicHI_type_node;
6293 break;
6294
6295 case 32:
6296 base_atomic_type = atomicSI_type_node;
6297 break;
6298
6299 case 64:
6300 base_atomic_type = atomicDI_type_node;
6301 break;
6302
6303 case 128:
6304 base_atomic_type = atomicTI_type_node;
6305 break;
6306
6307 default:
6308 base_atomic_type = NULL_TREE;
6309 }
6310
6311 return base_atomic_type;
6312 }
6313
6314 /* Return a version of the TYPE, qualified as indicated by the
6315 TYPE_QUALS, if one exists. If no qualified version exists yet,
6316 return NULL_TREE. */
6317
6318 tree
6319 get_qualified_type (tree type, int type_quals)
6320 {
6321 tree t;
6322
6323 if (TYPE_QUALS (type) == type_quals)
6324 return type;
6325
6326 /* Search the chain of variants to see if there is already one there just
6327 like the one we need to have. If so, use that existing one. We must
6328 preserve the TYPE_NAME, since there is code that depends on this. */
6329 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6330 if (check_qualified_type (t, type, type_quals))
6331 return t;
6332
6333 return NULL_TREE;
6334 }
6335
6336 /* Like get_qualified_type, but creates the type if it does not
6337 exist. This function never returns NULL_TREE. */
6338
6339 tree
6340 build_qualified_type (tree type, int type_quals)
6341 {
6342 tree t;
6343
6344 /* See if we already have the appropriate qualified variant. */
6345 t = get_qualified_type (type, type_quals);
6346
6347 /* If not, build it. */
6348 if (!t)
6349 {
6350 t = build_variant_type_copy (type);
6351 set_type_quals (t, type_quals);
6352
6353 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6354 {
6355 /* See if this object can map to a basic atomic type. */
6356 tree atomic_type = find_atomic_core_type (type);
6357 if (atomic_type)
6358 {
6359 /* Ensure the alignment of this type is compatible with
6360 the required alignment of the atomic type. */
6361 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6362 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6363 }
6364 }
6365
6366 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6367 /* Propagate structural equality. */
6368 SET_TYPE_STRUCTURAL_EQUALITY (t);
6369 else if (TYPE_CANONICAL (type) != type)
6370 /* Build the underlying canonical type, since it is different
6371 from TYPE. */
6372 {
6373 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6374 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6375 }
6376 else
6377 /* T is its own canonical type. */
6378 TYPE_CANONICAL (t) = t;
6379
6380 }
6381
6382 return t;
6383 }
6384
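/* Illustrative sketch, not part of GCC proper: requesting a qualified
   variant.  The first call builds the variant; later calls with the same
   qualifiers find it again through get_qualified_type.  The
   example_const_volatile_int helper is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_const_volatile_int (void)
{
  tree cv_int = build_qualified_type (integer_type_node,
                                      TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
  /* The variant stays on the variant chain of the original type.  */
  gcc_checking_assert (TYPE_MAIN_VARIANT (cv_int)
                       == TYPE_MAIN_VARIANT (integer_type_node));
  return cv_int;
}
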
6385 /* Create a variant of type T with alignment ALIGN. */
6386
6387 tree
6388 build_aligned_type (tree type, unsigned int align)
6389 {
6390 tree t;
6391
6392 if (TYPE_PACKED (type)
6393 || TYPE_ALIGN (type) == align)
6394 return type;
6395
6396 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6397 if (check_aligned_type (t, type, align))
6398 return t;
6399
6400 t = build_variant_type_copy (type);
6401 TYPE_ALIGN (t) = align;
6402
6403 return t;
6404 }
6405
6406 /* Create a new distinct copy of TYPE. The new type is made its own
6407 MAIN_VARIANT. If TYPE requires structural equality checks, the
6408 resulting type requires structural equality checks; otherwise, its
6409 TYPE_CANONICAL points to itself. */
6410
6411 tree
6412 build_distinct_type_copy (tree type)
6413 {
6414 tree t = copy_node (type);
6415
6416 TYPE_POINTER_TO (t) = 0;
6417 TYPE_REFERENCE_TO (t) = 0;
6418
6419 /* Set the canonical type either to a new equivalence class, or
6420 propagate the need for structural equality checks. */
6421 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6422 SET_TYPE_STRUCTURAL_EQUALITY (t);
6423 else
6424 TYPE_CANONICAL (t) = t;
6425
6426 /* Make it its own variant. */
6427 TYPE_MAIN_VARIANT (t) = t;
6428 TYPE_NEXT_VARIANT (t) = 0;
6429
6430 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6431 whose TREE_TYPE is not t. This can also happen in the Ada
6432 frontend when using subtypes. */
6433
6434 return t;
6435 }
6436
6437 /* Create a new variant of TYPE, equivalent but distinct. This is so
6438 the caller can modify it. TYPE_CANONICAL for the return type will
6439 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6440 are considered equal by the language itself (or that both types
6441 require structural equality checks). */
6442
6443 tree
6444 build_variant_type_copy (tree type)
6445 {
6446 tree t, m = TYPE_MAIN_VARIANT (type);
6447
6448 t = build_distinct_type_copy (type);
6449
6450 /* Since we're building a variant, assume that it is a non-semantic
6451 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6452 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6453
6454 /* Add the new type to the chain of variants of TYPE. */
6455 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6456 TYPE_NEXT_VARIANT (m) = t;
6457 TYPE_MAIN_VARIANT (t) = m;
6458
6459 return t;
6460 }
6461 \f
6462 /* Return true if the from trees in both tree maps are equal. */
6463
6464 int
6465 tree_map_base_eq (const void *va, const void *vb)
6466 {
6467 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6468 *const b = (const struct tree_map_base *) vb;
6469 return (a->from == b->from);
6470 }
6471
6472 /* Hash a from tree in a tree_map_base. */
6473
6474 unsigned int
6475 tree_map_base_hash (const void *item)
6476 {
6477 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6478 }
6479
6480 /* Return true if this tree map structure is marked for garbage collection
6481 purposes. We simply return true if the from tree is marked, so that this
6482 structure goes away when the from tree goes away. */
6483
6484 int
6485 tree_map_base_marked_p (const void *p)
6486 {
6487 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6488 }
6489
6490 /* Hash a from tree in a tree_map. */
6491
6492 unsigned int
6493 tree_map_hash (const void *item)
6494 {
6495 return (((const struct tree_map *) item)->hash);
6496 }
6497
6498 /* Hash a from tree in a tree_decl_map. */
6499
6500 unsigned int
6501 tree_decl_map_hash (const void *item)
6502 {
6503 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6504 }
6505
6506 /* Return the initialization priority for DECL. */
6507
6508 priority_type
6509 decl_init_priority_lookup (tree decl)
6510 {
6511 symtab_node *snode = symtab_node::get (decl);
6512
6513 if (!snode)
6514 return DEFAULT_INIT_PRIORITY;
6515 return
6516 snode->get_init_priority ();
6517 }
6518
6519 /* Return the finalization priority for DECL. */
6520
6521 priority_type
6522 decl_fini_priority_lookup (tree decl)
6523 {
6524 cgraph_node *node = cgraph_node::get (decl);
6525
6526 if (!node)
6527 return DEFAULT_INIT_PRIORITY;
6528 return
6529 node->get_fini_priority ();
6530 }
6531
6532 /* Set the initialization priority for DECL to PRIORITY. */
6533
6534 void
6535 decl_init_priority_insert (tree decl, priority_type priority)
6536 {
6537 struct symtab_node *snode;
6538
6539 if (priority == DEFAULT_INIT_PRIORITY)
6540 {
6541 snode = symtab_node::get (decl);
6542 if (!snode)
6543 return;
6544 }
6545 else if (TREE_CODE (decl) == VAR_DECL)
6546 snode = varpool_node::get_create (decl);
6547 else
6548 snode = cgraph_node::get_create (decl);
6549 snode->set_init_priority (priority);
6550 }
6551
6552 /* Set the finalization priority for DECL to PRIORITY. */
6553
6554 void
6555 decl_fini_priority_insert (tree decl, priority_type priority)
6556 {
6557 struct cgraph_node *node;
6558
6559 if (priority == DEFAULT_INIT_PRIORITY)
6560 {
6561 node = cgraph_node::get (decl);
6562 if (!node)
6563 return;
6564 }
6565 else
6566 node = cgraph_node::get_create (decl);
6567 node->set_fini_priority (priority);
6568 }
6569
6570 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6571
6572 static void
6573 print_debug_expr_statistics (void)
6574 {
6575 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6576 (long) htab_size (debug_expr_for_decl),
6577 (long) htab_elements (debug_expr_for_decl),
6578 htab_collisions (debug_expr_for_decl));
6579 }
6580
6581 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6582
6583 static void
6584 print_value_expr_statistics (void)
6585 {
6586 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6587 (long) htab_size (value_expr_for_decl),
6588 (long) htab_elements (value_expr_for_decl),
6589 htab_collisions (value_expr_for_decl));
6590 }
6591
6592 /* Lookup a debug expression for FROM, and return it if we find one. */
6593
6594 tree
6595 decl_debug_expr_lookup (tree from)
6596 {
6597 struct tree_decl_map *h, in;
6598 in.base.from = from;
6599
6600 h = (struct tree_decl_map *)
6601 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6602 if (h)
6603 return h->to;
6604 return NULL_TREE;
6605 }
6606
6607 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6608
6609 void
6610 decl_debug_expr_insert (tree from, tree to)
6611 {
6612 struct tree_decl_map *h;
6613 void **loc;
6614
6615 h = ggc_alloc<tree_decl_map> ();
6616 h->base.from = from;
6617 h->to = to;
6618 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6619 INSERT);
6620 *(struct tree_decl_map **) loc = h;
6621 }
6622
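/* Illustrative sketch, not part of GCC proper: the insert/lookup pair above
   forms a simple DECL -> expression side table keyed on DECL_UID.  Callers
   inside GCC normally go through the DECL_DEBUG_EXPR and SET_DECL_DEBUG_EXPR
   macros in tree.h; the example_record_debug_expr helper is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_record_debug_expr (tree decl, tree expr)
{
  decl_debug_expr_insert (decl, expr);
  /* A later lookup on the same DECL returns EXPR; a DECL that was never
     inserted returns NULL_TREE.  */
  return decl_debug_expr_lookup (decl);
}
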
6623 /* Lookup a value expression for FROM, and return it if we find one. */
6624
6625 tree
6626 decl_value_expr_lookup (tree from)
6627 {
6628 struct tree_decl_map *h, in;
6629 in.base.from = from;
6630
6631 h = (struct tree_decl_map *)
6632 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6633 if (h)
6634 return h->to;
6635 return NULL_TREE;
6636 }
6637
6638 /* Insert a mapping FROM->TO in the value expression hashtable. */
6639
6640 void
6641 decl_value_expr_insert (tree from, tree to)
6642 {
6643 struct tree_decl_map *h;
6644 void **loc;
6645
6646 h = ggc_alloc<tree_decl_map> ();
6647 h->base.from = from;
6648 h->to = to;
6649 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6650 INSERT);
6651 *(struct tree_decl_map **) loc = h;
6652 }
6653
6654 /* Lookup a vector of debug arguments for FROM, and return it if we
6655 find one. */
6656
6657 vec<tree, va_gc> **
6658 decl_debug_args_lookup (tree from)
6659 {
6660 struct tree_vec_map *h, in;
6661
6662 if (!DECL_HAS_DEBUG_ARGS_P (from))
6663 return NULL;
6664 gcc_checking_assert (debug_args_for_decl != NULL);
6665 in.base.from = from;
6666 h = (struct tree_vec_map *)
6667 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6668 if (h)
6669 return &h->to;
6670 return NULL;
6671 }
6672
6673 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6674 arguments hashtable. */
6675
6676 vec<tree, va_gc> **
6677 decl_debug_args_insert (tree from)
6678 {
6679 struct tree_vec_map *h;
6680 void **loc;
6681
6682 if (DECL_HAS_DEBUG_ARGS_P (from))
6683 return decl_debug_args_lookup (from);
6684 if (debug_args_for_decl == NULL)
6685 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6686 tree_vec_map_eq, 0);
6687 h = ggc_alloc<tree_vec_map> ();
6688 h->base.from = from;
6689 h->to = NULL;
6690 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6691 INSERT);
6692 *(struct tree_vec_map **) loc = h;
6693 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6694 return &h->to;
6695 }
6696
6697 /* Hashing of types so that we don't make duplicates.
6698 The entry point is `type_hash_canon'. */
6699
6700 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6701 with types in the TREE_VALUE slots), by adding the hash codes
6702 of the individual types. */
6703
6704 static void
6705 type_hash_list (const_tree list, inchash::hash &hstate)
6706 {
6707 const_tree tail;
6708
6709 for (tail = list; tail; tail = TREE_CHAIN (tail))
6710 if (TREE_VALUE (tail) != error_mark_node)
6711 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6712 }
6713
6714 /* These are the Hashtable callback functions. */
6715
6716 /* Returns true iff the types are equivalent. */
6717
6718 static int
6719 type_hash_eq (const void *va, const void *vb)
6720 {
6721 const struct type_hash *const a = (const struct type_hash *) va,
6722 *const b = (const struct type_hash *) vb;
6723
6724 /* First test the things that are the same for all types. */
6725 if (a->hash != b->hash
6726 || TREE_CODE (a->type) != TREE_CODE (b->type)
6727 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6728 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6729 TYPE_ATTRIBUTES (b->type))
6730 || (TREE_CODE (a->type) != COMPLEX_TYPE
6731 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6732 return 0;
6733
6734 /* Be careful about comparing arrays before and after the element type
6735 has been completed; don't compare TYPE_ALIGN unless both types are
6736 complete. */
6737 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6738 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6739 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6740 return 0;
6741
6742 switch (TREE_CODE (a->type))
6743 {
6744 case VOID_TYPE:
6745 case COMPLEX_TYPE:
6746 case POINTER_TYPE:
6747 case REFERENCE_TYPE:
6748 case NULLPTR_TYPE:
6749 return 1;
6750
6751 case VECTOR_TYPE:
6752 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6753
6754 case ENUMERAL_TYPE:
6755 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6756 && !(TYPE_VALUES (a->type)
6757 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6758 && TYPE_VALUES (b->type)
6759 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6760 && type_list_equal (TYPE_VALUES (a->type),
6761 TYPE_VALUES (b->type))))
6762 return 0;
6763
6764 /* ... fall through ... */
6765
6766 case INTEGER_TYPE:
6767 case REAL_TYPE:
6768 case BOOLEAN_TYPE:
6769 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6770 return false;
6771 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6772 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6773 TYPE_MAX_VALUE (b->type)))
6774 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6775 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6776 TYPE_MIN_VALUE (b->type))));
6777
6778 case FIXED_POINT_TYPE:
6779 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6780
6781 case OFFSET_TYPE:
6782 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6783
6784 case METHOD_TYPE:
6785 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6786 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6787 || (TYPE_ARG_TYPES (a->type)
6788 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6789 && TYPE_ARG_TYPES (b->type)
6790 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6791 && type_list_equal (TYPE_ARG_TYPES (a->type),
6792 TYPE_ARG_TYPES (b->type)))))
6793 break;
6794 return 0;
6795 case ARRAY_TYPE:
6796 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6797
6798 case RECORD_TYPE:
6799 case UNION_TYPE:
6800 case QUAL_UNION_TYPE:
6801 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6802 || (TYPE_FIELDS (a->type)
6803 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6804 && TYPE_FIELDS (b->type)
6805 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6806 && type_list_equal (TYPE_FIELDS (a->type),
6807 TYPE_FIELDS (b->type))));
6808
6809 case FUNCTION_TYPE:
6810 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6811 || (TYPE_ARG_TYPES (a->type)
6812 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6813 && TYPE_ARG_TYPES (b->type)
6814 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6815 && type_list_equal (TYPE_ARG_TYPES (a->type),
6816 TYPE_ARG_TYPES (b->type))))
6817 break;
6818 return 0;
6819
6820 default:
6821 return 0;
6822 }
6823
6824 if (lang_hooks.types.type_hash_eq != NULL)
6825 return lang_hooks.types.type_hash_eq (a->type, b->type);
6826
6827 return 1;
6828 }
6829
6830 /* Return the cached hash value. */
6831
6832 static hashval_t
6833 type_hash_hash (const void *item)
6834 {
6835 return ((const struct type_hash *) item)->hash;
6836 }
6837
6838 /* Given TYPE, and HASHCODE its hash code, return the canonical
6839 object for an identical type if one already exists.
6840 Otherwise, return TYPE, and record it as the canonical object.
6841
6842 To use this function, first create a type of the sort you want.
6843 Then compute its hash code from the fields of the type that
6844 make it different from other similar types.
6845 Then call this function and use the value. */
6846
6847 tree
6848 type_hash_canon (unsigned int hashcode, tree type)
6849 {
6850 type_hash in;
6851 void **loc;
6852
6853 /* The hash table only contains main variants, so ensure that's what we're
6854 being passed. */
6855 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6856
6857 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6858 must call that routine before comparing TYPE_ALIGNs. */
6859 layout_type (type);
6860
6861 in.hash = hashcode;
6862 in.type = type;
6863
6864 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6865 if (*loc)
6866 {
6867 tree t1 = ((type_hash *) *loc)->type;
6868 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6869 if (GATHER_STATISTICS)
6870 {
6871 tree_code_counts[(int) TREE_CODE (type)]--;
6872 tree_node_counts[(int) t_kind]--;
6873 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6874 }
6875 return t1;
6876 }
6877 else
6878 {
6879 struct type_hash *h;
6880
6881 h = ggc_alloc<type_hash> ();
6882 h->hash = hashcode;
6883 h->type = type;
6884 *loc = (void *)h;
6885
6886 return type;
6887 }
6888 }
6889
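/* Illustrative sketch, not part of GCC proper, of the usage pattern the
   comment above describes: build a candidate type, hash the fields that
   distinguish it, then let type_hash_canon either return an existing
   identical type or register the new one.  The example_shared_signed_type
   helper is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_shared_signed_type (unsigned int precision)
{
  /* Build the candidate.  */
  tree t = make_node (INTEGER_TYPE);
  TYPE_PRECISION (t) = precision;
  fixup_signed_type (t);

  /* Only the precision distinguishes these candidates from each other,
     so it is all that needs to be hashed.  */
  inchash::hash hstate;
  hstate.add_object (precision);
  return type_hash_canon (hstate.end (), t);
}
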
6890 /* See if the data pointed to by the type hash table is marked. We consider
6891 it marked if the type is marked or if a debug type number or symbol
6892 table entry has been made for the type. */
6893
6894 static int
6895 type_hash_marked_p (const void *p)
6896 {
6897 const_tree const type = ((const struct type_hash *) p)->type;
6898
6899 return ggc_marked_p (type);
6900 }
6901
6902 static void
6903 print_type_hash_statistics (void)
6904 {
6905 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6906 (long) htab_size (type_hash_table),
6907 (long) htab_elements (type_hash_table),
6908 htab_collisions (type_hash_table));
6909 }
6910
6911 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6912 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6913 by adding the hash codes of the individual attributes. */
6914
6915 static void
6916 attribute_hash_list (const_tree list, inchash::hash &hstate)
6917 {
6918 const_tree tail;
6919
6920 for (tail = list; tail; tail = TREE_CHAIN (tail))
6921 /* ??? Do we want to add in TREE_VALUE too? */
6922 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6923 }
6924
6925 /* Given two lists of attributes, return true if list l2 is
6926 equivalent to l1. */
6927
6928 int
6929 attribute_list_equal (const_tree l1, const_tree l2)
6930 {
6931 if (l1 == l2)
6932 return 1;
6933
6934 return attribute_list_contained (l1, l2)
6935 && attribute_list_contained (l2, l1);
6936 }
6937
6938 /* Given two lists of attributes, return true if list L2 is
6939 completely contained within L1. */
6940 /* ??? This would be faster if attribute names were stored in a canonicalized
6941 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6942 must be used to show these elements are equivalent (which they are). */
6943 /* ??? It's not clear that attributes with arguments will always be handled
6944 correctly. */
6945
6946 int
6947 attribute_list_contained (const_tree l1, const_tree l2)
6948 {
6949 const_tree t1, t2;
6950
6951 /* First check the obvious, maybe the lists are identical. */
6952 if (l1 == l2)
6953 return 1;
6954
6955 /* Maybe the lists are similar. */
6956 for (t1 = l1, t2 = l2;
6957 t1 != 0 && t2 != 0
6958 && get_attribute_name (t1) == get_attribute_name (t2)
6959 && TREE_VALUE (t1) == TREE_VALUE (t2);
6960 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6961 ;
6962
6963 /* Maybe the lists are equal. */
6964 if (t1 == 0 && t2 == 0)
6965 return 1;
6966
6967 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6968 {
6969 const_tree attr;
6970 /* This CONST_CAST is okay because lookup_attribute does not
6971 modify its argument and the return value is assigned to a
6972 const_tree. */
6973 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6974 CONST_CAST_TREE (l1));
6975 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6976 attr = lookup_ident_attribute (get_attribute_name (t2),
6977 TREE_CHAIN (attr)))
6978 ;
6979
6980 if (attr == NULL_TREE)
6981 return 0;
6982 }
6983
6984 return 1;
6985 }
6986
6987 /* Given two lists of types
6988 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6989 return 1 if the lists contain the same types in the same order.
6990 Also, the TREE_PURPOSEs must match. */
6991
6992 int
6993 type_list_equal (const_tree l1, const_tree l2)
6994 {
6995 const_tree t1, t2;
6996
6997 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6998 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6999 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7000 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7001 && (TREE_TYPE (TREE_PURPOSE (t1))
7002 == TREE_TYPE (TREE_PURPOSE (t2))))))
7003 return 0;
7004
7005 return t1 == t2;
7006 }
7007
7008 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7009 given by TYPE. If the argument list accepts variable arguments,
7010 then this function counts only the ordinary arguments. */
7011
7012 int
7013 type_num_arguments (const_tree type)
7014 {
7015 int i = 0;
7016 tree t;
7017
7018 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7019 /* If the function does not take a variable number of arguments,
7020 the last element in the list will have type `void'. */
7021 if (VOID_TYPE_P (TREE_VALUE (t)))
7022 break;
7023 else
7024 ++i;
7025
7026 return i;
7027 }
7028
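/* Illustrative sketch, not part of GCC proper: the trailing 'void' sentinel
   that build_function_type_list appends marks a non-varargs function and is
   not counted as an argument.  The example_count_fixed_args helper is
   hypothetical.  */

static int ATTRIBUTE_UNUSED
example_count_fixed_args (void)
{
  /* The equivalent of 'void f (int, double)': two ordinary arguments.  */
  tree fntype = build_function_type_list (void_type_node, integer_type_node,
                                          double_type_node, NULL_TREE);
  return type_num_arguments (fntype);  /* Returns 2.  */
}
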
7029 /* Nonzero if integer constants T1 and T2
7030 represent the same constant value. */
7031
7032 int
7033 tree_int_cst_equal (const_tree t1, const_tree t2)
7034 {
7035 if (t1 == t2)
7036 return 1;
7037
7038 if (t1 == 0 || t2 == 0)
7039 return 0;
7040
7041 if (TREE_CODE (t1) == INTEGER_CST
7042 && TREE_CODE (t2) == INTEGER_CST
7043 && wi::to_widest (t1) == wi::to_widest (t2))
7044 return 1;
7045
7046 return 0;
7047 }
7048
7049 /* Return true if T is an INTEGER_CST whose numerical value (extended
7050 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7051
7052 bool
7053 tree_fits_shwi_p (const_tree t)
7054 {
7055 return (t != NULL_TREE
7056 && TREE_CODE (t) == INTEGER_CST
7057 && wi::fits_shwi_p (wi::to_widest (t)));
7058 }
7059
7060 /* Return true if T is an INTEGER_CST whose numerical value (extended
7061 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7062
7063 bool
7064 tree_fits_uhwi_p (const_tree t)
7065 {
7066 return (t != NULL_TREE
7067 && TREE_CODE (t) == INTEGER_CST
7068 && wi::fits_uhwi_p (wi::to_widest (t)));
7069 }
7070
7071 /* T is an INTEGER_CST whose numerical value (extended according to
7072 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7073 HOST_WIDE_INT. */
7074
7075 HOST_WIDE_INT
7076 tree_to_shwi (const_tree t)
7077 {
7078 gcc_assert (tree_fits_shwi_p (t));
7079 return TREE_INT_CST_LOW (t);
7080 }
7081
7082 /* T is an INTEGER_CST whose numerical value (extended according to
7083 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7084 HOST_WIDE_INT. */
7085
7086 unsigned HOST_WIDE_INT
7087 tree_to_uhwi (const_tree t)
7088 {
7089 gcc_assert (tree_fits_uhwi_p (t));
7090 return TREE_INT_CST_LOW (t);
7091 }
7092
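/* Illustrative sketch, not part of GCC proper: the fits/to pairs above are
   meant to be used together, checking before converting.  The
   example_int_cst_or_default helper is hypothetical.  */

static HOST_WIDE_INT ATTRIBUTE_UNUSED
example_int_cst_or_default (const_tree t, HOST_WIDE_INT dflt)
{
  /* Only extract the value when it provably fits in a signed
     HOST_WIDE_INT; otherwise fall back to DFLT.  */
  return tree_fits_shwi_p (t) ? tree_to_shwi (t) : dflt;
}
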
7093 /* Return the most significant (sign) bit of T. */
7094
7095 int
7096 tree_int_cst_sign_bit (const_tree t)
7097 {
7098 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7099
7100 return wi::extract_uhwi (t, bitno, 1);
7101 }
7102
7103 /* Return an indication of the sign of the integer constant T.
7104 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7105 Note that -1 will never be returned if T's type is unsigned. */
7106
7107 int
7108 tree_int_cst_sgn (const_tree t)
7109 {
7110 if (wi::eq_p (t, 0))
7111 return 0;
7112 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7113 return 1;
7114 else if (wi::neg_p (t))
7115 return -1;
7116 else
7117 return 1;
7118 }
7119
7120 /* Return the minimum number of bits needed to represent VALUE in a
7121 signed or unsigned type; SGN says which. */
7122
7123 unsigned int
7124 tree_int_cst_min_precision (tree value, signop sgn)
7125 {
7126 /* If the value is negative, compute its negative minus 1. The latter
7127 adjustment is because the absolute value of the largest negative value
7128 is one larger than the largest positive value. This is equivalent to
7129 a bit-wise negation, so use that operation instead. */
7130
7131 if (tree_int_cst_sgn (value) < 0)
7132 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7133
7134 /* Return the number of bits needed, taking into account the fact
7135 that we need one more bit for a signed than unsigned type.
7136 If value is 0 or -1, the minimum precision is 1 no matter
7137 whether SGN is SIGNED or UNSIGNED. */
7138
7139 if (integer_zerop (value))
7140 return 1;
7141 else
7142 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7143 }
7144
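/* Illustrative sketch, not part of GCC proper, spelling out the arithmetic
   above: 255 needs tree_floor_log2 (255) + 1 = 8 bits as an unsigned value
   and one more bit (9) once a sign bit is required.  The
   example_min_precision helper is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_min_precision (void)
{
  tree v = build_int_cst (integer_type_node, 255);
  gcc_checking_assert (tree_int_cst_min_precision (v, UNSIGNED) == 8);
  gcc_checking_assert (tree_int_cst_min_precision (v, SIGNED) == 9);
}
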
7145 /* Return truthvalue of whether T1 is the same tree structure as T2.
7146 Return 1 if they are the same.
7147 Return 0 if they are understandably different.
7148 Return -1 if either contains tree structure not understood by
7149 this function. */
7150
7151 int
7152 simple_cst_equal (const_tree t1, const_tree t2)
7153 {
7154 enum tree_code code1, code2;
7155 int cmp;
7156 int i;
7157
7158 if (t1 == t2)
7159 return 1;
7160 if (t1 == 0 || t2 == 0)
7161 return 0;
7162
7163 code1 = TREE_CODE (t1);
7164 code2 = TREE_CODE (t2);
7165
7166 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7167 {
7168 if (CONVERT_EXPR_CODE_P (code2)
7169 || code2 == NON_LVALUE_EXPR)
7170 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7171 else
7172 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7173 }
7174
7175 else if (CONVERT_EXPR_CODE_P (code2)
7176 || code2 == NON_LVALUE_EXPR)
7177 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7178
7179 if (code1 != code2)
7180 return 0;
7181
7182 switch (code1)
7183 {
7184 case INTEGER_CST:
7185 return wi::to_widest (t1) == wi::to_widest (t2);
7186
7187 case REAL_CST:
7188 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7189
7190 case FIXED_CST:
7191 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7192
7193 case STRING_CST:
7194 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7195 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7196 TREE_STRING_LENGTH (t1)));
7197
7198 case CONSTRUCTOR:
7199 {
7200 unsigned HOST_WIDE_INT idx;
7201 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7202 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7203
7204 if (vec_safe_length (v1) != vec_safe_length (v2))
7205 return false;
7206
7207 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7208 /* ??? Should we also handle fields here? */
7209 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7210 return false;
7211 return true;
7212 }
7213
7214 case SAVE_EXPR:
7215 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7216
7217 case CALL_EXPR:
7218 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7219 if (cmp <= 0)
7220 return cmp;
7221 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7222 return 0;
7223 {
7224 const_tree arg1, arg2;
7225 const_call_expr_arg_iterator iter1, iter2;
7226 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7227 arg2 = first_const_call_expr_arg (t2, &iter2);
7228 arg1 && arg2;
7229 arg1 = next_const_call_expr_arg (&iter1),
7230 arg2 = next_const_call_expr_arg (&iter2))
7231 {
7232 cmp = simple_cst_equal (arg1, arg2);
7233 if (cmp <= 0)
7234 return cmp;
7235 }
7236 return arg1 == arg2;
7237 }
7238
7239 case TARGET_EXPR:
7240 /* Special case: if either target is an unallocated VAR_DECL,
7241 it means that it's going to be unified with whatever the
7242 TARGET_EXPR is really supposed to initialize, so treat it
7243 as being equivalent to anything. */
7244 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7245 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7246 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7247 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7248 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7249 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7250 cmp = 1;
7251 else
7252 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7253
7254 if (cmp <= 0)
7255 return cmp;
7256
7257 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7258
7259 case WITH_CLEANUP_EXPR:
7260 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7261 if (cmp <= 0)
7262 return cmp;
7263
7264 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7265
7266 case COMPONENT_REF:
7267 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7268 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7269
7270 return 0;
7271
7272 case VAR_DECL:
7273 case PARM_DECL:
7274 case CONST_DECL:
7275 case FUNCTION_DECL:
7276 return 0;
7277
7278 default:
7279 break;
7280 }
7281
7282 /* This general rule works for most tree codes. All exceptions should be
7283 handled above. If this is a language-specific tree code, we can't
7284 trust what might be in the operand, so say we don't know
7285 the situation. */
7286 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7287 return -1;
7288
7289 switch (TREE_CODE_CLASS (code1))
7290 {
7291 case tcc_unary:
7292 case tcc_binary:
7293 case tcc_comparison:
7294 case tcc_expression:
7295 case tcc_reference:
7296 case tcc_statement:
7297 cmp = 1;
7298 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7299 {
7300 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7301 if (cmp <= 0)
7302 return cmp;
7303 }
7304
7305 return cmp;
7306
7307 default:
7308 return -1;
7309 }
7310 }
7311
7312 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7313 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7314 than U, respectively. */
7315
7316 int
7317 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7318 {
7319 if (tree_int_cst_sgn (t) < 0)
7320 return -1;
7321 else if (!tree_fits_uhwi_p (t))
7322 return 1;
7323 else if (TREE_INT_CST_LOW (t) == u)
7324 return 0;
7325 else if (TREE_INT_CST_LOW (t) < u)
7326 return -1;
7327 else
7328 return 1;
7329 }
7330
7331 /* Return true if SIZE represents a constant size that is in bounds of
7332 what the middle-end and the backend accepts (covering not more than
7333 half of the address-space). */
7334
7335 bool
7336 valid_constant_size_p (const_tree size)
7337 {
7338 if (! tree_fits_uhwi_p (size)
7339 || TREE_OVERFLOW (size)
7340 || tree_int_cst_sign_bit (size) != 0)
7341 return false;
7342 return true;
7343 }
7344
7345 /* Return the precision of the type, or for a complex or vector type the
7346 precision of the type of its elements. */
7347
7348 unsigned int
7349 element_precision (const_tree type)
7350 {
7351 enum tree_code code = TREE_CODE (type);
7352 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7353 type = TREE_TYPE (type);
7354
7355 return TYPE_PRECISION (type);
7356 }
7357
7358 /* Return true if CODE represents an associative tree code. Otherwise
7359 return false. */
7360 bool
7361 associative_tree_code (enum tree_code code)
7362 {
7363 switch (code)
7364 {
7365 case BIT_IOR_EXPR:
7366 case BIT_AND_EXPR:
7367 case BIT_XOR_EXPR:
7368 case PLUS_EXPR:
7369 case MULT_EXPR:
7370 case MIN_EXPR:
7371 case MAX_EXPR:
7372 return true;
7373
7374 default:
7375 break;
7376 }
7377 return false;
7378 }
7379
7380 /* Return true if CODE represents a commutative tree code. Otherwise
7381 return false. */
7382 bool
7383 commutative_tree_code (enum tree_code code)
7384 {
7385 switch (code)
7386 {
7387 case PLUS_EXPR:
7388 case MULT_EXPR:
7389 case MULT_HIGHPART_EXPR:
7390 case MIN_EXPR:
7391 case MAX_EXPR:
7392 case BIT_IOR_EXPR:
7393 case BIT_XOR_EXPR:
7394 case BIT_AND_EXPR:
7395 case NE_EXPR:
7396 case EQ_EXPR:
7397 case UNORDERED_EXPR:
7398 case ORDERED_EXPR:
7399 case UNEQ_EXPR:
7400 case LTGT_EXPR:
7401 case TRUTH_AND_EXPR:
7402 case TRUTH_XOR_EXPR:
7403 case TRUTH_OR_EXPR:
7404 case WIDEN_MULT_EXPR:
7405 case VEC_WIDEN_MULT_HI_EXPR:
7406 case VEC_WIDEN_MULT_LO_EXPR:
7407 case VEC_WIDEN_MULT_EVEN_EXPR:
7408 case VEC_WIDEN_MULT_ODD_EXPR:
7409 return true;
7410
7411 default:
7412 break;
7413 }
7414 return false;
7415 }
7416
7417 /* Return true if CODE represents a ternary tree code for which the
7418 first two operands are commutative. Otherwise return false. */
7419 bool
7420 commutative_ternary_tree_code (enum tree_code code)
7421 {
7422 switch (code)
7423 {
7424 case WIDEN_MULT_PLUS_EXPR:
7425 case WIDEN_MULT_MINUS_EXPR:
7426 case DOT_PROD_EXPR:
7427 case FMA_EXPR:
7428 return true;
7429
7430 default:
7431 break;
7432 }
7433 return false;
7434 }
7435
7436 namespace inchash
7437 {
7438
7439 /* Generate a hash value for an expression. This can be used iteratively
7440 by passing a previous result as the HSTATE argument.
7441
7442 This function is intended to produce the same hash for expressions which
7443 would compare equal using operand_equal_p. */
7444 void
7445 add_expr (const_tree t, inchash::hash &hstate)
7446 {
7447 int i;
7448 enum tree_code code;
7449 enum tree_code_class tclass;
7450
7451 if (t == NULL_TREE)
7452 {
7453 hstate.merge_hash (0);
7454 return;
7455 }
7456
7457 code = TREE_CODE (t);
7458
7459 switch (code)
7460 {
7461 /* Alas, constants aren't shared, so we can't rely on pointer
7462 identity. */
7463 case VOID_CST:
7464 hstate.merge_hash (0);
7465 return;
7466 case INTEGER_CST:
7467 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7468 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7469 return;
7470 case REAL_CST:
7471 {
7472 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7473 hstate.merge_hash (val2);
7474 return;
7475 }
7476 case FIXED_CST:
7477 {
7478 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7479 hstate.merge_hash (val2);
7480 return;
7481 }
7482 case STRING_CST:
7483 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7484 return;
7485 case COMPLEX_CST:
7486 inchash::add_expr (TREE_REALPART (t), hstate);
7487 inchash::add_expr (TREE_IMAGPART (t), hstate);
7488 return;
7489 case VECTOR_CST:
7490 {
7491 unsigned i;
7492 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7493 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7494 return;
7495 }
7496 case SSA_NAME:
7497 /* We can just compare by pointer. */
7498 hstate.add_wide_int (SSA_NAME_VERSION (t));
7499 return;
7500 case PLACEHOLDER_EXPR:
7501 /* The node itself doesn't matter. */
7502 return;
7503 case TREE_LIST:
7504 /* A list of expressions, for a CALL_EXPR or as the elements of a
7505 VECTOR_CST. */
7506 for (; t; t = TREE_CHAIN (t))
7507 inchash::add_expr (TREE_VALUE (t), hstate);
7508 return;
7509 case CONSTRUCTOR:
7510 {
7511 unsigned HOST_WIDE_INT idx;
7512 tree field, value;
7513 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7514 {
7515 inchash::add_expr (field, hstate);
7516 inchash::add_expr (value, hstate);
7517 }
7518 return;
7519 }
7520 case FUNCTION_DECL:
7521 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7522 Otherwise nodes that compare equal according to operand_equal_p might
7523 get different hash codes. However, don't do this for machine specific
7524 or front end builtins, since the function code is overloaded in those
7525 cases. */
7526 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7527 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7528 {
7529 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7530 code = TREE_CODE (t);
7531 }
7532 /* FALL THROUGH */
7533 default:
7534 tclass = TREE_CODE_CLASS (code);
7535
7536 if (tclass == tcc_declaration)
7537 {
7538 /* DECLs have a unique ID. */
7539 hstate.add_wide_int (DECL_UID (t));
7540 }
7541 else
7542 {
7543 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7544
7545 hstate.add_object (code);
7546
7547 /* Don't hash the type, that can lead to having nodes which
7548 compare equal according to operand_equal_p, but which
7549 have different hash codes. */
7550 if (CONVERT_EXPR_CODE_P (code)
7551 || code == NON_LVALUE_EXPR)
7552 {
7553 /* Make sure to include signedness in the hash computation. */
7554 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7555 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7556 }
7557
7558 else if (commutative_tree_code (code))
7559 {
7560 /* It's a commutative expression. We want to hash it the same
7561 however it appears. We do this by first hashing both operands
7562 and then rehashing based on the order of their independent
7563 hashes. */
7564 inchash::hash one, two;
7565 inchash::add_expr (TREE_OPERAND (t, 0), one);
7566 inchash::add_expr (TREE_OPERAND (t, 1), two);
7567 hstate.add_commutative (one, two);
7568 }
7569 else
7570 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7571 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7572 }
7573 return;
7574 }
7575 }
7576
7577 }
7578
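/* Illustrative sketch, not part of GCC proper: because PLUS_EXPR is a
   commutative code, add_expr hashes 'a + b' and 'b + a' to the same value,
   mirroring what operand_equal_p treats as equal.  A and B are assumed to
   share an arithmetic type; the example_expr_hash helper is hypothetical.  */

static hashval_t ATTRIBUTE_UNUSED
example_expr_hash (tree a, tree b)
{
  tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
  inchash::hash hstate;
  inchash::add_expr (sum, hstate);
  return hstate.end ();
}
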
7579 /* Constructors for pointer, array and function types.
7580 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7581 constructed by language-dependent code, not here.) */
7582
7583 /* Construct, lay out and return the type of pointers to TO_TYPE with
7584 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7585 reference all of memory. If such a type has already been
7586 constructed, reuse it. */
7587
7588 tree
7589 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7590 bool can_alias_all)
7591 {
7592 tree t;
7593
7594 if (to_type == error_mark_node)
7595 return error_mark_node;
7596
7597 /* If the pointed-to type has the may_alias attribute set, force
7598 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7599 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7600 can_alias_all = true;
7601
7602 /* In some cases, languages will have things that aren't a POINTER_TYPE
7603 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7604 In that case, return that type without regard to the rest of our
7605 operands.
7606
7607 ??? This is a kludge, but consistent with the way this function has
7608 always operated and there doesn't seem to be a good way to avoid this
7609 at the moment. */
7610 if (TYPE_POINTER_TO (to_type) != 0
7611 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7612 return TYPE_POINTER_TO (to_type);
7613
7614 /* First, if we already have a type for pointers to TO_TYPE and it's
7615 the proper mode, use it. */
7616 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7617 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7618 return t;
7619
7620 t = make_node (POINTER_TYPE);
7621
7622 TREE_TYPE (t) = to_type;
7623 SET_TYPE_MODE (t, mode);
7624 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7625 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7626 TYPE_POINTER_TO (to_type) = t;
7627
7628 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7629 SET_TYPE_STRUCTURAL_EQUALITY (t);
7630 else if (TYPE_CANONICAL (to_type) != to_type)
7631 TYPE_CANONICAL (t)
7632 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7633 mode, can_alias_all);
7634
7635 /* Lay out the type. This function has many callers that are concerned
7636 with expression-construction, and this simplifies them all. */
7637 layout_type (t);
7638
7639 return t;
7640 }
7641
7642 /* By default build pointers in ptr_mode. */
7643
7644 tree
7645 build_pointer_type (tree to_type)
7646 {
7647 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7648 : TYPE_ADDR_SPACE (to_type);
7649 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7650 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7651 }
7652
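/* Illustrative sketch, not part of GCC proper: pointer types are cached on
   the pointed-to type, so repeated requests return the same node.  The
   example_int_pointer helper is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_int_pointer (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);
  /* Both calls find the entry recorded in TYPE_POINTER_TO.  */
  gcc_checking_assert (p1 == p2);
  return p1;
}
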
7653 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7654
7655 tree
7656 build_reference_type_for_mode (tree to_type, machine_mode mode,
7657 bool can_alias_all)
7658 {
7659 tree t;
7660
7661 if (to_type == error_mark_node)
7662 return error_mark_node;
7663
7664 /* If the pointed-to type has the may_alias attribute set, force
7665 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7666 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7667 can_alias_all = true;
7668
7669 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7670 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7671 In that case, return that type without regard to the rest of our
7672 operands.
7673
7674 ??? This is a kludge, but consistent with the way this function has
7675 always operated and there doesn't seem to be a good way to avoid this
7676 at the moment. */
7677 if (TYPE_REFERENCE_TO (to_type) != 0
7678 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7679 return TYPE_REFERENCE_TO (to_type);
7680
7681 /* First, if we already have a type for pointers to TO_TYPE and it's
7682 the proper mode, use it. */
7683 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7684 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7685 return t;
7686
7687 t = make_node (REFERENCE_TYPE);
7688
7689 TREE_TYPE (t) = to_type;
7690 SET_TYPE_MODE (t, mode);
7691 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7692 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7693 TYPE_REFERENCE_TO (to_type) = t;
7694
7695 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7696 SET_TYPE_STRUCTURAL_EQUALITY (t);
7697 else if (TYPE_CANONICAL (to_type) != to_type)
7698 TYPE_CANONICAL (t)
7699 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7700 mode, can_alias_all);
7701
7702 layout_type (t);
7703
7704 return t;
7705 }
7706
7707
7708 /* Build the node for the type of references-to-TO_TYPE by default
7709 in ptr_mode. */
7710
7711 tree
7712 build_reference_type (tree to_type)
7713 {
7714 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7715 : TYPE_ADDR_SPACE (to_type);
7716 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7717 return build_reference_type_for_mode (to_type, pointer_mode, false);
7718 }
7719
7720 #define MAX_INT_CACHED_PREC \
7721 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7722 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7723
7724 /* Builds a signed or unsigned integer type of precision PRECISION.
7725 Used for C bitfields whose precision does not match that of
7726 built-in target types. */
7727 tree
7728 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7729 int unsignedp)
7730 {
7731 tree itype, ret;
7732
7733 if (unsignedp)
7734 unsignedp = MAX_INT_CACHED_PREC + 1;
7735
7736 if (precision <= MAX_INT_CACHED_PREC)
7737 {
7738 itype = nonstandard_integer_type_cache[precision + unsignedp];
7739 if (itype)
7740 return itype;
7741 }
7742
7743 itype = make_node (INTEGER_TYPE);
7744 TYPE_PRECISION (itype) = precision;
7745
7746 if (unsignedp)
7747 fixup_unsigned_type (itype);
7748 else
7749 fixup_signed_type (itype);
7750
7751 ret = itype;
7752 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7753 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7754 if (precision <= MAX_INT_CACHED_PREC)
7755 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7756
7757 return ret;
7758 }
7759
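/* Illustrative sketch, not part of GCC proper: a 24-bit unsigned bit-field
   type.  Precisions up to MAX_INT_CACHED_PREC are cached, so repeated
   requests are cheap.  The example_unsigned_24bit_type helper is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_unsigned_24bit_type (void)
{
  tree t = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  gcc_checking_assert (TYPE_PRECISION (t) == 24 && TYPE_UNSIGNED (t));
  return t;
}
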
7760 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7761 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7762 is true, reuse such a type that has already been constructed. */
7763
7764 static tree
7765 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7766 {
7767 tree itype = make_node (INTEGER_TYPE);
7768 inchash::hash hstate;
7769
7770 TREE_TYPE (itype) = type;
7771
7772 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7773 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7774
7775 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7776 SET_TYPE_MODE (itype, TYPE_MODE (type));
7777 TYPE_SIZE (itype) = TYPE_SIZE (type);
7778 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7779 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7780 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7781
7782 if (!shared)
7783 return itype;
7784
7785 if ((TYPE_MIN_VALUE (itype)
7786 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7787 || (TYPE_MAX_VALUE (itype)
7788 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7789 {
7790 /* Since we cannot reliably merge this type, we need to compare it using
7791 structural equality checks. */
7792 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7793 return itype;
7794 }
7795
7796 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7797 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7798 hstate.merge_hash (TYPE_HASH (type));
7799 itype = type_hash_canon (hstate.end (), itype);
7800
7801 return itype;
7802 }
7803
7804 /* Wrapper around build_range_type_1 with SHARED set to true. */
7805
7806 tree
7807 build_range_type (tree type, tree lowval, tree highval)
7808 {
7809 return build_range_type_1 (type, lowval, highval, true);
7810 }
7811
7812 /* Wrapper around build_range_type_1 with SHARED set to false. */
7813
7814 tree
7815 build_nonshared_range_type (tree type, tree lowval, tree highval)
7816 {
7817 return build_range_type_1 (type, lowval, highval, false);
7818 }
7819
7820 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7821 MAXVAL should be the maximum value in the domain
7822 (one less than the length of the array).
7823
7824 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7825    We don't enforce this limit; that is up to the caller (e.g. the language front end).
7826 The limit exists because the result is a signed type and we don't handle
7827 sizes that use more than one HOST_WIDE_INT. */
7828
7829 tree
7830 build_index_type (tree maxval)
7831 {
7832 return build_range_type (sizetype, size_zero_node, maxval);
7833 }
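/* For illustration (DOMAIN is an invented name): the domain of a
   ten-element array, i.e. the range 0 .. 9, could be built as

     tree domain = build_index_type (size_int (9));

   size_int wraps the host integer in a sizetype INTEGER_CST suitable for
   use as MAXVAL here.  */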
7834
7835 /* Return true if the debug information for TYPE, a subtype, should be emitted
7836 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7837 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7838 debug info and doesn't reflect the source code. */
7839
7840 bool
7841 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7842 {
7843 tree base_type = TREE_TYPE (type), low, high;
7844
7845 /* Subrange types have a base type which is an integral type. */
7846 if (!INTEGRAL_TYPE_P (base_type))
7847 return false;
7848
7849 /* Get the real bounds of the subtype. */
7850 if (lang_hooks.types.get_subrange_bounds)
7851 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7852 else
7853 {
7854 low = TYPE_MIN_VALUE (type);
7855 high = TYPE_MAX_VALUE (type);
7856 }
7857
7858 /* If the type and its base type have the same representation and the same
7859 name, then the type is not a subrange but a copy of the base type. */
7860 if ((TREE_CODE (base_type) == INTEGER_TYPE
7861 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7862 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7863 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7864 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7865 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7866 return false;
7867
7868 if (lowval)
7869 *lowval = low;
7870 if (highval)
7871 *highval = high;
7872 return true;
7873 }
7874
7875 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7876 and number of elements specified by the range of values of INDEX_TYPE.
7877 If SHARED is true, reuse such a type that has already been constructed. */
7878
7879 static tree
7880 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7881 {
7882 tree t;
7883
7884 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7885 {
7886 error ("arrays of functions are not meaningful");
7887 elt_type = integer_type_node;
7888 }
7889
7890 t = make_node (ARRAY_TYPE);
7891 TREE_TYPE (t) = elt_type;
7892 TYPE_DOMAIN (t) = index_type;
7893 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7894 layout_type (t);
7895
7896 /* If the element type is incomplete at this point we get marked for
7897 structural equality. Do not record these types in the canonical
7898 type hashtable. */
7899 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7900 return t;
7901
7902 if (shared)
7903 {
7904 inchash::hash hstate;
7905 hstate.add_object (TYPE_HASH (elt_type));
7906 if (index_type)
7907 hstate.add_object (TYPE_HASH (index_type));
7908 t = type_hash_canon (hstate.end (), t);
7909 }
7910
7911 if (TYPE_CANONICAL (t) == t)
7912 {
7913 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7914 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7915 SET_TYPE_STRUCTURAL_EQUALITY (t);
7916 else if (TYPE_CANONICAL (elt_type) != elt_type
7917 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7918 TYPE_CANONICAL (t)
7919 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7920 index_type
7921 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7922 shared);
7923 }
7924
7925 return t;
7926 }
7927
7928 /* Wrapper around build_array_type_1 with SHARED set to true. */
7929
7930 tree
7931 build_array_type (tree elt_type, tree index_type)
7932 {
7933 return build_array_type_1 (elt_type, index_type, true);
7934 }
7935
7936 /* Wrapper around build_array_type_1 with SHARED set to false. */
7937
7938 tree
7939 build_nonshared_array_type (tree elt_type, tree index_type)
7940 {
7941 return build_array_type_1 (elt_type, index_type, false);
7942 }
7943
7944 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7945 sizetype. */
7946
7947 tree
7948 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7949 {
7950 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7951 }
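/* For illustration (A1 and A2 are invented names): these two calls should
   yield equivalent representations of "int[10]":

     tree a1 = build_array_type_nelts (integer_type_node, 10);
     tree a2 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));

   build_array_type_nelts is merely shorthand for the longer form.  */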
7952
7953 /* Strip array types from TYPE until a non-array element type is found,
7954    and return that element type.  */
7955
7956 tree
7957 strip_array_types (tree type)
7958 {
7959 while (TREE_CODE (type) == ARRAY_TYPE)
7960 type = TREE_TYPE (type);
7961
7962 return type;
7963 }
7964
7965 /* Computes the canonical argument types from the argument type list
7966 ARGTYPES.
7967
7968 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7969 on entry to this function, or if any of the ARGTYPES are
7970 structural.
7971
7972 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7973 true on entry to this function, or if any of the ARGTYPES are
7974 non-canonical.
7975
7976 Returns a canonical argument list, which may be ARGTYPES when the
7977 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7978 true) or would not differ from ARGTYPES. */
7979
7980 static tree
7981 maybe_canonicalize_argtypes (tree argtypes,
7982 bool *any_structural_p,
7983 bool *any_noncanonical_p)
7984 {
7985 tree arg;
7986 bool any_noncanonical_argtypes_p = false;
7987
7988 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7989 {
7990 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7991 /* Fail gracefully by stating that the type is structural. */
7992 *any_structural_p = true;
7993 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7994 *any_structural_p = true;
7995 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7996 || TREE_PURPOSE (arg))
7997 /* If the argument has a default argument, we consider it
7998 non-canonical even though the type itself is canonical.
7999 That way, different variants of function and method types
8000 with default arguments will all point to the variant with
8001 no defaults as their canonical type. */
8002 any_noncanonical_argtypes_p = true;
8003 }
8004
8005 if (*any_structural_p)
8006 return argtypes;
8007
8008 if (any_noncanonical_argtypes_p)
8009 {
8010 /* Build the canonical list of argument types. */
8011 tree canon_argtypes = NULL_TREE;
8012 bool is_void = false;
8013
8014 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8015 {
8016 if (arg == void_list_node)
8017 is_void = true;
8018 else
8019 canon_argtypes = tree_cons (NULL_TREE,
8020 TYPE_CANONICAL (TREE_VALUE (arg)),
8021 canon_argtypes);
8022 }
8023
8024 canon_argtypes = nreverse (canon_argtypes);
8025 if (is_void)
8026 canon_argtypes = chainon (canon_argtypes, void_list_node);
8027
8028 /* There is a non-canonical type. */
8029 *any_noncanonical_p = true;
8030 return canon_argtypes;
8031 }
8032
8033 /* The canonical argument types are the same as ARGTYPES. */
8034 return argtypes;
8035 }
8036
8037 /* Construct, lay out and return
8038 the type of functions returning type VALUE_TYPE
8039 given arguments of types ARG_TYPES.
8040 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8041 are data type nodes for the arguments of the function.
8042 If such a type has already been constructed, reuse it. */
8043
8044 tree
8045 build_function_type (tree value_type, tree arg_types)
8046 {
8047 tree t;
8048 inchash::hash hstate;
8049 bool any_structural_p, any_noncanonical_p;
8050 tree canon_argtypes;
8051
8052 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8053 {
8054 error ("function return type cannot be function");
8055 value_type = integer_type_node;
8056 }
8057
8058 /* Make a node of the sort we want. */
8059 t = make_node (FUNCTION_TYPE);
8060 TREE_TYPE (t) = value_type;
8061 TYPE_ARG_TYPES (t) = arg_types;
8062
8063 /* If we already have such a type, use the old one. */
8064 hstate.add_object (TYPE_HASH (value_type));
8065 type_hash_list (arg_types, hstate);
8066 t = type_hash_canon (hstate.end (), t);
8067
8068 /* Set up the canonical type. */
8069 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8070 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8071 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8072 &any_structural_p,
8073 &any_noncanonical_p);
8074 if (any_structural_p)
8075 SET_TYPE_STRUCTURAL_EQUALITY (t);
8076 else if (any_noncanonical_p)
8077 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8078 canon_argtypes);
8079
8080 if (!COMPLETE_TYPE_P (t))
8081 layout_type (t);
8082 return t;
8083 }
8084
8085 /* Build a function type.  The RETURN_TYPE is the type returned by the
8086    function.  If VAARGS is set, no void_type_node is appended to the
8087    list.  ARGP must always be terminated by a NULL_TREE.  */
8088
8089 static tree
8090 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8091 {
8092 tree t, args, last;
8093
8094 t = va_arg (argp, tree);
8095 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8096 args = tree_cons (NULL_TREE, t, args);
8097
8098 if (vaargs)
8099 {
8100 last = args;
8101 if (args != NULL_TREE)
8102 args = nreverse (args);
8103 gcc_assert (last != void_list_node);
8104 }
8105 else if (args == NULL_TREE)
8106 args = void_list_node;
8107 else
8108 {
8109 last = args;
8110 args = nreverse (args);
8111 TREE_CHAIN (last) = void_list_node;
8112 }
8113 args = build_function_type (return_type, args);
8114
8115 return args;
8116 }
8117
8118 /* Build a function type. The RETURN_TYPE is the type returned by the
8119 function. If additional arguments are provided, they are
8120 additional argument types. The list of argument types must always
8121 be terminated by NULL_TREE. */
8122
8123 tree
8124 build_function_type_list (tree return_type, ...)
8125 {
8126 tree args;
8127 va_list p;
8128
8129 va_start (p, return_type);
8130 args = build_function_type_list_1 (false, return_type, p);
8131 va_end (p);
8132 return args;
8133 }
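/* For illustration (FNTYPE is an invented name): the type of a function
   "int f (double, const void *)" could be built as

     tree fntype
       = build_function_type_list (integer_type_node, double_type_node,
                                   const_ptr_type_node, NULL_TREE);

   The trailing NULL_TREE is required; it is what terminates the walk over
   the va_list in build_function_type_list_1.  */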
8134
8135 /* Build a variable argument function type. The RETURN_TYPE is the
8136 type returned by the function. If additional arguments are provided,
8137 they are additional argument types. The list of argument types must
8138 always be terminated by NULL_TREE. */
8139
8140 tree
8141 build_varargs_function_type_list (tree return_type, ...)
8142 {
8143 tree args;
8144 va_list p;
8145
8146 va_start (p, return_type);
8147 args = build_function_type_list_1 (true, return_type, p);
8148 va_end (p);
8149
8150 return args;
8151 }
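/* For illustration (PRINTF_LIKE is an invented name): a printf-like
   prototype, "int (char *, ...)", could be built as

     tree printf_like
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           NULL_TREE);

   Unlike build_function_type_list, no void_list_node is appended here,
   which is what marks the FUNCTION_TYPE as taking variable arguments.  */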
8152
8153 /* Build a function type. RETURN_TYPE is the type returned by the
8154 function; VAARGS indicates whether the function takes varargs. The
8155 function takes N named arguments, the types of which are provided in
8156 ARG_TYPES. */
8157
8158 static tree
8159 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8160 tree *arg_types)
8161 {
8162 int i;
8163 tree t = vaargs ? NULL_TREE : void_list_node;
8164
8165 for (i = n - 1; i >= 0; i--)
8166 t = tree_cons (NULL_TREE, arg_types[i], t);
8167
8168 return build_function_type (return_type, t);
8169 }
8170
8171 /* Build a function type. RETURN_TYPE is the type returned by the
8172 function. The function takes N named arguments, the types of which
8173 are provided in ARG_TYPES. */
8174
8175 tree
8176 build_function_type_array (tree return_type, int n, tree *arg_types)
8177 {
8178 return build_function_type_array_1 (false, return_type, n, arg_types);
8179 }
8180
8181 /* Build a variable argument function type. RETURN_TYPE is the type
8182 returned by the function. The function takes N named arguments, the
8183 types of which are provided in ARG_TYPES. */
8184
8185 tree
8186 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8187 {
8188 return build_function_type_array_1 (true, return_type, n, arg_types);
8189 }
8190
8191 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8192 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8193 for the method. An implicit additional parameter (of type
8194 pointer-to-BASETYPE) is added to the ARGTYPES. */
8195
8196 tree
8197 build_method_type_directly (tree basetype,
8198 tree rettype,
8199 tree argtypes)
8200 {
8201 tree t;
8202 tree ptype;
8203 inchash::hash hstate;
8204 bool any_structural_p, any_noncanonical_p;
8205 tree canon_argtypes;
8206
8207 /* Make a node of the sort we want. */
8208 t = make_node (METHOD_TYPE);
8209
8210 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8211 TREE_TYPE (t) = rettype;
8212 ptype = build_pointer_type (basetype);
8213
8214 /* The actual arglist for this function includes a "hidden" argument
8215 which is "this". Put it into the list of argument types. */
8216 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8217 TYPE_ARG_TYPES (t) = argtypes;
8218
8219 /* If we already have such a type, use the old one. */
8220 hstate.add_object (TYPE_HASH (basetype));
8221 hstate.add_object (TYPE_HASH (rettype));
8222 type_hash_list (argtypes, hstate);
8223 t = type_hash_canon (hstate.end (), t);
8224
8225 /* Set up the canonical type. */
8226 any_structural_p
8227 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8228 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8229 any_noncanonical_p
8230 = (TYPE_CANONICAL (basetype) != basetype
8231 || TYPE_CANONICAL (rettype) != rettype);
8232 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8233 &any_structural_p,
8234 &any_noncanonical_p);
8235 if (any_structural_p)
8236 SET_TYPE_STRUCTURAL_EQUALITY (t);
8237 else if (any_noncanonical_p)
8238 TYPE_CANONICAL (t)
8239 = build_method_type_directly (TYPE_CANONICAL (basetype),
8240 TYPE_CANONICAL (rettype),
8241 canon_argtypes);
8242 if (!COMPLETE_TYPE_P (t))
8243 layout_type (t);
8244
8245 return t;
8246 }
8247
8248 /* Construct, lay out and return the type of methods belonging to class
8249 BASETYPE and whose arguments and values are described by TYPE.
8250 If that type exists already, reuse it.
8251 TYPE must be a FUNCTION_TYPE node. */
8252
8253 tree
8254 build_method_type (tree basetype, tree type)
8255 {
8256 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8257
8258 return build_method_type_directly (basetype,
8259 TREE_TYPE (type),
8260 TYPE_ARG_TYPES (type));
8261 }
8262
8263 /* Construct, lay out and return the type of offsets to a value
8264 of type TYPE, within an object of type BASETYPE.
8265 If a suitable offset type exists already, reuse it. */
8266
8267 tree
8268 build_offset_type (tree basetype, tree type)
8269 {
8270 tree t;
8271 inchash::hash hstate;
8272
8273 /* Make a node of the sort we want. */
8274 t = make_node (OFFSET_TYPE);
8275
8276 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8277 TREE_TYPE (t) = type;
8278
8279 /* If we already have such a type, use the old one. */
8280 hstate.add_object (TYPE_HASH (basetype));
8281 hstate.add_object (TYPE_HASH (type));
8282 t = type_hash_canon (hstate.end (), t);
8283
8284 if (!COMPLETE_TYPE_P (t))
8285 layout_type (t);
8286
8287 if (TYPE_CANONICAL (t) == t)
8288 {
8289 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8290 || TYPE_STRUCTURAL_EQUALITY_P (type))
8291 SET_TYPE_STRUCTURAL_EQUALITY (t);
8292 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8293 || TYPE_CANONICAL (type) != type)
8294 TYPE_CANONICAL (t)
8295 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8296 TYPE_CANONICAL (type));
8297 }
8298
8299 return t;
8300 }
8301
8302 /* Create a complex type whose components are COMPONENT_TYPE. */
8303
8304 tree
8305 build_complex_type (tree component_type)
8306 {
8307 tree t;
8308 inchash::hash hstate;
8309
8310 gcc_assert (INTEGRAL_TYPE_P (component_type)
8311 || SCALAR_FLOAT_TYPE_P (component_type)
8312 || FIXED_POINT_TYPE_P (component_type));
8313
8314 /* Make a node of the sort we want. */
8315 t = make_node (COMPLEX_TYPE);
8316
8317 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8318
8319 /* If we already have such a type, use the old one. */
8320 hstate.add_object (TYPE_HASH (component_type));
8321 t = type_hash_canon (hstate.end (), t);
8322
8323 if (!COMPLETE_TYPE_P (t))
8324 layout_type (t);
8325
8326 if (TYPE_CANONICAL (t) == t)
8327 {
8328 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8329 SET_TYPE_STRUCTURAL_EQUALITY (t);
8330 else if (TYPE_CANONICAL (component_type) != component_type)
8331 TYPE_CANONICAL (t)
8332 = build_complex_type (TYPE_CANONICAL (component_type));
8333 }
8334
8335 /* We need to create a name, since complex is a fundamental type. */
8336 if (! TYPE_NAME (t))
8337 {
8338 const char *name;
8339 if (component_type == char_type_node)
8340 name = "complex char";
8341 else if (component_type == signed_char_type_node)
8342 name = "complex signed char";
8343 else if (component_type == unsigned_char_type_node)
8344 name = "complex unsigned char";
8345 else if (component_type == short_integer_type_node)
8346 name = "complex short int";
8347 else if (component_type == short_unsigned_type_node)
8348 name = "complex short unsigned int";
8349 else if (component_type == integer_type_node)
8350 name = "complex int";
8351 else if (component_type == unsigned_type_node)
8352 name = "complex unsigned int";
8353 else if (component_type == long_integer_type_node)
8354 name = "complex long int";
8355 else if (component_type == long_unsigned_type_node)
8356 name = "complex long unsigned int";
8357 else if (component_type == long_long_integer_type_node)
8358 name = "complex long long int";
8359 else if (component_type == long_long_unsigned_type_node)
8360 name = "complex long long unsigned int";
8361 else
8362 name = 0;
8363
8364 if (name != 0)
8365 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8366 get_identifier (name), t);
8367 }
8368
8369 return build_qualified_type (t, TYPE_QUALS (component_type));
8370 }
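/* For illustration (CD is an invented name): the global
   complex_double_type_node is created during type initialization
   essentially as

     tree cd = build_complex_type (double_type_node);

   Only the basic integer components listed above additionally receive a
   TYPE_DECL name such as "complex int".  */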
8371
8372 /* If TYPE is a real or complex floating-point type and the target
8373 does not directly support arithmetic on TYPE then return the wider
8374 type to be used for arithmetic on TYPE. Otherwise, return
8375 NULL_TREE. */
8376
8377 tree
8378 excess_precision_type (tree type)
8379 {
8380 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8381 {
8382 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8383 switch (TREE_CODE (type))
8384 {
8385 case REAL_TYPE:
8386 switch (flt_eval_method)
8387 {
8388 case 1:
8389 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8390 return double_type_node;
8391 break;
8392 case 2:
8393 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8394 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8395 return long_double_type_node;
8396 break;
8397 default:
8398 gcc_unreachable ();
8399 }
8400 break;
8401 case COMPLEX_TYPE:
8402 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8403 return NULL_TREE;
8404 switch (flt_eval_method)
8405 {
8406 case 1:
8407 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8408 return complex_double_type_node;
8409 break;
8410 case 2:
8411 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8412 || (TYPE_MODE (TREE_TYPE (type))
8413 == TYPE_MODE (double_type_node)))
8414 return complex_long_double_type_node;
8415 break;
8416 default:
8417 gcc_unreachable ();
8418 }
8419 break;
8420 default:
8421 break;
8422 }
8423 }
8424 return NULL_TREE;
8425 }
8426 \f
8427 /* Return OP, stripped of any conversions to wider types as much as is safe.
8428 Converting the value back to OP's type makes a value equivalent to OP.
8429
8430 If FOR_TYPE is nonzero, we return a value which, if converted to
8431 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8432
8433 OP must have integer, real or enumeral type. Pointers are not allowed!
8434
8435 There are some cases where the obvious value we could return
8436 would regenerate to OP if converted to OP's type,
8437 but would not extend like OP to wider types.
8438 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8439 For example, if OP is (unsigned short)(signed char)-1,
8440 we avoid returning (signed char)-1 if FOR_TYPE is int,
8441 even though extending that to an unsigned short would regenerate OP,
8442 since the result of extending (signed char)-1 to (int)
8443 is different from (int) OP. */
8444
8445 tree
8446 get_unwidened (tree op, tree for_type)
8447 {
8448 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8449 tree type = TREE_TYPE (op);
8450 unsigned final_prec
8451 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8452 int uns
8453 = (for_type != 0 && for_type != type
8454 && final_prec > TYPE_PRECISION (type)
8455 && TYPE_UNSIGNED (type));
8456 tree win = op;
8457
8458 while (CONVERT_EXPR_P (op))
8459 {
8460 int bitschange;
8461
8462 /* TYPE_PRECISION on vector types has different meaning
8463 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8464 so avoid them here. */
8465 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8466 break;
8467
8468 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8469 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8470
8471 /* Truncations are many-one so cannot be removed.
8472 Unless we are later going to truncate down even farther. */
8473 if (bitschange < 0
8474 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8475 break;
8476
8477 /* See what's inside this conversion. If we decide to strip it,
8478 we will set WIN. */
8479 op = TREE_OPERAND (op, 0);
8480
8481 /* If we have not stripped any zero-extensions (uns is 0),
8482 we can strip any kind of extension.
8483 If we have previously stripped a zero-extension,
8484 only zero-extensions can safely be stripped.
8485 Any extension can be stripped if the bits it would produce
8486 are all going to be discarded later by truncating to FOR_TYPE. */
8487
8488 if (bitschange > 0)
8489 {
8490 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8491 win = op;
8492 /* TYPE_UNSIGNED says whether this is a zero-extension.
8493 Let's avoid computing it if it does not affect WIN
8494 and if UNS will not be needed again. */
8495 if ((uns
8496 || CONVERT_EXPR_P (op))
8497 && TYPE_UNSIGNED (TREE_TYPE (op)))
8498 {
8499 uns = 1;
8500 win = op;
8501 }
8502 }
8503 }
8504
8505   /* If we finally reach a constant, see if it fits in FOR_TYPE and
8506 in that case convert it. */
8507 if (for_type
8508 && TREE_CODE (win) == INTEGER_CST
8509 && TREE_TYPE (win) != for_type
8510 && int_fits_type_p (win, for_type))
8511 win = fold_convert (for_type, win);
8512
8513 return win;
8514 }
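/* For illustration (a sketch; OP and NARROW are invented names): if OP is
   the tree for "(int) (unsigned char) x" and FOR_TYPE is unsigned char,

     tree narrow = get_unwidened (op, unsigned_char_type_node);

   may return the "(unsigned char) x" subexpression, since converting that
   back to FOR_TYPE regenerates the same value as converting OP would.  */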
8515 \f
8516 /* Return OP or a simpler expression for a narrower value
8517 which can be sign-extended or zero-extended to give back OP.
8518 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8519 or 0 if the value should be sign-extended. */
8520
8521 tree
8522 get_narrower (tree op, int *unsignedp_ptr)
8523 {
8524 int uns = 0;
8525 int first = 1;
8526 tree win = op;
8527 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8528
8529 while (TREE_CODE (op) == NOP_EXPR)
8530 {
8531 int bitschange
8532 = (TYPE_PRECISION (TREE_TYPE (op))
8533 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8534
8535 /* Truncations are many-one so cannot be removed. */
8536 if (bitschange < 0)
8537 break;
8538
8539 /* See what's inside this conversion. If we decide to strip it,
8540 we will set WIN. */
8541
8542 if (bitschange > 0)
8543 {
8544 op = TREE_OPERAND (op, 0);
8545 /* An extension: the outermost one can be stripped,
8546 but remember whether it is zero or sign extension. */
8547 if (first)
8548 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8549 /* Otherwise, if a sign extension has been stripped,
8550 only sign extensions can now be stripped;
8551 if a zero extension has been stripped, only zero-extensions. */
8552 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8553 break;
8554 first = 0;
8555 }
8556 else /* bitschange == 0 */
8557 {
8558 /* A change in nominal type can always be stripped, but we must
8559 preserve the unsignedness. */
8560 if (first)
8561 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8562 first = 0;
8563 op = TREE_OPERAND (op, 0);
8564 /* Keep trying to narrow, but don't assign op to win if it
8565 would turn an integral type into something else. */
8566 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8567 continue;
8568 }
8569
8570 win = op;
8571 }
8572
8573 if (TREE_CODE (op) == COMPONENT_REF
8574 /* Since type_for_size always gives an integer type. */
8575 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8576 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8577 /* Ensure field is laid out already. */
8578 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8579 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8580 {
8581 unsigned HOST_WIDE_INT innerprec
8582 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8583 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8584 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8585 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8586
8587 /* We can get this structure field in a narrower type that fits it,
8588 but the resulting extension to its nominal type (a fullword type)
8589 must satisfy the same conditions as for other extensions.
8590
8591 Do this only for fields that are aligned (not bit-fields),
8592      because there is no advantage in doing this when bit-field
8593      instructions will be used.  */
8594
8595 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8596 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8597 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8598 && type != 0)
8599 {
8600 if (first)
8601 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8602 win = fold_convert (type, op);
8603 }
8604 }
8605
8606 *unsignedp_ptr = uns;
8607 return win;
8608 }
8609 \f
8610 /* Returns true if integer constant C has a value that is permissible
8611 for type TYPE (an INTEGER_TYPE). */
8612
8613 bool
8614 int_fits_type_p (const_tree c, const_tree type)
8615 {
8616 tree type_low_bound, type_high_bound;
8617 bool ok_for_low_bound, ok_for_high_bound;
8618 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8619
8620 retry:
8621 type_low_bound = TYPE_MIN_VALUE (type);
8622 type_high_bound = TYPE_MAX_VALUE (type);
8623
8624 /* If at least one bound of the type is a constant integer, we can check
8625 ourselves and maybe make a decision. If no such decision is possible, but
8626 this type is a subtype, try checking against that. Otherwise, use
8627 fits_to_tree_p, which checks against the precision.
8628
8629 Compute the status for each possibly constant bound, and return if we see
8630      one does not match.  Use ok_for_xxx_bound for this purpose: it is true
8631      when the constant is known to fit the corresponding bound and false when
8632      that is still unknown (a constant known not to fit returns false at once).  */
8633
8634 /* Check if c >= type_low_bound. */
8635 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8636 {
8637 if (tree_int_cst_lt (c, type_low_bound))
8638 return false;
8639 ok_for_low_bound = true;
8640 }
8641 else
8642 ok_for_low_bound = false;
8643
8644 /* Check if c <= type_high_bound. */
8645 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8646 {
8647 if (tree_int_cst_lt (type_high_bound, c))
8648 return false;
8649 ok_for_high_bound = true;
8650 }
8651 else
8652 ok_for_high_bound = false;
8653
8654 /* If the constant fits both bounds, the result is known. */
8655 if (ok_for_low_bound && ok_for_high_bound)
8656 return true;
8657
8658 /* Perform some generic filtering which may allow making a decision
8659 even if the bounds are not constant. First, negative integers
8660      never fit in unsigned types.  */
8661 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8662 return false;
8663
8664 /* Second, narrower types always fit in wider ones. */
8665 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8666 return true;
8667
8668 /* Third, unsigned integers with top bit set never fit signed types. */
8669 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8670 {
8671 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8672 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8673 {
8674 /* When a tree_cst is converted to a wide-int, the precision
8675 is taken from the type. However, if the precision of the
8676 mode underneath the type is smaller than that, it is
8677 possible that the value will not fit. The test below
8678 fails if any bit is set between the sign bit of the
8679 underlying mode and the top bit of the type. */
8680 if (wi::ne_p (wi::zext (c, prec - 1), c))
8681 return false;
8682 }
8683 else if (wi::neg_p (c))
8684 return false;
8685 }
8686
8687   /* If we haven't been able to decide at this point, there is nothing more we
8688 can check ourselves here. Look at the base type if we have one and it
8689 has the same precision. */
8690 if (TREE_CODE (type) == INTEGER_TYPE
8691 && TREE_TYPE (type) != 0
8692 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8693 {
8694 type = TREE_TYPE (type);
8695 goto retry;
8696 }
8697
8698   /* Or defer to fits_to_tree_p, if nothing else.  */
8699 return wi::fits_to_tree_p (c, type);
8700 }
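/* For illustration (C and FITS are invented names): a caller deciding
   whether a constant can be narrowed without changing its value might
   write

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   Here FITS is false on the usual targets, since 300 exceeds the 0 .. 255
   range of an 8-bit unsigned char.  */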
8701
8702 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8703 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8704 represented (assuming two's-complement arithmetic) within the bit
8705 precision of the type are returned instead. */
8706
8707 void
8708 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8709 {
8710 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8711 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8712 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8713 else
8714 {
8715 if (TYPE_UNSIGNED (type))
8716 mpz_set_ui (min, 0);
8717 else
8718 {
8719 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8720 wi::to_mpz (mn, min, SIGNED);
8721 }
8722 }
8723
8724 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8725 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8726 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8727 else
8728 {
8729 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8730 wi::to_mpz (mn, max, TYPE_SIGN (type));
8731 }
8732 }
8733
8734 /* Return true if VAR is an automatic variable defined in function FN. */
8735
8736 bool
8737 auto_var_in_fn_p (const_tree var, const_tree fn)
8738 {
8739 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8740 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8741 || TREE_CODE (var) == PARM_DECL)
8742 && ! TREE_STATIC (var))
8743 || TREE_CODE (var) == LABEL_DECL
8744 || TREE_CODE (var) == RESULT_DECL));
8745 }
8746
8747 /* Subprogram of the following function; called by walk_tree.
8748
8749 Return *TP if it is an automatic variable or parameter of the
8750 function passed in as DATA. */
8751
8752 static tree
8753 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8754 {
8755 tree fn = (tree) data;
8756
8757 if (TYPE_P (*tp))
8758 *walk_subtrees = 0;
8759
8760 else if (DECL_P (*tp)
8761 && auto_var_in_fn_p (*tp, fn))
8762 return *tp;
8763
8764 return NULL_TREE;
8765 }
8766
8767 /* Returns true if T is, contains, or refers to a type with variable
8768 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8769 arguments, but not the return type. If FN is nonzero, only return
8770 true if a modifier of the type or position of FN is a variable or
8771 parameter inside FN.
8772
8773 This concept is more general than that of C99 'variably modified types':
8774 in C99, a struct type is never variably modified because a VLA may not
8775    appear as a structure member.  However, in GNU C, code like:
8776
8777 struct S { int i[f()]; };
8778
8779 is valid, and other languages may define similar constructs. */
8780
8781 bool
8782 variably_modified_type_p (tree type, tree fn)
8783 {
8784 tree t;
8785
8786 /* Test if T is either variable (if FN is zero) or an expression containing
8787 a variable in FN. If TYPE isn't gimplified, return true also if
8788 gimplify_one_sizepos would gimplify the expression into a local
8789 variable. */
8790 #define RETURN_TRUE_IF_VAR(T) \
8791 do { tree _t = (T); \
8792 if (_t != NULL_TREE \
8793 && _t != error_mark_node \
8794 && TREE_CODE (_t) != INTEGER_CST \
8795 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8796 && (!fn \
8797 || (!TYPE_SIZES_GIMPLIFIED (type) \
8798 && !is_gimple_sizepos (_t)) \
8799 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8800 return true; } while (0)
8801
8802 if (type == error_mark_node)
8803 return false;
8804
8805 /* If TYPE itself has variable size, it is variably modified. */
8806 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8807 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8808
8809 switch (TREE_CODE (type))
8810 {
8811 case POINTER_TYPE:
8812 case REFERENCE_TYPE:
8813 case VECTOR_TYPE:
8814 if (variably_modified_type_p (TREE_TYPE (type), fn))
8815 return true;
8816 break;
8817
8818 case FUNCTION_TYPE:
8819 case METHOD_TYPE:
8820 /* If TYPE is a function type, it is variably modified if the
8821 return type is variably modified. */
8822 if (variably_modified_type_p (TREE_TYPE (type), fn))
8823 return true;
8824 break;
8825
8826 case INTEGER_TYPE:
8827 case REAL_TYPE:
8828 case FIXED_POINT_TYPE:
8829 case ENUMERAL_TYPE:
8830 case BOOLEAN_TYPE:
8831 /* Scalar types are variably modified if their end points
8832 aren't constant. */
8833 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8834 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8835 break;
8836
8837 case RECORD_TYPE:
8838 case UNION_TYPE:
8839 case QUAL_UNION_TYPE:
8840 /* We can't see if any of the fields are variably-modified by the
8841 definition we normally use, since that would produce infinite
8842 recursion via pointers. */
8843 /* This is variably modified if some field's type is. */
8844 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8845 if (TREE_CODE (t) == FIELD_DECL)
8846 {
8847 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8848 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8849 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8850
8851 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8852 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8853 }
8854 break;
8855
8856 case ARRAY_TYPE:
8857 /* Do not call ourselves to avoid infinite recursion. This is
8858 variably modified if the element type is. */
8859 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8860 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8861 break;
8862
8863 default:
8864 break;
8865 }
8866
8867 /* The current language may have other cases to check, but in general,
8868 all other types are not variably modified. */
8869 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8870
8871 #undef RETURN_TRUE_IF_VAR
8872 }
8873
8874 /* Given a DECL or TYPE, return the scope in which it was declared, or
8875 NULL_TREE if there is no containing scope. */
8876
8877 tree
8878 get_containing_scope (const_tree t)
8879 {
8880 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8881 }
8882
8883 /* Return the innermost context enclosing DECL that is
8884 a FUNCTION_DECL, or zero if none. */
8885
8886 tree
8887 decl_function_context (const_tree decl)
8888 {
8889 tree context;
8890
8891 if (TREE_CODE (decl) == ERROR_MARK)
8892 return 0;
8893
8894 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8895 where we look up the function at runtime. Such functions always take
8896 a first argument of type 'pointer to real context'.
8897
8898 C++ should really be fixed to use DECL_CONTEXT for the real context,
8899 and use something else for the "virtual context". */
8900 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8901 context
8902 = TYPE_MAIN_VARIANT
8903 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8904 else
8905 context = DECL_CONTEXT (decl);
8906
8907 while (context && TREE_CODE (context) != FUNCTION_DECL)
8908 {
8909 if (TREE_CODE (context) == BLOCK)
8910 context = BLOCK_SUPERCONTEXT (context);
8911 else
8912 context = get_containing_scope (context);
8913 }
8914
8915 return context;
8916 }
8917
8918 /* Return the innermost context enclosing DECL that is
8919 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8920 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8921
8922 tree
8923 decl_type_context (const_tree decl)
8924 {
8925 tree context = DECL_CONTEXT (decl);
8926
8927 while (context)
8928 switch (TREE_CODE (context))
8929 {
8930 case NAMESPACE_DECL:
8931 case TRANSLATION_UNIT_DECL:
8932 return NULL_TREE;
8933
8934 case RECORD_TYPE:
8935 case UNION_TYPE:
8936 case QUAL_UNION_TYPE:
8937 return context;
8938
8939 case TYPE_DECL:
8940 case FUNCTION_DECL:
8941 context = DECL_CONTEXT (context);
8942 break;
8943
8944 case BLOCK:
8945 context = BLOCK_SUPERCONTEXT (context);
8946 break;
8947
8948 default:
8949 gcc_unreachable ();
8950 }
8951
8952 return NULL_TREE;
8953 }
8954
8955 /* CALL is a CALL_EXPR. Return the declaration for the function
8956 called, or NULL_TREE if the called function cannot be
8957 determined. */
8958
8959 tree
8960 get_callee_fndecl (const_tree call)
8961 {
8962 tree addr;
8963
8964 if (call == error_mark_node)
8965 return error_mark_node;
8966
8967 /* It's invalid to call this function with anything but a
8968 CALL_EXPR. */
8969 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8970
8971 /* The first operand to the CALL is the address of the function
8972 called. */
8973 addr = CALL_EXPR_FN (call);
8974
8975 /* If there is no function, return early. */
8976 if (addr == NULL_TREE)
8977 return NULL_TREE;
8978
8979 STRIP_NOPS (addr);
8980
8981 /* If this is a readonly function pointer, extract its initial value. */
8982 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8983 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8984 && DECL_INITIAL (addr))
8985 addr = DECL_INITIAL (addr);
8986
8987 /* If the address is just `&f' for some function `f', then we know
8988 that `f' is being called. */
8989 if (TREE_CODE (addr) == ADDR_EXPR
8990 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8991 return TREE_OPERAND (addr, 0);
8992
8993 /* We couldn't figure out what was being called. */
8994 return NULL_TREE;
8995 }
8996
8997 /* Print debugging information about tree nodes generated during the compile,
8998 and any language-specific information. */
8999
9000 void
9001 dump_tree_statistics (void)
9002 {
9003 if (GATHER_STATISTICS)
9004 {
9005 int i;
9006 int total_nodes, total_bytes;
9007 fprintf (stderr, "Kind Nodes Bytes\n");
9008 fprintf (stderr, "---------------------------------------\n");
9009 total_nodes = total_bytes = 0;
9010 for (i = 0; i < (int) all_kinds; i++)
9011 {
9012 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9013 tree_node_counts[i], tree_node_sizes[i]);
9014 total_nodes += tree_node_counts[i];
9015 total_bytes += tree_node_sizes[i];
9016 }
9017 fprintf (stderr, "---------------------------------------\n");
9018 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9019 fprintf (stderr, "---------------------------------------\n");
9020 fprintf (stderr, "Code Nodes\n");
9021 fprintf (stderr, "----------------------------\n");
9022 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9023 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9024 tree_code_counts[i]);
9025 fprintf (stderr, "----------------------------\n");
9026 ssanames_print_statistics ();
9027 phinodes_print_statistics ();
9028 }
9029 else
9030 fprintf (stderr, "(No per-node statistics)\n");
9031
9032 print_type_hash_statistics ();
9033 print_debug_expr_statistics ();
9034 print_value_expr_statistics ();
9035 lang_hooks.print_statistics ();
9036 }
9037 \f
9038 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9039
9040 /* Advance the crc32 CHKSUM by the BITS most significant bits of VALUE.  */
9041
9042 static unsigned
9043 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9044 {
9045 unsigned ix;
9046
9047 for (ix = bits; ix--; value <<= 1)
9048 {
9049 unsigned feedback;
9050
9051 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9052 chksum <<= 1;
9053 chksum ^= feedback;
9054 }
9055 return chksum;
9056 }
9057
9058 /* Generate a crc32 of a 32-bit unsigned. */
9059
9060 unsigned
9061 crc32_unsigned (unsigned chksum, unsigned value)
9062 {
9063 return crc32_unsigned_bits (chksum, value, 32);
9064 }
9065
9066 /* Generate a crc32 of a byte. */
9067
9068 unsigned
9069 crc32_byte (unsigned chksum, char byte)
9070 {
9071 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9072 }
9073
9074 /* Generate a crc32 of a string. */
9075
9076 unsigned
9077 crc32_string (unsigned chksum, const char *string)
9078 {
9079 do
9080 {
9081 chksum = crc32_byte (chksum, *string);
9082 }
9083 while (*string++);
9084 return chksum;
9085 }
9086
9087 /* P is a string that will be used in a symbol. Mask out any characters
9088 that are not valid in that context. */
9089
9090 void
9091 clean_symbol_name (char *p)
9092 {
9093 for (; *p; p++)
9094 if (! (ISALNUM (*p)
9095 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9096 || *p == '$'
9097 #endif
9098 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9099 || *p == '.'
9100 #endif
9101 ))
9102 *p = '_';
9103 }
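/* For illustration (BUF is an invented name): on a target where neither
   '$' nor '.' is allowed in labels, cleaning a buffer holding "foo-bar.c"
   rewrites it in place to "foo_bar_c":

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);

   Only alphanumerics (plus the optionally permitted '$' and '.') survive;
   every other character becomes '_'.  */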
9104
9105 /* Generate a name for a special-purpose function.
9106 The generated name may need to be unique across the whole link.
9107 Changes to this function may also require corresponding changes to
9108 xstrdup_mask_random.
9109 TYPE is some string to identify the purpose of this function to the
9110 linker or collect2; it must start with an uppercase letter,
9111 one of:
9112 I - for constructors
9113 D - for destructors
9114 N - for C++ anonymous namespaces
9115 F - for DWARF unwind frame information. */
9116
9117 tree
9118 get_file_function_name (const char *type)
9119 {
9120 char *buf;
9121 const char *p;
9122 char *q;
9123
9124 /* If we already have a name we know to be unique, just use that. */
9125 if (first_global_object_name)
9126 p = q = ASTRDUP (first_global_object_name);
9127 /* If the target is handling the constructors/destructors, they
9128 will be local to this file and the name is only necessary for
9129 debugging purposes.
9130      We also assign sub_I and sub_D suffixes to constructors called from
9131 the global static constructors. These are always local. */
9132 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9133 || (strncmp (type, "sub_", 4) == 0
9134 && (type[4] == 'I' || type[4] == 'D')))
9135 {
9136 const char *file = main_input_filename;
9137 if (! file)
9138 file = LOCATION_FILE (input_location);
9139 /* Just use the file's basename, because the full pathname
9140 might be quite long. */
9141 p = q = ASTRDUP (lbasename (file));
9142 }
9143 else
9144 {
9145 /* Otherwise, the name must be unique across the entire link.
9146 We don't have anything that we know to be unique to this translation
9147 unit, so use what we do have and throw in some randomness. */
9148 unsigned len;
9149 const char *name = weak_global_object_name;
9150 const char *file = main_input_filename;
9151
9152 if (! name)
9153 name = "";
9154 if (! file)
9155 file = LOCATION_FILE (input_location);
9156
9157 len = strlen (file);
9158 q = (char *) alloca (9 + 17 + len + 1);
9159 memcpy (q, file, len + 1);
9160
9161 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9162 crc32_string (0, name), get_random_seed (false));
9163
9164 p = q;
9165 }
9166
9167 clean_symbol_name (q);
9168 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9169 + strlen (type));
9170
9171 /* Set up the name of the file-level functions we may need.
9172 Use a global object (which is already required to be unique over
9173 the program) rather than the file name (which imposes extra
9174 constraints). */
9175 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9176
9177 return get_identifier (buf);
9178 }
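/* For illustration (CTOR_NAME is an invented name): the static-constructor
   machinery obtains its per-file helper name with a call such as

     tree ctor_name = get_file_function_name ("I");

   which, when the first global object in the file is "foo", yields an
   identifier along the lines of "_GLOBAL__I_foo"; the exact tail depends
   on which branch above supplied the string P.  */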
9179 \f
9180 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9181
9182 /* Complain that the tree code of NODE does not match the expected 0
9183 terminated list of trailing codes. The trailing code list can be
9184 empty, for a more vague error message. FILE, LINE, and FUNCTION
9185 are of the caller. */
9186
9187 void
9188 tree_check_failed (const_tree node, const char *file,
9189 int line, const char *function, ...)
9190 {
9191 va_list args;
9192 const char *buffer;
9193 unsigned length = 0;
9194 enum tree_code code;
9195
9196 va_start (args, function);
9197 while ((code = (enum tree_code) va_arg (args, int)))
9198 length += 4 + strlen (get_tree_code_name (code));
9199 va_end (args);
9200 if (length)
9201 {
9202 char *tmp;
9203 va_start (args, function);
9204 length += strlen ("expected ");
9205 buffer = tmp = (char *) alloca (length);
9206 length = 0;
9207 while ((code = (enum tree_code) va_arg (args, int)))
9208 {
9209 const char *prefix = length ? " or " : "expected ";
9210
9211 strcpy (tmp + length, prefix);
9212 length += strlen (prefix);
9213 strcpy (tmp + length, get_tree_code_name (code));
9214 length += strlen (get_tree_code_name (code));
9215 }
9216 va_end (args);
9217 }
9218 else
9219 buffer = "unexpected node";
9220
9221 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9222 buffer, get_tree_code_name (TREE_CODE (node)),
9223 function, trim_filename (file), line);
9224 }
9225
9226 /* Complain that the tree code of NODE matches one of the codes in the
9227    0-terminated list of trailing codes, none of which was expected here.
9228    FILE, LINE, and FUNCTION are of the caller.  */
9229
9230 void
9231 tree_not_check_failed (const_tree node, const char *file,
9232 int line, const char *function, ...)
9233 {
9234 va_list args;
9235 char *buffer;
9236 unsigned length = 0;
9237 enum tree_code code;
9238
9239 va_start (args, function);
9240 while ((code = (enum tree_code) va_arg (args, int)))
9241 length += 4 + strlen (get_tree_code_name (code));
9242 va_end (args);
9243 va_start (args, function);
9244 buffer = (char *) alloca (length);
9245 length = 0;
9246 while ((code = (enum tree_code) va_arg (args, int)))
9247 {
9248 if (length)
9249 {
9250 strcpy (buffer + length, " or ");
9251 length += 4;
9252 }
9253 strcpy (buffer + length, get_tree_code_name (code));
9254 length += strlen (get_tree_code_name (code));
9255 }
9256 va_end (args);
9257
9258 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9259 buffer, get_tree_code_name (TREE_CODE (node)),
9260 function, trim_filename (file), line);
9261 }
9262
9263 /* Similar to tree_check_failed, except that we check for a class of tree
9264 code, given in CL. */
9265
9266 void
9267 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9268 const char *file, int line, const char *function)
9269 {
9270 internal_error
9271 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9272 TREE_CODE_CLASS_STRING (cl),
9273 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9274 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9275 }
9276
9277 /* Similar to tree_check_failed, except that instead of specifying a
9278 dozen codes, use the knowledge that they're all sequential. */
9279
9280 void
9281 tree_range_check_failed (const_tree node, const char *file, int line,
9282 const char *function, enum tree_code c1,
9283 enum tree_code c2)
9284 {
9285 char *buffer;
9286 unsigned length = 0;
9287 unsigned int c;
9288
9289 for (c = c1; c <= c2; ++c)
9290 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9291
9292 length += strlen ("expected ");
9293 buffer = (char *) alloca (length);
9294 length = 0;
9295
9296 for (c = c1; c <= c2; ++c)
9297 {
9298 const char *prefix = length ? " or " : "expected ";
9299
9300 strcpy (buffer + length, prefix);
9301 length += strlen (prefix);
9302 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9303 length += strlen (get_tree_code_name ((enum tree_code) c));
9304 }
9305
9306 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9307 buffer, get_tree_code_name (TREE_CODE (node)),
9308 function, trim_filename (file), line);
9309 }
9310
9311
9312 /* Similar to tree_check_failed, except that we check that a tree does
9313    not belong to the class of tree codes specified by CL.  */
9314
9315 void
9316 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9317 const char *file, int line, const char *function)
9318 {
9319 internal_error
9320 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9321 TREE_CODE_CLASS_STRING (cl),
9322 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9323 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9324 }
9325
9326
9327 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9328
9329 void
9330 omp_clause_check_failed (const_tree node, const char *file, int line,
9331 const char *function, enum omp_clause_code code)
9332 {
9333 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9334 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9335 function, trim_filename (file), line);
9336 }
9337
9338
9339 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9340
9341 void
9342 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9343 const char *function, enum omp_clause_code c1,
9344 enum omp_clause_code c2)
9345 {
9346 char *buffer;
9347 unsigned length = 0;
9348 unsigned int c;
9349
9350 for (c = c1; c <= c2; ++c)
9351 length += 4 + strlen (omp_clause_code_name[c]);
9352
9353 length += strlen ("expected ");
9354 buffer = (char *) alloca (length);
9355 length = 0;
9356
9357 for (c = c1; c <= c2; ++c)
9358 {
9359 const char *prefix = length ? " or " : "expected ";
9360
9361 strcpy (buffer + length, prefix);
9362 length += strlen (prefix);
9363 strcpy (buffer + length, omp_clause_code_name[c]);
9364 length += strlen (omp_clause_code_name[c]);
9365 }
9366
9367 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9368 buffer, omp_clause_code_name[TREE_CODE (node)],
9369 function, trim_filename (file), line);
9370 }
9371
9372
9373 #undef DEFTREESTRUCT
9374 #define DEFTREESTRUCT(VAL, NAME) NAME,
9375
9376 static const char *ts_enum_names[] = {
9377 #include "treestruct.def"
9378 };
9379 #undef DEFTREESTRUCT
9380
9381 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9382
9383 /* Similar to tree_class_check_failed, except that we check for
9384 whether CODE contains the tree structure identified by EN. */
9385
9386 void
9387 tree_contains_struct_check_failed (const_tree node,
9388 const enum tree_node_structure_enum en,
9389 const char *file, int line,
9390 const char *function)
9391 {
9392 internal_error
9393 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9394 TS_ENUM_NAME (en),
9395 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9396 }
9397
9398
9399 /* Similar to above, except that the check is for the bounds of a
9400    tree_int_cst's (dynamically sized) vector of elements.  */
9401
9402 void
9403 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9404 const char *function)
9405 {
9406 internal_error
9407 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9408 idx + 1, len, function, trim_filename (file), line);
9409 }
9410
9411 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9412 (dynamically sized) vector. */
9413
9414 void
9415 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9416 const char *function)
9417 {
9418 internal_error
9419 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9420 idx + 1, len, function, trim_filename (file), line);
9421 }
9422
9423 /* Similar to above, except that the check is for the bounds of the operand
9424 vector of an expression node EXP. */
9425
9426 void
9427 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9428 int line, const char *function)
9429 {
9430 enum tree_code code = TREE_CODE (exp);
9431 internal_error
9432 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9433 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9434 function, trim_filename (file), line);
9435 }
9436
9437 /* Similar to above, except that the check is for the number of
9438 operands of an OMP_CLAUSE node. */
9439
9440 void
9441 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9442 int line, const char *function)
9443 {
9444 internal_error
9445 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9446 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9447 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9448 trim_filename (file), line);
9449 }
9450 #endif /* ENABLE_TREE_CHECKING */
9451 \f
9452 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9453 and mapped to the machine mode MODE. Initialize its fields and build
9454 the information necessary for debugging output. */
9455
9456 static tree
9457 make_vector_type (tree innertype, int nunits, machine_mode mode)
9458 {
9459 tree t;
9460 inchash::hash hstate;
9461
9462 t = make_node (VECTOR_TYPE);
9463 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9464 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9465 SET_TYPE_MODE (t, mode);
9466
9467 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9468 SET_TYPE_STRUCTURAL_EQUALITY (t);
9469 else if (TYPE_CANONICAL (innertype) != innertype
9470 || mode != VOIDmode)
9471 TYPE_CANONICAL (t)
9472 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9473
9474 layout_type (t);
9475
9476 hstate.add_wide_int (VECTOR_TYPE);
9477 hstate.add_wide_int (nunits);
9478 hstate.add_wide_int (mode);
9479 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9480 t = type_hash_canon (hstate.end (), t);
9481
9482 /* We have built a main variant, based on the main variant of the
9483 inner type. Use it to build the variant we return. */
9484 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9485 && TREE_TYPE (t) != innertype)
9486 return build_type_attribute_qual_variant (t,
9487 TYPE_ATTRIBUTES (innertype),
9488 TYPE_QUALS (innertype));
9489
9490 return t;
9491 }
9492
9493 static tree
9494 make_or_reuse_type (unsigned size, int unsignedp)
9495 {
9496 int i;
9497
9498 if (size == INT_TYPE_SIZE)
9499 return unsignedp ? unsigned_type_node : integer_type_node;
9500 if (size == CHAR_TYPE_SIZE)
9501 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9502 if (size == SHORT_TYPE_SIZE)
9503 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9504 if (size == LONG_TYPE_SIZE)
9505 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9506 if (size == LONG_LONG_TYPE_SIZE)
9507 return (unsignedp ? long_long_unsigned_type_node
9508 : long_long_integer_type_node);
9509
9510 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9511 if (size == int_n_data[i].bitsize
9512 && int_n_enabled_p[i])
9513 return (unsignedp ? int_n_trees[i].unsigned_type
9514 : int_n_trees[i].signed_type);
9515
9516 if (unsignedp)
9517 return make_unsigned_type (size);
9518 else
9519 return make_signed_type (size);
9520 }
9521
9522 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9523
9524 static tree
9525 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9526 {
9527 if (satp)
9528 {
9529 if (size == SHORT_FRACT_TYPE_SIZE)
9530 return unsignedp ? sat_unsigned_short_fract_type_node
9531 : sat_short_fract_type_node;
9532 if (size == FRACT_TYPE_SIZE)
9533 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9534 if (size == LONG_FRACT_TYPE_SIZE)
9535 return unsignedp ? sat_unsigned_long_fract_type_node
9536 : sat_long_fract_type_node;
9537 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9538 return unsignedp ? sat_unsigned_long_long_fract_type_node
9539 : sat_long_long_fract_type_node;
9540 }
9541 else
9542 {
9543 if (size == SHORT_FRACT_TYPE_SIZE)
9544 return unsignedp ? unsigned_short_fract_type_node
9545 : short_fract_type_node;
9546 if (size == FRACT_TYPE_SIZE)
9547 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9548 if (size == LONG_FRACT_TYPE_SIZE)
9549 return unsignedp ? unsigned_long_fract_type_node
9550 : long_fract_type_node;
9551 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9552 return unsignedp ? unsigned_long_long_fract_type_node
9553 : long_long_fract_type_node;
9554 }
9555
9556 return make_fract_type (size, unsignedp, satp);
9557 }
9558
9559 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9560
9561 static tree
9562 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9563 {
9564 if (satp)
9565 {
9566 if (size == SHORT_ACCUM_TYPE_SIZE)
9567 return unsignedp ? sat_unsigned_short_accum_type_node
9568 : sat_short_accum_type_node;
9569 if (size == ACCUM_TYPE_SIZE)
9570 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9571 if (size == LONG_ACCUM_TYPE_SIZE)
9572 return unsignedp ? sat_unsigned_long_accum_type_node
9573 : sat_long_accum_type_node;
9574 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9575 return unsignedp ? sat_unsigned_long_long_accum_type_node
9576 : sat_long_long_accum_type_node;
9577 }
9578 else
9579 {
9580 if (size == SHORT_ACCUM_TYPE_SIZE)
9581 return unsignedp ? unsigned_short_accum_type_node
9582 : short_accum_type_node;
9583 if (size == ACCUM_TYPE_SIZE)
9584 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9585 if (size == LONG_ACCUM_TYPE_SIZE)
9586 return unsignedp ? unsigned_long_accum_type_node
9587 : long_accum_type_node;
9588 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9589 return unsignedp ? unsigned_long_long_accum_type_node
9590 : long_long_accum_type_node;
9591 }
9592
9593 return make_accum_type (size, unsignedp, satp);
9594 }
9595
9596
9597 /* Create an atomic variant node for TYPE. This routine is called
9598 during initialization of data types to create the 5 basic atomic
9599 types. The generic build_variant_type function requires these to
9600 already be set up in order to function properly, so cannot be
9601 called from there. If ALIGN is non-zero, then ensure alignment is
9602 overridden to this value. */
9603
9604 static tree
9605 build_atomic_base (tree type, unsigned int align)
9606 {
9607 tree t;
9608
9609 /* Make sure it's not already registered. */
9610 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9611 return t;
9612
9613 t = build_variant_type_copy (type);
9614 set_type_quals (t, TYPE_QUAL_ATOMIC);
9615
9616 if (align)
9617 TYPE_ALIGN (t) = align;
9618
9619 return t;
9620 }
9621
9622 /* Create nodes for all integer types (and error_mark_node) using the sizes
9623 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9624 SHORT_DOUBLE specifies whether double should be of the same precision
9625 as float. */
9626
9627 void
9628 build_common_tree_nodes (bool signed_char, bool short_double)
9629 {
9630 int i;
9631
9632 error_mark_node = make_node (ERROR_MARK);
9633 TREE_TYPE (error_mark_node) = error_mark_node;
9634
9635 initialize_sizetypes ();
9636
9637 /* Define both `signed char' and `unsigned char'. */
9638 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9639 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9640 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9641 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9642
9643 /* Define `char', which is like either `signed char' or `unsigned char'
9644 but not the same as either. */
9645 char_type_node
9646 = (signed_char
9647 ? make_signed_type (CHAR_TYPE_SIZE)
9648 : make_unsigned_type (CHAR_TYPE_SIZE));
9649 TYPE_STRING_FLAG (char_type_node) = 1;
9650
9651 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9652 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9653 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9654 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9655 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9656 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9657 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9658 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9659
9660 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9661 {
9662 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9663 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9664 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9665 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9666
9667 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9668 && int_n_enabled_p[i])
9669 {
9670 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9671 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9672 }
9673 }
9674
9675 /* Define a boolean type. This type only represents boolean values but
9676 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9677 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9678 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9679 TYPE_PRECISION (boolean_type_node) = 1;
9680 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9681
9682 /* Define what type to use for size_t. */
9683 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9684 size_type_node = unsigned_type_node;
9685 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9686 size_type_node = long_unsigned_type_node;
9687 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9688 size_type_node = long_long_unsigned_type_node;
9689 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9690 size_type_node = short_unsigned_type_node;
9691 else
9692 {
9693 int i;
9694
9695 size_type_node = NULL_TREE;
9696 for (i = 0; i < NUM_INT_N_ENTS; i++)
9697 if (int_n_enabled_p[i])
9698 {
9699 char name[50];
9700 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9701
9702 if (strcmp (name, SIZE_TYPE) == 0)
9703 {
9704 size_type_node = int_n_trees[i].unsigned_type;
9705 }
9706 }
9707 if (size_type_node == NULL_TREE)
9708 gcc_unreachable ();
9709 }
9710
9711 /* Fill in the rest of the sized types. Reuse existing type nodes
9712 when possible. */
9713 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9714 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9715 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9716 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9717 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9718
9719 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9720 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9721 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9722 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9723 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9724
9725 /* Don't call build_qualified_type for atomics. That routine does
9726 special processing for atomics, and until they are initialized
9727 it's better not to make that call.
9728
9729 Check to see if there is a target override for atomic types. */
9730
9731 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9732 targetm.atomic_align_for_mode (QImode));
9733 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9734 targetm.atomic_align_for_mode (HImode));
9735 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9736 targetm.atomic_align_for_mode (SImode));
9737 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9738 targetm.atomic_align_for_mode (DImode));
9739 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9740 targetm.atomic_align_for_mode (TImode));
9741
9742 access_public_node = get_identifier ("public");
9743 access_protected_node = get_identifier ("protected");
9744 access_private_node = get_identifier ("private");
9745
9746 /* Define these next since types below may use them. */
9747 integer_zero_node = build_int_cst (integer_type_node, 0);
9748 integer_one_node = build_int_cst (integer_type_node, 1);
9749 integer_three_node = build_int_cst (integer_type_node, 3);
9750 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9751
9752 size_zero_node = size_int (0);
9753 size_one_node = size_int (1);
9754 bitsize_zero_node = bitsize_int (0);
9755 bitsize_one_node = bitsize_int (1);
9756 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9757
9758 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9759 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9760
9761 void_type_node = make_node (VOID_TYPE);
9762 layout_type (void_type_node);
9763
9764 pointer_bounds_type_node = targetm.chkp_bound_type ();
9765
9766 /* We are not going to have real types in C with less than byte alignment,
9767 so we might as well not have any types that claim to have it. */
9768 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9769 TYPE_USER_ALIGN (void_type_node) = 0;
9770
9771 void_node = make_node (VOID_CST);
9772 TREE_TYPE (void_node) = void_type_node;
9773
9774 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9775 layout_type (TREE_TYPE (null_pointer_node));
9776
9777 ptr_type_node = build_pointer_type (void_type_node);
9778 const_ptr_type_node
9779 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9780 fileptr_type_node = ptr_type_node;
9781
9782 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9783
9784 float_type_node = make_node (REAL_TYPE);
9785 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9786 layout_type (float_type_node);
9787
9788 double_type_node = make_node (REAL_TYPE);
9789 if (short_double)
9790 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9791 else
9792 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9793 layout_type (double_type_node);
9794
9795 long_double_type_node = make_node (REAL_TYPE);
9796 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9797 layout_type (long_double_type_node);
9798
9799 float_ptr_type_node = build_pointer_type (float_type_node);
9800 double_ptr_type_node = build_pointer_type (double_type_node);
9801 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9802 integer_ptr_type_node = build_pointer_type (integer_type_node);
9803
9804 /* Fixed size integer types. */
9805 uint16_type_node = make_or_reuse_type (16, 1);
9806 uint32_type_node = make_or_reuse_type (32, 1);
9807 uint64_type_node = make_or_reuse_type (64, 1);
9808
9809 /* Decimal float types. */
9810 dfloat32_type_node = make_node (REAL_TYPE);
9811 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9812 layout_type (dfloat32_type_node);
9813 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9814 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9815
9816 dfloat64_type_node = make_node (REAL_TYPE);
9817 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9818 layout_type (dfloat64_type_node);
9819 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9820 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9821
9822 dfloat128_type_node = make_node (REAL_TYPE);
9823 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9824 layout_type (dfloat128_type_node);
9825 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9826 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9827
9828 complex_integer_type_node = build_complex_type (integer_type_node);
9829 complex_float_type_node = build_complex_type (float_type_node);
9830 complex_double_type_node = build_complex_type (double_type_node);
9831 complex_long_double_type_node = build_complex_type (long_double_type_node);
9832
9833 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9834 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9835 sat_ ## KIND ## _type_node = \
9836 make_sat_signed_ ## KIND ## _type (SIZE); \
9837 sat_unsigned_ ## KIND ## _type_node = \
9838 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9839 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9840 unsigned_ ## KIND ## _type_node = \
9841 make_unsigned_ ## KIND ## _type (SIZE);
9842
9843 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9844 sat_ ## WIDTH ## KIND ## _type_node = \
9845 make_sat_signed_ ## KIND ## _type (SIZE); \
9846 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9847 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9848 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9849 unsigned_ ## WIDTH ## KIND ## _type_node = \
9850 make_unsigned_ ## KIND ## _type (SIZE);
9851
9852 /* Make fixed-point type nodes based on four different widths. */
9853 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9854 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9855 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9856 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9857 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9858
9859 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9860 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9861 NAME ## _type_node = \
9862 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9863 u ## NAME ## _type_node = \
9864 make_or_reuse_unsigned_ ## KIND ## _type \
9865 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9866 sat_ ## NAME ## _type_node = \
9867 make_or_reuse_sat_signed_ ## KIND ## _type \
9868 (GET_MODE_BITSIZE (MODE ## mode)); \
9869 sat_u ## NAME ## _type_node = \
9870 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9871 (GET_MODE_BITSIZE (U ## MODE ## mode));
9872
9873 /* Fixed-point type and mode nodes. */
9874 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9875 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9876 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9877 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9878 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9879 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9880 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9881 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9882 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9883 MAKE_FIXED_MODE_NODE (accum, da, DA)
9884 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9885
9886 {
9887 tree t = targetm.build_builtin_va_list ();
9888
9889 /* Many back-ends define record types without setting TYPE_NAME.
9890 If we copied the record type here, we'd keep the original
9891 record type without a name. This breaks name mangling. So,
9892 don't copy record types and let c_common_nodes_and_builtins()
9893 declare the type to be __builtin_va_list. */
9894 if (TREE_CODE (t) != RECORD_TYPE)
9895 t = build_variant_type_copy (t);
9896
9897 va_list_type_node = t;
9898 }
9899 }
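
/* Illustrative sketch (not in the original sources): a front end normally
   calls the routine above exactly once during initialization, before it
   creates any language-specific nodes; the flags shown are the usual
   C-family options, and the exact call site varies per front end.

     build_common_tree_nodes (flag_signed_char, flag_short_double);
     ... front-end specific type and builtin setup ...
     build_common_builtin_nodes ();
*/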
9900
9901 /* Modify DECL for given flags.
9902 TM_PURE attribute is set only on types, so the function will modify
9903 DECL's type when ECF_TM_PURE is used. */
9904
9905 void
9906 set_call_expr_flags (tree decl, int flags)
9907 {
9908 if (flags & ECF_NOTHROW)
9909 TREE_NOTHROW (decl) = 1;
9910 if (flags & ECF_CONST)
9911 TREE_READONLY (decl) = 1;
9912 if (flags & ECF_PURE)
9913 DECL_PURE_P (decl) = 1;
9914 if (flags & ECF_LOOPING_CONST_OR_PURE)
9915 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9916 if (flags & ECF_NOVOPS)
9917 DECL_IS_NOVOPS (decl) = 1;
9918 if (flags & ECF_NORETURN)
9919 TREE_THIS_VOLATILE (decl) = 1;
9920 if (flags & ECF_MALLOC)
9921 DECL_IS_MALLOC (decl) = 1;
9922 if (flags & ECF_RETURNS_TWICE)
9923 DECL_IS_RETURNS_TWICE (decl) = 1;
9924 if (flags & ECF_LEAF)
9925 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9926 NULL, DECL_ATTRIBUTES (decl));
9927 if ((flags & ECF_TM_PURE) && flag_tm)
9928 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9929 /* Looping const or pure is implied by noreturn.
9930 There is currently no way to declare looping const or looping pure alone. */
9931 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9932 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9933 }
9934
9935
9936 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9937
9938 static void
9939 local_define_builtin (const char *name, tree type, enum built_in_function code,
9940 const char *library_name, int ecf_flags)
9941 {
9942 tree decl;
9943
9944 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9945 library_name, NULL_TREE);
9946 set_call_expr_flags (decl, ecf_flags);
9947
9948 set_builtin_decl (code, decl, true);
9949 }
9950
9951 /* Call this function after instantiating all builtins that the language
9952 front end cares about. This will build the rest of the builtins
9953 and internal functions that are relied upon by the tree optimizers and
9954 the middle-end. */
9955
9956 void
9957 build_common_builtin_nodes (void)
9958 {
9959 tree tmp, ftype;
9960 int ecf_flags;
9961
9962 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9963 {
9964 ftype = build_function_type (void_type_node, void_list_node);
9965 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9966 "__builtin_unreachable",
9967 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9968 | ECF_CONST);
9969 }
9970
9971 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9972 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9973 {
9974 ftype = build_function_type_list (ptr_type_node,
9975 ptr_type_node, const_ptr_type_node,
9976 size_type_node, NULL_TREE);
9977
9978 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9979 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9980 "memcpy", ECF_NOTHROW | ECF_LEAF);
9981 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9982 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9983 "memmove", ECF_NOTHROW | ECF_LEAF);
9984 }
9985
9986 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9987 {
9988 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9989 const_ptr_type_node, size_type_node,
9990 NULL_TREE);
9991 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9992 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9993 }
9994
9995 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9996 {
9997 ftype = build_function_type_list (ptr_type_node,
9998 ptr_type_node, integer_type_node,
9999 size_type_node, NULL_TREE);
10000 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10001 "memset", ECF_NOTHROW | ECF_LEAF);
10002 }
10003
10004 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10005 {
10006 ftype = build_function_type_list (ptr_type_node,
10007 size_type_node, NULL_TREE);
10008 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10009 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10010 }
10011
10012 ftype = build_function_type_list (ptr_type_node, size_type_node,
10013 size_type_node, NULL_TREE);
10014 local_define_builtin ("__builtin_alloca_with_align", ftype,
10015 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
10016 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10017
10018 /* If we're checking the stack, `alloca' can throw. */
10019 if (flag_stack_check)
10020 {
10021 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10022 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10023 }
10024
10025 ftype = build_function_type_list (void_type_node,
10026 ptr_type_node, ptr_type_node,
10027 ptr_type_node, NULL_TREE);
10028 local_define_builtin ("__builtin_init_trampoline", ftype,
10029 BUILT_IN_INIT_TRAMPOLINE,
10030 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10031 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10032 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10033 "__builtin_init_heap_trampoline",
10034 ECF_NOTHROW | ECF_LEAF);
10035
10036 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10037 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10038 BUILT_IN_ADJUST_TRAMPOLINE,
10039 "__builtin_adjust_trampoline",
10040 ECF_CONST | ECF_NOTHROW);
10041
10042 ftype = build_function_type_list (void_type_node,
10043 ptr_type_node, ptr_type_node, NULL_TREE);
10044 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10045 BUILT_IN_NONLOCAL_GOTO,
10046 "__builtin_nonlocal_goto",
10047 ECF_NORETURN | ECF_NOTHROW);
10048
10049 ftype = build_function_type_list (void_type_node,
10050 ptr_type_node, ptr_type_node, NULL_TREE);
10051 local_define_builtin ("__builtin_setjmp_setup", ftype,
10052 BUILT_IN_SETJMP_SETUP,
10053 "__builtin_setjmp_setup", ECF_NOTHROW);
10054
10055 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10056 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10057 BUILT_IN_SETJMP_RECEIVER,
10058 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10059
10060 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10061 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10062 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10063
10064 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10065 local_define_builtin ("__builtin_stack_restore", ftype,
10066 BUILT_IN_STACK_RESTORE,
10067 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10068
10069 /* If there's a possibility that we might use the ARM EABI, build the
10070 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10071 if (targetm.arm_eabi_unwinder)
10072 {
10073 ftype = build_function_type_list (void_type_node, NULL_TREE);
10074 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10075 BUILT_IN_CXA_END_CLEANUP,
10076 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10077 }
10078
10079 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10080 local_define_builtin ("__builtin_unwind_resume", ftype,
10081 BUILT_IN_UNWIND_RESUME,
10082 ((targetm_common.except_unwind_info (&global_options)
10083 == UI_SJLJ)
10084 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10085 ECF_NORETURN);
10086
10087 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10088 {
10089 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10090 NULL_TREE);
10091 local_define_builtin ("__builtin_return_address", ftype,
10092 BUILT_IN_RETURN_ADDRESS,
10093 "__builtin_return_address",
10094 ECF_NOTHROW);
10095 }
10096
10097 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10098 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10099 {
10100 ftype = build_function_type_list (void_type_node, ptr_type_node,
10101 ptr_type_node, NULL_TREE);
10102 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10103 local_define_builtin ("__cyg_profile_func_enter", ftype,
10104 BUILT_IN_PROFILE_FUNC_ENTER,
10105 "__cyg_profile_func_enter", 0);
10106 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10107 local_define_builtin ("__cyg_profile_func_exit", ftype,
10108 BUILT_IN_PROFILE_FUNC_EXIT,
10109 "__cyg_profile_func_exit", 0);
10110 }
10111
10112 /* The exception object and filter values from the runtime. The argument
10113 must be zero before exception lowering, i.e. from the front end. After
10114 exception lowering, it will be the region number for the exception
10115 landing pad. These functions are PURE instead of CONST to prevent
10116 them from being hoisted past the exception edge that will initialize
10117 its value in the landing pad. */
10118 ftype = build_function_type_list (ptr_type_node,
10119 integer_type_node, NULL_TREE);
10120 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10121 /* Only use TM_PURE if we have TM language support. */
10122 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10123 ecf_flags |= ECF_TM_PURE;
10124 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10125 "__builtin_eh_pointer", ecf_flags);
10126
10127 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10128 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10129 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10130 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10131
10132 ftype = build_function_type_list (void_type_node,
10133 integer_type_node, integer_type_node,
10134 NULL_TREE);
10135 local_define_builtin ("__builtin_eh_copy_values", ftype,
10136 BUILT_IN_EH_COPY_VALUES,
10137 "__builtin_eh_copy_values", ECF_NOTHROW);
10138
10139 /* Complex multiplication and division. These are handled as builtins
10140 rather than optabs because emit_library_call_value doesn't support
10141 complex. Further, we can do slightly better with folding these
10142 beasties if the real and imaginary parts of the arguments are separate. */
10143 {
10144 int mode;
10145
10146 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10147 {
10148 char mode_name_buf[4], *q;
10149 const char *p;
10150 enum built_in_function mcode, dcode;
10151 tree type, inner_type;
10152 const char *prefix = "__";
10153
10154 if (targetm.libfunc_gnu_prefix)
10155 prefix = "__gnu_";
10156
10157 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10158 if (type == NULL)
10159 continue;
10160 inner_type = TREE_TYPE (type);
10161
10162 ftype = build_function_type_list (type, inner_type, inner_type,
10163 inner_type, inner_type, NULL_TREE);
10164
10165 mcode = ((enum built_in_function)
10166 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10167 dcode = ((enum built_in_function)
10168 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10169
10170 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10171 *q = TOLOWER (*p);
10172 *q = '\0';
10173
10174 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10175 NULL);
10176 local_define_builtin (built_in_names[mcode], ftype, mcode,
10177 built_in_names[mcode],
10178 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10179
10180 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10181 NULL);
10182 local_define_builtin (built_in_names[dcode], ftype, dcode,
10183 built_in_names[dcode],
10184 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10185 }
10186 }
10187
10188 init_internal_fns ();
10189 }
10190
10191 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10192 better way.
10193
10194 If we requested a pointer to a vector, build up the pointers that
10195 we stripped off while looking for the inner type. Similarly for
10196 return values from functions.
10197
10198 The argument TYPE is the top of the chain, and BOTTOM is the
10199 new type which we will point to. */
10200
10201 tree
10202 reconstruct_complex_type (tree type, tree bottom)
10203 {
10204 tree inner, outer;
10205
10206 if (TREE_CODE (type) == POINTER_TYPE)
10207 {
10208 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10209 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10210 TYPE_REF_CAN_ALIAS_ALL (type));
10211 }
10212 else if (TREE_CODE (type) == REFERENCE_TYPE)
10213 {
10214 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10215 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10216 TYPE_REF_CAN_ALIAS_ALL (type));
10217 }
10218 else if (TREE_CODE (type) == ARRAY_TYPE)
10219 {
10220 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10221 outer = build_array_type (inner, TYPE_DOMAIN (type));
10222 }
10223 else if (TREE_CODE (type) == FUNCTION_TYPE)
10224 {
10225 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10226 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10227 }
10228 else if (TREE_CODE (type) == METHOD_TYPE)
10229 {
10230 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10231 /* The build_method_type_directly() routine prepends 'this' to the argument
10232 list, so we must compensate by getting rid of it here. */
10233 outer
10234 = build_method_type_directly
10235 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10236 inner,
10237 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10238 }
10239 else if (TREE_CODE (type) == OFFSET_TYPE)
10240 {
10241 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10242 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10243 }
10244 else
10245 return bottom;
10246
10247 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10248 TYPE_QUALS (type));
10249 }
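
/* Illustrative sketch (not in the original sources): rebuilding a pointer
   chain around a new innermost type, as done for vector builtins.

     tree v4sf = build_vector_type (float_type_node, 4);
     tree ppf  = build_pointer_type (build_pointer_type (float_type_node));
     tree pv   = reconstruct_complex_type (ppf, v4sf);

   PPF is "float **"; both POINTER_TYPE levels are rebuilt in PV, but the
   innermost float is replaced by V4SF, giving a pointer to a pointer to
   the vector type.  */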
10250
10251 /* Returns a vector tree node given an integer or vector machine mode and
10252 the inner type. */
10253 tree
10254 build_vector_type_for_mode (tree innertype, machine_mode mode)
10255 {
10256 int nunits;
10257
10258 switch (GET_MODE_CLASS (mode))
10259 {
10260 case MODE_VECTOR_INT:
10261 case MODE_VECTOR_FLOAT:
10262 case MODE_VECTOR_FRACT:
10263 case MODE_VECTOR_UFRACT:
10264 case MODE_VECTOR_ACCUM:
10265 case MODE_VECTOR_UACCUM:
10266 nunits = GET_MODE_NUNITS (mode);
10267 break;
10268
10269 case MODE_INT:
10270 /* Check that there are no leftover bits. */
10271 gcc_assert (GET_MODE_BITSIZE (mode)
10272 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10273
10274 nunits = GET_MODE_BITSIZE (mode)
10275 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10276 break;
10277
10278 default:
10279 gcc_unreachable ();
10280 }
10281
10282 return make_vector_type (innertype, nunits, mode);
10283 }
10284
10285 /* Similarly, but takes the inner type and number of units, which must be
10286 a power of two. */
10287
10288 tree
10289 build_vector_type (tree innertype, int nunits)
10290 {
10291 return make_vector_type (innertype, nunits, VOIDmode);
10292 }
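
/* Illustrative sketch (not in the original sources): requesting vector
   types by element type and lane count; the machine mode is chosen later
   by layout_type inside make_vector_type.

     tree v4si = build_vector_type (intSI_type_node, 4);
     tree v8qi = build_vector_type (intQI_type_node, 8);

   On targets that support the corresponding vector modes these end up
   with, e.g., V4SImode and V8QImode; otherwise they are laid out in
   BLKmode.  */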
10293
10294 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10295
10296 tree
10297 build_opaque_vector_type (tree innertype, int nunits)
10298 {
10299 tree t = make_vector_type (innertype, nunits, VOIDmode);
10300 tree cand;
10301 /* We always build the non-opaque variant before the opaque one,
10302 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10303 cand = TYPE_NEXT_VARIANT (t);
10304 if (cand
10305 && TYPE_VECTOR_OPAQUE (cand)
10306 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10307 return cand;
10308 /* Otherwise build a variant type and make sure to queue it after
10309 the non-opaque type. */
10310 cand = build_distinct_type_copy (t);
10311 TYPE_VECTOR_OPAQUE (cand) = true;
10312 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10313 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10314 TYPE_NEXT_VARIANT (t) = cand;
10315 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10316 return cand;
10317 }
10318
10319
10320 /* Given an initializer INIT, return TRUE if INIT is zero or some
10321 aggregate of zeros. Otherwise return FALSE. */
10322 bool
10323 initializer_zerop (const_tree init)
10324 {
10325 tree elt;
10326
10327 STRIP_NOPS (init);
10328
10329 switch (TREE_CODE (init))
10330 {
10331 case INTEGER_CST:
10332 return integer_zerop (init);
10333
10334 case REAL_CST:
10335 /* ??? Note that this is not correct for C4X float formats. There,
10336 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10337 negative exponent. */
10338 return real_zerop (init)
10339 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10340
10341 case FIXED_CST:
10342 return fixed_zerop (init);
10343
10344 case COMPLEX_CST:
10345 return integer_zerop (init)
10346 || (real_zerop (init)
10347 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10348 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10349
10350 case VECTOR_CST:
10351 {
10352 unsigned i;
10353 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10354 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10355 return false;
10356 return true;
10357 }
10358
10359 case CONSTRUCTOR:
10360 {
10361 unsigned HOST_WIDE_INT idx;
10362
10363 if (TREE_CLOBBER_P (init))
10364 return false;
10365 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10366 if (!initializer_zerop (elt))
10367 return false;
10368 return true;
10369 }
10370
10371 case STRING_CST:
10372 {
10373 int i;
10374
10375 /* We need to loop through all elements to handle cases like
10376 "\0" and "\0foobar". */
10377 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10378 if (TREE_STRING_POINTER (init)[i] != '\0')
10379 return false;
10380
10381 return true;
10382 }
10383
10384 default:
10385 return false;
10386 }
10387 }
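
/* Illustrative sketch (not in the original sources): typical results of
   initializer_zerop; AGG_TYPE stands for some hypothetical aggregate type.

     initializer_zerop (build_int_cst (integer_type_node, 0));   true
     initializer_zerop (build_int_cst (integer_type_node, 7));   false
     initializer_zerop (build_constructor (agg_type, NULL));     true

   An empty CONSTRUCTOR counts as a zero initializer because the element
   loop above finds nothing nonzero.  */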
10388
10389 /* Check whether the vector VEC consists of identical elements and whether
10390 the number of elements corresponds to the type of VEC.
10391 The function returns the first element of the vector,
10392 or NULL_TREE if the vector is not uniform. */
10393 tree
10394 uniform_vector_p (const_tree vec)
10395 {
10396 tree first, t;
10397 unsigned i;
10398
10399 if (vec == NULL_TREE)
10400 return NULL_TREE;
10401
10402 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10403
10404 if (TREE_CODE (vec) == VECTOR_CST)
10405 {
10406 first = VECTOR_CST_ELT (vec, 0);
10407 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10408 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10409 return NULL_TREE;
10410
10411 return first;
10412 }
10413
10414 else if (TREE_CODE (vec) == CONSTRUCTOR)
10415 {
10416 first = error_mark_node;
10417
10418 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10419 {
10420 if (i == 0)
10421 {
10422 first = t;
10423 continue;
10424 }
10425 if (!operand_equal_p (first, t, 0))
10426 return NULL_TREE;
10427 }
10428 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10429 return NULL_TREE;
10430
10431 return first;
10432 }
10433
10434 return NULL_TREE;
10435 }
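
/* Illustrative sketch (not in the original sources): a VECTOR_CST whose
   lanes are all the same constant is uniform, and that shared element is
   returned.

     tree v4si  = build_vector_type (intSI_type_node, 4);
     tree elt   = build_int_cst (intSI_type_node, 42);
     tree splat = build_vector_from_val (v4si, elt);
     tree first = uniform_vector_p (splat);

   FIRST is the element 42.  A CONSTRUCTOR with differing elements, or one
   with fewer elements than TYPE_VECTOR_SUBPARTS, yields NULL_TREE.  */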
10436
10437 /* Build an empty statement at location LOC. */
10438
10439 tree
10440 build_empty_stmt (location_t loc)
10441 {
10442 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10443 SET_EXPR_LOCATION (t, loc);
10444 return t;
10445 }
10446
10447
10448 /* Build an OpenMP clause with code CODE. LOC is the location of the
10449 clause. */
10450
10451 tree
10452 build_omp_clause (location_t loc, enum omp_clause_code code)
10453 {
10454 tree t;
10455 int size, length;
10456
10457 length = omp_clause_num_ops[code];
10458 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10459
10460 record_node_allocation_statistics (OMP_CLAUSE, size);
10461
10462 t = (tree) ggc_internal_alloc (size);
10463 memset (t, 0, size);
10464 TREE_SET_CODE (t, OMP_CLAUSE);
10465 OMP_CLAUSE_SET_CODE (t, code);
10466 OMP_CLAUSE_LOCATION (t) = loc;
10467
10468 return t;
10469 }
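
/* Illustrative sketch (not in the original sources): building a clause
   and chaining it onto an existing clause list; VAR and CLAUSES are
   hypothetical placeholders.

     tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
     OMP_CLAUSE_DECL (c) = var;
     OMP_CLAUSE_CHAIN (c) = clauses;
     clauses = c;
*/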
10470
10471 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10472 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10473 Except for the CODE and operand count field, other storage for the
10474 object is initialized to zeros. */
10475
10476 tree
10477 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10478 {
10479 tree t;
10480 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10481
10482 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10483 gcc_assert (len >= 1);
10484
10485 record_node_allocation_statistics (code, length);
10486
10487 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10488
10489 TREE_SET_CODE (t, code);
10490
10491 /* Can't use TREE_OPERAND to store the length because if checking is
10492 enabled, it will try to check the length before we store it. :-P */
10493 t->exp.operands[0] = build_int_cst (sizetype, len);
10494
10495 return t;
10496 }
10497
10498 /* Helper function for build_call_* functions; build a CALL_EXPR with
10499 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10500 the argument slots. */
10501
10502 static tree
10503 build_call_1 (tree return_type, tree fn, int nargs)
10504 {
10505 tree t;
10506
10507 t = build_vl_exp (CALL_EXPR, nargs + 3);
10508 TREE_TYPE (t) = return_type;
10509 CALL_EXPR_FN (t) = fn;
10510 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10511
10512 return t;
10513 }
10514
10515 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10516 FN and a null static chain slot. NARGS is the number of call arguments
10517 which are specified as "..." arguments. */
10518
10519 tree
10520 build_call_nary (tree return_type, tree fn, int nargs, ...)
10521 {
10522 tree ret;
10523 va_list args;
10524 va_start (args, nargs);
10525 ret = build_call_valist (return_type, fn, nargs, args);
10526 va_end (args);
10527 return ret;
10528 }
10529
10530 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10531 FN and a null static chain slot. NARGS is the number of call arguments
10532 which are specified as a va_list ARGS. */
10533
10534 tree
10535 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10536 {
10537 tree t;
10538 int i;
10539
10540 t = build_call_1 (return_type, fn, nargs);
10541 for (i = 0; i < nargs; i++)
10542 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10543 process_call_operands (t);
10544 return t;
10545 }
10546
10547 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10548 FN and a null static chain slot. NARGS is the number of call arguments
10549 which are specified as a tree array ARGS. */
10550
10551 tree
10552 build_call_array_loc (location_t loc, tree return_type, tree fn,
10553 int nargs, const tree *args)
10554 {
10555 tree t;
10556 int i;
10557
10558 t = build_call_1 (return_type, fn, nargs);
10559 for (i = 0; i < nargs; i++)
10560 CALL_EXPR_ARG (t, i) = args[i];
10561 process_call_operands (t);
10562 SET_EXPR_LOCATION (t, loc);
10563 return t;
10564 }
10565
10566 /* Like build_call_array, but takes a vec. */
10567
10568 tree
10569 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10570 {
10571 tree ret, t;
10572 unsigned int ix;
10573
10574 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10575 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10576 CALL_EXPR_ARG (ret, ix) = t;
10577 process_call_operands (ret);
10578 return ret;
10579 }
10580
10581 /* Conveniently construct a function call expression. FNDECL names the
10582 function to be called and N arguments are passed in the array
10583 ARGARRAY. */
10584
10585 tree
10586 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10587 {
10588 tree fntype = TREE_TYPE (fndecl);
10589 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10590
10591 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10592 }
10593
10594 /* Conveniently construct a function call expression. FNDECL names the
10595 function to be called and the arguments are passed in the vector
10596 VEC. */
10597
10598 tree
10599 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10600 {
10601 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10602 vec_safe_address (vec));
10603 }
10604
10605
10606 /* Conveniently construct a function call expression. FNDECL names the
10607 function to be called, N is the number of arguments, and the "..."
10608 parameters are the argument expressions. */
10609
10610 tree
10611 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10612 {
10613 va_list ap;
10614 tree *argarray = XALLOCAVEC (tree, n);
10615 int i;
10616
10617 va_start (ap, n);
10618 for (i = 0; i < n; i++)
10619 argarray[i] = va_arg (ap, tree);
10620 va_end (ap);
10621 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10622 }
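
/* Illustrative sketch (not in the original sources): building a call to a
   known builtin with explicit arguments; LOC, DEST, SRC and LEN are
   hypothetical placeholders of the appropriate types.

     tree fn   = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr_loc (loc, fn, 3, dest, src, len);

   The result is an ordinary CALL_EXPR that can then be folded or
   gimplified like any other expression.  */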
10623
10624 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10625 varargs macros aren't supported by all bootstrap compilers. */
10626
10627 tree
10628 build_call_expr (tree fndecl, int n, ...)
10629 {
10630 va_list ap;
10631 tree *argarray = XALLOCAVEC (tree, n);
10632 int i;
10633
10634 va_start (ap, n);
10635 for (i = 0; i < n; i++)
10636 argarray[i] = va_arg (ap, tree);
10637 va_end (ap);
10638 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10639 }
10640
10641 /* Build an internal call expression. This is just like a CALL_EXPR, except
10642 that its CALL_EXPR_FN is NULL. It will be gimplified later into an
10643 ordinary internal function call. */
10644
10645 tree
10646 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10647 tree type, int n, ...)
10648 {
10649 va_list ap;
10650 int i;
10651
10652 tree fn = build_call_1 (type, NULL_TREE, n);
10653 va_start (ap, n);
10654 for (i = 0; i < n; i++)
10655 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10656 va_end (ap);
10657 SET_EXPR_LOCATION (fn, loc);
10658 CALL_EXPR_IFN (fn) = ifn;
10659 return fn;
10660 }
10661
10662 /* Create a new constant string literal and return a char* pointer to it.
10663 The STRING_CST value is the LEN characters at STR. */
10664 tree
10665 build_string_literal (int len, const char *str)
10666 {
10667 tree t, elem, index, type;
10668
10669 t = build_string (len, str);
10670 elem = build_type_variant (char_type_node, 1, 0);
10671 index = build_index_type (size_int (len - 1));
10672 type = build_array_type (elem, index);
10673 TREE_TYPE (t) = type;
10674 TREE_CONSTANT (t) = 1;
10675 TREE_READONLY (t) = 1;
10676 TREE_STATIC (t) = 1;
10677
10678 type = build_pointer_type (elem);
10679 t = build1 (ADDR_EXPR, type,
10680 build4 (ARRAY_REF, elem,
10681 t, integer_zero_node, NULL_TREE, NULL_TREE));
10682 return t;
10683 }
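
/* Illustrative sketch (not in the original sources): combining
   build_string_literal with build_call_expr to form a call to puts.

     const char *msg = "hello";
     tree str  = build_string_literal (strlen (msg) + 1, msg);
     tree fn   = builtin_decl_explicit (BUILT_IN_PUTS);
     tree call = build_call_expr (fn, 1, str);
*/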
10684
10685
10686
10687 /* Return true if T (assumed to be a DECL) must be assigned a memory
10688 location. */
10689
10690 bool
10691 needs_to_live_in_memory (const_tree t)
10692 {
10693 return (TREE_ADDRESSABLE (t)
10694 || is_global_var (t)
10695 || (TREE_CODE (t) == RESULT_DECL
10696 && !DECL_BY_REFERENCE (t)
10697 && aggregate_value_p (t, current_function_decl)));
10698 }
10699
10700 /* Return the value of the integer constant X, sign-extended. */
10701
10702 HOST_WIDE_INT
10703 int_cst_value (const_tree x)
10704 {
10705 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10706 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10707
10708 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10709 gcc_assert (cst_and_fits_in_hwi (x));
10710
10711 if (bits < HOST_BITS_PER_WIDE_INT)
10712 {
10713 bool negative = ((val >> (bits - 1)) & 1) != 0;
10714 if (negative)
10715 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10716 else
10717 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10718 }
10719
10720 return val;
10721 }
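
/* Illustrative sketch (not in the original sources): the extension above
   depends only on TYPE_PRECISION, not on TYPE_UNSIGNED, so an all-ones
   8-bit constant comes back as -1 in either case.

     int_cst_value (build_int_cst (signed_char_type_node, -1));      is -1
     int_cst_value (build_int_cst (unsigned_char_type_node, 255));   is also -1
*/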
10722
10723 /* If TYPE is an integral or pointer type, return an integer type with
10724 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10725 if TYPE is already an integer type of signedness UNSIGNEDP. */
10726
10727 tree
10728 signed_or_unsigned_type_for (int unsignedp, tree type)
10729 {
10730 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10731 return type;
10732
10733 if (TREE_CODE (type) == VECTOR_TYPE)
10734 {
10735 tree inner = TREE_TYPE (type);
10736 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10737 if (!inner2)
10738 return NULL_TREE;
10739 if (inner == inner2)
10740 return type;
10741 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10742 }
10743
10744 if (!INTEGRAL_TYPE_P (type)
10745 && !POINTER_TYPE_P (type)
10746 && TREE_CODE (type) != OFFSET_TYPE)
10747 return NULL_TREE;
10748
10749 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10750 }
10751
10752 /* If TYPE is an integral or pointer type, return an integer type with
10753 the same precision which is unsigned, or itself if TYPE is already an
10754 unsigned integer type. */
10755
10756 tree
10757 unsigned_type_for (tree type)
10758 {
10759 return signed_or_unsigned_type_for (1, type);
10760 }
10761
10762 /* If TYPE is an integral or pointer type, return an integer type with
10763 the same precision which is signed, or itself if TYPE is already a
10764 signed integer type. */
10765
10766 tree
10767 signed_type_for (tree type)
10768 {
10769 return signed_or_unsigned_type_for (0, type);
10770 }
10771
10772 /* If TYPE is a vector type, return a signed integer vector type with the
10773 same width and number of subparts. Otherwise return boolean_type_node. */
10774
10775 tree
10776 truth_type_for (tree type)
10777 {
10778 if (TREE_CODE (type) == VECTOR_TYPE)
10779 {
10780 tree elem = lang_hooks.types.type_for_size
10781 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10782 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10783 }
10784 else
10785 return boolean_type_node;
10786 }
10787
10788 /* Returns the largest value obtainable by casting something in INNER type to
10789 OUTER type. */
10790
10791 tree
10792 upper_bound_in_type (tree outer, tree inner)
10793 {
10794 unsigned int det = 0;
10795 unsigned oprec = TYPE_PRECISION (outer);
10796 unsigned iprec = TYPE_PRECISION (inner);
10797 unsigned prec;
10798
10799 /* Compute a unique number for every combination. */
10800 det |= (oprec > iprec) ? 4 : 0;
10801 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10802 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10803
10804 /* Determine the exponent to use. */
10805 switch (det)
10806 {
10807 case 0:
10808 case 1:
10809 /* oprec <= iprec, outer: signed, inner: don't care. */
10810 prec = oprec - 1;
10811 break;
10812 case 2:
10813 case 3:
10814 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10815 prec = oprec;
10816 break;
10817 case 4:
10818 /* oprec > iprec, outer: signed, inner: signed. */
10819 prec = iprec - 1;
10820 break;
10821 case 5:
10822 /* oprec > iprec, outer: signed, inner: unsigned. */
10823 prec = iprec;
10824 break;
10825 case 6:
10826 /* oprec > iprec, outer: unsigned, inner: signed. */
10827 prec = oprec;
10828 break;
10829 case 7:
10830 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10831 prec = iprec;
10832 break;
10833 default:
10834 gcc_unreachable ();
10835 }
10836
10837 return wide_int_to_tree (outer,
10838 wi::mask (prec, false, TYPE_PRECISION (outer)));
10839 }
10840
10841 /* Returns the smallest value obtainable by casting something in INNER type to
10842 OUTER type. */
10843
10844 tree
10845 lower_bound_in_type (tree outer, tree inner)
10846 {
10847 unsigned oprec = TYPE_PRECISION (outer);
10848 unsigned iprec = TYPE_PRECISION (inner);
10849
10850 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10851 and obtain 0. */
10852 if (TYPE_UNSIGNED (outer)
10853 /* If we are widening something of an unsigned type, OUTER type
10854 contains all values of INNER type. In particular, both INNER
10855 and OUTER types have zero in common. */
10856 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10857 return build_int_cst (outer, 0);
10858 else
10859 {
10860 /* If we are widening a signed type to another signed type, we
10861 want to obtain -2^(iprec-1). If we are keeping the
10862 precision or narrowing to a signed type, we want to obtain
10863 -2^(oprec-1). */
10864 unsigned prec = oprec > iprec ? iprec : oprec;
10865 return wide_int_to_tree (outer,
10866 wi::mask (prec - 1, true,
10867 TYPE_PRECISION (outer)));
10868 }
10869 }
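
/* Illustrative sketch (not in the original sources): a worked example for
   the two bound routines above, converting from an 8-bit signed type to a
   16-bit unsigned type.

     upper_bound_in_type (uint16_type_node, signed_char_type_node)

   computes det = 4|2 = 6, so prec = oprec = 16 and the result is 0xffff
   (a signed char of -1 casts to 65535), while

     lower_bound_in_type (uint16_type_node, signed_char_type_node)

   is simply 0, because the outer type is unsigned.  */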
10870
10871 /* Return nonzero if two operands that are suitable for PHI nodes are
10872 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10873 SSA_NAME or invariant. Note that this is strictly an optimization.
10874 That is, callers of this function can directly call operand_equal_p
10875 and get the same result, only slower. */
10876
10877 int
10878 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10879 {
10880 if (arg0 == arg1)
10881 return 1;
10882 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10883 return 0;
10884 return operand_equal_p (arg0, arg1, 0);
10885 }
10886
10887 /* Returns the number of trailing zeros in the binary representation of X. */
10888
10889 tree
10890 num_ending_zeros (const_tree x)
10891 {
10892 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10893 }
10894
10895
10896 #define WALK_SUBTREE(NODE) \
10897 do \
10898 { \
10899 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10900 if (result) \
10901 return result; \
10902 } \
10903 while (0)
10904
10905 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10906 to be walked whenever a type is seen in the tree. The rest of the operands
10907 and the return value are as for walk_tree. */
10908
10909 static tree
10910 walk_type_fields (tree type, walk_tree_fn func, void *data,
10911 hash_set<tree> *pset, walk_tree_lh lh)
10912 {
10913 tree result = NULL_TREE;
10914
10915 switch (TREE_CODE (type))
10916 {
10917 case POINTER_TYPE:
10918 case REFERENCE_TYPE:
10919 case VECTOR_TYPE:
10920 /* We have to worry about mutually recursive pointers. These can't
10921 be written in C. They can in Ada. It's pathological, but
10922 there's an ACATS test (c38102a) that checks it. Deal with this
10923 by checking if we're pointing to another pointer, that one
10924 points to another pointer, that one does too, and we have no htab.
10925 If so, get a hash table. We check three levels deep to avoid
10926 the cost of the hash table if we don't need one. */
10927 if (POINTER_TYPE_P (TREE_TYPE (type))
10928 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10929 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10930 && !pset)
10931 {
10932 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10933 func, data);
10934 if (result)
10935 return result;
10936
10937 break;
10938 }
10939
10940 /* ... fall through ... */
10941
10942 case COMPLEX_TYPE:
10943 WALK_SUBTREE (TREE_TYPE (type));
10944 break;
10945
10946 case METHOD_TYPE:
10947 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10948
10949 /* Fall through. */
10950
10951 case FUNCTION_TYPE:
10952 WALK_SUBTREE (TREE_TYPE (type));
10953 {
10954 tree arg;
10955
10956 /* We never want to walk into default arguments. */
10957 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10958 WALK_SUBTREE (TREE_VALUE (arg));
10959 }
10960 break;
10961
10962 case ARRAY_TYPE:
10963 /* Don't follow this node's type if it is a pointer, for fear that
10964 we'll have infinite recursion. If we have a PSET, then we
10965 need not fear. */
10966 if (pset
10967 || (!POINTER_TYPE_P (TREE_TYPE (type))
10968 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10969 WALK_SUBTREE (TREE_TYPE (type));
10970 WALK_SUBTREE (TYPE_DOMAIN (type));
10971 break;
10972
10973 case OFFSET_TYPE:
10974 WALK_SUBTREE (TREE_TYPE (type));
10975 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10976 break;
10977
10978 default:
10979 break;
10980 }
10981
10982 return NULL_TREE;
10983 }
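
/* Illustrative sketch (not in the original sources): a typical
   walk_tree_fn callback, here counting CALL_EXPR nodes below EXPR (a
   hypothetical tree).  A callback may clear *WALK_SUBTREES to prune the
   walk, or return a non-NULL tree to stop it early.

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree_without_duplicates (&expr, count_calls_r, &n);
*/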
10984
10985 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10986 called with the DATA and the address of each sub-tree. If FUNC returns a
10987 non-NULL value, the traversal is stopped, and the value returned by FUNC
10988 is returned. If PSET is non-NULL it is used to record the nodes visited,
10989 and to avoid visiting a node more than once. */
10990
10991 tree
10992 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10993 hash_set<tree> *pset, walk_tree_lh lh)
10994 {
10995 enum tree_code code;
10996 int walk_subtrees;
10997 tree result;
10998
10999 #define WALK_SUBTREE_TAIL(NODE) \
11000 do \
11001 { \
11002 tp = & (NODE); \
11003 goto tail_recurse; \
11004 } \
11005 while (0)
11006
11007 tail_recurse:
11008 /* Skip empty subtrees. */
11009 if (!*tp)
11010 return NULL_TREE;
11011
11012 /* Don't walk the same tree twice, if the user has requested
11013 that we avoid doing so. */
11014 if (pset && pset->add (*tp))
11015 return NULL_TREE;
11016
11017 /* Call the function. */
11018 walk_subtrees = 1;
11019 result = (*func) (tp, &walk_subtrees, data);
11020
11021 /* If we found something, return it. */
11022 if (result)
11023 return result;
11024
11025 code = TREE_CODE (*tp);
11026
11027 /* Even if we didn't, FUNC may have decided that there was nothing
11028 interesting below this point in the tree. */
11029 if (!walk_subtrees)
11030 {
11031 /* But we still need to check our siblings. */
11032 if (code == TREE_LIST)
11033 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11034 else if (code == OMP_CLAUSE)
11035 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11036 else
11037 return NULL_TREE;
11038 }
11039
11040 if (lh)
11041 {
11042 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11043 if (result || !walk_subtrees)
11044 return result;
11045 }
11046
11047 switch (code)
11048 {
11049 case ERROR_MARK:
11050 case IDENTIFIER_NODE:
11051 case INTEGER_CST:
11052 case REAL_CST:
11053 case FIXED_CST:
11054 case VECTOR_CST:
11055 case STRING_CST:
11056 case BLOCK:
11057 case PLACEHOLDER_EXPR:
11058 case SSA_NAME:
11059 case FIELD_DECL:
11060 case RESULT_DECL:
11061 /* None of these have subtrees other than those already walked
11062 above. */
11063 break;
11064
11065 case TREE_LIST:
11066 WALK_SUBTREE (TREE_VALUE (*tp));
11067 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11068 break;
11069
11070 case TREE_VEC:
11071 {
11072 int len = TREE_VEC_LENGTH (*tp);
11073
11074 if (len == 0)
11075 break;
11076
11077 /* Walk all elements but the first. */
11078 while (--len)
11079 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11080
11081 /* Now walk the first one as a tail call. */
11082 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11083 }
11084
11085 case COMPLEX_CST:
11086 WALK_SUBTREE (TREE_REALPART (*tp));
11087 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11088
11089 case CONSTRUCTOR:
11090 {
11091 unsigned HOST_WIDE_INT idx;
11092 constructor_elt *ce;
11093
11094 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11095 idx++)
11096 WALK_SUBTREE (ce->value);
11097 }
11098 break;
11099
11100 case SAVE_EXPR:
11101 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11102
11103 case BIND_EXPR:
11104 {
11105 tree decl;
11106 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11107 {
11108 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11109 into declarations that are just mentioned, rather than
11110 declared; they don't really belong to this part of the tree.
11111 And, we can see cycles: the initializer for a declaration
11112 can refer to the declaration itself. */
11113 WALK_SUBTREE (DECL_INITIAL (decl));
11114 WALK_SUBTREE (DECL_SIZE (decl));
11115 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11116 }
11117 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11118 }
11119
11120 case STATEMENT_LIST:
11121 {
11122 tree_stmt_iterator i;
11123 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11124 WALK_SUBTREE (*tsi_stmt_ptr (i));
11125 }
11126 break;
11127
11128 case OMP_CLAUSE:
11129 switch (OMP_CLAUSE_CODE (*tp))
11130 {
11131 case OMP_CLAUSE_PRIVATE:
11132 case OMP_CLAUSE_SHARED:
11133 case OMP_CLAUSE_FIRSTPRIVATE:
11134 case OMP_CLAUSE_COPYIN:
11135 case OMP_CLAUSE_COPYPRIVATE:
11136 case OMP_CLAUSE_FINAL:
11137 case OMP_CLAUSE_IF:
11138 case OMP_CLAUSE_NUM_THREADS:
11139 case OMP_CLAUSE_SCHEDULE:
11140 case OMP_CLAUSE_UNIFORM:
11141 case OMP_CLAUSE_DEPEND:
11142 case OMP_CLAUSE_NUM_TEAMS:
11143 case OMP_CLAUSE_THREAD_LIMIT:
11144 case OMP_CLAUSE_DEVICE:
11145 case OMP_CLAUSE_DIST_SCHEDULE:
11146 case OMP_CLAUSE_SAFELEN:
11147 case OMP_CLAUSE_SIMDLEN:
11148 case OMP_CLAUSE__LOOPTEMP_:
11149 case OMP_CLAUSE__SIMDUID_:
11150 case OMP_CLAUSE__CILK_FOR_COUNT_:
11151 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11152 /* FALLTHRU */
11153
11154 case OMP_CLAUSE_NOWAIT:
11155 case OMP_CLAUSE_ORDERED:
11156 case OMP_CLAUSE_DEFAULT:
11157 case OMP_CLAUSE_UNTIED:
11158 case OMP_CLAUSE_MERGEABLE:
11159 case OMP_CLAUSE_PROC_BIND:
11160 case OMP_CLAUSE_INBRANCH:
11161 case OMP_CLAUSE_NOTINBRANCH:
11162 case OMP_CLAUSE_FOR:
11163 case OMP_CLAUSE_PARALLEL:
11164 case OMP_CLAUSE_SECTIONS:
11165 case OMP_CLAUSE_TASKGROUP:
11166 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11167
11168 case OMP_CLAUSE_LASTPRIVATE:
11169 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11170 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11171 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11172
11173 case OMP_CLAUSE_COLLAPSE:
11174 {
11175 int i;
11176 for (i = 0; i < 3; i++)
11177 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11178 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11179 }
11180
11181 case OMP_CLAUSE_LINEAR:
11182 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11183 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11184 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11185 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11186
11187 case OMP_CLAUSE_ALIGNED:
11188 case OMP_CLAUSE_FROM:
11189 case OMP_CLAUSE_TO:
11190 case OMP_CLAUSE_MAP:
11191 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11192 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11193 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11194
11195 case OMP_CLAUSE_REDUCTION:
11196 {
11197 int i;
11198 for (i = 0; i < 4; i++)
11199 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11200 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11201 }
11202
11203 default:
11204 gcc_unreachable ();
11205 }
11206 break;
11207
11208 case TARGET_EXPR:
11209 {
11210 int i, len;
11211
11212 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11213 But we only want to walk them once. */
11214 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11215 for (i = 0; i < len; ++i)
11216 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11217 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11218 }
11219
11220 case DECL_EXPR:
11221 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11222 defining. We only want to walk into these fields of a type in this
11223 case and not in the general case of a mere reference to the type.
11224
11225 The criterion is as follows: if the field can be an expression, it
11226 must be walked only here. This should be in keeping with the fields
11227 that are directly gimplified in gimplify_type_sizes in order for the
11228 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11229 variable-sized types.
11230
11231 Note that DECLs get walked as part of processing the BIND_EXPR. */
11232 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11233 {
11234 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11235 if (TREE_CODE (*type_p) == ERROR_MARK)
11236 return NULL_TREE;
11237
11238 /* Call the function for the type. See if it returns anything or
11239 doesn't want us to continue. If we are to continue, walk both
11240 the normal fields and those for the declaration case. */
11241 result = (*func) (type_p, &walk_subtrees, data);
11242 if (result || !walk_subtrees)
11243 return result;
11244
11245 /* But do not walk a pointed-to type since it may itself need to
11246 be walked in the declaration case if it isn't anonymous. */
11247 if (!POINTER_TYPE_P (*type_p))
11248 {
11249 result = walk_type_fields (*type_p, func, data, pset, lh);
11250 if (result)
11251 return result;
11252 }
11253
11254 /* If this is a record type, also walk the fields. */
11255 if (RECORD_OR_UNION_TYPE_P (*type_p))
11256 {
11257 tree field;
11258
11259 for (field = TYPE_FIELDS (*type_p); field;
11260 field = DECL_CHAIN (field))
11261 {
11262 /* We'd like to look at the type of the field, but we can
11263 easily get infinite recursion. So assume it's pointed
11264 to elsewhere in the tree. Also, ignore things that
11265 aren't fields. */
11266 if (TREE_CODE (field) != FIELD_DECL)
11267 continue;
11268
11269 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11270 WALK_SUBTREE (DECL_SIZE (field));
11271 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11272 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11273 WALK_SUBTREE (DECL_QUALIFIER (field));
11274 }
11275 }
11276
11277 /* Same for scalar types. */
11278 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11279 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11280 || TREE_CODE (*type_p) == INTEGER_TYPE
11281 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11282 || TREE_CODE (*type_p) == REAL_TYPE)
11283 {
11284 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11285 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11286 }
11287
11288 WALK_SUBTREE (TYPE_SIZE (*type_p));
11289 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11290 }
11291 /* FALLTHRU */
11292
11293 default:
11294 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11295 {
11296 int i, len;
11297
11298 /* Walk over all the sub-trees of this operand. */
11299 len = TREE_OPERAND_LENGTH (*tp);
11300
11301 /* Go through the subtrees. We need to do this in forward order so
11302 that the scope of a FOR_EXPR is handled properly. */
11303 if (len)
11304 {
11305 for (i = 0; i < len - 1; ++i)
11306 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11307 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11308 }
11309 }
11310 /* If this is a type, walk the needed fields in the type. */
11311 else if (TYPE_P (*tp))
11312 return walk_type_fields (*tp, func, data, pset, lh);
11313 break;
11314 }
11315
11316 /* We didn't find what we were looking for. */
11317 return NULL_TREE;
11318
11319 #undef WALK_SUBTREE_TAIL
11320 }
11321 #undef WALK_SUBTREE
11322
11323 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11324
11325 tree
11326 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11327 walk_tree_lh lh)
11328 {
11329 tree result;
11330
11331 hash_set<tree> pset;
11332 result = walk_tree_1 (tp, func, data, &pset, lh);
11333 return result;
11334 }
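
/* For example, a caller that wants to know whether EXPR mentions a
   particular DECL could use a callback along these lines (a sketch;
   the callback name and the use of DATA are the caller's choice):

       static tree
       find_decl_r (tree *tp, int *walk_subtrees, void *data)
       {
         if (*tp == (tree) data)
           return *tp;
         if (TYPE_P (*tp))
           *walk_subtrees = 0;
         return NULL_TREE;
       }

       tree hit = walk_tree_without_duplicates (&expr, find_decl_r, decl);

   Returning *tp stops the walk and propagates that node back to the
   caller, clearing *walk_subtrees skips the children of the current
   node, and returning NULL_TREE continues the walk.  */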
11335
11336
11337 tree
11338 tree_block (tree t)
11339 {
11340 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11341
11342 if (IS_EXPR_CODE_CLASS (c))
11343 return LOCATION_BLOCK (t->exp.locus);
11344 gcc_unreachable ();
11345 return NULL;
11346 }
11347
11348 void
11349 tree_set_block (tree t, tree b)
11350 {
11351 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11352
11353 if (IS_EXPR_CODE_CLASS (c))
11354 {
11355 if (b)
11356 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11357 else
11358 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11359 }
11360 else
11361 gcc_unreachable ();
11362 }
11363
11364 /* Create a nameless artificial label and put it in the current
11365 function context. The label has a location of LOC. Returns the
11366 newly created label. */
11367
11368 tree
11369 create_artificial_label (location_t loc)
11370 {
11371 tree lab = build_decl (loc,
11372 LABEL_DECL, NULL_TREE, void_type_node);
11373
11374 DECL_ARTIFICIAL (lab) = 1;
11375 DECL_IGNORED_P (lab) = 1;
11376 DECL_CONTEXT (lab) = current_function_decl;
11377 return lab;
11378 }
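
/* A lowering pass that needs a fresh jump target might, for instance,
   do something like (illustrative only):

       tree lab = create_artificial_label (UNKNOWN_LOCATION);
       tree lab_expr = build1 (LABEL_EXPR, void_type_node, lab);

   and then emit GOTO_EXPRs that target LAB.  */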
11379
11380 /* Given a tree, try to return a useful variable name that we can use
11381 to prefix a temporary that is being assigned the value of the tree.
11382 I.e., given <temp> = &A, return A. */
11383
11384 const char *
11385 get_name (tree t)
11386 {
11387 tree stripped_decl;
11388
11389 stripped_decl = t;
11390 STRIP_NOPS (stripped_decl);
11391 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11392 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11393 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11394 {
11395 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11396 if (!name)
11397 return NULL;
11398 return IDENTIFIER_POINTER (name);
11399 }
11400 else
11401 {
11402 switch (TREE_CODE (stripped_decl))
11403 {
11404 case ADDR_EXPR:
11405 return get_name (TREE_OPERAND (stripped_decl, 0));
11406 default:
11407 return NULL;
11408 }
11409 }
11410 }
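
/* A typical use is giving gimplification temporaries a readable name,
   e.g. (illustrative):

       tree tmp = create_tmp_var (TREE_TYPE (val), get_name (val));

   A NULL result simply means that no useful prefix is available.  */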
11411
11412 /* Return true if function type FNTYPE has a variable argument list. */
11413
11414 bool
11415 stdarg_p (const_tree fntype)
11416 {
11417 function_args_iterator args_iter;
11418 tree n = NULL_TREE, t;
11419
11420 if (!fntype)
11421 return false;
11422
11423 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11424 {
11425 n = t;
11426 }
11427
11428 return n != NULL_TREE && n != void_type_node;
11429 }
11430
11431 /* Return true if FNTYPE has a prototype. */
11432
11433 bool
11434 prototype_p (tree fntype)
11435 {
11436 tree t;
11437
11438 gcc_assert (fntype != NULL_TREE);
11439
11440 t = TYPE_ARG_TYPES (fntype);
11441 return (t != NULL_TREE);
11442 }
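
/* Both predicates operate on a function type, not on a FUNCTION_DECL,
   so callers typically pass TREE_TYPE of the decl, e.g. (illustrative;
   handle_varargs_call is a hypothetical helper):

       if (stdarg_p (TREE_TYPE (fndecl)))
         handle_varargs_call (fndecl);  */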
11443
11444 /* If BLOCK is inlined from an __attribute__((__artificial__))
11445 routine, return a pointer to the location from which it has been
11446 called. */
11447 location_t *
11448 block_nonartificial_location (tree block)
11449 {
11450 location_t *ret = NULL;
11451
11452 while (block && TREE_CODE (block) == BLOCK
11453 && BLOCK_ABSTRACT_ORIGIN (block))
11454 {
11455 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11456
11457 while (TREE_CODE (ao) == BLOCK
11458 && BLOCK_ABSTRACT_ORIGIN (ao)
11459 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11460 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11461
11462 if (TREE_CODE (ao) == FUNCTION_DECL)
11463 {
11464 /* If AO is an artificial inline, point RET to the
11465 call site locus at which it has been inlined and continue
11466 the loop, in case AO's caller is also an artificial
11467 inline. */
11468 if (DECL_DECLARED_INLINE_P (ao)
11469 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11470 ret = &BLOCK_SOURCE_LOCATION (block);
11471 else
11472 break;
11473 }
11474 else if (TREE_CODE (ao) != BLOCK)
11475 break;
11476
11477 block = BLOCK_SUPERCONTEXT (block);
11478 }
11479 return ret;
11480 }
11481
11482
11483 /* If EXP is inlined from an __attribute__((__artificial__))
11484 function, return the location of the original call expression. */
11485
11486 location_t
11487 tree_nonartificial_location (tree exp)
11488 {
11489 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11490
11491 if (loc)
11492 return *loc;
11493 else
11494 return EXPR_LOCATION (exp);
11495 }
11496
11497
11498 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11499 and TARGET_OPTION_NODE nodes. */
11500
11501 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11502
11503 hashval_t
11504 cl_option_hasher::hash (tree x)
11505 {
11506 const_tree const t = x;
11507 const char *p;
11508 size_t i;
11509 size_t len = 0;
11510 hashval_t hash = 0;
11511
11512 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11513 {
11514 p = (const char *)TREE_OPTIMIZATION (t);
11515 len = sizeof (struct cl_optimization);
11516 }
11517
11518 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11519 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11520
11521 else
11522 gcc_unreachable ();
11523
11524 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11525 something else. */
11526 for (i = 0; i < len; i++)
11527 if (p[i])
11528 hash = (hash << 4) ^ ((i << 2) | p[i]);
11529
11530 return hash;
11531 }
11532
11533 /* Return true if the value represented by X (an OPTIMIZATION_NODE or
11534 TARGET_OPTION_NODE) is the same as the value represented by Y, a node
11535 of the same kind. */
11536
11537 bool
11538 cl_option_hasher::equal (tree x, tree y)
11539 {
11540 const_tree const xt = x;
11541 const_tree const yt = y;
11542 const char *xp;
11543 const char *yp;
11544 size_t len;
11545
11546 if (TREE_CODE (xt) != TREE_CODE (yt))
11547 return 0;
11548
11549 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11550 {
11551 xp = (const char *)TREE_OPTIMIZATION (xt);
11552 yp = (const char *)TREE_OPTIMIZATION (yt);
11553 len = sizeof (struct cl_optimization);
11554 }
11555
11556 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11557 {
11558 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11559 TREE_TARGET_OPTION (yt));
11560 }
11561
11562 else
11563 gcc_unreachable ();
11564
11565 return (memcmp (xp, yp, len) == 0);
11566 }
11567
11568 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11569
11570 tree
11571 build_optimization_node (struct gcc_options *opts)
11572 {
11573 tree t;
11574
11575 /* Use the cache of optimization nodes. */
11576
11577 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11578 opts);
11579
11580 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11581 t = *slot;
11582 if (!t)
11583 {
11584 /* Insert this one into the hash table. */
11585 t = cl_optimization_node;
11586 *slot = t;
11587
11588 /* Make a new node for next time round. */
11589 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11590 }
11591
11592 return t;
11593 }
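
/* The returned node is shared, so a caller can attach it directly; for
   instance, the handler for the "optimize" attribute records the current
   options on a function roughly like this (sketch):

       DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
         = build_optimization_node (&global_options);  */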
11594
11595 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11596
11597 tree
11598 build_target_option_node (struct gcc_options *opts)
11599 {
11600 tree t;
11601
11602 /* Use the cache of option nodes. */
11603
11604 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11605 opts);
11606
11607 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11608 t = *slot;
11609 if (!t)
11610 {
11611 /* Insert this one into the hash table. */
11612 t = cl_target_option_node;
11613 *slot = t;
11614
11615 /* Make a new node for next time round. */
11616 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11617 }
11618
11619 return t;
11620 }
11621
11622 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11623 so that they aren't saved during PCH writing. */
11624
11625 void
11626 prepare_target_option_nodes_for_pch (void)
11627 {
11628 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11629 for (; iter != cl_option_hash_table->end (); ++iter)
11630 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11631 TREE_TARGET_GLOBALS (*iter) = NULL;
11632 }
11633
11634 /* Determine the "ultimate origin" of a block. The block may be an inlined
11635 instance of an inlined instance of a block which is local to an inline
11636 function, so we have to trace all of the way back through the origin chain
11637 to find out what sort of node actually served as the original seed for the
11638 given block. */
11639
11640 tree
11641 block_ultimate_origin (const_tree block)
11642 {
11643 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11644
11645 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11646 we're trying to output the abstract instance of this function. */
11647 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11648 return NULL_TREE;
11649
11650 if (immediate_origin == NULL_TREE)
11651 return NULL_TREE;
11652 else
11653 {
11654 tree ret_val;
11655 tree lookahead = immediate_origin;
11656
11657 do
11658 {
11659 ret_val = lookahead;
11660 lookahead = (TREE_CODE (ret_val) == BLOCK
11661 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11662 }
11663 while (lookahead != NULL && lookahead != ret_val);
11664
11665 /* The block's abstract origin chain may not be the *ultimate* origin of
11666 the block. It could lead to a DECL that has an abstract origin set.
11667 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11668 will give us if it has one). Note that DECL's abstract origins are
11669 supposed to be the most distant ancestor (or so decl_ultimate_origin
11670 claims), so we don't need to loop following the DECL origins. */
11671 if (DECL_P (ret_val))
11672 return DECL_ORIGIN (ret_val);
11673
11674 return ret_val;
11675 }
11676 }
11677
11678 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11679 no instruction. */
11680
11681 bool
11682 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11683 {
11684 /* Use precision rather than machine mode when we can, which gives
11685 the correct answer even for submode (bit-field) types. */
11686 if ((INTEGRAL_TYPE_P (outer_type)
11687 || POINTER_TYPE_P (outer_type)
11688 || TREE_CODE (outer_type) == OFFSET_TYPE)
11689 && (INTEGRAL_TYPE_P (inner_type)
11690 || POINTER_TYPE_P (inner_type)
11691 || TREE_CODE (inner_type) == OFFSET_TYPE))
11692 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11693
11694 /* Otherwise fall back on comparing machine modes (e.g. for
11695 aggregate types, floats). */
11696 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11697 }
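
/* For example (illustrative),

       tree_nop_conversion_p (unsigned_type_node, integer_type_node)

   is true, since int and unsigned int have the same precision, whereas
   a conversion from int to a wider long, or one that changes the
   machine mode of an aggregate, is not a nop.  */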
11698
11699 /* Return true iff conversion in EXP generates no instruction. Mark
11700 it inline so that we fully inline into the stripping functions even
11701 though we have two uses of this function. */
11702
11703 static inline bool
11704 tree_nop_conversion (const_tree exp)
11705 {
11706 tree outer_type, inner_type;
11707
11708 if (!CONVERT_EXPR_P (exp)
11709 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11710 return false;
11711 if (TREE_OPERAND (exp, 0) == error_mark_node)
11712 return false;
11713
11714 outer_type = TREE_TYPE (exp);
11715 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11716
11717 if (!inner_type)
11718 return false;
11719
11720 return tree_nop_conversion_p (outer_type, inner_type);
11721 }
11722
11723 /* Return true iff conversion in EXP generates no instruction. Don't
11724 consider conversions changing the signedness. */
11725
11726 static bool
11727 tree_sign_nop_conversion (const_tree exp)
11728 {
11729 tree outer_type, inner_type;
11730
11731 if (!tree_nop_conversion (exp))
11732 return false;
11733
11734 outer_type = TREE_TYPE (exp);
11735 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11736
11737 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11738 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11739 }
11740
11741 /* Strip conversions from EXP according to tree_nop_conversion and
11742 return the resulting expression. */
11743
11744 tree
11745 tree_strip_nop_conversions (tree exp)
11746 {
11747 while (tree_nop_conversion (exp))
11748 exp = TREE_OPERAND (exp, 0);
11749 return exp;
11750 }
11751
11752 /* Strip conversions from EXP according to tree_sign_nop_conversion
11753 and return the resulting expression. */
11754
11755 tree
11756 tree_strip_sign_nop_conversions (tree exp)
11757 {
11758 while (tree_sign_nop_conversion (exp))
11759 exp = TREE_OPERAND (exp, 0);
11760 return exp;
11761 }
11762
11763 /* Strip any floating-point extensions from EXP and return the result. */
11764 tree
11765 strip_float_extensions (tree exp)
11766 {
11767 tree sub, expt, subt;
11768
11769 /* For a floating point constant, look up the narrowest type that can
11770 hold it properly and handle it like (type)(narrowest_type)constant.
11771 This way we can optimize for instance a=a*2.0 where "a" is float
11772 but 2.0 is a double constant. */
11773 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11774 {
11775 REAL_VALUE_TYPE orig;
11776 tree type = NULL;
11777
11778 orig = TREE_REAL_CST (exp);
11779 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11780 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11781 type = float_type_node;
11782 else if (TYPE_PRECISION (TREE_TYPE (exp))
11783 > TYPE_PRECISION (double_type_node)
11784 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11785 type = double_type_node;
11786 if (type)
11787 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11788 }
11789
11790 if (!CONVERT_EXPR_P (exp))
11791 return exp;
11792
11793 sub = TREE_OPERAND (exp, 0);
11794 subt = TREE_TYPE (sub);
11795 expt = TREE_TYPE (exp);
11796
11797 if (!FLOAT_TYPE_P (subt))
11798 return exp;
11799
11800 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11801 return exp;
11802
11803 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11804 return exp;
11805
11806 return strip_float_extensions (sub);
11807 }
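
/* For example (illustrative), for the double-typed expression
   (double) f, where F is a float, this returns F itself; for the
   double REAL_CST 2.0 it returns the equivalent float constant, since
   2.0 is exactly representable as a float.  */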
11808
11809 /* Strip out all handled components that produce invariant
11810 offsets. */
11811
11812 const_tree
11813 strip_invariant_refs (const_tree op)
11814 {
11815 while (handled_component_p (op))
11816 {
11817 switch (TREE_CODE (op))
11818 {
11819 case ARRAY_REF:
11820 case ARRAY_RANGE_REF:
11821 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11822 || TREE_OPERAND (op, 2) != NULL_TREE
11823 || TREE_OPERAND (op, 3) != NULL_TREE)
11824 return NULL;
11825 break;
11826
11827 case COMPONENT_REF:
11828 if (TREE_OPERAND (op, 2) != NULL_TREE)
11829 return NULL;
11830 break;
11831
11832 default:;
11833 }
11834 op = TREE_OPERAND (op, 0);
11835 }
11836
11837 return op;
11838 }
11839
11840 static GTY(()) tree gcc_eh_personality_decl;
11841
11842 /* Return the GCC personality function decl. */
11843
11844 tree
11845 lhd_gcc_personality (void)
11846 {
11847 if (!gcc_eh_personality_decl)
11848 gcc_eh_personality_decl = build_personality_function ("gcc");
11849 return gcc_eh_personality_decl;
11850 }
11851
11852 /* TARGET is the call target of a GIMPLE call statement
11853 (obtained by gimple_call_fn). Return true if it is an
11854 OBJ_TYPE_REF representing a virtual call to a C++ method
11855 (as opposed to an OBJ_TYPE_REF representing ObjC calls
11856 through a cast, where the middle-end devirtualization machinery
11857 can't apply). */
11858
11859 bool
11860 virtual_method_call_p (tree target)
11861 {
11862 if (TREE_CODE (target) != OBJ_TYPE_REF)
11863 return false;
11864 target = TREE_TYPE (target);
11865 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11866 target = TREE_TYPE (target);
11867 if (TREE_CODE (target) == FUNCTION_TYPE)
11868 return false;
11869 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11870 return true;
11871 }
11872
11873 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11874
11875 tree
11876 obj_type_ref_class (tree ref)
11877 {
11878 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11879 ref = TREE_TYPE (ref);
11880 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11881 ref = TREE_TYPE (ref);
11882 /* We look for the type THIS points to. ObjC also builds
11883 OBJ_TYPE_REF with non-method calls; their first parameter
11884 ID, however, also corresponds to the class type. */
11885 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11886 || TREE_CODE (ref) == FUNCTION_TYPE);
11887 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11888 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11889 return TREE_TYPE (ref);
11890 }
11891
11892 /* Return true if T is in an anonymous namespace. */
11893
11894 bool
11895 type_in_anonymous_namespace_p (const_tree t)
11896 {
11897 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11898 builtin types; those have a NULL TYPE_CONTEXT. */
11899 if (!TYPE_CONTEXT (t))
11900 return false;
11901 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11902 }
11903
11904 /* Try to find a base info of BINFO that would have its field decl at offset
11905 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11906 found, return it; otherwise return NULL_TREE. */
11907
11908 tree
11909 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11910 {
11911 tree type = BINFO_TYPE (binfo);
11912
11913 while (true)
11914 {
11915 HOST_WIDE_INT pos, size;
11916 tree fld;
11917 int i;
11918
11919 if (types_same_for_odr (type, expected_type))
11920 return binfo;
11921 if (offset < 0)
11922 return NULL_TREE;
11923
11924 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11925 {
11926 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
11927 continue;
11928
11929 pos = int_bit_position (fld);
11930 size = tree_to_uhwi (DECL_SIZE (fld));
11931 if (pos <= offset && (pos + size) > offset)
11932 break;
11933 }
11934 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11935 return NULL_TREE;
11936
11937 /* Offset 0 indicates the primary base, whose vtable contents are
11938 represented in the binfo for the derived class. */
11939 else if (offset != 0)
11940 {
11941 tree base_binfo, binfo2 = binfo;
11942
11943 /* Find the BINFO corresponding to FLD. This is made a bit harder
11944 by the fact that with virtual inheritance we may need to walk down
11945 the non-virtual inheritance chain. */
11946 while (true)
11947 {
11948 tree containing_binfo = NULL, found_binfo = NULL;
11949 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11950 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11951 {
11952 found_binfo = base_binfo;
11953 break;
11954 }
11955 else
11956 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11957 - tree_to_shwi (BINFO_OFFSET (binfo)))
11958 * BITS_PER_UNIT < pos
11959 /* Rule out types with no virtual methods, or we can get confused
11960 here by zero-sized bases. */
11961 && TYPE_BINFO (BINFO_TYPE (base_binfo))
11962 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11963 && (!containing_binfo
11964 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11965 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11966 containing_binfo = base_binfo;
11967 if (found_binfo)
11968 {
11969 binfo = found_binfo;
11970 break;
11971 }
11972 if (!containing_binfo)
11973 return NULL_TREE;
11974 binfo2 = containing_binfo;
11975 }
11976 }
11977
11978 type = TREE_TYPE (fld);
11979 offset -= pos;
11980 }
11981 }
11982
11983 /* Returns true if X is a typedef decl. */
11984
11985 bool
11986 is_typedef_decl (tree x)
11987 {
11988 return (x && TREE_CODE (x) == TYPE_DECL
11989 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11990 }
11991
11992 /* Returns true iff TYPE is a type variant created for a typedef. */
11993
11994 bool
11995 typedef_variant_p (tree type)
11996 {
11997 return is_typedef_decl (TYPE_NAME (type));
11998 }
11999
12000 /* Warn about a use of an identifier which was marked deprecated. */
12001 void
12002 warn_deprecated_use (tree node, tree attr)
12003 {
12004 const char *msg;
12005
12006 if (node == 0 || !warn_deprecated_decl)
12007 return;
12008
12009 if (!attr)
12010 {
12011 if (DECL_P (node))
12012 attr = DECL_ATTRIBUTES (node);
12013 else if (TYPE_P (node))
12014 {
12015 tree decl = TYPE_STUB_DECL (node);
12016 if (decl)
12017 attr = lookup_attribute ("deprecated",
12018 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12019 }
12020 }
12021
12022 if (attr)
12023 attr = lookup_attribute ("deprecated", attr);
12024
12025 if (attr)
12026 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12027 else
12028 msg = NULL;
12029
12030 bool w;
12031 if (DECL_P (node))
12032 {
12033 if (msg)
12034 w = warning (OPT_Wdeprecated_declarations,
12035 "%qD is deprecated: %s", node, msg);
12036 else
12037 w = warning (OPT_Wdeprecated_declarations,
12038 "%qD is deprecated", node);
12039 if (w)
12040 inform (DECL_SOURCE_LOCATION (node), "declared here");
12041 }
12042 else if (TYPE_P (node))
12043 {
12044 tree what = NULL_TREE;
12045 tree decl = TYPE_STUB_DECL (node);
12046
12047 if (TYPE_NAME (node))
12048 {
12049 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12050 what = TYPE_NAME (node);
12051 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12052 && DECL_NAME (TYPE_NAME (node)))
12053 what = DECL_NAME (TYPE_NAME (node));
12054 }
12055
12056 if (decl)
12057 {
12058 if (what)
12059 {
12060 if (msg)
12061 w = warning (OPT_Wdeprecated_declarations,
12062 "%qE is deprecated: %s", what, msg);
12063 else
12064 w = warning (OPT_Wdeprecated_declarations,
12065 "%qE is deprecated", what);
12066 }
12067 else
12068 {
12069 if (msg)
12070 w = warning (OPT_Wdeprecated_declarations,
12071 "type is deprecated: %s", msg);
12072 else
12073 w = warning (OPT_Wdeprecated_declarations,
12074 "type is deprecated");
12075 }
12076 if (w)
12077 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12078 }
12079 else
12080 {
12081 if (what)
12082 {
12083 if (msg)
12084 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12085 what, msg);
12086 else
12087 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12088 }
12089 else
12090 {
12091 if (msg)
12092 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12093 msg);
12094 else
12095 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12096 }
12097 }
12098 }
12099 }
12100
12101 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12102 somewhere in it. */
12103
12104 bool
12105 contains_bitfld_component_ref_p (const_tree ref)
12106 {
12107 while (handled_component_p (ref))
12108 {
12109 if (TREE_CODE (ref) == COMPONENT_REF
12110 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12111 return true;
12112 ref = TREE_OPERAND (ref, 0);
12113 }
12114
12115 return false;
12116 }
12117
12118 /* Try to determine whether a TRY_CATCH expression can fall through.
12119 This is a subroutine of block_may_fallthru. */
12120
12121 static bool
12122 try_catch_may_fallthru (const_tree stmt)
12123 {
12124 tree_stmt_iterator i;
12125
12126 /* If the TRY block can fall through, the whole TRY_CATCH can
12127 fall through. */
12128 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12129 return true;
12130
12131 i = tsi_start (TREE_OPERAND (stmt, 1));
12132 switch (TREE_CODE (tsi_stmt (i)))
12133 {
12134 case CATCH_EXPR:
12135 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12136 catch expression and a body. The whole TRY_CATCH may fall
12137 through iff any of the catch bodies falls through. */
12138 for (; !tsi_end_p (i); tsi_next (&i))
12139 {
12140 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12141 return true;
12142 }
12143 return false;
12144
12145 case EH_FILTER_EXPR:
12146 /* The exception filter expression only matters if there is an
12147 exception. If the exception does not match EH_FILTER_TYPES,
12148 we will execute EH_FILTER_FAILURE, and we will fall through
12149 if that falls through. If the exception does match
12150 EH_FILTER_TYPES, the stack unwinder will continue up the
12151 stack, so we will not fall through. We don't know whether we
12152 will throw an exception which matches EH_FILTER_TYPES or not,
12153 so we just ignore EH_FILTER_TYPES and assume that we might
12154 throw an exception which doesn't match. */
12155 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12156
12157 default:
12158 /* This case represents statements to be executed when an
12159 exception occurs. Those statements are implicitly followed
12160 by a RESX statement to resume execution after the exception.
12161 So in this case the TRY_CATCH never falls through. */
12162 return false;
12163 }
12164 }
12165
12166 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12167 need not be 100% accurate; simply be conservative and return true if we
12168 don't know. This is used only to avoid stupidly generating extra code.
12169 If we're wrong, we'll just delete the extra code later. */
12170
12171 bool
12172 block_may_fallthru (const_tree block)
12173 {
12174 /* This CONST_CAST is okay because expr_last returns its argument
12175 unmodified and we assign it to a const_tree. */
12176 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12177
12178 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12179 {
12180 case GOTO_EXPR:
12181 case RETURN_EXPR:
12182 /* Easy cases. If the last statement of the block implies
12183 control transfer, then we can't fall through. */
12184 return false;
12185
12186 case SWITCH_EXPR:
12187 /* If SWITCH_LABELS is set, this is lowered, and represents a
12188 branch to a selected label and hence cannot fall through.
12189 Otherwise SWITCH_BODY is set, and the switch can fall
12190 through. */
12191 return SWITCH_LABELS (stmt) == NULL_TREE;
12192
12193 case COND_EXPR:
12194 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12195 return true;
12196 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12197
12198 case BIND_EXPR:
12199 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12200
12201 case TRY_CATCH_EXPR:
12202 return try_catch_may_fallthru (stmt);
12203
12204 case TRY_FINALLY_EXPR:
12205 /* The finally clause is always executed after the try clause,
12206 so if it does not fall through, then the try-finally will not
12207 fall through. Otherwise, if the try clause does not fall
12208 through, then when the finally clause falls through it will
12209 resume execution wherever the try clause was going. So the
12210 whole try-finally will only fall through if both the try
12211 clause and the finally clause fall through. */
12212 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12213 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12214
12215 case MODIFY_EXPR:
12216 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12217 stmt = TREE_OPERAND (stmt, 1);
12218 else
12219 return true;
12220 /* FALLTHRU */
12221
12222 case CALL_EXPR:
12223 /* Functions that do not return do not fall through. */
12224 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12225
12226 case CLEANUP_POINT_EXPR:
12227 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12228
12229 case TARGET_EXPR:
12230 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12231
12232 case ERROR_MARK:
12233 return true;
12234
12235 default:
12236 return lang_hooks.block_may_fallthru (stmt);
12237 }
12238 }
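
/* For example (illustrative), a statement list whose last statement is
   a RETURN_EXPR or a GOTO_EXPR cannot fall through, a list ending in a
   plain MODIFY_EXPR can, and a CALL_EXPR calling a noreturn function
   cannot.  */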
12239
12240 /* True if we are using EH to handle cleanups. */
12241 static bool using_eh_for_cleanups_flag = false;
12242
12243 /* This routine is called from front ends to indicate eh should be used for
12244 cleanups. */
12245 void
12246 using_eh_for_cleanups (void)
12247 {
12248 using_eh_for_cleanups_flag = true;
12249 }
12250
12251 /* Query whether EH is used for cleanups. */
12252 bool
12253 using_eh_for_cleanups_p (void)
12254 {
12255 return using_eh_for_cleanups_flag;
12256 }
12257
12258 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12259 const char *
12260 get_tree_code_name (enum tree_code code)
12261 {
12262 const char *invalid = "<invalid tree code>";
12263
12264 if (code >= MAX_TREE_CODES)
12265 return invalid;
12266
12267 return tree_code_name[code];
12268 }
12269
12270 /* Drops the TREE_OVERFLOW flag from T. */
12271
12272 tree
12273 drop_tree_overflow (tree t)
12274 {
12275 gcc_checking_assert (TREE_OVERFLOW (t));
12276
12277 /* For tree codes with a sharing machinery re-build the result. */
12278 if (TREE_CODE (t) == INTEGER_CST)
12279 return wide_int_to_tree (TREE_TYPE (t), t);
12280
12281 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12282 and drop the flag. */
12283 t = copy_node (t);
12284 TREE_OVERFLOW (t) = 0;
12285 return t;
12286 }
12287
12288 /* Given a memory reference expression T, return its base address.
12289 The base address of a memory reference expression is the main
12290 object being referenced. For instance, the base address for
12291 'array[i].fld[j]' is 'array'. You can think of this as stripping
12292 away the offset part from a memory address.
12293
12294 This function calls handled_component_p to strip away all the inner
12295 parts of the memory reference until it reaches the base object. */
12296
12297 tree
12298 get_base_address (tree t)
12299 {
12300 while (handled_component_p (t))
12301 t = TREE_OPERAND (t, 0);
12302
12303 if ((TREE_CODE (t) == MEM_REF
12304 || TREE_CODE (t) == TARGET_MEM_REF)
12305 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12306 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12307
12308 /* ??? Either the alias oracle or all callers need to properly deal
12309 with WITH_SIZE_EXPRs before we can look through those. */
12310 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12311 return NULL_TREE;
12312
12313 return t;
12314 }
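
/* For example (illustrative), for *&a.b the MEM_REF over an ADDR_EXPR
   is folded away and the base address is A, while for p->x, i.e. a
   COMPONENT_REF around MEM_REF[p], the MEM_REF itself is the base.  */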
12315
12316 /* Return the machine mode of T. For vectors, returns the mode of the
12317 inner type. The main use case is to feed the result to HONOR_NANS,
12318 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12319
12320 machine_mode
12321 element_mode (const_tree t)
12322 {
12323 if (!TYPE_P (t))
12324 t = TREE_TYPE (t);
12325 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12326 t = TREE_TYPE (t);
12327 return TYPE_MODE (t);
12328 }
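
/* For example (illustrative), for a vector of floats or an expression
   of that type this returns SFmode, and for complex double it returns
   DFmode (on typical targets), whereas TYPE_MODE of the enclosing type
   would be a vector or complex mode, or BLKmode for vector types the
   target does not support directly.  */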
12329
12330 #include "gt-tree.h"