1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24     tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "hard-reg-set.h"
46 #include "input.h"
47 #include "function.h"
48 #include "obstack.h"
49 #include "toplev.h" /* get_random_seed */
50 #include "inchash.h"
51 #include "filenames.h"
52 #include "output.h"
53 #include "target.h"
54 #include "common/common-target.h"
55 #include "langhooks.h"
56 #include "tree-inline.h"
57 #include "tree-iterator.h"
58 #include "predict.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "basic-block.h"
62 #include "bitmap.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimple-iterator.h"
69 #include "gimplify.h"
70 #include "gimple-ssa.h"
71 #include "hash-map.h"
72 #include "plugin-api.h"
73 #include "ipa-ref.h"
74 #include "cgraph.h"
75 #include "tree-phinodes.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "expr.h"
79 #include "tree-dfa.h"
80 #include "params.h"
81 #include "tree-pass.h"
82 #include "langhooks-def.h"
83 #include "diagnostic.h"
84 #include "tree-diagnostic.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "intl.h"
89 #include "wide-int.h"
90 #include "builtins.h"
91
92 /* Tree code classes. */
93
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
95 #define END_OF_BASE_TREE_CODES tcc_exceptional,
96
97 const enum tree_code_class tree_code_type[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Table indexed by tree code giving number of expression
105 operands beyond the fixed part of the node structure.
106 Not used for types or decls. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
109 #define END_OF_BASE_TREE_CODES 0,
110
111 const unsigned char tree_code_length[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Names of tree components.
119 Used for printing out the tree and error messages. */
120 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
121 #define END_OF_BASE_TREE_CODES "@dummy",
122
123 static const char *const tree_code_name[] = {
124 #include "all-tree.def"
125 };
126
127 #undef DEFTREECODE
128 #undef END_OF_BASE_TREE_CODES
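
/* A minimal sketch of the pattern above: an all-tree.def entry such as

     DEFTREECODE (INTEGER_CST, "integer_cst", tcc_constant, 0)

   expands to "tcc_constant," for tree_code_type, "0," for
   tree_code_length and "\"integer_cst\"," for tree_code_name, so the
   three tables stay in tree-code order and in sync by construction.  */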
129
130 /* Each tree code class has an associated string representation.
131 These must correspond to the tree_code_class entries. */
132
133 const char *const tree_code_class_strings[] =
134 {
135 "exceptional",
136 "constant",
137 "type",
138 "declaration",
139 "reference",
140 "comparison",
141 "unary",
142 "binary",
143 "statement",
144 "vl_exp",
145 "expression"
146 };
147
148 /* obstack.[ch] explicitly declined to prototype this. */
149 extern int _obstack_allocated_p (struct obstack *h, void *obj);
150
151 /* Statistics-gathering stuff. */
152
153 static int tree_code_counts[MAX_TREE_CODES];
154 int tree_node_counts[(int) all_kinds];
155 int tree_node_sizes[(int) all_kinds];
156
157 /* Keep in sync with tree.h:enum tree_node_kind. */
158 static const char * const tree_node_kind_names[] = {
159 "decls",
160 "types",
161 "blocks",
162 "stmts",
163 "refs",
164 "exprs",
165 "constants",
166 "identifiers",
167 "vecs",
168 "binfos",
169 "ssa names",
170 "constructors",
171 "random kinds",
172 "lang_decl kinds",
173 "lang_type kinds",
174 "omp clauses",
175 };
176
177 /* Unique id for next decl created. */
178 static GTY(()) int next_decl_uid;
179 /* Unique id for next type created. */
180 static GTY(()) int next_type_uid = 1;
181 /* Unique id for next debug decl created. Use negative numbers,
182 to catch erroneous uses. */
183 static GTY(()) int next_debug_decl_uid;
184
185 /* Since we cannot rehash a type after it is in the table, we have to
186 keep the hash code. */
187
188 struct GTY(()) type_hash {
189 unsigned long hash;
190 tree type;
191 };
192
193 /* Initial size of the hash table (rounded to next prime). */
194 #define TYPE_HASH_INITIAL_SIZE 1000
195
196 /* Now here is the hash table. When recording a type, it is added to
197 the slot whose index is the hash code. Note that the hash table is
198 used for several kinds of types (function types, array types and
199 array index range types, for now). While all these live in the
200 same table, they are completely independent, and the hash code is
201 computed differently for each of these. */
202
203 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
204 htab_t type_hash_table;
205
206 /* Hash table and temporary node for larger integer const values. */
207 static GTY (()) tree int_cst_node;
208 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
209 htab_t int_cst_hash_table;
210
211 /* Hash table for optimization flags and target option flags. Use the same
212 hash table for both sets of options. Nodes for building the current
213    optimization and target option nodes.  The assumption is that most of
214    the time the options created will already be in the hash table, so we
215    avoid allocating and freeing up a node repeatedly.  */
216 static GTY (()) tree cl_optimization_node;
217 static GTY (()) tree cl_target_option_node;
218 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
219 htab_t cl_option_hash_table;
220
221 /* General tree->tree mapping structure for use in hash tables. */
222
223
224 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
225 htab_t debug_expr_for_decl;
226
227 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
228 htab_t value_expr_for_decl;
229
230 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
231 htab_t debug_args_for_decl;
232
233 static void set_type_quals (tree, int);
234 static int type_hash_eq (const void *, const void *);
235 static hashval_t type_hash_hash (const void *);
236 static hashval_t int_cst_hash_hash (const void *);
237 static int int_cst_hash_eq (const void *, const void *);
238 static hashval_t cl_option_hash_hash (const void *);
239 static int cl_option_hash_eq (const void *, const void *);
240 static void print_type_hash_statistics (void);
241 static void print_debug_expr_statistics (void);
242 static void print_value_expr_statistics (void);
243 static int type_hash_marked_p (const void *);
244 static void type_hash_list (const_tree, inchash::hash &);
245 static void attribute_hash_list (const_tree, inchash::hash &);
246
247 tree global_trees[TI_MAX];
248 tree integer_types[itk_none];
249
250 bool int_n_enabled_p[NUM_INT_N_ENTS];
251 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
252
253 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
254
255 /* Number of operands for each OpenMP clause. */
256 unsigned const char omp_clause_num_ops[] =
257 {
258 0, /* OMP_CLAUSE_ERROR */
259 1, /* OMP_CLAUSE_PRIVATE */
260 1, /* OMP_CLAUSE_SHARED */
261 1, /* OMP_CLAUSE_FIRSTPRIVATE */
262 2, /* OMP_CLAUSE_LASTPRIVATE */
263 4, /* OMP_CLAUSE_REDUCTION */
264 1, /* OMP_CLAUSE_COPYIN */
265 1, /* OMP_CLAUSE_COPYPRIVATE */
266 3, /* OMP_CLAUSE_LINEAR */
267 2, /* OMP_CLAUSE_ALIGNED */
268 1, /* OMP_CLAUSE_DEPEND */
269 1, /* OMP_CLAUSE_UNIFORM */
270 2, /* OMP_CLAUSE_FROM */
271 2, /* OMP_CLAUSE_TO */
272 2, /* OMP_CLAUSE_MAP */
273 1, /* OMP_CLAUSE__LOOPTEMP_ */
274 1, /* OMP_CLAUSE_IF */
275 1, /* OMP_CLAUSE_NUM_THREADS */
276 1, /* OMP_CLAUSE_SCHEDULE */
277 0, /* OMP_CLAUSE_NOWAIT */
278 0, /* OMP_CLAUSE_ORDERED */
279 0, /* OMP_CLAUSE_DEFAULT */
280 3, /* OMP_CLAUSE_COLLAPSE */
281 0, /* OMP_CLAUSE_UNTIED */
282 1, /* OMP_CLAUSE_FINAL */
283 0, /* OMP_CLAUSE_MERGEABLE */
284 1, /* OMP_CLAUSE_DEVICE */
285 1, /* OMP_CLAUSE_DIST_SCHEDULE */
286 0, /* OMP_CLAUSE_INBRANCH */
287 0, /* OMP_CLAUSE_NOTINBRANCH */
288 1, /* OMP_CLAUSE_NUM_TEAMS */
289 1, /* OMP_CLAUSE_THREAD_LIMIT */
290 0, /* OMP_CLAUSE_PROC_BIND */
291 1, /* OMP_CLAUSE_SAFELEN */
292 1, /* OMP_CLAUSE_SIMDLEN */
293 0, /* OMP_CLAUSE_FOR */
294 0, /* OMP_CLAUSE_PARALLEL */
295 0, /* OMP_CLAUSE_SECTIONS */
296 0, /* OMP_CLAUSE_TASKGROUP */
297 1, /* OMP_CLAUSE__SIMDUID_ */
298 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
299 };
300
301 const char * const omp_clause_code_name[] =
302 {
303 "error_clause",
304 "private",
305 "shared",
306 "firstprivate",
307 "lastprivate",
308 "reduction",
309 "copyin",
310 "copyprivate",
311 "linear",
312 "aligned",
313 "depend",
314 "uniform",
315 "from",
316 "to",
317 "map",
318 "_looptemp_",
319 "if",
320 "num_threads",
321 "schedule",
322 "nowait",
323 "ordered",
324 "default",
325 "collapse",
326 "untied",
327 "final",
328 "mergeable",
329 "device",
330 "dist_schedule",
331 "inbranch",
332 "notinbranch",
333 "num_teams",
334 "thread_limit",
335 "proc_bind",
336 "safelen",
337 "simdlen",
338 "for",
339 "parallel",
340 "sections",
341 "taskgroup",
342 "_simduid_",
343 "_Cilk_for_count_"
344 };
345
346
347 /* Return the tree node structure used by tree code CODE. */
348
349 static inline enum tree_node_structure_enum
350 tree_node_structure_for_code (enum tree_code code)
351 {
352 switch (TREE_CODE_CLASS (code))
353 {
354 case tcc_declaration:
355 {
356 switch (code)
357 {
358 case FIELD_DECL:
359 return TS_FIELD_DECL;
360 case PARM_DECL:
361 return TS_PARM_DECL;
362 case VAR_DECL:
363 return TS_VAR_DECL;
364 case LABEL_DECL:
365 return TS_LABEL_DECL;
366 case RESULT_DECL:
367 return TS_RESULT_DECL;
368 case DEBUG_EXPR_DECL:
369 return TS_DECL_WRTL;
370 case CONST_DECL:
371 return TS_CONST_DECL;
372 case TYPE_DECL:
373 return TS_TYPE_DECL;
374 case FUNCTION_DECL:
375 return TS_FUNCTION_DECL;
376 case TRANSLATION_UNIT_DECL:
377 return TS_TRANSLATION_UNIT_DECL;
378 default:
379 return TS_DECL_NON_COMMON;
380 }
381 }
382 case tcc_type:
383 return TS_TYPE_NON_COMMON;
384 case tcc_reference:
385 case tcc_comparison:
386 case tcc_unary:
387 case tcc_binary:
388 case tcc_expression:
389 case tcc_statement:
390 case tcc_vl_exp:
391 return TS_EXP;
392 default: /* tcc_constant and tcc_exceptional */
393 break;
394 }
395 switch (code)
396 {
397 /* tcc_constant cases. */
398 case VOID_CST: return TS_TYPED;
399 case INTEGER_CST: return TS_INT_CST;
400 case REAL_CST: return TS_REAL_CST;
401 case FIXED_CST: return TS_FIXED_CST;
402 case COMPLEX_CST: return TS_COMPLEX;
403 case VECTOR_CST: return TS_VECTOR;
404 case STRING_CST: return TS_STRING;
405 /* tcc_exceptional cases. */
406 case ERROR_MARK: return TS_COMMON;
407 case IDENTIFIER_NODE: return TS_IDENTIFIER;
408 case TREE_LIST: return TS_LIST;
409 case TREE_VEC: return TS_VEC;
410 case SSA_NAME: return TS_SSA_NAME;
411 case PLACEHOLDER_EXPR: return TS_COMMON;
412 case STATEMENT_LIST: return TS_STATEMENT_LIST;
413 case BLOCK: return TS_BLOCK;
414 case CONSTRUCTOR: return TS_CONSTRUCTOR;
415 case TREE_BINFO: return TS_BINFO;
416 case OMP_CLAUSE: return TS_OMP_CLAUSE;
417 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
418 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
419
420 default:
421 gcc_unreachable ();
422 }
423 }
424
425
426 /* Initialize tree_contains_struct to describe the hierarchy of tree
427 nodes. */
428
429 static void
430 initialize_tree_contains_struct (void)
431 {
432 unsigned i;
433
434 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
435 {
436 enum tree_code code;
437 enum tree_node_structure_enum ts_code;
438
439 code = (enum tree_code) i;
440 ts_code = tree_node_structure_for_code (code);
441
442 /* Mark the TS structure itself. */
443 tree_contains_struct[code][ts_code] = 1;
444
445 /* Mark all the structures that TS is derived from. */
446 switch (ts_code)
447 {
448 case TS_TYPED:
449 case TS_BLOCK:
450 MARK_TS_BASE (code);
451 break;
452
453 case TS_COMMON:
454 case TS_INT_CST:
455 case TS_REAL_CST:
456 case TS_FIXED_CST:
457 case TS_VECTOR:
458 case TS_STRING:
459 case TS_COMPLEX:
460 case TS_SSA_NAME:
461 case TS_CONSTRUCTOR:
462 case TS_EXP:
463 case TS_STATEMENT_LIST:
464 MARK_TS_TYPED (code);
465 break;
466
467 case TS_IDENTIFIER:
468 case TS_DECL_MINIMAL:
469 case TS_TYPE_COMMON:
470 case TS_LIST:
471 case TS_VEC:
472 case TS_BINFO:
473 case TS_OMP_CLAUSE:
474 case TS_OPTIMIZATION:
475 case TS_TARGET_OPTION:
476 MARK_TS_COMMON (code);
477 break;
478
479 case TS_TYPE_WITH_LANG_SPECIFIC:
480 MARK_TS_TYPE_COMMON (code);
481 break;
482
483 case TS_TYPE_NON_COMMON:
484 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
485 break;
486
487 case TS_DECL_COMMON:
488 MARK_TS_DECL_MINIMAL (code);
489 break;
490
491 case TS_DECL_WRTL:
492 case TS_CONST_DECL:
493 MARK_TS_DECL_COMMON (code);
494 break;
495
496 case TS_DECL_NON_COMMON:
497 MARK_TS_DECL_WITH_VIS (code);
498 break;
499
500 case TS_DECL_WITH_VIS:
501 case TS_PARM_DECL:
502 case TS_LABEL_DECL:
503 case TS_RESULT_DECL:
504 MARK_TS_DECL_WRTL (code);
505 break;
506
507 case TS_FIELD_DECL:
508 MARK_TS_DECL_COMMON (code);
509 break;
510
511 case TS_VAR_DECL:
512 MARK_TS_DECL_WITH_VIS (code);
513 break;
514
515 case TS_TYPE_DECL:
516 case TS_FUNCTION_DECL:
517 MARK_TS_DECL_NON_COMMON (code);
518 break;
519
520 case TS_TRANSLATION_UNIT_DECL:
521 MARK_TS_DECL_COMMON (code);
522 break;
523
524 default:
525 gcc_unreachable ();
526 }
527 }
528
529 /* Basic consistency checks for attributes used in fold. */
530 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
531 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
532 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
534 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
535 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
537 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
538 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
540 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
542 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
543 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
544 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
546 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
547 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
548 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
549 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
550 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
554 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
556 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
557 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
558 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
559 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
560 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
561 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
562 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
563 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
564 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
565 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
566 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
567 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
568 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
569 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
570 }
571
572
573 /* Init tree.c. */
574
575 void
576 init_ttree (void)
577 {
578 /* Initialize the hash table of types. */
579 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
580 type_hash_eq, 0);
581
582 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
583 tree_decl_map_eq, 0);
584
585 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
586 tree_decl_map_eq, 0);
587
588 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
589 int_cst_hash_eq, NULL);
590
591 int_cst_node = make_int_cst (1, 1);
592
593 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
594 cl_option_hash_eq, NULL);
595
596 cl_optimization_node = make_node (OPTIMIZATION_NODE);
597 cl_target_option_node = make_node (TARGET_OPTION_NODE);
598
599 /* Initialize the tree_contains_struct array. */
600 initialize_tree_contains_struct ();
601 lang_hooks.init_ts ();
602 }
603
604 \f
605 /* The name of the object as the assembler will see it (but before any
606 translations made by ASM_OUTPUT_LABELREF). Often this is the same
607 as DECL_NAME. It is an IDENTIFIER_NODE. */
608 tree
609 decl_assembler_name (tree decl)
610 {
611 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
612 lang_hooks.set_decl_assembler_name (decl);
613 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
614 }
615
616 /* When the target supports COMDAT groups, this indicates which group the
617 DECL is associated with. This can be either an IDENTIFIER_NODE or a
618 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
619 tree
620 decl_comdat_group (const_tree node)
621 {
622 struct symtab_node *snode = symtab_node::get (node);
623 if (!snode)
624 return NULL;
625 return snode->get_comdat_group ();
626 }
627
628 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
629 tree
630 decl_comdat_group_id (const_tree node)
631 {
632 struct symtab_node *snode = symtab_node::get (node);
633 if (!snode)
634 return NULL;
635 return snode->get_comdat_group_id ();
636 }
637
638 /* When the target supports named sections, return the name of the section
639    NODE is placed in (as a string), or NULL if it is in no section.  */
640 const char *
641 decl_section_name (const_tree node)
642 {
643 struct symtab_node *snode = symtab_node::get (node);
644 if (!snode)
645 return NULL;
646 return snode->get_section ();
647 }
648
649 /* Set the section name of NODE to VALUE (a string), or clear the
650    section association when VALUE is NULL.  */
651 void
652 set_decl_section_name (tree node, const char *value)
653 {
654 struct symtab_node *snode;
655
656 if (value == NULL)
657 {
658 snode = symtab_node::get (node);
659 if (!snode)
660 return;
661 }
662 else if (TREE_CODE (node) == VAR_DECL)
663 snode = varpool_node::get_create (node);
664 else
665 snode = cgraph_node::get_create (node);
666 snode->set_section (value);
667 }
668
669 /* Return TLS model of a variable NODE. */
670 enum tls_model
671 decl_tls_model (const_tree node)
672 {
673 struct varpool_node *snode = varpool_node::get (node);
674 if (!snode)
675 return TLS_MODEL_NONE;
676 return snode->tls_model;
677 }
678
679 /* Set TLS model of variable NODE to MODEL. */
680 void
681 set_decl_tls_model (tree node, enum tls_model model)
682 {
683 struct varpool_node *vnode;
684
685 if (model == TLS_MODEL_NONE)
686 {
687 vnode = varpool_node::get (node);
688 if (!vnode)
689 return;
690 }
691 else
692 vnode = varpool_node::get_create (node);
693 vnode->tls_model = model;
694 }
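
/* A minimal usage sketch for the symtab-backed accessors above; DECL is
   assumed to be a VAR_DECL, and the section name is hypothetical.  Kept
   out of the build with #if 0.  */
#if 0
static void
example_decl_symtab_properties (tree decl)
{
  /* Attach the variable to a named section; passing NULL instead drops
     any existing section association.  */
  set_decl_section_name (decl, ".mydata");

  /* Request a TLS model; decl_tls_model reads it back from the varpool
     node (TLS_MODEL_NONE if the decl has none).  */
  set_decl_tls_model (decl, TLS_MODEL_INITIAL_EXEC);
  gcc_assert (decl_tls_model (decl) == TLS_MODEL_INITIAL_EXEC);
}
#endif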
695
696 /* Compute the number of bytes occupied by a tree with code CODE.
697 This function cannot be used for nodes that have variable sizes,
698 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
699 size_t
700 tree_code_size (enum tree_code code)
701 {
702 switch (TREE_CODE_CLASS (code))
703 {
704 case tcc_declaration: /* A decl node */
705 {
706 switch (code)
707 {
708 case FIELD_DECL:
709 return sizeof (struct tree_field_decl);
710 case PARM_DECL:
711 return sizeof (struct tree_parm_decl);
712 case VAR_DECL:
713 return sizeof (struct tree_var_decl);
714 case LABEL_DECL:
715 return sizeof (struct tree_label_decl);
716 case RESULT_DECL:
717 return sizeof (struct tree_result_decl);
718 case CONST_DECL:
719 return sizeof (struct tree_const_decl);
720 case TYPE_DECL:
721 return sizeof (struct tree_type_decl);
722 case FUNCTION_DECL:
723 return sizeof (struct tree_function_decl);
724 case DEBUG_EXPR_DECL:
725 return sizeof (struct tree_decl_with_rtl);
726 case TRANSLATION_UNIT_DECL:
727 return sizeof (struct tree_translation_unit_decl);
728 case NAMESPACE_DECL:
729 case IMPORTED_DECL:
730 case NAMELIST_DECL:
731 return sizeof (struct tree_decl_non_common);
732 default:
733 return lang_hooks.tree_size (code);
734 }
735 }
736
737 case tcc_type: /* a type node */
738 return sizeof (struct tree_type_non_common);
739
740 case tcc_reference: /* a reference */
741 case tcc_expression: /* an expression */
742 case tcc_statement: /* an expression with side effects */
743 case tcc_comparison: /* a comparison expression */
744 case tcc_unary: /* a unary arithmetic expression */
745 case tcc_binary: /* a binary arithmetic expression */
746 return (sizeof (struct tree_exp)
747 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
748
749 case tcc_constant: /* a constant */
750 switch (code)
751 {
752 case VOID_CST: return sizeof (struct tree_typed);
753 case INTEGER_CST: gcc_unreachable ();
754 case REAL_CST: return sizeof (struct tree_real_cst);
755 case FIXED_CST: return sizeof (struct tree_fixed_cst);
756 case COMPLEX_CST: return sizeof (struct tree_complex);
757 case VECTOR_CST: return sizeof (struct tree_vector);
758 case STRING_CST: gcc_unreachable ();
759 default:
760 return lang_hooks.tree_size (code);
761 }
762
763 case tcc_exceptional: /* something random, like an identifier. */
764 switch (code)
765 {
766 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
767 case TREE_LIST: return sizeof (struct tree_list);
768
769 case ERROR_MARK:
770 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
771
772 case TREE_VEC:
773 case OMP_CLAUSE: gcc_unreachable ();
774
775 case SSA_NAME: return sizeof (struct tree_ssa_name);
776
777 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
778 case BLOCK: return sizeof (struct tree_block);
779 case CONSTRUCTOR: return sizeof (struct tree_constructor);
780 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
781 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
782
783 default:
784 return lang_hooks.tree_size (code);
785 }
786
787 default:
788 gcc_unreachable ();
789 }
790 }
791
792 /* Compute the number of bytes occupied by NODE. This routine only
793 looks at TREE_CODE, except for those nodes that have variable sizes. */
794 size_t
795 tree_size (const_tree node)
796 {
797 const enum tree_code code = TREE_CODE (node);
798 switch (code)
799 {
800 case INTEGER_CST:
801 return (sizeof (struct tree_int_cst)
802 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
803
804 case TREE_BINFO:
805 return (offsetof (struct tree_binfo, base_binfos)
806 + vec<tree, va_gc>
807 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
808
809 case TREE_VEC:
810 return (sizeof (struct tree_vec)
811 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
812
813 case VECTOR_CST:
814 return (sizeof (struct tree_vector)
815 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
816
817 case STRING_CST:
818 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
819
820 case OMP_CLAUSE:
821 return (sizeof (struct tree_omp_clause)
822 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
823 * sizeof (tree));
824
825 default:
826 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
827 return (sizeof (struct tree_exp)
828 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
829 else
830 return tree_code_size (code);
831 }
832 }
833
834 /* Record interesting allocation statistics for a tree node with CODE
835 and LENGTH. */
836
837 static void
838 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
839 size_t length ATTRIBUTE_UNUSED)
840 {
841 enum tree_code_class type = TREE_CODE_CLASS (code);
842 tree_node_kind kind;
843
844 if (!GATHER_STATISTICS)
845 return;
846
847 switch (type)
848 {
849 case tcc_declaration: /* A decl node */
850 kind = d_kind;
851 break;
852
853 case tcc_type: /* a type node */
854 kind = t_kind;
855 break;
856
857 case tcc_statement: /* an expression with side effects */
858 kind = s_kind;
859 break;
860
861 case tcc_reference: /* a reference */
862 kind = r_kind;
863 break;
864
865 case tcc_expression: /* an expression */
866 case tcc_comparison: /* a comparison expression */
867 case tcc_unary: /* a unary arithmetic expression */
868 case tcc_binary: /* a binary arithmetic expression */
869 kind = e_kind;
870 break;
871
872 case tcc_constant: /* a constant */
873 kind = c_kind;
874 break;
875
876 case tcc_exceptional: /* something random, like an identifier. */
877 switch (code)
878 {
879 case IDENTIFIER_NODE:
880 kind = id_kind;
881 break;
882
883 case TREE_VEC:
884 kind = vec_kind;
885 break;
886
887 case TREE_BINFO:
888 kind = binfo_kind;
889 break;
890
891 case SSA_NAME:
892 kind = ssa_name_kind;
893 break;
894
895 case BLOCK:
896 kind = b_kind;
897 break;
898
899 case CONSTRUCTOR:
900 kind = constr_kind;
901 break;
902
903 case OMP_CLAUSE:
904 kind = omp_clause_kind;
905 break;
906
907 default:
908 kind = x_kind;
909 break;
910 }
911 break;
912
913 case tcc_vl_exp:
914 kind = e_kind;
915 break;
916
917 default:
918 gcc_unreachable ();
919 }
920
921 tree_code_counts[(int) code]++;
922 tree_node_counts[(int) kind]++;
923 tree_node_sizes[(int) kind] += length;
924 }
925
926 /* Allocate and return a new UID from the DECL_UID namespace. */
927
928 int
929 allocate_decl_uid (void)
930 {
931 return next_decl_uid++;
932 }
933
934 /* Return a newly allocated node of code CODE. For decl and type
935 nodes, some other fields are initialized. The rest of the node is
936 initialized to zero. This function cannot be used for TREE_VEC,
937 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
938 tree_code_size.
939
940 Achoo! I got a code in the node. */
941
942 tree
943 make_node_stat (enum tree_code code MEM_STAT_DECL)
944 {
945 tree t;
946 enum tree_code_class type = TREE_CODE_CLASS (code);
947 size_t length = tree_code_size (code);
948
949 record_node_allocation_statistics (code, length);
950
951 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
952 TREE_SET_CODE (t, code);
953
954 switch (type)
955 {
956 case tcc_statement:
957 TREE_SIDE_EFFECTS (t) = 1;
958 break;
959
960 case tcc_declaration:
961 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
962 {
963 if (code == FUNCTION_DECL)
964 {
965 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
966 DECL_MODE (t) = FUNCTION_MODE;
967 }
968 else
969 DECL_ALIGN (t) = 1;
970 }
971 DECL_SOURCE_LOCATION (t) = input_location;
972 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
973 DECL_UID (t) = --next_debug_decl_uid;
974 else
975 {
976 DECL_UID (t) = allocate_decl_uid ();
977 SET_DECL_PT_UID (t, -1);
978 }
979 if (TREE_CODE (t) == LABEL_DECL)
980 LABEL_DECL_UID (t) = -1;
981
982 break;
983
984 case tcc_type:
985 TYPE_UID (t) = next_type_uid++;
986 TYPE_ALIGN (t) = BITS_PER_UNIT;
987 TYPE_USER_ALIGN (t) = 0;
988 TYPE_MAIN_VARIANT (t) = t;
989 TYPE_CANONICAL (t) = t;
990
991 /* Default to no attributes for type, but let target change that. */
992 TYPE_ATTRIBUTES (t) = NULL_TREE;
993 targetm.set_default_type_attributes (t);
994
995 /* We have not yet computed the alias set for this type. */
996 TYPE_ALIAS_SET (t) = -1;
997 break;
998
999 case tcc_constant:
1000 TREE_CONSTANT (t) = 1;
1001 break;
1002
1003 case tcc_expression:
1004 switch (code)
1005 {
1006 case INIT_EXPR:
1007 case MODIFY_EXPR:
1008 case VA_ARG_EXPR:
1009 case PREDECREMENT_EXPR:
1010 case PREINCREMENT_EXPR:
1011 case POSTDECREMENT_EXPR:
1012 case POSTINCREMENT_EXPR:
1013 /* All of these have side-effects, no matter what their
1014 operands are. */
1015 TREE_SIDE_EFFECTS (t) = 1;
1016 break;
1017
1018 default:
1019 break;
1020 }
1021 break;
1022
1023 default:
1024 /* Other classes need no special treatment. */
1025 break;
1026 }
1027
1028 return t;
1029 }
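
/* A minimal sketch of the defaults applied above: a freshly made type
   node starts out as its own main variant and canonical type.  Kept out
   of the build with #if 0.  */
#if 0
static void
example_make_node_defaults (void)
{
  tree t = make_node (INTEGER_TYPE);
  gcc_assert (TYPE_MAIN_VARIANT (t) == t
	      && TYPE_CANONICAL (t) == t
	      && TYPE_ALIGN (t) == BITS_PER_UNIT);
}
#endif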
1030 \f
1031 /* Return a new node with the same contents as NODE except that its
1032 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1033
1034 tree
1035 copy_node_stat (tree node MEM_STAT_DECL)
1036 {
1037 tree t;
1038 enum tree_code code = TREE_CODE (node);
1039 size_t length;
1040
1041 gcc_assert (code != STATEMENT_LIST);
1042
1043 length = tree_size (node);
1044 record_node_allocation_statistics (code, length);
1045 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1046 memcpy (t, node, length);
1047
1048 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1049 TREE_CHAIN (t) = 0;
1050 TREE_ASM_WRITTEN (t) = 0;
1051 TREE_VISITED (t) = 0;
1052
1053 if (TREE_CODE_CLASS (code) == tcc_declaration)
1054 {
1055 if (code == DEBUG_EXPR_DECL)
1056 DECL_UID (t) = --next_debug_decl_uid;
1057 else
1058 {
1059 DECL_UID (t) = allocate_decl_uid ();
1060 if (DECL_PT_UID_SET_P (node))
1061 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1062 }
1063 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1064 && DECL_HAS_VALUE_EXPR_P (node))
1065 {
1066 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1067 DECL_HAS_VALUE_EXPR_P (t) = 1;
1068 }
1069       /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
1070 if (TREE_CODE (node) == VAR_DECL)
1071 {
1072 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1073 t->decl_with_vis.symtab_node = NULL;
1074 }
1075 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1076 {
1077 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1078 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1079 }
1080 if (TREE_CODE (node) == FUNCTION_DECL)
1081 {
1082 DECL_STRUCT_FUNCTION (t) = NULL;
1083 t->decl_with_vis.symtab_node = NULL;
1084 }
1085 }
1086 else if (TREE_CODE_CLASS (code) == tcc_type)
1087 {
1088 TYPE_UID (t) = next_type_uid++;
1089 /* The following is so that the debug code for
1090 the copy is different from the original type.
1091 The two statements usually duplicate each other
1092 (because they clear fields of the same union),
1093 but the optimizer should catch that. */
1094 TYPE_SYMTAB_POINTER (t) = 0;
1095 TYPE_SYMTAB_ADDRESS (t) = 0;
1096
1097 /* Do not copy the values cache. */
1098 if (TYPE_CACHED_VALUES_P (t))
1099 {
1100 TYPE_CACHED_VALUES_P (t) = 0;
1101 TYPE_CACHED_VALUES (t) = NULL_TREE;
1102 }
1103 }
1104
1105 return t;
1106 }
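
/* A minimal sketch of the copying rules above: a copied decl gets a
   fresh DECL_UID and does not inherit the chain.  DECL is assumed to be
   an ordinary declaration (e.g. a VAR_DECL).  Kept out of the build
   with #if 0.  */
#if 0
static void
example_copy_node (tree decl)
{
  tree copy = copy_node (decl);
  gcc_assert (DECL_UID (copy) != DECL_UID (decl));
  gcc_assert (TREE_CHAIN (copy) == NULL_TREE);
}
#endif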
1107
1108 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1109 For example, this can copy a list made of TREE_LIST nodes. */
1110
1111 tree
1112 copy_list (tree list)
1113 {
1114 tree head;
1115 tree prev, next;
1116
1117 if (list == 0)
1118 return 0;
1119
1120 head = prev = copy_node (list);
1121 next = TREE_CHAIN (list);
1122 while (next)
1123 {
1124 TREE_CHAIN (prev) = copy_node (next);
1125 prev = TREE_CHAIN (prev);
1126 next = TREE_CHAIN (next);
1127 }
1128 return head;
1129 }
1130
1131 \f
1132 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1133 INTEGER_CST with value CST and type TYPE. */
1134
1135 static unsigned int
1136 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1137 {
1138 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1139 /* We need an extra zero HWI if CST is an unsigned integer with its
1140 upper bit set, and if CST occupies a whole number of HWIs. */
1141 if (TYPE_UNSIGNED (type)
1142 && wi::neg_p (cst)
1143 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1144 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1145 return cst.get_len ();
1146 }
1147
1148 /* Return a new INTEGER_CST with value CST and type TYPE. */
1149
1150 static tree
1151 build_new_int_cst (tree type, const wide_int &cst)
1152 {
1153 unsigned int len = cst.get_len ();
1154 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1155 tree nt = make_int_cst (len, ext_len);
1156
1157 if (len < ext_len)
1158 {
1159 --ext_len;
1160 TREE_INT_CST_ELT (nt, ext_len) = 0;
1161 for (unsigned int i = len; i < ext_len; ++i)
1162 TREE_INT_CST_ELT (nt, i) = -1;
1163 }
1164 else if (TYPE_UNSIGNED (type)
1165 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1166 {
1167 len--;
1168 TREE_INT_CST_ELT (nt, len)
1169 = zext_hwi (cst.elt (len),
1170 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1171 }
1172
1173 for (unsigned int i = 0; i < len; i++)
1174 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1175 TREE_TYPE (nt) = type;
1176 return nt;
1177 }
1178
1179 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1180
1181 tree
1182 build_int_cst (tree type, HOST_WIDE_INT low)
1183 {
1184 /* Support legacy code. */
1185 if (!type)
1186 type = integer_type_node;
1187
1188 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1189 }
1190
1191 tree
1192 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1193 {
1194 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1195 }
1196
1197 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1198
1199 tree
1200 build_int_cst_type (tree type, HOST_WIDE_INT low)
1201 {
1202 gcc_assert (type);
1203 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1204 }
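
/* A minimal usage sketch: build_int_cst sign-extends LOW to TYPE, so -1
   becomes the all-ones value of an unsigned type, while build_int_cstu
   treats its argument as unsigned.  Kept out of the build with #if 0.  */
#if 0
static void
example_build_int_cst (void)
{
  tree m1 = build_int_cst (unsigned_char_type_node, -1);
  gcc_assert (tree_to_uhwi (m1) == 255);

  tree ten = build_int_cstu (size_type_node, 10);
  gcc_assert (tree_to_uhwi (ten) == 10);
}
#endif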
1205
1206 /* Construct a tree of type TYPE with the value given by CST.  The
1207    signedness of CST is assumed to be the same as the signedness of TYPE.  */
1208
1209 tree
1210 double_int_to_tree (tree type, double_int cst)
1211 {
1212 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1213 }
1214
1215 /* We force the wide_int CST to the range of the type TYPE by sign or
1216    zero extending it.  OVERFLOWABLE indicates whether we are interested
1217    in overflow of the value: when >0 we are only interested in signed
1218    overflow, when <0 we are interested in any overflow.  OVERFLOWED
1219    indicates whether overflow has already occurred.  We force
1220    the value to be within range of TYPE (by setting to 0 or 1 all
1221    the bits outside the type's range).  We set TREE_OVERFLOW on the
1222    result if
1223    OVERFLOWED is nonzero,
1224    or OVERFLOWABLE is >0 and signed overflow occurs,
1225    or OVERFLOWABLE is <0 and any overflow occurs.
1226    We return a new tree node for the extended wide_int.  The node
1227    is shared if no overflow flags are set.  */
1228
1229
1230 tree
1231 force_fit_type (tree type, const wide_int_ref &cst,
1232 int overflowable, bool overflowed)
1233 {
1234 signop sign = TYPE_SIGN (type);
1235
1236 /* If we need to set overflow flags, return a new unshared node. */
1237 if (overflowed || !wi::fits_to_tree_p (cst, type))
1238 {
1239 if (overflowed
1240 || overflowable < 0
1241 || (overflowable > 0 && sign == SIGNED))
1242 {
1243 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1244 tree t = build_new_int_cst (type, tmp);
1245 TREE_OVERFLOW (t) = 1;
1246 return t;
1247 }
1248 }
1249
1250 /* Else build a shared node. */
1251 return wide_int_to_tree (type, cst);
1252 }
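
/* A minimal sketch of the overflow handling above: a value that does not
   fit the target type, with OVERFLOWABLE < 0 (any overflow is of
   interest), yields an unshared node with TREE_OVERFLOW set.  Kept out
   of the build with #if 0.  */
#if 0
static void
example_force_fit_type (void)
{
  tree t = force_fit_type (unsigned_char_type_node,
			   wi::shwi (300, TYPE_PRECISION (integer_type_node)),
			   -1, false);
  gcc_assert (TREE_OVERFLOW (t));
}
#endif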
1253
1254 /* These are the hash table functions for the hash table of INTEGER_CST
1255 nodes of a sizetype. */
1256
1257 /* Return the hash code of X, an INTEGER_CST.  */
1258
1259 static hashval_t
1260 int_cst_hash_hash (const void *x)
1261 {
1262 const_tree const t = (const_tree) x;
1263 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1264 int i;
1265
1266 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1267 code ^= TREE_INT_CST_ELT (t, i);
1268
1269 return code;
1270 }
1271
1272 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1273    is the same as that given by *Y, which is also an INTEGER_CST tree node.  */
1274
1275 static int
1276 int_cst_hash_eq (const void *x, const void *y)
1277 {
1278 const_tree const xt = (const_tree) x;
1279 const_tree const yt = (const_tree) y;
1280
1281 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1282 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1283 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1284 return false;
1285
1286 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1287 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1288 return false;
1289
1290 return true;
1291 }
1292
1293 /* Create an INT_CST node of TYPE and value CST.
1294 The returned node is always shared. For small integers we use a
1295 per-type vector cache, for larger ones we use a single hash table.
1296 The value is extended from its precision according to the sign of
1297 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1298 the upper bits and ensures that hashing and value equality based
1299 upon the underlying HOST_WIDE_INTs works without masking. */
1300
1301 tree
1302 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1303 {
1304 tree t;
1305 int ix = -1;
1306 int limit = 0;
1307
1308 gcc_assert (type);
1309 unsigned int prec = TYPE_PRECISION (type);
1310 signop sgn = TYPE_SIGN (type);
1311
1312 /* Verify that everything is canonical. */
1313 int l = pcst.get_len ();
1314 if (l > 1)
1315 {
1316 if (pcst.elt (l - 1) == 0)
1317 gcc_checking_assert (pcst.elt (l - 2) < 0);
1318 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1319 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1320 }
1321
1322 wide_int cst = wide_int::from (pcst, prec, sgn);
1323 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1324
1325 if (ext_len == 1)
1326 {
1327 /* We just need to store a single HOST_WIDE_INT. */
1328 HOST_WIDE_INT hwi;
1329 if (TYPE_UNSIGNED (type))
1330 hwi = cst.to_uhwi ();
1331 else
1332 hwi = cst.to_shwi ();
1333
1334 switch (TREE_CODE (type))
1335 {
1336 case NULLPTR_TYPE:
1337 gcc_assert (hwi == 0);
1338 /* Fallthru. */
1339
1340 case POINTER_TYPE:
1341 case REFERENCE_TYPE:
1342 case POINTER_BOUNDS_TYPE:
1343 /* Cache NULL pointer and zero bounds. */
1344 if (hwi == 0)
1345 {
1346 limit = 1;
1347 ix = 0;
1348 }
1349 break;
1350
1351 case BOOLEAN_TYPE:
1352 /* Cache false or true. */
1353 limit = 2;
1354 if (hwi < 2)
1355 ix = hwi;
1356 break;
1357
1358 case INTEGER_TYPE:
1359 case OFFSET_TYPE:
1360 if (TYPE_SIGN (type) == UNSIGNED)
1361 {
1362 /* Cache [0, N). */
1363 limit = INTEGER_SHARE_LIMIT;
1364 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1365 ix = hwi;
1366 }
1367 else
1368 {
1369 /* Cache [-1, N). */
1370 limit = INTEGER_SHARE_LIMIT + 1;
1371 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1372 ix = hwi + 1;
1373 }
1374 break;
1375
1376 case ENUMERAL_TYPE:
1377 break;
1378
1379 default:
1380 gcc_unreachable ();
1381 }
1382
1383 if (ix >= 0)
1384 {
1385 /* Look for it in the type's vector of small shared ints. */
1386 if (!TYPE_CACHED_VALUES_P (type))
1387 {
1388 TYPE_CACHED_VALUES_P (type) = 1;
1389 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1390 }
1391
1392 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1393 if (t)
1394 /* Make sure no one is clobbering the shared constant. */
1395 gcc_checking_assert (TREE_TYPE (t) == type
1396 && TREE_INT_CST_NUNITS (t) == 1
1397 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1398 && TREE_INT_CST_EXT_NUNITS (t) == 1
1399 && TREE_INT_CST_ELT (t, 0) == hwi);
1400 else
1401 {
1402 /* Create a new shared int. */
1403 t = build_new_int_cst (type, cst);
1404 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1405 }
1406 }
1407 else
1408 {
1409 /* Use the cache of larger shared ints, using int_cst_node as
1410 a temporary. */
1411 void **slot;
1412
1413 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1414 TREE_TYPE (int_cst_node) = type;
1415
1416 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1417 t = (tree) *slot;
1418 if (!t)
1419 {
1420 /* Insert this one into the hash table. */
1421 t = int_cst_node;
1422 *slot = t;
1423 /* Make a new node for next time round. */
1424 int_cst_node = make_int_cst (1, 1);
1425 }
1426 }
1427 }
1428 else
1429 {
1430 /* The value either hashes properly or we drop it on the floor
1431 for the gc to take care of. There will not be enough of them
1432 to worry about. */
1433 void **slot;
1434
1435 tree nt = build_new_int_cst (type, cst);
1436 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1437 t = (tree) *slot;
1438 if (!t)
1439 {
1440 /* Insert this one into the hash table. */
1441 t = nt;
1442 *slot = t;
1443 }
1444 }
1445
1446 return t;
1447 }
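
/* A minimal sketch of the sharing described above: small values are
   cached per type, so building the same constant twice returns the very
   same node.  Kept out of the build with #if 0.  */
#if 0
static void
example_shared_int_cst (void)
{
  tree a = build_int_cst (integer_type_node, 42);
  tree b = wide_int_to_tree (integer_type_node,
			     wi::shwi (42, TYPE_PRECISION (integer_type_node)));
  gcc_assert (a == b);
}
#endif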
1448
1449 void
1450 cache_integer_cst (tree t)
1451 {
1452 tree type = TREE_TYPE (t);
1453 int ix = -1;
1454 int limit = 0;
1455 int prec = TYPE_PRECISION (type);
1456
1457 gcc_assert (!TREE_OVERFLOW (t));
1458
1459 switch (TREE_CODE (type))
1460 {
1461 case NULLPTR_TYPE:
1462 gcc_assert (integer_zerop (t));
1463 /* Fallthru. */
1464
1465 case POINTER_TYPE:
1466 case REFERENCE_TYPE:
1467 /* Cache NULL pointer. */
1468 if (integer_zerop (t))
1469 {
1470 limit = 1;
1471 ix = 0;
1472 }
1473 break;
1474
1475 case BOOLEAN_TYPE:
1476 /* Cache false or true. */
1477 limit = 2;
1478 if (wi::ltu_p (t, 2))
1479 ix = TREE_INT_CST_ELT (t, 0);
1480 break;
1481
1482 case INTEGER_TYPE:
1483 case OFFSET_TYPE:
1484 if (TYPE_UNSIGNED (type))
1485 {
1486 /* Cache 0..N */
1487 limit = INTEGER_SHARE_LIMIT;
1488
1489 	  /* This is a little hokey, but if the prec is smaller than
1490 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1491 obvious test will not get the correct answer. */
1492 if (prec < HOST_BITS_PER_WIDE_INT)
1493 {
1494 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1495 ix = tree_to_uhwi (t);
1496 }
1497 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1498 ix = tree_to_uhwi (t);
1499 }
1500 else
1501 {
1502 /* Cache -1..N */
1503 limit = INTEGER_SHARE_LIMIT + 1;
1504
1505 if (integer_minus_onep (t))
1506 ix = 0;
1507 else if (!wi::neg_p (t))
1508 {
1509 if (prec < HOST_BITS_PER_WIDE_INT)
1510 {
1511 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1512 ix = tree_to_shwi (t) + 1;
1513 }
1514 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1515 ix = tree_to_shwi (t) + 1;
1516 }
1517 }
1518 break;
1519
1520 case ENUMERAL_TYPE:
1521 break;
1522
1523 default:
1524 gcc_unreachable ();
1525 }
1526
1527 if (ix >= 0)
1528 {
1529 /* Look for it in the type's vector of small shared ints. */
1530 if (!TYPE_CACHED_VALUES_P (type))
1531 {
1532 TYPE_CACHED_VALUES_P (type) = 1;
1533 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1534 }
1535
1536 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1537 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1538 }
1539 else
1540 {
1541 /* Use the cache of larger shared ints. */
1542 void **slot;
1543
1544 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1545 /* If there is already an entry for the number verify it's the
1546 same. */
1547 if (*slot)
1548 gcc_assert (wi::eq_p (tree (*slot), t));
1549 else
1550 /* Otherwise insert this one into the hash table. */
1551 *slot = t;
1552 }
1553 }
1554
1555
1556 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1557 and the rest are zeros. */
1558
1559 tree
1560 build_low_bits_mask (tree type, unsigned bits)
1561 {
1562 gcc_assert (bits <= TYPE_PRECISION (type));
1563
1564 return wide_int_to_tree (type, wi::mask (bits, false,
1565 TYPE_PRECISION (type)));
1566 }
1567
1568 /* Checks that X is an integer constant that can be expressed in (unsigned)
1569 HOST_WIDE_INT without loss of precision. */
1570
1571 bool
1572 cst_and_fits_in_hwi (const_tree x)
1573 {
1574 if (TREE_CODE (x) != INTEGER_CST)
1575 return false;
1576
1577 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1578 return false;
1579
1580 return TREE_INT_CST_NUNITS (x) == 1;
1581 }
1582
1583 /* Build a newly constructed VECTOR_CST node with room for LEN elements.  */
1584
1585 tree
1586 make_vector_stat (unsigned len MEM_STAT_DECL)
1587 {
1588 tree t;
1589 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1590
1591 record_node_allocation_statistics (VECTOR_CST, length);
1592
1593 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1594
1595 TREE_SET_CODE (t, VECTOR_CST);
1596 TREE_CONSTANT (t) = 1;
1597
1598 return t;
1599 }
1600
1601 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1602    are given by the array VALS.  */
1603
1604 tree
1605 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1606 {
1607 int over = 0;
1608 unsigned cnt = 0;
1609 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1610 TREE_TYPE (v) = type;
1611
1612 /* Iterate through elements and check for overflow. */
1613 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1614 {
1615 tree value = vals[cnt];
1616
1617 VECTOR_CST_ELT (v, cnt) = value;
1618
1619 /* Don't crash if we get an address constant. */
1620 if (!CONSTANT_CLASS_P (value))
1621 continue;
1622
1623 over |= TREE_OVERFLOW (value);
1624 }
1625
1626 TREE_OVERFLOW (v) = over;
1627 return v;
1628 }
1629
1630 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1631 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1632
1633 tree
1634 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1635 {
1636 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1637 unsigned HOST_WIDE_INT idx;
1638 tree value;
1639
1640 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1641 vec[idx] = value;
1642 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1643 vec[idx] = build_zero_cst (TREE_TYPE (type));
1644
1645 return build_vector (type, vec);
1646 }
1647
1648 /* Build a vector of type VECTYPE in which every element is SC.  */
1649 tree
1650 build_vector_from_val (tree vectype, tree sc)
1651 {
1652 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1653
1654 if (sc == error_mark_node)
1655 return sc;
1656
1657 /* Verify that the vector type is suitable for SC. Note that there
1658 is some inconsistency in the type-system with respect to restrict
1659 qualifications of pointers. Vector types always have a main-variant
1660 element type and the qualification is applied to the vector-type.
1661 So TREE_TYPE (vector-type) does not return a properly qualified
1662 vector element-type. */
1663 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1664 TREE_TYPE (vectype)));
1665
1666 if (CONSTANT_CLASS_P (sc))
1667 {
1668 tree *v = XALLOCAVEC (tree, nunits);
1669 for (i = 0; i < nunits; ++i)
1670 v[i] = sc;
1671 return build_vector (vectype, v);
1672 }
1673 else
1674 {
1675 vec<constructor_elt, va_gc> *v;
1676 vec_alloc (v, nunits);
1677 for (i = 0; i < nunits; ++i)
1678 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1679 return build_constructor (vectype, v);
1680 }
1681 }
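
/* A minimal usage sketch: splatting a constant over a vector type gives
   a VECTOR_CST (a non-constant element would give a CONSTRUCTOR
   instead).  Kept out of the build with #if 0.  */
#if 0
static void
example_build_vector_from_val (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  tree splat = build_vector_from_val (v4si,
				      build_int_cst (integer_type_node, 7));
  gcc_assert (TREE_CODE (splat) == VECTOR_CST);
}
#endif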
1682
1683 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1684 are in the vec pointed to by VALS. */
1685 tree
1686 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1687 {
1688 tree c = make_node (CONSTRUCTOR);
1689 unsigned int i;
1690 constructor_elt *elt;
1691 bool constant_p = true;
1692 bool side_effects_p = false;
1693
1694 TREE_TYPE (c) = type;
1695 CONSTRUCTOR_ELTS (c) = vals;
1696
1697 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1698 {
1699 /* Mostly ctors will have elts that don't have side-effects, so
1700 the usual case is to scan all the elements. Hence a single
1701 loop for both const and side effects, rather than one loop
1702 each (with early outs). */
1703 if (!TREE_CONSTANT (elt->value))
1704 constant_p = false;
1705 if (TREE_SIDE_EFFECTS (elt->value))
1706 side_effects_p = true;
1707 }
1708
1709 TREE_SIDE_EFFECTS (c) = side_effects_p;
1710 TREE_CONSTANT (c) = constant_p;
1711
1712 return c;
1713 }
1714
1715 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1716 INDEX and VALUE. */
1717 tree
1718 build_constructor_single (tree type, tree index, tree value)
1719 {
1720 vec<constructor_elt, va_gc> *v;
1721 constructor_elt elt = {index, value};
1722
1723 vec_alloc (v, 1);
1724 v->quick_push (elt);
1725
1726 return build_constructor (type, v);
1727 }
1728
1729
1730 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1731 are in a list pointed to by VALS. */
1732 tree
1733 build_constructor_from_list (tree type, tree vals)
1734 {
1735 tree t;
1736 vec<constructor_elt, va_gc> *v = NULL;
1737
1738 if (vals)
1739 {
1740 vec_alloc (v, list_length (vals));
1741 for (t = vals; t; t = TREE_CHAIN (t))
1742 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1743 }
1744
1745 return build_constructor (type, v);
1746 }
1747
1748 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1749 of elements, provided as index/value pairs. */
1750
1751 tree
1752 build_constructor_va (tree type, int nelts, ...)
1753 {
1754 vec<constructor_elt, va_gc> *v = NULL;
1755 va_list p;
1756
1757 va_start (p, nelts);
1758 vec_alloc (v, nelts);
1759 while (nelts--)
1760 {
1761 tree index = va_arg (p, tree);
1762 tree value = va_arg (p, tree);
1763 CONSTRUCTOR_APPEND_ELT (v, index, value);
1764 }
1765 va_end (p);
1766 return build_constructor (type, v);
1767 }
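
/* A minimal usage sketch: the index/value pairs below build the
   initializer { [0] = 1, [1] = 2 } for a hypothetical two-element array
   type ARRAY_TYPE.  Kept out of the build with #if 0.  */
#if 0
static tree
example_build_constructor_va (tree array_type)
{
  return build_constructor_va (array_type, 2,
			       size_int (0),
			       build_int_cst (integer_type_node, 1),
			       size_int (1),
			       build_int_cst (integer_type_node, 2));
}
#endif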
1768
1769 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1770
1771 tree
1772 build_fixed (tree type, FIXED_VALUE_TYPE f)
1773 {
1774 tree v;
1775 FIXED_VALUE_TYPE *fp;
1776
1777 v = make_node (FIXED_CST);
1778 fp = ggc_alloc<fixed_value> ();
1779 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1780
1781 TREE_TYPE (v) = type;
1782 TREE_FIXED_CST_PTR (v) = fp;
1783 return v;
1784 }
1785
1786 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1787
1788 tree
1789 build_real (tree type, REAL_VALUE_TYPE d)
1790 {
1791 tree v;
1792 REAL_VALUE_TYPE *dp;
1793 int overflow = 0;
1794
1795 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1796 Consider doing it via real_convert now. */
1797
1798 v = make_node (REAL_CST);
1799 dp = ggc_alloc<real_value> ();
1800 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1801
1802 TREE_TYPE (v) = type;
1803 TREE_REAL_CST_PTR (v) = dp;
1804 TREE_OVERFLOW (v) = overflow;
1805 return v;
1806 }
1807
1808 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1809    node I, converted for floating-point type TYPE (which may be NULL).  */
1810
1811 REAL_VALUE_TYPE
1812 real_value_from_int_cst (const_tree type, const_tree i)
1813 {
1814 REAL_VALUE_TYPE d;
1815
1816 /* Clear all bits of the real value type so that we can later do
1817 bitwise comparisons to see if two values are the same. */
1818 memset (&d, 0, sizeof d);
1819
1820 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1821 TYPE_SIGN (TREE_TYPE (i)));
1822 return d;
1823 }
1824
1825 /* Given a tree representing an integer constant I, return a tree
1826 representing the same value as a floating-point constant of type TYPE. */
1827
1828 tree
1829 build_real_from_int_cst (tree type, const_tree i)
1830 {
1831 tree v;
1832 int overflow = TREE_OVERFLOW (i);
1833
1834 v = build_real (type, real_value_from_int_cst (type, i));
1835
1836 TREE_OVERFLOW (v) |= overflow;
1837 return v;
1838 }
1839
1840 /* Return a newly constructed STRING_CST node whose value is
1841 the LEN characters at STR.
1842 Note that for a C string literal, LEN should include the trailing NUL.
1843 The TREE_TYPE is not initialized. */
1844
1845 tree
1846 build_string (int len, const char *str)
1847 {
1848 tree s;
1849 size_t length;
1850
1851 /* Do not waste bytes provided by padding of struct tree_string. */
1852 length = len + offsetof (struct tree_string, str) + 1;
1853
1854 record_node_allocation_statistics (STRING_CST, length);
1855
1856 s = (tree) ggc_internal_alloc (length);
1857
1858 memset (s, 0, sizeof (struct tree_typed));
1859 TREE_SET_CODE (s, STRING_CST);
1860 TREE_CONSTANT (s) = 1;
1861 TREE_STRING_LENGTH (s) = len;
1862 memcpy (s->string.str, str, len);
1863 s->string.str[len] = '\0';
1864
1865 return s;
1866 }
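
/* A minimal usage sketch: LEN counts the bytes stored, so a C string
   literal passes strlen + 1 to keep its trailing NUL, and the caller
   still has to supply a TREE_TYPE.  Kept out of the build with #if 0.  */
#if 0
static tree
example_build_string (void)
{
  tree s = build_string (6, "hello");	/* five chars plus the NUL.  */
  TREE_TYPE (s) = build_array_type (char_type_node,
				    build_index_type (size_int (5)));
  return s;
}
#endif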
1867
1868 /* Return a newly constructed COMPLEX_CST node whose value is
1869 specified by the real and imaginary parts REAL and IMAG.
1870 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1871 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1872
1873 tree
1874 build_complex (tree type, tree real, tree imag)
1875 {
1876 tree t = make_node (COMPLEX_CST);
1877
1878 TREE_REALPART (t) = real;
1879 TREE_IMAGPART (t) = imag;
1880 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1881 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1882 return t;
1883 }
1884
1885 /* Return a constant of arithmetic type TYPE which is the
1886 multiplicative identity of the set TYPE. */
1887
1888 tree
1889 build_one_cst (tree type)
1890 {
1891 switch (TREE_CODE (type))
1892 {
1893 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1894 case POINTER_TYPE: case REFERENCE_TYPE:
1895 case OFFSET_TYPE:
1896 return build_int_cst (type, 1);
1897
1898 case REAL_TYPE:
1899 return build_real (type, dconst1);
1900
1901 case FIXED_POINT_TYPE:
1902 /* We can only generate 1 for accum types. */
1903 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1904 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1905
1906 case VECTOR_TYPE:
1907 {
1908 tree scalar = build_one_cst (TREE_TYPE (type));
1909
1910 return build_vector_from_val (type, scalar);
1911 }
1912
1913 case COMPLEX_TYPE:
1914 return build_complex (type,
1915 build_one_cst (TREE_TYPE (type)),
1916 build_zero_cst (TREE_TYPE (type)));
1917
1918 default:
1919 gcc_unreachable ();
1920 }
1921 }
1922
1923 /* Return an integer constant of type TYPE with all bits in its precision
1924    set, or a complex or vector whose subparts are such integers.  */
1925
1926 tree
1927 build_all_ones_cst (tree type)
1928 {
1929 if (TREE_CODE (type) == COMPLEX_TYPE)
1930 {
1931 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1932 return build_complex (type, scalar, scalar);
1933 }
1934 else
1935 return build_minus_one_cst (type);
1936 }
1937
1938 /* Return a constant of arithmetic type TYPE which is the
1939 opposite of the multiplicative identity of the set TYPE. */
1940
1941 tree
1942 build_minus_one_cst (tree type)
1943 {
1944 switch (TREE_CODE (type))
1945 {
1946 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1947 case POINTER_TYPE: case REFERENCE_TYPE:
1948 case OFFSET_TYPE:
1949 return build_int_cst (type, -1);
1950
1951 case REAL_TYPE:
1952 return build_real (type, dconstm1);
1953
1954 case FIXED_POINT_TYPE:
1955       /* We can only generate -1 for accum types.  */
1956 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1957 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1958 TYPE_MODE (type)));
1959
1960 case VECTOR_TYPE:
1961 {
1962 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1963
1964 return build_vector_from_val (type, scalar);
1965 }
1966
1967 case COMPLEX_TYPE:
1968 return build_complex (type,
1969 build_minus_one_cst (TREE_TYPE (type)),
1970 build_zero_cst (TREE_TYPE (type)));
1971
1972 default:
1973 gcc_unreachable ();
1974 }
1975 }
1976
1977 /* Build 0 constant of type TYPE. This is used by constructor folding
1978 and thus the constant should be represented in memory by
1979 zero(es). */
1980
1981 tree
1982 build_zero_cst (tree type)
1983 {
1984 switch (TREE_CODE (type))
1985 {
1986 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1987 case POINTER_TYPE: case REFERENCE_TYPE:
1988 case OFFSET_TYPE: case NULLPTR_TYPE:
1989 return build_int_cst (type, 0);
1990
1991 case REAL_TYPE:
1992 return build_real (type, dconst0);
1993
1994 case FIXED_POINT_TYPE:
1995 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1996
1997 case VECTOR_TYPE:
1998 {
1999 tree scalar = build_zero_cst (TREE_TYPE (type));
2000
2001 return build_vector_from_val (type, scalar);
2002 }
2003
2004 case COMPLEX_TYPE:
2005 {
2006 tree zero = build_zero_cst (TREE_TYPE (type));
2007
2008 return build_complex (type, zero, zero);
2009 }
2010
2011 default:
2012 if (!AGGREGATE_TYPE_P (type))
2013 return fold_convert (type, integer_zero_node);
2014 return build_constructor (type, NULL);
2015 }
2016 }
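
/* Illustrative sketch, not part of the original sources: how the constant
   builders above compose for a vector type (the variable names below are
   hypothetical).

       tree v4si = build_vector_type (intSI_type_node, 4);
       tree ones = build_one_cst (v4si);      yields { 1, 1, 1, 1 }
       tree zero = build_zero_cst (v4si);     yields { 0, 0, 0, 0 }

   Each builder constructs the scalar element first and then goes through
   build_vector_from_val, so the same entry points handle integral, real,
   fixed-point, complex and vector TYPEs.  */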
2017
2018
2019 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2020
2021 tree
2022 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2023 {
2024 tree t;
2025 size_t length = (offsetof (struct tree_binfo, base_binfos)
2026 + vec<tree, va_gc>::embedded_size (base_binfos));
2027
2028 record_node_allocation_statistics (TREE_BINFO, length);
2029
2030 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2031
2032 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2033
2034 TREE_SET_CODE (t, TREE_BINFO);
2035
2036 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2037
2038 return t;
2039 }
2040
2041 /* Create a CASE_LABEL_EXPR tree node and return it. */
2042
2043 tree
2044 build_case_label (tree low_value, tree high_value, tree label_decl)
2045 {
2046 tree t = make_node (CASE_LABEL_EXPR);
2047
2048 TREE_TYPE (t) = void_type_node;
2049 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2050
2051 CASE_LOW (t) = low_value;
2052 CASE_HIGH (t) = high_value;
2053 CASE_LABEL (t) = label_decl;
2054 CASE_CHAIN (t) = NULL_TREE;
2055
2056 return t;
2057 }
2058
2059 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2060 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2061 The latter determines the length of the HOST_WIDE_INT vector. */
2062
2063 tree
2064 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2065 {
2066 tree t;
2067 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2068 + sizeof (struct tree_int_cst));
2069
2070 gcc_assert (len);
2071 record_node_allocation_statistics (INTEGER_CST, length);
2072
2073 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2074
2075 TREE_SET_CODE (t, INTEGER_CST);
2076 TREE_INT_CST_NUNITS (t) = len;
2077 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2078 /* to_offset can only be applied to trees that are offset_int-sized
2079 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2080 must be exactly the precision of offset_int and so LEN is correct. */
2081 if (ext_len <= OFFSET_INT_ELTS)
2082 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2083 else
2084 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2085
2086 TREE_CONSTANT (t) = 1;
2087
2088 return t;
2089 }
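
/* Worked example of the size computation above (a sketch, assuming a
   64-bit HOST_WIDE_INT): for a constant needing EXT_LEN == 2 elements,
   the allocation is

       (2 - 1) * sizeof (HOST_WIDE_INT) + sizeof (struct tree_int_cst)

   i.e. the fixed node plus one extra HOST_WIDE_INT, since the first
   element is already embedded in struct tree_int_cst.  */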
2090
2091 /* Build a newly constructed TREE_VEC node of length LEN. */
2092
2093 tree
2094 make_tree_vec_stat (int len MEM_STAT_DECL)
2095 {
2096 tree t;
2097 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2098
2099 record_node_allocation_statistics (TREE_VEC, length);
2100
2101 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2102
2103 TREE_SET_CODE (t, TREE_VEC);
2104 TREE_VEC_LENGTH (t) = len;
2105
2106 return t;
2107 }
2108
2109 /* Grow a TREE_VEC node to new length LEN. */
2110
2111 tree
2112 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2113 {
2114 gcc_assert (TREE_CODE (v) == TREE_VEC);
2115
2116 int oldlen = TREE_VEC_LENGTH (v);
2117 gcc_assert (len > oldlen);
2118
2119 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2120 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2121
2122 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2123
2124 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2125
2126 TREE_VEC_LENGTH (v) = len;
2127
2128 return v;
2129 }
2130 \f
2131 /* Return 1 if EXPR is the integer constant zero or a complex constant
2132 of zero. */
2133
2134 int
2135 integer_zerop (const_tree expr)
2136 {
2137 STRIP_NOPS (expr);
2138
2139 switch (TREE_CODE (expr))
2140 {
2141 case INTEGER_CST:
2142 return wi::eq_p (expr, 0);
2143 case COMPLEX_CST:
2144 return (integer_zerop (TREE_REALPART (expr))
2145 && integer_zerop (TREE_IMAGPART (expr)));
2146 case VECTOR_CST:
2147 {
2148 unsigned i;
2149 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2150 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2151 return false;
2152 return true;
2153 }
2154 default:
2155 return false;
2156 }
2157 }
2158
2159 /* Return 1 if EXPR is the integer constant one or the corresponding
2160 complex constant. */
2161
2162 int
2163 integer_onep (const_tree expr)
2164 {
2165 STRIP_NOPS (expr);
2166
2167 switch (TREE_CODE (expr))
2168 {
2169 case INTEGER_CST:
2170 return wi::eq_p (wi::to_widest (expr), 1);
2171 case COMPLEX_CST:
2172 return (integer_onep (TREE_REALPART (expr))
2173 && integer_zerop (TREE_IMAGPART (expr)));
2174 case VECTOR_CST:
2175 {
2176 unsigned i;
2177 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2178 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2179 return false;
2180 return true;
2181 }
2182 default:
2183 return false;
2184 }
2185 }
2186
2187 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2188 return 1 if every piece is the integer constant one. */
2189
2190 int
2191 integer_each_onep (const_tree expr)
2192 {
2193 STRIP_NOPS (expr);
2194
2195 if (TREE_CODE (expr) == COMPLEX_CST)
2196 return (integer_onep (TREE_REALPART (expr))
2197 && integer_onep (TREE_IMAGPART (expr)));
2198 else
2199 return integer_onep (expr);
2200 }
2201
2202 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2203 it contains, or a complex or vector whose subparts are such integers. */
2204
2205 int
2206 integer_all_onesp (const_tree expr)
2207 {
2208 STRIP_NOPS (expr);
2209
2210 if (TREE_CODE (expr) == COMPLEX_CST
2211 && integer_all_onesp (TREE_REALPART (expr))
2212 && integer_all_onesp (TREE_IMAGPART (expr)))
2213 return 1;
2214
2215 else if (TREE_CODE (expr) == VECTOR_CST)
2216 {
2217 unsigned i;
2218 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2219 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2220 return 0;
2221 return 1;
2222 }
2223
2224 else if (TREE_CODE (expr) != INTEGER_CST)
2225 return 0;
2226
2227 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2228 }
2229
2230 /* Return 1 if EXPR is the integer constant minus one. */
2231
2232 int
2233 integer_minus_onep (const_tree expr)
2234 {
2235 STRIP_NOPS (expr);
2236
2237 if (TREE_CODE (expr) == COMPLEX_CST)
2238 return (integer_all_onesp (TREE_REALPART (expr))
2239 && integer_zerop (TREE_IMAGPART (expr)));
2240 else
2241 return integer_all_onesp (expr);
2242 }
2243
2244 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2245 one bit on). */
2246
2247 int
2248 integer_pow2p (const_tree expr)
2249 {
2250 STRIP_NOPS (expr);
2251
2252 if (TREE_CODE (expr) == COMPLEX_CST
2253 && integer_pow2p (TREE_REALPART (expr))
2254 && integer_zerop (TREE_IMAGPART (expr)))
2255 return 1;
2256
2257 if (TREE_CODE (expr) != INTEGER_CST)
2258 return 0;
2259
2260 return wi::popcount (expr) == 1;
2261 }
2262
2263 /* Return 1 if EXPR is an integer constant other than zero or a
2264 complex constant other than zero. */
2265
2266 int
2267 integer_nonzerop (const_tree expr)
2268 {
2269 STRIP_NOPS (expr);
2270
2271 return ((TREE_CODE (expr) == INTEGER_CST
2272 && !wi::eq_p (expr, 0))
2273 || (TREE_CODE (expr) == COMPLEX_CST
2274 && (integer_nonzerop (TREE_REALPART (expr))
2275 || integer_nonzerop (TREE_IMAGPART (expr)))));
2276 }
2277
2278 /* Return 1 if EXPR is the integer constant one. For vector,
2279 return 1 if every piece is the integer constant minus one
2280 (representing the value TRUE). */
2281
2282 int
2283 integer_truep (const_tree expr)
2284 {
2285 STRIP_NOPS (expr);
2286
2287 if (TREE_CODE (expr) == VECTOR_CST)
2288 return integer_all_onesp (expr);
2289 return integer_onep (expr);
2290 }
2291
2292 /* Return 1 if EXPR is the fixed-point constant zero. */
2293
2294 int
2295 fixed_zerop (const_tree expr)
2296 {
2297 return (TREE_CODE (expr) == FIXED_CST
2298 && TREE_FIXED_CST (expr).data.is_zero ());
2299 }
2300
2301 /* Return the power of two represented by a tree node known to be a
2302 power of two. */
2303
2304 int
2305 tree_log2 (const_tree expr)
2306 {
2307 STRIP_NOPS (expr);
2308
2309 if (TREE_CODE (expr) == COMPLEX_CST)
2310 return tree_log2 (TREE_REALPART (expr));
2311
2312 return wi::exact_log2 (expr);
2313 }
2314
2315 /* Similar, but return the largest integer Y such that 2 ** Y is less
2316 than or equal to EXPR. */
2317
2318 int
2319 tree_floor_log2 (const_tree expr)
2320 {
2321 STRIP_NOPS (expr);
2322
2323 if (TREE_CODE (expr) == COMPLEX_CST)
2324 return tree_log2 (TREE_REALPART (expr));
2325
2326 return wi::floor_log2 (expr);
2327 }
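
/* Illustrative examples for the two helpers above (not from the original
   sources): for an INTEGER_CST with value 8, tree_log2 returns 3, as does
   tree_floor_log2; for the value 10, which is not a power of two,
   tree_floor_log2 still returns 3 while wi::exact_log2 (and hence
   tree_log2) would yield -1.

       tree c = build_int_cst (integer_type_node, 10);
       int f = tree_floor_log2 (c);     f == 3
 */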
2328
2329 /* Return number of known trailing zero bits in EXPR, or, if the value of
2330 EXPR is known to be zero, the precision of its type. */
2331
2332 unsigned int
2333 tree_ctz (const_tree expr)
2334 {
2335 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2336 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2337 return 0;
2338
2339 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2340 switch (TREE_CODE (expr))
2341 {
2342 case INTEGER_CST:
2343 ret1 = wi::ctz (expr);
2344 return MIN (ret1, prec);
2345 case SSA_NAME:
2346 ret1 = wi::ctz (get_nonzero_bits (expr));
2347 return MIN (ret1, prec);
2348 case PLUS_EXPR:
2349 case MINUS_EXPR:
2350 case BIT_IOR_EXPR:
2351 case BIT_XOR_EXPR:
2352 case MIN_EXPR:
2353 case MAX_EXPR:
2354 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2355 if (ret1 == 0)
2356 return ret1;
2357 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2358 return MIN (ret1, ret2);
2359 case POINTER_PLUS_EXPR:
2360 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2361 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2362 /* Second operand is sizetype, which could be in theory
2363 wider than pointer's precision. Make sure we never
2364 return more than prec. */
2365 ret2 = MIN (ret2, prec);
2366 return MIN (ret1, ret2);
2367 case BIT_AND_EXPR:
2368 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2369 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2370 return MAX (ret1, ret2);
2371 case MULT_EXPR:
2372 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2373 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2374 return MIN (ret1 + ret2, prec);
2375 case LSHIFT_EXPR:
2376 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2377 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2378 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2379 {
2380 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2381 return MIN (ret1 + ret2, prec);
2382 }
2383 return ret1;
2384 case RSHIFT_EXPR:
2385 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2386 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2387 {
2388 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2389 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2390 if (ret1 > ret2)
2391 return ret1 - ret2;
2392 }
2393 return 0;
2394 case TRUNC_DIV_EXPR:
2395 case CEIL_DIV_EXPR:
2396 case FLOOR_DIV_EXPR:
2397 case ROUND_DIV_EXPR:
2398 case EXACT_DIV_EXPR:
2399 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2400 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2401 {
2402 int l = tree_log2 (TREE_OPERAND (expr, 1));
2403 if (l >= 0)
2404 {
2405 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2406 ret2 = l;
2407 if (ret1 > ret2)
2408 return ret1 - ret2;
2409 }
2410 }
2411 return 0;
2412 CASE_CONVERT:
2413 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2414 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2415 ret1 = prec;
2416 return MIN (ret1, prec);
2417 case SAVE_EXPR:
2418 return tree_ctz (TREE_OPERAND (expr, 0));
2419 case COND_EXPR:
2420 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2421 if (ret1 == 0)
2422 return 0;
2423 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2424 return MIN (ret1, ret2);
2425 case COMPOUND_EXPR:
2426 return tree_ctz (TREE_OPERAND (expr, 1));
2427 case ADDR_EXPR:
2428 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2429 if (ret1 > BITS_PER_UNIT)
2430 {
2431 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2432 return MIN (ret1, prec);
2433 }
2434 return 0;
2435 default:
2436 return 0;
2437 }
2438 }
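
/* Illustrative sketch of how the recursion above combines (not part of
   the original sources): for an expression X * 4 where X is an SSA_NAME
   whose nonzero bits imply one known trailing zero, the MULT_EXPR case
   returns MIN (1 + 2, prec) == 3; shifting that result left by a constant
   5 via LSHIFT_EXPR would then give MIN (3 + 5, prec).  A plain
   INTEGER_CST such as 40 (binary 101000) reports 3 directly via wi::ctz.  */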
2439
2440 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2441 decimal float constants, so don't return 1 for them. */
2442
2443 int
2444 real_zerop (const_tree expr)
2445 {
2446 STRIP_NOPS (expr);
2447
2448 switch (TREE_CODE (expr))
2449 {
2450 case REAL_CST:
2451 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2452 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2453 case COMPLEX_CST:
2454 return real_zerop (TREE_REALPART (expr))
2455 && real_zerop (TREE_IMAGPART (expr));
2456 case VECTOR_CST:
2457 {
2458 unsigned i;
2459 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2460 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2461 return false;
2462 return true;
2463 }
2464 default:
2465 return false;
2466 }
2467 }
2468
2469 /* Return 1 if EXPR is the real constant one in real or complex form.
2470 Trailing zeroes matter for decimal float constants, so don't return
2471 1 for them. */
2472
2473 int
2474 real_onep (const_tree expr)
2475 {
2476 STRIP_NOPS (expr);
2477
2478 switch (TREE_CODE (expr))
2479 {
2480 case REAL_CST:
2481 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2482 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2483 case COMPLEX_CST:
2484 return real_onep (TREE_REALPART (expr))
2485 && real_zerop (TREE_IMAGPART (expr));
2486 case VECTOR_CST:
2487 {
2488 unsigned i;
2489 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2490 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2491 return false;
2492 return true;
2493 }
2494 default:
2495 return false;
2496 }
2497 }
2498
2499 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2500 matter for decimal float constants, so don't return 1 for them. */
2501
2502 int
2503 real_minus_onep (const_tree expr)
2504 {
2505 STRIP_NOPS (expr);
2506
2507 switch (TREE_CODE (expr))
2508 {
2509 case REAL_CST:
2510 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2511 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2512 case COMPLEX_CST:
2513 return real_minus_onep (TREE_REALPART (expr))
2514 && real_zerop (TREE_IMAGPART (expr));
2515 case VECTOR_CST:
2516 {
2517 unsigned i;
2518 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2519 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2520 return false;
2521 return true;
2522 }
2523 default:
2524 return false;
2525 }
2526 }
2527
2528 /* Nonzero if EXP is a constant or a cast of a constant. */
2529
2530 int
2531 really_constant_p (const_tree exp)
2532 {
2533 /* This is not quite the same as STRIP_NOPS. It does more. */
2534 while (CONVERT_EXPR_P (exp)
2535 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2536 exp = TREE_OPERAND (exp, 0);
2537 return TREE_CONSTANT (exp);
2538 }
2539 \f
2540 /* Return first list element whose TREE_VALUE is ELEM.
2541 Return 0 if ELEM is not in LIST. */
2542
2543 tree
2544 value_member (tree elem, tree list)
2545 {
2546 while (list)
2547 {
2548 if (elem == TREE_VALUE (list))
2549 return list;
2550 list = TREE_CHAIN (list);
2551 }
2552 return NULL_TREE;
2553 }
2554
2555 /* Return first list element whose TREE_PURPOSE is ELEM.
2556 Return 0 if ELEM is not in LIST. */
2557
2558 tree
2559 purpose_member (const_tree elem, tree list)
2560 {
2561 while (list)
2562 {
2563 if (elem == TREE_PURPOSE (list))
2564 return list;
2565 list = TREE_CHAIN (list);
2566 }
2567 return NULL_TREE;
2568 }
2569
2570 /* Return true if ELEM is in V. */
2571
2572 bool
2573 vec_member (const_tree elem, vec<tree, va_gc> *v)
2574 {
2575 unsigned ix;
2576 tree t;
2577 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2578 if (elem == t)
2579 return true;
2580 return false;
2581 }
2582
2583 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2584 NULL_TREE. */
2585
2586 tree
2587 chain_index (int idx, tree chain)
2588 {
2589 for (; chain && idx > 0; --idx)
2590 chain = TREE_CHAIN (chain);
2591 return chain;
2592 }
2593
2594 /* Return nonzero if ELEM is part of the chain CHAIN. */
2595
2596 int
2597 chain_member (const_tree elem, const_tree chain)
2598 {
2599 while (chain)
2600 {
2601 if (elem == chain)
2602 return 1;
2603 chain = DECL_CHAIN (chain);
2604 }
2605
2606 return 0;
2607 }
2608
2609 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2610 We expect a null pointer to mark the end of the chain.
2611 This is the Lisp primitive `length'. */
2612
2613 int
2614 list_length (const_tree t)
2615 {
2616 const_tree p = t;
2617 #ifdef ENABLE_TREE_CHECKING
2618 const_tree q = t;
2619 #endif
2620 int len = 0;
2621
2622 while (p)
2623 {
2624 p = TREE_CHAIN (p);
2625 #ifdef ENABLE_TREE_CHECKING
2626 if (len % 2)
2627 q = TREE_CHAIN (q);
2628 gcc_assert (p != q);
2629 #endif
2630 len++;
2631 }
2632
2633 return len;
2634 }
2635
2636 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2637 UNION_TYPE TYPE, or NULL_TREE if none. */
2638
2639 tree
2640 first_field (const_tree type)
2641 {
2642 tree t = TYPE_FIELDS (type);
2643 while (t && TREE_CODE (t) != FIELD_DECL)
2644 t = TREE_CHAIN (t);
2645 return t;
2646 }
2647
2648 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2649 by modifying the last node in chain 1 to point to chain 2.
2650 This is the Lisp primitive `nconc'. */
2651
2652 tree
2653 chainon (tree op1, tree op2)
2654 {
2655 tree t1;
2656
2657 if (!op1)
2658 return op2;
2659 if (!op2)
2660 return op1;
2661
2662 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2663 continue;
2664 TREE_CHAIN (t1) = op2;
2665
2666 #ifdef ENABLE_TREE_CHECKING
2667 {
2668 tree t2;
2669 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2670 gcc_assert (t2 != t1);
2671 }
2672 #endif
2673
2674 return op1;
2675 }
2676
2677 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2678
2679 tree
2680 tree_last (tree chain)
2681 {
2682 tree next;
2683 if (chain)
2684 while ((next = TREE_CHAIN (chain)))
2685 chain = next;
2686 return chain;
2687 }
2688
2689 /* Reverse the order of elements in the chain T,
2690 and return the new head of the chain (old last element). */
2691
2692 tree
2693 nreverse (tree t)
2694 {
2695 tree prev = 0, decl, next;
2696 for (decl = t; decl; decl = next)
2697 {
2698 /* We shouldn't be using this function to reverse BLOCK chains; we
2699 have blocks_nreverse for that. */
2700 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2701 next = TREE_CHAIN (decl);
2702 TREE_CHAIN (decl) = prev;
2703 prev = decl;
2704 }
2705 return prev;
2706 }
2707 \f
2708 /* Return a newly created TREE_LIST node whose
2709 purpose and value fields are PARM and VALUE. */
2710
2711 tree
2712 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2713 {
2714 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2715 TREE_PURPOSE (t) = parm;
2716 TREE_VALUE (t) = value;
2717 return t;
2718 }
2719
2720 /* Build a chain of TREE_LIST nodes from a vector. */
2721
2722 tree
2723 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2724 {
2725 tree ret = NULL_TREE;
2726 tree *pp = &ret;
2727 unsigned int i;
2728 tree t;
2729 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2730 {
2731 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2732 pp = &TREE_CHAIN (*pp);
2733 }
2734 return ret;
2735 }
2736
2737 /* Return a newly created TREE_LIST node whose
2738 purpose and value fields are PURPOSE and VALUE
2739 and whose TREE_CHAIN is CHAIN. */
2740
2741 tree
2742 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2743 {
2744 tree node;
2745
2746 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2747 memset (node, 0, sizeof (struct tree_common));
2748
2749 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2750
2751 TREE_SET_CODE (node, TREE_LIST);
2752 TREE_CHAIN (node) = chain;
2753 TREE_PURPOSE (node) = purpose;
2754 TREE_VALUE (node) = value;
2755 return node;
2756 }
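
/* Illustrative sketch of the list primitives (not from the original
   sources; the variable name is hypothetical):

       tree chain = NULL_TREE;
       chain = tree_cons (NULL_TREE, integer_zero_node, chain);
       chain = tree_cons (NULL_TREE, integer_one_node, chain);
       chain = nreverse (chain);          restore original order
       gcc_assert (list_length (chain) == 2);

   tree_cons prepends, so building front-to-back and then calling
   nreverse is the usual idiom; build_tree_list_vec does the same thing
   for a vec<tree, va_gc> input.  */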
2757
2758 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2759 trees. */
2760
2761 vec<tree, va_gc> *
2762 ctor_to_vec (tree ctor)
2763 {
2764 vec<tree, va_gc> *vec;
2765 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2766 unsigned int ix;
2767 tree val;
2768
2769 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2770 vec->quick_push (val);
2771
2772 return vec;
2773 }
2774 \f
2775 /* Return the size nominally occupied by an object of type TYPE
2776 when it resides in memory. The value is measured in units of bytes,
2777 and its data type is that normally used for type sizes
2778 (which is the first type created by make_signed_type or
2779 make_unsigned_type). */
2780
2781 tree
2782 size_in_bytes (const_tree type)
2783 {
2784 tree t;
2785
2786 if (type == error_mark_node)
2787 return integer_zero_node;
2788
2789 type = TYPE_MAIN_VARIANT (type);
2790 t = TYPE_SIZE_UNIT (type);
2791
2792 if (t == 0)
2793 {
2794 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2795 return size_zero_node;
2796 }
2797
2798 return t;
2799 }
2800
2801 /* Return the size of TYPE (in bytes) as a wide integer
2802 or return -1 if the size can vary or is larger than an integer. */
2803
2804 HOST_WIDE_INT
2805 int_size_in_bytes (const_tree type)
2806 {
2807 tree t;
2808
2809 if (type == error_mark_node)
2810 return 0;
2811
2812 type = TYPE_MAIN_VARIANT (type);
2813 t = TYPE_SIZE_UNIT (type);
2814
2815 if (t && tree_fits_uhwi_p (t))
2816 return TREE_INT_CST_LOW (t);
2817 else
2818 return -1;
2819 }
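
/* Illustrative contrast between the two size queries above (a sketch,
   not from the original sources): size_in_bytes always returns a tree,
   possibly a non-constant expression for variable-sized types, while
   int_size_in_bytes collapses to a HOST_WIDE_INT and returns -1 when the
   size is not a compile-time constant that fits:

       HOST_WIDE_INT n = int_size_in_bytes (integer_type_node);
       typically 4 on common targets; a VLA type would give -1.  */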
2820
2821 /* Return the maximum size of TYPE (in bytes) as a wide integer
2822 or return -1 if the size can vary or is larger than an integer. */
2823
2824 HOST_WIDE_INT
2825 max_int_size_in_bytes (const_tree type)
2826 {
2827 HOST_WIDE_INT size = -1;
2828 tree size_tree;
2829
2830 /* If this is an array type, check for a possible MAX_SIZE attached. */
2831
2832 if (TREE_CODE (type) == ARRAY_TYPE)
2833 {
2834 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2835
2836 if (size_tree && tree_fits_uhwi_p (size_tree))
2837 size = tree_to_uhwi (size_tree);
2838 }
2839
2840 /* If we still haven't been able to get a size, see if the language
2841 can compute a maximum size. */
2842
2843 if (size == -1)
2844 {
2845 size_tree = lang_hooks.types.max_size (type);
2846
2847 if (size_tree && tree_fits_uhwi_p (size_tree))
2848 size = tree_to_uhwi (size_tree);
2849 }
2850
2851 return size;
2852 }
2853 \f
2854 /* Return the bit position of FIELD, in bits from the start of the record.
2855 This is a tree of type bitsizetype. */
2856
2857 tree
2858 bit_position (const_tree field)
2859 {
2860 return bit_from_pos (DECL_FIELD_OFFSET (field),
2861 DECL_FIELD_BIT_OFFSET (field));
2862 }
2863 \f
2864 /* Return the byte position of FIELD, in bytes from the start of the record.
2865 This is a tree of type sizetype. */
2866
2867 tree
2868 byte_position (const_tree field)
2869 {
2870 return byte_from_pos (DECL_FIELD_OFFSET (field),
2871 DECL_FIELD_BIT_OFFSET (field));
2872 }
2873
2874 /* Likewise, but return as an integer. It must be representable in
2875 that way (since it could be a signed value, we don't have the
2876 option of returning -1 like int_size_in_bytes can). */
2877
2878 HOST_WIDE_INT
2879 int_byte_position (const_tree field)
2880 {
2881 return tree_to_shwi (byte_position (field));
2882 }
2883 \f
2884 /* Return the strictest alignment, in bits, that T is known to have. */
2885
2886 unsigned int
2887 expr_align (const_tree t)
2888 {
2889 unsigned int align0, align1;
2890
2891 switch (TREE_CODE (t))
2892 {
2893 CASE_CONVERT: case NON_LVALUE_EXPR:
2894 /* If we have conversions, we know that the alignment of the
2895 object must meet each of the alignments of the types. */
2896 align0 = expr_align (TREE_OPERAND (t, 0));
2897 align1 = TYPE_ALIGN (TREE_TYPE (t));
2898 return MAX (align0, align1);
2899
2900 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2901 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2902 case CLEANUP_POINT_EXPR:
2903 /* These don't change the alignment of an object. */
2904 return expr_align (TREE_OPERAND (t, 0));
2905
2906 case COND_EXPR:
2907 /* The best we can do is say that the alignment is the least aligned
2908 of the two arms. */
2909 align0 = expr_align (TREE_OPERAND (t, 1));
2910 align1 = expr_align (TREE_OPERAND (t, 2));
2911 return MIN (align0, align1);
2912
2913 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2914 meaningfully; it's always 1. */
2915 case LABEL_DECL: case CONST_DECL:
2916 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2917 case FUNCTION_DECL:
2918 gcc_assert (DECL_ALIGN (t) != 0);
2919 return DECL_ALIGN (t);
2920
2921 default:
2922 break;
2923 }
2924
2925 /* Otherwise take the alignment from that of the type. */
2926 return TYPE_ALIGN (TREE_TYPE (t));
2927 }
2928 \f
2929 /* Return, as a tree node, the number of elements for TYPE (which is an
2930 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2931
2932 tree
2933 array_type_nelts (const_tree type)
2934 {
2935 tree index_type, min, max;
2936
2937 /* If they did it with unspecified bounds, then we should have already
2938 given an error about it before we got here. */
2939 if (! TYPE_DOMAIN (type))
2940 return error_mark_node;
2941
2942 index_type = TYPE_DOMAIN (type);
2943 min = TYPE_MIN_VALUE (index_type);
2944 max = TYPE_MAX_VALUE (index_type);
2945
2946 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2947 if (!max)
2948 return error_mark_node;
2949
2950 return (integer_zerop (min)
2951 ? max
2952 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2953 }
2954 \f
2955 /* If arg is static -- a reference to an object in static storage -- then
2956 return the object. This is not the same as the C meaning of `static'.
2957 If arg isn't static, return NULL. */
2958
2959 tree
2960 staticp (tree arg)
2961 {
2962 switch (TREE_CODE (arg))
2963 {
2964 case FUNCTION_DECL:
2965 /* Nested functions are static, even though taking their address will
2966 involve a trampoline as we unnest the nested function and create
2967 the trampoline on the tree level. */
2968 return arg;
2969
2970 case VAR_DECL:
2971 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2972 && ! DECL_THREAD_LOCAL_P (arg)
2973 && ! DECL_DLLIMPORT_P (arg)
2974 ? arg : NULL);
2975
2976 case CONST_DECL:
2977 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2978 ? arg : NULL);
2979
2980 case CONSTRUCTOR:
2981 return TREE_STATIC (arg) ? arg : NULL;
2982
2983 case LABEL_DECL:
2984 case STRING_CST:
2985 return arg;
2986
2987 case COMPONENT_REF:
2988 /* If the thing being referenced is not a field, then it is
2989 something language specific. */
2990 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2991
2992 /* If we are referencing a bitfield, we can't evaluate an
2993 ADDR_EXPR at compile time and so it isn't a constant. */
2994 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2995 return NULL;
2996
2997 return staticp (TREE_OPERAND (arg, 0));
2998
2999 case BIT_FIELD_REF:
3000 return NULL;
3001
3002 case INDIRECT_REF:
3003 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3004
3005 case ARRAY_REF:
3006 case ARRAY_RANGE_REF:
3007 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3008 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3009 return staticp (TREE_OPERAND (arg, 0));
3010 else
3011 return NULL;
3012
3013 case COMPOUND_LITERAL_EXPR:
3014 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3015
3016 default:
3017 return NULL;
3018 }
3019 }
3020
3021 \f
3022
3023
3024 /* Return whether OP is a DECL whose address is function-invariant. */
3025
3026 bool
3027 decl_address_invariant_p (const_tree op)
3028 {
3029 /* The conditions below are slightly less strict than the one in
3030 staticp. */
3031
3032 switch (TREE_CODE (op))
3033 {
3034 case PARM_DECL:
3035 case RESULT_DECL:
3036 case LABEL_DECL:
3037 case FUNCTION_DECL:
3038 return true;
3039
3040 case VAR_DECL:
3041 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3042 || DECL_THREAD_LOCAL_P (op)
3043 || DECL_CONTEXT (op) == current_function_decl
3044 || decl_function_context (op) == current_function_decl)
3045 return true;
3046 break;
3047
3048 case CONST_DECL:
3049 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3050 || decl_function_context (op) == current_function_decl)
3051 return true;
3052 break;
3053
3054 default:
3055 break;
3056 }
3057
3058 return false;
3059 }
3060
3061 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3062
3063 bool
3064 decl_address_ip_invariant_p (const_tree op)
3065 {
3066 /* The conditions below are slightly less strict than the one in
3067 staticp. */
3068
3069 switch (TREE_CODE (op))
3070 {
3071 case LABEL_DECL:
3072 case FUNCTION_DECL:
3073 case STRING_CST:
3074 return true;
3075
3076 case VAR_DECL:
3077 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3078 && !DECL_DLLIMPORT_P (op))
3079 || DECL_THREAD_LOCAL_P (op))
3080 return true;
3081 break;
3082
3083 case CONST_DECL:
3084 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3085 return true;
3086 break;
3087
3088 default:
3089 break;
3090 }
3091
3092 return false;
3093 }
3094
3095
3096 /* Return true if T is function-invariant (internal function, does
3097 not handle arithmetic; that's handled in skip_simple_arithmetic and
3098 tree_invariant_p). */
3099
3100 static bool tree_invariant_p (tree t);
3101
3102 static bool
3103 tree_invariant_p_1 (tree t)
3104 {
3105 tree op;
3106
3107 if (TREE_CONSTANT (t)
3108 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3109 return true;
3110
3111 switch (TREE_CODE (t))
3112 {
3113 case SAVE_EXPR:
3114 return true;
3115
3116 case ADDR_EXPR:
3117 op = TREE_OPERAND (t, 0);
3118 while (handled_component_p (op))
3119 {
3120 switch (TREE_CODE (op))
3121 {
3122 case ARRAY_REF:
3123 case ARRAY_RANGE_REF:
3124 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3125 || TREE_OPERAND (op, 2) != NULL_TREE
3126 || TREE_OPERAND (op, 3) != NULL_TREE)
3127 return false;
3128 break;
3129
3130 case COMPONENT_REF:
3131 if (TREE_OPERAND (op, 2) != NULL_TREE)
3132 return false;
3133 break;
3134
3135 default:;
3136 }
3137 op = TREE_OPERAND (op, 0);
3138 }
3139
3140 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3141
3142 default:
3143 break;
3144 }
3145
3146 return false;
3147 }
3148
3149 /* Return true if T is function-invariant. */
3150
3151 static bool
3152 tree_invariant_p (tree t)
3153 {
3154 tree inner = skip_simple_arithmetic (t);
3155 return tree_invariant_p_1 (inner);
3156 }
3157
3158 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3159 Do this to any expression which may be used in more than one place,
3160 but must be evaluated only once.
3161
3162 Normally, expand_expr would reevaluate the expression each time.
3163 Calling save_expr produces something that is evaluated and recorded
3164 the first time expand_expr is called on it. Subsequent calls to
3165 expand_expr just reuse the recorded value.
3166
3167 The call to expand_expr that generates code that actually computes
3168 the value is the first call *at compile time*. Subsequent calls
3169 *at compile time* generate code to use the saved value.
3170 This produces correct result provided that *at run time* control
3171 always flows through the insns made by the first expand_expr
3172 before reaching the other places where the save_expr was evaluated.
3173 You, the caller of save_expr, must make sure this is so.
3174
3175 Constants, and certain read-only nodes, are returned with no
3176 SAVE_EXPR because that is safe. Expressions containing placeholders
3177 are not touched; see tree.def for an explanation of what these
3178 are used for. */
3179
3180 tree
3181 save_expr (tree expr)
3182 {
3183 tree t = fold (expr);
3184 tree inner;
3185
3186 /* If the tree evaluates to a constant, then we don't want to hide that
3187 fact (i.e. this allows further folding, and direct checks for constants).
3188 However, a read-only object that has side effects cannot be bypassed.
3189 Since it is no problem to reevaluate literals, we just return the
3190 literal node. */
3191 inner = skip_simple_arithmetic (t);
3192 if (TREE_CODE (inner) == ERROR_MARK)
3193 return inner;
3194
3195 if (tree_invariant_p_1 (inner))
3196 return t;
3197
3198 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3199 it means that the size or offset of some field of an object depends on
3200 the value within another field.
3201
3202 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3203 and some variable since it would then need to be both evaluated once and
3204 evaluated more than once. Front-ends must assure this case cannot
3205 happen by surrounding any such subexpressions in their own SAVE_EXPR
3206 and forcing evaluation at the proper time. */
3207 if (contains_placeholder_p (inner))
3208 return t;
3209
3210 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3211 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3212
3213 /* This expression might be placed ahead of a jump to ensure that the
3214 value was computed on both sides of the jump. So make sure it isn't
3215 eliminated as dead. */
3216 TREE_SIDE_EFFECTS (t) = 1;
3217 return t;
3218 }
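
/* Illustrative sketch of the intended use (not part of the original
   sources; X is a hypothetical expression that may have side effects):

       tree saved = save_expr (x);
       tree sq = fold_build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);

   Both operands of the MULT_EXPR share one SAVE_EXPR, so X is evaluated
   only once at run time even though it is referenced twice.  */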
3219
3220 /* Look inside EXPR into any simple arithmetic operations. Return the
3221 outermost non-arithmetic or non-invariant node. */
3222
3223 tree
3224 skip_simple_arithmetic (tree expr)
3225 {
3226 /* We don't care about whether this can be used as an lvalue in this
3227 context. */
3228 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3229 expr = TREE_OPERAND (expr, 0);
3230
3231 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3232 a constant, it will be more efficient to not make another SAVE_EXPR since
3233 it will allow better simplification and GCSE will be able to merge the
3234 computations if they actually occur. */
3235 while (true)
3236 {
3237 if (UNARY_CLASS_P (expr))
3238 expr = TREE_OPERAND (expr, 0);
3239 else if (BINARY_CLASS_P (expr))
3240 {
3241 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3242 expr = TREE_OPERAND (expr, 0);
3243 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3244 expr = TREE_OPERAND (expr, 1);
3245 else
3246 break;
3247 }
3248 else
3249 break;
3250 }
3251
3252 return expr;
3253 }
3254
3255 /* Look inside EXPR into simple arithmetic operations involving constants.
3256 Return the outermost non-arithmetic or non-constant node. */
3257
3258 tree
3259 skip_simple_constant_arithmetic (tree expr)
3260 {
3261 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3262 expr = TREE_OPERAND (expr, 0);
3263
3264 while (true)
3265 {
3266 if (UNARY_CLASS_P (expr))
3267 expr = TREE_OPERAND (expr, 0);
3268 else if (BINARY_CLASS_P (expr))
3269 {
3270 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3271 expr = TREE_OPERAND (expr, 0);
3272 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3273 expr = TREE_OPERAND (expr, 1);
3274 else
3275 break;
3276 }
3277 else
3278 break;
3279 }
3280
3281 return expr;
3282 }
3283
3284 /* Return which tree structure is used by T. */
3285
3286 enum tree_node_structure_enum
3287 tree_node_structure (const_tree t)
3288 {
3289 const enum tree_code code = TREE_CODE (t);
3290 return tree_node_structure_for_code (code);
3291 }
3292
3293 /* Set various status flags when building a CALL_EXPR object T. */
3294
3295 static void
3296 process_call_operands (tree t)
3297 {
3298 bool side_effects = TREE_SIDE_EFFECTS (t);
3299 bool read_only = false;
3300 int i = call_expr_flags (t);
3301
3302 /* Calls have side-effects, except those to const or pure functions. */
3303 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3304 side_effects = true;
3305 /* Propagate TREE_READONLY of arguments for const functions. */
3306 if (i & ECF_CONST)
3307 read_only = true;
3308
3309 if (!side_effects || read_only)
3310 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3311 {
3312 tree op = TREE_OPERAND (t, i);
3313 if (op && TREE_SIDE_EFFECTS (op))
3314 side_effects = true;
3315 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3316 read_only = false;
3317 }
3318
3319 TREE_SIDE_EFFECTS (t) = side_effects;
3320 TREE_READONLY (t) = read_only;
3321 }
3322 \f
3323 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3324 size or offset that depends on a field within a record. */
3325
3326 bool
3327 contains_placeholder_p (const_tree exp)
3328 {
3329 enum tree_code code;
3330
3331 if (!exp)
3332 return 0;
3333
3334 code = TREE_CODE (exp);
3335 if (code == PLACEHOLDER_EXPR)
3336 return 1;
3337
3338 switch (TREE_CODE_CLASS (code))
3339 {
3340 case tcc_reference:
3341 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3342 position computations since they will be converted into a
3343 WITH_RECORD_EXPR involving the reference, which we assume
3344 here will be valid. */
3345 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3346
3347 case tcc_exceptional:
3348 if (code == TREE_LIST)
3349 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3350 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3351 break;
3352
3353 case tcc_unary:
3354 case tcc_binary:
3355 case tcc_comparison:
3356 case tcc_expression:
3357 switch (code)
3358 {
3359 case COMPOUND_EXPR:
3360 /* Ignoring the first operand isn't quite right, but works best. */
3361 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3362
3363 case COND_EXPR:
3364 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3365 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3366 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3367
3368 case SAVE_EXPR:
3369 /* The save_expr function never wraps anything containing
3370 a PLACEHOLDER_EXPR. */
3371 return 0;
3372
3373 default:
3374 break;
3375 }
3376
3377 switch (TREE_CODE_LENGTH (code))
3378 {
3379 case 1:
3380 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3381 case 2:
3382 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3383 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3384 default:
3385 return 0;
3386 }
3387
3388 case tcc_vl_exp:
3389 switch (code)
3390 {
3391 case CALL_EXPR:
3392 {
3393 const_tree arg;
3394 const_call_expr_arg_iterator iter;
3395 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3396 if (CONTAINS_PLACEHOLDER_P (arg))
3397 return 1;
3398 return 0;
3399 }
3400 default:
3401 return 0;
3402 }
3403
3404 default:
3405 return 0;
3406 }
3407 return 0;
3408 }
3409
3410 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3411 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3412 field positions. */
3413
3414 static bool
3415 type_contains_placeholder_1 (const_tree type)
3416 {
3417 /* If the size contains a placeholder or the parent type (component type in
3418 the case of arrays) involves a placeholder, this type does. */
3419 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3420 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3421 || (!POINTER_TYPE_P (type)
3422 && TREE_TYPE (type)
3423 && type_contains_placeholder_p (TREE_TYPE (type))))
3424 return true;
3425
3426 /* Now do type-specific checks. Note that the last part of the check above
3427 greatly limits what we have to do below. */
3428 switch (TREE_CODE (type))
3429 {
3430 case VOID_TYPE:
3431 case POINTER_BOUNDS_TYPE:
3432 case COMPLEX_TYPE:
3433 case ENUMERAL_TYPE:
3434 case BOOLEAN_TYPE:
3435 case POINTER_TYPE:
3436 case OFFSET_TYPE:
3437 case REFERENCE_TYPE:
3438 case METHOD_TYPE:
3439 case FUNCTION_TYPE:
3440 case VECTOR_TYPE:
3441 case NULLPTR_TYPE:
3442 return false;
3443
3444 case INTEGER_TYPE:
3445 case REAL_TYPE:
3446 case FIXED_POINT_TYPE:
3447 /* Here we just check the bounds. */
3448 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3449 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3450
3451 case ARRAY_TYPE:
3452 /* We have already checked the component type above, so just check the
3453 domain type. */
3454 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3455
3456 case RECORD_TYPE:
3457 case UNION_TYPE:
3458 case QUAL_UNION_TYPE:
3459 {
3460 tree field;
3461
3462 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3463 if (TREE_CODE (field) == FIELD_DECL
3464 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3465 || (TREE_CODE (type) == QUAL_UNION_TYPE
3466 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3467 || type_contains_placeholder_p (TREE_TYPE (field))))
3468 return true;
3469
3470 return false;
3471 }
3472
3473 default:
3474 gcc_unreachable ();
3475 }
3476 }
3477
3478 /* Wrapper around above function used to cache its result. */
3479
3480 bool
3481 type_contains_placeholder_p (tree type)
3482 {
3483 bool result;
3484
3485 /* If the contains_placeholder_bits field has been initialized,
3486 then we know the answer. */
3487 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3488 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3489
3490 /* Indicate that we've seen this type node, and the answer is false.
3491 This is what we want to return if we run into recursion via fields. */
3492 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3493
3494 /* Compute the real value. */
3495 result = type_contains_placeholder_1 (type);
3496
3497 /* Store the real value. */
3498 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3499
3500 return result;
3501 }
3502 \f
3503 /* Push tree EXP onto vector QUEUE if it is not already present. */
3504
3505 static void
3506 push_without_duplicates (tree exp, vec<tree> *queue)
3507 {
3508 unsigned int i;
3509 tree iter;
3510
3511 FOR_EACH_VEC_ELT (*queue, i, iter)
3512 if (simple_cst_equal (iter, exp) == 1)
3513 break;
3514
3515 if (!iter)
3516 queue->safe_push (exp);
3517 }
3518
3519 /* Given a tree EXP, find all occurrences of references to fields
3520 in a PLACEHOLDER_EXPR and place them in vector REFS without
3521 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3522 we assume here that EXP contains only arithmetic expressions
3523 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3524 argument list. */
3525
3526 void
3527 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3528 {
3529 enum tree_code code = TREE_CODE (exp);
3530 tree inner;
3531 int i;
3532
3533 /* We handle TREE_LIST and COMPONENT_REF separately. */
3534 if (code == TREE_LIST)
3535 {
3536 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3537 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3538 }
3539 else if (code == COMPONENT_REF)
3540 {
3541 for (inner = TREE_OPERAND (exp, 0);
3542 REFERENCE_CLASS_P (inner);
3543 inner = TREE_OPERAND (inner, 0))
3544 ;
3545
3546 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3547 push_without_duplicates (exp, refs);
3548 else
3549 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3550 }
3551 else
3552 switch (TREE_CODE_CLASS (code))
3553 {
3554 case tcc_constant:
3555 break;
3556
3557 case tcc_declaration:
3558 /* Variables allocated to static storage can stay. */
3559 if (!TREE_STATIC (exp))
3560 push_without_duplicates (exp, refs);
3561 break;
3562
3563 case tcc_expression:
3564 /* This is the pattern built in ada/make_aligning_type. */
3565 if (code == ADDR_EXPR
3566 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3567 {
3568 push_without_duplicates (exp, refs);
3569 break;
3570 }
3571
3572 /* Fall through... */
3573
3574 case tcc_exceptional:
3575 case tcc_unary:
3576 case tcc_binary:
3577 case tcc_comparison:
3578 case tcc_reference:
3579 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3580 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3581 break;
3582
3583 case tcc_vl_exp:
3584 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3585 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3586 break;
3587
3588 default:
3589 gcc_unreachable ();
3590 }
3591 }
3592
3593 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3594 return a tree with all occurrences of references to F in a
3595 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3596 CONST_DECLs. Note that we assume here that EXP contains only
3597 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3598 occurring only in their argument list. */
3599
3600 tree
3601 substitute_in_expr (tree exp, tree f, tree r)
3602 {
3603 enum tree_code code = TREE_CODE (exp);
3604 tree op0, op1, op2, op3;
3605 tree new_tree;
3606
3607 /* We handle TREE_LIST and COMPONENT_REF separately. */
3608 if (code == TREE_LIST)
3609 {
3610 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3611 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3612 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3613 return exp;
3614
3615 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3616 }
3617 else if (code == COMPONENT_REF)
3618 {
3619 tree inner;
3620
3621 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3622 and it is the right field, replace it with R. */
3623 for (inner = TREE_OPERAND (exp, 0);
3624 REFERENCE_CLASS_P (inner);
3625 inner = TREE_OPERAND (inner, 0))
3626 ;
3627
3628 /* The field. */
3629 op1 = TREE_OPERAND (exp, 1);
3630
3631 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3632 return r;
3633
3634 /* If this expression hasn't been completed yet, leave it alone. */
3635 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3636 return exp;
3637
3638 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3639 if (op0 == TREE_OPERAND (exp, 0))
3640 return exp;
3641
3642 new_tree
3643 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3644 }
3645 else
3646 switch (TREE_CODE_CLASS (code))
3647 {
3648 case tcc_constant:
3649 return exp;
3650
3651 case tcc_declaration:
3652 if (exp == f)
3653 return r;
3654 else
3655 return exp;
3656
3657 case tcc_expression:
3658 if (exp == f)
3659 return r;
3660
3661 /* Fall through... */
3662
3663 case tcc_exceptional:
3664 case tcc_unary:
3665 case tcc_binary:
3666 case tcc_comparison:
3667 case tcc_reference:
3668 switch (TREE_CODE_LENGTH (code))
3669 {
3670 case 0:
3671 return exp;
3672
3673 case 1:
3674 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3675 if (op0 == TREE_OPERAND (exp, 0))
3676 return exp;
3677
3678 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3679 break;
3680
3681 case 2:
3682 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3683 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3684
3685 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3686 return exp;
3687
3688 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3689 break;
3690
3691 case 3:
3692 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3693 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3694 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3695
3696 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3697 && op2 == TREE_OPERAND (exp, 2))
3698 return exp;
3699
3700 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3701 break;
3702
3703 case 4:
3704 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3705 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3706 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3707 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3708
3709 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3710 && op2 == TREE_OPERAND (exp, 2)
3711 && op3 == TREE_OPERAND (exp, 3))
3712 return exp;
3713
3714 new_tree
3715 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3716 break;
3717
3718 default:
3719 gcc_unreachable ();
3720 }
3721 break;
3722
3723 case tcc_vl_exp:
3724 {
3725 int i;
3726
3727 new_tree = NULL_TREE;
3728
3729 /* If we are trying to replace F with a constant, inline back
3730 functions that do nothing other than compute a value from
3731 the arguments they are passed. This makes it possible to
3732 fold the replacement expression partially or entirely. */
3733 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3734 {
3735 tree t = maybe_inline_call_in_expr (exp);
3736 if (t)
3737 return SUBSTITUTE_IN_EXPR (t, f, r);
3738 }
3739
3740 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3741 {
3742 tree op = TREE_OPERAND (exp, i);
3743 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3744 if (new_op != op)
3745 {
3746 if (!new_tree)
3747 new_tree = copy_node (exp);
3748 TREE_OPERAND (new_tree, i) = new_op;
3749 }
3750 }
3751
3752 if (new_tree)
3753 {
3754 new_tree = fold (new_tree);
3755 if (TREE_CODE (new_tree) == CALL_EXPR)
3756 process_call_operands (new_tree);
3757 }
3758 else
3759 return exp;
3760 }
3761 break;
3762
3763 default:
3764 gcc_unreachable ();
3765 }
3766
3767 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3768
3769 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3770 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3771
3772 return new_tree;
3773 }
3774
3775 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3776 for it within OBJ, a tree that is an object or a chain of references. */
3777
3778 tree
3779 substitute_placeholder_in_expr (tree exp, tree obj)
3780 {
3781 enum tree_code code = TREE_CODE (exp);
3782 tree op0, op1, op2, op3;
3783 tree new_tree;
3784
3785 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3786 in the chain of OBJ. */
3787 if (code == PLACEHOLDER_EXPR)
3788 {
3789 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3790 tree elt;
3791
3792 for (elt = obj; elt != 0;
3793 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3794 || TREE_CODE (elt) == COND_EXPR)
3795 ? TREE_OPERAND (elt, 1)
3796 : (REFERENCE_CLASS_P (elt)
3797 || UNARY_CLASS_P (elt)
3798 || BINARY_CLASS_P (elt)
3799 || VL_EXP_CLASS_P (elt)
3800 || EXPRESSION_CLASS_P (elt))
3801 ? TREE_OPERAND (elt, 0) : 0))
3802 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3803 return elt;
3804
3805 for (elt = obj; elt != 0;
3806 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3807 || TREE_CODE (elt) == COND_EXPR)
3808 ? TREE_OPERAND (elt, 1)
3809 : (REFERENCE_CLASS_P (elt)
3810 || UNARY_CLASS_P (elt)
3811 || BINARY_CLASS_P (elt)
3812 || VL_EXP_CLASS_P (elt)
3813 || EXPRESSION_CLASS_P (elt))
3814 ? TREE_OPERAND (elt, 0) : 0))
3815 if (POINTER_TYPE_P (TREE_TYPE (elt))
3816 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3817 == need_type))
3818 return fold_build1 (INDIRECT_REF, need_type, elt);
3819
3820 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3821 survives until RTL generation, there will be an error. */
3822 return exp;
3823 }
3824
3825 /* TREE_LIST is special because we need to look at TREE_VALUE
3826 and TREE_CHAIN, not TREE_OPERANDS. */
3827 else if (code == TREE_LIST)
3828 {
3829 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3830 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3831 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3832 return exp;
3833
3834 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3835 }
3836 else
3837 switch (TREE_CODE_CLASS (code))
3838 {
3839 case tcc_constant:
3840 case tcc_declaration:
3841 return exp;
3842
3843 case tcc_exceptional:
3844 case tcc_unary:
3845 case tcc_binary:
3846 case tcc_comparison:
3847 case tcc_expression:
3848 case tcc_reference:
3849 case tcc_statement:
3850 switch (TREE_CODE_LENGTH (code))
3851 {
3852 case 0:
3853 return exp;
3854
3855 case 1:
3856 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3857 if (op0 == TREE_OPERAND (exp, 0))
3858 return exp;
3859
3860 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3861 break;
3862
3863 case 2:
3864 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3865 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3866
3867 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3868 return exp;
3869
3870 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3871 break;
3872
3873 case 3:
3874 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3875 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3876 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3877
3878 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3879 && op2 == TREE_OPERAND (exp, 2))
3880 return exp;
3881
3882 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3883 break;
3884
3885 case 4:
3886 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3887 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3888 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3889 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3890
3891 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3892 && op2 == TREE_OPERAND (exp, 2)
3893 && op3 == TREE_OPERAND (exp, 3))
3894 return exp;
3895
3896 new_tree
3897 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3898 break;
3899
3900 default:
3901 gcc_unreachable ();
3902 }
3903 break;
3904
3905 case tcc_vl_exp:
3906 {
3907 int i;
3908
3909 new_tree = NULL_TREE;
3910
3911 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3912 {
3913 tree op = TREE_OPERAND (exp, i);
3914 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3915 if (new_op != op)
3916 {
3917 if (!new_tree)
3918 new_tree = copy_node (exp);
3919 TREE_OPERAND (new_tree, i) = new_op;
3920 }
3921 }
3922
3923 if (new_tree)
3924 {
3925 new_tree = fold (new_tree);
3926 if (TREE_CODE (new_tree) == CALL_EXPR)
3927 process_call_operands (new_tree);
3928 }
3929 else
3930 return exp;
3931 }
3932 break;
3933
3934 default:
3935 gcc_unreachable ();
3936 }
3937
3938 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3939
3940 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3941 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3942
3943 return new_tree;
3944 }
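
/* Illustrative sketch (not from the original sources): a size or offset
   expression for a variable-sized record may contain a PLACEHOLDER_EXPR
   standing for "the containing object".  Given a reference OBJ to a
   concrete object of that record type and a hypothetical FIELD, the
   usual call is

       tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_SIZE (field), obj);

   where the macro form short-circuits for null or constant expressions
   and otherwise lands in the function above.  */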
3945 \f
3946
3947 /* Subroutine of stabilize_reference; this is called for subtrees of
3948 references. Any expression with side-effects must be put in a SAVE_EXPR
3949 to ensure that it is only evaluated once.
3950
3951 We don't put SAVE_EXPR nodes around everything, because assigning very
3952 simple expressions to temporaries causes us to miss good opportunities
3953 for optimizations. Among other things, the opportunity to fold in the
3954 addition of a constant into an addressing mode often gets lost, e.g.
3955 "y[i+1] += x;". In general, we take the approach that we should not make
3956 an assignment unless we are forced into it - i.e., that any non-side effect
3957 operator should be allowed, and that cse should take care of coalescing
3958 multiple utterances of the same expression should that prove fruitful. */
3959
3960 static tree
3961 stabilize_reference_1 (tree e)
3962 {
3963 tree result;
3964 enum tree_code code = TREE_CODE (e);
3965
3966 /* We cannot ignore const expressions because it might be a reference
3967 to a const array whose index contains side-effects. But we can
3968 ignore things that are actually constant or that have already been
3969 handled by this function. */
3970
3971 if (tree_invariant_p (e))
3972 return e;
3973
3974 switch (TREE_CODE_CLASS (code))
3975 {
3976 case tcc_exceptional:
3977 case tcc_type:
3978 case tcc_declaration:
3979 case tcc_comparison:
3980 case tcc_statement:
3981 case tcc_expression:
3982 case tcc_reference:
3983 case tcc_vl_exp:
3984 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3985 so that it will only be evaluated once. */
3986 /* The reference (r) and comparison (<) classes could be handled as
3987 below, but it is generally faster to only evaluate them once. */
3988 if (TREE_SIDE_EFFECTS (e))
3989 return save_expr (e);
3990 return e;
3991
3992 case tcc_constant:
3993 /* Constants need no processing. In fact, we should never reach
3994 here. */
3995 return e;
3996
3997 case tcc_binary:
3998 /* Division is slow and tends to be compiled with jumps,
3999 especially the division by powers of 2 that is often
4000 found inside of an array reference. So do it just once. */
4001 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4002 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4003 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4004 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4005 return save_expr (e);
4006 /* Recursively stabilize each operand. */
4007 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4008 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4009 break;
4010
4011 case tcc_unary:
4012 /* Recursively stabilize each operand. */
4013 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4014 break;
4015
4016 default:
4017 gcc_unreachable ();
4018 }
4019
4020 TREE_TYPE (result) = TREE_TYPE (e);
4021 TREE_READONLY (result) = TREE_READONLY (e);
4022 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4023 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4024
4025 return result;
4026 }
4027
4028 /* Stabilize a reference so that we can use it any number of times
4029 without causing its operands to be evaluated more than once.
4030 Returns the stabilized reference. This works by means of save_expr,
4031 so see the caveats in the comments about save_expr.
4032
4033 Also allows conversion expressions whose operands are references.
4034 Any other kind of expression is returned unchanged. */
4035
4036 tree
4037 stabilize_reference (tree ref)
4038 {
4039 tree result;
4040 enum tree_code code = TREE_CODE (ref);
4041
4042 switch (code)
4043 {
4044 case VAR_DECL:
4045 case PARM_DECL:
4046 case RESULT_DECL:
4047 /* No action is needed in this case. */
4048 return ref;
4049
4050 CASE_CONVERT:
4051 case FLOAT_EXPR:
4052 case FIX_TRUNC_EXPR:
4053 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4054 break;
4055
4056 case INDIRECT_REF:
4057 result = build_nt (INDIRECT_REF,
4058 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4059 break;
4060
4061 case COMPONENT_REF:
4062 result = build_nt (COMPONENT_REF,
4063 stabilize_reference (TREE_OPERAND (ref, 0)),
4064 TREE_OPERAND (ref, 1), NULL_TREE);
4065 break;
4066
4067 case BIT_FIELD_REF:
4068 result = build_nt (BIT_FIELD_REF,
4069 stabilize_reference (TREE_OPERAND (ref, 0)),
4070 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4071 break;
4072
4073 case ARRAY_REF:
4074 result = build_nt (ARRAY_REF,
4075 stabilize_reference (TREE_OPERAND (ref, 0)),
4076 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4077 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4078 break;
4079
4080 case ARRAY_RANGE_REF:
4081 result = build_nt (ARRAY_RANGE_REF,
4082 stabilize_reference (TREE_OPERAND (ref, 0)),
4083 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4084 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4085 break;
4086
4087 case COMPOUND_EXPR:
4088 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4089 it wouldn't be ignored. This matters when dealing with
4090 volatiles. */
4091 return stabilize_reference_1 (ref);
4092
4093 /* If arg isn't a kind of lvalue we recognize, make no change.
4094 Caller should recognize the error for an invalid lvalue. */
4095 default:
4096 return ref;
4097
4098 case ERROR_MARK:
4099 return error_mark_node;
4100 }
4101
4102 TREE_TYPE (result) = TREE_TYPE (ref);
4103 TREE_READONLY (result) = TREE_READONLY (ref);
4104 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4105 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4106
4107 return result;
4108 }
4109 \f
4110 /* Low-level constructors for expressions. */
4111
4112 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4113 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4114
4115 void
4116 recompute_tree_invariant_for_addr_expr (tree t)
4117 {
4118 tree node;
4119 bool tc = true, se = false;
4120
4121 /* We started out assuming this address is both invariant and constant, but
4122 does not have side effects. Now go down any handled components and see if
4123 any of them involve offsets that are either non-constant or non-invariant.
4124 Also check for side-effects.
4125
4126 ??? Note that this code makes no attempt to deal with the case where
4127 taking the address of something causes a copy due to misalignment. */
4128
4129 #define UPDATE_FLAGS(NODE) \
4130 do { tree _node = (NODE); \
4131 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4132 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4133
4134 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4135 node = TREE_OPERAND (node, 0))
4136 {
4137 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4138 array reference (probably made temporarily by the G++ front end),
4139 so ignore all the operands. */
4140 if ((TREE_CODE (node) == ARRAY_REF
4141 || TREE_CODE (node) == ARRAY_RANGE_REF)
4142 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4143 {
4144 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4145 if (TREE_OPERAND (node, 2))
4146 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4147 if (TREE_OPERAND (node, 3))
4148 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4149 }
4150 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4151 FIELD_DECL, apparently. The G++ front end can put something else
4152 there, at least temporarily. */
4153 else if (TREE_CODE (node) == COMPONENT_REF
4154 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4155 {
4156 if (TREE_OPERAND (node, 2))
4157 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4158 }
4159 }
4160
4161 node = lang_hooks.expr_to_decl (node, &tc, &se);
4162
4163 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4164 the address, since &(*a)->b is a form of addition. If it's a constant, the
4165 address is constant too. If it's a decl, its address is constant if the
4166 decl is static. Everything else is not constant and, furthermore,
4167 taking the address of a volatile variable is not volatile. */
4168 if (TREE_CODE (node) == INDIRECT_REF
4169 || TREE_CODE (node) == MEM_REF)
4170 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4171 else if (CONSTANT_CLASS_P (node))
4172 ;
4173 else if (DECL_P (node))
4174 tc &= (staticp (node) != NULL_TREE);
4175 else
4176 {
4177 tc = false;
4178 se |= TREE_SIDE_EFFECTS (node);
4179 }
4180
4181
4182 TREE_CONSTANT (t) = tc;
4183 TREE_SIDE_EFFECTS (t) = se;
4184 #undef UPDATE_FLAGS
4185 }
4186
4187 /* Build an expression of code CODE, data type TYPE, and operands as
4188 specified. Expressions and reference nodes can be created this way.
4189 Constants, decls, types and misc nodes cannot be.
4190
4191 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4192 enough for all extant tree codes. */
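/* An illustrative sketch (operands A and B of integer_type_node are
   assumed; they are not part of this file):

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree neg = build1 (NEGATE_EXPR, integer_type_node, sum);

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of the result are
   derived from the operands; see PROCESS_ARG and build1_stat below.  */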
4193
4194 tree
4195 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4196 {
4197 tree t;
4198
4199 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4200
4201 t = make_node_stat (code PASS_MEM_STAT);
4202 TREE_TYPE (t) = tt;
4203
4204 return t;
4205 }
4206
4207 tree
4208 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4209 {
4210 int length = sizeof (struct tree_exp);
4211 tree t;
4212
4213 record_node_allocation_statistics (code, length);
4214
4215 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4216
4217 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4218
4219 memset (t, 0, sizeof (struct tree_common));
4220
4221 TREE_SET_CODE (t, code);
4222
4223 TREE_TYPE (t) = type;
4224 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4225 TREE_OPERAND (t, 0) = node;
4226 if (node && !TYPE_P (node))
4227 {
4228 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4229 TREE_READONLY (t) = TREE_READONLY (node);
4230 }
4231
4232 if (TREE_CODE_CLASS (code) == tcc_statement)
4233 TREE_SIDE_EFFECTS (t) = 1;
4234 else switch (code)
4235 {
4236 case VA_ARG_EXPR:
4237 /* All of these have side-effects, no matter what their
4238 operands are. */
4239 TREE_SIDE_EFFECTS (t) = 1;
4240 TREE_READONLY (t) = 0;
4241 break;
4242
4243 case INDIRECT_REF:
4244 /* Whether a dereference is readonly has nothing to do with whether
4245 its operand is readonly. */
4246 TREE_READONLY (t) = 0;
4247 break;
4248
4249 case ADDR_EXPR:
4250 if (node)
4251 recompute_tree_invariant_for_addr_expr (t);
4252 break;
4253
4254 default:
4255 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4256 && node && !TYPE_P (node)
4257 && TREE_CONSTANT (node))
4258 TREE_CONSTANT (t) = 1;
4259 if (TREE_CODE_CLASS (code) == tcc_reference
4260 && node && TREE_THIS_VOLATILE (node))
4261 TREE_THIS_VOLATILE (t) = 1;
4262 break;
4263 }
4264
4265 return t;
4266 }
4267
4268 #define PROCESS_ARG(N) \
4269 do { \
4270 TREE_OPERAND (t, N) = arg##N; \
4271 if (arg##N && !TYPE_P (arg##N)) \
4272 { \
4273 if (TREE_SIDE_EFFECTS (arg##N)) \
4274 side_effects = 1; \
4275 if (!TREE_READONLY (arg##N) \
4276 && !CONSTANT_CLASS_P (arg##N)) \
4277 (void) (read_only = 0); \
4278 if (!TREE_CONSTANT (arg##N)) \
4279 (void) (constant = 0); \
4280 } \
4281 } while (0)
4282
4283 tree
4284 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4285 {
4286 bool constant, read_only, side_effects;
4287 tree t;
4288
4289 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4290
4291 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4292 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4293 /* When sizetype precision doesn't match that of pointers
4294 we need to be able to build explicit extensions or truncations
4295 of the offset argument. */
4296 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4297 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4298 && TREE_CODE (arg1) == INTEGER_CST);
4299
4300 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4301 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4302 && ptrofftype_p (TREE_TYPE (arg1)));
4303
4304 t = make_node_stat (code PASS_MEM_STAT);
4305 TREE_TYPE (t) = tt;
4306
4307 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4308 result based on those same flags for the arguments. But if the
4309 arguments aren't really even `tree' expressions, we shouldn't be trying
4310 to do this. */
4311
4312 /* Expressions without side effects may be constant if their
4313 arguments are as well. */
4314 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4315 || TREE_CODE_CLASS (code) == tcc_binary);
4316 read_only = 1;
4317 side_effects = TREE_SIDE_EFFECTS (t);
4318
4319 PROCESS_ARG (0);
4320 PROCESS_ARG (1);
4321
4322 TREE_READONLY (t) = read_only;
4323 TREE_CONSTANT (t) = constant;
4324 TREE_SIDE_EFFECTS (t) = side_effects;
4325 TREE_THIS_VOLATILE (t)
4326 = (TREE_CODE_CLASS (code) == tcc_reference
4327 && arg0 && TREE_THIS_VOLATILE (arg0));
4328
4329 return t;
4330 }
4331
4332
4333 tree
4334 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4335 tree arg2 MEM_STAT_DECL)
4336 {
4337 bool constant, read_only, side_effects;
4338 tree t;
4339
4340 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4341 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4342
4343 t = make_node_stat (code PASS_MEM_STAT);
4344 TREE_TYPE (t) = tt;
4345
4346 read_only = 1;
4347
4348 /* As a special exception, if COND_EXPR has NULL branches, we
4349 assume that it is a gimple statement and always consider
4350 it to have side effects. */
4351 if (code == COND_EXPR
4352 && tt == void_type_node
4353 && arg1 == NULL_TREE
4354 && arg2 == NULL_TREE)
4355 side_effects = true;
4356 else
4357 side_effects = TREE_SIDE_EFFECTS (t);
4358
4359 PROCESS_ARG (0);
4360 PROCESS_ARG (1);
4361 PROCESS_ARG (2);
4362
4363 if (code == COND_EXPR)
4364 TREE_READONLY (t) = read_only;
4365
4366 TREE_SIDE_EFFECTS (t) = side_effects;
4367 TREE_THIS_VOLATILE (t)
4368 = (TREE_CODE_CLASS (code) == tcc_reference
4369 && arg0 && TREE_THIS_VOLATILE (arg0));
4370
4371 return t;
4372 }
4373
4374 tree
4375 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4376 tree arg2, tree arg3 MEM_STAT_DECL)
4377 {
4378 bool constant, read_only, side_effects;
4379 tree t;
4380
4381 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4382
4383 t = make_node_stat (code PASS_MEM_STAT);
4384 TREE_TYPE (t) = tt;
4385
4386 side_effects = TREE_SIDE_EFFECTS (t);
4387
4388 PROCESS_ARG (0);
4389 PROCESS_ARG (1);
4390 PROCESS_ARG (2);
4391 PROCESS_ARG (3);
4392
4393 TREE_SIDE_EFFECTS (t) = side_effects;
4394 TREE_THIS_VOLATILE (t)
4395 = (TREE_CODE_CLASS (code) == tcc_reference
4396 && arg0 && TREE_THIS_VOLATILE (arg0));
4397
4398 return t;
4399 }
4400
4401 tree
4402 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4403 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4404 {
4405 bool constant, read_only, side_effects;
4406 tree t;
4407
4408 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4409
4410 t = make_node_stat (code PASS_MEM_STAT);
4411 TREE_TYPE (t) = tt;
4412
4413 side_effects = TREE_SIDE_EFFECTS (t);
4414
4415 PROCESS_ARG (0);
4416 PROCESS_ARG (1);
4417 PROCESS_ARG (2);
4418 PROCESS_ARG (3);
4419 PROCESS_ARG (4);
4420
4421 TREE_SIDE_EFFECTS (t) = side_effects;
4422 TREE_THIS_VOLATILE (t)
4423 = (TREE_CODE_CLASS (code) == tcc_reference
4424 && arg0 && TREE_THIS_VOLATILE (arg0));
4425
4426 return t;
4427 }
4428
4429 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4430 on the pointer PTR. */
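/* A minimal usage sketch (PTR is assumed to be some pointer-valued tree,
   e.g. an SSA name; it is not defined here):

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   The result has type TREE_TYPE (TREE_TYPE (ptr)) and a zero offset,
   unless PTR is an ADDR_EXPR that collapses to a base plus constant
   offset as handled below.  */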
4431
4432 tree
4433 build_simple_mem_ref_loc (location_t loc, tree ptr)
4434 {
4435 HOST_WIDE_INT offset = 0;
4436 tree ptype = TREE_TYPE (ptr);
4437 tree tem;
4438 /* For convenience allow addresses that collapse to a simple base
4439 and offset. */
4440 if (TREE_CODE (ptr) == ADDR_EXPR
4441 && (handled_component_p (TREE_OPERAND (ptr, 0))
4442 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4443 {
4444 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4445 gcc_assert (ptr);
4446 ptr = build_fold_addr_expr (ptr);
4447 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4448 }
4449 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4450 ptr, build_int_cst (ptype, offset));
4451 SET_EXPR_LOCATION (tem, loc);
4452 return tem;
4453 }
4454
4455 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4456
4457 offset_int
4458 mem_ref_offset (const_tree t)
4459 {
4460 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4461 }
4462
4463 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4464 offsetted by OFFSET units. */
4465
4466 tree
4467 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4468 {
4469 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4470 build_fold_addr_expr (base),
4471 build_int_cst (ptr_type_node, offset));
4472 tree addr = build1 (ADDR_EXPR, type, ref);
4473 recompute_tree_invariant_for_addr_expr (addr);
4474 return addr;
4475 }
4476
4477 /* Similar except don't specify the TREE_TYPE
4478 and leave the TREE_SIDE_EFFECTS as 0.
4479 It is permissible for arguments to be null,
4480 or even garbage if their values do not matter. */
4481
4482 tree
4483 build_nt (enum tree_code code, ...)
4484 {
4485 tree t;
4486 int length;
4487 int i;
4488 va_list p;
4489
4490 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4491
4492 va_start (p, code);
4493
4494 t = make_node (code);
4495 length = TREE_CODE_LENGTH (code);
4496
4497 for (i = 0; i < length; i++)
4498 TREE_OPERAND (t, i) = va_arg (p, tree);
4499
4500 va_end (p);
4501 return t;
4502 }
4503
4504 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4505 tree vec. */
4506
4507 tree
4508 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4509 {
4510 tree ret, t;
4511 unsigned int ix;
4512
4513 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4514 CALL_EXPR_FN (ret) = fn;
4515 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4516 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4517 CALL_EXPR_ARG (ret, ix) = t;
4518 return ret;
4519 }
4520 \f
4521 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4522 We do NOT enter this node in any sort of symbol table.
4523
4524 LOC is the location of the decl.
4525
4526 layout_decl is used to set up the decl's storage layout.
4527 Other slots are initialized to 0 or null pointers. */
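/* A minimal usage sketch (the name "tmp" is purely illustrative):

     tree id = get_identifier ("tmp");
     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
                            integer_type_node);

   Since the code is VAR_DECL, layout_decl is called to fill in the
   decl's size and alignment.  */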
4528
4529 tree
4530 build_decl_stat (location_t loc, enum tree_code code, tree name,
4531 tree type MEM_STAT_DECL)
4532 {
4533 tree t;
4534
4535 t = make_node_stat (code PASS_MEM_STAT);
4536 DECL_SOURCE_LOCATION (t) = loc;
4537
4538 /* if (type == error_mark_node)
4539 type = integer_type_node; */
4540 /* That is not done, deliberately, so that having error_mark_node
4541 as the type can suppress useless errors in the use of this variable. */
4542
4543 DECL_NAME (t) = name;
4544 TREE_TYPE (t) = type;
4545
4546 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4547 layout_decl (t, 0);
4548
4549 return t;
4550 }
4551
4552 /* Builds and returns function declaration with NAME and TYPE. */
4553
4554 tree
4555 build_fn_decl (const char *name, tree type)
4556 {
4557 tree id = get_identifier (name);
4558 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4559
4560 DECL_EXTERNAL (decl) = 1;
4561 TREE_PUBLIC (decl) = 1;
4562 DECL_ARTIFICIAL (decl) = 1;
4563 TREE_NOTHROW (decl) = 1;
4564
4565 return decl;
4566 }
4567
4568 vec<tree, va_gc> *all_translation_units;
4569
4570 /* Builds a new translation-unit decl with name NAME, queues it in the
4571 global list of translation-unit decls and returns it. */
4572
4573 tree
4574 build_translation_unit_decl (tree name)
4575 {
4576 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4577 name, NULL_TREE);
4578 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4579 vec_safe_push (all_translation_units, tu);
4580 return tu;
4581 }
4582
4583 \f
4584 /* BLOCK nodes are used to represent the structure of binding contours
4585 and declarations, once those contours have been exited and their contents
4586 compiled. This information is used for outputting debugging info. */
4587
4588 tree
4589 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4590 {
4591 tree block = make_node (BLOCK);
4592
4593 BLOCK_VARS (block) = vars;
4594 BLOCK_SUBBLOCKS (block) = subblocks;
4595 BLOCK_SUPERCONTEXT (block) = supercontext;
4596 BLOCK_CHAIN (block) = chain;
4597 return block;
4598 }
4599
4600 \f
4601 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4602
4603 LOC is the location to use in tree T. */
4604
4605 void
4606 protected_set_expr_location (tree t, location_t loc)
4607 {
4608 if (CAN_HAVE_LOCATION_P (t))
4609 SET_EXPR_LOCATION (t, loc);
4610 }
4611 \f
4612 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4613 is ATTRIBUTE. */
4614
4615 tree
4616 build_decl_attribute_variant (tree ddecl, tree attribute)
4617 {
4618 DECL_ATTRIBUTES (ddecl) = attribute;
4619 return ddecl;
4620 }
4621
4622 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4623 is ATTRIBUTE and its qualifiers are QUALS.
4624
4625 Record such modified types already made so we don't make duplicates. */
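/* A minimal usage sketch (the "packed" attribute is chosen only for
   illustration):

     tree attrs = tree_cons (get_identifier ("packed"), NULL_TREE,
                             TYPE_ATTRIBUTES (type));
     tree variant = build_type_attribute_qual_variant (type, attrs,
                                                       TYPE_QUALS (type));

   For tagged types (struct, union, enum) the attributes are ignored with
   a warning and only the qualifiers are applied, as implemented below.  */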
4626
4627 tree
4628 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4629 {
4630 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4631 {
4632 inchash::hash hstate;
4633 tree ntype;
4634 int i;
4635 tree t;
4636 enum tree_code code = TREE_CODE (ttype);
4637
4638 /* Building a distinct copy of a tagged type is inappropriate; it
4639 causes breakage in code that expects there to be a one-to-one
4640 relationship between a struct and its fields.
4641 build_duplicate_type is another solution (as used in
4642 handle_transparent_union_attribute), but that doesn't play well
4643 with the stronger C++ type identity model. */
4644 if (TREE_CODE (ttype) == RECORD_TYPE
4645 || TREE_CODE (ttype) == UNION_TYPE
4646 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4647 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4648 {
4649 warning (OPT_Wattributes,
4650 "ignoring attributes applied to %qT after definition",
4651 TYPE_MAIN_VARIANT (ttype));
4652 return build_qualified_type (ttype, quals);
4653 }
4654
4655 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4656 ntype = build_distinct_type_copy (ttype);
4657
4658 TYPE_ATTRIBUTES (ntype) = attribute;
4659
4660 hstate.add_int (code);
4661 if (TREE_TYPE (ntype))
4662 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4663 attribute_hash_list (attribute, hstate);
4664
4665 switch (TREE_CODE (ntype))
4666 {
4667 case FUNCTION_TYPE:
4668 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4669 break;
4670 case ARRAY_TYPE:
4671 if (TYPE_DOMAIN (ntype))
4672 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4673 break;
4674 case INTEGER_TYPE:
4675 t = TYPE_MAX_VALUE (ntype);
4676 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4677 hstate.add_object (TREE_INT_CST_ELT (t, i));
4678 break;
4679 case REAL_TYPE:
4680 case FIXED_POINT_TYPE:
4681 {
4682 unsigned int precision = TYPE_PRECISION (ntype);
4683 hstate.add_object (precision);
4684 }
4685 break;
4686 default:
4687 break;
4688 }
4689
4690 ntype = type_hash_canon (hstate.end(), ntype);
4691
4692 /* If the target-dependent attributes make NTYPE different from
4693 its canonical type, we will need to use structural equality
4694 checks for this type. */
4695 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4696 || !comp_type_attributes (ntype, ttype))
4697 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4698 else if (TYPE_CANONICAL (ntype) == ntype)
4699 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4700
4701 ttype = build_qualified_type (ntype, quals);
4702 }
4703 else if (TYPE_QUALS (ttype) != quals)
4704 ttype = build_qualified_type (ttype, quals);
4705
4706 return ttype;
4707 }
4708
4709 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4710 the same. */
4711
4712 static bool
4713 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4714 {
4715 tree cl1, cl2;
4716 for (cl1 = clauses1, cl2 = clauses2;
4717 cl1 && cl2;
4718 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4719 {
4720 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4721 return false;
4722 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4723 {
4724 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4725 OMP_CLAUSE_DECL (cl2)) != 1)
4726 return false;
4727 }
4728 switch (OMP_CLAUSE_CODE (cl1))
4729 {
4730 case OMP_CLAUSE_ALIGNED:
4731 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4732 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4733 return false;
4734 break;
4735 case OMP_CLAUSE_LINEAR:
4736 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4737 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4738 return false;
4739 break;
4740 case OMP_CLAUSE_SIMDLEN:
4741 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4742 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4743 return false;
4744 default:
4745 break;
4746 }
4747 }
4748 return true;
4749 }
4750
4751 /* Compare two constructor-element-type constants. Return true if the
4752 lists are known to be equal; otherwise return false. */
4753
4754 static bool
4755 simple_cst_list_equal (const_tree l1, const_tree l2)
4756 {
4757 while (l1 != NULL_TREE && l2 != NULL_TREE)
4758 {
4759 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4760 return false;
4761
4762 l1 = TREE_CHAIN (l1);
4763 l2 = TREE_CHAIN (l2);
4764 }
4765
4766 return l1 == l2;
4767 }
4768
4769 /* Compare two attributes for their value identity. Return true if the
4770 attribute values are known to be equal; otherwise return false.
4771 */
4772
4773 static bool
4774 attribute_value_equal (const_tree attr1, const_tree attr2)
4775 {
4776 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4777 return true;
4778
4779 if (TREE_VALUE (attr1) != NULL_TREE
4780 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4781 && TREE_VALUE (attr2) != NULL
4782 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4783 return (simple_cst_list_equal (TREE_VALUE (attr1),
4784 TREE_VALUE (attr2)) == 1);
4785
4786 if ((flag_openmp || flag_openmp_simd)
4787 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4788 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4789 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4790 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4791 TREE_VALUE (attr2));
4792
4793 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4794 }
4795
4796 /* Return 0 if the attributes for two types are incompatible, 1 if they
4797 are compatible, and 2 if they are nearly compatible (which causes a
4798 warning to be generated). */
4799 int
4800 comp_type_attributes (const_tree type1, const_tree type2)
4801 {
4802 const_tree a1 = TYPE_ATTRIBUTES (type1);
4803 const_tree a2 = TYPE_ATTRIBUTES (type2);
4804 const_tree a;
4805
4806 if (a1 == a2)
4807 return 1;
4808 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4809 {
4810 const struct attribute_spec *as;
4811 const_tree attr;
4812
4813 as = lookup_attribute_spec (get_attribute_name (a));
4814 if (!as || as->affects_type_identity == false)
4815 continue;
4816
4817 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4818 if (!attr || !attribute_value_equal (a, attr))
4819 break;
4820 }
4821 if (!a)
4822 {
4823 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4824 {
4825 const struct attribute_spec *as;
4826
4827 as = lookup_attribute_spec (get_attribute_name (a));
4828 if (!as || as->affects_type_identity == false)
4829 continue;
4830
4831 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4832 break;
4833 /* We don't need to compare trees again, as we did this
4834 already in first loop. */
4835 }
4836 /* All identity-affecting attributes are equal, so there is
4837 no need to call the target hook for comparison. */
4838 if (!a)
4839 return 1;
4840 }
4841 /* As some type combinations - like default calling-convention - might
4842 be compatible, we have to call the target hook to get the final result. */
4843 return targetm.comp_type_attributes (type1, type2);
4844 }
4845
4846 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4847 is ATTRIBUTE.
4848
4849 Record such modified types already made so we don't make duplicates. */
4850
4851 tree
4852 build_type_attribute_variant (tree ttype, tree attribute)
4853 {
4854 return build_type_attribute_qual_variant (ttype, attribute,
4855 TYPE_QUALS (ttype));
4856 }
4857
4858
4859 /* Reset the expression *EXPR_P, a size or position.
4860
4861 ??? We could reset all non-constant sizes or positions. But it's cheap
4862 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4863
4864 We need to reset self-referential sizes or positions because they cannot
4865 be gimplified and thus can contain a CALL_EXPR after the gimplification
4866 is finished, which will run afoul of LTO streaming. And they need to be
4867 reset to something essentially dummy but not constant, so as to preserve
4868 the properties of the object they are attached to. */
4869
4870 static inline void
4871 free_lang_data_in_one_sizepos (tree *expr_p)
4872 {
4873 tree expr = *expr_p;
4874 if (CONTAINS_PLACEHOLDER_P (expr))
4875 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4876 }
4877
4878
4879 /* Reset all the fields in a binfo node BINFO. We only keep
4880 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4881
4882 static void
4883 free_lang_data_in_binfo (tree binfo)
4884 {
4885 unsigned i;
4886 tree t;
4887
4888 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4889
4890 BINFO_VIRTUALS (binfo) = NULL_TREE;
4891 BINFO_BASE_ACCESSES (binfo) = NULL;
4892 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4893 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4894
4895 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4896 free_lang_data_in_binfo (t);
4897 }
4898
4899
4900 /* Reset all language specific information still present in TYPE. */
4901
4902 static void
4903 free_lang_data_in_type (tree type)
4904 {
4905 gcc_assert (TYPE_P (type));
4906
4907 /* Give the FE a chance to remove its own data first. */
4908 lang_hooks.free_lang_data (type);
4909
4910 TREE_LANG_FLAG_0 (type) = 0;
4911 TREE_LANG_FLAG_1 (type) = 0;
4912 TREE_LANG_FLAG_2 (type) = 0;
4913 TREE_LANG_FLAG_3 (type) = 0;
4914 TREE_LANG_FLAG_4 (type) = 0;
4915 TREE_LANG_FLAG_5 (type) = 0;
4916 TREE_LANG_FLAG_6 (type) = 0;
4917
4918 if (TREE_CODE (type) == FUNCTION_TYPE)
4919 {
4920 /* Remove the const and volatile qualifiers from arguments. The
4921 C++ front end removes them, but the C front end does not,
4922 leading to false ODR violation errors when merging two
4923 instances of the same function signature compiled by
4924 different front ends. */
4925 tree p;
4926
4927 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4928 {
4929 tree arg_type = TREE_VALUE (p);
4930
4931 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4932 {
4933 int quals = TYPE_QUALS (arg_type)
4934 & ~TYPE_QUAL_CONST
4935 & ~TYPE_QUAL_VOLATILE;
4936 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4937 free_lang_data_in_type (TREE_VALUE (p));
4938 }
4939 }
4940 }
4941
4942 /* Remove members that are not actually FIELD_DECLs from the field
4943 list of an aggregate. These occur in C++. */
4944 if (RECORD_OR_UNION_TYPE_P (type))
4945 {
4946 tree prev, member;
4947
4948 /* Note that TYPE_FIELDS can be shared across distinct
4949 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4950 to be removed, we cannot set its TREE_CHAIN to NULL.
4951 Otherwise, we would not be able to find all the other fields
4952 in the other instances of this TREE_TYPE.
4953
4954 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4955 prev = NULL_TREE;
4956 member = TYPE_FIELDS (type);
4957 while (member)
4958 {
4959 if (TREE_CODE (member) == FIELD_DECL
4960 || TREE_CODE (member) == TYPE_DECL)
4961 {
4962 if (prev)
4963 TREE_CHAIN (prev) = member;
4964 else
4965 TYPE_FIELDS (type) = member;
4966 prev = member;
4967 }
4968
4969 member = TREE_CHAIN (member);
4970 }
4971
4972 if (prev)
4973 TREE_CHAIN (prev) = NULL_TREE;
4974 else
4975 TYPE_FIELDS (type) = NULL_TREE;
4976
4977 TYPE_METHODS (type) = NULL_TREE;
4978 if (TYPE_BINFO (type))
4979 free_lang_data_in_binfo (TYPE_BINFO (type));
4980 }
4981 else
4982 {
4983 /* For non-aggregate types, clear out the language slot (which
4984 overloads TYPE_BINFO). */
4985 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4986
4987 if (INTEGRAL_TYPE_P (type)
4988 || SCALAR_FLOAT_TYPE_P (type)
4989 || FIXED_POINT_TYPE_P (type))
4990 {
4991 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4992 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4993 }
4994 }
4995
4996 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4997 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4998
4999 if (TYPE_CONTEXT (type)
5000 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5001 {
5002 tree ctx = TYPE_CONTEXT (type);
5003 do
5004 {
5005 ctx = BLOCK_SUPERCONTEXT (ctx);
5006 }
5007 while (ctx && TREE_CODE (ctx) == BLOCK);
5008 TYPE_CONTEXT (type) = ctx;
5009 }
5010 }
5011
5012
5013 /* Return true if DECL may need an assembler name to be set. */
5014
5015 static inline bool
5016 need_assembler_name_p (tree decl)
5017 {
5018 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5019 merging. */
5020 if (flag_lto_odr_type_mering
5021 && TREE_CODE (decl) == TYPE_DECL
5022 && DECL_NAME (decl)
5023 && decl == TYPE_NAME (TREE_TYPE (decl))
5024 && !is_lang_specific (TREE_TYPE (decl))
5025 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5026 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5027 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5028 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5029 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5030 if (TREE_CODE (decl) != FUNCTION_DECL
5031 && TREE_CODE (decl) != VAR_DECL)
5032 return false;
5033
5034 /* If DECL already has its assembler name set, it does not need a
5035 new one. */
5036 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5037 || DECL_ASSEMBLER_NAME_SET_P (decl))
5038 return false;
5039
5040 /* Abstract decls do not need an assembler name. */
5041 if (DECL_ABSTRACT_P (decl))
5042 return false;
5043
5044 /* For VAR_DECLs, only static, public and external symbols need an
5045 assembler name. */
5046 if (TREE_CODE (decl) == VAR_DECL
5047 && !TREE_STATIC (decl)
5048 && !TREE_PUBLIC (decl)
5049 && !DECL_EXTERNAL (decl))
5050 return false;
5051
5052 if (TREE_CODE (decl) == FUNCTION_DECL)
5053 {
5054 /* Do not set assembler name on builtins. Allow RTL expansion to
5055 decide whether to expand inline or via a regular call. */
5056 if (DECL_BUILT_IN (decl)
5057 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5058 return false;
5059
5060 /* Functions represented in the callgraph need an assembler name. */
5061 if (cgraph_node::get (decl) != NULL)
5062 return true;
5063
5064 /* Unused and not public functions don't need an assembler name. */
5065 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5066 return false;
5067 }
5068
5069 return true;
5070 }
5071
5072
5073 /* Reset all language specific information still present in symbol
5074 DECL. */
5075
5076 static void
5077 free_lang_data_in_decl (tree decl)
5078 {
5079 gcc_assert (DECL_P (decl));
5080
5081 /* Give the FE a chance to remove its own data first. */
5082 lang_hooks.free_lang_data (decl);
5083
5084 TREE_LANG_FLAG_0 (decl) = 0;
5085 TREE_LANG_FLAG_1 (decl) = 0;
5086 TREE_LANG_FLAG_2 (decl) = 0;
5087 TREE_LANG_FLAG_3 (decl) = 0;
5088 TREE_LANG_FLAG_4 (decl) = 0;
5089 TREE_LANG_FLAG_5 (decl) = 0;
5090 TREE_LANG_FLAG_6 (decl) = 0;
5091
5092 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5093 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5094 if (TREE_CODE (decl) == FIELD_DECL)
5095 {
5096 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5097 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5098 DECL_QUALIFIER (decl) = NULL_TREE;
5099 }
5100
5101 if (TREE_CODE (decl) == FUNCTION_DECL)
5102 {
5103 struct cgraph_node *node;
5104 if (!(node = cgraph_node::get (decl))
5105 || (!node->definition && !node->clones))
5106 {
5107 if (node)
5108 node->release_body ();
5109 else
5110 {
5111 release_function_body (decl);
5112 DECL_ARGUMENTS (decl) = NULL;
5113 DECL_RESULT (decl) = NULL;
5114 DECL_INITIAL (decl) = error_mark_node;
5115 }
5116 }
5117 if (gimple_has_body_p (decl))
5118 {
5119 tree t;
5120
5121 /* If DECL has a gimple body, then the context for its
5122 arguments must be DECL. Otherwise, it doesn't really
5123 matter, as we will not be emitting any code for DECL. In
5124 general, there may be other instances of DECL created by
5125 the front end and since PARM_DECLs are generally shared,
5126 their DECL_CONTEXT changes as the replicas of DECL are
5127 created. The only time where DECL_CONTEXT is important
5128 is for the FUNCTION_DECLs that have a gimple body (since
5129 the PARM_DECL will be used in the function's body). */
5130 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5131 DECL_CONTEXT (t) = decl;
5132 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5133 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5134 = target_option_default_node;
5135 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5136 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5137 = optimization_default_node;
5138 }
5139
5140 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5141 At this point, it is not needed anymore. */
5142 DECL_SAVED_TREE (decl) = NULL_TREE;
5143
5144 /* Clear the abstract origin if it refers to a method. Otherwise
5145 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5146 origin will not be output correctly. */
5147 if (DECL_ABSTRACT_ORIGIN (decl)
5148 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5149 && RECORD_OR_UNION_TYPE_P
5150 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5151 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5152
5153 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5154 DECL_VINDEX referring to itself into a vtable slot number as it
5155 should. Happens with functions that are copied and then forgotten
5156 about. Just clear it, it won't matter anymore. */
5157 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5158 DECL_VINDEX (decl) = NULL_TREE;
5159 }
5160 else if (TREE_CODE (decl) == VAR_DECL)
5161 {
5162 if ((DECL_EXTERNAL (decl)
5163 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5164 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5165 DECL_INITIAL (decl) = NULL_TREE;
5166 }
5167 else if (TREE_CODE (decl) == TYPE_DECL
5168 || TREE_CODE (decl) == FIELD_DECL)
5169 DECL_INITIAL (decl) = NULL_TREE;
5170 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5171 && DECL_INITIAL (decl)
5172 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5173 {
5174 /* Strip builtins from the translation-unit BLOCK. We still have targets
5175 without builtin_decl_explicit support, and builtins are shared nodes,
5176 so we can't use TREE_CHAIN in multiple lists. */
5177 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5178 while (*nextp)
5179 {
5180 tree var = *nextp;
5181 if (TREE_CODE (var) == FUNCTION_DECL
5182 && DECL_BUILT_IN (var))
5183 *nextp = TREE_CHAIN (var);
5184 else
5185 nextp = &TREE_CHAIN (var);
5186 }
5187 }
5188 }
5189
5190
5191 /* Data used when collecting DECLs and TYPEs for language data removal. */
5192
5193 struct free_lang_data_d
5194 {
5195 /* Worklist to avoid excessive recursion. */
5196 vec<tree> worklist;
5197
5198 /* Set of traversed objects. Used to avoid duplicate visits. */
5199 hash_set<tree> *pset;
5200
5201 /* Array of symbols to process with free_lang_data_in_decl. */
5202 vec<tree> decls;
5203
5204 /* Array of types to process with free_lang_data_in_type. */
5205 vec<tree> types;
5206 };
5207
5208
5209 /* Save all language fields needed to generate proper debug information
5210 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5211
5212 static void
5213 save_debug_info_for_decl (tree t)
5214 {
5215 /*struct saved_debug_info_d *sdi;*/
5216
5217 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5218
5219 /* FIXME. Partial implementation for saving debug info removed. */
5220 }
5221
5222
5223 /* Save all language fields needed to generate proper debug information
5224 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5225
5226 static void
5227 save_debug_info_for_type (tree t)
5228 {
5229 /*struct saved_debug_info_d *sdi;*/
5230
5231 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5232
5233 /* FIXME. Partial implementation for saving debug info removed. */
5234 }
5235
5236
5237 /* Add type or decl T to one of the list of tree nodes that need their
5238 language data removed. The lists are held inside FLD. */
5239
5240 static void
5241 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5242 {
5243 if (DECL_P (t))
5244 {
5245 fld->decls.safe_push (t);
5246 if (debug_info_level > DINFO_LEVEL_TERSE)
5247 save_debug_info_for_decl (t);
5248 }
5249 else if (TYPE_P (t))
5250 {
5251 fld->types.safe_push (t);
5252 if (debug_info_level > DINFO_LEVEL_TERSE)
5253 save_debug_info_for_type (t);
5254 }
5255 else
5256 gcc_unreachable ();
5257 }
5258
5259 /* Push tree node T into FLD->WORKLIST. */
5260
5261 static inline void
5262 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5263 {
5264 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5265 fld->worklist.safe_push ((t));
5266 }
5267
5268
5269 /* Operand callback helper for free_lang_data_in_node. *TP is the
5270 subtree operand being considered. */
5271
5272 static tree
5273 find_decls_types_r (tree *tp, int *ws, void *data)
5274 {
5275 tree t = *tp;
5276 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5277
5278 if (TREE_CODE (t) == TREE_LIST)
5279 return NULL_TREE;
5280
5281 /* Language specific nodes will be removed, so there is no need
5282 to gather anything under them. */
5283 if (is_lang_specific (t))
5284 {
5285 *ws = 0;
5286 return NULL_TREE;
5287 }
5288
5289 if (DECL_P (t))
5290 {
5291 /* Note that walk_tree does not traverse every possible field in
5292 decls, so we have to do our own traversals here. */
5293 add_tree_to_fld_list (t, fld);
5294
5295 fld_worklist_push (DECL_NAME (t), fld);
5296 fld_worklist_push (DECL_CONTEXT (t), fld);
5297 fld_worklist_push (DECL_SIZE (t), fld);
5298 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5299
5300 /* We are going to remove everything under DECL_INITIAL for
5301 TYPE_DECLs. No point walking them. */
5302 if (TREE_CODE (t) != TYPE_DECL)
5303 fld_worklist_push (DECL_INITIAL (t), fld);
5304
5305 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5306 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5307
5308 if (TREE_CODE (t) == FUNCTION_DECL)
5309 {
5310 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5311 fld_worklist_push (DECL_RESULT (t), fld);
5312 }
5313 else if (TREE_CODE (t) == TYPE_DECL)
5314 {
5315 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5316 }
5317 else if (TREE_CODE (t) == FIELD_DECL)
5318 {
5319 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5320 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5321 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5322 fld_worklist_push (DECL_FCONTEXT (t), fld);
5323 }
5324
5325 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5326 && DECL_HAS_VALUE_EXPR_P (t))
5327 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5328
5329 if (TREE_CODE (t) != FIELD_DECL
5330 && TREE_CODE (t) != TYPE_DECL)
5331 fld_worklist_push (TREE_CHAIN (t), fld);
5332 *ws = 0;
5333 }
5334 else if (TYPE_P (t))
5335 {
5336 /* Note that walk_tree does not traverse every possible field in
5337 types, so we have to do our own traversals here. */
5338 add_tree_to_fld_list (t, fld);
5339
5340 if (!RECORD_OR_UNION_TYPE_P (t))
5341 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5342 fld_worklist_push (TYPE_SIZE (t), fld);
5343 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5344 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5345 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5346 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5347 fld_worklist_push (TYPE_NAME (t), fld);
5348 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5349 them and thus do not want to reach unused pointer types
5350 this way. */
5351 if (!POINTER_TYPE_P (t))
5352 fld_worklist_push (TYPE_MINVAL (t), fld);
5353 if (!RECORD_OR_UNION_TYPE_P (t))
5354 fld_worklist_push (TYPE_MAXVAL (t), fld);
5355 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5356 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5357 do not want to reach unused variants this way. */
5358 if (TYPE_CONTEXT (t))
5359 {
5360 tree ctx = TYPE_CONTEXT (t);
5361 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5362 So push that instead. */
5363 while (ctx && TREE_CODE (ctx) == BLOCK)
5364 ctx = BLOCK_SUPERCONTEXT (ctx);
5365 fld_worklist_push (ctx, fld);
5366 }
5367 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5368 want to reach unused types this way. */
5369
5370 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5371 {
5372 unsigned i;
5373 tree tem;
5374 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5375 fld_worklist_push (TREE_TYPE (tem), fld);
5376 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5377 if (tem
5378 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5379 && TREE_CODE (tem) == TREE_LIST)
5380 do
5381 {
5382 fld_worklist_push (TREE_VALUE (tem), fld);
5383 tem = TREE_CHAIN (tem);
5384 }
5385 while (tem);
5386 }
5387 if (RECORD_OR_UNION_TYPE_P (t))
5388 {
5389 tree tem;
5390 /* Push all TYPE_FIELDS - there can be interleaving interesting
5391 and non-interesting things. */
5392 tem = TYPE_FIELDS (t);
5393 while (tem)
5394 {
5395 if (TREE_CODE (tem) == FIELD_DECL
5396 || TREE_CODE (tem) == TYPE_DECL)
5397 fld_worklist_push (tem, fld);
5398 tem = TREE_CHAIN (tem);
5399 }
5400 }
5401
5402 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5403 *ws = 0;
5404 }
5405 else if (TREE_CODE (t) == BLOCK)
5406 {
5407 tree tem;
5408 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5409 fld_worklist_push (tem, fld);
5410 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5411 fld_worklist_push (tem, fld);
5412 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5413 }
5414
5415 if (TREE_CODE (t) != IDENTIFIER_NODE
5416 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5417 fld_worklist_push (TREE_TYPE (t), fld);
5418
5419 return NULL_TREE;
5420 }
5421
5422
5423 /* Find decls and types in T. */
5424
5425 static void
5426 find_decls_types (tree t, struct free_lang_data_d *fld)
5427 {
5428 while (1)
5429 {
5430 if (!fld->pset->contains (t))
5431 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5432 if (fld->worklist.is_empty ())
5433 break;
5434 t = fld->worklist.pop ();
5435 }
5436 }
5437
5438 /* Translate all the types in LIST into their corresponding runtime
5439 types. */
5440
5441 static tree
5442 get_eh_types_for_runtime (tree list)
5443 {
5444 tree head, prev;
5445
5446 if (list == NULL_TREE)
5447 return NULL_TREE;
5448
5449 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5450 prev = head;
5451 list = TREE_CHAIN (list);
5452 while (list)
5453 {
5454 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5455 TREE_CHAIN (prev) = n;
5456 prev = TREE_CHAIN (prev);
5457 list = TREE_CHAIN (list);
5458 }
5459
5460 return head;
5461 }
5462
5463
5464 /* Find decls and types referenced in EH region R and store them in
5465 FLD->DECLS and FLD->TYPES. */
5466
5467 static void
5468 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5469 {
5470 switch (r->type)
5471 {
5472 case ERT_CLEANUP:
5473 break;
5474
5475 case ERT_TRY:
5476 {
5477 eh_catch c;
5478
5479 /* The types referenced in each catch must first be changed to the
5480 EH types used at runtime. This removes references to FE types
5481 in the region. */
5482 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5483 {
5484 c->type_list = get_eh_types_for_runtime (c->type_list);
5485 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5486 }
5487 }
5488 break;
5489
5490 case ERT_ALLOWED_EXCEPTIONS:
5491 r->u.allowed.type_list
5492 = get_eh_types_for_runtime (r->u.allowed.type_list);
5493 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5494 break;
5495
5496 case ERT_MUST_NOT_THROW:
5497 walk_tree (&r->u.must_not_throw.failure_decl,
5498 find_decls_types_r, fld, fld->pset);
5499 break;
5500 }
5501 }
5502
5503
5504 /* Find decls and types referenced in cgraph node N and store them in
5505 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5506 look for *every* kind of DECL and TYPE node reachable from N,
5507 including those embedded inside types and decls (i.e., TYPE_DECLs,
5508 NAMESPACE_DECLs, etc). */
5509
5510 static void
5511 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5512 {
5513 basic_block bb;
5514 struct function *fn;
5515 unsigned ix;
5516 tree t;
5517
5518 find_decls_types (n->decl, fld);
5519
5520 if (!gimple_has_body_p (n->decl))
5521 return;
5522
5523 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5524
5525 fn = DECL_STRUCT_FUNCTION (n->decl);
5526
5527 /* Traverse locals. */
5528 FOR_EACH_LOCAL_DECL (fn, ix, t)
5529 find_decls_types (t, fld);
5530
5531 /* Traverse EH regions in FN. */
5532 {
5533 eh_region r;
5534 FOR_ALL_EH_REGION_FN (r, fn)
5535 find_decls_types_in_eh_region (r, fld);
5536 }
5537
5538 /* Traverse every statement in FN. */
5539 FOR_EACH_BB_FN (bb, fn)
5540 {
5541 gphi_iterator psi;
5542 gimple_stmt_iterator si;
5543 unsigned i;
5544
5545 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5546 {
5547 gphi *phi = psi.phi ();
5548
5549 for (i = 0; i < gimple_phi_num_args (phi); i++)
5550 {
5551 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5552 find_decls_types (*arg_p, fld);
5553 }
5554 }
5555
5556 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5557 {
5558 gimple stmt = gsi_stmt (si);
5559
5560 if (is_gimple_call (stmt))
5561 find_decls_types (gimple_call_fntype (stmt), fld);
5562
5563 for (i = 0; i < gimple_num_ops (stmt); i++)
5564 {
5565 tree arg = gimple_op (stmt, i);
5566 find_decls_types (arg, fld);
5567 }
5568 }
5569 }
5570 }
5571
5572
5573 /* Find decls and types referenced in varpool node N and store them in
5574 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5575 look for *every* kind of DECL and TYPE node reachable from N,
5576 including those embedded inside types and decls (i.e., TYPE_DECLs,
5577 NAMESPACE_DECLs, etc). */
5578
5579 static void
5580 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5581 {
5582 find_decls_types (v->decl, fld);
5583 }
5584
5585 /* If T needs an assembler name, have one created for it. */
5586
5587 void
5588 assign_assembler_name_if_neeeded (tree t)
5589 {
5590 if (need_assembler_name_p (t))
5591 {
5592 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5593 diagnostics that use input_location to show locus
5594 information. The problem here is that, at this point,
5595 input_location is generally anchored to the end of the file
5596 (since the parser is long gone), so we don't have a good
5597 position to pin it to.
5598
5599 To alleviate this problem, this uses the location of T's
5600 declaration. Examples of this are
5601 testsuite/g++.dg/template/cond2.C and
5602 testsuite/g++.dg/template/pr35240.C. */
5603 location_t saved_location = input_location;
5604 input_location = DECL_SOURCE_LOCATION (t);
5605
5606 decl_assembler_name (t);
5607
5608 input_location = saved_location;
5609 }
5610 }
5611
5612
5613 /* Free language specific information for every operand and expression
5614 in every node of the call graph. This process operates in three stages:
5615
5616 1- Every callgraph node and varpool node is traversed looking for
5617 decls and types embedded in them. This is a more exhaustive
5618 search than that done by find_referenced_vars, because it will
5619 also collect individual fields, decls embedded in types, etc.
5620
5621 2- All the decls found are sent to free_lang_data_in_decl.
5622
5623 3- All the types found are sent to free_lang_data_in_type.
5624
5625 The ordering between decls and types is important because
5626 free_lang_data_in_decl sets assembler names, which includes
5627 mangling. So types cannot be freed up until assembler names have
5628 been set up. */
5629
5630 static void
5631 free_lang_data_in_cgraph (void)
5632 {
5633 struct cgraph_node *n;
5634 varpool_node *v;
5635 struct free_lang_data_d fld;
5636 tree t;
5637 unsigned i;
5638 alias_pair *p;
5639
5640 /* Initialize sets and arrays to store referenced decls and types. */
5641 fld.pset = new hash_set<tree>;
5642 fld.worklist.create (0);
5643 fld.decls.create (100);
5644 fld.types.create (100);
5645
5646 /* Find decls and types in the body of every function in the callgraph. */
5647 FOR_EACH_FUNCTION (n)
5648 find_decls_types_in_node (n, &fld);
5649
5650 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5651 find_decls_types (p->decl, &fld);
5652
5653 /* Find decls and types in every varpool symbol. */
5654 FOR_EACH_VARIABLE (v)
5655 find_decls_types_in_var (v, &fld);
5656
5657 /* Set the assembler name on every decl found. We need to do this
5658 now because free_lang_data_in_decl will invalidate data needed
5659 for mangling. This breaks mangling on interdependent decls. */
5660 FOR_EACH_VEC_ELT (fld.decls, i, t)
5661 assign_assembler_name_if_neeeded (t);
5662
5663 /* Traverse every decl found freeing its language data. */
5664 FOR_EACH_VEC_ELT (fld.decls, i, t)
5665 free_lang_data_in_decl (t);
5666
5667 /* Traverse every type found freeing its language data. */
5668 FOR_EACH_VEC_ELT (fld.types, i, t)
5669 free_lang_data_in_type (t);
5670
5671 delete fld.pset;
5672 fld.worklist.release ();
5673 fld.decls.release ();
5674 fld.types.release ();
5675 }
5676
5677
5678 /* Free resources that are used by FE but are not needed once they are done. */
5679
5680 static unsigned
5681 free_lang_data (void)
5682 {
5683 unsigned i;
5684
5685 /* If we are the LTO frontend we have freed lang-specific data already. */
5686 if (in_lto_p
5687 || !flag_generate_lto)
5688 return 0;
5689
5690 /* Allocate and assign alias sets to the standard integer types
5691 while the slots are still in the way the frontends generated them. */
5692 for (i = 0; i < itk_none; ++i)
5693 if (integer_types[i])
5694 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5695
5696 /* Traverse the IL resetting language specific information for
5697 operands, expressions, etc. */
5698 free_lang_data_in_cgraph ();
5699
5700 /* Create gimple variants for common types. */
5701 ptrdiff_type_node = integer_type_node;
5702 fileptr_type_node = ptr_type_node;
5703
5704 /* Reset some langhooks. Do not reset types_compatible_p, it may
5705 still be used indirectly via the get_alias_set langhook. */
5706 lang_hooks.dwarf_name = lhd_dwarf_name;
5707 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5708 /* We do not want the default decl_assembler_name implementation.
5709 Ideally, once everything is fixed, we would install a wrapper around
5710 it that asserts all non-local symbols already have their assembler
5711 names and only produces assembler names for local symbols. Or, better
5712 still, we would never call decl_assembler_name on local symbols at all
5713 and devise a separate, middle-end private scheme for it. */
5714
5715 /* Reset diagnostic machinery. */
5716 tree_diagnostics_defaults (global_dc);
5717
5718 return 0;
5719 }
5720
5721
5722 namespace {
5723
5724 const pass_data pass_data_ipa_free_lang_data =
5725 {
5726 SIMPLE_IPA_PASS, /* type */
5727 "*free_lang_data", /* name */
5728 OPTGROUP_NONE, /* optinfo_flags */
5729 TV_IPA_FREE_LANG_DATA, /* tv_id */
5730 0, /* properties_required */
5731 0, /* properties_provided */
5732 0, /* properties_destroyed */
5733 0, /* todo_flags_start */
5734 0, /* todo_flags_finish */
5735 };
5736
5737 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5738 {
5739 public:
5740 pass_ipa_free_lang_data (gcc::context *ctxt)
5741 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5742 {}
5743
5744 /* opt_pass methods: */
5745 virtual unsigned int execute (function *) { return free_lang_data (); }
5746
5747 }; // class pass_ipa_free_lang_data
5748
5749 } // anon namespace
5750
5751 simple_ipa_opt_pass *
5752 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5753 {
5754 return new pass_ipa_free_lang_data (ctxt);
5755 }
5756
5757 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5758 ATTR_NAME. Also used internally by remove_attribute(). */
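/* For example (an illustrative sketch), both of these calls return true:

     private_is_attribute_p ("packed", 6, get_identifier ("packed"));
     private_is_attribute_p ("packed", 6, get_identifier ("__packed__"));

   because IDENT may carry leading and trailing "__" while ATTR_NAME
   never does.  */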
5759 bool
5760 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5761 {
5762 size_t ident_len = IDENTIFIER_LENGTH (ident);
5763
5764 if (ident_len == attr_len)
5765 {
5766 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5767 return true;
5768 }
5769 else if (ident_len == attr_len + 4)
5770 {
5771 /* There is the possibility that ATTR is 'text' and IDENT is
5772 '__text__'. */
5773 const char *p = IDENTIFIER_POINTER (ident);
5774 if (p[0] == '_' && p[1] == '_'
5775 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5776 && strncmp (attr_name, p + 2, attr_len) == 0)
5777 return true;
5778 }
5779
5780 return false;
5781 }
5782
5783 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5784 of ATTR_NAME, and LIST is not NULL_TREE. */
5785 tree
5786 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5787 {
5788 while (list)
5789 {
5790 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5791
5792 if (ident_len == attr_len)
5793 {
5794 if (!strcmp (attr_name,
5795 IDENTIFIER_POINTER (get_attribute_name (list))))
5796 break;
5797 }
5798 /* TODO: If we made sure that attributes were stored in the
5799 canonical form without '__...__' (ie, as in 'text' as opposed
5800 to '__text__') then we could avoid the following case. */
5801 else if (ident_len == attr_len + 4)
5802 {
5803 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5804 if (p[0] == '_' && p[1] == '_'
5805 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5806 && strncmp (attr_name, p + 2, attr_len) == 0)
5807 break;
5808 }
5809 list = TREE_CHAIN (list);
5810 }
5811
5812 return list;
5813 }
5814
5815 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5816 return a pointer to the first element of LIST whose attribute name
5817 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5818 '__text__'). */
5819
5820 tree
5821 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5822 tree list)
5823 {
5824 while (list)
5825 {
5826 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5827
5828 if (attr_len > ident_len)
5829 {
5830 list = TREE_CHAIN (list);
5831 continue;
5832 }
5833
5834 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5835
5836 if (strncmp (attr_name, p, attr_len) == 0)
5837 break;
5838
5839 /* TODO: If we made sure that attributes were stored in the
5840 canonical form without '__...__' (ie, as in 'text' as opposed
5841 to '__text__') then we could avoid the following case. */
5842 if (p[0] == '_' && p[1] == '_' &&
5843 strncmp (attr_name, p + 2, attr_len) == 0)
5844 break;
5845
5846 list = TREE_CHAIN (list);
5847 }
5848
5849 return list;
5850 }
5851
5852
5853 /* A variant of lookup_attribute() that can be used with an identifier
5854 as the first argument, and where the identifier can be either
5855 'text' or '__text__'.
5856
5857 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5858 return a pointer to the attribute's list element if the attribute
5859 is part of the list, or NULL_TREE if not found. If the attribute
5860 appears more than once, this only returns the first occurrence; the
5861 TREE_CHAIN of the return value should be passed back in if further
5862 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5863 can be in the form 'text' or '__text__'. */
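/* A minimal sketch (LIST is assumed to be an existing attribute list):

     tree a = lookup_ident_attribute (get_identifier ("noinline"), list);

   The same entry is found whether LIST stores the name as "noinline" or
   "__noinline__"; further occurrences can be reached by passing
   TREE_CHAIN (a) back in.  */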
5864 static tree
5865 lookup_ident_attribute (tree attr_identifier, tree list)
5866 {
5867 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5868
5869 while (list)
5870 {
5871 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5872 == IDENTIFIER_NODE);
5873
5874 /* Identifiers can be compared directly for equality. */
5875 if (attr_identifier == get_attribute_name (list))
5876 break;
5877
5878 /* If they are not equal, they may still be one in the form
5879 'text' while the other one is in the form '__text__'. TODO:
5880 If we were storing attributes in normalized 'text' form, then
5881 this could all go away and we could take full advantage of
5882 the fact that we're comparing identifiers. :-) */
5883 {
5884 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5885 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5886
5887 if (ident_len == attr_len + 4)
5888 {
5889 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5890 const char *q = IDENTIFIER_POINTER (attr_identifier);
5891 if (p[0] == '_' && p[1] == '_'
5892 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5893 && strncmp (q, p + 2, attr_len) == 0)
5894 break;
5895 }
5896 else if (ident_len + 4 == attr_len)
5897 {
5898 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5899 const char *q = IDENTIFIER_POINTER (attr_identifier);
5900 if (q[0] == '_' && q[1] == '_'
5901 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5902 && strncmp (q + 2, p, ident_len) == 0)
5903 break;
5904 }
5905 }
5906 list = TREE_CHAIN (list);
5907 }
5908
5909 return list;
5910 }
5911
5912 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5913 modified list. */
5914
5915 tree
5916 remove_attribute (const char *attr_name, tree list)
5917 {
5918 tree *p;
5919 size_t attr_len = strlen (attr_name);
5920
5921 gcc_checking_assert (attr_name[0] != '_');
5922
5923 for (p = &list; *p; )
5924 {
5925 tree l = *p;
5926 /* TODO: If we were storing attributes in normalized form, here
5927 we could use a simple strcmp(). */
5928 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5929 *p = TREE_CHAIN (l);
5930 else
5931 p = &TREE_CHAIN (l);
5932 }
5933
5934 return list;
5935 }
5936
5937 /* Return an attribute list that is the union of a1 and a2. */
5938
5939 tree
5940 merge_attributes (tree a1, tree a2)
5941 {
5942 tree attributes;
5943
5944 /* Either one unset? Take the set one. */
5945
5946 if ((attributes = a1) == 0)
5947 attributes = a2;
5948
5949 /* One that completely contains the other? Take it. */
5950
5951 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5952 {
5953 if (attribute_list_contained (a2, a1))
5954 attributes = a2;
5955 else
5956 {
5957 /* Pick the longest list, and hang on the other list. */
5958
5959 if (list_length (a1) < list_length (a2))
5960 attributes = a2, a2 = a1;
5961
5962 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5963 {
5964 tree a;
5965 for (a = lookup_ident_attribute (get_attribute_name (a2),
5966 attributes);
5967 a != NULL_TREE && !attribute_value_equal (a, a2);
5968 a = lookup_ident_attribute (get_attribute_name (a2),
5969 TREE_CHAIN (a)))
5970 ;
5971 if (a == NULL_TREE)
5972 {
5973 a1 = copy_node (a2);
5974 TREE_CHAIN (a1) = attributes;
5975 attributes = a1;
5976 }
5977 }
5978 }
5979 }
5980 return attributes;
5981 }
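
/* An illustrative merge (hypothetical attribute lists): combining the
   lists built from

     __attribute__ ((used, aligned (4)))
     __attribute__ ((aligned (4), pure))

   yields a list containing 'used', 'aligned (4)' and 'pure' once each;
   'aligned (4)' is not duplicated because an attribute is only copied in
   when no entry with the same name and an equal value already exists.  */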
5982
5983 /* Given types T1 and T2, merge their attributes and return
5984 the result. */
5985
5986 tree
5987 merge_type_attributes (tree t1, tree t2)
5988 {
5989 return merge_attributes (TYPE_ATTRIBUTES (t1),
5990 TYPE_ATTRIBUTES (t2));
5991 }
5992
5993 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5994 the result. */
5995
5996 tree
5997 merge_decl_attributes (tree olddecl, tree newdecl)
5998 {
5999 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6000 DECL_ATTRIBUTES (newdecl));
6001 }
6002
6003 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6004
6005 /* Specialization of merge_decl_attributes for various Windows targets.
6006
6007 This handles the following situation:
6008
6009 __declspec (dllimport) int foo;
6010 int foo;
6011
6012 The second instance of `foo' nullifies the dllimport. */
6013
6014 tree
6015 merge_dllimport_decl_attributes (tree old, tree new_tree)
6016 {
6017 tree a;
6018 int delete_dllimport_p = 1;
6019
6020 /* What we need to do here is remove from `old' dllimport if it doesn't
6021 appear in `new'. dllimport behaves like extern: if a declaration is
6022 marked dllimport and a definition appears later, then the object
6023 is not dllimport'd. We also remove a `new' dllimport if the old list
6024 contains dllexport: dllexport always overrides dllimport, regardless
6025 of the order of declaration. */
6026 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6027 delete_dllimport_p = 0;
6028 else if (DECL_DLLIMPORT_P (new_tree)
6029 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6030 {
6031 DECL_DLLIMPORT_P (new_tree) = 0;
6032 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6033 "dllimport ignored", new_tree);
6034 }
6035 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6036 {
6037 /* Warn about overriding a symbol that has already been used, e.g.:
6038 extern int __attribute__ ((dllimport)) foo;
6039 int* bar () {return &foo;}
6040 int foo;
6041 */
6042 if (TREE_USED (old))
6043 {
6044 warning (0, "%q+D redeclared without dllimport attribute "
6045 "after being referenced with dll linkage", new_tree);
6046 /* If we have used a variable's address with dllimport linkage,
6047 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6048 decl may already have had TREE_CONSTANT computed.
6049 We still remove the attribute so that assembler code refers
6050 to '&foo' rather than '_imp__foo'. */
6051 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6052 DECL_DLLIMPORT_P (new_tree) = 1;
6053 }
6054
6055 /* Let an inline definition silently override the external reference,
6056 but otherwise warn about attribute inconsistency. */
6057 else if (TREE_CODE (new_tree) == VAR_DECL
6058 || !DECL_DECLARED_INLINE_P (new_tree))
6059 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6060 "previous dllimport ignored", new_tree);
6061 }
6062 else
6063 delete_dllimport_p = 0;
6064
6065 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6066
6067 if (delete_dllimport_p)
6068 a = remove_attribute ("dllimport", a);
6069
6070 return a;
6071 }
6072
6073 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6074 struct attribute_spec.handler. */
6075
6076 tree
6077 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6078 bool *no_add_attrs)
6079 {
6080 tree node = *pnode;
6081 bool is_dllimport;
6082
6083 /* These attributes may apply to structure and union types being created,
6084 but otherwise should pass to the declaration involved. */
6085 if (!DECL_P (node))
6086 {
6087 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6088 | (int) ATTR_FLAG_ARRAY_NEXT))
6089 {
6090 *no_add_attrs = true;
6091 return tree_cons (name, args, NULL_TREE);
6092 }
6093 if (TREE_CODE (node) == RECORD_TYPE
6094 || TREE_CODE (node) == UNION_TYPE)
6095 {
6096 node = TYPE_NAME (node);
6097 if (!node)
6098 return NULL_TREE;
6099 }
6100 else
6101 {
6102 warning (OPT_Wattributes, "%qE attribute ignored",
6103 name);
6104 *no_add_attrs = true;
6105 return NULL_TREE;
6106 }
6107 }
6108
6109 if (TREE_CODE (node) != FUNCTION_DECL
6110 && TREE_CODE (node) != VAR_DECL
6111 && TREE_CODE (node) != TYPE_DECL)
6112 {
6113 *no_add_attrs = true;
6114 warning (OPT_Wattributes, "%qE attribute ignored",
6115 name);
6116 return NULL_TREE;
6117 }
6118
6119 if (TREE_CODE (node) == TYPE_DECL
6120 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6121 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6122 {
6123 *no_add_attrs = true;
6124 warning (OPT_Wattributes, "%qE attribute ignored",
6125 name);
6126 return NULL_TREE;
6127 }
6128
6129 is_dllimport = is_attribute_p ("dllimport", name);
6130
6131 /* Report error on dllimport ambiguities seen now before they cause
6132 any damage. */
6133 if (is_dllimport)
6134 {
6135 /* Honor any target-specific overrides. */
6136 if (!targetm.valid_dllimport_attribute_p (node))
6137 *no_add_attrs = true;
6138
6139 else if (TREE_CODE (node) == FUNCTION_DECL
6140 && DECL_DECLARED_INLINE_P (node))
6141 {
6142 warning (OPT_Wattributes, "inline function %q+D declared as "
6143 " dllimport: attribute ignored", node);
6144 *no_add_attrs = true;
6145 }
6146 /* Like MS, treat definition of dllimported variables and
6147 non-inlined functions on declaration as syntax errors. */
6148 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6149 {
6150 error ("function %q+D definition is marked dllimport", node);
6151 *no_add_attrs = true;
6152 }
6153
6154 else if (TREE_CODE (node) == VAR_DECL)
6155 {
6156 if (DECL_INITIAL (node))
6157 {
6158 error ("variable %q+D definition is marked dllimport",
6159 node);
6160 *no_add_attrs = true;
6161 }
6162
6163 /* `extern' needn't be specified with dllimport.
6164 Specify `extern' now and hope for the best. Sigh. */
6165 DECL_EXTERNAL (node) = 1;
6166 /* Also, implicitly give global scope to dllimport'd variables
6167 declared within a function, unless declared static. */
6168 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6169 TREE_PUBLIC (node) = 1;
6170 }
6171
6172 if (*no_add_attrs == false)
6173 DECL_DLLIMPORT_P (node) = 1;
6174 }
6175 else if (TREE_CODE (node) == FUNCTION_DECL
6176 && DECL_DECLARED_INLINE_P (node)
6177 && flag_keep_inline_dllexport)
6178 /* An exported function, even if inline, must be emitted. */
6179 DECL_EXTERNAL (node) = 0;
6180
6181 /* Report error if symbol is not accessible at global scope. */
6182 if (!TREE_PUBLIC (node)
6183 && (TREE_CODE (node) == VAR_DECL
6184 || TREE_CODE (node) == FUNCTION_DECL))
6185 {
6186 error ("external linkage required for symbol %q+D because of "
6187 "%qE attribute", node, name);
6188 *no_add_attrs = true;
6189 }
6190
6191 /* A dllexport'd entity must have default visibility so that other
6192 program units (shared libraries or the main executable) can see
6193 it. A dllimport'd entity must have default visibility so that
6194 the linker knows that undefined references within this program
6195 unit can be resolved by the dynamic linker. */
6196 if (!*no_add_attrs)
6197 {
6198 if (DECL_VISIBILITY_SPECIFIED (node)
6199 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6200 error ("%qE implies default visibility, but %qD has already "
6201 "been declared with a different visibility",
6202 name, node);
6203 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6204 DECL_VISIBILITY_SPECIFIED (node) = 1;
6205 }
6206
6207 return NULL_TREE;
6208 }
6209
6210 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6211 \f
6212 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6213 of the various TYPE_QUAL values. */
6214
6215 static void
6216 set_type_quals (tree type, int type_quals)
6217 {
6218 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6219 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6220 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6221 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6222 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6223 }
6224
6225 /* Returns true iff unqualified CAND and BASE are equivalent. */
6226
6227 bool
6228 check_base_type (const_tree cand, const_tree base)
6229 {
6230 return (TYPE_NAME (cand) == TYPE_NAME (base)
6231 /* Apparently this is needed for Objective-C. */
6232 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6233 /* Check alignment. */
6234 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6235 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6236 TYPE_ATTRIBUTES (base)));
6237 }
6238
6239 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6240
6241 bool
6242 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6243 {
6244 return (TYPE_QUALS (cand) == type_quals
6245 && check_base_type (cand, base));
6246 }
6247
6248 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6249
6250 static bool
6251 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6252 {
6253 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6254 && TYPE_NAME (cand) == TYPE_NAME (base)
6255 /* Apparently this is needed for Objective-C. */
6256 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6257 /* Check alignment. */
6258 && TYPE_ALIGN (cand) == align
6259 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6260 TYPE_ATTRIBUTES (base)));
6261 }
6262
6263 /* Check whether TYPE matches the size of one of the built-in
6264 atomic types, and if so return that core atomic type. */
6265
6266 static tree
6267 find_atomic_core_type (tree type)
6268 {
6269 tree base_atomic_type;
6270
6271 /* Only handle complete types. */
6272 if (TYPE_SIZE (type) == NULL_TREE)
6273 return NULL_TREE;
6274
6275 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6276 switch (type_size)
6277 {
6278 case 8:
6279 base_atomic_type = atomicQI_type_node;
6280 break;
6281
6282 case 16:
6283 base_atomic_type = atomicHI_type_node;
6284 break;
6285
6286 case 32:
6287 base_atomic_type = atomicSI_type_node;
6288 break;
6289
6290 case 64:
6291 base_atomic_type = atomicDI_type_node;
6292 break;
6293
6294 case 128:
6295 base_atomic_type = atomicTI_type_node;
6296 break;
6297
6298 default:
6299 base_atomic_type = NULL_TREE;
6300 }
6301
6302 return base_atomic_type;
6303 }
6304
6305 /* Return a version of the TYPE, qualified as indicated by the
6306 TYPE_QUALS, if one exists. If no qualified version exists yet,
6307 return NULL_TREE. */
6308
6309 tree
6310 get_qualified_type (tree type, int type_quals)
6311 {
6312 tree t;
6313
6314 if (TYPE_QUALS (type) == type_quals)
6315 return type;
6316
6317 /* Search the chain of variants to see if there is already one there just
6318 like the one we need to have. If so, use that existing one. We must
6319 preserve the TYPE_NAME, since there is code that depends on this. */
6320 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6321 if (check_qualified_type (t, type, type_quals))
6322 return t;
6323
6324 return NULL_TREE;
6325 }
6326
6327 /* Like get_qualified_type, but creates the type if it does not
6328 exist. This function never returns NULL_TREE. */
6329
6330 tree
6331 build_qualified_type (tree type, int type_quals)
6332 {
6333 tree t;
6334
6335 /* See if we already have the appropriate qualified variant. */
6336 t = get_qualified_type (type, type_quals);
6337
6338 /* If not, build it. */
6339 if (!t)
6340 {
6341 t = build_variant_type_copy (type);
6342 set_type_quals (t, type_quals);
6343
6344 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6345 {
6346 /* See if this object can map to a basic atomic type. */
6347 tree atomic_type = find_atomic_core_type (type);
6348 if (atomic_type)
6349 {
6350 /* Ensure the alignment of this type is compatible with
6351 the required alignment of the atomic type. */
6352 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6353 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6354 }
6355 }
6356
6357 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6358 /* Propagate structural equality. */
6359 SET_TYPE_STRUCTURAL_EQUALITY (t);
6360 else if (TYPE_CANONICAL (type) != type)
6361 /* Build the underlying canonical type, since it is different
6362 from TYPE. */
6363 {
6364 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6365 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6366 }
6367 else
6368 /* T is its own canonical type. */
6369 TYPE_CANONICAL (t) = t;
6370
6371 }
6372
6373 return t;
6374 }
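
/* A minimal usage sketch:

     tree ct = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);

   CT has TYPE_READONLY set, shares its TYPE_MAIN_VARIANT with
   integer_type_node, and a second call with the same arguments returns
   the same node, found by get_qualified_type on the variant chain.  */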
6375
6376 /* Create a variant of type T with alignment ALIGN. */
6377
6378 tree
6379 build_aligned_type (tree type, unsigned int align)
6380 {
6381 tree t;
6382
6383 if (TYPE_PACKED (type)
6384 || TYPE_ALIGN (type) == align)
6385 return type;
6386
6387 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6388 if (check_aligned_type (t, type, align))
6389 return t;
6390
6391 t = build_variant_type_copy (type);
6392 TYPE_ALIGN (t) = align;
6393
6394 return t;
6395 }
6396
6397 /* Create a new distinct copy of TYPE. The new type is made its own
6398 MAIN_VARIANT. If TYPE requires structural equality checks, the
6399 resulting type requires structural equality checks; otherwise, its
6400 TYPE_CANONICAL points to itself. */
6401
6402 tree
6403 build_distinct_type_copy (tree type)
6404 {
6405 tree t = copy_node (type);
6406
6407 TYPE_POINTER_TO (t) = 0;
6408 TYPE_REFERENCE_TO (t) = 0;
6409
6410 /* Set the canonical type either to a new equivalence class, or
6411 propagate the need for structural equality checks. */
6412 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6413 SET_TYPE_STRUCTURAL_EQUALITY (t);
6414 else
6415 TYPE_CANONICAL (t) = t;
6416
6417 /* Make it its own variant. */
6418 TYPE_MAIN_VARIANT (t) = t;
6419 TYPE_NEXT_VARIANT (t) = 0;
6420
6421 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6422 whose TREE_TYPE is not t. This can also happen in the Ada
6423 frontend when using subtypes. */
6424
6425 return t;
6426 }
6427
6428 /* Create a new variant of TYPE, equivalent but distinct. This is so
6429 the caller can modify it. TYPE_CANONICAL for the return type will
6430 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6431 are considered equal by the language itself (or that both types
6432 require structural equality checks). */
6433
6434 tree
6435 build_variant_type_copy (tree type)
6436 {
6437 tree t, m = TYPE_MAIN_VARIANT (type);
6438
6439 t = build_distinct_type_copy (type);
6440
6441 /* Since we're building a variant, assume that it is a non-semantic
6442 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6443 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6444
6445 /* Add the new type to the chain of variants of TYPE. */
6446 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6447 TYPE_NEXT_VARIANT (m) = t;
6448 TYPE_MAIN_VARIANT (t) = m;
6449
6450 return t;
6451 }
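
/* The contrast with build_distinct_type_copy, as a sketch on some type T:

     tree d = build_distinct_type_copy (t);
     tree v = build_variant_type_copy (t);

   TYPE_MAIN_VARIANT (d) == d, while TYPE_MAIN_VARIANT (v) is
   TYPE_MAIN_VARIANT (t): the distinct copy starts a new main variant
   (and, unless structural equality is required, becomes its own
   TYPE_CANONICAL), whereas the variant copy stays on T's variant chain
   and inherits T's canonical type.  */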
6452 \f
6453 /* Return true if the from trees in both tree maps are equal. */
6454
6455 int
6456 tree_map_base_eq (const void *va, const void *vb)
6457 {
6458 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6459 *const b = (const struct tree_map_base *) vb;
6460 return (a->from == b->from);
6461 }
6462
6463 /* Hash a from tree in a tree_map_base. */
6464
6465 unsigned int
6466 tree_map_base_hash (const void *item)
6467 {
6468 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6469 }
6470
6471 /* Return true if this tree map structure is marked for garbage collection
6472 purposes. We simply return true if the from tree is marked, so that this
6473 structure goes away when the from tree goes away. */
6474
6475 int
6476 tree_map_base_marked_p (const void *p)
6477 {
6478 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6479 }
6480
6481 /* Hash a from tree in a tree_map. */
6482
6483 unsigned int
6484 tree_map_hash (const void *item)
6485 {
6486 return (((const struct tree_map *) item)->hash);
6487 }
6488
6489 /* Hash a from tree in a tree_decl_map. */
6490
6491 unsigned int
6492 tree_decl_map_hash (const void *item)
6493 {
6494 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6495 }
6496
6497 /* Return the initialization priority for DECL. */
6498
6499 priority_type
6500 decl_init_priority_lookup (tree decl)
6501 {
6502 symtab_node *snode = symtab_node::get (decl);
6503
6504 if (!snode)
6505 return DEFAULT_INIT_PRIORITY;
6506 return
6507 snode->get_init_priority ();
6508 }
6509
6510 /* Return the finalization priority for DECL. */
6511
6512 priority_type
6513 decl_fini_priority_lookup (tree decl)
6514 {
6515 cgraph_node *node = cgraph_node::get (decl);
6516
6517 if (!node)
6518 return DEFAULT_INIT_PRIORITY;
6519 return
6520 node->get_fini_priority ();
6521 }
6522
6523 /* Set the initialization priority for DECL to PRIORITY. */
6524
6525 void
6526 decl_init_priority_insert (tree decl, priority_type priority)
6527 {
6528 struct symtab_node *snode;
6529
6530 if (priority == DEFAULT_INIT_PRIORITY)
6531 {
6532 snode = symtab_node::get (decl);
6533 if (!snode)
6534 return;
6535 }
6536 else if (TREE_CODE (decl) == VAR_DECL)
6537 snode = varpool_node::get_create (decl);
6538 else
6539 snode = cgraph_node::get_create (decl);
6540 snode->set_init_priority (priority);
6541 }
6542
6543 /* Set the finalization priority for DECL to PRIORITY. */
6544
6545 void
6546 decl_fini_priority_insert (tree decl, priority_type priority)
6547 {
6548 struct cgraph_node *node;
6549
6550 if (priority == DEFAULT_INIT_PRIORITY)
6551 {
6552 node = cgraph_node::get (decl);
6553 if (!node)
6554 return;
6555 }
6556 else
6557 node = cgraph_node::get_create (decl);
6558 node->set_fini_priority (priority);
6559 }
6560
6561 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6562
6563 static void
6564 print_debug_expr_statistics (void)
6565 {
6566 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6567 (long) htab_size (debug_expr_for_decl),
6568 (long) htab_elements (debug_expr_for_decl),
6569 htab_collisions (debug_expr_for_decl));
6570 }
6571
6572 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6573
6574 static void
6575 print_value_expr_statistics (void)
6576 {
6577 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6578 (long) htab_size (value_expr_for_decl),
6579 (long) htab_elements (value_expr_for_decl),
6580 htab_collisions (value_expr_for_decl));
6581 }
6582
6583 /* Lookup a debug expression for FROM, and return it if we find one. */
6584
6585 tree
6586 decl_debug_expr_lookup (tree from)
6587 {
6588 struct tree_decl_map *h, in;
6589 in.base.from = from;
6590
6591 h = (struct tree_decl_map *)
6592 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6593 if (h)
6594 return h->to;
6595 return NULL_TREE;
6596 }
6597
6598 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6599
6600 void
6601 decl_debug_expr_insert (tree from, tree to)
6602 {
6603 struct tree_decl_map *h;
6604 void **loc;
6605
6606 h = ggc_alloc<tree_decl_map> ();
6607 h->base.from = from;
6608 h->to = to;
6609 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6610 INSERT);
6611 *(struct tree_decl_map **) loc = h;
6612 }
6613
6614 /* Lookup a value expression for FROM, and return it if we find one. */
6615
6616 tree
6617 decl_value_expr_lookup (tree from)
6618 {
6619 struct tree_decl_map *h, in;
6620 in.base.from = from;
6621
6622 h = (struct tree_decl_map *)
6623 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6624 if (h)
6625 return h->to;
6626 return NULL_TREE;
6627 }
6628
6629 /* Insert a mapping FROM->TO in the value expression hashtable. */
6630
6631 void
6632 decl_value_expr_insert (tree from, tree to)
6633 {
6634 struct tree_decl_map *h;
6635 void **loc;
6636
6637 h = ggc_alloc<tree_decl_map> ();
6638 h->base.from = from;
6639 h->to = to;
6640 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6641 INSERT);
6642 *(struct tree_decl_map **) loc = h;
6643 }
6644
6645 /* Lookup a vector of debug arguments for FROM, and return it if we
6646 find one. */
6647
6648 vec<tree, va_gc> **
6649 decl_debug_args_lookup (tree from)
6650 {
6651 struct tree_vec_map *h, in;
6652
6653 if (!DECL_HAS_DEBUG_ARGS_P (from))
6654 return NULL;
6655 gcc_checking_assert (debug_args_for_decl != NULL);
6656 in.base.from = from;
6657 h = (struct tree_vec_map *)
6658 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6659 if (h)
6660 return &h->to;
6661 return NULL;
6662 }
6663
6664 /* Insert a mapping FROM->empty vector of debug arguments in the value
6665 expression hashtable. */
6666
6667 vec<tree, va_gc> **
6668 decl_debug_args_insert (tree from)
6669 {
6670 struct tree_vec_map *h;
6671 void **loc;
6672
6673 if (DECL_HAS_DEBUG_ARGS_P (from))
6674 return decl_debug_args_lookup (from);
6675 if (debug_args_for_decl == NULL)
6676 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6677 tree_vec_map_eq, 0);
6678 h = ggc_alloc<tree_vec_map> ();
6679 h->base.from = from;
6680 h->to = NULL;
6681 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6682 INSERT);
6683 *(struct tree_vec_map **) loc = h;
6684 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6685 return &h->to;
6686 }
6687
6688 /* Hashing of types so that we don't make duplicates.
6689 The entry point is `type_hash_canon'. */
6690
6691 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6692 with types in the TREE_VALUE slots), by adding the hash codes
6693 of the individual types. */
6694
6695 static void
6696 type_hash_list (const_tree list, inchash::hash &hstate)
6697 {
6698 const_tree tail;
6699
6700 for (tail = list; tail; tail = TREE_CHAIN (tail))
6701 if (TREE_VALUE (tail) != error_mark_node)
6702 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6703 }
6704
6705 /* These are the Hashtable callback functions. */
6706
6707 /* Returns true iff the types are equivalent. */
6708
6709 static int
6710 type_hash_eq (const void *va, const void *vb)
6711 {
6712 const struct type_hash *const a = (const struct type_hash *) va,
6713 *const b = (const struct type_hash *) vb;
6714
6715 /* First test the things that are the same for all types. */
6716 if (a->hash != b->hash
6717 || TREE_CODE (a->type) != TREE_CODE (b->type)
6718 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6719 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6720 TYPE_ATTRIBUTES (b->type))
6721 || (TREE_CODE (a->type) != COMPLEX_TYPE
6722 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6723 return 0;
6724
6725 /* Be careful about comparing arrays before and after the element type
6726 has been completed; don't compare TYPE_ALIGN unless both types are
6727 complete. */
6728 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6729 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6730 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6731 return 0;
6732
6733 switch (TREE_CODE (a->type))
6734 {
6735 case VOID_TYPE:
6736 case COMPLEX_TYPE:
6737 case POINTER_TYPE:
6738 case REFERENCE_TYPE:
6739 case NULLPTR_TYPE:
6740 return 1;
6741
6742 case VECTOR_TYPE:
6743 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6744
6745 case ENUMERAL_TYPE:
6746 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6747 && !(TYPE_VALUES (a->type)
6748 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6749 && TYPE_VALUES (b->type)
6750 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6751 && type_list_equal (TYPE_VALUES (a->type),
6752 TYPE_VALUES (b->type))))
6753 return 0;
6754
6755 /* ... fall through ... */
6756
6757 case INTEGER_TYPE:
6758 case REAL_TYPE:
6759 case BOOLEAN_TYPE:
6760 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6761 return false;
6762 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6763 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6764 TYPE_MAX_VALUE (b->type)))
6765 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6766 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6767 TYPE_MIN_VALUE (b->type))));
6768
6769 case FIXED_POINT_TYPE:
6770 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6771
6772 case OFFSET_TYPE:
6773 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6774
6775 case METHOD_TYPE:
6776 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6777 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6778 || (TYPE_ARG_TYPES (a->type)
6779 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6780 && TYPE_ARG_TYPES (b->type)
6781 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6782 && type_list_equal (TYPE_ARG_TYPES (a->type),
6783 TYPE_ARG_TYPES (b->type)))))
6784 break;
6785 return 0;
6786 case ARRAY_TYPE:
6787 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6788
6789 case RECORD_TYPE:
6790 case UNION_TYPE:
6791 case QUAL_UNION_TYPE:
6792 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6793 || (TYPE_FIELDS (a->type)
6794 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6795 && TYPE_FIELDS (b->type)
6796 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6797 && type_list_equal (TYPE_FIELDS (a->type),
6798 TYPE_FIELDS (b->type))));
6799
6800 case FUNCTION_TYPE:
6801 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6802 || (TYPE_ARG_TYPES (a->type)
6803 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6804 && TYPE_ARG_TYPES (b->type)
6805 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6806 && type_list_equal (TYPE_ARG_TYPES (a->type),
6807 TYPE_ARG_TYPES (b->type))))
6808 break;
6809 return 0;
6810
6811 default:
6812 return 0;
6813 }
6814
6815 if (lang_hooks.types.type_hash_eq != NULL)
6816 return lang_hooks.types.type_hash_eq (a->type, b->type);
6817
6818 return 1;
6819 }
6820
6821 /* Return the cached hash value. */
6822
6823 static hashval_t
6824 type_hash_hash (const void *item)
6825 {
6826 return ((const struct type_hash *) item)->hash;
6827 }
6828
6829 /* Given TYPE, and HASHCODE its hash code, return the canonical
6830 object for an identical type if one already exists.
6831 Otherwise, return TYPE, and record it as the canonical object.
6832
6833 To use this function, first create a type of the sort you want.
6834 Then compute its hash code from the fields of the type that
6835 make it different from other similar types.
6836 Then call this function and use the value. */
6837
6838 tree
6839 type_hash_canon (unsigned int hashcode, tree type)
6840 {
6841 type_hash in;
6842 void **loc;
6843
6844 /* The hash table only contains main variants, so ensure that's what we're
6845 being passed. */
6846 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6847
6848 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6849 must call that routine before comparing TYPE_ALIGNs. */
6850 layout_type (type);
6851
6852 in.hash = hashcode;
6853 in.type = type;
6854
6855 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6856 if (*loc)
6857 {
6858 tree t1 = ((type_hash *) *loc)->type;
6859 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6860 if (GATHER_STATISTICS)
6861 {
6862 tree_code_counts[(int) TREE_CODE (type)]--;
6863 tree_node_counts[(int) t_kind]--;
6864 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6865 }
6866 return t1;
6867 }
6868 else
6869 {
6870 struct type_hash *h;
6871
6872 h = ggc_alloc<type_hash> ();
6873 h->hash = hashcode;
6874 h->type = type;
6875 *loc = (void *)h;
6876
6877 return type;
6878 }
6879 }
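
/* The usual calling pattern, as in build_range_type_1 further below:
   build the node, hash the fields that distinguish it, then intern it:

     tree t = make_node (INTEGER_TYPE);
     ... fill in the distinguishing fields ...
     inchash::hash hstate;
     inchash::add_expr (TYPE_MIN_VALUE (t), hstate);
     inchash::add_expr (TYPE_MAX_VALUE (t), hstate);
     hstate.merge_hash (TYPE_HASH (TREE_TYPE (t)));
     t = type_hash_canon (hstate.end (), t);

   After the call T is either the freshly built node or the previously
   recorded equivalent one.  */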
6880
6881 /* See if the data pointed to by the type hash table is marked. We consider
6882 it marked if the type is marked or if a debug type number or symbol
6883 table entry has been made for the type. */
6884
6885 static int
6886 type_hash_marked_p (const void *p)
6887 {
6888 const_tree const type = ((const struct type_hash *) p)->type;
6889
6890 return ggc_marked_p (type);
6891 }
6892
6893 static void
6894 print_type_hash_statistics (void)
6895 {
6896 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6897 (long) htab_size (type_hash_table),
6898 (long) htab_elements (type_hash_table),
6899 htab_collisions (type_hash_table));
6900 }
6901
6902 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6903 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6904 by adding the hash codes of the individual attributes. */
6905
6906 static void
6907 attribute_hash_list (const_tree list, inchash::hash &hstate)
6908 {
6909 const_tree tail;
6910
6911 for (tail = list; tail; tail = TREE_CHAIN (tail))
6912 /* ??? Do we want to add in TREE_VALUE too? */
6913 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6914 }
6915
6916 /* Given two lists of attributes, return true if list l2 is
6917 equivalent to l1. */
6918
6919 int
6920 attribute_list_equal (const_tree l1, const_tree l2)
6921 {
6922 if (l1 == l2)
6923 return 1;
6924
6925 return attribute_list_contained (l1, l2)
6926 && attribute_list_contained (l2, l1);
6927 }
6928
6929 /* Given two lists of attributes, return true if list L2 is
6930 completely contained within L1. */
6931 /* ??? This would be faster if attribute names were stored in a canonicalized
6932 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6933 must be used to show these elements are equivalent (which they are). */
6934 /* ??? It's not clear that attributes with arguments will always be handled
6935 correctly. */
6936
6937 int
6938 attribute_list_contained (const_tree l1, const_tree l2)
6939 {
6940 const_tree t1, t2;
6941
6942 /* First check the obvious, maybe the lists are identical. */
6943 if (l1 == l2)
6944 return 1;
6945
6946 /* Maybe the lists are similar. */
6947 for (t1 = l1, t2 = l2;
6948 t1 != 0 && t2 != 0
6949 && get_attribute_name (t1) == get_attribute_name (t2)
6950 && TREE_VALUE (t1) == TREE_VALUE (t2);
6951 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6952 ;
6953
6954 /* Maybe the lists are equal. */
6955 if (t1 == 0 && t2 == 0)
6956 return 1;
6957
6958 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6959 {
6960 const_tree attr;
6961 /* This CONST_CAST is okay because lookup_attribute does not
6962 modify its argument and the return value is assigned to a
6963 const_tree. */
6964 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6965 CONST_CAST_TREE (l1));
6966 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6967 attr = lookup_ident_attribute (get_attribute_name (t2),
6968 TREE_CHAIN (attr)))
6969 ;
6970
6971 if (attr == NULL_TREE)
6972 return 0;
6973 }
6974
6975 return 1;
6976 }
6977
6978 /* Given two lists of types
6979 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6980 return 1 if the lists contain the same types in the same order.
6981 Also, the TREE_PURPOSEs must match. */
6982
6983 int
6984 type_list_equal (const_tree l1, const_tree l2)
6985 {
6986 const_tree t1, t2;
6987
6988 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6989 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6990 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6991 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6992 && (TREE_TYPE (TREE_PURPOSE (t1))
6993 == TREE_TYPE (TREE_PURPOSE (t2))))))
6994 return 0;
6995
6996 return t1 == t2;
6997 }
6998
6999 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7000 given by TYPE. If the argument list accepts variable arguments,
7001 then this function counts only the ordinary arguments. */
7002
7003 int
7004 type_num_arguments (const_tree type)
7005 {
7006 int i = 0;
7007 tree t;
7008
7009 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7010 /* If the function does not take a variable number of arguments,
7011 the last element in the list will have type `void'. */
7012 if (VOID_TYPE_P (TREE_VALUE (t)))
7013 break;
7014 else
7015 ++i;
7016
7017 return i;
7018 }
7019
7020 /* Nonzero if integer constants T1 and T2
7021 represent the same constant value. */
7022
7023 int
7024 tree_int_cst_equal (const_tree t1, const_tree t2)
7025 {
7026 if (t1 == t2)
7027 return 1;
7028
7029 if (t1 == 0 || t2 == 0)
7030 return 0;
7031
7032 if (TREE_CODE (t1) == INTEGER_CST
7033 && TREE_CODE (t2) == INTEGER_CST
7034 && wi::to_widest (t1) == wi::to_widest (t2))
7035 return 1;
7036
7037 return 0;
7038 }
7039
7040 /* Return true if T is an INTEGER_CST whose numerical value (extended
7041 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7042
7043 bool
7044 tree_fits_shwi_p (const_tree t)
7045 {
7046 return (t != NULL_TREE
7047 && TREE_CODE (t) == INTEGER_CST
7048 && wi::fits_shwi_p (wi::to_widest (t)));
7049 }
7050
7051 /* Return true if T is an INTEGER_CST whose numerical value (extended
7052 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7053
7054 bool
7055 tree_fits_uhwi_p (const_tree t)
7056 {
7057 return (t != NULL_TREE
7058 && TREE_CODE (t) == INTEGER_CST
7059 && wi::fits_uhwi_p (wi::to_widest (t)));
7060 }
7061
7062 /* T is an INTEGER_CST whose numerical value (extended according to
7063 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7064 HOST_WIDE_INT. */
7065
7066 HOST_WIDE_INT
7067 tree_to_shwi (const_tree t)
7068 {
7069 gcc_assert (tree_fits_shwi_p (t));
7070 return TREE_INT_CST_LOW (t);
7071 }
7072
7073 /* T is an INTEGER_CST whose numerical value (extended according to
7074 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7075 HOST_WIDE_INT. */
7076
7077 unsigned HOST_WIDE_INT
7078 tree_to_uhwi (const_tree t)
7079 {
7080 gcc_assert (tree_fits_uhwi_p (t));
7081 return TREE_INT_CST_LOW (t);
7082 }
7083
7084 /* Return the most significant (sign) bit of T. */
7085
7086 int
7087 tree_int_cst_sign_bit (const_tree t)
7088 {
7089 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7090
7091 return wi::extract_uhwi (t, bitno, 1);
7092 }
7093
7094 /* Return an indication of the sign of the integer constant T.
7095 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7096 Note that -1 will never be returned if T's type is unsigned. */
7097
7098 int
7099 tree_int_cst_sgn (const_tree t)
7100 {
7101 if (wi::eq_p (t, 0))
7102 return 0;
7103 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7104 return 1;
7105 else if (wi::neg_p (t))
7106 return -1;
7107 else
7108 return 1;
7109 }
7110
7111 /* Return the minimum number of bits needed to represent VALUE in a
7112 signed or unsigned type; SGN says which. */
7113
7114 unsigned int
7115 tree_int_cst_min_precision (tree value, signop sgn)
7116 {
7117 /* If the value is negative, compute its negative minus 1. The latter
7118 adjustment is because the absolute value of the largest negative value
7119 is one larger than the largest positive value. This is equivalent to
7120 a bit-wise negation, so use that operation instead. */
7121
7122 if (tree_int_cst_sgn (value) < 0)
7123 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7124
7125 /* Return the number of bits needed, taking into account the fact
7126 that we need one more bit for a signed than unsigned type.
7127 If VALUE is 0 or -1, the minimum precision is 1 no matter
7128 whether SGN is SIGNED or UNSIGNED. */
7129
7130 if (integer_zerop (value))
7131 return 1;
7132 else
7133 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7134 }
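
/* Worked examples: VALUE == 5 needs tree_floor_log2 (5) + 1 == 3 bits as
   an unsigned quantity and 4 bits as a signed one; VALUE == -3 is first
   complemented to 2, giving 2 bits plus the sign bit, i.e. 3 bits, which
   indeed holds -3 as the signed pattern 101.  */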
7135
7136 /* Return truthvalue of whether T1 is the same tree structure as T2.
7137 Return 1 if they are the same.
7138 Return 0 if they are understandably different.
7139 Return -1 if either contains tree structure not understood by
7140 this function. */
7141
7142 int
7143 simple_cst_equal (const_tree t1, const_tree t2)
7144 {
7145 enum tree_code code1, code2;
7146 int cmp;
7147 int i;
7148
7149 if (t1 == t2)
7150 return 1;
7151 if (t1 == 0 || t2 == 0)
7152 return 0;
7153
7154 code1 = TREE_CODE (t1);
7155 code2 = TREE_CODE (t2);
7156
7157 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7158 {
7159 if (CONVERT_EXPR_CODE_P (code2)
7160 || code2 == NON_LVALUE_EXPR)
7161 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7162 else
7163 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7164 }
7165
7166 else if (CONVERT_EXPR_CODE_P (code2)
7167 || code2 == NON_LVALUE_EXPR)
7168 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7169
7170 if (code1 != code2)
7171 return 0;
7172
7173 switch (code1)
7174 {
7175 case INTEGER_CST:
7176 return wi::to_widest (t1) == wi::to_widest (t2);
7177
7178 case REAL_CST:
7179 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7180
7181 case FIXED_CST:
7182 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7183
7184 case STRING_CST:
7185 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7186 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7187 TREE_STRING_LENGTH (t1)));
7188
7189 case CONSTRUCTOR:
7190 {
7191 unsigned HOST_WIDE_INT idx;
7192 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7193 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7194
7195 if (vec_safe_length (v1) != vec_safe_length (v2))
7196 return false;
7197
7198 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7199 /* ??? Should we handle also fields here? */
7200 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7201 return false;
7202 return true;
7203 }
7204
7205 case SAVE_EXPR:
7206 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7207
7208 case CALL_EXPR:
7209 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7210 if (cmp <= 0)
7211 return cmp;
7212 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7213 return 0;
7214 {
7215 const_tree arg1, arg2;
7216 const_call_expr_arg_iterator iter1, iter2;
7217 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7218 arg2 = first_const_call_expr_arg (t2, &iter2);
7219 arg1 && arg2;
7220 arg1 = next_const_call_expr_arg (&iter1),
7221 arg2 = next_const_call_expr_arg (&iter2))
7222 {
7223 cmp = simple_cst_equal (arg1, arg2);
7224 if (cmp <= 0)
7225 return cmp;
7226 }
7227 return arg1 == arg2;
7228 }
7229
7230 case TARGET_EXPR:
7231 /* Special case: if either target is an unallocated VAR_DECL,
7232 it means that it's going to be unified with whatever the
7233 TARGET_EXPR is really supposed to initialize, so treat it
7234 as being equivalent to anything. */
7235 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7236 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7237 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7238 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7239 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7240 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7241 cmp = 1;
7242 else
7243 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7244
7245 if (cmp <= 0)
7246 return cmp;
7247
7248 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7249
7250 case WITH_CLEANUP_EXPR:
7251 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7252 if (cmp <= 0)
7253 return cmp;
7254
7255 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7256
7257 case COMPONENT_REF:
7258 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7259 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7260
7261 return 0;
7262
7263 case VAR_DECL:
7264 case PARM_DECL:
7265 case CONST_DECL:
7266 case FUNCTION_DECL:
7267 return 0;
7268
7269 default:
7270 break;
7271 }
7272
7273 /* This general rule works for most tree codes. All exceptions should be
7274 handled above. If this is a language-specific tree code, we can't
7275 trust what might be in the operand, so say we don't know
7276 the situation. */
7277 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7278 return -1;
7279
7280 switch (TREE_CODE_CLASS (code1))
7281 {
7282 case tcc_unary:
7283 case tcc_binary:
7284 case tcc_comparison:
7285 case tcc_expression:
7286 case tcc_reference:
7287 case tcc_statement:
7288 cmp = 1;
7289 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7290 {
7291 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7292 if (cmp <= 0)
7293 return cmp;
7294 }
7295
7296 return cmp;
7297
7298 default:
7299 return -1;
7300 }
7301 }
7302
7303 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7304 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7305 than U, respectively. */
7306
7307 int
7308 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7309 {
7310 if (tree_int_cst_sgn (t) < 0)
7311 return -1;
7312 else if (!tree_fits_uhwi_p (t))
7313 return 1;
7314 else if (TREE_INT_CST_LOW (t) == u)
7315 return 0;
7316 else if (TREE_INT_CST_LOW (t) < u)
7317 return -1;
7318 else
7319 return 1;
7320 }
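
/* For instance, compare_tree_int (size_int (10), 7) is 1,
   compare_tree_int (size_int (7), 7) is 0, and any negative T yields -1
   regardless of U (a sketch).  */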
7321
7322 /* Return true if SIZE represents a constant size that is in bounds of
7323 what the middle-end and the backend accept (covering not more than
7324 half of the address space). */
7325
7326 bool
7327 valid_constant_size_p (const_tree size)
7328 {
7329 if (! tree_fits_uhwi_p (size)
7330 || TREE_OVERFLOW (size)
7331 || tree_int_cst_sign_bit (size) != 0)
7332 return false;
7333 return true;
7334 }
7335
7336 /* Return the precision of the type, or for a complex or vector type the
7337 precision of the type of its elements. */
7338
7339 unsigned int
7340 element_precision (const_tree type)
7341 {
7342 enum tree_code code = TREE_CODE (type);
7343 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7344 type = TREE_TYPE (type);
7345
7346 return TYPE_PRECISION (type);
7347 }
7348
7349 /* Return true if CODE represents an associative tree code. Otherwise
7350 return false. */
7351 bool
7352 associative_tree_code (enum tree_code code)
7353 {
7354 switch (code)
7355 {
7356 case BIT_IOR_EXPR:
7357 case BIT_AND_EXPR:
7358 case BIT_XOR_EXPR:
7359 case PLUS_EXPR:
7360 case MULT_EXPR:
7361 case MIN_EXPR:
7362 case MAX_EXPR:
7363 return true;
7364
7365 default:
7366 break;
7367 }
7368 return false;
7369 }
7370
7371 /* Return true if CODE represents a commutative tree code. Otherwise
7372 return false. */
7373 bool
7374 commutative_tree_code (enum tree_code code)
7375 {
7376 switch (code)
7377 {
7378 case PLUS_EXPR:
7379 case MULT_EXPR:
7380 case MULT_HIGHPART_EXPR:
7381 case MIN_EXPR:
7382 case MAX_EXPR:
7383 case BIT_IOR_EXPR:
7384 case BIT_XOR_EXPR:
7385 case BIT_AND_EXPR:
7386 case NE_EXPR:
7387 case EQ_EXPR:
7388 case UNORDERED_EXPR:
7389 case ORDERED_EXPR:
7390 case UNEQ_EXPR:
7391 case LTGT_EXPR:
7392 case TRUTH_AND_EXPR:
7393 case TRUTH_XOR_EXPR:
7394 case TRUTH_OR_EXPR:
7395 case WIDEN_MULT_EXPR:
7396 case VEC_WIDEN_MULT_HI_EXPR:
7397 case VEC_WIDEN_MULT_LO_EXPR:
7398 case VEC_WIDEN_MULT_EVEN_EXPR:
7399 case VEC_WIDEN_MULT_ODD_EXPR:
7400 return true;
7401
7402 default:
7403 break;
7404 }
7405 return false;
7406 }
7407
7408 /* Return true if CODE represents a ternary tree code for which the
7409 first two operands are commutative. Otherwise return false. */
7410 bool
7411 commutative_ternary_tree_code (enum tree_code code)
7412 {
7413 switch (code)
7414 {
7415 case WIDEN_MULT_PLUS_EXPR:
7416 case WIDEN_MULT_MINUS_EXPR:
7417 case DOT_PROD_EXPR:
7418 case FMA_EXPR:
7419 return true;
7420
7421 default:
7422 break;
7423 }
7424 return false;
7425 }
7426
7427 namespace inchash
7428 {
7429
7430 /* Generate a hash value for an expression. This can be used iteratively
7431 by passing a previous result as the HSTATE argument.
7432
7433 This function is intended to produce the same hash for expressions which
7434 would compare equal using operand_equal_p. */
7435 void
7436 add_expr (const_tree t, inchash::hash &hstate)
7437 {
7438 int i;
7439 enum tree_code code;
7440 enum tree_code_class tclass;
7441
7442 if (t == NULL_TREE)
7443 {
7444 hstate.merge_hash (0);
7445 return;
7446 }
7447
7448 code = TREE_CODE (t);
7449
7450 switch (code)
7451 {
7452 /* Alas, constants aren't shared, so we can't rely on pointer
7453 identity. */
7454 case VOID_CST:
7455 hstate.merge_hash (0);
7456 return;
7457 case INTEGER_CST:
7458 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7459 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7460 return;
7461 case REAL_CST:
7462 {
7463 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7464 hstate.merge_hash (val2);
7465 return;
7466 }
7467 case FIXED_CST:
7468 {
7469 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7470 hstate.merge_hash (val2);
7471 return;
7472 }
7473 case STRING_CST:
7474 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7475 return;
7476 case COMPLEX_CST:
7477 inchash::add_expr (TREE_REALPART (t), hstate);
7478 inchash::add_expr (TREE_IMAGPART (t), hstate);
7479 return;
7480 case VECTOR_CST:
7481 {
7482 unsigned i;
7483 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7484 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7485 return;
7486 }
7487 case SSA_NAME:
7488 /* We can just compare by pointer. */
7489 hstate.add_wide_int (SSA_NAME_VERSION (t));
7490 return;
7491 case PLACEHOLDER_EXPR:
7492 /* The node itself doesn't matter. */
7493 return;
7494 case TREE_LIST:
7495 /* A list of expressions, for a CALL_EXPR or as the elements of a
7496 VECTOR_CST. */
7497 for (; t; t = TREE_CHAIN (t))
7498 inchash::add_expr (TREE_VALUE (t), hstate);
7499 return;
7500 case CONSTRUCTOR:
7501 {
7502 unsigned HOST_WIDE_INT idx;
7503 tree field, value;
7504 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7505 {
7506 inchash::add_expr (field, hstate);
7507 inchash::add_expr (value, hstate);
7508 }
7509 return;
7510 }
7511 case FUNCTION_DECL:
7512 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7513 Otherwise nodes that compare equal according to operand_equal_p might
7514 get different hash codes. However, don't do this for machine specific
7515 or front end builtins, since the function code is overloaded in those
7516 cases. */
7517 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7518 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7519 {
7520 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7521 code = TREE_CODE (t);
7522 }
7523 /* FALL THROUGH */
7524 default:
7525 tclass = TREE_CODE_CLASS (code);
7526
7527 if (tclass == tcc_declaration)
7528 {
7529 /* DECL's have a unique ID */
7530 hstate.add_wide_int (DECL_UID (t));
7531 }
7532 else
7533 {
7534 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7535
7536 hstate.add_object (code);
7537
7538 /* Don't hash the type, that can lead to having nodes which
7539 compare equal according to operand_equal_p, but which
7540 have different hash codes. */
7541 if (CONVERT_EXPR_CODE_P (code)
7542 || code == NON_LVALUE_EXPR)
7543 {
7544 /* Make sure to include signedness in the hash computation. */
7545 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7546 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7547 }
7548
7549 else if (commutative_tree_code (code))
7550 {
7551 /* It's a commutative expression. We want to hash it the same
7552 however it appears. We do this by first hashing both operands
7553 and then rehashing based on the order of their independent
7554 hashes. */
7555 inchash::hash one, two;
7556 inchash::add_expr (TREE_OPERAND (t, 0), one);
7557 inchash::add_expr (TREE_OPERAND (t, 1), two);
7558 hstate.add_commutative (one, two);
7559 }
7560 else
7561 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7562 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7563 }
7564 return;
7565 }
7566 }
7567
7568 }
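
/* As a consequence of the commutative handling in add_expr, operand
   orders that operand_equal_p treats as equal hash identically.  A sketch
   with hypothetical trees A and B of a common TYPE:

     inchash::hash h1, h2;
     inchash::add_expr (build2 (PLUS_EXPR, type, a, b), h1);
     inchash::add_expr (build2 (PLUS_EXPR, type, b, a), h2);

   h1.end () and h2.end () agree, because both operands are hashed
   independently and combined with add_commutative.  */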
7569
7570 /* Constructors for pointer, array and function types.
7571 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7572 constructed by language-dependent code, not here.) */
7573
7574 /* Construct, lay out and return the type of pointers to TO_TYPE with
7575 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7576 reference all of memory. If such a type has already been
7577 constructed, reuse it. */
7578
7579 tree
7580 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7581 bool can_alias_all)
7582 {
7583 tree t;
7584
7585 if (to_type == error_mark_node)
7586 return error_mark_node;
7587
7588 /* If the pointed-to type has the may_alias attribute set, force
7589 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7590 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7591 can_alias_all = true;
7592
7593 /* In some cases, languages will have things that aren't a POINTER_TYPE
7594 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7595 In that case, return that type without regard to the rest of our
7596 operands.
7597
7598 ??? This is a kludge, but consistent with the way this function has
7599 always operated and there doesn't seem to be a good way to avoid this
7600 at the moment. */
7601 if (TYPE_POINTER_TO (to_type) != 0
7602 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7603 return TYPE_POINTER_TO (to_type);
7604
7605 /* First, if we already have a type for pointers to TO_TYPE and it's
7606 the proper mode, use it. */
7607 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7608 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7609 return t;
7610
7611 t = make_node (POINTER_TYPE);
7612
7613 TREE_TYPE (t) = to_type;
7614 SET_TYPE_MODE (t, mode);
7615 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7616 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7617 TYPE_POINTER_TO (to_type) = t;
7618
7619 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7620 SET_TYPE_STRUCTURAL_EQUALITY (t);
7621 else if (TYPE_CANONICAL (to_type) != to_type)
7622 TYPE_CANONICAL (t)
7623 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7624 mode, can_alias_all);
7625
7626 /* Lay out the type. This function has many callers that are concerned
7627 with expression-construction, and this simplifies them all. */
7628 layout_type (t);
7629
7630 return t;
7631 }
7632
7633 /* By default build pointers in ptr_mode. */
7634
7635 tree
7636 build_pointer_type (tree to_type)
7637 {
7638 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7639 : TYPE_ADDR_SPACE (to_type);
7640 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7641 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7642 }
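
/* E.g. (a sketch; char_type_node carries no may_alias attribute), two
   requests for the same pointee reuse the node recorded on the
   TYPE_POINTER_TO / TYPE_NEXT_PTR_TO chain:

     tree p1 = build_pointer_type (char_type_node);
     tree p2 = build_pointer_type (char_type_node);

   so P1 == P2.  */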
7643
7644 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7645
7646 tree
7647 build_reference_type_for_mode (tree to_type, machine_mode mode,
7648 bool can_alias_all)
7649 {
7650 tree t;
7651
7652 if (to_type == error_mark_node)
7653 return error_mark_node;
7654
7655 /* If the pointed-to type has the may_alias attribute set, force
7656 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7657 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7658 can_alias_all = true;
7659
7660 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7661 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7662 In that case, return that type without regard to the rest of our
7663 operands.
7664
7665 ??? This is a kludge, but consistent with the way this function has
7666 always operated and there doesn't seem to be a good way to avoid this
7667 at the moment. */
7668 if (TYPE_REFERENCE_TO (to_type) != 0
7669 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7670 return TYPE_REFERENCE_TO (to_type);
7671
7672 /* First, if we already have a type for pointers to TO_TYPE and it's
7673 the proper mode, use it. */
7674 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7675 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7676 return t;
7677
7678 t = make_node (REFERENCE_TYPE);
7679
7680 TREE_TYPE (t) = to_type;
7681 SET_TYPE_MODE (t, mode);
7682 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7683 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7684 TYPE_REFERENCE_TO (to_type) = t;
7685
7686 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7687 SET_TYPE_STRUCTURAL_EQUALITY (t);
7688 else if (TYPE_CANONICAL (to_type) != to_type)
7689 TYPE_CANONICAL (t)
7690 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7691 mode, can_alias_all);
7692
7693 layout_type (t);
7694
7695 return t;
7696 }
7697
7698
7699 /* Build the node for the type of references-to-TO_TYPE by default
7700 in ptr_mode. */
7701
7702 tree
7703 build_reference_type (tree to_type)
7704 {
7705 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7706 : TYPE_ADDR_SPACE (to_type);
7707 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7708 return build_reference_type_for_mode (to_type, pointer_mode, false);
7709 }
7710
7711 #define MAX_INT_CACHED_PREC \
7712 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7713 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7714
7715 /* Builds a signed or unsigned integer type of precision PRECISION.
7716 Used for C bitfields whose precision does not match that of
7717 built-in target types. */
7718 tree
7719 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7720 int unsignedp)
7721 {
7722 tree itype, ret;
7723
7724 if (unsignedp)
7725 unsignedp = MAX_INT_CACHED_PREC + 1;
7726
7727 if (precision <= MAX_INT_CACHED_PREC)
7728 {
7729 itype = nonstandard_integer_type_cache[precision + unsignedp];
7730 if (itype)
7731 return itype;
7732 }
7733
7734 itype = make_node (INTEGER_TYPE);
7735 TYPE_PRECISION (itype) = precision;
7736
7737 if (unsignedp)
7738 fixup_unsigned_type (itype);
7739 else
7740 fixup_signed_type (itype);
7741
7742 ret = itype;
7743 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7744 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7745 if (precision <= MAX_INT_CACHED_PREC)
7746 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7747
7748 return ret;
7749 }
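
/* A sketch: a 24-bit unsigned bit-field type is built with

     tree u24 = build_nonstandard_integer_type (24, 1);

   and, since 24 <= MAX_INT_CACHED_PREC, a second identical request
   returns the same node from nonstandard_integer_type_cache.  */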
7750
7751 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7752 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7753 is true, reuse such a type that has already been constructed. */
7754
7755 static tree
7756 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7757 {
7758 tree itype = make_node (INTEGER_TYPE);
7759 inchash::hash hstate;
7760
7761 TREE_TYPE (itype) = type;
7762
7763 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7764 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7765
7766 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7767 SET_TYPE_MODE (itype, TYPE_MODE (type));
7768 TYPE_SIZE (itype) = TYPE_SIZE (type);
7769 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7770 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7771 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7772
7773 if (!shared)
7774 return itype;
7775
7776 if ((TYPE_MIN_VALUE (itype)
7777 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7778 || (TYPE_MAX_VALUE (itype)
7779 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7780 {
7781 /* Since we cannot reliably merge this type, we need to compare it using
7782 structural equality checks. */
7783 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7784 return itype;
7785 }
7786
7787 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7788 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7789 hstate.merge_hash (TYPE_HASH (type));
7790 itype = type_hash_canon (hstate.end (), itype);
7791
7792 return itype;
7793 }
7794
7795 /* Wrapper around build_range_type_1 with SHARED set to true. */
7796
7797 tree
7798 build_range_type (tree type, tree lowval, tree highval)
7799 {
7800 return build_range_type_1 (type, lowval, highval, true);
7801 }
7802
7803 /* Wrapper around build_range_type_1 with SHARED set to false. */
7804
7805 tree
7806 build_nonshared_range_type (tree type, tree lowval, tree highval)
7807 {
7808 return build_range_type_1 (type, lowval, highval, false);
7809 }
7810
7811 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7812 MAXVAL should be the maximum value in the domain
7813 (one less than the length of the array).
7814
7815 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7816 We don't enforce this limit, that is up to the caller (e.g. the language front end).
7817 The limit exists because the result is a signed type and we don't handle
7818 sizes that use more than one HOST_WIDE_INT. */
7819
7820 tree
7821 build_index_type (tree maxval)
7822 {
7823 return build_range_type (sizetype, size_zero_node, maxval);
7824 }
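
/* A minimal usage sketch: the domain of a ten-element array, i.e. the
   range [0, 9] in sizetype, would be built as

     tree domain = build_index_type (size_int (9));

   and can then be passed as INDEX_TYPE to build_array_type below.  */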
7825
7826 /* Return true if the debug information for TYPE, a subtype, should be emitted
7827 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7828 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7829 debug info and doesn't reflect the source code. */
7830
7831 bool
7832 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7833 {
7834 tree base_type = TREE_TYPE (type), low, high;
7835
7836 /* Subrange types have a base type which is an integral type. */
7837 if (!INTEGRAL_TYPE_P (base_type))
7838 return false;
7839
7840 /* Get the real bounds of the subtype. */
7841 if (lang_hooks.types.get_subrange_bounds)
7842 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7843 else
7844 {
7845 low = TYPE_MIN_VALUE (type);
7846 high = TYPE_MAX_VALUE (type);
7847 }
7848
7849 /* If the type and its base type have the same representation and the same
7850 name, then the type is not a subrange but a copy of the base type. */
7851 if ((TREE_CODE (base_type) == INTEGER_TYPE
7852 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7853 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7854 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7855 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7856 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7857 return false;
7858
7859 if (lowval)
7860 *lowval = low;
7861 if (highval)
7862 *highval = high;
7863 return true;
7864 }
7865
7866 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7867 and number of elements specified by the range of values of INDEX_TYPE.
7868 If SHARED is true, reuse such a type that has already been constructed. */
7869
7870 static tree
7871 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7872 {
7873 tree t;
7874
7875 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7876 {
7877 error ("arrays of functions are not meaningful");
7878 elt_type = integer_type_node;
7879 }
7880
7881 t = make_node (ARRAY_TYPE);
7882 TREE_TYPE (t) = elt_type;
7883 TYPE_DOMAIN (t) = index_type;
7884 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7885 layout_type (t);
7886
7887 /* If the element type is incomplete at this point we get marked for
7888 structural equality. Do not record these types in the canonical
7889 type hashtable. */
7890 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7891 return t;
7892
7893 if (shared)
7894 {
7895 inchash::hash hstate;
7896 hstate.add_object (TYPE_HASH (elt_type));
7897 if (index_type)
7898 hstate.add_object (TYPE_HASH (index_type));
7899 t = type_hash_canon (hstate.end (), t);
7900 }
7901
7902 if (TYPE_CANONICAL (t) == t)
7903 {
7904 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7905 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7906 SET_TYPE_STRUCTURAL_EQUALITY (t);
7907 else if (TYPE_CANONICAL (elt_type) != elt_type
7908 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7909 TYPE_CANONICAL (t)
7910 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7911 index_type
7912 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7913 shared);
7914 }
7915
7916 return t;
7917 }
7918
7919 /* Wrapper around build_array_type_1 with SHARED set to true. */
7920
7921 tree
7922 build_array_type (tree elt_type, tree index_type)
7923 {
7924 return build_array_type_1 (elt_type, index_type, true);
7925 }
7926
7927 /* Wrapper around build_array_type_1 with SHARED set to false. */
7928
7929 tree
7930 build_nonshared_array_type (tree elt_type, tree index_type)
7931 {
7932 return build_array_type_1 (elt_type, index_type, false);
7933 }
7934
7935 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7936 sizetype. */
7937
7938 tree
7939 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7940 {
7941 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7942 }
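
/* A minimal usage sketch: by the definition above, the following two calls
   build the same representation of "int[10]":

     tree a1 = build_array_type_nelts (integer_type_node, 10);
     tree a2 = build_array_type (integer_type_node,
				 build_index_type (size_int (9)));  */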
7943
7944 /* Strip ARRAY_TYPE wrappers from TYPE until a non-array element type is
7945 found, and return that element type. */
7946
7947 tree
7948 strip_array_types (tree type)
7949 {
7950 while (TREE_CODE (type) == ARRAY_TYPE)
7951 type = TREE_TYPE (type);
7952
7953 return type;
7954 }
7955
7956 /* Computes the canonical argument types from the argument type list
7957 ARGTYPES.
7958
7959 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7960 on entry to this function, or if any of the ARGTYPES are
7961 structural.
7962
7963 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7964 true on entry to this function, or if any of the ARGTYPES are
7965 non-canonical.
7966
7967 Returns a canonical argument list, which may be ARGTYPES when the
7968 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7969 true) or would not differ from ARGTYPES. */
7970
7971 static tree
7972 maybe_canonicalize_argtypes (tree argtypes,
7973 bool *any_structural_p,
7974 bool *any_noncanonical_p)
7975 {
7976 tree arg;
7977 bool any_noncanonical_argtypes_p = false;
7978
7979 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7980 {
7981 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7982 /* Fail gracefully by stating that the type is structural. */
7983 *any_structural_p = true;
7984 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7985 *any_structural_p = true;
7986 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7987 || TREE_PURPOSE (arg))
7988 /* If the argument has a default argument, we consider it
7989 non-canonical even though the type itself is canonical.
7990 That way, different variants of function and method types
7991 with default arguments will all point to the variant with
7992 no defaults as their canonical type. */
7993 any_noncanonical_argtypes_p = true;
7994 }
7995
7996 if (*any_structural_p)
7997 return argtypes;
7998
7999 if (any_noncanonical_argtypes_p)
8000 {
8001 /* Build the canonical list of argument types. */
8002 tree canon_argtypes = NULL_TREE;
8003 bool is_void = false;
8004
8005 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8006 {
8007 if (arg == void_list_node)
8008 is_void = true;
8009 else
8010 canon_argtypes = tree_cons (NULL_TREE,
8011 TYPE_CANONICAL (TREE_VALUE (arg)),
8012 canon_argtypes);
8013 }
8014
8015 canon_argtypes = nreverse (canon_argtypes);
8016 if (is_void)
8017 canon_argtypes = chainon (canon_argtypes, void_list_node);
8018
8019 /* There is a non-canonical type. */
8020 *any_noncanonical_p = true;
8021 return canon_argtypes;
8022 }
8023
8024 /* The canonical argument types are the same as ARGTYPES. */
8025 return argtypes;
8026 }
8027
8028 /* Construct, lay out and return
8029 the type of functions returning type VALUE_TYPE
8030 given arguments of types ARG_TYPES.
8031 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8032 are data type nodes for the arguments of the function.
8033 If such a type has already been constructed, reuse it. */
8034
8035 tree
8036 build_function_type (tree value_type, tree arg_types)
8037 {
8038 tree t;
8039 inchash::hash hstate;
8040 bool any_structural_p, any_noncanonical_p;
8041 tree canon_argtypes;
8042
8043 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8044 {
8045 error ("function return type cannot be function");
8046 value_type = integer_type_node;
8047 }
8048
8049 /* Make a node of the sort we want. */
8050 t = make_node (FUNCTION_TYPE);
8051 TREE_TYPE (t) = value_type;
8052 TYPE_ARG_TYPES (t) = arg_types;
8053
8054 /* If we already have such a type, use the old one. */
8055 hstate.add_object (TYPE_HASH (value_type));
8056 type_hash_list (arg_types, hstate);
8057 t = type_hash_canon (hstate.end (), t);
8058
8059 /* Set up the canonical type. */
8060 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8061 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8062 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8063 &any_structural_p,
8064 &any_noncanonical_p);
8065 if (any_structural_p)
8066 SET_TYPE_STRUCTURAL_EQUALITY (t);
8067 else if (any_noncanonical_p)
8068 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8069 canon_argtypes);
8070
8071 if (!COMPLETE_TYPE_P (t))
8072 layout_type (t);
8073 return t;
8074 }
8075
8076 /* Build a function type. The RETURN_TYPE is the type returned by the
8077 function. If VAARGS is set, no void_type_node is appended to
8078 the list. ARGP must always be terminated by a NULL_TREE. */
8079
8080 static tree
8081 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8082 {
8083 tree t, args, last;
8084
8085 t = va_arg (argp, tree);
8086 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8087 args = tree_cons (NULL_TREE, t, args);
8088
8089 if (vaargs)
8090 {
8091 last = args;
8092 if (args != NULL_TREE)
8093 args = nreverse (args);
8094 gcc_assert (last != void_list_node);
8095 }
8096 else if (args == NULL_TREE)
8097 args = void_list_node;
8098 else
8099 {
8100 last = args;
8101 args = nreverse (args);
8102 TREE_CHAIN (last) = void_list_node;
8103 }
8104 args = build_function_type (return_type, args);
8105
8106 return args;
8107 }
8108
8109 /* Build a function type. The RETURN_TYPE is the type returned by the
8110 function. If additional arguments are provided, they are
8111 additional argument types. The list of argument types must always
8112 be terminated by NULL_TREE. */
8113
8114 tree
8115 build_function_type_list (tree return_type, ...)
8116 {
8117 tree args;
8118 va_list p;
8119
8120 va_start (p, return_type);
8121 args = build_function_type_list_1 (false, return_type, p);
8122 va_end (p);
8123 return args;
8124 }
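
/* A minimal usage sketch: the type of "int f (double, char *)" could be
   built with the argument list terminated by NULL_TREE, e.g.

     tree fntype = build_function_type_list (integer_type_node,
					     double_type_node,
					     build_pointer_type (char_type_node),
					     NULL_TREE);

   Since this is not the varargs variant, void_list_node is appended
   internally to mark the end of the fixed arguments.  */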
8125
8126 /* Build a variable argument function type. The RETURN_TYPE is the
8127 type returned by the function. If additional arguments are provided,
8128 they are additional argument types. The list of argument types must
8129 always be terminated by NULL_TREE. */
8130
8131 tree
8132 build_varargs_function_type_list (tree return_type, ...)
8133 {
8134 tree args;
8135 va_list p;
8136
8137 va_start (p, return_type);
8138 args = build_function_type_list_1 (true, return_type, p);
8139 va_end (p);
8140
8141 return args;
8142 }
8143
8144 /* Build a function type. RETURN_TYPE is the type returned by the
8145 function; VAARGS indicates whether the function takes varargs. The
8146 function takes N named arguments, the types of which are provided in
8147 ARG_TYPES. */
8148
8149 static tree
8150 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8151 tree *arg_types)
8152 {
8153 int i;
8154 tree t = vaargs ? NULL_TREE : void_list_node;
8155
8156 for (i = n - 1; i >= 0; i--)
8157 t = tree_cons (NULL_TREE, arg_types[i], t);
8158
8159 return build_function_type (return_type, t);
8160 }
8161
8162 /* Build a function type. RETURN_TYPE is the type returned by the
8163 function. The function takes N named arguments, the types of which
8164 are provided in ARG_TYPES. */
8165
8166 tree
8167 build_function_type_array (tree return_type, int n, tree *arg_types)
8168 {
8169 return build_function_type_array_1 (false, return_type, n, arg_types);
8170 }
8171
8172 /* Build a variable argument function type. RETURN_TYPE is the type
8173 returned by the function. The function takes N named arguments, the
8174 types of which are provided in ARG_TYPES. */
8175
8176 tree
8177 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8178 {
8179 return build_function_type_array_1 (true, return_type, n, arg_types);
8180 }
8181
8182 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8183 and ARGTYPES (a TREE_LIST) are the return type and argument types
8184 for the method. An implicit additional parameter (of type
8185 pointer-to-BASETYPE) is added to the ARGTYPES. */
8186
8187 tree
8188 build_method_type_directly (tree basetype,
8189 tree rettype,
8190 tree argtypes)
8191 {
8192 tree t;
8193 tree ptype;
8194 inchash::hash hstate;
8195 bool any_structural_p, any_noncanonical_p;
8196 tree canon_argtypes;
8197
8198 /* Make a node of the sort we want. */
8199 t = make_node (METHOD_TYPE);
8200
8201 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8202 TREE_TYPE (t) = rettype;
8203 ptype = build_pointer_type (basetype);
8204
8205 /* The actual arglist for this function includes a "hidden" argument
8206 which is "this". Put it into the list of argument types. */
8207 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8208 TYPE_ARG_TYPES (t) = argtypes;
8209
8210 /* If we already have such a type, use the old one. */
8211 hstate.add_object (TYPE_HASH (basetype));
8212 hstate.add_object (TYPE_HASH (rettype));
8213 type_hash_list (argtypes, hstate);
8214 t = type_hash_canon (hstate.end (), t);
8215
8216 /* Set up the canonical type. */
8217 any_structural_p
8218 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8219 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8220 any_noncanonical_p
8221 = (TYPE_CANONICAL (basetype) != basetype
8222 || TYPE_CANONICAL (rettype) != rettype);
8223 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8224 &any_structural_p,
8225 &any_noncanonical_p);
8226 if (any_structural_p)
8227 SET_TYPE_STRUCTURAL_EQUALITY (t);
8228 else if (any_noncanonical_p)
8229 TYPE_CANONICAL (t)
8230 = build_method_type_directly (TYPE_CANONICAL (basetype),
8231 TYPE_CANONICAL (rettype),
8232 canon_argtypes);
8233 if (!COMPLETE_TYPE_P (t))
8234 layout_type (t);
8235
8236 return t;
8237 }
8238
8239 /* Construct, lay out and return the type of methods belonging to class
8240 BASETYPE and whose arguments and values are described by TYPE.
8241 If that type exists already, reuse it.
8242 TYPE must be a FUNCTION_TYPE node. */
8243
8244 tree
8245 build_method_type (tree basetype, tree type)
8246 {
8247 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8248
8249 return build_method_type_directly (basetype,
8250 TREE_TYPE (type),
8251 TYPE_ARG_TYPES (type));
8252 }
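
/* A minimal usage sketch, assuming KLASS is a previously built RECORD_TYPE:
   the type of a member function "int KLASS::m ()" could be obtained as

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree mtype = build_method_type (klass, fntype);

   The hidden "this" argument, of type pointer-to-KLASS, is prepended to
   TYPE_ARG_TYPES by build_method_type_directly above.  */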
8253
8254 /* Construct, lay out and return the type of offsets to a value
8255 of type TYPE, within an object of type BASETYPE.
8256 If a suitable offset type exists already, reuse it. */
8257
8258 tree
8259 build_offset_type (tree basetype, tree type)
8260 {
8261 tree t;
8262 inchash::hash hstate;
8263
8264 /* Make a node of the sort we want. */
8265 t = make_node (OFFSET_TYPE);
8266
8267 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8268 TREE_TYPE (t) = type;
8269
8270 /* If we already have such a type, use the old one. */
8271 hstate.add_object (TYPE_HASH (basetype));
8272 hstate.add_object (TYPE_HASH (type));
8273 t = type_hash_canon (hstate.end (), t);
8274
8275 if (!COMPLETE_TYPE_P (t))
8276 layout_type (t);
8277
8278 if (TYPE_CANONICAL (t) == t)
8279 {
8280 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8281 || TYPE_STRUCTURAL_EQUALITY_P (type))
8282 SET_TYPE_STRUCTURAL_EQUALITY (t);
8283 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8284 || TYPE_CANONICAL (type) != type)
8285 TYPE_CANONICAL (t)
8286 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8287 TYPE_CANONICAL (type));
8288 }
8289
8290 return t;
8291 }
8292
8293 /* Create a complex type whose components are COMPONENT_TYPE. */
8294
8295 tree
8296 build_complex_type (tree component_type)
8297 {
8298 tree t;
8299 inchash::hash hstate;
8300
8301 gcc_assert (INTEGRAL_TYPE_P (component_type)
8302 || SCALAR_FLOAT_TYPE_P (component_type)
8303 || FIXED_POINT_TYPE_P (component_type));
8304
8305 /* Make a node of the sort we want. */
8306 t = make_node (COMPLEX_TYPE);
8307
8308 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8309
8310 /* If we already have such a type, use the old one. */
8311 hstate.add_object (TYPE_HASH (component_type));
8312 t = type_hash_canon (hstate.end (), t);
8313
8314 if (!COMPLETE_TYPE_P (t))
8315 layout_type (t);
8316
8317 if (TYPE_CANONICAL (t) == t)
8318 {
8319 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8320 SET_TYPE_STRUCTURAL_EQUALITY (t);
8321 else if (TYPE_CANONICAL (component_type) != component_type)
8322 TYPE_CANONICAL (t)
8323 = build_complex_type (TYPE_CANONICAL (component_type));
8324 }
8325
8326 /* We need to create a name, since complex is a fundamental type. */
8327 if (! TYPE_NAME (t))
8328 {
8329 const char *name;
8330 if (component_type == char_type_node)
8331 name = "complex char";
8332 else if (component_type == signed_char_type_node)
8333 name = "complex signed char";
8334 else if (component_type == unsigned_char_type_node)
8335 name = "complex unsigned char";
8336 else if (component_type == short_integer_type_node)
8337 name = "complex short int";
8338 else if (component_type == short_unsigned_type_node)
8339 name = "complex short unsigned int";
8340 else if (component_type == integer_type_node)
8341 name = "complex int";
8342 else if (component_type == unsigned_type_node)
8343 name = "complex unsigned int";
8344 else if (component_type == long_integer_type_node)
8345 name = "complex long int";
8346 else if (component_type == long_unsigned_type_node)
8347 name = "complex long unsigned int";
8348 else if (component_type == long_long_integer_type_node)
8349 name = "complex long long int";
8350 else if (component_type == long_long_unsigned_type_node)
8351 name = "complex long long unsigned int";
8352 else
8353 name = 0;
8354
8355 if (name != 0)
8356 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8357 get_identifier (name), t);
8358 }
8359
8360 return build_qualified_type (t, TYPE_QUALS (component_type));
8361 }
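
/* A minimal usage sketch: the standard complex nodes can be created as

     tree ci = build_complex_type (integer_type_node);
     tree cd = build_complex_type (double_type_node);

   For integer component types such as the first call, a TYPE_DECL named
   e.g. "complex int" is attached by the code above; in all cases the
   result carries the qualifiers of its component type.  */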
8362
8363 /* If TYPE is a real or complex floating-point type and the target
8364 does not directly support arithmetic on TYPE then return the wider
8365 type to be used for arithmetic on TYPE. Otherwise, return
8366 NULL_TREE. */
8367
8368 tree
8369 excess_precision_type (tree type)
8370 {
8371 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8372 {
8373 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8374 switch (TREE_CODE (type))
8375 {
8376 case REAL_TYPE:
8377 switch (flt_eval_method)
8378 {
8379 case 1:
8380 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8381 return double_type_node;
8382 break;
8383 case 2:
8384 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8385 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8386 return long_double_type_node;
8387 break;
8388 default:
8389 gcc_unreachable ();
8390 }
8391 break;
8392 case COMPLEX_TYPE:
8393 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8394 return NULL_TREE;
8395 switch (flt_eval_method)
8396 {
8397 case 1:
8398 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8399 return complex_double_type_node;
8400 break;
8401 case 2:
8402 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8403 || (TYPE_MODE (TREE_TYPE (type))
8404 == TYPE_MODE (double_type_node)))
8405 return complex_long_double_type_node;
8406 break;
8407 default:
8408 gcc_unreachable ();
8409 }
8410 break;
8411 default:
8412 break;
8413 }
8414 }
8415 return NULL_TREE;
8416 }
8417 \f
8418 /* Return OP, stripped of any conversions to wider types as much as is safe.
8419 Converting the value back to OP's type makes a value equivalent to OP.
8420
8421 If FOR_TYPE is nonzero, we return a value which, if converted to
8422 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8423
8424 OP must have integer, real or enumeral type. Pointers are not allowed!
8425
8426 There are some cases where the obvious value we could return
8427 would regenerate to OP if converted to OP's type,
8428 but would not extend like OP to wider types.
8429 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8430 For example, if OP is (unsigned short)(signed char)-1,
8431 we avoid returning (signed char)-1 if FOR_TYPE is int,
8432 even though extending that to an unsigned short would regenerate OP,
8433 since the result of extending (signed char)-1 to (int)
8434 is different from (int) OP. */
8435
8436 tree
8437 get_unwidened (tree op, tree for_type)
8438 {
8439 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8440 tree type = TREE_TYPE (op);
8441 unsigned final_prec
8442 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8443 int uns
8444 = (for_type != 0 && for_type != type
8445 && final_prec > TYPE_PRECISION (type)
8446 && TYPE_UNSIGNED (type));
8447 tree win = op;
8448
8449 while (CONVERT_EXPR_P (op))
8450 {
8451 int bitschange;
8452
8453 /* TYPE_PRECISION on vector types has different meaning
8454 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8455 so avoid them here. */
8456 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8457 break;
8458
8459 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8460 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8461
8462 /* Truncations are many-one so cannot be removed,
8463 unless we are later going to truncate down even further. */
8464 if (bitschange < 0
8465 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8466 break;
8467
8468 /* See what's inside this conversion. If we decide to strip it,
8469 we will set WIN. */
8470 op = TREE_OPERAND (op, 0);
8471
8472 /* If we have not stripped any zero-extensions (uns is 0),
8473 we can strip any kind of extension.
8474 If we have previously stripped a zero-extension,
8475 only zero-extensions can safely be stripped.
8476 Any extension can be stripped if the bits it would produce
8477 are all going to be discarded later by truncating to FOR_TYPE. */
8478
8479 if (bitschange > 0)
8480 {
8481 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8482 win = op;
8483 /* TYPE_UNSIGNED says whether this is a zero-extension.
8484 Let's avoid computing it if it does not affect WIN
8485 and if UNS will not be needed again. */
8486 if ((uns
8487 || CONVERT_EXPR_P (op))
8488 && TYPE_UNSIGNED (TREE_TYPE (op)))
8489 {
8490 uns = 1;
8491 win = op;
8492 }
8493 }
8494 }
8495
8496 /* If we finally reach a constant, see if it fits in FOR_TYPE and,
8497 if so, convert it. */
8498 if (for_type
8499 && TREE_CODE (win) == INTEGER_CST
8500 && TREE_TYPE (win) != for_type
8501 && int_fits_type_p (win, for_type))
8502 win = fold_convert (for_type, win);
8503
8504 return win;
8505 }
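
/* A minimal usage sketch: a caller wanting only conversions that are
   exactly reversible when converting back to the operand's own type can
   pass NULL_TREE for FOR_TYPE, e.g.

     tree narrowed = get_unwidened (op, NULL_TREE);

   where op is assumed to be an expression of integer, real or enumeral
   type from the surrounding context.  */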
8506 \f
8507 /* Return OP or a simpler expression for a narrower value
8508 which can be sign-extended or zero-extended to give back OP.
8509 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8510 or 0 if the value should be sign-extended. */
8511
8512 tree
8513 get_narrower (tree op, int *unsignedp_ptr)
8514 {
8515 int uns = 0;
8516 int first = 1;
8517 tree win = op;
8518 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8519
8520 while (TREE_CODE (op) == NOP_EXPR)
8521 {
8522 int bitschange
8523 = (TYPE_PRECISION (TREE_TYPE (op))
8524 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8525
8526 /* Truncations are many-one so cannot be removed. */
8527 if (bitschange < 0)
8528 break;
8529
8530 /* See what's inside this conversion. If we decide to strip it,
8531 we will set WIN. */
8532
8533 if (bitschange > 0)
8534 {
8535 op = TREE_OPERAND (op, 0);
8536 /* An extension: the outermost one can be stripped,
8537 but remember whether it is zero or sign extension. */
8538 if (first)
8539 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8540 /* Otherwise, if a sign extension has been stripped,
8541 only sign extensions can now be stripped;
8542 if a zero extension has been stripped, only zero-extensions. */
8543 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8544 break;
8545 first = 0;
8546 }
8547 else /* bitschange == 0 */
8548 {
8549 /* A change in nominal type can always be stripped, but we must
8550 preserve the unsignedness. */
8551 if (first)
8552 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8553 first = 0;
8554 op = TREE_OPERAND (op, 0);
8555 /* Keep trying to narrow, but don't assign op to win if it
8556 would turn an integral type into something else. */
8557 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8558 continue;
8559 }
8560
8561 win = op;
8562 }
8563
8564 if (TREE_CODE (op) == COMPONENT_REF
8565 /* Since type_for_size always gives an integer type. */
8566 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8567 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8568 /* Ensure field is laid out already. */
8569 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8570 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8571 {
8572 unsigned HOST_WIDE_INT innerprec
8573 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8574 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8575 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8576 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8577
8578 /* We can get this structure field in a narrower type that fits it,
8579 but the resulting extension to its nominal type (a fullword type)
8580 must satisfy the same conditions as for other extensions.
8581
8582 Do this only for fields that are aligned (not bit-fields),
8583 because when bit-field insns are going to be used there is no
8584 advantage in doing this. */
8585
8586 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8587 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8588 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8589 && type != 0)
8590 {
8591 if (first)
8592 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8593 win = fold_convert (type, op);
8594 }
8595 }
8596
8597 *unsignedp_ptr = uns;
8598 return win;
8599 }
8600 \f
8601 /* Returns true if integer constant C has a value that is permissible
8602 for type TYPE (an INTEGER_TYPE). */
8603
8604 bool
8605 int_fits_type_p (const_tree c, const_tree type)
8606 {
8607 tree type_low_bound, type_high_bound;
8608 bool ok_for_low_bound, ok_for_high_bound;
8609 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8610
8611 retry:
8612 type_low_bound = TYPE_MIN_VALUE (type);
8613 type_high_bound = TYPE_MAX_VALUE (type);
8614
8615 /* If at least one bound of the type is a constant integer, we can check
8616 ourselves and maybe make a decision. If no such decision is possible, but
8617 this type is a subtype, try checking against that. Otherwise, use
8618 fits_to_tree_p, which checks against the precision.
8619
8620 Compute the status for each possibly constant bound, and return false as
8621 soon as one is known not to be satisfied. OK_FOR_LOW_BOUND and
8622 OK_FOR_HIGH_BOUND record whether the corresponding bound is a constant
8623 integer that C is known to satisfy. */
8624
8625 /* Check if c >= type_low_bound. */
8626 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8627 {
8628 if (tree_int_cst_lt (c, type_low_bound))
8629 return false;
8630 ok_for_low_bound = true;
8631 }
8632 else
8633 ok_for_low_bound = false;
8634
8635 /* Check if c <= type_high_bound. */
8636 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8637 {
8638 if (tree_int_cst_lt (type_high_bound, c))
8639 return false;
8640 ok_for_high_bound = true;
8641 }
8642 else
8643 ok_for_high_bound = false;
8644
8645 /* If the constant fits both bounds, the result is known. */
8646 if (ok_for_low_bound && ok_for_high_bound)
8647 return true;
8648
8649 /* Perform some generic filtering which may allow making a decision
8650 even if the bounds are not constant. First, negative integers
8651 never fit in unsigned types. */
8652 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8653 return false;
8654
8655 /* Second, narrower types always fit in wider ones. */
8656 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8657 return true;
8658
8659 /* Third, unsigned integers with top bit set never fit signed types. */
8660 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8661 {
8662 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8663 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8664 {
8665 /* When a tree_cst is converted to a wide-int, the precision
8666 is taken from the type. However, if the precision of the
8667 mode underneath the type is smaller than that, it is
8668 possible that the value will not fit. The test below
8669 fails if any bit is set between the sign bit of the
8670 underlying mode and the top bit of the type. */
8671 if (wi::ne_p (wi::zext (c, prec - 1), c))
8672 return false;
8673 }
8674 else if (wi::neg_p (c))
8675 return false;
8676 }
8677
8678 /* If we haven't been able to decide at this point, there is nothing more we
8679 can check ourselves here. Look at the base type if we have one and it
8680 has the same precision. */
8681 if (TREE_CODE (type) == INTEGER_TYPE
8682 && TREE_TYPE (type) != 0
8683 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8684 {
8685 type = TREE_TYPE (type);
8686 goto retry;
8687 }
8688
8689 /* Or to fits_to_tree_p, if nothing else. */
8690 return wi::fits_to_tree_p (c, type);
8691 }
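
/* A minimal usage sketch, assuming the usual 8-bit signed char:

     tree c300 = build_int_cst (integer_type_node, 300);
     tree c100 = build_int_cst (integer_type_node, 100);
     gcc_assert (!int_fits_type_p (c300, signed_char_type_node));
     gcc_assert (int_fits_type_p (c100, signed_char_type_node));

   300 exceeds the constant upper bound 127, so the first check fails on
   the high-bound test above; 100 satisfies both constant bounds.  */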
8692
8693 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8694 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8695 represented (assuming two's-complement arithmetic) within the bit
8696 precision of the type are returned instead. */
8697
8698 void
8699 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8700 {
8701 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8702 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8703 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8704 else
8705 {
8706 if (TYPE_UNSIGNED (type))
8707 mpz_set_ui (min, 0);
8708 else
8709 {
8710 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8711 wi::to_mpz (mn, min, SIGNED);
8712 }
8713 }
8714
8715 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8716 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8717 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8718 else
8719 {
8720 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8721 wi::to_mpz (mn, max, TYPE_SIGN (type));
8722 }
8723 }
8724
8725 /* Return true if VAR is an automatic variable defined in function FN. */
8726
8727 bool
8728 auto_var_in_fn_p (const_tree var, const_tree fn)
8729 {
8730 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8731 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8732 || TREE_CODE (var) == PARM_DECL)
8733 && ! TREE_STATIC (var))
8734 || TREE_CODE (var) == LABEL_DECL
8735 || TREE_CODE (var) == RESULT_DECL));
8736 }
8737
8738 /* Subprogram of the following function. Called by walk_tree.
8739
8740 Return *TP if it is an automatic variable or parameter of the
8741 function passed in as DATA. */
8742
8743 static tree
8744 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8745 {
8746 tree fn = (tree) data;
8747
8748 if (TYPE_P (*tp))
8749 *walk_subtrees = 0;
8750
8751 else if (DECL_P (*tp)
8752 && auto_var_in_fn_p (*tp, fn))
8753 return *tp;
8754
8755 return NULL_TREE;
8756 }
8757
8758 /* Returns true if T is, contains, or refers to a type with variable
8759 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8760 arguments, but not the return type. If FN is nonzero, only return
8761 true if a modifier of the type or position of FN is a variable or
8762 parameter inside FN.
8763
8764 This concept is more general than that of C99 'variably modified types':
8765 in C99, a struct type is never variably modified because a VLA may not
8766 appear as a structure member. However, in GNU C, code like:
8767
8768 struct S { int i[f()]; };
8769
8770 is valid, and other languages may define similar constructs. */
8771
8772 bool
8773 variably_modified_type_p (tree type, tree fn)
8774 {
8775 tree t;
8776
8777 /* Test if T is either variable (if FN is zero) or an expression containing
8778 a variable in FN. If TYPE isn't gimplified, return true also if
8779 gimplify_one_sizepos would gimplify the expression into a local
8780 variable. */
8781 #define RETURN_TRUE_IF_VAR(T) \
8782 do { tree _t = (T); \
8783 if (_t != NULL_TREE \
8784 && _t != error_mark_node \
8785 && TREE_CODE (_t) != INTEGER_CST \
8786 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8787 && (!fn \
8788 || (!TYPE_SIZES_GIMPLIFIED (type) \
8789 && !is_gimple_sizepos (_t)) \
8790 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8791 return true; } while (0)
8792
8793 if (type == error_mark_node)
8794 return false;
8795
8796 /* If TYPE itself has variable size, it is variably modified. */
8797 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8798 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8799
8800 switch (TREE_CODE (type))
8801 {
8802 case POINTER_TYPE:
8803 case REFERENCE_TYPE:
8804 case VECTOR_TYPE:
8805 if (variably_modified_type_p (TREE_TYPE (type), fn))
8806 return true;
8807 break;
8808
8809 case FUNCTION_TYPE:
8810 case METHOD_TYPE:
8811 /* If TYPE is a function type, it is variably modified if the
8812 return type is variably modified. */
8813 if (variably_modified_type_p (TREE_TYPE (type), fn))
8814 return true;
8815 break;
8816
8817 case INTEGER_TYPE:
8818 case REAL_TYPE:
8819 case FIXED_POINT_TYPE:
8820 case ENUMERAL_TYPE:
8821 case BOOLEAN_TYPE:
8822 /* Scalar types are variably modified if their end points
8823 aren't constant. */
8824 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8825 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8826 break;
8827
8828 case RECORD_TYPE:
8829 case UNION_TYPE:
8830 case QUAL_UNION_TYPE:
8831 /* We can't see if any of the fields are variably-modified by the
8832 definition we normally use, since that would produce infinite
8833 recursion via pointers. */
8834 /* This is variably modified if some field's type is. */
8835 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8836 if (TREE_CODE (t) == FIELD_DECL)
8837 {
8838 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8839 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8840 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8841
8842 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8843 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8844 }
8845 break;
8846
8847 case ARRAY_TYPE:
8848 /* Do not call ourselves to avoid infinite recursion. This is
8849 variably modified if the element type is. */
8850 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8851 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8852 break;
8853
8854 default:
8855 break;
8856 }
8857
8858 /* The current language may have other cases to check, but in general,
8859 all other types are not variably modified. */
8860 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8861
8862 #undef RETURN_TRUE_IF_VAR
8863 }
8864
8865 /* Given a DECL or TYPE, return the scope in which it was declared, or
8866 NULL_TREE if there is no containing scope. */
8867
8868 tree
8869 get_containing_scope (const_tree t)
8870 {
8871 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8872 }
8873
8874 /* Return the innermost context enclosing DECL that is
8875 a FUNCTION_DECL, or zero if none. */
8876
8877 tree
8878 decl_function_context (const_tree decl)
8879 {
8880 tree context;
8881
8882 if (TREE_CODE (decl) == ERROR_MARK)
8883 return 0;
8884
8885 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8886 where we look up the function at runtime. Such functions always take
8887 a first argument of type 'pointer to real context'.
8888
8889 C++ should really be fixed to use DECL_CONTEXT for the real context,
8890 and use something else for the "virtual context". */
8891 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8892 context
8893 = TYPE_MAIN_VARIANT
8894 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8895 else
8896 context = DECL_CONTEXT (decl);
8897
8898 while (context && TREE_CODE (context) != FUNCTION_DECL)
8899 {
8900 if (TREE_CODE (context) == BLOCK)
8901 context = BLOCK_SUPERCONTEXT (context);
8902 else
8903 context = get_containing_scope (context);
8904 }
8905
8906 return context;
8907 }
8908
8909 /* Return the innermost context enclosing DECL that is
8910 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8911 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8912
8913 tree
8914 decl_type_context (const_tree decl)
8915 {
8916 tree context = DECL_CONTEXT (decl);
8917
8918 while (context)
8919 switch (TREE_CODE (context))
8920 {
8921 case NAMESPACE_DECL:
8922 case TRANSLATION_UNIT_DECL:
8923 return NULL_TREE;
8924
8925 case RECORD_TYPE:
8926 case UNION_TYPE:
8927 case QUAL_UNION_TYPE:
8928 return context;
8929
8930 case TYPE_DECL:
8931 case FUNCTION_DECL:
8932 context = DECL_CONTEXT (context);
8933 break;
8934
8935 case BLOCK:
8936 context = BLOCK_SUPERCONTEXT (context);
8937 break;
8938
8939 default:
8940 gcc_unreachable ();
8941 }
8942
8943 return NULL_TREE;
8944 }
8945
8946 /* CALL is a CALL_EXPR. Return the declaration for the function
8947 called, or NULL_TREE if the called function cannot be
8948 determined. */
8949
8950 tree
8951 get_callee_fndecl (const_tree call)
8952 {
8953 tree addr;
8954
8955 if (call == error_mark_node)
8956 return error_mark_node;
8957
8958 /* It's invalid to call this function with anything but a
8959 CALL_EXPR. */
8960 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8961
8962 /* The first operand to the CALL is the address of the function
8963 called. */
8964 addr = CALL_EXPR_FN (call);
8965
8966 /* If there is no function, return early. */
8967 if (addr == NULL_TREE)
8968 return NULL_TREE;
8969
8970 STRIP_NOPS (addr);
8971
8972 /* If this is a readonly function pointer, extract its initial value. */
8973 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8974 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8975 && DECL_INITIAL (addr))
8976 addr = DECL_INITIAL (addr);
8977
8978 /* If the address is just `&f' for some function `f', then we know
8979 that `f' is being called. */
8980 if (TREE_CODE (addr) == ADDR_EXPR
8981 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8982 return TREE_OPERAND (addr, 0);
8983
8984 /* We couldn't figure out what was being called. */
8985 return NULL_TREE;
8986 }
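
/* A minimal usage sketch, assuming some_fndecl is a FUNCTION_DECL and
   some_arg a matching argument tree from the surrounding context: for a
   direct call built with build_call_expr, the callee is recovered again by

     tree call = build_call_expr (some_fndecl, 1, some_arg);
     gcc_assert (get_callee_fndecl (call) == some_fndecl);  */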
8987
8988 /* Print debugging information about tree nodes generated during the compile,
8989 and any language-specific information. */
8990
8991 void
8992 dump_tree_statistics (void)
8993 {
8994 if (GATHER_STATISTICS)
8995 {
8996 int i;
8997 int total_nodes, total_bytes;
8998 fprintf (stderr, "Kind Nodes Bytes\n");
8999 fprintf (stderr, "---------------------------------------\n");
9000 total_nodes = total_bytes = 0;
9001 for (i = 0; i < (int) all_kinds; i++)
9002 {
9003 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9004 tree_node_counts[i], tree_node_sizes[i]);
9005 total_nodes += tree_node_counts[i];
9006 total_bytes += tree_node_sizes[i];
9007 }
9008 fprintf (stderr, "---------------------------------------\n");
9009 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9010 fprintf (stderr, "---------------------------------------\n");
9011 fprintf (stderr, "Code Nodes\n");
9012 fprintf (stderr, "----------------------------\n");
9013 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9014 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9015 tree_code_counts[i]);
9016 fprintf (stderr, "----------------------------\n");
9017 ssanames_print_statistics ();
9018 phinodes_print_statistics ();
9019 }
9020 else
9021 fprintf (stderr, "(No per-node statistics)\n");
9022
9023 print_type_hash_statistics ();
9024 print_debug_expr_statistics ();
9025 print_value_expr_statistics ();
9026 lang_hooks.print_statistics ();
9027 }
9028 \f
9029 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9030
9031 /* Generate a crc32 of the most significant BITS bits of VALUE. */
9032
9033 static unsigned
9034 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9035 {
9036 unsigned ix;
9037
9038 for (ix = bits; ix--; value <<= 1)
9039 {
9040 unsigned feedback;
9041
9042 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9043 chksum <<= 1;
9044 chksum ^= feedback;
9045 }
9046 return chksum;
9047 }
9048
9049 /* Generate a crc32 of a 32-bit unsigned. */
9050
9051 unsigned
9052 crc32_unsigned (unsigned chksum, unsigned value)
9053 {
9054 return crc32_unsigned_bits (chksum, value, 32);
9055 }
9056
9057 /* Generate a crc32 of a byte. */
9058
9059 unsigned
9060 crc32_byte (unsigned chksum, char byte)
9061 {
9062 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9063 }
9064
9065 /* Generate a crc32 of a string. */
9066
9067 unsigned
9068 crc32_string (unsigned chksum, const char *string)
9069 {
9070 do
9071 {
9072 chksum = crc32_byte (chksum, *string);
9073 }
9074 while (*string++);
9075 return chksum;
9076 }
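
/* A minimal usage sketch: callers mix several pieces of data into one
   checksum by chaining the helpers, as get_file_function_name does below,
   e.g.

     unsigned chksum = crc32_string (0, name);
     chksum = crc32_unsigned (chksum, value);

   where name and value stand for caller-supplied data.  */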
9077
9078 /* P is a string that will be used in a symbol. Mask out any characters
9079 that are not valid in that context. */
9080
9081 void
9082 clean_symbol_name (char *p)
9083 {
9084 for (; *p; p++)
9085 if (! (ISALNUM (*p)
9086 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9087 || *p == '$'
9088 #endif
9089 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9090 || *p == '.'
9091 #endif
9092 ))
9093 *p = '_';
9094 }
9095
9096 /* Generate a name for a special-purpose function.
9097 The generated name may need to be unique across the whole link.
9098 Changes to this function may also require corresponding changes to
9099 xstrdup_mask_random.
9100 TYPE is some string to identify the purpose of this function to the
9101 linker or collect2; it must start with an uppercase letter,
9102 one of:
9103 I - for constructors
9104 D - for destructors
9105 N - for C++ anonymous namespaces
9106 F - for DWARF unwind frame information. */
9107
9108 tree
9109 get_file_function_name (const char *type)
9110 {
9111 char *buf;
9112 const char *p;
9113 char *q;
9114
9115 /* If we already have a name we know to be unique, just use that. */
9116 if (first_global_object_name)
9117 p = q = ASTRDUP (first_global_object_name);
9118 /* If the target is handling the constructors/destructors, they
9119 will be local to this file and the name is only necessary for
9120 debugging purposes.
9121 We also assign sub_I and sub_D suffixes to constructors called from
9122 the global static constructors. These are always local. */
9123 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9124 || (strncmp (type, "sub_", 4) == 0
9125 && (type[4] == 'I' || type[4] == 'D')))
9126 {
9127 const char *file = main_input_filename;
9128 if (! file)
9129 file = LOCATION_FILE (input_location);
9130 /* Just use the file's basename, because the full pathname
9131 might be quite long. */
9132 p = q = ASTRDUP (lbasename (file));
9133 }
9134 else
9135 {
9136 /* Otherwise, the name must be unique across the entire link.
9137 We don't have anything that we know to be unique to this translation
9138 unit, so use what we do have and throw in some randomness. */
9139 unsigned len;
9140 const char *name = weak_global_object_name;
9141 const char *file = main_input_filename;
9142
9143 if (! name)
9144 name = "";
9145 if (! file)
9146 file = LOCATION_FILE (input_location);
9147
9148 len = strlen (file);
9149 q = (char *) alloca (9 + 17 + len + 1);
9150 memcpy (q, file, len + 1);
9151
9152 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9153 crc32_string (0, name), get_random_seed (false));
9154
9155 p = q;
9156 }
9157
9158 clean_symbol_name (q);
9159 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9160 + strlen (type));
9161
9162 /* Set up the name of the file-level functions we may need.
9163 Use a global object (which is already required to be unique over
9164 the program) rather than the file name (which imposes extra
9165 constraints). */
9166 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9167
9168 return get_identifier (buf);
9169 }
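
/* A minimal usage sketch: a static-initialization function would typically
   be named via

     tree id = get_file_function_name ("I");

   yielding an identifier of the form "_GLOBAL__I_<name>", where <name> is
   derived from the first global object name, the input file's basename, or
   a randomized fallback, as selected above.  */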
9170 \f
9171 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9172
9173 /* Complain that the tree code of NODE does not match the expected 0
9174 terminated list of trailing codes. The trailing code list can be
9175 empty, for a more vague error message. FILE, LINE, and FUNCTION
9176 are of the caller. */
9177
9178 void
9179 tree_check_failed (const_tree node, const char *file,
9180 int line, const char *function, ...)
9181 {
9182 va_list args;
9183 const char *buffer;
9184 unsigned length = 0;
9185 enum tree_code code;
9186
9187 va_start (args, function);
9188 while ((code = (enum tree_code) va_arg (args, int)))
9189 length += 4 + strlen (get_tree_code_name (code));
9190 va_end (args);
9191 if (length)
9192 {
9193 char *tmp;
9194 va_start (args, function);
9195 length += strlen ("expected ");
9196 buffer = tmp = (char *) alloca (length);
9197 length = 0;
9198 while ((code = (enum tree_code) va_arg (args, int)))
9199 {
9200 const char *prefix = length ? " or " : "expected ";
9201
9202 strcpy (tmp + length, prefix);
9203 length += strlen (prefix);
9204 strcpy (tmp + length, get_tree_code_name (code));
9205 length += strlen (get_tree_code_name (code));
9206 }
9207 va_end (args);
9208 }
9209 else
9210 buffer = "unexpected node";
9211
9212 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9213 buffer, get_tree_code_name (TREE_CODE (node)),
9214 function, trim_filename (file), line);
9215 }
9216
9217 /* Complain that the tree code of NODE matches one of the codes in the
9218 0 terminated list of trailing codes, none of which was expected.
9219 FILE, LINE, and FUNCTION are of the caller. */
9220
9221 void
9222 tree_not_check_failed (const_tree node, const char *file,
9223 int line, const char *function, ...)
9224 {
9225 va_list args;
9226 char *buffer;
9227 unsigned length = 0;
9228 enum tree_code code;
9229
9230 va_start (args, function);
9231 while ((code = (enum tree_code) va_arg (args, int)))
9232 length += 4 + strlen (get_tree_code_name (code));
9233 va_end (args);
9234 va_start (args, function);
9235 buffer = (char *) alloca (length);
9236 length = 0;
9237 while ((code = (enum tree_code) va_arg (args, int)))
9238 {
9239 if (length)
9240 {
9241 strcpy (buffer + length, " or ");
9242 length += 4;
9243 }
9244 strcpy (buffer + length, get_tree_code_name (code));
9245 length += strlen (get_tree_code_name (code));
9246 }
9247 va_end (args);
9248
9249 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9250 buffer, get_tree_code_name (TREE_CODE (node)),
9251 function, trim_filename (file), line);
9252 }
9253
9254 /* Similar to tree_check_failed, except that we check for a class of tree
9255 code, given in CL. */
9256
9257 void
9258 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9259 const char *file, int line, const char *function)
9260 {
9261 internal_error
9262 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9263 TREE_CODE_CLASS_STRING (cl),
9264 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9265 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9266 }
9267
9268 /* Similar to tree_check_failed, except that instead of specifying a
9269 dozen codes, use the knowledge that they're all sequential. */
9270
9271 void
9272 tree_range_check_failed (const_tree node, const char *file, int line,
9273 const char *function, enum tree_code c1,
9274 enum tree_code c2)
9275 {
9276 char *buffer;
9277 unsigned length = 0;
9278 unsigned int c;
9279
9280 for (c = c1; c <= c2; ++c)
9281 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9282
9283 length += strlen ("expected ");
9284 buffer = (char *) alloca (length);
9285 length = 0;
9286
9287 for (c = c1; c <= c2; ++c)
9288 {
9289 const char *prefix = length ? " or " : "expected ";
9290
9291 strcpy (buffer + length, prefix);
9292 length += strlen (prefix);
9293 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9294 length += strlen (get_tree_code_name ((enum tree_code) c));
9295 }
9296
9297 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9298 buffer, get_tree_code_name (TREE_CODE (node)),
9299 function, trim_filename (file), line);
9300 }
9301
9302
9303 /* Similar to tree_check_failed, except that we check that a tree does
9304 not have the specified class, given in CL. */
9305
9306 void
9307 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9308 const char *file, int line, const char *function)
9309 {
9310 internal_error
9311 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9312 TREE_CODE_CLASS_STRING (cl),
9313 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9314 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9315 }
9316
9317
9318 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9319
9320 void
9321 omp_clause_check_failed (const_tree node, const char *file, int line,
9322 const char *function, enum omp_clause_code code)
9323 {
9324 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9325 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9326 function, trim_filename (file), line);
9327 }
9328
9329
9330 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9331
9332 void
9333 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9334 const char *function, enum omp_clause_code c1,
9335 enum omp_clause_code c2)
9336 {
9337 char *buffer;
9338 unsigned length = 0;
9339 unsigned int c;
9340
9341 for (c = c1; c <= c2; ++c)
9342 length += 4 + strlen (omp_clause_code_name[c]);
9343
9344 length += strlen ("expected ");
9345 buffer = (char *) alloca (length);
9346 length = 0;
9347
9348 for (c = c1; c <= c2; ++c)
9349 {
9350 const char *prefix = length ? " or " : "expected ";
9351
9352 strcpy (buffer + length, prefix);
9353 length += strlen (prefix);
9354 strcpy (buffer + length, omp_clause_code_name[c]);
9355 length += strlen (omp_clause_code_name[c]);
9356 }
9357
9358 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9359 buffer, omp_clause_code_name[TREE_CODE (node)],
9360 function, trim_filename (file), line);
9361 }
9362
9363
9364 #undef DEFTREESTRUCT
9365 #define DEFTREESTRUCT(VAL, NAME) NAME,
9366
9367 static const char *ts_enum_names[] = {
9368 #include "treestruct.def"
9369 };
9370 #undef DEFTREESTRUCT
9371
9372 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9373
9374 /* Similar to tree_class_check_failed, except that we check for
9375 whether CODE contains the tree structure identified by EN. */
9376
9377 void
9378 tree_contains_struct_check_failed (const_tree node,
9379 const enum tree_node_structure_enum en,
9380 const char *file, int line,
9381 const char *function)
9382 {
9383 internal_error
9384 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9385 TS_ENUM_NAME (en),
9386 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9387 }
9388
9389
9390 /* Similar to above, except that the check is for the bounds of the
9391 (dynamically sized) vector of elements of a TREE_INT_CST. */
9392
9393 void
9394 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9395 const char *function)
9396 {
9397 internal_error
9398 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9399 idx + 1, len, function, trim_filename (file), line);
9400 }
9401
9402 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9403 (dynamically sized) vector. */
9404
9405 void
9406 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9407 const char *function)
9408 {
9409 internal_error
9410 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9411 idx + 1, len, function, trim_filename (file), line);
9412 }
9413
9414 /* Similar to above, except that the check is for the bounds of the operand
9415 vector of an expression node EXP. */
9416
9417 void
9418 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9419 int line, const char *function)
9420 {
9421 enum tree_code code = TREE_CODE (exp);
9422 internal_error
9423 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9424 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9425 function, trim_filename (file), line);
9426 }
9427
9428 /* Similar to above, except that the check is for the number of
9429 operands of an OMP_CLAUSE node. */
9430
9431 void
9432 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9433 int line, const char *function)
9434 {
9435 internal_error
9436 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9437 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9438 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9439 trim_filename (file), line);
9440 }
9441 #endif /* ENABLE_TREE_CHECKING */
9442 \f
9443 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9444 and mapped to the machine mode MODE. Initialize its fields and build
9445 the information necessary for debugging output. */
9446
9447 static tree
9448 make_vector_type (tree innertype, int nunits, machine_mode mode)
9449 {
9450 tree t;
9451 inchash::hash hstate;
9452
9453 t = make_node (VECTOR_TYPE);
9454 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9455 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9456 SET_TYPE_MODE (t, mode);
9457
9458 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9459 SET_TYPE_STRUCTURAL_EQUALITY (t);
9460 else if (TYPE_CANONICAL (innertype) != innertype
9461 || mode != VOIDmode)
9462 TYPE_CANONICAL (t)
9463 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9464
9465 layout_type (t);
9466
9467 hstate.add_wide_int (VECTOR_TYPE);
9468 hstate.add_wide_int (nunits);
9469 hstate.add_wide_int (mode);
9470 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9471 t = type_hash_canon (hstate.end (), t);
9472
9473 /* We have built a main variant, based on the main variant of the
9474 inner type. Use it to build the variant we return. */
9475 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9476 && TREE_TYPE (t) != innertype)
9477 return build_type_attribute_qual_variant (t,
9478 TYPE_ATTRIBUTES (innertype),
9479 TYPE_QUALS (innertype));
9480
9481 return t;
9482 }
9483
9484 static tree
9485 make_or_reuse_type (unsigned size, int unsignedp)
9486 {
9487 int i;
9488
9489 if (size == INT_TYPE_SIZE)
9490 return unsignedp ? unsigned_type_node : integer_type_node;
9491 if (size == CHAR_TYPE_SIZE)
9492 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9493 if (size == SHORT_TYPE_SIZE)
9494 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9495 if (size == LONG_TYPE_SIZE)
9496 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9497 if (size == LONG_LONG_TYPE_SIZE)
9498 return (unsignedp ? long_long_unsigned_type_node
9499 : long_long_integer_type_node);
9500
9501 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9502 if (size == int_n_data[i].bitsize
9503 && int_n_enabled_p[i])
9504 return (unsignedp ? int_n_trees[i].unsigned_type
9505 : int_n_trees[i].signed_type);
9506
9507 if (unsignedp)
9508 return make_unsigned_type (size);
9509 else
9510 return make_signed_type (size);
9511 }
9512
9513 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9514
9515 static tree
9516 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9517 {
9518 if (satp)
9519 {
9520 if (size == SHORT_FRACT_TYPE_SIZE)
9521 return unsignedp ? sat_unsigned_short_fract_type_node
9522 : sat_short_fract_type_node;
9523 if (size == FRACT_TYPE_SIZE)
9524 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9525 if (size == LONG_FRACT_TYPE_SIZE)
9526 return unsignedp ? sat_unsigned_long_fract_type_node
9527 : sat_long_fract_type_node;
9528 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9529 return unsignedp ? sat_unsigned_long_long_fract_type_node
9530 : sat_long_long_fract_type_node;
9531 }
9532 else
9533 {
9534 if (size == SHORT_FRACT_TYPE_SIZE)
9535 return unsignedp ? unsigned_short_fract_type_node
9536 : short_fract_type_node;
9537 if (size == FRACT_TYPE_SIZE)
9538 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9539 if (size == LONG_FRACT_TYPE_SIZE)
9540 return unsignedp ? unsigned_long_fract_type_node
9541 : long_fract_type_node;
9542 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9543 return unsignedp ? unsigned_long_long_fract_type_node
9544 : long_long_fract_type_node;
9545 }
9546
9547 return make_fract_type (size, unsignedp, satp);
9548 }
9549
9550 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9551
9552 static tree
9553 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9554 {
9555 if (satp)
9556 {
9557 if (size == SHORT_ACCUM_TYPE_SIZE)
9558 return unsignedp ? sat_unsigned_short_accum_type_node
9559 : sat_short_accum_type_node;
9560 if (size == ACCUM_TYPE_SIZE)
9561 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9562 if (size == LONG_ACCUM_TYPE_SIZE)
9563 return unsignedp ? sat_unsigned_long_accum_type_node
9564 : sat_long_accum_type_node;
9565 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9566 return unsignedp ? sat_unsigned_long_long_accum_type_node
9567 : sat_long_long_accum_type_node;
9568 }
9569 else
9570 {
9571 if (size == SHORT_ACCUM_TYPE_SIZE)
9572 return unsignedp ? unsigned_short_accum_type_node
9573 : short_accum_type_node;
9574 if (size == ACCUM_TYPE_SIZE)
9575 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9576 if (size == LONG_ACCUM_TYPE_SIZE)
9577 return unsignedp ? unsigned_long_accum_type_node
9578 : long_accum_type_node;
9579 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9580 return unsignedp ? unsigned_long_long_accum_type_node
9581 : long_long_accum_type_node;
9582 }
9583
9584 return make_accum_type (size, unsignedp, satp);
9585 }
9586
9587
9588 /* Create an atomic variant node for TYPE. This routine is called
9589 during initialization of data types to create the 5 basic atomic
9590 types. The generic build_variant_type function requires these to
9591 already be set up in order to function properly, so cannot be
9592 called from there. If ALIGN is non-zero, then ensure alignment is
9593 overridden to this value. */
9594
9595 static tree
9596 build_atomic_base (tree type, unsigned int align)
9597 {
9598 tree t;
9599
9600 /* Make sure it's not already registered. */
9601 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9602 return t;
9603
9604 t = build_variant_type_copy (type);
9605 set_type_quals (t, TYPE_QUAL_ATOMIC);
9606
9607 if (align)
9608 TYPE_ALIGN (t) = align;
9609
9610 return t;
9611 }
9612
9613 /* Create nodes for all integer types (and error_mark_node) using the sizes
9614 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9615 SHORT_DOUBLE specifies whether double should be of the same precision
9616 as float. */
9617
9618 void
9619 build_common_tree_nodes (bool signed_char, bool short_double)
9620 {
9621 int i;
9622
9623 error_mark_node = make_node (ERROR_MARK);
9624 TREE_TYPE (error_mark_node) = error_mark_node;
9625
9626 initialize_sizetypes ();
9627
9628 /* Define both `signed char' and `unsigned char'. */
9629 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9630 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9631 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9632 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9633
9634 /* Define `char', which is like either `signed char' or `unsigned char'
9635 but not the same as either. */
9636 char_type_node
9637 = (signed_char
9638 ? make_signed_type (CHAR_TYPE_SIZE)
9639 : make_unsigned_type (CHAR_TYPE_SIZE));
9640 TYPE_STRING_FLAG (char_type_node) = 1;
9641
9642 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9643 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9644 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9645 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9646 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9647 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9648 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9649 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9650
9651 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9652 {
9653 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9654 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9655 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9656 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9657
9658 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9659 && int_n_enabled_p[i])
9660 {
9661 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9662 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9663 }
9664 }
9665
9666 /* Define a boolean type. This type only represents boolean values but
9667 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9668 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9669 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9670 TYPE_PRECISION (boolean_type_node) = 1;
9671 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9672
9673 /* Define what type to use for size_t. */
9674 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9675 size_type_node = unsigned_type_node;
9676 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9677 size_type_node = long_unsigned_type_node;
9678 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9679 size_type_node = long_long_unsigned_type_node;
9680 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9681 size_type_node = short_unsigned_type_node;
9682 else
9683 {
9684 int i;
9685
9686 size_type_node = NULL_TREE;
9687 for (i = 0; i < NUM_INT_N_ENTS; i++)
9688 if (int_n_enabled_p[i])
9689 {
9690 char name[50];
9691 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9692
9693 if (strcmp (name, SIZE_TYPE) == 0)
9694 {
9695 size_type_node = int_n_trees[i].unsigned_type;
9696 }
9697 }
9698 if (size_type_node == NULL_TREE)
9699 gcc_unreachable ();
9700 }
9701
9702 /* Fill in the rest of the sized types. Reuse existing type nodes
9703 when possible. */
9704 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9705 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9706 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9707 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9708 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9709
9710 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9711 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9712 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9713 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9714 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9715
9716 /* Don't call build_qualified_type for atomics. That routine does
9717 special processing for atomics, and until they are initialized
9718 it's better not to make that call.
9719
9720 Check to see if there is a target override for atomic types. */
9721
9722 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9723 targetm.atomic_align_for_mode (QImode));
9724 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9725 targetm.atomic_align_for_mode (HImode));
9726 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9727 targetm.atomic_align_for_mode (SImode));
9728 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9729 targetm.atomic_align_for_mode (DImode));
9730 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9731 targetm.atomic_align_for_mode (TImode));
9732
9733 access_public_node = get_identifier ("public");
9734 access_protected_node = get_identifier ("protected");
9735 access_private_node = get_identifier ("private");
9736
9737 /* Define these next since types below may use them. */
9738 integer_zero_node = build_int_cst (integer_type_node, 0);
9739 integer_one_node = build_int_cst (integer_type_node, 1);
9740 integer_three_node = build_int_cst (integer_type_node, 3);
9741 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9742
9743 size_zero_node = size_int (0);
9744 size_one_node = size_int (1);
9745 bitsize_zero_node = bitsize_int (0);
9746 bitsize_one_node = bitsize_int (1);
9747 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9748
9749 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9750 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9751
9752 void_type_node = make_node (VOID_TYPE);
9753 layout_type (void_type_node);
9754
9755 pointer_bounds_type_node = targetm.chkp_bound_type ();
9756
9757 /* We are not going to have real types in C with less than byte alignment,
9758 so we might as well not have any types that claim to have it. */
9759 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9760 TYPE_USER_ALIGN (void_type_node) = 0;
9761
9762 void_node = make_node (VOID_CST);
9763 TREE_TYPE (void_node) = void_type_node;
9764
9765 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9766 layout_type (TREE_TYPE (null_pointer_node));
9767
9768 ptr_type_node = build_pointer_type (void_type_node);
9769 const_ptr_type_node
9770 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9771 fileptr_type_node = ptr_type_node;
9772
9773 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9774
9775 float_type_node = make_node (REAL_TYPE);
9776 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9777 layout_type (float_type_node);
9778
9779 double_type_node = make_node (REAL_TYPE);
9780 if (short_double)
9781 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9782 else
9783 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9784 layout_type (double_type_node);
9785
9786 long_double_type_node = make_node (REAL_TYPE);
9787 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9788 layout_type (long_double_type_node);
9789
9790 float_ptr_type_node = build_pointer_type (float_type_node);
9791 double_ptr_type_node = build_pointer_type (double_type_node);
9792 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9793 integer_ptr_type_node = build_pointer_type (integer_type_node);
9794
9795 /* Fixed size integer types. */
9796 uint16_type_node = make_or_reuse_type (16, 1);
9797 uint32_type_node = make_or_reuse_type (32, 1);
9798 uint64_type_node = make_or_reuse_type (64, 1);
9799
9800 /* Decimal float types. */
9801 dfloat32_type_node = make_node (REAL_TYPE);
9802 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9803 layout_type (dfloat32_type_node);
9804 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9805 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9806
9807 dfloat64_type_node = make_node (REAL_TYPE);
9808 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9809 layout_type (dfloat64_type_node);
9810 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9811 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9812
9813 dfloat128_type_node = make_node (REAL_TYPE);
9814 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9815 layout_type (dfloat128_type_node);
9816 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9817 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9818
9819 complex_integer_type_node = build_complex_type (integer_type_node);
9820 complex_float_type_node = build_complex_type (float_type_node);
9821 complex_double_type_node = build_complex_type (double_type_node);
9822 complex_long_double_type_node = build_complex_type (long_double_type_node);
9823
9824 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9825 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9826 sat_ ## KIND ## _type_node = \
9827 make_sat_signed_ ## KIND ## _type (SIZE); \
9828 sat_unsigned_ ## KIND ## _type_node = \
9829 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9830 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9831 unsigned_ ## KIND ## _type_node = \
9832 make_unsigned_ ## KIND ## _type (SIZE);
9833
9834 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9835 sat_ ## WIDTH ## KIND ## _type_node = \
9836 make_sat_signed_ ## KIND ## _type (SIZE); \
9837 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9838 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9839 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9840 unsigned_ ## WIDTH ## KIND ## _type_node = \
9841 make_unsigned_ ## KIND ## _type (SIZE);
9842
9843 /* Make fixed-point type nodes based on four different widths. */
9844 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9845 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9846 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9847 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9848 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9849
9850 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9851 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9852 NAME ## _type_node = \
9853 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9854 u ## NAME ## _type_node = \
9855 make_or_reuse_unsigned_ ## KIND ## _type \
9856 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9857 sat_ ## NAME ## _type_node = \
9858 make_or_reuse_sat_signed_ ## KIND ## _type \
9859 (GET_MODE_BITSIZE (MODE ## mode)); \
9860 sat_u ## NAME ## _type_node = \
9861 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9862 (GET_MODE_BITSIZE (U ## MODE ## mode));
9863
9864 /* Fixed-point type and mode nodes. */
9865 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9866 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9867 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9868 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9869 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9870 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9871 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9872 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9873 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9874 MAKE_FIXED_MODE_NODE (accum, da, DA)
9875 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9876
9877 {
9878 tree t = targetm.build_builtin_va_list ();
9879
9880 /* Many back-ends define record types without setting TYPE_NAME.
9881 If we copied the record type here, we'd keep the original
9882 record type without a name. This breaks name mangling. So,
9883 don't copy record types and let c_common_nodes_and_builtins()
9884 declare the type to be __builtin_va_list. */
9885 if (TREE_CODE (t) != RECORD_TYPE)
9886 t = build_variant_type_copy (t);
9887
9888 va_list_type_node = t;
9889 }
9890 }
9891
9892 /* Modify DECL for given flags.
9893 TM_PURE attribute is set only on types, so the function will modify
9894 DECL's type when ECF_TM_PURE is used. */
9895
9896 void
9897 set_call_expr_flags (tree decl, int flags)
9898 {
9899 if (flags & ECF_NOTHROW)
9900 TREE_NOTHROW (decl) = 1;
9901 if (flags & ECF_CONST)
9902 TREE_READONLY (decl) = 1;
9903 if (flags & ECF_PURE)
9904 DECL_PURE_P (decl) = 1;
9905 if (flags & ECF_LOOPING_CONST_OR_PURE)
9906 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9907 if (flags & ECF_NOVOPS)
9908 DECL_IS_NOVOPS (decl) = 1;
9909 if (flags & ECF_NORETURN)
9910 TREE_THIS_VOLATILE (decl) = 1;
9911 if (flags & ECF_MALLOC)
9912 DECL_IS_MALLOC (decl) = 1;
9913 if (flags & ECF_RETURNS_TWICE)
9914 DECL_IS_RETURNS_TWICE (decl) = 1;
9915 if (flags & ECF_LEAF)
9916 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9917 NULL, DECL_ATTRIBUTES (decl));
9918 if ((flags & ECF_TM_PURE) && flag_tm)
9919 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9920 /* Looping const or pure is implied by noreturn.
9921 There is currently no way to declare looping const or looping pure alone. */
9922 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9923 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9924 }
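/* Illustrative use (mirroring local_define_builtin below): after
   creating a FUNCTION_DECL one might write

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   which sets TREE_READONLY and TREE_NOTHROW on the decl and attaches
   the "leaf" attribute.  (Sketch only; `decl' stands for any function
   declaration tree.)  */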
9925
9926
9927 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9928
9929 static void
9930 local_define_builtin (const char *name, tree type, enum built_in_function code,
9931 const char *library_name, int ecf_flags)
9932 {
9933 tree decl;
9934
9935 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9936 library_name, NULL_TREE);
9937 set_call_expr_flags (decl, ecf_flags);
9938
9939 set_builtin_decl (code, decl, true);
9940 }
9941
9942 /* Call this function after instantiating all builtins that the language
9943 front end cares about. This will build the rest of the builtins
9944 and internal functions that are relied upon by the tree optimizers and
9945 the middle-end. */
9946
9947 void
9948 build_common_builtin_nodes (void)
9949 {
9950 tree tmp, ftype;
9951 int ecf_flags;
9952
9953 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9954 {
9955 ftype = build_function_type (void_type_node, void_list_node);
9956 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9957 "__builtin_unreachable",
9958 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9959 | ECF_CONST);
9960 }
9961
9962 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9963 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9964 {
9965 ftype = build_function_type_list (ptr_type_node,
9966 ptr_type_node, const_ptr_type_node,
9967 size_type_node, NULL_TREE);
9968
9969 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9970 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9971 "memcpy", ECF_NOTHROW | ECF_LEAF);
9972 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9973 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9974 "memmove", ECF_NOTHROW | ECF_LEAF);
9975 }
9976
9977 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9978 {
9979 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9980 const_ptr_type_node, size_type_node,
9981 NULL_TREE);
9982 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9983 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9984 }
9985
9986 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9987 {
9988 ftype = build_function_type_list (ptr_type_node,
9989 ptr_type_node, integer_type_node,
9990 size_type_node, NULL_TREE);
9991 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9992 "memset", ECF_NOTHROW | ECF_LEAF);
9993 }
9994
9995 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9996 {
9997 ftype = build_function_type_list (ptr_type_node,
9998 size_type_node, NULL_TREE);
9999 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10000 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10001 }
10002
10003 ftype = build_function_type_list (ptr_type_node, size_type_node,
10004 size_type_node, NULL_TREE);
10005 local_define_builtin ("__builtin_alloca_with_align", ftype,
10006 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
10007 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10008
10009 /* If we're checking the stack, `alloca' can throw. */
10010 if (flag_stack_check)
10011 {
10012 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10013 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10014 }
10015
10016 ftype = build_function_type_list (void_type_node,
10017 ptr_type_node, ptr_type_node,
10018 ptr_type_node, NULL_TREE);
10019 local_define_builtin ("__builtin_init_trampoline", ftype,
10020 BUILT_IN_INIT_TRAMPOLINE,
10021 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10022 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10023 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10024 "__builtin_init_heap_trampoline",
10025 ECF_NOTHROW | ECF_LEAF);
10026
10027 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10028 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10029 BUILT_IN_ADJUST_TRAMPOLINE,
10030 "__builtin_adjust_trampoline",
10031 ECF_CONST | ECF_NOTHROW);
10032
10033 ftype = build_function_type_list (void_type_node,
10034 ptr_type_node, ptr_type_node, NULL_TREE);
10035 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10036 BUILT_IN_NONLOCAL_GOTO,
10037 "__builtin_nonlocal_goto",
10038 ECF_NORETURN | ECF_NOTHROW);
10039
10040 ftype = build_function_type_list (void_type_node,
10041 ptr_type_node, ptr_type_node, NULL_TREE);
10042 local_define_builtin ("__builtin_setjmp_setup", ftype,
10043 BUILT_IN_SETJMP_SETUP,
10044 "__builtin_setjmp_setup", ECF_NOTHROW);
10045
10046 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10047 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10048 BUILT_IN_SETJMP_RECEIVER,
10049 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10050
10051 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10052 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10053 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10054
10055 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10056 local_define_builtin ("__builtin_stack_restore", ftype,
10057 BUILT_IN_STACK_RESTORE,
10058 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10059
10060 /* If there's a possibility that we might use the ARM EABI, build the
10061 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10062 if (targetm.arm_eabi_unwinder)
10063 {
10064 ftype = build_function_type_list (void_type_node, NULL_TREE);
10065 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10066 BUILT_IN_CXA_END_CLEANUP,
10067 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10068 }
10069
10070 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10071 local_define_builtin ("__builtin_unwind_resume", ftype,
10072 BUILT_IN_UNWIND_RESUME,
10073 ((targetm_common.except_unwind_info (&global_options)
10074 == UI_SJLJ)
10075 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10076 ECF_NORETURN);
10077
10078 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10079 {
10080 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10081 NULL_TREE);
10082 local_define_builtin ("__builtin_return_address", ftype,
10083 BUILT_IN_RETURN_ADDRESS,
10084 "__builtin_return_address",
10085 ECF_NOTHROW);
10086 }
10087
10088 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10089 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10090 {
10091 ftype = build_function_type_list (void_type_node, ptr_type_node,
10092 ptr_type_node, NULL_TREE);
10093 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10094 local_define_builtin ("__cyg_profile_func_enter", ftype,
10095 BUILT_IN_PROFILE_FUNC_ENTER,
10096 "__cyg_profile_func_enter", 0);
10097 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10098 local_define_builtin ("__cyg_profile_func_exit", ftype,
10099 BUILT_IN_PROFILE_FUNC_EXIT,
10100 "__cyg_profile_func_exit", 0);
10101 }
10102
10103 /* The exception object and filter values from the runtime. The argument
10104 must be zero before exception lowering, i.e. from the front end. After
10105 exception lowering, it will be the region number for the exception
10106 landing pad. These functions are PURE instead of CONST to prevent
10107 them from being hoisted past the exception edge that will initialize
10108 its value in the landing pad. */
10109 ftype = build_function_type_list (ptr_type_node,
10110 integer_type_node, NULL_TREE);
10111 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10112 /* Only use TM_PURE if we have TM language support. */
10113 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10114 ecf_flags |= ECF_TM_PURE;
10115 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10116 "__builtin_eh_pointer", ecf_flags);
10117
10118 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10119 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10120 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10121 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10122
10123 ftype = build_function_type_list (void_type_node,
10124 integer_type_node, integer_type_node,
10125 NULL_TREE);
10126 local_define_builtin ("__builtin_eh_copy_values", ftype,
10127 BUILT_IN_EH_COPY_VALUES,
10128 "__builtin_eh_copy_values", ECF_NOTHROW);
10129
10130 /* Complex multiplication and division. These are handled as builtins
10131 rather than optabs because emit_library_call_value doesn't support
10132 complex. Further, we can do slightly better with folding these
10133 beasties if the real and imaginary parts of the arguments are separate. */
10134 {
10135 int mode;
10136
10137 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10138 {
10139 char mode_name_buf[4], *q;
10140 const char *p;
10141 enum built_in_function mcode, dcode;
10142 tree type, inner_type;
10143 const char *prefix = "__";
10144
10145 if (targetm.libfunc_gnu_prefix)
10146 prefix = "__gnu_";
10147
10148 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10149 if (type == NULL)
10150 continue;
10151 inner_type = TREE_TYPE (type);
10152
10153 ftype = build_function_type_list (type, inner_type, inner_type,
10154 inner_type, inner_type, NULL_TREE);
10155
10156 mcode = ((enum built_in_function)
10157 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10158 dcode = ((enum built_in_function)
10159 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10160
10161 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10162 *q = TOLOWER (*p);
10163 *q = '\0';
10164
10165 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10166 NULL);
10167 local_define_builtin (built_in_names[mcode], ftype, mcode,
10168 built_in_names[mcode],
10169 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10170
10171 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10172 NULL);
10173 local_define_builtin (built_in_names[dcode], ftype, dcode,
10174 built_in_names[dcode],
10175 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10176 }
10177 }
10178
10179 init_internal_fns ();
10180 }
10181
10182 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10183 better way.
10184
10185 If we requested a pointer to a vector, build up the pointers that
10186 we stripped off while looking for the inner type. Similarly for
10187 return values from functions.
10188
10189 The argument TYPE is the top of the chain, and BOTTOM is the
10190 new type which we will point to. */
10191
10192 tree
10193 reconstruct_complex_type (tree type, tree bottom)
10194 {
10195 tree inner, outer;
10196
10197 if (TREE_CODE (type) == POINTER_TYPE)
10198 {
10199 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10200 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10201 TYPE_REF_CAN_ALIAS_ALL (type));
10202 }
10203 else if (TREE_CODE (type) == REFERENCE_TYPE)
10204 {
10205 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10206 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10207 TYPE_REF_CAN_ALIAS_ALL (type));
10208 }
10209 else if (TREE_CODE (type) == ARRAY_TYPE)
10210 {
10211 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10212 outer = build_array_type (inner, TYPE_DOMAIN (type));
10213 }
10214 else if (TREE_CODE (type) == FUNCTION_TYPE)
10215 {
10216 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10217 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10218 }
10219 else if (TREE_CODE (type) == METHOD_TYPE)
10220 {
10221 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10222 /* The build_method_type_directly() routine prepends 'this' to the argument
10223 list, so we must compensate by getting rid of it. */
10224 outer
10225 = build_method_type_directly
10226 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10227 inner,
10228 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10229 }
10230 else if (TREE_CODE (type) == OFFSET_TYPE)
10231 {
10232 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10233 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10234 }
10235 else
10236 return bottom;
10237
10238 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10239 TYPE_QUALS (type));
10240 }
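/* Illustrative sketch: if TYPE is `float **' and BOTTOM is a vector
   type such as one built for V4SFmode, the result is a pointer to a
   pointer to that vector type -- the pointer layers stripped while
   looking for the scalar are rebuilt around the new inner type, with
   the original qualifiers and attributes reapplied at each level.  */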
10241
10242 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10243 the inner type. */
10244 tree
10245 build_vector_type_for_mode (tree innertype, machine_mode mode)
10246 {
10247 int nunits;
10248
10249 switch (GET_MODE_CLASS (mode))
10250 {
10251 case MODE_VECTOR_INT:
10252 case MODE_VECTOR_FLOAT:
10253 case MODE_VECTOR_FRACT:
10254 case MODE_VECTOR_UFRACT:
10255 case MODE_VECTOR_ACCUM:
10256 case MODE_VECTOR_UACCUM:
10257 nunits = GET_MODE_NUNITS (mode);
10258 break;
10259
10260 case MODE_INT:
10261 /* Check that there are no leftover bits. */
10262 gcc_assert (GET_MODE_BITSIZE (mode)
10263 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10264
10265 nunits = GET_MODE_BITSIZE (mode)
10266 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10267 break;
10268
10269 default:
10270 gcc_unreachable ();
10271 }
10272
10273 return make_vector_type (innertype, nunits, mode);
10274 }
10275
10276 /* Similarly, but takes the inner type and number of units, which must be
10277 a power of two. */
10278
10279 tree
10280 build_vector_type (tree innertype, int nunits)
10281 {
10282 return make_vector_type (innertype, nunits, VOIDmode);
10283 }
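/* For example, a caller wanting a four-element float vector type could
   write

     tree v4sf_type = build_vector_type (float_type_node, 4);

   and let layout_type pick the machine mode (V4SFmode where the target
   supports it).  This is an illustrative sketch; `v4sf_type' is just a
   local name.  */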
10284
10285 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10286
10287 tree
10288 build_opaque_vector_type (tree innertype, int nunits)
10289 {
10290 tree t = make_vector_type (innertype, nunits, VOIDmode);
10291 tree cand;
10292 /* We always build the non-opaque variant before the opaque one,
10293 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10294 cand = TYPE_NEXT_VARIANT (t);
10295 if (cand
10296 && TYPE_VECTOR_OPAQUE (cand)
10297 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10298 return cand;
10299 /* Otherwise build a variant type and make sure to queue it after
10300 the non-opaque type. */
10301 cand = build_distinct_type_copy (t);
10302 TYPE_VECTOR_OPAQUE (cand) = true;
10303 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10304 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10305 TYPE_NEXT_VARIANT (t) = cand;
10306 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10307 return cand;
10308 }
10309
10310
10311 /* Given an initializer INIT, return TRUE if INIT is zero or some
10312 aggregate of zeros. Otherwise return FALSE. */
10313 bool
10314 initializer_zerop (const_tree init)
10315 {
10316 tree elt;
10317
10318 STRIP_NOPS (init);
10319
10320 switch (TREE_CODE (init))
10321 {
10322 case INTEGER_CST:
10323 return integer_zerop (init);
10324
10325 case REAL_CST:
10326 /* ??? Note that this is not correct for C4X float formats. There,
10327 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10328 negative exponent. */
10329 return real_zerop (init)
10330 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10331
10332 case FIXED_CST:
10333 return fixed_zerop (init);
10334
10335 case COMPLEX_CST:
10336 return integer_zerop (init)
10337 || (real_zerop (init)
10338 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10339 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10340
10341 case VECTOR_CST:
10342 {
10343 unsigned i;
10344 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10345 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10346 return false;
10347 return true;
10348 }
10349
10350 case CONSTRUCTOR:
10351 {
10352 unsigned HOST_WIDE_INT idx;
10353
10354 if (TREE_CLOBBER_P (init))
10355 return false;
10356 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10357 if (!initializer_zerop (elt))
10358 return false;
10359 return true;
10360 }
10361
10362 case STRING_CST:
10363 {
10364 int i;
10365
10366 /* We need to loop through all elements to handle cases like
10367 "\0" and "\0foobar". */
10368 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10369 if (TREE_STRING_POINTER (init)[i] != '\0')
10370 return false;
10371
10372 return true;
10373 }
10374
10375 default:
10376 return false;
10377 }
10378 }
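/* Illustrative examples: this returns true for the CONSTRUCTOR behind
   `int a[4] = { 0 };' and for a STRING_CST consisting only of NUL
   bytes, but false for a REAL_CST of -0.0, whose bit pattern is not
   all zeros.  */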
10379
10380 /* Check whether vector VEC consists entirely of equal elements and
10381 whether the number of elements corresponds to the type of VEC.
10382 The function returns the first element of the vector,
10383 or NULL_TREE if the vector is not uniform. */
10384 tree
10385 uniform_vector_p (const_tree vec)
10386 {
10387 tree first, t;
10388 unsigned i;
10389
10390 if (vec == NULL_TREE)
10391 return NULL_TREE;
10392
10393 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10394
10395 if (TREE_CODE (vec) == VECTOR_CST)
10396 {
10397 first = VECTOR_CST_ELT (vec, 0);
10398 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10399 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10400 return NULL_TREE;
10401
10402 return first;
10403 }
10404
10405 else if (TREE_CODE (vec) == CONSTRUCTOR)
10406 {
10407 first = error_mark_node;
10408
10409 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10410 {
10411 if (i == 0)
10412 {
10413 first = t;
10414 continue;
10415 }
10416 if (!operand_equal_p (first, t, 0))
10417 return NULL_TREE;
10418 }
10419 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10420 return NULL_TREE;
10421
10422 return first;
10423 }
10424
10425 return NULL_TREE;
10426 }
10427
10428 /* Build an empty statement at location LOC. */
10429
10430 tree
10431 build_empty_stmt (location_t loc)
10432 {
10433 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10434 SET_EXPR_LOCATION (t, loc);
10435 return t;
10436 }
10437
10438
10439 /* Build an OpenMP clause with code CODE. LOC is the location of the
10440 clause. */
10441
10442 tree
10443 build_omp_clause (location_t loc, enum omp_clause_code code)
10444 {
10445 tree t;
10446 int size, length;
10447
10448 length = omp_clause_num_ops[code];
10449 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10450
10451 record_node_allocation_statistics (OMP_CLAUSE, size);
10452
10453 t = (tree) ggc_internal_alloc (size);
10454 memset (t, 0, size);
10455 TREE_SET_CODE (t, OMP_CLAUSE);
10456 OMP_CLAUSE_SET_CODE (t, code);
10457 OMP_CLAUSE_LOCATION (t) = loc;
10458
10459 return t;
10460 }
10461
10462 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10463 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10464 Except for the CODE and operand count field, other storage for the
10465 object is initialized to zeros. */
10466
10467 tree
10468 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10469 {
10470 tree t;
10471 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10472
10473 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10474 gcc_assert (len >= 1);
10475
10476 record_node_allocation_statistics (code, length);
10477
10478 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10479
10480 TREE_SET_CODE (t, code);
10481
10482 /* Can't use TREE_OPERAND to store the length because if checking is
10483 enabled, it will try to check the length before we store it. :-P */
10484 t->exp.operands[0] = build_int_cst (sizetype, len);
10485
10486 return t;
10487 }
10488
10489 /* Helper function for build_call_* functions; build a CALL_EXPR with
10490 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10491 the argument slots. */
10492
10493 static tree
10494 build_call_1 (tree return_type, tree fn, int nargs)
10495 {
10496 tree t;
10497
10498 t = build_vl_exp (CALL_EXPR, nargs + 3);
10499 TREE_TYPE (t) = return_type;
10500 CALL_EXPR_FN (t) = fn;
10501 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10502
10503 return t;
10504 }
10505
10506 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10507 FN and a null static chain slot. NARGS is the number of call arguments
10508 which are specified as "..." arguments. */
10509
10510 tree
10511 build_call_nary (tree return_type, tree fn, int nargs, ...)
10512 {
10513 tree ret;
10514 va_list args;
10515 va_start (args, nargs);
10516 ret = build_call_valist (return_type, fn, nargs, args);
10517 va_end (args);
10518 return ret;
10519 }
10520
10521 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10522 FN and a null static chain slot. NARGS is the number of call arguments
10523 which are specified as a va_list ARGS. */
10524
10525 tree
10526 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10527 {
10528 tree t;
10529 int i;
10530
10531 t = build_call_1 (return_type, fn, nargs);
10532 for (i = 0; i < nargs; i++)
10533 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10534 process_call_operands (t);
10535 return t;
10536 }
10537
10538 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10539 FN and a null static chain slot. NARGS is the number of call arguments
10540 which are specified as a tree array ARGS. */
10541
10542 tree
10543 build_call_array_loc (location_t loc, tree return_type, tree fn,
10544 int nargs, const tree *args)
10545 {
10546 tree t;
10547 int i;
10548
10549 t = build_call_1 (return_type, fn, nargs);
10550 for (i = 0; i < nargs; i++)
10551 CALL_EXPR_ARG (t, i) = args[i];
10552 process_call_operands (t);
10553 SET_EXPR_LOCATION (t, loc);
10554 return t;
10555 }
10556
10557 /* Like build_call_array, but takes a vec. */
10558
10559 tree
10560 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10561 {
10562 tree ret, t;
10563 unsigned int ix;
10564
10565 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10566 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10567 CALL_EXPR_ARG (ret, ix) = t;
10568 process_call_operands (ret);
10569 return ret;
10570 }
10571
10572 /* Conveniently construct a function call expression. FNDECL names the
10573 function to be called and N arguments are passed in the array
10574 ARGARRAY. */
10575
10576 tree
10577 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10578 {
10579 tree fntype = TREE_TYPE (fndecl);
10580 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10581
10582 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10583 }
10584
10585 /* Conveniently construct a function call expression. FNDECL names the
10586 function to be called and the arguments are passed in the vector
10587 VEC. */
10588
10589 tree
10590 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10591 {
10592 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10593 vec_safe_address (vec));
10594 }
10595
10596
10597 /* Conveniently construct a function call expression. FNDECL names the
10598 function to be called, N is the number of arguments, and the "..."
10599 parameters are the argument expressions. */
10600
10601 tree
10602 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10603 {
10604 va_list ap;
10605 tree *argarray = XALLOCAVEC (tree, n);
10606 int i;
10607
10608 va_start (ap, n);
10609 for (i = 0; i < n; i++)
10610 argarray[i] = va_arg (ap, tree);
10611 va_end (ap);
10612 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10613 }
10614
10615 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10616 varargs macros aren't supported by all bootstrap compilers. */
10617
10618 tree
10619 build_call_expr (tree fndecl, int n, ...)
10620 {
10621 va_list ap;
10622 tree *argarray = XALLOCAVEC (tree, n);
10623 int i;
10624
10625 va_start (ap, n);
10626 for (i = 0; i < n; i++)
10627 argarray[i] = va_arg (ap, tree);
10628 va_end (ap);
10629 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10630 }
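/* Illustrative use: to emit a call to the memcpy builtin one might
   write

     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMCPY),
                                  3, dst, src, size);

   where `dst', `src' and `size' are previously built trees of pointer
   and size type.  (Sketch only; the variable names are placeholders.)  */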
10631
10632 /* Build an internal call expression. This is just like CALL_EXPR, except
10633 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10634 internal function call. */
10635
10636 tree
10637 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10638 tree type, int n, ...)
10639 {
10640 va_list ap;
10641 int i;
10642
10643 tree fn = build_call_1 (type, NULL_TREE, n);
10644 va_start (ap, n);
10645 for (i = 0; i < n; i++)
10646 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10647 va_end (ap);
10648 SET_EXPR_LOCATION (fn, loc);
10649 CALL_EXPR_IFN (fn) = ifn;
10650 return fn;
10651 }
10652
10653 /* Create a new constant string literal and return a char* pointer to it.
10654 The STRING_CST value is the LEN characters at STR. */
10655 tree
10656 build_string_literal (int len, const char *str)
10657 {
10658 tree t, elem, index, type;
10659
10660 t = build_string (len, str);
10661 elem = build_type_variant (char_type_node, 1, 0);
10662 index = build_index_type (size_int (len - 1));
10663 type = build_array_type (elem, index);
10664 TREE_TYPE (t) = type;
10665 TREE_CONSTANT (t) = 1;
10666 TREE_READONLY (t) = 1;
10667 TREE_STATIC (t) = 1;
10668
10669 type = build_pointer_type (elem);
10670 t = build1 (ADDR_EXPR, type,
10671 build4 (ARRAY_REF, elem,
10672 t, integer_zero_node, NULL_TREE, NULL_TREE));
10673 return t;
10674 }
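/* For example, the printf folders in builtins.c build a replacement
   format string roughly as

     tree fmt = build_string_literal (strlen (str) + 1, str);

   note that LEN counts the terminating NUL as well.  (Illustrative
   sketch; `str' is a host-side C string.)  */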
10675
10676
10677
10678 /* Return true if T (assumed to be a DECL) must be assigned a memory
10679 location. */
10680
10681 bool
10682 needs_to_live_in_memory (const_tree t)
10683 {
10684 return (TREE_ADDRESSABLE (t)
10685 || is_global_var (t)
10686 || (TREE_CODE (t) == RESULT_DECL
10687 && !DECL_BY_REFERENCE (t)
10688 && aggregate_value_p (t, current_function_decl)));
10689 }
10690
10691 /* Return the value of the constant X, sign-extended. */
10692
10693 HOST_WIDE_INT
10694 int_cst_value (const_tree x)
10695 {
10696 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10697 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10698
10699 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10700 gcc_assert (cst_and_fits_in_hwi (x));
10701
10702 if (bits < HOST_BITS_PER_WIDE_INT)
10703 {
10704 bool negative = ((val >> (bits - 1)) & 1) != 0;
10705 if (negative)
10706 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10707 else
10708 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10709 }
10710
10711 return val;
10712 }
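/* Worked example: for an INTEGER_CST of an 8-bit type whose low word is
   0xff, BITS is 8, the top bit of VAL is set, so VAL is extended with
   ones and the function returns -1.  */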
10713
10714 /* If TYPE is an integral or pointer type, return an integer type with
10715 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10716 if TYPE is already an integer type of signedness UNSIGNEDP. */
10717
10718 tree
10719 signed_or_unsigned_type_for (int unsignedp, tree type)
10720 {
10721 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10722 return type;
10723
10724 if (TREE_CODE (type) == VECTOR_TYPE)
10725 {
10726 tree inner = TREE_TYPE (type);
10727 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10728 if (!inner2)
10729 return NULL_TREE;
10730 if (inner == inner2)
10731 return type;
10732 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10733 }
10734
10735 if (!INTEGRAL_TYPE_P (type)
10736 && !POINTER_TYPE_P (type)
10737 && TREE_CODE (type) != OFFSET_TYPE)
10738 return NULL_TREE;
10739
10740 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10741 }
10742
10743 /* If TYPE is an integral or pointer type, return an integer type with
10744 the same precision which is unsigned, or itself if TYPE is already an
10745 unsigned integer type. */
10746
10747 tree
10748 unsigned_type_for (tree type)
10749 {
10750 return signed_or_unsigned_type_for (1, type);
10751 }
10752
10753 /* If TYPE is an integral or pointer type, return an integer type with
10754 the same precision which is signed, or itself if TYPE is already a
10755 signed integer type. */
10756
10757 tree
10758 signed_type_for (tree type)
10759 {
10760 return signed_or_unsigned_type_for (0, type);
10761 }
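/* Illustrative uses: unsigned_type_for applied to a 64-bit signed
   integer type yields an unsigned integer type of the same precision,
   and applied to a signed vector type it rebuilds the vector with the
   corresponding unsigned element type.  Non-integral, non-pointer
   scalar types yield NULL_TREE.  */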
10762
10763 /* If TYPE is a vector type, return a signed integer vector type with the
10764 same width and number of subparts. Otherwise return boolean_type_node. */
10765
10766 tree
10767 truth_type_for (tree type)
10768 {
10769 if (TREE_CODE (type) == VECTOR_TYPE)
10770 {
10771 tree elem = lang_hooks.types.type_for_size
10772 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10773 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10774 }
10775 else
10776 return boolean_type_node;
10777 }
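/* For example, truth_type_for on a four-element float vector type
   returns an opaque vector of four 32-bit signed integers (the shape
   of a vector comparison result), while any scalar type simply maps to
   boolean_type_node.  (Assumes 32-bit SFmode elements.)  */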
10778
10779 /* Returns the largest value obtainable by casting something in INNER type to
10780 OUTER type. */
10781
10782 tree
10783 upper_bound_in_type (tree outer, tree inner)
10784 {
10785 unsigned int det = 0;
10786 unsigned oprec = TYPE_PRECISION (outer);
10787 unsigned iprec = TYPE_PRECISION (inner);
10788 unsigned prec;
10789
10790 /* Compute a unique number for every combination. */
10791 det |= (oprec > iprec) ? 4 : 0;
10792 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10793 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10794
10795 /* Determine the exponent to use. */
10796 switch (det)
10797 {
10798 case 0:
10799 case 1:
10800 /* oprec <= iprec, outer: signed, inner: don't care. */
10801 prec = oprec - 1;
10802 break;
10803 case 2:
10804 case 3:
10805 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10806 prec = oprec;
10807 break;
10808 case 4:
10809 /* oprec > iprec, outer: signed, inner: signed. */
10810 prec = iprec - 1;
10811 break;
10812 case 5:
10813 /* oprec > iprec, outer: signed, inner: unsigned. */
10814 prec = iprec;
10815 break;
10816 case 6:
10817 /* oprec > iprec, outer: unsigned, inner: signed. */
10818 prec = oprec;
10819 break;
10820 case 7:
10821 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10822 prec = iprec;
10823 break;
10824 default:
10825 gcc_unreachable ();
10826 }
10827
10828 return wide_int_to_tree (outer,
10829 wi::mask (prec, false, TYPE_PRECISION (outer)));
10830 }
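/* Worked example: with OUTER an unsigned 16-bit type and INNER a
   signed 8-bit type, DET is 4|2 = 6 and PREC is OPREC (16), so the
   result is 0xffff -- already reachable by casting (signed char) -1 to
   the outer type.  */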
10831
10832 /* Returns the smallest value obtainable by casting something in INNER type to
10833 OUTER type. */
10834
10835 tree
10836 lower_bound_in_type (tree outer, tree inner)
10837 {
10838 unsigned oprec = TYPE_PRECISION (outer);
10839 unsigned iprec = TYPE_PRECISION (inner);
10840
10841 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10842 and obtain 0. */
10843 if (TYPE_UNSIGNED (outer)
10844 /* If we are widening something of an unsigned type, OUTER type
10845 contains all values of INNER type. In particular, both INNER
10846 and OUTER types have zero in common. */
10847 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10848 return build_int_cst (outer, 0);
10849 else
10850 {
10851 /* If we are widening a signed type to another signed type, we
10852 want to obtain -2^^(iprec-1). If we are keeping the
10853 precision or narrowing to a signed type, we want to obtain
10854 -2^(oprec-1). */
10855 unsigned prec = oprec > iprec ? iprec : oprec;
10856 return wide_int_to_tree (outer,
10857 wi::mask (prec - 1, true,
10858 TYPE_PRECISION (outer)));
10859 }
10860 }
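/* Worked example: narrowing a signed 32-bit INNER to a signed 16-bit
   OUTER gives PREC = 16, so the result is -2^15 = -32768, the most
   negative value representable in OUTER.  */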
10861
10862 /* Return nonzero if two operands that are suitable for PHI nodes are
10863 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10864 SSA_NAME or invariant. Note that this is strictly an optimization.
10865 That is, callers of this function can directly call operand_equal_p
10866 and get the same result, only slower. */
10867
10868 int
10869 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10870 {
10871 if (arg0 == arg1)
10872 return 1;
10873 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10874 return 0;
10875 return operand_equal_p (arg0, arg1, 0);
10876 }
10877
10878 /* Returns the number of zeros at the end of the binary representation of X. */
10879
10880 tree
10881 num_ending_zeros (const_tree x)
10882 {
10883 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10884 }
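/* For instance, for X equal to 40 (binary 101000) this returns the
   constant 3 in the type of X, since wi::ctz counts the trailing zero
   bits.  */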
10885
10886
10887 #define WALK_SUBTREE(NODE) \
10888 do \
10889 { \
10890 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10891 if (result) \
10892 return result; \
10893 } \
10894 while (0)
10895
10896 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10897 to be walked whenever a type is seen in the tree. The rest of the operands
10898 and the return value are as for walk_tree. */
10899
10900 static tree
10901 walk_type_fields (tree type, walk_tree_fn func, void *data,
10902 hash_set<tree> *pset, walk_tree_lh lh)
10903 {
10904 tree result = NULL_TREE;
10905
10906 switch (TREE_CODE (type))
10907 {
10908 case POINTER_TYPE:
10909 case REFERENCE_TYPE:
10910 case VECTOR_TYPE:
10911 /* We have to worry about mutually recursive pointers. These can't
10912 be written in C. They can in Ada. It's pathological, but
10913 there's an ACATS test (c38102a) that checks it. Deal with this
10914 by checking if we're pointing to another pointer, that one
10915 points to another pointer, that one does too, and we have no htab.
10916 If so, get a hash table. We check three levels deep to avoid
10917 the cost of the hash table if we don't need one. */
10918 if (POINTER_TYPE_P (TREE_TYPE (type))
10919 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10920 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10921 && !pset)
10922 {
10923 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10924 func, data);
10925 if (result)
10926 return result;
10927
10928 break;
10929 }
10930
10931 /* ... fall through ... */
10932
10933 case COMPLEX_TYPE:
10934 WALK_SUBTREE (TREE_TYPE (type));
10935 break;
10936
10937 case METHOD_TYPE:
10938 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10939
10940 /* Fall through. */
10941
10942 case FUNCTION_TYPE:
10943 WALK_SUBTREE (TREE_TYPE (type));
10944 {
10945 tree arg;
10946
10947 /* We never want to walk into default arguments. */
10948 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10949 WALK_SUBTREE (TREE_VALUE (arg));
10950 }
10951 break;
10952
10953 case ARRAY_TYPE:
10954 /* Don't follow this node's type if it is a pointer, for fear that
10955 we'll have infinite recursion. If we have a PSET, then we
10956 need not fear. */
10957 if (pset
10958 || (!POINTER_TYPE_P (TREE_TYPE (type))
10959 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10960 WALK_SUBTREE (TREE_TYPE (type));
10961 WALK_SUBTREE (TYPE_DOMAIN (type));
10962 break;
10963
10964 case OFFSET_TYPE:
10965 WALK_SUBTREE (TREE_TYPE (type));
10966 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10967 break;
10968
10969 default:
10970 break;
10971 }
10972
10973 return NULL_TREE;
10974 }
10975
10976 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10977 called with the DATA and the address of each sub-tree. If FUNC returns a
10978 non-NULL value, the traversal is stopped, and the value returned by FUNC
10979 is returned. If PSET is non-NULL it is used to record the nodes visited,
10980 and to avoid visiting a node more than once. */
10981
10982 tree
10983 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10984 hash_set<tree> *pset, walk_tree_lh lh)
10985 {
10986 enum tree_code code;
10987 int walk_subtrees;
10988 tree result;
10989
10990 #define WALK_SUBTREE_TAIL(NODE) \
10991 do \
10992 { \
10993 tp = & (NODE); \
10994 goto tail_recurse; \
10995 } \
10996 while (0)
10997
10998 tail_recurse:
10999 /* Skip empty subtrees. */
11000 if (!*tp)
11001 return NULL_TREE;
11002
11003 /* Don't walk the same tree twice, if the user has requested
11004 that we avoid doing so. */
11005 if (pset && pset->add (*tp))
11006 return NULL_TREE;
11007
11008 /* Call the function. */
11009 walk_subtrees = 1;
11010 result = (*func) (tp, &walk_subtrees, data);
11011
11012 /* If we found something, return it. */
11013 if (result)
11014 return result;
11015
11016 code = TREE_CODE (*tp);
11017
11018 /* Even if we didn't, FUNC may have decided that there was nothing
11019 interesting below this point in the tree. */
11020 if (!walk_subtrees)
11021 {
11022 /* But we still need to check our siblings. */
11023 if (code == TREE_LIST)
11024 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11025 else if (code == OMP_CLAUSE)
11026 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11027 else
11028 return NULL_TREE;
11029 }
11030
11031 if (lh)
11032 {
11033 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11034 if (result || !walk_subtrees)
11035 return result;
11036 }
11037
11038 switch (code)
11039 {
11040 case ERROR_MARK:
11041 case IDENTIFIER_NODE:
11042 case INTEGER_CST:
11043 case REAL_CST:
11044 case FIXED_CST:
11045 case VECTOR_CST:
11046 case STRING_CST:
11047 case BLOCK:
11048 case PLACEHOLDER_EXPR:
11049 case SSA_NAME:
11050 case FIELD_DECL:
11051 case RESULT_DECL:
11052 /* None of these have subtrees other than those already walked
11053 above. */
11054 break;
11055
11056 case TREE_LIST:
11057 WALK_SUBTREE (TREE_VALUE (*tp));
11058 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11059 break;
11060
11061 case TREE_VEC:
11062 {
11063 int len = TREE_VEC_LENGTH (*tp);
11064
11065 if (len == 0)
11066 break;
11067
11068 /* Walk all elements but the first. */
11069 while (--len)
11070 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11071
11072 /* Now walk the first one as a tail call. */
11073 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11074 }
11075
11076 case COMPLEX_CST:
11077 WALK_SUBTREE (TREE_REALPART (*tp));
11078 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11079
11080 case CONSTRUCTOR:
11081 {
11082 unsigned HOST_WIDE_INT idx;
11083 constructor_elt *ce;
11084
11085 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11086 idx++)
11087 WALK_SUBTREE (ce->value);
11088 }
11089 break;
11090
11091 case SAVE_EXPR:
11092 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11093
11094 case BIND_EXPR:
11095 {
11096 tree decl;
11097 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11098 {
11099 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11100 into declarations that are just mentioned, rather than
11101 declared; they don't really belong to this part of the tree.
11102 And, we can see cycles: the initializer for a declaration
11103 can refer to the declaration itself. */
11104 WALK_SUBTREE (DECL_INITIAL (decl));
11105 WALK_SUBTREE (DECL_SIZE (decl));
11106 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11107 }
11108 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11109 }
11110
11111 case STATEMENT_LIST:
11112 {
11113 tree_stmt_iterator i;
11114 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11115 WALK_SUBTREE (*tsi_stmt_ptr (i));
11116 }
11117 break;
11118
11119 case OMP_CLAUSE:
11120 switch (OMP_CLAUSE_CODE (*tp))
11121 {
11122 case OMP_CLAUSE_PRIVATE:
11123 case OMP_CLAUSE_SHARED:
11124 case OMP_CLAUSE_FIRSTPRIVATE:
11125 case OMP_CLAUSE_COPYIN:
11126 case OMP_CLAUSE_COPYPRIVATE:
11127 case OMP_CLAUSE_FINAL:
11128 case OMP_CLAUSE_IF:
11129 case OMP_CLAUSE_NUM_THREADS:
11130 case OMP_CLAUSE_SCHEDULE:
11131 case OMP_CLAUSE_UNIFORM:
11132 case OMP_CLAUSE_DEPEND:
11133 case OMP_CLAUSE_NUM_TEAMS:
11134 case OMP_CLAUSE_THREAD_LIMIT:
11135 case OMP_CLAUSE_DEVICE:
11136 case OMP_CLAUSE_DIST_SCHEDULE:
11137 case OMP_CLAUSE_SAFELEN:
11138 case OMP_CLAUSE_SIMDLEN:
11139 case OMP_CLAUSE__LOOPTEMP_:
11140 case OMP_CLAUSE__SIMDUID_:
11141 case OMP_CLAUSE__CILK_FOR_COUNT_:
11142 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11143 /* FALLTHRU */
11144
11145 case OMP_CLAUSE_NOWAIT:
11146 case OMP_CLAUSE_ORDERED:
11147 case OMP_CLAUSE_DEFAULT:
11148 case OMP_CLAUSE_UNTIED:
11149 case OMP_CLAUSE_MERGEABLE:
11150 case OMP_CLAUSE_PROC_BIND:
11151 case OMP_CLAUSE_INBRANCH:
11152 case OMP_CLAUSE_NOTINBRANCH:
11153 case OMP_CLAUSE_FOR:
11154 case OMP_CLAUSE_PARALLEL:
11155 case OMP_CLAUSE_SECTIONS:
11156 case OMP_CLAUSE_TASKGROUP:
11157 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11158
11159 case OMP_CLAUSE_LASTPRIVATE:
11160 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11161 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11162 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11163
11164 case OMP_CLAUSE_COLLAPSE:
11165 {
11166 int i;
11167 for (i = 0; i < 3; i++)
11168 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11169 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11170 }
11171
11172 case OMP_CLAUSE_LINEAR:
11173 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11174 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11175 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11176 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11177
11178 case OMP_CLAUSE_ALIGNED:
11179 case OMP_CLAUSE_FROM:
11180 case OMP_CLAUSE_TO:
11181 case OMP_CLAUSE_MAP:
11182 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11183 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11184 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11185
11186 case OMP_CLAUSE_REDUCTION:
11187 {
11188 int i;
11189 for (i = 0; i < 4; i++)
11190 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11191 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11192 }
11193
11194 default:
11195 gcc_unreachable ();
11196 }
11197 break;
11198
11199 case TARGET_EXPR:
11200 {
11201 int i, len;
11202
11203 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11204 But we only want to walk them once. */
11205 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11206 for (i = 0; i < len; ++i)
11207 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11208 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11209 }
11210
11211 case DECL_EXPR:
11212 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11213 defining. We only want to walk into these fields of a type in this
11214 case and not in the general case of a mere reference to the type.
11215
11216 The criterion is as follows: if the field can be an expression, it
11217 must be walked only here. This should be in keeping with the fields
11218 that are directly gimplified in gimplify_type_sizes in order for the
11219 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11220 variable-sized types.
11221
11222 Note that DECLs get walked as part of processing the BIND_EXPR. */
11223 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11224 {
11225 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11226 if (TREE_CODE (*type_p) == ERROR_MARK)
11227 return NULL_TREE;
11228
11229 /* Call the function for the type. See if it returns anything or
11230 doesn't want us to continue. If we are to continue, walk both
11231 the normal fields and those for the declaration case. */
11232 result = (*func) (type_p, &walk_subtrees, data);
11233 if (result || !walk_subtrees)
11234 return result;
11235
11236 /* But do not walk a pointed-to type since it may itself need to
11237 be walked in the declaration case if it isn't anonymous. */
11238 if (!POINTER_TYPE_P (*type_p))
11239 {
11240 result = walk_type_fields (*type_p, func, data, pset, lh);
11241 if (result)
11242 return result;
11243 }
11244
11245 /* If this is a record type, also walk the fields. */
11246 if (RECORD_OR_UNION_TYPE_P (*type_p))
11247 {
11248 tree field;
11249
11250 for (field = TYPE_FIELDS (*type_p); field;
11251 field = DECL_CHAIN (field))
11252 {
11253 /* We'd like to look at the type of the field, but we can
11254 easily get infinite recursion. So assume it's pointed
11255 to elsewhere in the tree. Also, ignore things that
11256 aren't fields. */
11257 if (TREE_CODE (field) != FIELD_DECL)
11258 continue;
11259
11260 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11261 WALK_SUBTREE (DECL_SIZE (field));
11262 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11263 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11264 WALK_SUBTREE (DECL_QUALIFIER (field));
11265 }
11266 }
11267
11268 /* Same for scalar types. */
11269 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11270 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11271 || TREE_CODE (*type_p) == INTEGER_TYPE
11272 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11273 || TREE_CODE (*type_p) == REAL_TYPE)
11274 {
11275 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11276 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11277 }
11278
11279 WALK_SUBTREE (TYPE_SIZE (*type_p));
11280 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11281 }
11282 /* FALLTHRU */
11283
11284 default:
11285 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11286 {
11287 int i, len;
11288
11289 /* Walk over all the sub-trees of this operand. */
11290 len = TREE_OPERAND_LENGTH (*tp);
11291
11292 /* Go through the subtrees. We need to do this in forward order so
11293 that the scope of a FOR_EXPR is handled properly. */
11294 if (len)
11295 {
11296 for (i = 0; i < len - 1; ++i)
11297 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11298 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11299 }
11300 }
11301 /* If this is a type, walk the needed fields in the type. */
11302 else if (TYPE_P (*tp))
11303 return walk_type_fields (*tp, func, data, pset, lh);
11304 break;
11305 }
11306
11307 /* We didn't find what we were looking for. */
11308 return NULL_TREE;
11309
11310 #undef WALK_SUBTREE_TAIL
11311 }
11312 #undef WALK_SUBTREE
11313
11314 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11315
11316 tree
11317 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11318 walk_tree_lh lh)
11319 {
11320 tree result;
11321
11322 hash_set<tree> pset;
11323 result = walk_tree_1 (tp, func, data, &pset, lh);
11324 return result;
11325 }
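
/* Illustrative sketch, not part of the original source: a minimal
   walk_tree_fn callback that counts the nodes visited.  EXPR is assumed
   to be some tree the caller already has; the callback signature and the
   walk_tree_without_duplicates wrapper are as declared in tree.h.

     static tree
     count_nodes_r (tree *tp ATTRIBUTE_UNUSED,
                    int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       ++*(unsigned *) data;
       return NULL_TREE;
     }

     unsigned n = 0;
     walk_tree_without_duplicates (&expr, count_nodes_r, &n);

   Returning NULL_TREE keeps the walk going; a non-NULL return terminates
   the walk and becomes the result of walk_tree, and setting *walk_subtrees
   to 0 skips the operands of the current node.  */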
11326
11327
11328 tree
11329 tree_block (tree t)
11330 {
11331 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11332
11333 if (IS_EXPR_CODE_CLASS (c))
11334 return LOCATION_BLOCK (t->exp.locus);
11335 gcc_unreachable ();
11336 return NULL;
11337 }
11338
11339 void
11340 tree_set_block (tree t, tree b)
11341 {
11342 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11343
11344 if (IS_EXPR_CODE_CLASS (c))
11345 {
11346 if (b)
11347 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11348 else
11349 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11350 }
11351 else
11352 gcc_unreachable ();
11353 }
11354
11355 /* Create a nameless artificial label and put it in the current
11356 function context. The label has a location of LOC. Returns the
11357 newly created label. */
11358
11359 tree
11360 create_artificial_label (location_t loc)
11361 {
11362 tree lab = build_decl (loc,
11363 LABEL_DECL, NULL_TREE, void_type_node);
11364
11365 DECL_ARTIFICIAL (lab) = 1;
11366 DECL_IGNORED_P (lab) = 1;
11367 DECL_CONTEXT (lab) = current_function_decl;
11368 return lab;
11369 }
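
/* Illustrative sketch, not part of the original source: lowering code
   that needs a branch target it can emit later might do (LOC and the
   surrounding statement list are assumed to come from the caller):

     tree lab = create_artificial_label (loc);
     tree jump = build1 (GOTO_EXPR, void_type_node, lab);
     tree place = build1 (LABEL_EXPR, void_type_node, lab);

   The GOTO_EXPR transfers control to wherever the LABEL_EXPR is
   eventually placed in the statement stream.  */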
11370
11371 /* Given a tree, try to return a useful variable name that we can use
11372 to prefix a temporary that is being assigned the value of the tree.
11373 I.e. given <temp> = &A, return A. */
11374
11375 const char *
11376 get_name (tree t)
11377 {
11378 tree stripped_decl;
11379
11380 stripped_decl = t;
11381 STRIP_NOPS (stripped_decl);
11382 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11383 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11384 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11385 {
11386 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11387 if (!name)
11388 return NULL;
11389 return IDENTIFIER_POINTER (name);
11390 }
11391 else
11392 {
11393 switch (TREE_CODE (stripped_decl))
11394 {
11395 case ADDR_EXPR:
11396 return get_name (TREE_OPERAND (stripped_decl, 0));
11397 default:
11398 return NULL;
11399 }
11400 }
11401 }
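
/* Illustrative sketch, not part of the original source (DECL is assumed
   to be a named VAR_DECL held by the caller):

     tree addr = build1 (ADDR_EXPR,
                         build_pointer_type (TREE_TYPE (decl)), decl);
     const char *name = get_name (addr);

   NAME ends up pointing at IDENTIFIER_POINTER (DECL_NAME (decl)).  For an
   anonymous temporary or an SSA name without an identifier the result is
   NULL, so callers must be prepared to fall back to a generic prefix.  */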
11402
11403 /* Return true if TYPE has a variable argument list. */
11404
11405 bool
11406 stdarg_p (const_tree fntype)
11407 {
11408 function_args_iterator args_iter;
11409 tree n = NULL_TREE, t;
11410
11411 if (!fntype)
11412 return false;
11413
11414 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11415 {
11416 n = t;
11417 }
11418
11419 return n != NULL_TREE && n != void_type_node;
11420 }
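
/* Illustrative sketch, not part of the original source: for the function
   type of

     int f (int, ...);

   the argument list does not end with void_type_node, so stdarg_p returns
   true; for "int g (int);" the list is terminated by void_type_node and
   the result is false, and for an unprototyped "int h ();" the list is
   empty, N stays NULL_TREE and the result is again false.  */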
11421
11422 /* Return true if TYPE has a prototype. */
11423
11424 bool
11425 prototype_p (tree fntype)
11426 {
11427 tree t;
11428
11429 gcc_assert (fntype != NULL_TREE);
11430
11431 t = TYPE_ARG_TYPES (fntype);
11432 return (t != NULL_TREE);
11433 }
11434
11435 /* If BLOCK is inlined from an __attribute__((__artificial__))
11436 routine, return a pointer to the location from which it has been
11437 called. */
11438 location_t *
11439 block_nonartificial_location (tree block)
11440 {
11441 location_t *ret = NULL;
11442
11443 while (block && TREE_CODE (block) == BLOCK
11444 && BLOCK_ABSTRACT_ORIGIN (block))
11445 {
11446 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11447
11448 while (TREE_CODE (ao) == BLOCK
11449 && BLOCK_ABSTRACT_ORIGIN (ao)
11450 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11451 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11452
11453 if (TREE_CODE (ao) == FUNCTION_DECL)
11454 {
11455 /* If AO is an artificial inline, point RET to the
11456 call site locus at which it has been inlined and continue
11457 the loop, in case AO's caller is also an artificial
11458 inline. */
11459 if (DECL_DECLARED_INLINE_P (ao)
11460 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11461 ret = &BLOCK_SOURCE_LOCATION (block);
11462 else
11463 break;
11464 }
11465 else if (TREE_CODE (ao) != BLOCK)
11466 break;
11467
11468 block = BLOCK_SUPERCONTEXT (block);
11469 }
11470 return ret;
11471 }
11472
11473
11474 /* If EXP is inlined from an __attribute__((__artificial__))
11475 function, return the location of the original call expression. */
11476
11477 location_t
11478 tree_nonartificial_location (tree exp)
11479 {
11480 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11481
11482 if (loc)
11483 return *loc;
11484 else
11485 return EXPR_LOCATION (exp);
11486 }
11487
11488
11489 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11490 nodes. */
11491
11492 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11493
11494 static hashval_t
11495 cl_option_hash_hash (const void *x)
11496 {
11497 const_tree const t = (const_tree) x;
11498 const char *p;
11499 size_t i;
11500 size_t len = 0;
11501 hashval_t hash = 0;
11502
11503 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11504 {
11505 p = (const char *)TREE_OPTIMIZATION (t);
11506 len = sizeof (struct cl_optimization);
11507 }
11508
11509 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11510 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11511
11512 else
11513 gcc_unreachable ();
11514
11515 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11516 something else. */
11517 for (i = 0; i < len; i++)
11518 if (p[i])
11519 hash = (hash << 4) ^ ((i << 2) | p[i]);
11520
11521 return hash;
11522 }
11523
11524 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11525 TARGET_OPTION_NODE tree node) is the same as the value represented
11526 by *Y. */
11527
11528 static int
11529 cl_option_hash_eq (const void *x, const void *y)
11530 {
11531 const_tree const xt = (const_tree) x;
11532 const_tree const yt = (const_tree) y;
11533 const char *xp;
11534 const char *yp;
11535 size_t len;
11536
11537 if (TREE_CODE (xt) != TREE_CODE (yt))
11538 return 0;
11539
11540 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11541 {
11542 xp = (const char *)TREE_OPTIMIZATION (xt);
11543 yp = (const char *)TREE_OPTIMIZATION (yt);
11544 len = sizeof (struct cl_optimization);
11545 }
11546
11547 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11548 {
11549 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11550 TREE_TARGET_OPTION (yt));
11551 }
11552
11553 else
11554 gcc_unreachable ();
11555
11556 return (memcmp (xp, yp, len) == 0);
11557 }
11558
11559 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11560
11561 tree
11562 build_optimization_node (struct gcc_options *opts)
11563 {
11564 tree t;
11565 void **slot;
11566
11567 /* Use the cache of optimization nodes. */
11568
11569 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11570 opts);
11571
11572 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11573 t = (tree) *slot;
11574 if (!t)
11575 {
11576 /* Insert this one into the hash table. */
11577 t = cl_optimization_node;
11578 *slot = t;
11579
11580 /* Make a new node for next time round. */
11581 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11582 }
11583
11584 return t;
11585 }
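
/* Illustrative sketch, not part of the original source: a handler that
   has just adjusted global_options (say for an optimize attribute) might
   record the resulting option set on a function declaration; FNDECL is
   assumed to be a FUNCTION_DECL supplied by the caller:

     tree optnode = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = optnode;

   Because the nodes are interned in cl_option_hash_table, two identical
   option sets share one OPTIMIZATION_NODE.  */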
11586
11587 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11588
11589 tree
11590 build_target_option_node (struct gcc_options *opts)
11591 {
11592 tree t;
11593 void **slot;
11594
11595 /* Use the cache of optimization nodes. */
11596
11597 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11598 opts);
11599
11600 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11601 t = (tree) *slot;
11602 if (!t)
11603 {
11604 /* Insert this one into the hash table. */
11605 t = cl_target_option_node;
11606 *slot = t;
11607
11608 /* Make a new node for next time round. */
11609 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11610 }
11611
11612 return t;
11613 }
11614
11615 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11616 Called through htab_traverse. */
11617
11618 static int
11619 prepare_target_option_node_for_pch (void **slot, void *)
11620 {
11621 tree node = (tree) *slot;
11622 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11623 TREE_TARGET_GLOBALS (node) = NULL;
11624 return 1;
11625 }
11626
11627 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11628 so that they aren't saved during PCH writing. */
11629
11630 void
11631 prepare_target_option_nodes_for_pch (void)
11632 {
11633 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11634 NULL);
11635 }
11636
11637 /* Determine the "ultimate origin" of a block. The block may be an inlined
11638 instance of an inlined instance of a block which is local to an inline
11639 function, so we have to trace all of the way back through the origin chain
11640 to find out what sort of node actually served as the original seed for the
11641 given block. */
11642
11643 tree
11644 block_ultimate_origin (const_tree block)
11645 {
11646 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11647
11648 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11649 we're trying to output the abstract instance of this function. */
11650 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11651 return NULL_TREE;
11652
11653 if (immediate_origin == NULL_TREE)
11654 return NULL_TREE;
11655 else
11656 {
11657 tree ret_val;
11658 tree lookahead = immediate_origin;
11659
11660 do
11661 {
11662 ret_val = lookahead;
11663 lookahead = (TREE_CODE (ret_val) == BLOCK
11664 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11665 }
11666 while (lookahead != NULL && lookahead != ret_val);
11667
11668 /* The block's abstract origin chain may not be the *ultimate* origin of
11669 the block. It could lead to a DECL that has an abstract origin set.
11670 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11671 will give us if it has one). Note that DECL's abstract origins are
11672 supposed to be the most distant ancestor (or so decl_ultimate_origin
11673 claims), so we don't need to loop following the DECL origins. */
11674 if (DECL_P (ret_val))
11675 return DECL_ORIGIN (ret_val);
11676
11677 return ret_val;
11678 }
11679 }
11680
11681 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11682 no instruction. */
11683
11684 bool
11685 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11686 {
11687 /* Use precision rather than machine mode when we can, which gives
11688 the correct answer even for submode (bit-field) types. */
11689 if ((INTEGRAL_TYPE_P (outer_type)
11690 || POINTER_TYPE_P (outer_type)
11691 || TREE_CODE (outer_type) == OFFSET_TYPE)
11692 && (INTEGRAL_TYPE_P (inner_type)
11693 || POINTER_TYPE_P (inner_type)
11694 || TREE_CODE (inner_type) == OFFSET_TYPE))
11695 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11696
11697 /* Otherwise fall back on comparing machine modes (e.g. for
11698 aggregate types, floats). */
11699 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11700 }
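
/* Illustrative sketch, not part of the original source: on a typical
   target where int and unsigned int are both 32 bits,

     tree_nop_conversion_p (unsigned_type_node, integer_type_node)

   is true because the precisions match, while a conversion from
   short_integer_type_node to integer_type_node changes the precision and
   is therefore not a nop.  */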
11701
11702 /* Return true iff conversion in EXP generates no instruction. Mark
11703 it inline so that we fully inline into the stripping functions even
11704 though we have two uses of this function. */
11705
11706 static inline bool
11707 tree_nop_conversion (const_tree exp)
11708 {
11709 tree outer_type, inner_type;
11710
11711 if (!CONVERT_EXPR_P (exp)
11712 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11713 return false;
11714 if (TREE_OPERAND (exp, 0) == error_mark_node)
11715 return false;
11716
11717 outer_type = TREE_TYPE (exp);
11718 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11719
11720 if (!inner_type)
11721 return false;
11722
11723 return tree_nop_conversion_p (outer_type, inner_type);
11724 }
11725
11726 /* Return true iff conversion in EXP generates no instruction. Don't
11727 consider conversions changing the signedness. */
11728
11729 static bool
11730 tree_sign_nop_conversion (const_tree exp)
11731 {
11732 tree outer_type, inner_type;
11733
11734 if (!tree_nop_conversion (exp))
11735 return false;
11736
11737 outer_type = TREE_TYPE (exp);
11738 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11739
11740 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11741 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11742 }
11743
11744 /* Strip conversions from EXP according to tree_nop_conversion and
11745 return the resulting expression. */
11746
11747 tree
11748 tree_strip_nop_conversions (tree exp)
11749 {
11750 while (tree_nop_conversion (exp))
11751 exp = TREE_OPERAND (exp, 0);
11752 return exp;
11753 }
11754
11755 /* Strip conversions from EXP according to tree_sign_nop_conversion
11756 and return the resulting expression. */
11757
11758 tree
11759 tree_strip_sign_nop_conversions (tree exp)
11760 {
11761 while (tree_sign_nop_conversion (exp))
11762 exp = TREE_OPERAND (exp, 0);
11763 return exp;
11764 }
11765
11766 /* Avoid any floating point extensions from EXP. */
11767 tree
11768 strip_float_extensions (tree exp)
11769 {
11770 tree sub, expt, subt;
11771
11772 /* For a floating point constant, look up the narrowest type that can hold
11773 it properly and handle it like (type)(narrowest_type)constant.
11774 This way we can optimize for instance a=a*2.0 where "a" is float
11775 but 2.0 is a double constant. */
11776 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11777 {
11778 REAL_VALUE_TYPE orig;
11779 tree type = NULL;
11780
11781 orig = TREE_REAL_CST (exp);
11782 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11783 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11784 type = float_type_node;
11785 else if (TYPE_PRECISION (TREE_TYPE (exp))
11786 > TYPE_PRECISION (double_type_node)
11787 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11788 type = double_type_node;
11789 if (type)
11790 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11791 }
11792
11793 if (!CONVERT_EXPR_P (exp))
11794 return exp;
11795
11796 sub = TREE_OPERAND (exp, 0);
11797 subt = TREE_TYPE (sub);
11798 expt = TREE_TYPE (exp);
11799
11800 if (!FLOAT_TYPE_P (subt))
11801 return exp;
11802
11803 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11804 return exp;
11805
11806 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11807 return exp;
11808
11809 return strip_float_extensions (sub);
11810 }
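
/* Illustrative sketch, not part of the original source: for the C
   expression a * 2.0 where "a" is a float, the multiplication is written
   in double, but

     strip_float_extensions on (double) a returns "a" itself, and
     strip_float_extensions on the double REAL_CST 2.0 returns a float
     REAL_CST, since 2.0 truncates to float exactly,

   which lets later folding carry out the multiplication in float.  */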
11811
11812 /* Strip out all handled components that produce invariant
11813 offsets. */
11814
11815 const_tree
11816 strip_invariant_refs (const_tree op)
11817 {
11818 while (handled_component_p (op))
11819 {
11820 switch (TREE_CODE (op))
11821 {
11822 case ARRAY_REF:
11823 case ARRAY_RANGE_REF:
11824 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11825 || TREE_OPERAND (op, 2) != NULL_TREE
11826 || TREE_OPERAND (op, 3) != NULL_TREE)
11827 return NULL;
11828 break;
11829
11830 case COMPONENT_REF:
11831 if (TREE_OPERAND (op, 2) != NULL_TREE)
11832 return NULL;
11833 break;
11834
11835 default:;
11836 }
11837 op = TREE_OPERAND (op, 0);
11838 }
11839
11840 return op;
11841 }
11842
11843 static GTY(()) tree gcc_eh_personality_decl;
11844
11845 /* Return the GCC personality function decl. */
11846
11847 tree
11848 lhd_gcc_personality (void)
11849 {
11850 if (!gcc_eh_personality_decl)
11851 gcc_eh_personality_decl = build_personality_function ("gcc");
11852 return gcc_eh_personality_decl;
11853 }
11854
11855 /* TARGET is the call target of a GIMPLE call statement
11856 (obtained by gimple_call_fn). Return true if it is an
11857 OBJ_TYPE_REF representing a virtual call to a C++ method.
11858 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11859 through a cast, where the middle-end devirtualization machinery
11860 can't apply.) */
11861
11862 bool
11863 virtual_method_call_p (tree target)
11864 {
11865 if (TREE_CODE (target) != OBJ_TYPE_REF)
11866 return false;
11867 target = TREE_TYPE (target);
11868 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11869 target = TREE_TYPE (target);
11870 if (TREE_CODE (target) == FUNCTION_TYPE)
11871 return false;
11872 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11873 return true;
11874 }
11875
11876 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11877
11878 tree
11879 obj_type_ref_class (tree ref)
11880 {
11881 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11882 ref = TREE_TYPE (ref);
11883 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11884 ref = TREE_TYPE (ref);
11885 /* We look for the type THIS points to. ObjC also builds
11886 OBJ_TYPE_REF for non-method calls; their first parameter
11887 ID, however, also corresponds to the class type. */
11888 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11889 || TREE_CODE (ref) == FUNCTION_TYPE);
11890 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11891 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11892 return TREE_TYPE (ref);
11893 }
11894
11895 /* Return true if T is in anonymous namespace. */
11896
11897 bool
11898 type_in_anonymous_namespace_p (const_tree t)
11899 {
11900 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11901 builtin types; those have a NULL TYPE_CONTEXT. */
11902 if (!TYPE_CONTEXT (t))
11903 return false;
11904 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11905 }
11906
11907 /* Try to find a base info of BINFO that would have its field decl at offset
11908 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11909 found, return it; otherwise return NULL_TREE. */
11910
11911 tree
11912 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11913 {
11914 tree type = BINFO_TYPE (binfo);
11915
11916 while (true)
11917 {
11918 HOST_WIDE_INT pos, size;
11919 tree fld;
11920 int i;
11921
11922 if (types_same_for_odr (type, expected_type))
11923 return binfo;
11924 if (offset < 0)
11925 return NULL_TREE;
11926
11927 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11928 {
11929 if (TREE_CODE (fld) != FIELD_DECL)
11930 continue;
11931
11932 pos = int_bit_position (fld);
11933 size = tree_to_uhwi (DECL_SIZE (fld));
11934 if (pos <= offset && (pos + size) > offset)
11935 break;
11936 }
11937 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11938 return NULL_TREE;
11939
11940 if (!DECL_ARTIFICIAL (fld))
11941 {
11942 binfo = TYPE_BINFO (TREE_TYPE (fld));
11943 if (!binfo)
11944 return NULL_TREE;
11945 }
11946 /* Offset 0 indicates the primary base, whose vtable contents are
11947 represented in the binfo for the derived class. */
11948 else if (offset != 0)
11949 {
11950 tree base_binfo, binfo2 = binfo;
11951
11952 /* Find the BINFO corresponding to FLD. This is made a bit harder
11953 by the fact that with virtual inheritance we may need to walk down
11954 the non-virtual inheritance chain. */
11955 while (true)
11956 {
11957 tree containing_binfo = NULL, found_binfo = NULL;
11958 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11959 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11960 {
11961 found_binfo = base_binfo;
11962 break;
11963 }
11964 else
11965 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11966 - tree_to_shwi (BINFO_OFFSET (binfo)))
11967 * BITS_PER_UNIT < pos
11968 /* Rule out types with no virtual methods, or we can get confused
11969 here by zero-sized bases. */
11970 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11971 && (!containing_binfo
11972 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11973 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11974 containing_binfo = base_binfo;
11975 if (found_binfo)
11976 {
11977 binfo = found_binfo;
11978 break;
11979 }
11980 if (!containing_binfo)
11981 return NULL_TREE;
11982 binfo2 = containing_binfo;
11983 }
11984 }
11985
11986 type = TREE_TYPE (fld);
11987 offset -= pos;
11988 }
11989 }
11990
11991 /* Returns true if X is a typedef decl. */
11992
11993 bool
11994 is_typedef_decl (tree x)
11995 {
11996 return (x && TREE_CODE (x) == TYPE_DECL
11997 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11998 }
11999
12000 /* Returns true iff TYPE is a type variant created for a typedef. */
12001
12002 bool
12003 typedef_variant_p (tree type)
12004 {
12005 return is_typedef_decl (TYPE_NAME (type));
12006 }
12007
12008 /* Warn about a use of an identifier which was marked deprecated. */
12009 void
12010 warn_deprecated_use (tree node, tree attr)
12011 {
12012 const char *msg;
12013
12014 if (node == 0 || !warn_deprecated_decl)
12015 return;
12016
12017 if (!attr)
12018 {
12019 if (DECL_P (node))
12020 attr = DECL_ATTRIBUTES (node);
12021 else if (TYPE_P (node))
12022 {
12023 tree decl = TYPE_STUB_DECL (node);
12024 if (decl)
12025 attr = lookup_attribute ("deprecated",
12026 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12027 }
12028 }
12029
12030 if (attr)
12031 attr = lookup_attribute ("deprecated", attr);
12032
12033 if (attr)
12034 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12035 else
12036 msg = NULL;
12037
12038 bool w;
12039 if (DECL_P (node))
12040 {
12041 if (msg)
12042 w = warning (OPT_Wdeprecated_declarations,
12043 "%qD is deprecated: %s", node, msg);
12044 else
12045 w = warning (OPT_Wdeprecated_declarations,
12046 "%qD is deprecated", node);
12047 if (w)
12048 inform (DECL_SOURCE_LOCATION (node), "declared here");
12049 }
12050 else if (TYPE_P (node))
12051 {
12052 tree what = NULL_TREE;
12053 tree decl = TYPE_STUB_DECL (node);
12054
12055 if (TYPE_NAME (node))
12056 {
12057 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12058 what = TYPE_NAME (node);
12059 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12060 && DECL_NAME (TYPE_NAME (node)))
12061 what = DECL_NAME (TYPE_NAME (node));
12062 }
12063
12064 if (decl)
12065 {
12066 if (what)
12067 {
12068 if (msg)
12069 w = warning (OPT_Wdeprecated_declarations,
12070 "%qE is deprecated: %s", what, msg);
12071 else
12072 w = warning (OPT_Wdeprecated_declarations,
12073 "%qE is deprecated", what);
12074 }
12075 else
12076 {
12077 if (msg)
12078 w = warning (OPT_Wdeprecated_declarations,
12079 "type is deprecated: %s", msg);
12080 else
12081 w = warning (OPT_Wdeprecated_declarations,
12082 "type is deprecated");
12083 }
12084 if (w)
12085 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12086 }
12087 else
12088 {
12089 if (what)
12090 {
12091 if (msg)
12092 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12093 what, msg);
12094 else
12095 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12096 }
12097 else
12098 {
12099 if (msg)
12100 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12101 msg);
12102 else
12103 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12104 }
12105 }
12106 }
12107 }
12108
12109 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12110 somewhere in it. */
12111
12112 bool
12113 contains_bitfld_component_ref_p (const_tree ref)
12114 {
12115 while (handled_component_p (ref))
12116 {
12117 if (TREE_CODE (ref) == COMPONENT_REF
12118 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12119 return true;
12120 ref = TREE_OPERAND (ref, 0);
12121 }
12122
12123 return false;
12124 }
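
/* Illustrative sketch, not part of the original source: given

     struct s { int flag : 1; int word; } x;

   the reference x.flag is a COMPONENT_REF whose FIELD_DECL has
   DECL_BIT_FIELD set, so contains_bitfld_component_ref_p returns true for
   it, whereas the reference x.word yields false.  */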
12125
12126 /* Try to determine whether a TRY_CATCH expression can fall through.
12127 This is a subroutine of block_may_fallthru. */
12128
12129 static bool
12130 try_catch_may_fallthru (const_tree stmt)
12131 {
12132 tree_stmt_iterator i;
12133
12134 /* If the TRY block can fall through, the whole TRY_CATCH can
12135 fall through. */
12136 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12137 return true;
12138
12139 i = tsi_start (TREE_OPERAND (stmt, 1));
12140 switch (TREE_CODE (tsi_stmt (i)))
12141 {
12142 case CATCH_EXPR:
12143 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12144 catch expression and a body. The whole TRY_CATCH may fall
12145 through iff any of the catch bodies falls through. */
12146 for (; !tsi_end_p (i); tsi_next (&i))
12147 {
12148 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12149 return true;
12150 }
12151 return false;
12152
12153 case EH_FILTER_EXPR:
12154 /* The exception filter expression only matters if there is an
12155 exception. If the exception does not match EH_FILTER_TYPES,
12156 we will execute EH_FILTER_FAILURE, and we will fall through
12157 if that falls through. If the exception does match
12158 EH_FILTER_TYPES, the stack unwinder will continue up the
12159 stack, so we will not fall through. We don't know whether we
12160 will throw an exception which matches EH_FILTER_TYPES or not,
12161 so we just ignore EH_FILTER_TYPES and assume that we might
12162 throw an exception which doesn't match. */
12163 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12164
12165 default:
12166 /* This case represents statements to be executed when an
12167 exception occurs. Those statements are implicitly followed
12168 by a RESX statement to resume execution after the exception.
12169 So in this case the TRY_CATCH never falls through. */
12170 return false;
12171 }
12172 }
12173
12174 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12175 need not be 100% accurate; simply be conservative and return true if we
12176 don't know. This is used only to avoid stupidly generating extra code.
12177 If we're wrong, we'll just delete the extra code later. */
12178
12179 bool
12180 block_may_fallthru (const_tree block)
12181 {
12182 /* This CONST_CAST is okay because expr_last returns its argument
12183 unmodified and we assign it to a const_tree. */
12184 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12185
12186 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12187 {
12188 case GOTO_EXPR:
12189 case RETURN_EXPR:
12190 /* Easy cases. If the last statement of the block implies
12191 control transfer, then we can't fall through. */
12192 return false;
12193
12194 case SWITCH_EXPR:
12195 /* If SWITCH_LABELS is set, this is lowered, and represents a
12196 branch to a selected label and hence cannot fall through.
12197 Otherwise SWITCH_BODY is set, and the switch can fall
12198 through. */
12199 return SWITCH_LABELS (stmt) == NULL_TREE;
12200
12201 case COND_EXPR:
12202 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12203 return true;
12204 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12205
12206 case BIND_EXPR:
12207 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12208
12209 case TRY_CATCH_EXPR:
12210 return try_catch_may_fallthru (stmt);
12211
12212 case TRY_FINALLY_EXPR:
12213 /* The finally clause is always executed after the try clause,
12214 so if it does not fall through, then the try-finally will not
12215 fall through. Otherwise, if the try clause does not fall
12216 through, then when the finally clause falls through it will
12217 resume execution wherever the try clause was going. So the
12218 whole try-finally will only fall through if both the try
12219 clause and the finally clause fall through. */
12220 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12221 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12222
12223 case MODIFY_EXPR:
12224 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12225 stmt = TREE_OPERAND (stmt, 1);
12226 else
12227 return true;
12228 /* FALLTHRU */
12229
12230 case CALL_EXPR:
12231 /* Functions that do not return do not fall through. */
12232 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12233
12234 case CLEANUP_POINT_EXPR:
12235 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12236
12237 case TARGET_EXPR:
12238 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12239
12240 case ERROR_MARK:
12241 return true;
12242
12243 default:
12244 return lang_hooks.block_may_fallthru (stmt);
12245 }
12246 }
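
/* Illustrative sketch, not part of the original source: for a block whose
   last statement is

     return x;        a RETURN_EXPR, so the block cannot fall through;
     abort ();        a CALL_EXPR to a noreturn function, likewise false;
     y = z;           a plain MODIFY_EXPR, so the block may fall through.

   Codes not handled above defer to lang_hooks.block_may_fallthru, whose
   default is the conservative answer true.  */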
12247
12248 /* True if we are using EH to handle cleanups. */
12249 static bool using_eh_for_cleanups_flag = false;
12250
12251 /* This routine is called from front ends to indicate eh should be used for
12252 cleanups. */
12253 void
12254 using_eh_for_cleanups (void)
12255 {
12256 using_eh_for_cleanups_flag = true;
12257 }
12258
12259 /* Query whether EH is used for cleanups. */
12260 bool
12261 using_eh_for_cleanups_p (void)
12262 {
12263 return using_eh_for_cleanups_flag;
12264 }
12265
12266 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12267 const char *
12268 get_tree_code_name (enum tree_code code)
12269 {
12270 const char *invalid = "<invalid tree code>";
12271
12272 if (code >= MAX_TREE_CODES)
12273 return invalid;
12274
12275 return tree_code_name[code];
12276 }
12277
12278 /* Drops the TREE_OVERFLOW flag from T. */
12279
12280 tree
12281 drop_tree_overflow (tree t)
12282 {
12283 gcc_checking_assert (TREE_OVERFLOW (t));
12284
12285 /* For tree codes with a sharing machinery re-build the result. */
12286 if (TREE_CODE (t) == INTEGER_CST)
12287 return wide_int_to_tree (TREE_TYPE (t), t);
12288
12289 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12290 and drop the flag. */
12291 t = copy_node (t);
12292 TREE_OVERFLOW (t) = 0;
12293 return t;
12294 }
12295
12296 /* Given a memory reference expression T, return its base address.
12297 The base address of a memory reference expression is the main
12298 object being referenced. For instance, the base address for
12299 'array[i].fld[j]' is 'array'. You can think of this as stripping
12300 away the offset part from a memory address.
12301
12302 This function calls handled_component_p to strip away all the inner
12303 parts of the memory reference until it reaches the base object. */
12304
12305 tree
12306 get_base_address (tree t)
12307 {
12308 while (handled_component_p (t))
12309 t = TREE_OPERAND (t, 0);
12310
12311 if ((TREE_CODE (t) == MEM_REF
12312 || TREE_CODE (t) == TARGET_MEM_REF)
12313 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12314 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12315
12316 /* ??? Either the alias oracle or all callers need to properly deal
12317 with WITH_SIZE_EXPRs before we can look through those. */
12318 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12319 return NULL_TREE;
12320
12321 return t;
12322 }
12323
12324 /* Return the machine mode of T. For vectors, returns the mode of the
12325 inner type. The main use case is to feed the result to HONOR_NANS,
12326 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12327
12328 machine_mode
12329 element_mode (const_tree t)
12330 {
12331 if (!TYPE_P (t))
12332 t = TREE_TYPE (t);
12333 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12334 t = TREE_TYPE (t);
12335 return TYPE_MODE (t);
12336 }
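
/* Illustrative sketch, not part of the original source: for a vector of
   four floats (V4SFmode on targets that support it), element_mode returns
   SFmode, and for a complex double it returns DFmode, so HONOR_NANS can
   be asked a meaningful question even when TYPE_MODE of the whole type
   would be a vector or BLKmode aggregate mode.  */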
12337
12338 #include "gt-tree.h"