gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "hard-reg-set.h"
46 #include "input.h"
47 #include "function.h"
48 #include "obstack.h"
49 #include "toplev.h" /* get_random_seed */
50 #include "inchash.h"
51 #include "filenames.h"
52 #include "output.h"
53 #include "target.h"
54 #include "common/common-target.h"
55 #include "langhooks.h"
56 #include "tree-inline.h"
57 #include "tree-iterator.h"
58 #include "predict.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "basic-block.h"
62 #include "bitmap.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimple-iterator.h"
69 #include "gimplify.h"
70 #include "gimple-ssa.h"
71 #include "hash-map.h"
72 #include "plugin-api.h"
73 #include "ipa-ref.h"
74 #include "cgraph.h"
75 #include "tree-phinodes.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "expr.h"
79 #include "tree-dfa.h"
80 #include "params.h"
81 #include "tree-pass.h"
82 #include "langhooks-def.h"
83 #include "diagnostic.h"
84 #include "tree-diagnostic.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "intl.h"
89 #include "wide-int.h"
90 #include "builtins.h"
91
92 /* Tree code classes. */
93
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
95 #define END_OF_BASE_TREE_CODES tcc_exceptional,
96
97 const enum tree_code_class tree_code_type[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Table indexed by tree code giving number of expression
105 operands beyond the fixed part of the node structure.
106 Not used for types or decls. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
109 #define END_OF_BASE_TREE_CODES 0,
110
111 const unsigned char tree_code_length[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Names of tree components.
119 Used for printing out the tree and error messages. */
120 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
121 #define END_OF_BASE_TREE_CODES "@dummy",
122
123 static const char *const tree_code_name[] = {
124 #include "all-tree.def"
125 };
126
127 #undef DEFTREECODE
128 #undef END_OF_BASE_TREE_CODES
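
/* Illustrative sketch, not part of the original file: each entry in
   all-tree.def has the shape DEFTREECODE (SYM, NAME, TYPE, LENGTH),
   e.g. DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2), so the
   redefinitions above select one column of that table per array.
   The checks below show what the expansion yields for that entry;
   the function name is hypothetical and the block is not compiled.  */
#if 0
static void
example_check_tree_code_tables (void)
{
  gcc_assert (tree_code_type[PLUS_EXPR] == tcc_binary);
  gcc_assert (tree_code_length[PLUS_EXPR] == 2);
  gcc_assert (strcmp (get_tree_code_name (PLUS_EXPR), "plus_expr") == 0);
}
#endif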
129
130 /* Each tree code class has an associated string representation.
131 These must correspond to the tree_code_class entries. */
132
133 const char *const tree_code_class_strings[] =
134 {
135 "exceptional",
136 "constant",
137 "type",
138 "declaration",
139 "reference",
140 "comparison",
141 "unary",
142 "binary",
143 "statement",
144 "vl_exp",
145 "expression"
146 };
147
148 /* obstack.[ch] explicitly declined to prototype this. */
149 extern int _obstack_allocated_p (struct obstack *h, void *obj);
150
151 /* Statistics-gathering stuff. */
152
153 static int tree_code_counts[MAX_TREE_CODES];
154 int tree_node_counts[(int) all_kinds];
155 int tree_node_sizes[(int) all_kinds];
156
157 /* Keep in sync with tree.h:enum tree_node_kind. */
158 static const char * const tree_node_kind_names[] = {
159 "decls",
160 "types",
161 "blocks",
162 "stmts",
163 "refs",
164 "exprs",
165 "constants",
166 "identifiers",
167 "vecs",
168 "binfos",
169 "ssa names",
170 "constructors",
171 "random kinds",
172 "lang_decl kinds",
173 "lang_type kinds",
174 "omp clauses",
175 };
176
177 /* Unique id for next decl created. */
178 static GTY(()) int next_decl_uid;
179 /* Unique id for next type created. */
180 static GTY(()) int next_type_uid = 1;
181 /* Unique id for next debug decl created. Use negative numbers,
182 to catch erroneous uses. */
183 static GTY(()) int next_debug_decl_uid;
184
185 /* Since we cannot rehash a type after it is in the table, we have to
186 keep the hash code. */
187
188 struct GTY(()) type_hash {
189 unsigned long hash;
190 tree type;
191 };
192
193 /* Initial size of the hash table (rounded to next prime). */
194 #define TYPE_HASH_INITIAL_SIZE 1000
195
196 /* Now here is the hash table. When recording a type, it is added to
197 the slot whose index is the hash code. Note that the hash table is
198 used for several kinds of types (function types, array types and
199 array index range types, for now). While all these live in the
200 same table, they are completely independent, and the hash code is
201 computed differently for each of these. */
202
203 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
204 htab_t type_hash_table;
205
206 /* Hash table and temporary node for larger integer const values. */
207 static GTY (()) tree int_cst_node;
208 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
209 htab_t int_cst_hash_table;
210
211 /* Hash table for optimization flags and target option flags. Use the same
212 hash table for both sets of options. Nodes for building the current
213 optimization and target option nodes. The assumption is most of the time
214 the options created will already be in the hash table, so we avoid
215 allocating and freeing up a node repeatedly. */
216 static GTY (()) tree cl_optimization_node;
217 static GTY (()) tree cl_target_option_node;
218 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
219 htab_t cl_option_hash_table;
220
221 /* General tree->tree mapping structure for use in hash tables. */
222
223
224 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
225 htab_t debug_expr_for_decl;
226
227 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
228 htab_t value_expr_for_decl;
229
230 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
231 htab_t debug_args_for_decl;
232
233 static void set_type_quals (tree, int);
234 static int type_hash_eq (const void *, const void *);
235 static hashval_t type_hash_hash (const void *);
236 static hashval_t int_cst_hash_hash (const void *);
237 static int int_cst_hash_eq (const void *, const void *);
238 static hashval_t cl_option_hash_hash (const void *);
239 static int cl_option_hash_eq (const void *, const void *);
240 static void print_type_hash_statistics (void);
241 static void print_debug_expr_statistics (void);
242 static void print_value_expr_statistics (void);
243 static int type_hash_marked_p (const void *);
244 static void type_hash_list (const_tree, inchash::hash &);
245 static void attribute_hash_list (const_tree, inchash::hash &);
246
247 tree global_trees[TI_MAX];
248 tree integer_types[itk_none];
249
250 bool int_n_enabled_p[NUM_INT_N_ENTS];
251 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
252
253 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
254
255 /* Number of operands for each OpenMP clause. */
256 unsigned const char omp_clause_num_ops[] =
257 {
258 0, /* OMP_CLAUSE_ERROR */
259 1, /* OMP_CLAUSE_PRIVATE */
260 1, /* OMP_CLAUSE_SHARED */
261 1, /* OMP_CLAUSE_FIRSTPRIVATE */
262 2, /* OMP_CLAUSE_LASTPRIVATE */
263 4, /* OMP_CLAUSE_REDUCTION */
264 1, /* OMP_CLAUSE_COPYIN */
265 1, /* OMP_CLAUSE_COPYPRIVATE */
266 3, /* OMP_CLAUSE_LINEAR */
267 2, /* OMP_CLAUSE_ALIGNED */
268 1, /* OMP_CLAUSE_DEPEND */
269 1, /* OMP_CLAUSE_UNIFORM */
270 2, /* OMP_CLAUSE_FROM */
271 2, /* OMP_CLAUSE_TO */
272 2, /* OMP_CLAUSE_MAP */
273 1, /* OMP_CLAUSE__LOOPTEMP_ */
274 1, /* OMP_CLAUSE_IF */
275 1, /* OMP_CLAUSE_NUM_THREADS */
276 1, /* OMP_CLAUSE_SCHEDULE */
277 0, /* OMP_CLAUSE_NOWAIT */
278 0, /* OMP_CLAUSE_ORDERED */
279 0, /* OMP_CLAUSE_DEFAULT */
280 3, /* OMP_CLAUSE_COLLAPSE */
281 0, /* OMP_CLAUSE_UNTIED */
282 1, /* OMP_CLAUSE_FINAL */
283 0, /* OMP_CLAUSE_MERGEABLE */
284 1, /* OMP_CLAUSE_DEVICE */
285 1, /* OMP_CLAUSE_DIST_SCHEDULE */
286 0, /* OMP_CLAUSE_INBRANCH */
287 0, /* OMP_CLAUSE_NOTINBRANCH */
288 1, /* OMP_CLAUSE_NUM_TEAMS */
289 1, /* OMP_CLAUSE_THREAD_LIMIT */
290 0, /* OMP_CLAUSE_PROC_BIND */
291 1, /* OMP_CLAUSE_SAFELEN */
292 1, /* OMP_CLAUSE_SIMDLEN */
293 0, /* OMP_CLAUSE_FOR */
294 0, /* OMP_CLAUSE_PARALLEL */
295 0, /* OMP_CLAUSE_SECTIONS */
296 0, /* OMP_CLAUSE_TASKGROUP */
297 1, /* OMP_CLAUSE__SIMDUID_ */
298 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
299 };
300
301 const char * const omp_clause_code_name[] =
302 {
303 "error_clause",
304 "private",
305 "shared",
306 "firstprivate",
307 "lastprivate",
308 "reduction",
309 "copyin",
310 "copyprivate",
311 "linear",
312 "aligned",
313 "depend",
314 "uniform",
315 "from",
316 "to",
317 "map",
318 "_looptemp_",
319 "if",
320 "num_threads",
321 "schedule",
322 "nowait",
323 "ordered",
324 "default",
325 "collapse",
326 "untied",
327 "final",
328 "mergeable",
329 "device",
330 "dist_schedule",
331 "inbranch",
332 "notinbranch",
333 "num_teams",
334 "thread_limit",
335 "proc_bind",
336 "safelen",
337 "simdlen",
338 "for",
339 "parallel",
340 "sections",
341 "taskgroup",
342 "_simduid_",
343 "_Cilk_for_count_"
344 };
345
346
347 /* Return the tree node structure used by tree code CODE. */
348
349 static inline enum tree_node_structure_enum
350 tree_node_structure_for_code (enum tree_code code)
351 {
352 switch (TREE_CODE_CLASS (code))
353 {
354 case tcc_declaration:
355 {
356 switch (code)
357 {
358 case FIELD_DECL:
359 return TS_FIELD_DECL;
360 case PARM_DECL:
361 return TS_PARM_DECL;
362 case VAR_DECL:
363 return TS_VAR_DECL;
364 case LABEL_DECL:
365 return TS_LABEL_DECL;
366 case RESULT_DECL:
367 return TS_RESULT_DECL;
368 case DEBUG_EXPR_DECL:
369 return TS_DECL_WRTL;
370 case CONST_DECL:
371 return TS_CONST_DECL;
372 case TYPE_DECL:
373 return TS_TYPE_DECL;
374 case FUNCTION_DECL:
375 return TS_FUNCTION_DECL;
376 case TRANSLATION_UNIT_DECL:
377 return TS_TRANSLATION_UNIT_DECL;
378 default:
379 return TS_DECL_NON_COMMON;
380 }
381 }
382 case tcc_type:
383 return TS_TYPE_NON_COMMON;
384 case tcc_reference:
385 case tcc_comparison:
386 case tcc_unary:
387 case tcc_binary:
388 case tcc_expression:
389 case tcc_statement:
390 case tcc_vl_exp:
391 return TS_EXP;
392 default: /* tcc_constant and tcc_exceptional */
393 break;
394 }
395 switch (code)
396 {
397 /* tcc_constant cases. */
398 case VOID_CST: return TS_TYPED;
399 case INTEGER_CST: return TS_INT_CST;
400 case REAL_CST: return TS_REAL_CST;
401 case FIXED_CST: return TS_FIXED_CST;
402 case COMPLEX_CST: return TS_COMPLEX;
403 case VECTOR_CST: return TS_VECTOR;
404 case STRING_CST: return TS_STRING;
405 /* tcc_exceptional cases. */
406 case ERROR_MARK: return TS_COMMON;
407 case IDENTIFIER_NODE: return TS_IDENTIFIER;
408 case TREE_LIST: return TS_LIST;
409 case TREE_VEC: return TS_VEC;
410 case SSA_NAME: return TS_SSA_NAME;
411 case PLACEHOLDER_EXPR: return TS_COMMON;
412 case STATEMENT_LIST: return TS_STATEMENT_LIST;
413 case BLOCK: return TS_BLOCK;
414 case CONSTRUCTOR: return TS_CONSTRUCTOR;
415 case TREE_BINFO: return TS_BINFO;
416 case OMP_CLAUSE: return TS_OMP_CLAUSE;
417 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
418 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
419
420 default:
421 gcc_unreachable ();
422 }
423 }
424
425
426 /* Initialize tree_contains_struct to describe the hierarchy of tree
427 nodes. */
428
429 static void
430 initialize_tree_contains_struct (void)
431 {
432 unsigned i;
433
434 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
435 {
436 enum tree_code code;
437 enum tree_node_structure_enum ts_code;
438
439 code = (enum tree_code) i;
440 ts_code = tree_node_structure_for_code (code);
441
442 /* Mark the TS structure itself. */
443 tree_contains_struct[code][ts_code] = 1;
444
445 /* Mark all the structures that TS is derived from. */
446 switch (ts_code)
447 {
448 case TS_TYPED:
449 case TS_BLOCK:
450 MARK_TS_BASE (code);
451 break;
452
453 case TS_COMMON:
454 case TS_INT_CST:
455 case TS_REAL_CST:
456 case TS_FIXED_CST:
457 case TS_VECTOR:
458 case TS_STRING:
459 case TS_COMPLEX:
460 case TS_SSA_NAME:
461 case TS_CONSTRUCTOR:
462 case TS_EXP:
463 case TS_STATEMENT_LIST:
464 MARK_TS_TYPED (code);
465 break;
466
467 case TS_IDENTIFIER:
468 case TS_DECL_MINIMAL:
469 case TS_TYPE_COMMON:
470 case TS_LIST:
471 case TS_VEC:
472 case TS_BINFO:
473 case TS_OMP_CLAUSE:
474 case TS_OPTIMIZATION:
475 case TS_TARGET_OPTION:
476 MARK_TS_COMMON (code);
477 break;
478
479 case TS_TYPE_WITH_LANG_SPECIFIC:
480 MARK_TS_TYPE_COMMON (code);
481 break;
482
483 case TS_TYPE_NON_COMMON:
484 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
485 break;
486
487 case TS_DECL_COMMON:
488 MARK_TS_DECL_MINIMAL (code);
489 break;
490
491 case TS_DECL_WRTL:
492 case TS_CONST_DECL:
493 MARK_TS_DECL_COMMON (code);
494 break;
495
496 case TS_DECL_NON_COMMON:
497 MARK_TS_DECL_WITH_VIS (code);
498 break;
499
500 case TS_DECL_WITH_VIS:
501 case TS_PARM_DECL:
502 case TS_LABEL_DECL:
503 case TS_RESULT_DECL:
504 MARK_TS_DECL_WRTL (code);
505 break;
506
507 case TS_FIELD_DECL:
508 MARK_TS_DECL_COMMON (code);
509 break;
510
511 case TS_VAR_DECL:
512 MARK_TS_DECL_WITH_VIS (code);
513 break;
514
515 case TS_TYPE_DECL:
516 case TS_FUNCTION_DECL:
517 MARK_TS_DECL_NON_COMMON (code);
518 break;
519
520 case TS_TRANSLATION_UNIT_DECL:
521 MARK_TS_DECL_COMMON (code);
522 break;
523
524 default:
525 gcc_unreachable ();
526 }
527 }
528
529 /* Basic consistency checks for attributes used in fold. */
530 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
531 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
532 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
534 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
535 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
537 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
538 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
540 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
542 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
543 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
544 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
546 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
547 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
548 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
549 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
550 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
554 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
556 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
557 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
558 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
559 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
560 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
561 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
562 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
563 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
564 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
565 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
566 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
567 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
568 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
569 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
570 }
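
/* Illustrative sketch, not part of the original file: once the table is
   initialized, CODE_CONTAINS_STRUCT answers structure-inheritance
   queries like the ones below (they mirror the asserts above).  The
   function name is hypothetical and the block is not compiled.  */
#if 0
static void
example_query_contains_struct (void)
{
  /* A VAR_DECL carries the full decl-with-visibility layout...  */
  gcc_assert (CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS));
  /* ...while a FIELD_DECL only derives from tree_decl_common.  */
  gcc_assert (CODE_CONTAINS_STRUCT (FIELD_DECL, TS_DECL_COMMON));
  gcc_assert (!CODE_CONTAINS_STRUCT (FIELD_DECL, TS_DECL_WITH_VIS));
}
#endif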
571
572
573 /* Init tree.c. */
574
575 void
576 init_ttree (void)
577 {
578 /* Initialize the hash table of types. */
579 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
580 type_hash_eq, 0);
581
582 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
583 tree_decl_map_eq, 0);
584
585 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
586 tree_decl_map_eq, 0);
587
588 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
589 int_cst_hash_eq, NULL);
590
591 int_cst_node = make_int_cst (1, 1);
592
593 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
594 cl_option_hash_eq, NULL);
595
596 cl_optimization_node = make_node (OPTIMIZATION_NODE);
597 cl_target_option_node = make_node (TARGET_OPTION_NODE);
598
599 /* Initialize the tree_contains_struct array. */
600 initialize_tree_contains_struct ();
601 lang_hooks.init_ts ();
602 }
603
604 \f
605 /* The name of the object as the assembler will see it (but before any
606 translations made by ASM_OUTPUT_LABELREF). Often this is the same
607 as DECL_NAME. It is an IDENTIFIER_NODE. */
608 tree
609 decl_assembler_name (tree decl)
610 {
611 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
612 lang_hooks.set_decl_assembler_name (decl);
613 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
614 }
615
616 /* When the target supports COMDAT groups, this indicates which group the
617 DECL is associated with. This can be either an IDENTIFIER_NODE or a
618 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
619 tree
620 decl_comdat_group (const_tree node)
621 {
622 struct symtab_node *snode = symtab_node::get (node);
623 if (!snode)
624 return NULL;
625 return snode->get_comdat_group ();
626 }
627
628 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
629 tree
630 decl_comdat_group_id (const_tree node)
631 {
632 struct symtab_node *snode = symtab_node::get (node);
633 if (!snode)
634 return NULL;
635 return snode->get_comdat_group_id ();
636 }
637
638 /* When the target supports named sections, return the name of the section
639 NODE is placed in, as a string, or NULL if it is in no section. */
640 const char *
641 decl_section_name (const_tree node)
642 {
643 struct symtab_node *snode = symtab_node::get (node);
644 if (!snode)
645 return NULL;
646 return snode->get_section ();
647 }
648
649 /* Set the section name of NODE to the string VALUE, or clear the
650 section association if VALUE is NULL. */
651 void
652 set_decl_section_name (tree node, const char *value)
653 {
654 struct symtab_node *snode;
655
656 if (value == NULL)
657 {
658 snode = symtab_node::get (node);
659 if (!snode)
660 return;
661 }
662 else if (TREE_CODE (node) == VAR_DECL)
663 snode = varpool_node::get_create (node);
664 else
665 snode = cgraph_node::get_create (node);
666 snode->set_section (value);
667 }
668
669 /* Return TLS model of a variable NODE. */
670 enum tls_model
671 decl_tls_model (const_tree node)
672 {
673 struct varpool_node *snode = varpool_node::get (node);
674 if (!snode)
675 return TLS_MODEL_NONE;
676 return snode->tls_model;
677 }
678
679 /* Set TLS model of variable NODE to MODEL. */
680 void
681 set_decl_tls_model (tree node, enum tls_model model)
682 {
683 struct varpool_node *vnode;
684
685 if (model == TLS_MODEL_NONE)
686 {
687 vnode = varpool_node::get (node);
688 if (!vnode)
689 return;
690 }
691 else
692 vnode = varpool_node::get_create (node);
693 vnode->tls_model = model;
694 }
695
696 /* Compute the number of bytes occupied by a tree with code CODE.
697 This function cannot be used for nodes that have variable sizes,
698 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
699 size_t
700 tree_code_size (enum tree_code code)
701 {
702 switch (TREE_CODE_CLASS (code))
703 {
704 case tcc_declaration: /* A decl node */
705 {
706 switch (code)
707 {
708 case FIELD_DECL:
709 return sizeof (struct tree_field_decl);
710 case PARM_DECL:
711 return sizeof (struct tree_parm_decl);
712 case VAR_DECL:
713 return sizeof (struct tree_var_decl);
714 case LABEL_DECL:
715 return sizeof (struct tree_label_decl);
716 case RESULT_DECL:
717 return sizeof (struct tree_result_decl);
718 case CONST_DECL:
719 return sizeof (struct tree_const_decl);
720 case TYPE_DECL:
721 return sizeof (struct tree_type_decl);
722 case FUNCTION_DECL:
723 return sizeof (struct tree_function_decl);
724 case DEBUG_EXPR_DECL:
725 return sizeof (struct tree_decl_with_rtl);
726 case TRANSLATION_UNIT_DECL:
727 return sizeof (struct tree_translation_unit_decl);
728 case NAMESPACE_DECL:
729 case IMPORTED_DECL:
730 case NAMELIST_DECL:
731 return sizeof (struct tree_decl_non_common);
732 default:
733 return lang_hooks.tree_size (code);
734 }
735 }
736
737 case tcc_type: /* a type node */
738 return sizeof (struct tree_type_non_common);
739
740 case tcc_reference: /* a reference */
741 case tcc_expression: /* an expression */
742 case tcc_statement: /* an expression with side effects */
743 case tcc_comparison: /* a comparison expression */
744 case tcc_unary: /* a unary arithmetic expression */
745 case tcc_binary: /* a binary arithmetic expression */
746 return (sizeof (struct tree_exp)
747 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
748
749 case tcc_constant: /* a constant */
750 switch (code)
751 {
752 case VOID_CST: return sizeof (struct tree_typed);
753 case INTEGER_CST: gcc_unreachable ();
754 case REAL_CST: return sizeof (struct tree_real_cst);
755 case FIXED_CST: return sizeof (struct tree_fixed_cst);
756 case COMPLEX_CST: return sizeof (struct tree_complex);
757 case VECTOR_CST: return sizeof (struct tree_vector);
758 case STRING_CST: gcc_unreachable ();
759 default:
760 return lang_hooks.tree_size (code);
761 }
762
763 case tcc_exceptional: /* something random, like an identifier. */
764 switch (code)
765 {
766 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
767 case TREE_LIST: return sizeof (struct tree_list);
768
769 case ERROR_MARK:
770 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
771
772 case TREE_VEC:
773 case OMP_CLAUSE: gcc_unreachable ();
774
775 case SSA_NAME: return sizeof (struct tree_ssa_name);
776
777 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
778 case BLOCK: return sizeof (struct tree_block);
779 case CONSTRUCTOR: return sizeof (struct tree_constructor);
780 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
781 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
782
783 default:
784 return lang_hooks.tree_size (code);
785 }
786
787 default:
788 gcc_unreachable ();
789 }
790 }
791
792 /* Compute the number of bytes occupied by NODE. This routine only
793 looks at TREE_CODE, except for those nodes that have variable sizes. */
794 size_t
795 tree_size (const_tree node)
796 {
797 const enum tree_code code = TREE_CODE (node);
798 switch (code)
799 {
800 case INTEGER_CST:
801 return (sizeof (struct tree_int_cst)
802 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
803
804 case TREE_BINFO:
805 return (offsetof (struct tree_binfo, base_binfos)
806 + vec<tree, va_gc>
807 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
808
809 case TREE_VEC:
810 return (sizeof (struct tree_vec)
811 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
812
813 case VECTOR_CST:
814 return (sizeof (struct tree_vector)
815 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
816
817 case STRING_CST:
818 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
819
820 case OMP_CLAUSE:
821 return (sizeof (struct tree_omp_clause)
822 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
823 * sizeof (tree));
824
825 default:
826 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
827 return (sizeof (struct tree_exp)
828 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
829 else
830 return tree_code_size (code);
831 }
832 }
833
834 /* Record interesting allocation statistics for a tree node with CODE
835 and LENGTH. */
836
837 static void
838 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
839 size_t length ATTRIBUTE_UNUSED)
840 {
841 enum tree_code_class type = TREE_CODE_CLASS (code);
842 tree_node_kind kind;
843
844 if (!GATHER_STATISTICS)
845 return;
846
847 switch (type)
848 {
849 case tcc_declaration: /* A decl node */
850 kind = d_kind;
851 break;
852
853 case tcc_type: /* a type node */
854 kind = t_kind;
855 break;
856
857 case tcc_statement: /* an expression with side effects */
858 kind = s_kind;
859 break;
860
861 case tcc_reference: /* a reference */
862 kind = r_kind;
863 break;
864
865 case tcc_expression: /* an expression */
866 case tcc_comparison: /* a comparison expression */
867 case tcc_unary: /* a unary arithmetic expression */
868 case tcc_binary: /* a binary arithmetic expression */
869 kind = e_kind;
870 break;
871
872 case tcc_constant: /* a constant */
873 kind = c_kind;
874 break;
875
876 case tcc_exceptional: /* something random, like an identifier. */
877 switch (code)
878 {
879 case IDENTIFIER_NODE:
880 kind = id_kind;
881 break;
882
883 case TREE_VEC:
884 kind = vec_kind;
885 break;
886
887 case TREE_BINFO:
888 kind = binfo_kind;
889 break;
890
891 case SSA_NAME:
892 kind = ssa_name_kind;
893 break;
894
895 case BLOCK:
896 kind = b_kind;
897 break;
898
899 case CONSTRUCTOR:
900 kind = constr_kind;
901 break;
902
903 case OMP_CLAUSE:
904 kind = omp_clause_kind;
905 break;
906
907 default:
908 kind = x_kind;
909 break;
910 }
911 break;
912
913 case tcc_vl_exp:
914 kind = e_kind;
915 break;
916
917 default:
918 gcc_unreachable ();
919 }
920
921 tree_code_counts[(int) code]++;
922 tree_node_counts[(int) kind]++;
923 tree_node_sizes[(int) kind] += length;
924 }
925
926 /* Allocate and return a new UID from the DECL_UID namespace. */
927
928 int
929 allocate_decl_uid (void)
930 {
931 return next_decl_uid++;
932 }
933
934 /* Return a newly allocated node of code CODE. For decl and type
935 nodes, some other fields are initialized. The rest of the node is
936 initialized to zero. This function cannot be used for TREE_VEC,
937 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
938 tree_code_size.
939
940 Achoo! I got a code in the node. */
941
942 tree
943 make_node_stat (enum tree_code code MEM_STAT_DECL)
944 {
945 tree t;
946 enum tree_code_class type = TREE_CODE_CLASS (code);
947 size_t length = tree_code_size (code);
948
949 record_node_allocation_statistics (code, length);
950
951 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
952 TREE_SET_CODE (t, code);
953
954 switch (type)
955 {
956 case tcc_statement:
957 TREE_SIDE_EFFECTS (t) = 1;
958 break;
959
960 case tcc_declaration:
961 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
962 {
963 if (code == FUNCTION_DECL)
964 {
965 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
966 DECL_MODE (t) = FUNCTION_MODE;
967 }
968 else
969 DECL_ALIGN (t) = 1;
970 }
971 DECL_SOURCE_LOCATION (t) = input_location;
972 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
973 DECL_UID (t) = --next_debug_decl_uid;
974 else
975 {
976 DECL_UID (t) = allocate_decl_uid ();
977 SET_DECL_PT_UID (t, -1);
978 }
979 if (TREE_CODE (t) == LABEL_DECL)
980 LABEL_DECL_UID (t) = -1;
981
982 break;
983
984 case tcc_type:
985 TYPE_UID (t) = next_type_uid++;
986 TYPE_ALIGN (t) = BITS_PER_UNIT;
987 TYPE_USER_ALIGN (t) = 0;
988 TYPE_MAIN_VARIANT (t) = t;
989 TYPE_CANONICAL (t) = t;
990
991 /* Default to no attributes for type, but let target change that. */
992 TYPE_ATTRIBUTES (t) = NULL_TREE;
993 targetm.set_default_type_attributes (t);
994
995 /* We have not yet computed the alias set for this type. */
996 TYPE_ALIAS_SET (t) = -1;
997 break;
998
999 case tcc_constant:
1000 TREE_CONSTANT (t) = 1;
1001 break;
1002
1003 case tcc_expression:
1004 switch (code)
1005 {
1006 case INIT_EXPR:
1007 case MODIFY_EXPR:
1008 case VA_ARG_EXPR:
1009 case PREDECREMENT_EXPR:
1010 case PREINCREMENT_EXPR:
1011 case POSTDECREMENT_EXPR:
1012 case POSTINCREMENT_EXPR:
1013 /* All of these have side-effects, no matter what their
1014 operands are. */
1015 TREE_SIDE_EFFECTS (t) = 1;
1016 break;
1017
1018 default:
1019 break;
1020 }
1021 break;
1022
1023 default:
1024 /* Other classes need no special treatment. */
1025 break;
1026 }
1027
1028 return t;
1029 }
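
/* Illustrative sketch, not part of the original file: a minimal use of
   make_node to create a fresh RECORD_TYPE, relying on the tcc_type
   defaults set above (alignment, main variant, alias set).  The names
   prefixed "example_" are hypothetical and the block is not compiled.  */
#if 0
static tree
example_make_empty_record (void)
{
  tree t = make_node (RECORD_TYPE);
  /* make_node already made T its own main variant and gave it
     BITS_PER_UNIT alignment; a real caller would now chain FIELD_DECLs
     onto TYPE_FIELDS and call layout_type.  */
  TYPE_NAME (t) = get_identifier ("example_record");
  return t;
}
#endif
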
1030 \f
1031 /* Return a new node with the same contents as NODE except that its
1032 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1033
1034 tree
1035 copy_node_stat (tree node MEM_STAT_DECL)
1036 {
1037 tree t;
1038 enum tree_code code = TREE_CODE (node);
1039 size_t length;
1040
1041 gcc_assert (code != STATEMENT_LIST);
1042
1043 length = tree_size (node);
1044 record_node_allocation_statistics (code, length);
1045 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1046 memcpy (t, node, length);
1047
1048 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1049 TREE_CHAIN (t) = 0;
1050 TREE_ASM_WRITTEN (t) = 0;
1051 TREE_VISITED (t) = 0;
1052
1053 if (TREE_CODE_CLASS (code) == tcc_declaration)
1054 {
1055 if (code == DEBUG_EXPR_DECL)
1056 DECL_UID (t) = --next_debug_decl_uid;
1057 else
1058 {
1059 DECL_UID (t) = allocate_decl_uid ();
1060 if (DECL_PT_UID_SET_P (node))
1061 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1062 }
1063 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1064 && DECL_HAS_VALUE_EXPR_P (node))
1065 {
1066 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1067 DECL_HAS_VALUE_EXPR_P (t) = 1;
1068 }
1069 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1070 if (TREE_CODE (node) == VAR_DECL)
1071 {
1072 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1073 t->decl_with_vis.symtab_node = NULL;
1074 }
1075 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1076 {
1077 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1078 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1079 }
1080 if (TREE_CODE (node) == FUNCTION_DECL)
1081 {
1082 DECL_STRUCT_FUNCTION (t) = NULL;
1083 t->decl_with_vis.symtab_node = NULL;
1084 }
1085 }
1086 else if (TREE_CODE_CLASS (code) == tcc_type)
1087 {
1088 TYPE_UID (t) = next_type_uid++;
1089 /* The following is so that the debug code for
1090 the copy is different from the original type.
1091 The two statements usually duplicate each other
1092 (because they clear fields of the same union),
1093 but the optimizer should catch that. */
1094 TYPE_SYMTAB_POINTER (t) = 0;
1095 TYPE_SYMTAB_ADDRESS (t) = 0;
1096
1097 /* Do not copy the values cache. */
1098 if (TYPE_CACHED_VALUES_P (t))
1099 {
1100 TYPE_CACHED_VALUES_P (t) = 0;
1101 TYPE_CACHED_VALUES (t) = NULL_TREE;
1102 }
1103 }
1104
1105 return t;
1106 }
1107
1108 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1109 For example, this can copy a list made of TREE_LIST nodes. */
1110
1111 tree
1112 copy_list (tree list)
1113 {
1114 tree head;
1115 tree prev, next;
1116
1117 if (list == 0)
1118 return 0;
1119
1120 head = prev = copy_node (list);
1121 next = TREE_CHAIN (list);
1122 while (next)
1123 {
1124 TREE_CHAIN (prev) = copy_node (next);
1125 prev = TREE_CHAIN (prev);
1126 next = TREE_CHAIN (next);
1127 }
1128 return head;
1129 }
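
/* Illustrative sketch, not part of the original file: copy_list copies
   the chain of TREE_LIST cells but shares the values hanging off them.
   The function name is hypothetical and the block is not compiled.  */
#if 0
static void
example_copy_list (void)
{
  tree orig = tree_cons (NULL_TREE, integer_zero_node,
                         tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
  tree copy = copy_list (orig);

  /* The list cells are distinct nodes...  */
  gcc_assert (copy != orig && TREE_CHAIN (copy) != TREE_CHAIN (orig));
  /* ...but the values are shared.  */
  gcc_assert (TREE_VALUE (copy) == TREE_VALUE (orig));
}
#endif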
1130
1131 \f
1132 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1133 INTEGER_CST with value CST and type TYPE. */
1134
1135 static unsigned int
1136 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1137 {
1138 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1139 /* We need an extra zero HWI if CST is an unsigned integer with its
1140 upper bit set, and if CST occupies a whole number of HWIs. */
1141 if (TYPE_UNSIGNED (type)
1142 && wi::neg_p (cst)
1143 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1144 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1145 return cst.get_len ();
1146 }
1147
1148 /* Return a new INTEGER_CST with value CST and type TYPE. */
1149
1150 static tree
1151 build_new_int_cst (tree type, const wide_int &cst)
1152 {
1153 unsigned int len = cst.get_len ();
1154 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1155 tree nt = make_int_cst (len, ext_len);
1156
1157 if (len < ext_len)
1158 {
1159 --ext_len;
1160 TREE_INT_CST_ELT (nt, ext_len) = 0;
1161 for (unsigned int i = len; i < ext_len; ++i)
1162 TREE_INT_CST_ELT (nt, i) = -1;
1163 }
1164 else if (TYPE_UNSIGNED (type)
1165 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1166 {
1167 len--;
1168 TREE_INT_CST_ELT (nt, len)
1169 = zext_hwi (cst.elt (len),
1170 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1171 }
1172
1173 for (unsigned int i = 0; i < len; i++)
1174 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1175 TREE_TYPE (nt) = type;
1176 return nt;
1177 }
1178
1179 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1180
1181 tree
1182 build_int_cst (tree type, HOST_WIDE_INT low)
1183 {
1184 /* Support legacy code. */
1185 if (!type)
1186 type = integer_type_node;
1187
1188 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1189 }
1190
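/* Create an INT_CST node with a LOW value zero extended to TYPE.  */
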
1191 tree
1192 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1193 {
1194 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1195 }
1196
1197 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1198
1199 tree
1200 build_int_cst_type (tree type, HOST_WIDE_INT low)
1201 {
1202 gcc_assert (type);
1203 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1204 }
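
/* Illustrative sketch, not part of the original file: the two entry
   points above differ only in how the HOST_WIDE_INT argument is
   extended to the precision of TYPE.  The function name is
   hypothetical and the block is not compiled.  */
#if 0
static void
example_int_cst_extension (void)
{
  /* build_int_cst sign-extends LOW, so -1 becomes the all-ones value
     of the unsigned type.  */
  tree a = build_int_cst (unsigned_type_node, -1);
  gcc_assert (integer_all_onesp (a));

  /* build_int_cstu zero-extends its argument instead.  */
  tree b = build_int_cstu (unsigned_type_node, 7);
  gcc_assert (tree_to_uhwi (b) == 7);
}
#endif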
1205
1206 /* Constructs a tree of type TYPE with the value given by CST. The signedness
1207 of CST is assumed to be the same as the signedness of TYPE. */
1208
1209 tree
1210 double_int_to_tree (tree type, double_int cst)
1211 {
1212 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1213 }
1214
1215 /* Force the wide_int CST into the range of the type TYPE by sign or
1216 zero extending it. OVERFLOWABLE indicates whether we are interested
1217 in overflow of the value: when >0 we only care about signed
1218 overflow, when <0 we care about any overflow. OVERFLOWED
1219 indicates whether overflow has already occurred. We force
1220 the value to be within the range of TYPE (by setting to 0 or 1 all
1221 the bits outside the type's range). We set TREE_OVERFLOW on the
1222 returned node if
1223 OVERFLOWED is nonzero,
1224 or OVERFLOWABLE is >0 and signed overflow occurs,
1225 or OVERFLOWABLE is <0 and any overflow occurs.
1226 We return a new tree node for the extended wide_int. The node
1227 is shared if no overflow flags are set. */
1228
1229
1230 tree
1231 force_fit_type (tree type, const wide_int_ref &cst,
1232 int overflowable, bool overflowed)
1233 {
1234 signop sign = TYPE_SIGN (type);
1235
1236 /* If we need to set overflow flags, return a new unshared node. */
1237 if (overflowed || !wi::fits_to_tree_p (cst, type))
1238 {
1239 if (overflowed
1240 || overflowable < 0
1241 || (overflowable > 0 && sign == SIGNED))
1242 {
1243 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1244 tree t = build_new_int_cst (type, tmp);
1245 TREE_OVERFLOW (t) = 1;
1246 return t;
1247 }
1248 }
1249
1250 /* Else build a shared node. */
1251 return wide_int_to_tree (type, cst);
1252 }
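
/* Illustrative sketch, not part of the original file: force_fit_type is
   how arithmetic on INTEGER_CSTs materializes a possibly overflowed
   result, in the style of int_const_binop in fold-const.c.  The
   function name is hypothetical and the block is not compiled.  */
#if 0
static tree
example_add_int_csts (tree type, tree a, tree b)
{
  signop sign = TYPE_SIGN (type);
  bool overflow = false;
  wide_int wb = wide_int::from (b, TYPE_PRECISION (type), sign);
  wide_int sum = wi::add (a, wb, sign, &overflow);
  /* OVERFLOWABLE > 0: only signed overflow is interesting here.  */
  return force_fit_type (type, sum, 1, overflow);
}
#endif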
1253
1254 /* These are the hash table functions for the hash table of INTEGER_CST
1255 nodes of a sizetype. */
1256
1257 /* Return the hash code of X, an INTEGER_CST. */
1258
1259 static hashval_t
1260 int_cst_hash_hash (const void *x)
1261 {
1262 const_tree const t = (const_tree) x;
1263 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1264 int i;
1265
1266 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1267 code ^= TREE_INT_CST_ELT (t, i);
1268
1269 return code;
1270 }
1271
1272 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1273 is the same as the value represented by *Y, also an INTEGER_CST. */
1274
1275 static int
1276 int_cst_hash_eq (const void *x, const void *y)
1277 {
1278 const_tree const xt = (const_tree) x;
1279 const_tree const yt = (const_tree) y;
1280
1281 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1282 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1283 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1284 return false;
1285
1286 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1287 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1288 return false;
1289
1290 return true;
1291 }
1292
1293 /* Create an INT_CST node of TYPE and value CST.
1294 The returned node is always shared. For small integers we use a
1295 per-type vector cache, for larger ones we use a single hash table.
1296 The value is extended from its precision according to the sign of
1297 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1298 the upper bits and ensures that hashing and value equality based
1299 upon the underlying HOST_WIDE_INTs works without masking. */
1300
1301 tree
1302 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1303 {
1304 tree t;
1305 int ix = -1;
1306 int limit = 0;
1307
1308 gcc_assert (type);
1309 unsigned int prec = TYPE_PRECISION (type);
1310 signop sgn = TYPE_SIGN (type);
1311
1312 /* Verify that everything is canonical. */
1313 int l = pcst.get_len ();
1314 if (l > 1)
1315 {
1316 if (pcst.elt (l - 1) == 0)
1317 gcc_checking_assert (pcst.elt (l - 2) < 0);
1318 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1319 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1320 }
1321
1322 wide_int cst = wide_int::from (pcst, prec, sgn);
1323 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1324
1325 if (ext_len == 1)
1326 {
1327 /* We just need to store a single HOST_WIDE_INT. */
1328 HOST_WIDE_INT hwi;
1329 if (TYPE_UNSIGNED (type))
1330 hwi = cst.to_uhwi ();
1331 else
1332 hwi = cst.to_shwi ();
1333
1334 switch (TREE_CODE (type))
1335 {
1336 case NULLPTR_TYPE:
1337 gcc_assert (hwi == 0);
1338 /* Fallthru. */
1339
1340 case POINTER_TYPE:
1341 case REFERENCE_TYPE:
1342 case POINTER_BOUNDS_TYPE:
1343 /* Cache NULL pointer and zero bounds. */
1344 if (hwi == 0)
1345 {
1346 limit = 1;
1347 ix = 0;
1348 }
1349 break;
1350
1351 case BOOLEAN_TYPE:
1352 /* Cache false or true. */
1353 limit = 2;
1354 if (hwi < 2)
1355 ix = hwi;
1356 break;
1357
1358 case INTEGER_TYPE:
1359 case OFFSET_TYPE:
1360 if (TYPE_SIGN (type) == UNSIGNED)
1361 {
1362 /* Cache [0, N). */
1363 limit = INTEGER_SHARE_LIMIT;
1364 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1365 ix = hwi;
1366 }
1367 else
1368 {
1369 /* Cache [-1, N). */
1370 limit = INTEGER_SHARE_LIMIT + 1;
1371 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1372 ix = hwi + 1;
1373 }
1374 break;
1375
1376 case ENUMERAL_TYPE:
1377 break;
1378
1379 default:
1380 gcc_unreachable ();
1381 }
1382
1383 if (ix >= 0)
1384 {
1385 /* Look for it in the type's vector of small shared ints. */
1386 if (!TYPE_CACHED_VALUES_P (type))
1387 {
1388 TYPE_CACHED_VALUES_P (type) = 1;
1389 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1390 }
1391
1392 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1393 if (t)
1394 /* Make sure no one is clobbering the shared constant. */
1395 gcc_checking_assert (TREE_TYPE (t) == type
1396 && TREE_INT_CST_NUNITS (t) == 1
1397 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1398 && TREE_INT_CST_EXT_NUNITS (t) == 1
1399 && TREE_INT_CST_ELT (t, 0) == hwi);
1400 else
1401 {
1402 /* Create a new shared int. */
1403 t = build_new_int_cst (type, cst);
1404 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1405 }
1406 }
1407 else
1408 {
1409 /* Use the cache of larger shared ints, using int_cst_node as
1410 a temporary. */
1411 void **slot;
1412
1413 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1414 TREE_TYPE (int_cst_node) = type;
1415
1416 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1417 t = (tree) *slot;
1418 if (!t)
1419 {
1420 /* Insert this one into the hash table. */
1421 t = int_cst_node;
1422 *slot = t;
1423 /* Make a new node for next time round. */
1424 int_cst_node = make_int_cst (1, 1);
1425 }
1426 }
1427 }
1428 else
1429 {
1430 /* The value either hashes properly or we drop it on the floor
1431 for the gc to take care of. There will not be enough of them
1432 to worry about. */
1433 void **slot;
1434
1435 tree nt = build_new_int_cst (type, cst);
1436 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1437 t = (tree) *slot;
1438 if (!t)
1439 {
1440 /* Insert this one into the hash table. */
1441 t = nt;
1442 *slot = t;
1443 }
1444 }
1445
1446 return t;
1447 }
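
/* Illustrative sketch, not part of the original file: equal INTEGER_CST
   values of the same type come back as the same shared node, whether
   they hit the per-type vector of small values or the larger hash
   table.  The function name is hypothetical and the block is not
   compiled.  */
#if 0
static void
example_int_cst_sharing (void)
{
  /* 7 is below INTEGER_SHARE_LIMIT, so both calls return the cached
     node for integer_type_node.  */
  gcc_assert (build_int_cst (integer_type_node, 7)
              == build_int_cst (integer_type_node, 7));

  /* Larger values are shared through int_cst_hash_table instead.  */
  gcc_assert (build_int_cst (integer_type_node, 123456)
              == build_int_cst (integer_type_node, 123456));
}
#endif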
1448
1449 void
1450 cache_integer_cst (tree t)
1451 {
1452 tree type = TREE_TYPE (t);
1453 int ix = -1;
1454 int limit = 0;
1455 int prec = TYPE_PRECISION (type);
1456
1457 gcc_assert (!TREE_OVERFLOW (t));
1458
1459 switch (TREE_CODE (type))
1460 {
1461 case NULLPTR_TYPE:
1462 gcc_assert (integer_zerop (t));
1463 /* Fallthru. */
1464
1465 case POINTER_TYPE:
1466 case REFERENCE_TYPE:
1467 /* Cache NULL pointer. */
1468 if (integer_zerop (t))
1469 {
1470 limit = 1;
1471 ix = 0;
1472 }
1473 break;
1474
1475 case BOOLEAN_TYPE:
1476 /* Cache false or true. */
1477 limit = 2;
1478 if (wi::ltu_p (t, 2))
1479 ix = TREE_INT_CST_ELT (t, 0);
1480 break;
1481
1482 case INTEGER_TYPE:
1483 case OFFSET_TYPE:
1484 if (TYPE_UNSIGNED (type))
1485 {
1486 /* Cache 0..N */
1487 limit = INTEGER_SHARE_LIMIT;
1488
1489 /* This is a little hokey, but if the prec is smaller than
1490 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1491 obvious test will not get the correct answer. */
1492 if (prec < HOST_BITS_PER_WIDE_INT)
1493 {
1494 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1495 ix = tree_to_uhwi (t);
1496 }
1497 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1498 ix = tree_to_uhwi (t);
1499 }
1500 else
1501 {
1502 /* Cache -1..N */
1503 limit = INTEGER_SHARE_LIMIT + 1;
1504
1505 if (integer_minus_onep (t))
1506 ix = 0;
1507 else if (!wi::neg_p (t))
1508 {
1509 if (prec < HOST_BITS_PER_WIDE_INT)
1510 {
1511 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1512 ix = tree_to_shwi (t) + 1;
1513 }
1514 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1515 ix = tree_to_shwi (t) + 1;
1516 }
1517 }
1518 break;
1519
1520 case ENUMERAL_TYPE:
1521 break;
1522
1523 default:
1524 gcc_unreachable ();
1525 }
1526
1527 if (ix >= 0)
1528 {
1529 /* Look for it in the type's vector of small shared ints. */
1530 if (!TYPE_CACHED_VALUES_P (type))
1531 {
1532 TYPE_CACHED_VALUES_P (type) = 1;
1533 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1534 }
1535
1536 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1537 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1538 }
1539 else
1540 {
1541 /* Use the cache of larger shared ints. */
1542 void **slot;
1543
1544 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1545 /* If there is already an entry for the number verify it's the
1546 same. */
1547 if (*slot)
1548 gcc_assert (wi::eq_p (tree (*slot), t));
1549 else
1550 /* Otherwise insert this one into the hash table. */
1551 *slot = t;
1552 }
1553 }
1554
1555
1556 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1557 and the rest are zeros. */
1558
1559 tree
1560 build_low_bits_mask (tree type, unsigned bits)
1561 {
1562 gcc_assert (bits <= TYPE_PRECISION (type));
1563
1564 return wide_int_to_tree (type, wi::mask (bits, false,
1565 TYPE_PRECISION (type)));
1566 }
1567
1568 /* Checks that X is an integer constant that can be expressed in (unsigned)
1569 HOST_WIDE_INT without loss of precision. */
1570
1571 bool
1572 cst_and_fits_in_hwi (const_tree x)
1573 {
1574 if (TREE_CODE (x) != INTEGER_CST)
1575 return false;
1576
1577 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1578 return false;
1579
1580 return TREE_INT_CST_NUNITS (x) == 1;
1581 }
1582
1583 /* Build a newly constructed VECTOR_CST node of length LEN. */
1584
1585 tree
1586 make_vector_stat (unsigned len MEM_STAT_DECL)
1587 {
1588 tree t;
1589 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1590
1591 record_node_allocation_statistics (VECTOR_CST, length);
1592
1593 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1594
1595 TREE_SET_CODE (t, VECTOR_CST);
1596 TREE_CONSTANT (t) = 1;
1597
1598 return t;
1599 }
1600
1601 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1602 are given by the array pointed to by VALS. */
1603
1604 tree
1605 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1606 {
1607 int over = 0;
1608 unsigned cnt = 0;
1609 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1610 TREE_TYPE (v) = type;
1611
1612 /* Iterate through elements and check for overflow. */
1613 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1614 {
1615 tree value = vals[cnt];
1616
1617 VECTOR_CST_ELT (v, cnt) = value;
1618
1619 /* Don't crash if we get an address constant. */
1620 if (!CONSTANT_CLASS_P (value))
1621 continue;
1622
1623 over |= TREE_OVERFLOW (value);
1624 }
1625
1626 TREE_OVERFLOW (v) = over;
1627 return v;
1628 }
1629
1630 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1631 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1632
1633 tree
1634 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1635 {
1636 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1637 unsigned HOST_WIDE_INT idx;
1638 tree value;
1639
1640 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1641 vec[idx] = value;
1642 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1643 vec[idx] = build_zero_cst (TREE_TYPE (type));
1644
1645 return build_vector (type, vec);
1646 }
1647
1648 /* Build a vector of type VECTYPE where all the elements are SCs. */
1649 tree
1650 build_vector_from_val (tree vectype, tree sc)
1651 {
1652 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1653
1654 if (sc == error_mark_node)
1655 return sc;
1656
1657 /* Verify that the vector type is suitable for SC. Note that there
1658 is some inconsistency in the type-system with respect to restrict
1659 qualifications of pointers. Vector types always have a main-variant
1660 element type and the qualification is applied to the vector-type.
1661 So TREE_TYPE (vector-type) does not return a properly qualified
1662 vector element-type. */
1663 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1664 TREE_TYPE (vectype)));
1665
1666 if (CONSTANT_CLASS_P (sc))
1667 {
1668 tree *v = XALLOCAVEC (tree, nunits);
1669 for (i = 0; i < nunits; ++i)
1670 v[i] = sc;
1671 return build_vector (vectype, v);
1672 }
1673 else
1674 {
1675 vec<constructor_elt, va_gc> *v;
1676 vec_alloc (v, nunits);
1677 for (i = 0; i < nunits; ++i)
1678 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1679 return build_constructor (vectype, v);
1680 }
1681 }
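
/* Illustrative sketch, not part of the original file: splatting a
   constant element yields a VECTOR_CST, while a non-constant element
   falls back to a CONSTRUCTOR, as the code above shows.  The function
   name is hypothetical and the block is not compiled.  */
#if 0
static tree
example_build_splat (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  tree elt = build_int_cst (integer_type_node, 3);
  tree splat = build_vector_from_val (v4si, elt);
  gcc_assert (TREE_CODE (splat) == VECTOR_CST);
  return splat;
}
#endif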
1682
1683 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1684 are in the vec pointed to by VALS. */
1685 tree
1686 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1687 {
1688 tree c = make_node (CONSTRUCTOR);
1689 unsigned int i;
1690 constructor_elt *elt;
1691 bool constant_p = true;
1692 bool side_effects_p = false;
1693
1694 TREE_TYPE (c) = type;
1695 CONSTRUCTOR_ELTS (c) = vals;
1696
1697 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1698 {
1699 /* Mostly ctors will have elts that don't have side-effects, so
1700 the usual case is to scan all the elements. Hence a single
1701 loop for both const and side effects, rather than one loop
1702 each (with early outs). */
1703 if (!TREE_CONSTANT (elt->value))
1704 constant_p = false;
1705 if (TREE_SIDE_EFFECTS (elt->value))
1706 side_effects_p = true;
1707 }
1708
1709 TREE_SIDE_EFFECTS (c) = side_effects_p;
1710 TREE_CONSTANT (c) = constant_p;
1711
1712 return c;
1713 }
1714
1715 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1716 INDEX and VALUE. */
1717 tree
1718 build_constructor_single (tree type, tree index, tree value)
1719 {
1720 vec<constructor_elt, va_gc> *v;
1721 constructor_elt elt = {index, value};
1722
1723 vec_alloc (v, 1);
1724 v->quick_push (elt);
1725
1726 return build_constructor (type, v);
1727 }
1728
1729
1730 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1731 are in a list pointed to by VALS. */
1732 tree
1733 build_constructor_from_list (tree type, tree vals)
1734 {
1735 tree t;
1736 vec<constructor_elt, va_gc> *v = NULL;
1737
1738 if (vals)
1739 {
1740 vec_alloc (v, list_length (vals));
1741 for (t = vals; t; t = TREE_CHAIN (t))
1742 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1743 }
1744
1745 return build_constructor (type, v);
1746 }
1747
1748 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1749 of elements, provided as index/value pairs. */
1750
1751 tree
1752 build_constructor_va (tree type, int nelts, ...)
1753 {
1754 vec<constructor_elt, va_gc> *v = NULL;
1755 va_list p;
1756
1757 va_start (p, nelts);
1758 vec_alloc (v, nelts);
1759 while (nelts--)
1760 {
1761 tree index = va_arg (p, tree);
1762 tree value = va_arg (p, tree);
1763 CONSTRUCTOR_APPEND_ELT (v, index, value);
1764 }
1765 va_end (p);
1766 return build_constructor (type, v);
1767 }
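
/* Illustrative sketch, not part of the original file: the vararg
   wrapper above is convenient for small fixed-size initializers, such
   as a two-element array constructor.  The function name is
   hypothetical and the block is not compiled.  */
#if 0
static tree
example_two_element_ctor (void)
{
  tree domain = build_index_type (size_int (1));
  tree arr = build_array_type (integer_type_node, domain);
  return build_constructor_va (arr, 2,
                               size_int (0), integer_zero_node,
                               size_int (1), integer_one_node);
}
#endif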
1768
1769 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1770
1771 tree
1772 build_fixed (tree type, FIXED_VALUE_TYPE f)
1773 {
1774 tree v;
1775 FIXED_VALUE_TYPE *fp;
1776
1777 v = make_node (FIXED_CST);
1778 fp = ggc_alloc<fixed_value> ();
1779 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1780
1781 TREE_TYPE (v) = type;
1782 TREE_FIXED_CST_PTR (v) = fp;
1783 return v;
1784 }
1785
1786 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1787
1788 tree
1789 build_real (tree type, REAL_VALUE_TYPE d)
1790 {
1791 tree v;
1792 REAL_VALUE_TYPE *dp;
1793 int overflow = 0;
1794
1795 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1796 Consider doing it via real_convert now. */
1797
1798 v = make_node (REAL_CST);
1799 dp = ggc_alloc<real_value> ();
1800 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1801
1802 TREE_TYPE (v) = type;
1803 TREE_REAL_CST_PTR (v) = dp;
1804 TREE_OVERFLOW (v) = overflow;
1805 return v;
1806 }
1807
1808 /* Return a REAL_VALUE_TYPE holding the integer value of the
1809 INTEGER_CST node I, converted for floating-point type TYPE. */
1810
1811 REAL_VALUE_TYPE
1812 real_value_from_int_cst (const_tree type, const_tree i)
1813 {
1814 REAL_VALUE_TYPE d;
1815
1816 /* Clear all bits of the real value type so that we can later do
1817 bitwise comparisons to see if two values are the same. */
1818 memset (&d, 0, sizeof d);
1819
1820 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1821 TYPE_SIGN (TREE_TYPE (i)));
1822 return d;
1823 }
1824
1825 /* Given a tree representing an integer constant I, return a tree
1826 representing the same value as a floating-point constant of type TYPE. */
1827
1828 tree
1829 build_real_from_int_cst (tree type, const_tree i)
1830 {
1831 tree v;
1832 int overflow = TREE_OVERFLOW (i);
1833
1834 v = build_real (type, real_value_from_int_cst (type, i));
1835
1836 TREE_OVERFLOW (v) |= overflow;
1837 return v;
1838 }
1839
1840 /* Return a newly constructed STRING_CST node whose value is
1841 the LEN characters at STR.
1842 Note that for a C string literal, LEN should include the trailing NUL.
1843 The TREE_TYPE is not initialized. */
1844
1845 tree
1846 build_string (int len, const char *str)
1847 {
1848 tree s;
1849 size_t length;
1850
1851 /* Do not waste bytes provided by padding of struct tree_string. */
1852 length = len + offsetof (struct tree_string, str) + 1;
1853
1854 record_node_allocation_statistics (STRING_CST, length);
1855
1856 s = (tree) ggc_internal_alloc (length);
1857
1858 memset (s, 0, sizeof (struct tree_typed));
1859 TREE_SET_CODE (s, STRING_CST);
1860 TREE_CONSTANT (s) = 1;
1861 TREE_STRING_LENGTH (s) = len;
1862 memcpy (s->string.str, str, len);
1863 s->string.str[len] = '\0';
1864
1865 return s;
1866 }
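
/* Illustrative sketch, not part of the original file: since build_string
   leaves TREE_TYPE unset, callers typically give the node an array type
   themselves (build_string_literal does this and more).  The function
   name is hypothetical and the block is not compiled.  */
#if 0
static tree
example_build_c_string (const char *p)
{
  int len = strlen (p) + 1;	/* Include the trailing NUL.  */
  tree str = build_string (len, p);
  tree domain = build_index_type (size_int (len - 1));
  TREE_TYPE (str) = build_array_type (char_type_node, domain);
  return str;
}
#endif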
1867
1868 /* Return a newly constructed COMPLEX_CST node whose value is
1869 specified by the real and imaginary parts REAL and IMAG.
1870 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1871 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1872
1873 tree
1874 build_complex (tree type, tree real, tree imag)
1875 {
1876 tree t = make_node (COMPLEX_CST);
1877
1878 TREE_REALPART (t) = real;
1879 TREE_IMAGPART (t) = imag;
1880 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1881 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1882 return t;
1883 }
1884
1885 /* Return a constant of arithmetic type TYPE which is the
1886 multiplicative identity of the set TYPE. */
1887
1888 tree
1889 build_one_cst (tree type)
1890 {
1891 switch (TREE_CODE (type))
1892 {
1893 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1894 case POINTER_TYPE: case REFERENCE_TYPE:
1895 case OFFSET_TYPE:
1896 return build_int_cst (type, 1);
1897
1898 case REAL_TYPE:
1899 return build_real (type, dconst1);
1900
1901 case FIXED_POINT_TYPE:
1902 /* We can only generate 1 for accum types. */
1903 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1904 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1905
1906 case VECTOR_TYPE:
1907 {
1908 tree scalar = build_one_cst (TREE_TYPE (type));
1909
1910 return build_vector_from_val (type, scalar);
1911 }
1912
1913 case COMPLEX_TYPE:
1914 return build_complex (type,
1915 build_one_cst (TREE_TYPE (type)),
1916 build_zero_cst (TREE_TYPE (type)));
1917
1918 default:
1919 gcc_unreachable ();
1920 }
1921 }
1922
1923 /* Return an integer of type TYPE containing all 1's in as much precision as
1924 it contains, or a complex or vector whose subparts are such integers. */
1925
1926 tree
1927 build_all_ones_cst (tree type)
1928 {
1929 if (TREE_CODE (type) == COMPLEX_TYPE)
1930 {
1931 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1932 return build_complex (type, scalar, scalar);
1933 }
1934 else
1935 return build_minus_one_cst (type);
1936 }
1937
1938 /* Return a constant of arithmetic type TYPE which is the
1939 opposite of the multiplicative identity of the set TYPE. */
1940
1941 tree
1942 build_minus_one_cst (tree type)
1943 {
1944 switch (TREE_CODE (type))
1945 {
1946 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1947 case POINTER_TYPE: case REFERENCE_TYPE:
1948 case OFFSET_TYPE:
1949 return build_int_cst (type, -1);
1950
1951 case REAL_TYPE:
1952 return build_real (type, dconstm1);
1953
1954 case FIXED_POINT_TYPE:
1955 /* We can only generate -1 for accum types. */
1956 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1957 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1958 TYPE_MODE (type)));
1959
1960 case VECTOR_TYPE:
1961 {
1962 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1963
1964 return build_vector_from_val (type, scalar);
1965 }
1966
1967 case COMPLEX_TYPE:
1968 return build_complex (type,
1969 build_minus_one_cst (TREE_TYPE (type)),
1970 build_zero_cst (TREE_TYPE (type)));
1971
1972 default:
1973 gcc_unreachable ();
1974 }
1975 }
1976
1977 /* Build 0 constant of type TYPE. This is used by constructor folding
1978 and thus the constant should be represented in memory by
1979 zero(es). */
1980
1981 tree
1982 build_zero_cst (tree type)
1983 {
1984 switch (TREE_CODE (type))
1985 {
1986 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1987 case POINTER_TYPE: case REFERENCE_TYPE:
1988 case OFFSET_TYPE: case NULLPTR_TYPE:
1989 return build_int_cst (type, 0);
1990
1991 case REAL_TYPE:
1992 return build_real (type, dconst0);
1993
1994 case FIXED_POINT_TYPE:
1995 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1996
1997 case VECTOR_TYPE:
1998 {
1999 tree scalar = build_zero_cst (TREE_TYPE (type));
2000
2001 return build_vector_from_val (type, scalar);
2002 }
2003
2004 case COMPLEX_TYPE:
2005 {
2006 tree zero = build_zero_cst (TREE_TYPE (type));
2007
2008 return build_complex (type, zero, zero);
2009 }
2010
2011 default:
2012 if (!AGGREGATE_TYPE_P (type))
2013 return fold_convert (type, integer_zero_node);
2014 return build_constructor (type, NULL);
2015 }
2016 }
2017
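/* Editor's illustrative sketch (not part of the original source): how the
   build_*_cst helpers above are typically used together with the integer
   predicates defined later in this file.  integer_type_node comes from
   tree.h.  */

static void ATTRIBUTE_UNUSED
example_identity_constants (void)
{
  tree zero = build_zero_cst (integer_type_node);            /* (int) 0  */
  tree one = build_one_cst (integer_type_node);              /* (int) 1  */
  tree minus_one = build_minus_one_cst (integer_type_node);  /* (int) -1 */

  gcc_checking_assert (integer_zerop (zero));
  gcc_checking_assert (integer_onep (one));
  gcc_checking_assert (integer_minus_onep (minus_one));
}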
2018
2019 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2020
2021 tree
2022 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2023 {
2024 tree t;
2025 size_t length = (offsetof (struct tree_binfo, base_binfos)
2026 + vec<tree, va_gc>::embedded_size (base_binfos));
2027
2028 record_node_allocation_statistics (TREE_BINFO, length);
2029
2030 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2031
2032 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2033
2034 TREE_SET_CODE (t, TREE_BINFO);
2035
2036 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2037
2038 return t;
2039 }
2040
2041 /* Create a CASE_LABEL_EXPR tree node and return it. */
2042
2043 tree
2044 build_case_label (tree low_value, tree high_value, tree label_decl)
2045 {
2046 tree t = make_node (CASE_LABEL_EXPR);
2047
2048 TREE_TYPE (t) = void_type_node;
2049 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2050
2051 CASE_LOW (t) = low_value;
2052 CASE_HIGH (t) = high_value;
2053 CASE_LABEL (t) = label_decl;
2054 CASE_CHAIN (t) = NULL_TREE;
2055
2056 return t;
2057 }
2058
2059 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2060 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2061 The latter determines the length of the HOST_WIDE_INT vector. */
2062
2063 tree
2064 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2065 {
2066 tree t;
2067 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2068 + sizeof (struct tree_int_cst));
2069
2070 gcc_assert (len);
2071 record_node_allocation_statistics (INTEGER_CST, length);
2072
2073 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2074
2075 TREE_SET_CODE (t, INTEGER_CST);
2076 TREE_INT_CST_NUNITS (t) = len;
2077 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2078 /* to_offset can only be applied to trees that are offset_int-sized
2079 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2080 must be exactly the precision of offset_int and so LEN is correct. */
2081 if (ext_len <= OFFSET_INT_ELTS)
2082 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2083 else
2084 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2085
2086 TREE_CONSTANT (t) = 1;
2087
2088 return t;
2089 }
2090
2091 /* Build a newly constructed TREE_VEC node of length LEN. */
2092
2093 tree
2094 make_tree_vec_stat (int len MEM_STAT_DECL)
2095 {
2096 tree t;
2097 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2098
2099 record_node_allocation_statistics (TREE_VEC, length);
2100
2101 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2102
2103 TREE_SET_CODE (t, TREE_VEC);
2104 TREE_VEC_LENGTH (t) = len;
2105
2106 return t;
2107 }
2108
2109 /* Grow a TREE_VEC node to new length LEN. */
2110
2111 tree
2112 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2113 {
2114 gcc_assert (TREE_CODE (v) == TREE_VEC);
2115
2116 int oldlen = TREE_VEC_LENGTH (v);
2117 gcc_assert (len > oldlen);
2118
2119 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2120 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2121
2122 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2123
2124 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2125
2126 TREE_VEC_LENGTH (v) = len;
2127
2128 return v;
2129 }
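
/* Editor's illustrative sketch (not part of the original source): creating
   and growing a TREE_VEC through the public make_tree_vec/grow_tree_vec
   wrappers around the *_stat functions above.  */

static void ATTRIBUTE_UNUSED
example_tree_vec (void)
{
  tree v = make_tree_vec (2);
  TREE_VEC_ELT (v, 0) = integer_zero_node;
  TREE_VEC_ELT (v, 1) = integer_one_node;

  /* Growing preserves the existing elements; the new slots must be
     filled in by the caller.  */
  v = grow_tree_vec (v, 4);
  gcc_checking_assert (TREE_VEC_LENGTH (v) == 4
		       && integer_zerop (TREE_VEC_ELT (v, 0)));
}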
2130 \f
2131 /* Return 1 if EXPR is the integer constant zero, or a complex or vector
2132 constant all of whose elements are zero. */
2133
2134 int
2135 integer_zerop (const_tree expr)
2136 {
2137 STRIP_NOPS (expr);
2138
2139 switch (TREE_CODE (expr))
2140 {
2141 case INTEGER_CST:
2142 return wi::eq_p (expr, 0);
2143 case COMPLEX_CST:
2144 return (integer_zerop (TREE_REALPART (expr))
2145 && integer_zerop (TREE_IMAGPART (expr)));
2146 case VECTOR_CST:
2147 {
2148 unsigned i;
2149 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2150 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2151 return false;
2152 return true;
2153 }
2154 default:
2155 return false;
2156 }
2157 }
2158
2159 /* Return 1 if EXPR is the integer constant one, the corresponding
2160 complex constant, or a vector constant all of whose elements are one. */
2161
2162 int
2163 integer_onep (const_tree expr)
2164 {
2165 STRIP_NOPS (expr);
2166
2167 switch (TREE_CODE (expr))
2168 {
2169 case INTEGER_CST:
2170 return wi::eq_p (wi::to_widest (expr), 1);
2171 case COMPLEX_CST:
2172 return (integer_onep (TREE_REALPART (expr))
2173 && integer_zerop (TREE_IMAGPART (expr)));
2174 case VECTOR_CST:
2175 {
2176 unsigned i;
2177 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2178 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2179 return false;
2180 return true;
2181 }
2182 default:
2183 return false;
2184 }
2185 }
2186
2187 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2188 return 1 if every piece is the integer constant one. */
2189
2190 int
2191 integer_each_onep (const_tree expr)
2192 {
2193 STRIP_NOPS (expr);
2194
2195 if (TREE_CODE (expr) == COMPLEX_CST)
2196 return (integer_onep (TREE_REALPART (expr))
2197 && integer_onep (TREE_IMAGPART (expr)));
2198 else
2199 return integer_onep (expr);
2200 }
2201
2202 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2203 it contains, or a complex or vector whose subparts are such integers. */
2204
2205 int
2206 integer_all_onesp (const_tree expr)
2207 {
2208 STRIP_NOPS (expr);
2209
2210 if (TREE_CODE (expr) == COMPLEX_CST
2211 && integer_all_onesp (TREE_REALPART (expr))
2212 && integer_all_onesp (TREE_IMAGPART (expr)))
2213 return 1;
2214
2215 else if (TREE_CODE (expr) == VECTOR_CST)
2216 {
2217 unsigned i;
2218 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2219 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2220 return 0;
2221 return 1;
2222 }
2223
2224 else if (TREE_CODE (expr) != INTEGER_CST)
2225 return 0;
2226
2227 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2228 }
2229
2230 /* Return 1 if EXPR is the integer constant minus one. */
2231
2232 int
2233 integer_minus_onep (const_tree expr)
2234 {
2235 STRIP_NOPS (expr);
2236
2237 if (TREE_CODE (expr) == COMPLEX_CST)
2238 return (integer_all_onesp (TREE_REALPART (expr))
2239 && integer_zerop (TREE_IMAGPART (expr)));
2240 else
2241 return integer_all_onesp (expr);
2242 }
2243
2244 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2245 one bit on). */
2246
2247 int
2248 integer_pow2p (const_tree expr)
2249 {
2250 STRIP_NOPS (expr);
2251
2252 if (TREE_CODE (expr) == COMPLEX_CST
2253 && integer_pow2p (TREE_REALPART (expr))
2254 && integer_zerop (TREE_IMAGPART (expr)))
2255 return 1;
2256
2257 if (TREE_CODE (expr) != INTEGER_CST)
2258 return 0;
2259
2260 return wi::popcount (expr) == 1;
2261 }
2262
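/* Editor's illustrative sketch (not part of the original source): for a
   power-of-two constant, tree_log2 (defined just below) recovers the
   exponent that integer_pow2p tests for.  */

static void ATTRIBUTE_UNUSED
example_pow2 (void)
{
  tree c = build_int_cst (integer_type_node, 64);
  if (integer_pow2p (c))
    gcc_checking_assert (tree_log2 (c) == 6);
}
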
2263 /* Return 1 if EXPR is an integer constant other than zero or a
2264 complex constant other than zero. */
2265
2266 int
2267 integer_nonzerop (const_tree expr)
2268 {
2269 STRIP_NOPS (expr);
2270
2271 return ((TREE_CODE (expr) == INTEGER_CST
2272 && !wi::eq_p (expr, 0))
2273 || (TREE_CODE (expr) == COMPLEX_CST
2274 && (integer_nonzerop (TREE_REALPART (expr))
2275 || integer_nonzerop (TREE_IMAGPART (expr)))));
2276 }
2277
2278 /* Return 1 if EXPR is the fixed-point constant zero. */
2279
2280 int
2281 fixed_zerop (const_tree expr)
2282 {
2283 return (TREE_CODE (expr) == FIXED_CST
2284 && TREE_FIXED_CST (expr).data.is_zero ());
2285 }
2286
2287 /* Return the base-2 logarithm of EXPR, which is known to be a
2288 power of two. */
2289
2290 int
2291 tree_log2 (const_tree expr)
2292 {
2293 STRIP_NOPS (expr);
2294
2295 if (TREE_CODE (expr) == COMPLEX_CST)
2296 return tree_log2 (TREE_REALPART (expr));
2297
2298 return wi::exact_log2 (expr);
2299 }
2300
2301 /* Similar, but return the largest integer Y such that 2 ** Y is less
2302 than or equal to EXPR. */
2303
2304 int
2305 tree_floor_log2 (const_tree expr)
2306 {
2307 STRIP_NOPS (expr);
2308
2309 if (TREE_CODE (expr) == COMPLEX_CST)
2310 return tree_log2 (TREE_REALPART (expr));
2311
2312 return wi::floor_log2 (expr);
2313 }
2314
2315 /* Return the number of known trailing zero bits in EXPR, or, if the value of
2316 EXPR is known to be zero, the precision of its type. */
2317
2318 unsigned int
2319 tree_ctz (const_tree expr)
2320 {
2321 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2322 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2323 return 0;
2324
2325 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2326 switch (TREE_CODE (expr))
2327 {
2328 case INTEGER_CST:
2329 ret1 = wi::ctz (expr);
2330 return MIN (ret1, prec);
2331 case SSA_NAME:
2332 ret1 = wi::ctz (get_nonzero_bits (expr));
2333 return MIN (ret1, prec);
2334 case PLUS_EXPR:
2335 case MINUS_EXPR:
2336 case BIT_IOR_EXPR:
2337 case BIT_XOR_EXPR:
2338 case MIN_EXPR:
2339 case MAX_EXPR:
2340 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2341 if (ret1 == 0)
2342 return ret1;
2343 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2344 return MIN (ret1, ret2);
2345 case POINTER_PLUS_EXPR:
2346 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2347 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2348 /* Second operand is sizetype, which could in theory be
2349 wider than the pointer's precision. Make sure we never
2350 return more than prec. */
2351 ret2 = MIN (ret2, prec);
2352 return MIN (ret1, ret2);
2353 case BIT_AND_EXPR:
2354 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2355 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2356 return MAX (ret1, ret2);
2357 case MULT_EXPR:
2358 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2359 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2360 return MIN (ret1 + ret2, prec);
2361 case LSHIFT_EXPR:
2362 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2363 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2364 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2365 {
2366 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2367 return MIN (ret1 + ret2, prec);
2368 }
2369 return ret1;
2370 case RSHIFT_EXPR:
2371 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2372 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2373 {
2374 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2375 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2376 if (ret1 > ret2)
2377 return ret1 - ret2;
2378 }
2379 return 0;
2380 case TRUNC_DIV_EXPR:
2381 case CEIL_DIV_EXPR:
2382 case FLOOR_DIV_EXPR:
2383 case ROUND_DIV_EXPR:
2384 case EXACT_DIV_EXPR:
2385 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2386 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2387 {
2388 int l = tree_log2 (TREE_OPERAND (expr, 1));
2389 if (l >= 0)
2390 {
2391 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2392 ret2 = l;
2393 if (ret1 > ret2)
2394 return ret1 - ret2;
2395 }
2396 }
2397 return 0;
2398 CASE_CONVERT:
2399 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2400 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2401 ret1 = prec;
2402 return MIN (ret1, prec);
2403 case SAVE_EXPR:
2404 return tree_ctz (TREE_OPERAND (expr, 0));
2405 case COND_EXPR:
2406 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2407 if (ret1 == 0)
2408 return 0;
2409 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2410 return MIN (ret1, ret2);
2411 case COMPOUND_EXPR:
2412 return tree_ctz (TREE_OPERAND (expr, 1));
2413 case ADDR_EXPR:
2414 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2415 if (ret1 > BITS_PER_UNIT)
2416 {
2417 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2418 return MIN (ret1, prec);
2419 }
2420 return 0;
2421 default:
2422 return 0;
2423 }
2424 }
2425
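/* Editor's illustrative sketch (not part of the original source): what
   tree_ctz can deduce without knowing the value of a variable.  The
   VAR_DECL "x" exists only for the example.  */

static void ATTRIBUTE_UNUSED
example_tree_ctz (void)
{
  tree x = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		       get_identifier ("x"), integer_type_node);
  tree four = build_int_cst (integer_type_node, 4);

  /* Whatever x is, x << 4 has at least four trailing zero bits.  */
  tree shifted = build2 (LSHIFT_EXPR, integer_type_node, x, four);
  gcc_checking_assert (tree_ctz (shifted) == 4);

  /* For a constant the answer is exact: 24 == 0b11000.  */
  gcc_checking_assert (tree_ctz (build_int_cst (integer_type_node, 24)) == 3);
}
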
2426 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2427 decimal float constants, so don't return 1 for them. */
2428
2429 int
2430 real_zerop (const_tree expr)
2431 {
2432 STRIP_NOPS (expr);
2433
2434 switch (TREE_CODE (expr))
2435 {
2436 case REAL_CST:
2437 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2438 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2439 case COMPLEX_CST:
2440 return real_zerop (TREE_REALPART (expr))
2441 && real_zerop (TREE_IMAGPART (expr));
2442 case VECTOR_CST:
2443 {
2444 unsigned i;
2445 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2446 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2447 return false;
2448 return true;
2449 }
2450 default:
2451 return false;
2452 }
2453 }
2454
2455 /* Return 1 if EXPR is the real constant one in real or complex form.
2456 Trailing zeroes matter for decimal float constants, so don't return
2457 1 for them. */
2458
2459 int
2460 real_onep (const_tree expr)
2461 {
2462 STRIP_NOPS (expr);
2463
2464 switch (TREE_CODE (expr))
2465 {
2466 case REAL_CST:
2467 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2468 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2469 case COMPLEX_CST:
2470 return real_onep (TREE_REALPART (expr))
2471 && real_zerop (TREE_IMAGPART (expr));
2472 case VECTOR_CST:
2473 {
2474 unsigned i;
2475 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2476 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2477 return false;
2478 return true;
2479 }
2480 default:
2481 return false;
2482 }
2483 }
2484
2485 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2486 matter for decimal float constants, so don't return 1 for them. */
2487
2488 int
2489 real_minus_onep (const_tree expr)
2490 {
2491 STRIP_NOPS (expr);
2492
2493 switch (TREE_CODE (expr))
2494 {
2495 case REAL_CST:
2496 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2497 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2498 case COMPLEX_CST:
2499 return real_minus_onep (TREE_REALPART (expr))
2500 && real_zerop (TREE_IMAGPART (expr));
2501 case VECTOR_CST:
2502 {
2503 unsigned i;
2504 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2505 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2506 return false;
2507 return true;
2508 }
2509 default:
2510 return false;
2511 }
2512 }
2513
2514 /* Nonzero if EXP is a constant or a cast of a constant. */
2515
2516 int
2517 really_constant_p (const_tree exp)
2518 {
2519 /* This is not quite the same as STRIP_NOPS. It does more. */
2520 while (CONVERT_EXPR_P (exp)
2521 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2522 exp = TREE_OPERAND (exp, 0);
2523 return TREE_CONSTANT (exp);
2524 }
2525 \f
2526 /* Return first list element whose TREE_VALUE is ELEM.
2527 Return 0 if ELEM is not in LIST. */
2528
2529 tree
2530 value_member (tree elem, tree list)
2531 {
2532 while (list)
2533 {
2534 if (elem == TREE_VALUE (list))
2535 return list;
2536 list = TREE_CHAIN (list);
2537 }
2538 return NULL_TREE;
2539 }
2540
2541 /* Return first list element whose TREE_PURPOSE is ELEM.
2542 Return 0 if ELEM is not in LIST. */
2543
2544 tree
2545 purpose_member (const_tree elem, tree list)
2546 {
2547 while (list)
2548 {
2549 if (elem == TREE_PURPOSE (list))
2550 return list;
2551 list = TREE_CHAIN (list);
2552 }
2553 return NULL_TREE;
2554 }
2555
2556 /* Return true if ELEM is in V. */
2557
2558 bool
2559 vec_member (const_tree elem, vec<tree, va_gc> *v)
2560 {
2561 unsigned ix;
2562 tree t;
2563 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2564 if (elem == t)
2565 return true;
2566 return false;
2567 }
2568
2569 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2570 NULL_TREE. */
2571
2572 tree
2573 chain_index (int idx, tree chain)
2574 {
2575 for (; chain && idx > 0; --idx)
2576 chain = TREE_CHAIN (chain);
2577 return chain;
2578 }
2579
2580 /* Return nonzero if ELEM is part of the chain CHAIN. */
2581
2582 int
2583 chain_member (const_tree elem, const_tree chain)
2584 {
2585 while (chain)
2586 {
2587 if (elem == chain)
2588 return 1;
2589 chain = DECL_CHAIN (chain);
2590 }
2591
2592 return 0;
2593 }
2594
2595 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2596 We expect a null pointer to mark the end of the chain.
2597 This is the Lisp primitive `length'. */
2598
2599 int
2600 list_length (const_tree t)
2601 {
2602 const_tree p = t;
2603 #ifdef ENABLE_TREE_CHECKING
2604 const_tree q = t;
2605 #endif
2606 int len = 0;
2607
2608 while (p)
2609 {
2610 p = TREE_CHAIN (p);
2611 #ifdef ENABLE_TREE_CHECKING
2612 if (len % 2)
2613 q = TREE_CHAIN (q);
2614 gcc_assert (p != q);
2615 #endif
2616 len++;
2617 }
2618
2619 return len;
2620 }
2621
2622 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2623 UNION_TYPE TYPE, or NULL_TREE if none. */
2624
2625 tree
2626 first_field (const_tree type)
2627 {
2628 tree t = TYPE_FIELDS (type);
2629 while (t && TREE_CODE (t) != FIELD_DECL)
2630 t = TREE_CHAIN (t);
2631 return t;
2632 }
2633
2634 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2635 by modifying the last node in chain 1 to point to chain 2.
2636 This is the Lisp primitive `nconc'. */
2637
2638 tree
2639 chainon (tree op1, tree op2)
2640 {
2641 tree t1;
2642
2643 if (!op1)
2644 return op2;
2645 if (!op2)
2646 return op1;
2647
2648 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2649 continue;
2650 TREE_CHAIN (t1) = op2;
2651
2652 #ifdef ENABLE_TREE_CHECKING
2653 {
2654 tree t2;
2655 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2656 gcc_assert (t2 != t1);
2657 }
2658 #endif
2659
2660 return op1;
2661 }
2662
2663 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2664
2665 tree
2666 tree_last (tree chain)
2667 {
2668 tree next;
2669 if (chain)
2670 while ((next = TREE_CHAIN (chain)))
2671 chain = next;
2672 return chain;
2673 }
2674
2675 /* Reverse the order of elements in the chain T,
2676 and return the new head of the chain (old last element). */
2677
2678 tree
2679 nreverse (tree t)
2680 {
2681 tree prev = 0, decl, next;
2682 for (decl = t; decl; decl = next)
2683 {
2684 /* We shouldn't be using this function to reverse BLOCK chains; we
2685 have blocks_nreverse for that. */
2686 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2687 next = TREE_CHAIN (decl);
2688 TREE_CHAIN (decl) = prev;
2689 prev = decl;
2690 }
2691 return prev;
2692 }
2693 \f
2694 /* Return a newly created TREE_LIST node whose
2695 purpose and value fields are PARM and VALUE. */
2696
2697 tree
2698 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2699 {
2700 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2701 TREE_PURPOSE (t) = parm;
2702 TREE_VALUE (t) = value;
2703 return t;
2704 }
2705
2706 /* Build a chain of TREE_LIST nodes from a vector. */
2707
2708 tree
2709 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2710 {
2711 tree ret = NULL_TREE;
2712 tree *pp = &ret;
2713 unsigned int i;
2714 tree t;
2715 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2716 {
2717 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2718 pp = &TREE_CHAIN (*pp);
2719 }
2720 return ret;
2721 }
2722
2723 /* Return a newly created TREE_LIST node whose
2724 purpose and value fields are PURPOSE and VALUE
2725 and whose TREE_CHAIN is CHAIN. */
2726
2727 tree
2728 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2729 {
2730 tree node;
2731
2732 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2733 memset (node, 0, sizeof (struct tree_common));
2734
2735 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2736
2737 TREE_SET_CODE (node, TREE_LIST);
2738 TREE_CHAIN (node) = chain;
2739 TREE_PURPOSE (node) = purpose;
2740 TREE_VALUE (node) = value;
2741 return node;
2742 }
2743
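/* Editor's illustrative sketch (not part of the original source): typical
   use of the list primitives above.  The element values are arbitrary
   integer constants chosen for the example.  */

static void ATTRIBUTE_UNUSED
example_tree_lists (void)
{
  /* Build (3 2 1) by consing onto the front, then reverse it in place.  */
  tree list = NULL_TREE;
  for (int i = 1; i <= 3; i++)
    list = tree_cons (NULL_TREE, build_int_cst (integer_type_node, i), list);
  list = nreverse (list);	/* Now (1 2 3).  */

  gcc_checking_assert (list_length (list) == 3);
  gcc_checking_assert (integer_onep (TREE_VALUE (list)));

  /* chainon destructively appends a second chain.  */
  tree more = build_tree_list (NULL_TREE,
			       build_int_cst (integer_type_node, 4));
  list = chainon (list, more);
  gcc_checking_assert (tree_last (list) == more);
}
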
2744 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2745 trees. */
2746
2747 vec<tree, va_gc> *
2748 ctor_to_vec (tree ctor)
2749 {
2750 vec<tree, va_gc> *vec;
2751 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2752 unsigned int ix;
2753 tree val;
2754
2755 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2756 vec->quick_push (val);
2757
2758 return vec;
2759 }
2760 \f
2761 /* Return the size nominally occupied by an object of type TYPE
2762 when it resides in memory. The value is measured in units of bytes,
2763 and its data type is that normally used for type sizes
2764 (which is the first type created by make_signed_type or
2765 make_unsigned_type). */
2766
2767 tree
2768 size_in_bytes (const_tree type)
2769 {
2770 tree t;
2771
2772 if (type == error_mark_node)
2773 return integer_zero_node;
2774
2775 type = TYPE_MAIN_VARIANT (type);
2776 t = TYPE_SIZE_UNIT (type);
2777
2778 if (t == 0)
2779 {
2780 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2781 return size_zero_node;
2782 }
2783
2784 return t;
2785 }
2786
2787 /* Return the size of TYPE (in bytes) as a wide integer
2788 or return -1 if the size can vary or is larger than an integer. */
2789
2790 HOST_WIDE_INT
2791 int_size_in_bytes (const_tree type)
2792 {
2793 tree t;
2794
2795 if (type == error_mark_node)
2796 return 0;
2797
2798 type = TYPE_MAIN_VARIANT (type);
2799 t = TYPE_SIZE_UNIT (type);
2800
2801 if (t && tree_fits_uhwi_p (t))
2802 return TREE_INT_CST_LOW (t);
2803 else
2804 return -1;
2805 }
2806
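/* Editor's illustrative sketch (not part of the original source): querying
   type sizes.  char_type_node always has size 1; a type whose size is not
   a compile-time constant would make int_size_in_bytes return -1.  */

static void ATTRIBUTE_UNUSED
example_type_sizes (void)
{
  gcc_checking_assert (int_size_in_bytes (char_type_node) == 1);

  /* size_in_bytes returns the same information as a sizetype tree.  */
  gcc_checking_assert (tree_to_uhwi (size_in_bytes (char_type_node)) == 1);
}
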
2807 /* Return the maximum size of TYPE (in bytes) as a wide integer
2808 or return -1 if the size can vary or is larger than an integer. */
2809
2810 HOST_WIDE_INT
2811 max_int_size_in_bytes (const_tree type)
2812 {
2813 HOST_WIDE_INT size = -1;
2814 tree size_tree;
2815
2816 /* If this is an array type, check for a possible MAX_SIZE attached. */
2817
2818 if (TREE_CODE (type) == ARRAY_TYPE)
2819 {
2820 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2821
2822 if (size_tree && tree_fits_uhwi_p (size_tree))
2823 size = tree_to_uhwi (size_tree);
2824 }
2825
2826 /* If we still haven't been able to get a size, see if the language
2827 can compute a maximum size. */
2828
2829 if (size == -1)
2830 {
2831 size_tree = lang_hooks.types.max_size (type);
2832
2833 if (size_tree && tree_fits_uhwi_p (size_tree))
2834 size = tree_to_uhwi (size_tree);
2835 }
2836
2837 return size;
2838 }
2839 \f
2840 /* Return the bit position of FIELD, in bits from the start of the record.
2841 This is a tree of type bitsizetype. */
2842
2843 tree
2844 bit_position (const_tree field)
2845 {
2846 return bit_from_pos (DECL_FIELD_OFFSET (field),
2847 DECL_FIELD_BIT_OFFSET (field));
2848 }
2849 \f
2850 /* Return the byte position of FIELD, in bytes from the start of the record.
2851 This is a tree of type sizetype. */
2852
2853 tree
2854 byte_position (const_tree field)
2855 {
2856 return byte_from_pos (DECL_FIELD_OFFSET (field),
2857 DECL_FIELD_BIT_OFFSET (field));
2858 }
2859
2860 /* Likewise, but return as an integer. It must be representable in
2861 that way (since it could be a signed value, we don't have the
2862 option of returning -1 like int_size_in_bytes can). */
2863
2864 HOST_WIDE_INT
2865 int_byte_position (const_tree field)
2866 {
2867 return tree_to_shwi (byte_position (field));
2868 }
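
/* Editor's illustrative sketch (not part of the original source): field
   positions in a record laid out by hand.  The record corresponds to
   "struct { char c; int i; }"; the identifiers are arbitrary.  */

static void ATTRIBUTE_UNUSED
example_field_positions (void)
{
  tree rec = make_node (RECORD_TYPE);
  tree c = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		       get_identifier ("c"), char_type_node);
  tree i = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		       get_identifier ("i"), integer_type_node);
  DECL_FIELD_CONTEXT (c) = rec;
  DECL_FIELD_CONTEXT (i) = rec;
  DECL_CHAIN (c) = i;
  TYPE_FIELDS (rec) = c;
  layout_type (rec);

  /* The first field is always at offset zero.  */
  gcc_checking_assert (int_byte_position (c) == 0);
  gcc_checking_assert (integer_zerop (bit_position (c)));
}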
2869 \f
2870 /* Return the strictest alignment, in bits, that T is known to have. */
2871
2872 unsigned int
2873 expr_align (const_tree t)
2874 {
2875 unsigned int align0, align1;
2876
2877 switch (TREE_CODE (t))
2878 {
2879 CASE_CONVERT: case NON_LVALUE_EXPR:
2880 /* If we have conversions, we know that the alignment of the
2881 object must meet each of the alignments of the types. */
2882 align0 = expr_align (TREE_OPERAND (t, 0));
2883 align1 = TYPE_ALIGN (TREE_TYPE (t));
2884 return MAX (align0, align1);
2885
2886 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2887 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2888 case CLEANUP_POINT_EXPR:
2889 /* These don't change the alignment of an object. */
2890 return expr_align (TREE_OPERAND (t, 0));
2891
2892 case COND_EXPR:
2893 /* The best we can do is say that the alignment is the least aligned
2894 of the two arms. */
2895 align0 = expr_align (TREE_OPERAND (t, 1));
2896 align1 = expr_align (TREE_OPERAND (t, 2));
2897 return MIN (align0, align1);
2898
2899 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2900 meaningfully, it's always 1. */
2901 case LABEL_DECL: case CONST_DECL:
2902 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2903 case FUNCTION_DECL:
2904 gcc_assert (DECL_ALIGN (t) != 0);
2905 return DECL_ALIGN (t);
2906
2907 default:
2908 break;
2909 }
2910
2911 /* Otherwise take the alignment from that of the type. */
2912 return TYPE_ALIGN (TREE_TYPE (t));
2913 }
2914 \f
2915 /* Return, as a tree node, the number of elements for TYPE (which is an
2916 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2917
2918 tree
2919 array_type_nelts (const_tree type)
2920 {
2921 tree index_type, min, max;
2922
2923 /* If the array was declared with unspecified bounds, then an error should
2924 already have been given before we got here. */
2925 if (! TYPE_DOMAIN (type))
2926 return error_mark_node;
2927
2928 index_type = TYPE_DOMAIN (type);
2929 min = TYPE_MIN_VALUE (index_type);
2930 max = TYPE_MAX_VALUE (index_type);
2931
2932 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2933 if (!max)
2934 return error_mark_node;
2935
2936 return (integer_zerop (min)
2937 ? max
2938 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2939 }
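
/* Editor's illustrative sketch (not part of the original source):
   array_type_nelts on a fixed-size array type.  The bounds are arbitrary;
   build_index_type and build_array_type come from stor-layout.h/tree.h.  */

static void ATTRIBUTE_UNUSED
example_array_type_nelts (void)
{
  /* int[10] has domain [0, 9], so array_type_nelts returns 9.  */
  tree domain = build_index_type (size_int (9));
  tree arr = build_array_type (integer_type_node, domain);
  gcc_checking_assert (tree_to_shwi (array_type_nelts (arr)) == 9);
}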
2940 \f
2941 /* If arg is static -- a reference to an object in static storage -- then
2942 return the object. This is not the same as the C meaning of `static'.
2943 If arg isn't static, return NULL. */
2944
2945 tree
2946 staticp (tree arg)
2947 {
2948 switch (TREE_CODE (arg))
2949 {
2950 case FUNCTION_DECL:
2951 /* Nested functions are static, even though taking their address will
2952 involve a trampoline as we unnest the nested function and create
2953 the trampoline on the tree level. */
2954 return arg;
2955
2956 case VAR_DECL:
2957 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2958 && ! DECL_THREAD_LOCAL_P (arg)
2959 && ! DECL_DLLIMPORT_P (arg)
2960 ? arg : NULL);
2961
2962 case CONST_DECL:
2963 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2964 ? arg : NULL);
2965
2966 case CONSTRUCTOR:
2967 return TREE_STATIC (arg) ? arg : NULL;
2968
2969 case LABEL_DECL:
2970 case STRING_CST:
2971 return arg;
2972
2973 case COMPONENT_REF:
2974 /* If the thing being referenced is not a field, then it is
2975 something language specific. */
2976 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2977
2978 /* If we are referencing a bitfield, we can't evaluate an
2979 ADDR_EXPR at compile time and so it isn't a constant. */
2980 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2981 return NULL;
2982
2983 return staticp (TREE_OPERAND (arg, 0));
2984
2985 case BIT_FIELD_REF:
2986 return NULL;
2987
2988 case INDIRECT_REF:
2989 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2990
2991 case ARRAY_REF:
2992 case ARRAY_RANGE_REF:
2993 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2994 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2995 return staticp (TREE_OPERAND (arg, 0));
2996 else
2997 return NULL;
2998
2999 case COMPOUND_LITERAL_EXPR:
3000 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3001
3002 default:
3003 return NULL;
3004 }
3005 }
3006
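/* Editor's illustrative sketch (not part of the original source): staticp
   on an easy case.  A STRING_CST always denotes an object in static
   storage, so staticp returns the node itself.  */

static void ATTRIBUTE_UNUSED
example_staticp (void)
{
  tree str = build_string (5, "hello");
  gcc_checking_assert (staticp (str) == str);
}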
3007 \f
3008
3009
3010 /* Return whether OP is a DECL whose address is function-invariant. */
3011
3012 bool
3013 decl_address_invariant_p (const_tree op)
3014 {
3015 /* The conditions below are slightly less strict than the ones in
3016 staticp. */
3017
3018 switch (TREE_CODE (op))
3019 {
3020 case PARM_DECL:
3021 case RESULT_DECL:
3022 case LABEL_DECL:
3023 case FUNCTION_DECL:
3024 return true;
3025
3026 case VAR_DECL:
3027 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3028 || DECL_THREAD_LOCAL_P (op)
3029 || DECL_CONTEXT (op) == current_function_decl
3030 || decl_function_context (op) == current_function_decl)
3031 return true;
3032 break;
3033
3034 case CONST_DECL:
3035 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3036 || decl_function_context (op) == current_function_decl)
3037 return true;
3038 break;
3039
3040 default:
3041 break;
3042 }
3043
3044 return false;
3045 }
3046
3047 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3048
3049 bool
3050 decl_address_ip_invariant_p (const_tree op)
3051 {
3052 /* The conditions below are slightly less strict than the ones in
3053 staticp. */
3054
3055 switch (TREE_CODE (op))
3056 {
3057 case LABEL_DECL:
3058 case FUNCTION_DECL:
3059 case STRING_CST:
3060 return true;
3061
3062 case VAR_DECL:
3063 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3064 && !DECL_DLLIMPORT_P (op))
3065 || DECL_THREAD_LOCAL_P (op))
3066 return true;
3067 break;
3068
3069 case CONST_DECL:
3070 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3071 return true;
3072 break;
3073
3074 default:
3075 break;
3076 }
3077
3078 return false;
3079 }
3080
3081
3082 /* Return true if T is function-invariant (internal function, does
3083 not handle arithmetic; that's handled in skip_simple_arithmetic and
3084 tree_invariant_p). */
3085
3086 static bool tree_invariant_p (tree t);
3087
3088 static bool
3089 tree_invariant_p_1 (tree t)
3090 {
3091 tree op;
3092
3093 if (TREE_CONSTANT (t)
3094 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3095 return true;
3096
3097 switch (TREE_CODE (t))
3098 {
3099 case SAVE_EXPR:
3100 return true;
3101
3102 case ADDR_EXPR:
3103 op = TREE_OPERAND (t, 0);
3104 while (handled_component_p (op))
3105 {
3106 switch (TREE_CODE (op))
3107 {
3108 case ARRAY_REF:
3109 case ARRAY_RANGE_REF:
3110 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3111 || TREE_OPERAND (op, 2) != NULL_TREE
3112 || TREE_OPERAND (op, 3) != NULL_TREE)
3113 return false;
3114 break;
3115
3116 case COMPONENT_REF:
3117 if (TREE_OPERAND (op, 2) != NULL_TREE)
3118 return false;
3119 break;
3120
3121 default:;
3122 }
3123 op = TREE_OPERAND (op, 0);
3124 }
3125
3126 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3127
3128 default:
3129 break;
3130 }
3131
3132 return false;
3133 }
3134
3135 /* Return true if T is function-invariant. */
3136
3137 static bool
3138 tree_invariant_p (tree t)
3139 {
3140 tree inner = skip_simple_arithmetic (t);
3141 return tree_invariant_p_1 (inner);
3142 }
3143
3144 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3145 Do this to any expression which may be used in more than one place,
3146 but must be evaluated only once.
3147
3148 Normally, expand_expr would reevaluate the expression each time.
3149 Calling save_expr produces something that is evaluated and recorded
3150 the first time expand_expr is called on it. Subsequent calls to
3151 expand_expr just reuse the recorded value.
3152
3153 The call to expand_expr that generates code that actually computes
3154 the value is the first call *at compile time*. Subsequent calls
3155 *at compile time* generate code to use the saved value.
3156 This produces the correct result provided that *at run time* control
3157 always flows through the insns made by the first expand_expr
3158 before reaching the other places where the save_expr was evaluated.
3159 You, the caller of save_expr, must make sure this is so.
3160
3161 Constants, and certain read-only nodes, are returned with no
3162 SAVE_EXPR because that is safe. Expressions containing placeholders
3163 are not touched; see tree.def for an explanation of what these
3164 are used for. */
3165
3166 tree
3167 save_expr (tree expr)
3168 {
3169 tree t = fold (expr);
3170 tree inner;
3171
3172 /* If the tree evaluates to a constant, then we don't want to hide that
3173 fact (i.e. this allows further folding, and direct checks for constants).
3174 However, a read-only object that has side effects cannot be bypassed.
3175 Since it is no problem to reevaluate literals, we just return the
3176 literal node. */
3177 inner = skip_simple_arithmetic (t);
3178 if (TREE_CODE (inner) == ERROR_MARK)
3179 return inner;
3180
3181 if (tree_invariant_p_1 (inner))
3182 return t;
3183
3184 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3185 it means that the size or offset of some field of an object depends on
3186 the value within another field.
3187
3188 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3189 and some variable since it would then need to be both evaluated once and
3190 evaluated more than once. Front-ends must ensure this case cannot
3191 happen by surrounding any such subexpressions in their own SAVE_EXPR
3192 and forcing evaluation at the proper time. */
3193 if (contains_placeholder_p (inner))
3194 return t;
3195
3196 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3197 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3198
3199 /* This expression might be placed ahead of a jump to ensure that the
3200 value was computed on both sides of the jump. So make sure it isn't
3201 eliminated as dead. */
3202 TREE_SIDE_EFFECTS (t) = 1;
3203 return t;
3204 }
3205
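/* Editor's illustrative sketch (not part of the original source): the way
   a front end typically uses save_expr when an operand must appear twice
   in the expansion but may only be evaluated once.  CALL stands for any
   expression with side effects; the helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_square_of (tree call)
{
  /* Without save_expr the argument would be expanded, and its side
     effects performed, twice.  */
  tree saved = save_expr (call);
  return fold_build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}
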
3206 /* Look inside EXPR into any simple arithmetic operations. Return the
3207 outermost non-arithmetic or non-invariant node. */
3208
3209 tree
3210 skip_simple_arithmetic (tree expr)
3211 {
3212 /* We don't care about whether this can be used as an lvalue in this
3213 context. */
3214 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3215 expr = TREE_OPERAND (expr, 0);
3216
3217 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3218 a constant, it will be more efficient to not make another SAVE_EXPR since
3219 it will allow better simplification and GCSE will be able to merge the
3220 computations if they actually occur. */
3221 while (true)
3222 {
3223 if (UNARY_CLASS_P (expr))
3224 expr = TREE_OPERAND (expr, 0);
3225 else if (BINARY_CLASS_P (expr))
3226 {
3227 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3228 expr = TREE_OPERAND (expr, 0);
3229 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3230 expr = TREE_OPERAND (expr, 1);
3231 else
3232 break;
3233 }
3234 else
3235 break;
3236 }
3237
3238 return expr;
3239 }
3240
3241 /* Look inside EXPR into simple arithmetic operations involving constants.
3242 Return the outermost non-arithmetic or non-constant node. */
3243
3244 tree
3245 skip_simple_constant_arithmetic (tree expr)
3246 {
3247 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3248 expr = TREE_OPERAND (expr, 0);
3249
3250 while (true)
3251 {
3252 if (UNARY_CLASS_P (expr))
3253 expr = TREE_OPERAND (expr, 0);
3254 else if (BINARY_CLASS_P (expr))
3255 {
3256 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3257 expr = TREE_OPERAND (expr, 0);
3258 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3259 expr = TREE_OPERAND (expr, 1);
3260 else
3261 break;
3262 }
3263 else
3264 break;
3265 }
3266
3267 return expr;
3268 }
3269
3270 /* Return which tree structure is used by T. */
3271
3272 enum tree_node_structure_enum
3273 tree_node_structure (const_tree t)
3274 {
3275 const enum tree_code code = TREE_CODE (t);
3276 return tree_node_structure_for_code (code);
3277 }
3278
3279 /* Set various status flags when building a CALL_EXPR object T. */
3280
3281 static void
3282 process_call_operands (tree t)
3283 {
3284 bool side_effects = TREE_SIDE_EFFECTS (t);
3285 bool read_only = false;
3286 int i = call_expr_flags (t);
3287
3288 /* Calls have side-effects, except those to const or pure functions. */
3289 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3290 side_effects = true;
3291 /* Propagate TREE_READONLY of arguments for const functions. */
3292 if (i & ECF_CONST)
3293 read_only = true;
3294
3295 if (!side_effects || read_only)
3296 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3297 {
3298 tree op = TREE_OPERAND (t, i);
3299 if (op && TREE_SIDE_EFFECTS (op))
3300 side_effects = true;
3301 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3302 read_only = false;
3303 }
3304
3305 TREE_SIDE_EFFECTS (t) = side_effects;
3306 TREE_READONLY (t) = read_only;
3307 }
3308 \f
3309 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3310 size or offset that depends on a field within a record. */
3311
3312 bool
3313 contains_placeholder_p (const_tree exp)
3314 {
3315 enum tree_code code;
3316
3317 if (!exp)
3318 return 0;
3319
3320 code = TREE_CODE (exp);
3321 if (code == PLACEHOLDER_EXPR)
3322 return 1;
3323
3324 switch (TREE_CODE_CLASS (code))
3325 {
3326 case tcc_reference:
3327 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3328 position computations since they will be converted into a
3329 WITH_RECORD_EXPR involving the reference, which we assume
3330 here will be valid. */
3331 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3332
3333 case tcc_exceptional:
3334 if (code == TREE_LIST)
3335 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3336 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3337 break;
3338
3339 case tcc_unary:
3340 case tcc_binary:
3341 case tcc_comparison:
3342 case tcc_expression:
3343 switch (code)
3344 {
3345 case COMPOUND_EXPR:
3346 /* Ignoring the first operand isn't quite right, but works best. */
3347 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3348
3349 case COND_EXPR:
3350 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3351 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3352 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3353
3354 case SAVE_EXPR:
3355 /* The save_expr function never wraps anything containing
3356 a PLACEHOLDER_EXPR. */
3357 return 0;
3358
3359 default:
3360 break;
3361 }
3362
3363 switch (TREE_CODE_LENGTH (code))
3364 {
3365 case 1:
3366 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3367 case 2:
3368 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3369 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3370 default:
3371 return 0;
3372 }
3373
3374 case tcc_vl_exp:
3375 switch (code)
3376 {
3377 case CALL_EXPR:
3378 {
3379 const_tree arg;
3380 const_call_expr_arg_iterator iter;
3381 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3382 if (CONTAINS_PLACEHOLDER_P (arg))
3383 return 1;
3384 return 0;
3385 }
3386 default:
3387 return 0;
3388 }
3389
3390 default:
3391 return 0;
3392 }
3393 return 0;
3394 }
3395
3396 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3397 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3398 field positions. */
3399
3400 static bool
3401 type_contains_placeholder_1 (const_tree type)
3402 {
3403 /* If the size contains a placeholder or the parent type (component type in
3404 the case of arrays) involves a placeholder, this type does. */
3405 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3406 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3407 || (!POINTER_TYPE_P (type)
3408 && TREE_TYPE (type)
3409 && type_contains_placeholder_p (TREE_TYPE (type))))
3410 return true;
3411
3412 /* Now do type-specific checks. Note that the last part of the check above
3413 greatly limits what we have to do below. */
3414 switch (TREE_CODE (type))
3415 {
3416 case VOID_TYPE:
3417 case POINTER_BOUNDS_TYPE:
3418 case COMPLEX_TYPE:
3419 case ENUMERAL_TYPE:
3420 case BOOLEAN_TYPE:
3421 case POINTER_TYPE:
3422 case OFFSET_TYPE:
3423 case REFERENCE_TYPE:
3424 case METHOD_TYPE:
3425 case FUNCTION_TYPE:
3426 case VECTOR_TYPE:
3427 case NULLPTR_TYPE:
3428 return false;
3429
3430 case INTEGER_TYPE:
3431 case REAL_TYPE:
3432 case FIXED_POINT_TYPE:
3433 /* Here we just check the bounds. */
3434 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3435 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3436
3437 case ARRAY_TYPE:
3438 /* We have already checked the component type above, so just check the
3439 domain type. */
3440 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3441
3442 case RECORD_TYPE:
3443 case UNION_TYPE:
3444 case QUAL_UNION_TYPE:
3445 {
3446 tree field;
3447
3448 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3449 if (TREE_CODE (field) == FIELD_DECL
3450 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3451 || (TREE_CODE (type) == QUAL_UNION_TYPE
3452 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3453 || type_contains_placeholder_p (TREE_TYPE (field))))
3454 return true;
3455
3456 return false;
3457 }
3458
3459 default:
3460 gcc_unreachable ();
3461 }
3462 }
3463
3464 /* Wrapper around above function used to cache its result. */
3465
3466 bool
3467 type_contains_placeholder_p (tree type)
3468 {
3469 bool result;
3470
3471 /* If the contains_placeholder_bits field has been initialized,
3472 then we know the answer. */
3473 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3474 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3475
3476 /* Indicate that we've seen this type node, and the answer is false.
3477 This is what we want to return if we run into recursion via fields. */
3478 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3479
3480 /* Compute the real value. */
3481 result = type_contains_placeholder_1 (type);
3482
3483 /* Store the real value. */
3484 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3485
3486 return result;
3487 }
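
/* Editor's illustrative sketch (not part of the original source): ordinary
   C types never contain a PLACEHOLDER_EXPR; only self-referential types
   built by front ends such as Ada do.  */

static void ATTRIBUTE_UNUSED
example_placeholder_query (void)
{
  gcc_checking_assert (!type_contains_placeholder_p (integer_type_node));
}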
3488 \f
3489 /* Push tree EXP onto vector QUEUE if it is not already present. */
3490
3491 static void
3492 push_without_duplicates (tree exp, vec<tree> *queue)
3493 {
3494 unsigned int i;
3495 tree iter;
3496
3497 FOR_EACH_VEC_ELT (*queue, i, iter)
3498 if (simple_cst_equal (iter, exp) == 1)
3499 break;
3500
3501 if (!iter)
3502 queue->safe_push (exp);
3503 }
3504
3505 /* Given a tree EXP, find all occurrences of references to fields
3506 in a PLACEHOLDER_EXPR and place them in vector REFS without
3507 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3508 we assume here that EXP contains only arithmetic expressions
3509 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3510 argument list. */
3511
3512 void
3513 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3514 {
3515 enum tree_code code = TREE_CODE (exp);
3516 tree inner;
3517 int i;
3518
3519 /* We handle TREE_LIST and COMPONENT_REF separately. */
3520 if (code == TREE_LIST)
3521 {
3522 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3523 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3524 }
3525 else if (code == COMPONENT_REF)
3526 {
3527 for (inner = TREE_OPERAND (exp, 0);
3528 REFERENCE_CLASS_P (inner);
3529 inner = TREE_OPERAND (inner, 0))
3530 ;
3531
3532 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3533 push_without_duplicates (exp, refs);
3534 else
3535 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3536 }
3537 else
3538 switch (TREE_CODE_CLASS (code))
3539 {
3540 case tcc_constant:
3541 break;
3542
3543 case tcc_declaration:
3544 /* Variables allocated to static storage can stay. */
3545 if (!TREE_STATIC (exp))
3546 push_without_duplicates (exp, refs);
3547 break;
3548
3549 case tcc_expression:
3550 /* This is the pattern built in ada/make_aligning_type. */
3551 if (code == ADDR_EXPR
3552 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3553 {
3554 push_without_duplicates (exp, refs);
3555 break;
3556 }
3557
3558 /* Fall through... */
3559
3560 case tcc_exceptional:
3561 case tcc_unary:
3562 case tcc_binary:
3563 case tcc_comparison:
3564 case tcc_reference:
3565 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3566 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3567 break;
3568
3569 case tcc_vl_exp:
3570 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3571 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3572 break;
3573
3574 default:
3575 gcc_unreachable ();
3576 }
3577 }
3578
3579 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3580 return a tree with all occurrences of references to F in a
3581 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3582 CONST_DECLs. Note that we assume here that EXP contains only
3583 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3584 occurring only in their argument list. */
3585
3586 tree
3587 substitute_in_expr (tree exp, tree f, tree r)
3588 {
3589 enum tree_code code = TREE_CODE (exp);
3590 tree op0, op1, op2, op3;
3591 tree new_tree;
3592
3593 /* We handle TREE_LIST and COMPONENT_REF separately. */
3594 if (code == TREE_LIST)
3595 {
3596 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3597 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3598 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3599 return exp;
3600
3601 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3602 }
3603 else if (code == COMPONENT_REF)
3604 {
3605 tree inner;
3606
3607 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3608 and it is the right field, replace it with R. */
3609 for (inner = TREE_OPERAND (exp, 0);
3610 REFERENCE_CLASS_P (inner);
3611 inner = TREE_OPERAND (inner, 0))
3612 ;
3613
3614 /* The field. */
3615 op1 = TREE_OPERAND (exp, 1);
3616
3617 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3618 return r;
3619
3620 /* If this expression hasn't been completed yet, leave it alone. */
3621 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3622 return exp;
3623
3624 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3625 if (op0 == TREE_OPERAND (exp, 0))
3626 return exp;
3627
3628 new_tree
3629 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3630 }
3631 else
3632 switch (TREE_CODE_CLASS (code))
3633 {
3634 case tcc_constant:
3635 return exp;
3636
3637 case tcc_declaration:
3638 if (exp == f)
3639 return r;
3640 else
3641 return exp;
3642
3643 case tcc_expression:
3644 if (exp == f)
3645 return r;
3646
3647 /* Fall through... */
3648
3649 case tcc_exceptional:
3650 case tcc_unary:
3651 case tcc_binary:
3652 case tcc_comparison:
3653 case tcc_reference:
3654 switch (TREE_CODE_LENGTH (code))
3655 {
3656 case 0:
3657 return exp;
3658
3659 case 1:
3660 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3661 if (op0 == TREE_OPERAND (exp, 0))
3662 return exp;
3663
3664 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3665 break;
3666
3667 case 2:
3668 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3669 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3670
3671 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3672 return exp;
3673
3674 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3675 break;
3676
3677 case 3:
3678 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3679 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3680 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3681
3682 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3683 && op2 == TREE_OPERAND (exp, 2))
3684 return exp;
3685
3686 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3687 break;
3688
3689 case 4:
3690 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3691 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3692 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3693 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3694
3695 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3696 && op2 == TREE_OPERAND (exp, 2)
3697 && op3 == TREE_OPERAND (exp, 3))
3698 return exp;
3699
3700 new_tree
3701 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3702 break;
3703
3704 default:
3705 gcc_unreachable ();
3706 }
3707 break;
3708
3709 case tcc_vl_exp:
3710 {
3711 int i;
3712
3713 new_tree = NULL_TREE;
3714
3715 /* If we are trying to replace F with a constant, inline back
3716 functions which do nothing other than compute a value from
3717 the arguments they are passed. This makes it possible to
3718 fold partially or entirely the replacement expression. */
3719 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3720 {
3721 tree t = maybe_inline_call_in_expr (exp);
3722 if (t)
3723 return SUBSTITUTE_IN_EXPR (t, f, r);
3724 }
3725
3726 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3727 {
3728 tree op = TREE_OPERAND (exp, i);
3729 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3730 if (new_op != op)
3731 {
3732 if (!new_tree)
3733 new_tree = copy_node (exp);
3734 TREE_OPERAND (new_tree, i) = new_op;
3735 }
3736 }
3737
3738 if (new_tree)
3739 {
3740 new_tree = fold (new_tree);
3741 if (TREE_CODE (new_tree) == CALL_EXPR)
3742 process_call_operands (new_tree);
3743 }
3744 else
3745 return exp;
3746 }
3747 break;
3748
3749 default:
3750 gcc_unreachable ();
3751 }
3752
3753 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3754
3755 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3756 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3757
3758 return new_tree;
3759 }
3760
3761 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3762 for it within OBJ, a tree that is an object or a chain of references. */
3763
3764 tree
3765 substitute_placeholder_in_expr (tree exp, tree obj)
3766 {
3767 enum tree_code code = TREE_CODE (exp);
3768 tree op0, op1, op2, op3;
3769 tree new_tree;
3770
3771 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3772 in the chain of OBJ. */
3773 if (code == PLACEHOLDER_EXPR)
3774 {
3775 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3776 tree elt;
3777
3778 for (elt = obj; elt != 0;
3779 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3780 || TREE_CODE (elt) == COND_EXPR)
3781 ? TREE_OPERAND (elt, 1)
3782 : (REFERENCE_CLASS_P (elt)
3783 || UNARY_CLASS_P (elt)
3784 || BINARY_CLASS_P (elt)
3785 || VL_EXP_CLASS_P (elt)
3786 || EXPRESSION_CLASS_P (elt))
3787 ? TREE_OPERAND (elt, 0) : 0))
3788 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3789 return elt;
3790
3791 for (elt = obj; elt != 0;
3792 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3793 || TREE_CODE (elt) == COND_EXPR)
3794 ? TREE_OPERAND (elt, 1)
3795 : (REFERENCE_CLASS_P (elt)
3796 || UNARY_CLASS_P (elt)
3797 || BINARY_CLASS_P (elt)
3798 || VL_EXP_CLASS_P (elt)
3799 || EXPRESSION_CLASS_P (elt))
3800 ? TREE_OPERAND (elt, 0) : 0))
3801 if (POINTER_TYPE_P (TREE_TYPE (elt))
3802 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3803 == need_type))
3804 return fold_build1 (INDIRECT_REF, need_type, elt);
3805
3806 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3807 survives until RTL generation, there will be an error. */
3808 return exp;
3809 }
3810
3811 /* TREE_LIST is special because we need to look at TREE_VALUE
3812 and TREE_CHAIN, not TREE_OPERANDS. */
3813 else if (code == TREE_LIST)
3814 {
3815 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3816 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3817 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3818 return exp;
3819
3820 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3821 }
3822 else
3823 switch (TREE_CODE_CLASS (code))
3824 {
3825 case tcc_constant:
3826 case tcc_declaration:
3827 return exp;
3828
3829 case tcc_exceptional:
3830 case tcc_unary:
3831 case tcc_binary:
3832 case tcc_comparison:
3833 case tcc_expression:
3834 case tcc_reference:
3835 case tcc_statement:
3836 switch (TREE_CODE_LENGTH (code))
3837 {
3838 case 0:
3839 return exp;
3840
3841 case 1:
3842 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3843 if (op0 == TREE_OPERAND (exp, 0))
3844 return exp;
3845
3846 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3847 break;
3848
3849 case 2:
3850 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3851 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3852
3853 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3854 return exp;
3855
3856 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3857 break;
3858
3859 case 3:
3860 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3861 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3862 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3863
3864 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3865 && op2 == TREE_OPERAND (exp, 2))
3866 return exp;
3867
3868 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3869 break;
3870
3871 case 4:
3872 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3873 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3874 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3875 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3876
3877 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3878 && op2 == TREE_OPERAND (exp, 2)
3879 && op3 == TREE_OPERAND (exp, 3))
3880 return exp;
3881
3882 new_tree
3883 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3884 break;
3885
3886 default:
3887 gcc_unreachable ();
3888 }
3889 break;
3890
3891 case tcc_vl_exp:
3892 {
3893 int i;
3894
3895 new_tree = NULL_TREE;
3896
3897 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3898 {
3899 tree op = TREE_OPERAND (exp, i);
3900 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3901 if (new_op != op)
3902 {
3903 if (!new_tree)
3904 new_tree = copy_node (exp);
3905 TREE_OPERAND (new_tree, i) = new_op;
3906 }
3907 }
3908
3909 if (new_tree)
3910 {
3911 new_tree = fold (new_tree);
3912 if (TREE_CODE (new_tree) == CALL_EXPR)
3913 process_call_operands (new_tree);
3914 }
3915 else
3916 return exp;
3917 }
3918 break;
3919
3920 default:
3921 gcc_unreachable ();
3922 }
3923
3924 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3925
3926 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3927 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3928
3929 return new_tree;
3930 }
3931 \f
3932
3933 /* Subroutine of stabilize_reference; this is called for subtrees of
3934 references. Any expression with side-effects must be put in a SAVE_EXPR
3935 to ensure that it is only evaluated once.
3936
3937 We don't put SAVE_EXPR nodes around everything, because assigning very
3938 simple expressions to temporaries causes us to miss good opportunities
3939 for optimizations. Among other things, the opportunity to fold the
3940 addition of a constant into an addressing mode often gets lost, e.g.
3941 "y[i+1] += x;". In general, we take the approach that we should not make
3942 an assignment unless we are forced into it - i.e., that any non-side effect
3943 operator should be allowed, and that cse should take care of coalescing
3944 multiple utterances of the same expression should that prove fruitful. */
3945
3946 static tree
3947 stabilize_reference_1 (tree e)
3948 {
3949 tree result;
3950 enum tree_code code = TREE_CODE (e);
3951
3952 /* We cannot ignore const expressions because it might be a reference
3953 to a const array whose index contains side-effects. But we can
3954 ignore things that are actual constants or that have already been
3955 handled by this function. */
3956
3957 if (tree_invariant_p (e))
3958 return e;
3959
3960 switch (TREE_CODE_CLASS (code))
3961 {
3962 case tcc_exceptional:
3963 case tcc_type:
3964 case tcc_declaration:
3965 case tcc_comparison:
3966 case tcc_statement:
3967 case tcc_expression:
3968 case tcc_reference:
3969 case tcc_vl_exp:
3970 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3971 so that it will only be evaluated once. */
3972 /* The reference (r) and comparison (<) classes could be handled as
3973 below, but it is generally faster to only evaluate them once. */
3974 if (TREE_SIDE_EFFECTS (e))
3975 return save_expr (e);
3976 return e;
3977
3978 case tcc_constant:
3979 /* Constants need no processing. In fact, we should never reach
3980 here. */
3981 return e;
3982
3983 case tcc_binary:
3984 /* Division is slow and tends to be compiled with jumps,
3985 especially the division by powers of 2 that is often
3986 found inside of an array reference. So do it just once. */
3987 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3988 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3989 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3990 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3991 return save_expr (e);
3992 /* Recursively stabilize each operand. */
3993 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3994 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3995 break;
3996
3997 case tcc_unary:
3998 /* Recursively stabilize each operand. */
3999 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4000 break;
4001
4002 default:
4003 gcc_unreachable ();
4004 }
4005
4006 TREE_TYPE (result) = TREE_TYPE (e);
4007 TREE_READONLY (result) = TREE_READONLY (e);
4008 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4009 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4010
4011 return result;
4012 }
4013
4014 /* Stabilize a reference so that we can use it any number of times
4015 without causing its operands to be evaluated more than once.
4016 Returns the stabilized reference. This works by means of save_expr,
4017 so see the caveats in the comments about save_expr.
4018
4019 Also allows conversion expressions whose operands are references.
4020 Any other kind of expression is returned unchanged. */
4021
4022 tree
4023 stabilize_reference (tree ref)
4024 {
4025 tree result;
4026 enum tree_code code = TREE_CODE (ref);
4027
4028 switch (code)
4029 {
4030 case VAR_DECL:
4031 case PARM_DECL:
4032 case RESULT_DECL:
4033 /* No action is needed in this case. */
4034 return ref;
4035
4036 CASE_CONVERT:
4037 case FLOAT_EXPR:
4038 case FIX_TRUNC_EXPR:
4039 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4040 break;
4041
4042 case INDIRECT_REF:
4043 result = build_nt (INDIRECT_REF,
4044 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4045 break;
4046
4047 case COMPONENT_REF:
4048 result = build_nt (COMPONENT_REF,
4049 stabilize_reference (TREE_OPERAND (ref, 0)),
4050 TREE_OPERAND (ref, 1), NULL_TREE);
4051 break;
4052
4053 case BIT_FIELD_REF:
4054 result = build_nt (BIT_FIELD_REF,
4055 stabilize_reference (TREE_OPERAND (ref, 0)),
4056 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4057 break;
4058
4059 case ARRAY_REF:
4060 result = build_nt (ARRAY_REF,
4061 stabilize_reference (TREE_OPERAND (ref, 0)),
4062 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4063 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4064 break;
4065
4066 case ARRAY_RANGE_REF:
4067 result = build_nt (ARRAY_RANGE_REF,
4068 stabilize_reference (TREE_OPERAND (ref, 0)),
4069 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4070 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4071 break;
4072
4073 case COMPOUND_EXPR:
4074 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4075 it wouldn't be ignored. This matters when dealing with
4076 volatiles. */
4077 return stabilize_reference_1 (ref);
4078
4079 /* If arg isn't a kind of lvalue we recognize, make no change.
4080 Caller should recognize the error for an invalid lvalue. */
4081 default:
4082 return ref;
4083
4084 case ERROR_MARK:
4085 return error_mark_node;
4086 }
4087
4088 TREE_TYPE (result) = TREE_TYPE (ref);
4089 TREE_READONLY (result) = TREE_READONLY (ref);
4090 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4091 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4092
4093 return result;
4094 }
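
/* A minimal usage sketch, kept out of the build with #if 0.  It shows the
   typical front-end pattern for lowering a compound assignment such as
   "lhs += 1" on an integral lvalue: the reference is stabilized once and
   then reused on both sides of the MODIFY_EXPR, so any side effects in its
   operands (e.g. "a[i++]") are evaluated only once.  The helper name and
   its parameter are made up for illustration; stabilize_reference, build2
   and build_int_cst are existing interfaces used in this file.  */
#if 0
static tree
sketch_increment_lvalue (tree lhs)
{
  tree type = TREE_TYPE (lhs);
  tree ref = stabilize_reference (lhs);
  tree sum = build2 (PLUS_EXPR, type, ref, build_int_cst (type, 1));
  return build2 (MODIFY_EXPR, type, ref, sum);
}
#endif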
4095 \f
4096 /* Low-level constructors for expressions. */
4097
4098 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4099 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4100
4101 void
4102 recompute_tree_invariant_for_addr_expr (tree t)
4103 {
4104 tree node;
4105 bool tc = true, se = false;
4106
4107 /* We start out assuming this address is both invariant and constant and
4108 has no side effects. Now go down any handled components and see if
4109 any of them involve offsets that are either non-constant or non-invariant.
4110 Also check for side-effects.
4111
4112 ??? Note that this code makes no attempt to deal with the case where
4113 taking the address of something causes a copy due to misalignment. */
4114
4115 #define UPDATE_FLAGS(NODE) \
4116 do { tree _node = (NODE); \
4117 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4118 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4119
4120 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4121 node = TREE_OPERAND (node, 0))
4122 {
4123 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4124 array reference (probably made temporarily by the G++ front end),
4125 so ignore all the operands. */
4126 if ((TREE_CODE (node) == ARRAY_REF
4127 || TREE_CODE (node) == ARRAY_RANGE_REF)
4128 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4129 {
4130 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4131 if (TREE_OPERAND (node, 2))
4132 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4133 if (TREE_OPERAND (node, 3))
4134 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4135 }
4136 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4137 FIELD_DECL, apparently. The G++ front end can put something else
4138 there, at least temporarily. */
4139 else if (TREE_CODE (node) == COMPONENT_REF
4140 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4141 {
4142 if (TREE_OPERAND (node, 2))
4143 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4144 }
4145 }
4146
4147 node = lang_hooks.expr_to_decl (node, &tc, &se);
4148
4149 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4150 the address, since &(*a)->b is a form of addition. If it's a constant, the
4151 address is constant too. If it's a decl, its address is constant if the
4152 decl is static. Everything else is not constant and, furthermore,
4153 taking the address of a volatile variable is not volatile. */
4154 if (TREE_CODE (node) == INDIRECT_REF
4155 || TREE_CODE (node) == MEM_REF)
4156 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4157 else if (CONSTANT_CLASS_P (node))
4158 ;
4159 else if (DECL_P (node))
4160 tc &= (staticp (node) != NULL_TREE);
4161 else
4162 {
4163 tc = false;
4164 se |= TREE_SIDE_EFFECTS (node);
4165 }
4166
4167
4168 TREE_CONSTANT (t) = tc;
4169 TREE_SIDE_EFFECTS (t) = se;
4170 #undef UPDATE_FLAGS
4171 }
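
/* A minimal sketch, kept out of the build with #if 0.  When a pass rewrites
   the operand of an existing ADDR_EXPR in place, the cached TREE_CONSTANT
   and TREE_SIDE_EFFECTS bits may no longer describe the new operand, so
   they are recomputed with the helper above.  The function name and its
   parameters are made up for illustration.  */
#if 0
static void
sketch_replace_addr_operand (tree addr, tree new_lvalue)
{
  gcc_assert (TREE_CODE (addr) == ADDR_EXPR);
  TREE_OPERAND (addr, 0) = new_lvalue;
  recompute_tree_invariant_for_addr_expr (addr);
}
#endif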
4172
4173 /* Build an expression of code CODE, data type TYPE, and operands as
4174 specified. Expressions and reference nodes can be created this way.
4175 Constants, decls, types and misc nodes cannot be.
4176
4177 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4178 enough for all extant tree codes. */
4179
4180 tree
4181 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4182 {
4183 tree t;
4184
4185 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4186
4187 t = make_node_stat (code PASS_MEM_STAT);
4188 TREE_TYPE (t) = tt;
4189
4190 return t;
4191 }
4192
4193 tree
4194 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4195 {
4196 int length = sizeof (struct tree_exp);
4197 tree t;
4198
4199 record_node_allocation_statistics (code, length);
4200
4201 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4202
4203 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4204
4205 memset (t, 0, sizeof (struct tree_common));
4206
4207 TREE_SET_CODE (t, code);
4208
4209 TREE_TYPE (t) = type;
4210 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4211 TREE_OPERAND (t, 0) = node;
4212 if (node && !TYPE_P (node))
4213 {
4214 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4215 TREE_READONLY (t) = TREE_READONLY (node);
4216 }
4217
4218 if (TREE_CODE_CLASS (code) == tcc_statement)
4219 TREE_SIDE_EFFECTS (t) = 1;
4220 else switch (code)
4221 {
4222 case VA_ARG_EXPR:
4223 /* All of these have side-effects, no matter what their
4224 operands are. */
4225 TREE_SIDE_EFFECTS (t) = 1;
4226 TREE_READONLY (t) = 0;
4227 break;
4228
4229 case INDIRECT_REF:
4230 /* Whether a dereference is readonly has nothing to do with whether
4231 its operand is readonly. */
4232 TREE_READONLY (t) = 0;
4233 break;
4234
4235 case ADDR_EXPR:
4236 if (node)
4237 recompute_tree_invariant_for_addr_expr (t);
4238 break;
4239
4240 default:
4241 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4242 && node && !TYPE_P (node)
4243 && TREE_CONSTANT (node))
4244 TREE_CONSTANT (t) = 1;
4245 if (TREE_CODE_CLASS (code) == tcc_reference
4246 && node && TREE_THIS_VOLATILE (node))
4247 TREE_THIS_VOLATILE (t) = 1;
4248 break;
4249 }
4250
4251 return t;
4252 }
4253
4254 #define PROCESS_ARG(N) \
4255 do { \
4256 TREE_OPERAND (t, N) = arg##N; \
4257 if (arg##N && !TYPE_P (arg##N)) \
4258 { \
4259 if (TREE_SIDE_EFFECTS (arg##N)) \
4260 side_effects = 1; \
4261 if (!TREE_READONLY (arg##N) \
4262 && !CONSTANT_CLASS_P (arg##N)) \
4263 (void) (read_only = 0); \
4264 if (!TREE_CONSTANT (arg##N)) \
4265 (void) (constant = 0); \
4266 } \
4267 } while (0)
4268
4269 tree
4270 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4271 {
4272 bool constant, read_only, side_effects;
4273 tree t;
4274
4275 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4276
4277 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4278 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4279 /* When sizetype precision doesn't match that of pointers
4280 we need to be able to build explicit extensions or truncations
4281 of the offset argument. */
4282 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4283 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4284 && TREE_CODE (arg1) == INTEGER_CST);
4285
4286 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4287 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4288 && ptrofftype_p (TREE_TYPE (arg1)));
4289
4290 t = make_node_stat (code PASS_MEM_STAT);
4291 TREE_TYPE (t) = tt;
4292
4293 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4294 result based on those same flags for the arguments. But if the
4295 arguments aren't really even `tree' expressions, we shouldn't be trying
4296 to do this. */
4297
4298 /* Expressions without side effects may be constant if their
4299 arguments are as well. */
4300 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4301 || TREE_CODE_CLASS (code) == tcc_binary);
4302 read_only = 1;
4303 side_effects = TREE_SIDE_EFFECTS (t);
4304
4305 PROCESS_ARG (0);
4306 PROCESS_ARG (1);
4307
4308 TREE_READONLY (t) = read_only;
4309 TREE_CONSTANT (t) = constant;
4310 TREE_SIDE_EFFECTS (t) = side_effects;
4311 TREE_THIS_VOLATILE (t)
4312 = (TREE_CODE_CLASS (code) == tcc_reference
4313 && arg0 && TREE_THIS_VOLATILE (arg0));
4314
4315 return t;
4316 }
4317
4318
4319 tree
4320 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4321 tree arg2 MEM_STAT_DECL)
4322 {
4323 bool constant, read_only, side_effects;
4324 tree t;
4325
4326 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4327 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4328
4329 t = make_node_stat (code PASS_MEM_STAT);
4330 TREE_TYPE (t) = tt;
4331
4332 read_only = 1;
4333
4334 /* As a special exception, if COND_EXPR has NULL branches, we
4335 assume that it is a gimple statement and always consider
4336 it to have side effects. */
4337 if (code == COND_EXPR
4338 && tt == void_type_node
4339 && arg1 == NULL_TREE
4340 && arg2 == NULL_TREE)
4341 side_effects = true;
4342 else
4343 side_effects = TREE_SIDE_EFFECTS (t);
4344
4345 PROCESS_ARG (0);
4346 PROCESS_ARG (1);
4347 PROCESS_ARG (2);
4348
4349 if (code == COND_EXPR)
4350 TREE_READONLY (t) = read_only;
4351
4352 TREE_SIDE_EFFECTS (t) = side_effects;
4353 TREE_THIS_VOLATILE (t)
4354 = (TREE_CODE_CLASS (code) == tcc_reference
4355 && arg0 && TREE_THIS_VOLATILE (arg0));
4356
4357 return t;
4358 }
4359
4360 tree
4361 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4362 tree arg2, tree arg3 MEM_STAT_DECL)
4363 {
4364 bool constant, read_only, side_effects;
4365 tree t;
4366
4367 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4368
4369 t = make_node_stat (code PASS_MEM_STAT);
4370 TREE_TYPE (t) = tt;
4371
4372 side_effects = TREE_SIDE_EFFECTS (t);
4373
4374 PROCESS_ARG (0);
4375 PROCESS_ARG (1);
4376 PROCESS_ARG (2);
4377 PROCESS_ARG (3);
4378
4379 TREE_SIDE_EFFECTS (t) = side_effects;
4380 TREE_THIS_VOLATILE (t)
4381 = (TREE_CODE_CLASS (code) == tcc_reference
4382 && arg0 && TREE_THIS_VOLATILE (arg0));
4383
4384 return t;
4385 }
4386
4387 tree
4388 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4389 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4390 {
4391 bool constant, read_only, side_effects;
4392 tree t;
4393
4394 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4395
4396 t = make_node_stat (code PASS_MEM_STAT);
4397 TREE_TYPE (t) = tt;
4398
4399 side_effects = TREE_SIDE_EFFECTS (t);
4400
4401 PROCESS_ARG (0);
4402 PROCESS_ARG (1);
4403 PROCESS_ARG (2);
4404 PROCESS_ARG (3);
4405 PROCESS_ARG (4);
4406
4407 TREE_SIDE_EFFECTS (t) = side_effects;
4408 TREE_THIS_VOLATILE (t)
4409 = (TREE_CODE_CLASS (code) == tcc_reference
4410 && arg0 && TREE_THIS_VOLATILE (arg0));
4411
4412 return t;
4413 }
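
/* A minimal sketch, kept out of the build with #if 0, of the buildN entry
   points above (build2 and build3 are the usual wrappers around build2_stat
   and build3_stat).  It constructs the expression "flag != 0 ? a : b" for
   integer operands; TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of
   the result are derived from the operands as described above.  The helper
   name and its parameters are made up for illustration.  */
#if 0
static tree
sketch_select (tree flag, tree a, tree b)
{
  tree cond = build2 (NE_EXPR, boolean_type_node, flag,
                      build_int_cst (TREE_TYPE (flag), 0));
  return build3 (COND_EXPR, TREE_TYPE (a), cond, a, b);
}
#endif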
4414
4415 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4416 on the pointer PTR. */
4417
4418 tree
4419 build_simple_mem_ref_loc (location_t loc, tree ptr)
4420 {
4421 HOST_WIDE_INT offset = 0;
4422 tree ptype = TREE_TYPE (ptr);
4423 tree tem;
4424 /* For convenience allow addresses that collapse to a simple base
4425 and offset. */
4426 if (TREE_CODE (ptr) == ADDR_EXPR
4427 && (handled_component_p (TREE_OPERAND (ptr, 0))
4428 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4429 {
4430 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4431 gcc_assert (ptr);
4432 ptr = build_fold_addr_expr (ptr);
4433 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4434 }
4435 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4436 ptr, build_int_cst (ptype, offset));
4437 SET_EXPR_LOCATION (tem, loc);
4438 return tem;
4439 }
4440
4441 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4442
4443 offset_int
4444 mem_ref_offset (const_tree t)
4445 {
4446 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4447 }
4448
4449 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4450 offsetted by OFFSET units. */
4451
4452 tree
4453 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4454 {
4455 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4456 build_fold_addr_expr (base),
4457 build_int_cst (ptr_type_node, offset));
4458 tree addr = build1 (ADDR_EXPR, type, ref);
4459 recompute_tree_invariant_for_addr_expr (addr);
4460 return addr;
4461 }
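
/* A minimal sketch, kept out of the build with #if 0, combining the two
   helpers above: take the invariant address of a static variable at a byte
   offset and load a single character from it.  Because the address is an
   ADDR_EXPR of a MEM_REF, build_simple_mem_ref_loc collapses it back to a
   base plus constant offset.  The helper name and its parameters are made
   up for illustration.  */
#if 0
static tree
sketch_load_char_at (tree static_var, HOST_WIDE_INT off)
{
  tree cptr_type = build_pointer_type (char_type_node);
  tree addr = build_invariant_address (cptr_type, static_var, off);
  return build_simple_mem_ref_loc (UNKNOWN_LOCATION, addr);
}
#endif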
4462
4463 /* Similar to the build functions above, except that the TREE_TYPE is not
4464 specified and TREE_SIDE_EFFECTS is left as 0.
4465 It is permissible for arguments to be null,
4466 or even garbage if their values do not matter. */
4467
4468 tree
4469 build_nt (enum tree_code code, ...)
4470 {
4471 tree t;
4472 int length;
4473 int i;
4474 va_list p;
4475
4476 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4477
4478 va_start (p, code);
4479
4480 t = make_node (code);
4481 length = TREE_CODE_LENGTH (code);
4482
4483 for (i = 0; i < length; i++)
4484 TREE_OPERAND (t, i) = va_arg (p, tree);
4485
4486 va_end (p);
4487 return t;
4488 }
4489
4490 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4491 tree vec. */
4492
4493 tree
4494 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4495 {
4496 tree ret, t;
4497 unsigned int ix;
4498
4499 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4500 CALL_EXPR_FN (ret) = fn;
4501 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4502 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4503 CALL_EXPR_ARG (ret, ix) = t;
4504 return ret;
4505 }
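
/* A minimal sketch, kept out of the build with #if 0: build an unfolded
   CALL_EXPR "fn (a, b)" from a GC-allocated argument vector, as a front end
   might do before any folding or type checking.  The helper name and its
   parameters are made up for illustration.  */
#if 0
static tree
sketch_call_two (tree fn, tree a, tree b)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, a);
  vec_safe_push (args, b);
  return build_nt_call_vec (fn, args);
}
#endif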
4506 \f
4507 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4508 We do NOT enter this node in any sort of symbol table.
4509
4510 LOC is the location of the decl.
4511
4512 layout_decl is used to set up the decl's storage layout.
4513 Other slots are initialized to 0 or null pointers. */
4514
4515 tree
4516 build_decl_stat (location_t loc, enum tree_code code, tree name,
4517 tree type MEM_STAT_DECL)
4518 {
4519 tree t;
4520
4521 t = make_node_stat (code PASS_MEM_STAT);
4522 DECL_SOURCE_LOCATION (t) = loc;
4523
4524 /* if (type == error_mark_node)
4525 type = integer_type_node; */
4526 /* That is not done, deliberately, so that having error_mark_node
4527 as the type can suppress useless errors in the use of this variable. */
4528
4529 DECL_NAME (t) = name;
4530 TREE_TYPE (t) = type;
4531
4532 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4533 layout_decl (t, 0);
4534
4535 return t;
4536 }
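
/* A minimal sketch, kept out of the build with #if 0: declare an artificial
   local integer variable through the build_decl wrapper, roughly the way
   compiler-generated temporaries are created.  The helper and the variable
   name are made up for illustration.  */
#if 0
static tree
sketch_make_int_var (location_t loc)
{
  tree var = build_decl (loc, VAR_DECL,
                         get_identifier ("__sketch_tmp"), integer_type_node);
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  return var;
}
#endif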
4537
4538 /* Builds and returns function declaration with NAME and TYPE. */
4539
4540 tree
4541 build_fn_decl (const char *name, tree type)
4542 {
4543 tree id = get_identifier (name);
4544 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4545
4546 DECL_EXTERNAL (decl) = 1;
4547 TREE_PUBLIC (decl) = 1;
4548 DECL_ARTIFICIAL (decl) = 1;
4549 TREE_NOTHROW (decl) = 1;
4550
4551 return decl;
4552 }
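
/* A minimal sketch, kept out of the build with #if 0: build_fn_decl is
   convenient for synthesizing references to runtime support routines.  This
   declares a hypothetical "void __sketch_trap (void)"; the routine name is
   made up, while build_function_type_list is an existing interface.  */
#if 0
static tree
sketch_trap_decl (void)
{
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  return build_fn_decl ("__sketch_trap", fntype);
}
#endif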
4553
4554 vec<tree, va_gc> *all_translation_units;
4555
4556 /* Builds a new translation-unit decl with name NAME, queues it in the
4557 global list of translation-unit decls and returns it. */
4558
4559 tree
4560 build_translation_unit_decl (tree name)
4561 {
4562 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4563 name, NULL_TREE);
4564 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4565 vec_safe_push (all_translation_units, tu);
4566 return tu;
4567 }
4568
4569 \f
4570 /* BLOCK nodes are used to represent the structure of binding contours
4571 and declarations, once those contours have been exited and their contents
4572 compiled. This information is used for outputting debugging info. */
4573
4574 tree
4575 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4576 {
4577 tree block = make_node (BLOCK);
4578
4579 BLOCK_VARS (block) = vars;
4580 BLOCK_SUBBLOCKS (block) = subblocks;
4581 BLOCK_SUPERCONTEXT (block) = supercontext;
4582 BLOCK_CHAIN (block) = chain;
4583 return block;
4584 }
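
/* A minimal sketch, kept out of the build with #if 0: open a new lexical
   scope holding VARS underneath PARENT_BLOCK, chaining it in front of the
   parent's existing subblocks.  The helper name and its parameters are made
   up for illustration.  */
#if 0
static tree
sketch_open_scope (tree vars, tree parent_block)
{
  tree block = build_block (vars, NULL_TREE, parent_block,
                            BLOCK_SUBBLOCKS (parent_block));
  BLOCK_SUBBLOCKS (parent_block) = block;
  return block;
}
#endif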
4585
4586 \f
4587 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4588
4589 LOC is the location to use in tree T. */
4590
4591 void
4592 protected_set_expr_location (tree t, location_t loc)
4593 {
4594 if (CAN_HAVE_LOCATION_P (t))
4595 SET_EXPR_LOCATION (t, loc);
4596 }
4597 \f
4598 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4599 is ATTRIBUTE. */
4600
4601 tree
4602 build_decl_attribute_variant (tree ddecl, tree attribute)
4603 {
4604 DECL_ATTRIBUTES (ddecl) = attribute;
4605 return ddecl;
4606 }
4607
4608 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4609 is ATTRIBUTE and its qualifiers are QUALS.
4610
4611 Record such modified types already made so we don't make duplicates. */
4612
4613 tree
4614 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4615 {
4616 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4617 {
4618 inchash::hash hstate;
4619 tree ntype;
4620 int i;
4621 tree t;
4622 enum tree_code code = TREE_CODE (ttype);
4623
4624 /* Building a distinct copy of a tagged type is inappropriate; it
4625 causes breakage in code that expects there to be a one-to-one
4626 relationship between a struct and its fields.
4627 build_duplicate_type is another solution (as used in
4628 handle_transparent_union_attribute), but that doesn't play well
4629 with the stronger C++ type identity model. */
4630 if (TREE_CODE (ttype) == RECORD_TYPE
4631 || TREE_CODE (ttype) == UNION_TYPE
4632 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4633 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4634 {
4635 warning (OPT_Wattributes,
4636 "ignoring attributes applied to %qT after definition",
4637 TYPE_MAIN_VARIANT (ttype));
4638 return build_qualified_type (ttype, quals);
4639 }
4640
4641 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4642 ntype = build_distinct_type_copy (ttype);
4643
4644 TYPE_ATTRIBUTES (ntype) = attribute;
4645
4646 hstate.add_int (code);
4647 if (TREE_TYPE (ntype))
4648 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4649 attribute_hash_list (attribute, hstate);
4650
4651 switch (TREE_CODE (ntype))
4652 {
4653 case FUNCTION_TYPE:
4654 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4655 break;
4656 case ARRAY_TYPE:
4657 if (TYPE_DOMAIN (ntype))
4658 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4659 break;
4660 case INTEGER_TYPE:
4661 t = TYPE_MAX_VALUE (ntype);
4662 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4663 hstate.add_object (TREE_INT_CST_ELT (t, i));
4664 break;
4665 case REAL_TYPE:
4666 case FIXED_POINT_TYPE:
4667 {
4668 unsigned int precision = TYPE_PRECISION (ntype);
4669 hstate.add_object (precision);
4670 }
4671 break;
4672 default:
4673 break;
4674 }
4675
4676 ntype = type_hash_canon (hstate.end(), ntype);
4677
4678 /* If the target-dependent attributes make NTYPE different from
4679 its canonical type, we will need to use structural equality
4680 checks for this type. */
4681 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4682 || !comp_type_attributes (ntype, ttype))
4683 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4684 else if (TYPE_CANONICAL (ntype) == ntype)
4685 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4686
4687 ttype = build_qualified_type (ntype, quals);
4688 }
4689 else if (TYPE_QUALS (ttype) != quals)
4690 ttype = build_qualified_type (ttype, quals);
4691
4692 return ttype;
4693 }
4694
4695 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4696 the same. */
4697
4698 static bool
4699 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4700 {
4701 tree cl1, cl2;
4702 for (cl1 = clauses1, cl2 = clauses2;
4703 cl1 && cl2;
4704 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4705 {
4706 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4707 return false;
4708 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4709 {
4710 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4711 OMP_CLAUSE_DECL (cl2)) != 1)
4712 return false;
4713 }
4714 switch (OMP_CLAUSE_CODE (cl1))
4715 {
4716 case OMP_CLAUSE_ALIGNED:
4717 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4718 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4719 return false;
4720 break;
4721 case OMP_CLAUSE_LINEAR:
4722 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4723 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4724 return false;
4725 break;
4726 case OMP_CLAUSE_SIMDLEN:
4727 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4728 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4729 return false;
4730 default:
4731 break;
4732 }
4733 }
4734 return true;
4735 }
4736
4737 /* Compare two constructor-element-type constants. Return true if the lists
4738 are known to be equal; otherwise return false. */
4739
4740 static bool
4741 simple_cst_list_equal (const_tree l1, const_tree l2)
4742 {
4743 while (l1 != NULL_TREE && l2 != NULL_TREE)
4744 {
4745 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4746 return false;
4747
4748 l1 = TREE_CHAIN (l1);
4749 l2 = TREE_CHAIN (l2);
4750 }
4751
4752 return l1 == l2;
4753 }
4754
4755 /* Compare two attributes for their value identity. Return true if the
4756 attribute values are known to be equal; otherwise return false.
4757 */
4758
4759 static bool
4760 attribute_value_equal (const_tree attr1, const_tree attr2)
4761 {
4762 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4763 return true;
4764
4765 if (TREE_VALUE (attr1) != NULL_TREE
4766 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4767 && TREE_VALUE (attr2) != NULL
4768 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4769 return (simple_cst_list_equal (TREE_VALUE (attr1),
4770 TREE_VALUE (attr2)) == 1);
4771
4772 if ((flag_openmp || flag_openmp_simd)
4773 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4774 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4775 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4776 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4777 TREE_VALUE (attr2));
4778
4779 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4780 }
4781
4782 /* Return 0 if the attributes for two types are incompatible, 1 if they
4783 are compatible, and 2 if they are nearly compatible (which causes a
4784 warning to be generated). */
4785 int
4786 comp_type_attributes (const_tree type1, const_tree type2)
4787 {
4788 const_tree a1 = TYPE_ATTRIBUTES (type1);
4789 const_tree a2 = TYPE_ATTRIBUTES (type2);
4790 const_tree a;
4791
4792 if (a1 == a2)
4793 return 1;
4794 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4795 {
4796 const struct attribute_spec *as;
4797 const_tree attr;
4798
4799 as = lookup_attribute_spec (get_attribute_name (a));
4800 if (!as || as->affects_type_identity == false)
4801 continue;
4802
4803 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4804 if (!attr || !attribute_value_equal (a, attr))
4805 break;
4806 }
4807 if (!a)
4808 {
4809 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4810 {
4811 const struct attribute_spec *as;
4812
4813 as = lookup_attribute_spec (get_attribute_name (a));
4814 if (!as || as->affects_type_identity == false)
4815 continue;
4816
4817 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4818 break;
4819 /* We don't need to compare trees again, as we did this
4820 already in the first loop. */
4821 }
4822 /* All attributes that affect type identity are equal, so
4823 there is no need to call the target hook for comparison. */
4824 if (!a)
4825 return 1;
4826 }
4827 /* As some combinations - like the default calling convention - might
4828 still be compatible, we have to call the target hook to get the final result. */
4829 return targetm.comp_type_attributes (type1, type2);
4830 }
4831
4832 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4833 is ATTRIBUTE.
4834
4835 Record such modified types already made so we don't make duplicates. */
4836
4837 tree
4838 build_type_attribute_variant (tree ttype, tree attribute)
4839 {
4840 return build_type_attribute_qual_variant (ttype, attribute,
4841 TYPE_QUALS (ttype));
4842 }
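
/* A minimal sketch, kept out of the build with #if 0: attach an
   argument-less attribute to a type through the variant machinery above
   without modifying the original type node.  Attribute lists are TREE_LISTs
   whose TREE_PURPOSE is the attribute name.  The helper name is made up for
   illustration; tree_cons and get_identifier are existing interfaces.  */
#if 0
static tree
sketch_add_type_attribute (tree type, const char *name)
{
  tree attrs = tree_cons (get_identifier (name), NULL_TREE,
                          TYPE_ATTRIBUTES (type));
  return build_type_attribute_variant (type, attrs);
}
#endif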
4843
4844
4845 /* Reset the expression *EXPR_P, a size or position.
4846
4847 ??? We could reset all non-constant sizes or positions. But it's cheap
4848 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4849
4850 We need to reset self-referential sizes or positions because they cannot
4851 be gimplified and thus can contain a CALL_EXPR after the gimplification
4852 is finished, which will run afoul of LTO streaming. And they need to be
4853 reset to something essentially dummy but not constant, so as to preserve
4854 the properties of the object they are attached to. */
4855
4856 static inline void
4857 free_lang_data_in_one_sizepos (tree *expr_p)
4858 {
4859 tree expr = *expr_p;
4860 if (CONTAINS_PLACEHOLDER_P (expr))
4861 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4862 }
4863
4864
4865 /* Reset all the fields in a binfo node BINFO. We only keep
4866 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4867
4868 static void
4869 free_lang_data_in_binfo (tree binfo)
4870 {
4871 unsigned i;
4872 tree t;
4873
4874 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4875
4876 BINFO_VIRTUALS (binfo) = NULL_TREE;
4877 BINFO_BASE_ACCESSES (binfo) = NULL;
4878 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4879 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4880
4881 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4882 free_lang_data_in_binfo (t);
4883 }
4884
4885
4886 /* Reset all language specific information still present in TYPE. */
4887
4888 static void
4889 free_lang_data_in_type (tree type)
4890 {
4891 gcc_assert (TYPE_P (type));
4892
4893 /* Give the FE a chance to remove its own data first. */
4894 lang_hooks.free_lang_data (type);
4895
4896 TREE_LANG_FLAG_0 (type) = 0;
4897 TREE_LANG_FLAG_1 (type) = 0;
4898 TREE_LANG_FLAG_2 (type) = 0;
4899 TREE_LANG_FLAG_3 (type) = 0;
4900 TREE_LANG_FLAG_4 (type) = 0;
4901 TREE_LANG_FLAG_5 (type) = 0;
4902 TREE_LANG_FLAG_6 (type) = 0;
4903
4904 if (TREE_CODE (type) == FUNCTION_TYPE)
4905 {
4906 /* Remove the const and volatile qualifiers from arguments. The
4907 C++ front end removes them, but the C front end does not,
4908 leading to false ODR violation errors when merging two
4909 instances of the same function signature compiled by
4910 different front ends. */
4911 tree p;
4912
4913 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4914 {
4915 tree arg_type = TREE_VALUE (p);
4916
4917 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4918 {
4919 int quals = TYPE_QUALS (arg_type)
4920 & ~TYPE_QUAL_CONST
4921 & ~TYPE_QUAL_VOLATILE;
4922 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4923 free_lang_data_in_type (TREE_VALUE (p));
4924 }
4925 }
4926 }
4927
4928 /* Remove members that are not actually FIELD_DECLs from the field
4929 list of an aggregate. These occur in C++. */
4930 if (RECORD_OR_UNION_TYPE_P (type))
4931 {
4932 tree prev, member;
4933
4934 /* Note that TYPE_FIELDS can be shared across distinct
4935 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4936 to be removed, we cannot set its TREE_CHAIN to NULL.
4937 Otherwise, we would not be able to find all the other fields
4938 in the other instances of this TREE_TYPE.
4939
4940 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4941 prev = NULL_TREE;
4942 member = TYPE_FIELDS (type);
4943 while (member)
4944 {
4945 if (TREE_CODE (member) == FIELD_DECL
4946 || TREE_CODE (member) == TYPE_DECL)
4947 {
4948 if (prev)
4949 TREE_CHAIN (prev) = member;
4950 else
4951 TYPE_FIELDS (type) = member;
4952 prev = member;
4953 }
4954
4955 member = TREE_CHAIN (member);
4956 }
4957
4958 if (prev)
4959 TREE_CHAIN (prev) = NULL_TREE;
4960 else
4961 TYPE_FIELDS (type) = NULL_TREE;
4962
4963 TYPE_METHODS (type) = NULL_TREE;
4964 if (TYPE_BINFO (type))
4965 free_lang_data_in_binfo (TYPE_BINFO (type));
4966 }
4967 else
4968 {
4969 /* For non-aggregate types, clear out the language slot (which
4970 overloads TYPE_BINFO). */
4971 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4972
4973 if (INTEGRAL_TYPE_P (type)
4974 || SCALAR_FLOAT_TYPE_P (type)
4975 || FIXED_POINT_TYPE_P (type))
4976 {
4977 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4978 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4979 }
4980 }
4981
4982 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4983 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4984
4985 if (TYPE_CONTEXT (type)
4986 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4987 {
4988 tree ctx = TYPE_CONTEXT (type);
4989 do
4990 {
4991 ctx = BLOCK_SUPERCONTEXT (ctx);
4992 }
4993 while (ctx && TREE_CODE (ctx) == BLOCK);
4994 TYPE_CONTEXT (type) = ctx;
4995 }
4996 }
4997
4998
4999 /* Return true if DECL may need an assembler name to be set. */
5000
5001 static inline bool
5002 need_assembler_name_p (tree decl)
5003 {
5004 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5005 merging. */
5006 if (flag_lto_odr_type_mering
5007 && TREE_CODE (decl) == TYPE_DECL
5008 && DECL_NAME (decl)
5009 && decl == TYPE_NAME (TREE_TYPE (decl))
5010 && !is_lang_specific (TREE_TYPE (decl))
5011 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5012 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5013 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5014 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5015 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5016 if (TREE_CODE (decl) != FUNCTION_DECL
5017 && TREE_CODE (decl) != VAR_DECL)
5018 return false;
5019
5020 /* If DECL already has its assembler name set, it does not need a
5021 new one. */
5022 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5023 || DECL_ASSEMBLER_NAME_SET_P (decl))
5024 return false;
5025
5026 /* Abstract decls do not need an assembler name. */
5027 if (DECL_ABSTRACT_P (decl))
5028 return false;
5029
5030 /* For VAR_DECLs, only static, public and external symbols need an
5031 assembler name. */
5032 if (TREE_CODE (decl) == VAR_DECL
5033 && !TREE_STATIC (decl)
5034 && !TREE_PUBLIC (decl)
5035 && !DECL_EXTERNAL (decl))
5036 return false;
5037
5038 if (TREE_CODE (decl) == FUNCTION_DECL)
5039 {
5040 /* Do not set assembler name on builtins. Allow RTL expansion to
5041 decide whether to expand inline or via a regular call. */
5042 if (DECL_BUILT_IN (decl)
5043 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5044 return false;
5045
5046 /* Functions represented in the callgraph need an assembler name. */
5047 if (cgraph_node::get (decl) != NULL)
5048 return true;
5049
5050 /* Unused and not public functions don't need an assembler name. */
5051 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5052 return false;
5053 }
5054
5055 return true;
5056 }
5057
5058
5059 /* Reset all language specific information still present in symbol
5060 DECL. */
5061
5062 static void
5063 free_lang_data_in_decl (tree decl)
5064 {
5065 gcc_assert (DECL_P (decl));
5066
5067 /* Give the FE a chance to remove its own data first. */
5068 lang_hooks.free_lang_data (decl);
5069
5070 TREE_LANG_FLAG_0 (decl) = 0;
5071 TREE_LANG_FLAG_1 (decl) = 0;
5072 TREE_LANG_FLAG_2 (decl) = 0;
5073 TREE_LANG_FLAG_3 (decl) = 0;
5074 TREE_LANG_FLAG_4 (decl) = 0;
5075 TREE_LANG_FLAG_5 (decl) = 0;
5076 TREE_LANG_FLAG_6 (decl) = 0;
5077
5078 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5079 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5080 if (TREE_CODE (decl) == FIELD_DECL)
5081 {
5082 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5083 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5084 DECL_QUALIFIER (decl) = NULL_TREE;
5085 }
5086
5087 if (TREE_CODE (decl) == FUNCTION_DECL)
5088 {
5089 struct cgraph_node *node;
5090 if (!(node = cgraph_node::get (decl))
5091 || (!node->definition && !node->clones))
5092 {
5093 if (node)
5094 node->release_body ();
5095 else
5096 {
5097 release_function_body (decl);
5098 DECL_ARGUMENTS (decl) = NULL;
5099 DECL_RESULT (decl) = NULL;
5100 DECL_INITIAL (decl) = error_mark_node;
5101 }
5102 }
5103 if (gimple_has_body_p (decl))
5104 {
5105 tree t;
5106
5107 /* If DECL has a gimple body, then the context for its
5108 arguments must be DECL. Otherwise, it doesn't really
5109 matter, as we will not be emitting any code for DECL. In
5110 general, there may be other instances of DECL created by
5111 the front end and since PARM_DECLs are generally shared,
5112 their DECL_CONTEXT changes as the replicas of DECL are
5113 created. The only time where DECL_CONTEXT is important
5114 is for the FUNCTION_DECLs that have a gimple body (since
5115 the PARM_DECL will be used in the function's body). */
5116 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5117 DECL_CONTEXT (t) = decl;
5118 }
5119
5120 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5121 At this point, it is not needed anymore. */
5122 DECL_SAVED_TREE (decl) = NULL_TREE;
5123
5124 /* Clear the abstract origin if it refers to a method. Otherwise
5125 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5126 origin will not be output correctly. */
5127 if (DECL_ABSTRACT_ORIGIN (decl)
5128 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5129 && RECORD_OR_UNION_TYPE_P
5130 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5131 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5132
5133 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5134 DECL_VINDEX referring to itself into a vtable slot number as it
5135 should. Happens with functions that are copied and then forgotten
5136 about. Just clear it, it won't matter anymore. */
5137 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5138 DECL_VINDEX (decl) = NULL_TREE;
5139 }
5140 else if (TREE_CODE (decl) == VAR_DECL)
5141 {
5142 if ((DECL_EXTERNAL (decl)
5143 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5144 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5145 DECL_INITIAL (decl) = NULL_TREE;
5146 }
5147 else if (TREE_CODE (decl) == TYPE_DECL
5148 || TREE_CODE (decl) == FIELD_DECL)
5149 DECL_INITIAL (decl) = NULL_TREE;
5150 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5151 && DECL_INITIAL (decl)
5152 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5153 {
5154 /* Strip builtins from the translation-unit BLOCK. We still have targets
5155 without builtin_decl_explicit support and also builtins are shared
5156 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5157 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5158 while (*nextp)
5159 {
5160 tree var = *nextp;
5161 if (TREE_CODE (var) == FUNCTION_DECL
5162 && DECL_BUILT_IN (var))
5163 *nextp = TREE_CHAIN (var);
5164 else
5165 nextp = &TREE_CHAIN (var);
5166 }
5167 }
5168 }
5169
5170
5171 /* Data used when collecting DECLs and TYPEs for language data removal. */
5172
5173 struct free_lang_data_d
5174 {
5175 /* Worklist to avoid excessive recursion. */
5176 vec<tree> worklist;
5177
5178 /* Set of traversed objects. Used to avoid duplicate visits. */
5179 hash_set<tree> *pset;
5180
5181 /* Array of symbols to process with free_lang_data_in_decl. */
5182 vec<tree> decls;
5183
5184 /* Array of types to process with free_lang_data_in_type. */
5185 vec<tree> types;
5186 };
5187
5188
5189 /* Save all language fields needed to generate proper debug information
5190 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5191
5192 static void
5193 save_debug_info_for_decl (tree t)
5194 {
5195 /*struct saved_debug_info_d *sdi;*/
5196
5197 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5198
5199 /* FIXME. Partial implementation for saving debug info removed. */
5200 }
5201
5202
5203 /* Save all language fields needed to generate proper debug information
5204 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5205
5206 static void
5207 save_debug_info_for_type (tree t)
5208 {
5209 /*struct saved_debug_info_d *sdi;*/
5210
5211 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5212
5213 /* FIXME. Partial implementation for saving debug info removed. */
5214 }
5215
5216
5217 /* Add type or decl T to one of the list of tree nodes that need their
5218 language data removed. The lists are held inside FLD. */
5219
5220 static void
5221 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5222 {
5223 if (DECL_P (t))
5224 {
5225 fld->decls.safe_push (t);
5226 if (debug_info_level > DINFO_LEVEL_TERSE)
5227 save_debug_info_for_decl (t);
5228 }
5229 else if (TYPE_P (t))
5230 {
5231 fld->types.safe_push (t);
5232 if (debug_info_level > DINFO_LEVEL_TERSE)
5233 save_debug_info_for_type (t);
5234 }
5235 else
5236 gcc_unreachable ();
5237 }
5238
5239 /* Push tree node T into FLD->WORKLIST. */
5240
5241 static inline void
5242 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5243 {
5244 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5245 fld->worklist.safe_push ((t));
5246 }
5247
5248
5249 /* Operand callback helper for free_lang_data_in_node. *TP is the
5250 subtree operand being considered. */
5251
5252 static tree
5253 find_decls_types_r (tree *tp, int *ws, void *data)
5254 {
5255 tree t = *tp;
5256 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5257
5258 if (TREE_CODE (t) == TREE_LIST)
5259 return NULL_TREE;
5260
5261 /* Language specific nodes will be removed, so there is no need
5262 to gather anything under them. */
5263 if (is_lang_specific (t))
5264 {
5265 *ws = 0;
5266 return NULL_TREE;
5267 }
5268
5269 if (DECL_P (t))
5270 {
5271 /* Note that walk_tree does not traverse every possible field in
5272 decls, so we have to do our own traversals here. */
5273 add_tree_to_fld_list (t, fld);
5274
5275 fld_worklist_push (DECL_NAME (t), fld);
5276 fld_worklist_push (DECL_CONTEXT (t), fld);
5277 fld_worklist_push (DECL_SIZE (t), fld);
5278 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5279
5280 /* We are going to remove everything under DECL_INITIAL for
5281 TYPE_DECLs. No point walking them. */
5282 if (TREE_CODE (t) != TYPE_DECL)
5283 fld_worklist_push (DECL_INITIAL (t), fld);
5284
5285 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5286 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5287
5288 if (TREE_CODE (t) == FUNCTION_DECL)
5289 {
5290 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5291 fld_worklist_push (DECL_RESULT (t), fld);
5292 }
5293 else if (TREE_CODE (t) == TYPE_DECL)
5294 {
5295 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5296 }
5297 else if (TREE_CODE (t) == FIELD_DECL)
5298 {
5299 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5300 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5301 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5302 fld_worklist_push (DECL_FCONTEXT (t), fld);
5303 }
5304
5305 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5306 && DECL_HAS_VALUE_EXPR_P (t))
5307 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5308
5309 if (TREE_CODE (t) != FIELD_DECL
5310 && TREE_CODE (t) != TYPE_DECL)
5311 fld_worklist_push (TREE_CHAIN (t), fld);
5312 *ws = 0;
5313 }
5314 else if (TYPE_P (t))
5315 {
5316 /* Note that walk_tree does not traverse every possible field in
5317 types, so we have to do our own traversals here. */
5318 add_tree_to_fld_list (t, fld);
5319
5320 if (!RECORD_OR_UNION_TYPE_P (t))
5321 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5322 fld_worklist_push (TYPE_SIZE (t), fld);
5323 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5324 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5325 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5326 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5327 fld_worklist_push (TYPE_NAME (t), fld);
5328 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5329 them and thus do not want to reach unused pointer types
5330 this way. */
5331 if (!POINTER_TYPE_P (t))
5332 fld_worklist_push (TYPE_MINVAL (t), fld);
5333 if (!RECORD_OR_UNION_TYPE_P (t))
5334 fld_worklist_push (TYPE_MAXVAL (t), fld);
5335 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5336 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5337 do not want to reach unused variants this way. */
5338 if (TYPE_CONTEXT (t))
5339 {
5340 tree ctx = TYPE_CONTEXT (t);
5341 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5342 So push that instead. */
5343 while (ctx && TREE_CODE (ctx) == BLOCK)
5344 ctx = BLOCK_SUPERCONTEXT (ctx);
5345 fld_worklist_push (ctx, fld);
5346 }
5347 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5348 want to reach unused types this way. */
5349
5350 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5351 {
5352 unsigned i;
5353 tree tem;
5354 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5355 fld_worklist_push (TREE_TYPE (tem), fld);
5356 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5357 if (tem
5358 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5359 && TREE_CODE (tem) == TREE_LIST)
5360 do
5361 {
5362 fld_worklist_push (TREE_VALUE (tem), fld);
5363 tem = TREE_CHAIN (tem);
5364 }
5365 while (tem);
5366 }
5367 if (RECORD_OR_UNION_TYPE_P (t))
5368 {
5369 tree tem;
5370 /* Push all TYPE_FIELDS - there can be interleaving interesting
5371 and non-interesting things. */
5372 tem = TYPE_FIELDS (t);
5373 while (tem)
5374 {
5375 if (TREE_CODE (tem) == FIELD_DECL
5376 || TREE_CODE (tem) == TYPE_DECL)
5377 fld_worklist_push (tem, fld);
5378 tem = TREE_CHAIN (tem);
5379 }
5380 }
5381
5382 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5383 *ws = 0;
5384 }
5385 else if (TREE_CODE (t) == BLOCK)
5386 {
5387 tree tem;
5388 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5389 fld_worklist_push (tem, fld);
5390 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5391 fld_worklist_push (tem, fld);
5392 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5393 }
5394
5395 if (TREE_CODE (t) != IDENTIFIER_NODE
5396 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5397 fld_worklist_push (TREE_TYPE (t), fld);
5398
5399 return NULL_TREE;
5400 }
5401
5402
5403 /* Find decls and types in T. */
5404
5405 static void
5406 find_decls_types (tree t, struct free_lang_data_d *fld)
5407 {
5408 while (1)
5409 {
5410 if (!fld->pset->contains (t))
5411 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5412 if (fld->worklist.is_empty ())
5413 break;
5414 t = fld->worklist.pop ();
5415 }
5416 }
5417
5418 /* Translate all the types in LIST to their corresponding runtime
5419 types. */
5420
5421 static tree
5422 get_eh_types_for_runtime (tree list)
5423 {
5424 tree head, prev;
5425
5426 if (list == NULL_TREE)
5427 return NULL_TREE;
5428
5429 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5430 prev = head;
5431 list = TREE_CHAIN (list);
5432 while (list)
5433 {
5434 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5435 TREE_CHAIN (prev) = n;
5436 prev = TREE_CHAIN (prev);
5437 list = TREE_CHAIN (list);
5438 }
5439
5440 return head;
5441 }
5442
5443
5444 /* Find decls and types referenced in EH region R and store them in
5445 FLD->DECLS and FLD->TYPES. */
5446
5447 static void
5448 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5449 {
5450 switch (r->type)
5451 {
5452 case ERT_CLEANUP:
5453 break;
5454
5455 case ERT_TRY:
5456 {
5457 eh_catch c;
5458
5459 /* The types referenced in each catch must first be changed to the
5460 EH types used at runtime. This removes references to FE types
5461 in the region. */
5462 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5463 {
5464 c->type_list = get_eh_types_for_runtime (c->type_list);
5465 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5466 }
5467 }
5468 break;
5469
5470 case ERT_ALLOWED_EXCEPTIONS:
5471 r->u.allowed.type_list
5472 = get_eh_types_for_runtime (r->u.allowed.type_list);
5473 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5474 break;
5475
5476 case ERT_MUST_NOT_THROW:
5477 walk_tree (&r->u.must_not_throw.failure_decl,
5478 find_decls_types_r, fld, fld->pset);
5479 break;
5480 }
5481 }
5482
5483
5484 /* Find decls and types referenced in cgraph node N and store them in
5485 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5486 look for *every* kind of DECL and TYPE node reachable from N,
5487 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5488 NAMESPACE_DECLs, etc). */
5489
5490 static void
5491 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5492 {
5493 basic_block bb;
5494 struct function *fn;
5495 unsigned ix;
5496 tree t;
5497
5498 find_decls_types (n->decl, fld);
5499
5500 if (!gimple_has_body_p (n->decl))
5501 return;
5502
5503 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5504
5505 fn = DECL_STRUCT_FUNCTION (n->decl);
5506
5507 /* Traverse locals. */
5508 FOR_EACH_LOCAL_DECL (fn, ix, t)
5509 find_decls_types (t, fld);
5510
5511 /* Traverse EH regions in FN. */
5512 {
5513 eh_region r;
5514 FOR_ALL_EH_REGION_FN (r, fn)
5515 find_decls_types_in_eh_region (r, fld);
5516 }
5517
5518 /* Traverse every statement in FN. */
5519 FOR_EACH_BB_FN (bb, fn)
5520 {
5521 gimple_stmt_iterator si;
5522 unsigned i;
5523
5524 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5525 {
5526 gimple phi = gsi_stmt (si);
5527
5528 for (i = 0; i < gimple_phi_num_args (phi); i++)
5529 {
5530 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5531 find_decls_types (*arg_p, fld);
5532 }
5533 }
5534
5535 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5536 {
5537 gimple stmt = gsi_stmt (si);
5538
5539 if (is_gimple_call (stmt))
5540 find_decls_types (gimple_call_fntype (stmt), fld);
5541
5542 for (i = 0; i < gimple_num_ops (stmt); i++)
5543 {
5544 tree arg = gimple_op (stmt, i);
5545 find_decls_types (arg, fld);
5546 }
5547 }
5548 }
5549 }
5550
5551
5552 /* Find decls and types referenced in varpool node N and store them in
5553 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5554 look for *every* kind of DECL and TYPE node reachable from N,
5555 including those embedded inside types and decls (i.e., TYPE_DECLs,
5556 NAMESPACE_DECLs, etc). */
5557
5558 static void
5559 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5560 {
5561 find_decls_types (v->decl, fld);
5562 }
5563
5564 /* If T needs an assembler name, have one created for it. */
5565
5566 void
5567 assign_assembler_name_if_neeeded (tree t)
5568 {
5569 if (need_assembler_name_p (t))
5570 {
5571 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5572 diagnostics that use input_location to show locus
5573 information. The problem here is that, at this point,
5574 input_location is generally anchored to the end of the file
5575 (since the parser is long gone), so we don't have a good
5576 position to pin it to.
5577
5578 To alleviate this problem, this uses the location of T's
5579 declaration. Examples of this are
5580 testsuite/g++.dg/template/cond2.C and
5581 testsuite/g++.dg/template/pr35240.C. */
5582 location_t saved_location = input_location;
5583 input_location = DECL_SOURCE_LOCATION (t);
5584
5585 decl_assembler_name (t);
5586
5587 input_location = saved_location;
5588 }
5589 }
5590
5591
5592 /* Free language specific information for every operand and expression
5593 in every node of the call graph. This process operates in three stages:
5594
5595 1- Every callgraph node and varpool node is traversed looking for
5596 decls and types embedded in them. This is a more exhaustive
5597 search than that done by find_referenced_vars, because it will
5598 also collect individual fields, decls embedded in types, etc.
5599
5600 2- All the decls found are sent to free_lang_data_in_decl.
5601
5602 3- All the types found are sent to free_lang_data_in_type.
5603
5604 The ordering between decls and types is important because
5605 free_lang_data_in_decl sets assembler names, which includes
5606 mangling. So types cannot be freed up until assembler names have
5607 been set up. */
5608
5609 static void
5610 free_lang_data_in_cgraph (void)
5611 {
5612 struct cgraph_node *n;
5613 varpool_node *v;
5614 struct free_lang_data_d fld;
5615 tree t;
5616 unsigned i;
5617 alias_pair *p;
5618
5619 /* Initialize sets and arrays to store referenced decls and types. */
5620 fld.pset = new hash_set<tree>;
5621 fld.worklist.create (0);
5622 fld.decls.create (100);
5623 fld.types.create (100);
5624
5625 /* Find decls and types in the body of every function in the callgraph. */
5626 FOR_EACH_FUNCTION (n)
5627 find_decls_types_in_node (n, &fld);
5628
5629 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5630 find_decls_types (p->decl, &fld);
5631
5632 /* Find decls and types in every varpool symbol. */
5633 FOR_EACH_VARIABLE (v)
5634 find_decls_types_in_var (v, &fld);
5635
5636 /* Set the assembler name on every decl found. We need to do this
5637 now because free_lang_data_in_decl will invalidate data needed
5638 for mangling, which would break mangling on interdependent decls. */
5639 FOR_EACH_VEC_ELT (fld.decls, i, t)
5640 assign_assembler_name_if_neeeded (t);
5641
5642 /* Traverse every decl found freeing its language data. */
5643 FOR_EACH_VEC_ELT (fld.decls, i, t)
5644 free_lang_data_in_decl (t);
5645
5646 /* Traverse every type found freeing its language data. */
5647 FOR_EACH_VEC_ELT (fld.types, i, t)
5648 free_lang_data_in_type (t);
5649
5650 delete fld.pset;
5651 fld.worklist.release ();
5652 fld.decls.release ();
5653 fld.types.release ();
5654 }
5655
5656
5657 /* Free resources that are used by the FE but are not needed once it is done. */
5658
5659 static unsigned
5660 free_lang_data (void)
5661 {
5662 unsigned i;
5663
5664 /* If we are the LTO frontend we have freed lang-specific data already. */
5665 if (in_lto_p
5666 || !flag_generate_lto)
5667 return 0;
5668
5669 /* Allocate and assign alias sets to the standard integer types
5670 while the slots are still set up the way the front ends generated them. */
5671 for (i = 0; i < itk_none; ++i)
5672 if (integer_types[i])
5673 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5674
5675 /* Traverse the IL resetting language specific information for
5676 operands, expressions, etc. */
5677 free_lang_data_in_cgraph ();
5678
5679 /* Create gimple variants for common types. */
5680 ptrdiff_type_node = integer_type_node;
5681 fileptr_type_node = ptr_type_node;
5682
5683 /* Reset some langhooks. Do not reset types_compatible_p, it may
5684 still be used indirectly via the get_alias_set langhook. */
5685 lang_hooks.dwarf_name = lhd_dwarf_name;
5686 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5687 /* We do not want the default decl_assembler_name implementation,
5688 rather if we have fixed everything we want a wrapper around it
5689 asserting that all non-local symbols already got their assembler
5690 name and only produce assembler names for local symbols. Or rather
5691 make sure we never call decl_assembler_name on local symbols and
5692 devise a separate, middle-end private scheme for it. */
5693
5694 /* Reset diagnostic machinery. */
5695 tree_diagnostics_defaults (global_dc);
5696
5697 return 0;
5698 }
5699
5700
5701 namespace {
5702
5703 const pass_data pass_data_ipa_free_lang_data =
5704 {
5705 SIMPLE_IPA_PASS, /* type */
5706 "*free_lang_data", /* name */
5707 OPTGROUP_NONE, /* optinfo_flags */
5708 TV_IPA_FREE_LANG_DATA, /* tv_id */
5709 0, /* properties_required */
5710 0, /* properties_provided */
5711 0, /* properties_destroyed */
5712 0, /* todo_flags_start */
5713 0, /* todo_flags_finish */
5714 };
5715
5716 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5717 {
5718 public:
5719 pass_ipa_free_lang_data (gcc::context *ctxt)
5720 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5721 {}
5722
5723 /* opt_pass methods: */
5724 virtual unsigned int execute (function *) { return free_lang_data (); }
5725
5726 }; // class pass_ipa_free_lang_data
5727
5728 } // anon namespace
5729
5730 simple_ipa_opt_pass *
5731 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5732 {
5733 return new pass_ipa_free_lang_data (ctxt);
5734 }
5735
5736 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5737 ATTR_NAME. Also used internally by remove_attribute(). */
5738 bool
5739 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5740 {
5741 size_t ident_len = IDENTIFIER_LENGTH (ident);
5742
5743 if (ident_len == attr_len)
5744 {
5745 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5746 return true;
5747 }
5748 else if (ident_len == attr_len + 4)
5749 {
5750 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
5751 '__text__'. */
5752 const char *p = IDENTIFIER_POINTER (ident);
5753 if (p[0] == '_' && p[1] == '_'
5754 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5755 && strncmp (attr_name, p + 2, attr_len) == 0)
5756 return true;
5757 }
5758
5759 return false;
5760 }
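
/* A minimal sketch, kept out of the build with #if 0, of the matching rule
   above: the canonical name "packed" matches both the identifier "packed"
   and its underscored spelling "__packed__".  The helper name is made up
   for illustration.  */
#if 0
static bool
sketch_is_packed_ident (const_tree ident)
{
  return private_is_attribute_p ("packed", strlen ("packed"), ident);
}
#endif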
5761
5762 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5763 of ATTR_NAME, and LIST is not NULL_TREE. */
5764 tree
5765 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5766 {
5767 while (list)
5768 {
5769 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5770
5771 if (ident_len == attr_len)
5772 {
5773 if (!strcmp (attr_name,
5774 IDENTIFIER_POINTER (get_attribute_name (list))))
5775 break;
5776 }
5777 /* TODO: If we made sure that attributes were stored in the
5778 canonical form without '__...__' (ie, as in 'text' as opposed
5779 to '__text__') then we could avoid the following case. */
5780 else if (ident_len == attr_len + 4)
5781 {
5782 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5783 if (p[0] == '_' && p[1] == '_'
5784 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5785 && strncmp (attr_name, p + 2, attr_len) == 0)
5786 break;
5787 }
5788 list = TREE_CHAIN (list);
5789 }
5790
5791 return list;
5792 }
5793
5794 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5795 return a pointer to the first element of LIST whose attribute name
5796 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5797 '__text__'). */
5798
5799 tree
5800 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5801 tree list)
5802 {
5803 while (list)
5804 {
5805 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5806
5807 if (attr_len > ident_len)
5808 {
5809 list = TREE_CHAIN (list);
5810 continue;
5811 }
5812
5813 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5814
5815 if (strncmp (attr_name, p, attr_len) == 0)
5816 break;
5817
5818 /* TODO: If we made sure that attributes were stored in the
5819 canonical form without '__...__' (ie, as in 'text' as opposed
5820 to '__text__') then we could avoid the following case. */
5821 if (p[0] == '_' && p[1] == '_'
5822 && strncmp (attr_name, p + 2, attr_len) == 0)
5823 break;
5824
5825 list = TREE_CHAIN (list);
5826 }
5827
5828 return list;
5829 }
5830
5831
5832 /* A variant of lookup_attribute() that can be used with an identifier
5833 as the first argument, and where the identifier can be either
5834 'text' or '__text__'.
5835
5836 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5837 return a pointer to the attribute's list element if the attribute
5838 is part of the list, or NULL_TREE if not found. If the attribute
5839 appears more than once, this only returns the first occurrence; the
5840 TREE_CHAIN of the return value should be passed back in if further
5841 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5842 can be in the form 'text' or '__text__'. */
5843 static tree
5844 lookup_ident_attribute (tree attr_identifier, tree list)
5845 {
5846 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5847
5848 while (list)
5849 {
5850 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5851 == IDENTIFIER_NODE);
5852
5853 /* Identifiers can be compared directly for equality. */
5854 if (attr_identifier == get_attribute_name (list))
5855 break;
5856
5857 /* If they are not equal, they may still be one in the form
5858 'text' while the other one is in the form '__text__'. TODO:
5859 If we were storing attributes in normalized 'text' form, then
5860 this could all go away and we could take full advantage of
5861 the fact that we're comparing identifiers. :-) */
5862 {
5863 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5864 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5865
5866 if (ident_len == attr_len + 4)
5867 {
5868 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5869 const char *q = IDENTIFIER_POINTER (attr_identifier);
5870 if (p[0] == '_' && p[1] == '_'
5871 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5872 && strncmp (q, p + 2, attr_len) == 0)
5873 break;
5874 }
5875 else if (ident_len + 4 == attr_len)
5876 {
5877 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5878 const char *q = IDENTIFIER_POINTER (attr_identifier);
5879 if (q[0] == '_' && q[1] == '_'
5880 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5881 && strncmp (q + 2, p, ident_len) == 0)
5882 break;
5883 }
5884 }
5885 list = TREE_CHAIN (list);
5886 }
5887
5888 return list;
5889 }
5890
5891 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5892 modified list. */
5893
5894 tree
5895 remove_attribute (const char *attr_name, tree list)
5896 {
5897 tree *p;
5898 size_t attr_len = strlen (attr_name);
5899
5900 gcc_checking_assert (attr_name[0] != '_');
5901
5902 for (p = &list; *p; )
5903 {
5904 tree l = *p;
5905 /* TODO: If we were storing attributes in normalized form, here
5906 we could use a simple strcmp(). */
5907 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5908 *p = TREE_CHAIN (l);
5909 else
5910 p = &TREE_CHAIN (l);
5911 }
5912
5913 return list;
5914 }
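
/* Illustrative sketch, not part of the original sources: a hypothetical
   caller could strip every dllimport entry, whichever spelling it used,
   with

     attrs = remove_attribute ("dllimport", attrs);

   where ATTRS is some existing attribute chain; the result must be
   stored back, since the head of the list may change.  */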
5915
5916 /* Return an attribute list that is the union of A1 and A2. */
5917
5918 tree
5919 merge_attributes (tree a1, tree a2)
5920 {
5921 tree attributes;
5922
5923 /* Either one unset? Take the set one. */
5924
5925 if ((attributes = a1) == 0)
5926 attributes = a2;
5927
5928 /* One that completely contains the other? Take it. */
5929
5930 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5931 {
5932 if (attribute_list_contained (a2, a1))
5933 attributes = a2;
5934 else
5935 {
5936 /* Pick the longer list, and hang the other list onto it. */
5937
5938 if (list_length (a1) < list_length (a2))
5939 attributes = a2, a2 = a1;
5940
5941 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5942 {
5943 tree a;
5944 for (a = lookup_ident_attribute (get_attribute_name (a2),
5945 attributes);
5946 a != NULL_TREE && !attribute_value_equal (a, a2);
5947 a = lookup_ident_attribute (get_attribute_name (a2),
5948 TREE_CHAIN (a)))
5949 ;
5950 if (a == NULL_TREE)
5951 {
5952 a1 = copy_node (a2);
5953 TREE_CHAIN (a1) = attributes;
5954 attributes = a1;
5955 }
5956 }
5957 }
5958 }
5959 return attributes;
5960 }
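
/* Illustrative sketch, not part of the original sources: merging the
   attribute lists of two declarations of the same entity might look
   like

     tree merged = merge_attributes (DECL_ATTRIBUTES (olddecl),
				     DECL_ATTRIBUTES (newdecl));

   which is what merge_decl_attributes below does; entries that are
   already present with an equal value are not duplicated.  */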
5961
5962 /* Given types T1 and T2, merge their attributes and return
5963 the result. */
5964
5965 tree
5966 merge_type_attributes (tree t1, tree t2)
5967 {
5968 return merge_attributes (TYPE_ATTRIBUTES (t1),
5969 TYPE_ATTRIBUTES (t2));
5970 }
5971
5972 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5973 the result. */
5974
5975 tree
5976 merge_decl_attributes (tree olddecl, tree newdecl)
5977 {
5978 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5979 DECL_ATTRIBUTES (newdecl));
5980 }
5981
5982 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5983
5984 /* Specialization of merge_decl_attributes for various Windows targets.
5985
5986 This handles the following situation:
5987
5988 __declspec (dllimport) int foo;
5989 int foo;
5990
5991 The second instance of `foo' nullifies the dllimport. */
5992
5993 tree
5994 merge_dllimport_decl_attributes (tree old, tree new_tree)
5995 {
5996 tree a;
5997 int delete_dllimport_p = 1;
5998
5999 /* What we need to do here is remove dllimport from `old' if it doesn't
6000 appear in `new'. dllimport behaves like extern: if a declaration is
6001 marked dllimport and a definition appears later, then the object
6002 is not dllimport'd. We also remove a `new' dllimport if the old list
6003 contains dllexport: dllexport always overrides dllimport, regardless
6004 of the order of declaration. */
6005 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6006 delete_dllimport_p = 0;
6007 else if (DECL_DLLIMPORT_P (new_tree)
6008 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6009 {
6010 DECL_DLLIMPORT_P (new_tree) = 0;
6011 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6012 "dllimport ignored", new_tree);
6013 }
6014 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6015 {
6016 /* Warn about overriding a symbol that has already been used, e.g.:
6017 extern int __attribute__ ((dllimport)) foo;
6018 int* bar () {return &foo;}
6019 int foo;
6020 */
6021 if (TREE_USED (old))
6022 {
6023 warning (0, "%q+D redeclared without dllimport attribute "
6024 "after being referenced with dll linkage", new_tree);
6025 /* If we have used a variable's address with dllimport linkage,
6026 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6027 decl may already have had TREE_CONSTANT computed.
6028 We still remove the attribute so that assembler code refers
6029 to '&foo' rather than '_imp__foo'. */
6030 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6031 DECL_DLLIMPORT_P (new_tree) = 1;
6032 }
6033
6034 /* Let an inline definition silently override the external reference,
6035 but otherwise warn about attribute inconsistency. */
6036 else if (TREE_CODE (new_tree) == VAR_DECL
6037 || !DECL_DECLARED_INLINE_P (new_tree))
6038 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6039 "previous dllimport ignored", new_tree);
6040 }
6041 else
6042 delete_dllimport_p = 0;
6043
6044 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6045
6046 if (delete_dllimport_p)
6047 a = remove_attribute ("dllimport", a);
6048
6049 return a;
6050 }
6051
6052 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6053 struct attribute_spec.handler. */
6054
6055 tree
6056 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6057 bool *no_add_attrs)
6058 {
6059 tree node = *pnode;
6060 bool is_dllimport;
6061
6062 /* These attributes may apply to structure and union types being created,
6063 but otherwise should be passed on to the declaration involved. */
6064 if (!DECL_P (node))
6065 {
6066 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6067 | (int) ATTR_FLAG_ARRAY_NEXT))
6068 {
6069 *no_add_attrs = true;
6070 return tree_cons (name, args, NULL_TREE);
6071 }
6072 if (TREE_CODE (node) == RECORD_TYPE
6073 || TREE_CODE (node) == UNION_TYPE)
6074 {
6075 node = TYPE_NAME (node);
6076 if (!node)
6077 return NULL_TREE;
6078 }
6079 else
6080 {
6081 warning (OPT_Wattributes, "%qE attribute ignored",
6082 name);
6083 *no_add_attrs = true;
6084 return NULL_TREE;
6085 }
6086 }
6087
6088 if (TREE_CODE (node) != FUNCTION_DECL
6089 && TREE_CODE (node) != VAR_DECL
6090 && TREE_CODE (node) != TYPE_DECL)
6091 {
6092 *no_add_attrs = true;
6093 warning (OPT_Wattributes, "%qE attribute ignored",
6094 name);
6095 return NULL_TREE;
6096 }
6097
6098 if (TREE_CODE (node) == TYPE_DECL
6099 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6100 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6101 {
6102 *no_add_attrs = true;
6103 warning (OPT_Wattributes, "%qE attribute ignored",
6104 name);
6105 return NULL_TREE;
6106 }
6107
6108 is_dllimport = is_attribute_p ("dllimport", name);
6109
6110 /* Report errors on dllimport ambiguities now, before they cause
6111 any damage. */
6112 if (is_dllimport)
6113 {
6114 /* Honor any target-specific overrides. */
6115 if (!targetm.valid_dllimport_attribute_p (node))
6116 *no_add_attrs = true;
6117
6118 else if (TREE_CODE (node) == FUNCTION_DECL
6119 && DECL_DECLARED_INLINE_P (node))
6120 {
6121 warning (OPT_Wattributes, "inline function %q+D declared as "
6122 " dllimport: attribute ignored", node);
6123 *no_add_attrs = true;
6124 }
6125 /* Like MS, treat the definition of a dllimport'd variable or
6126 non-inlined function as an error. */
6127 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6128 {
6129 error ("function %q+D definition is marked dllimport", node);
6130 *no_add_attrs = true;
6131 }
6132
6133 else if (TREE_CODE (node) == VAR_DECL)
6134 {
6135 if (DECL_INITIAL (node))
6136 {
6137 error ("variable %q+D definition is marked dllimport",
6138 node);
6139 *no_add_attrs = true;
6140 }
6141
6142 /* `extern' needn't be specified with dllimport.
6143 Specify `extern' now and hope for the best. Sigh. */
6144 DECL_EXTERNAL (node) = 1;
6145 /* Also, implicitly give dllimport'd variables declared within
6146 a function global linkage (TREE_PUBLIC), unless declared static. */
6147 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6148 TREE_PUBLIC (node) = 1;
6149 }
6150
6151 if (!*no_add_attrs)
6152 DECL_DLLIMPORT_P (node) = 1;
6153 }
6154 else if (TREE_CODE (node) == FUNCTION_DECL
6155 && DECL_DECLARED_INLINE_P (node)
6156 && flag_keep_inline_dllexport)
6157 /* An exported function, even if inline, must be emitted. */
6158 DECL_EXTERNAL (node) = 0;
6159
6160 /* Report error if symbol is not accessible at global scope. */
6161 if (!TREE_PUBLIC (node)
6162 && (TREE_CODE (node) == VAR_DECL
6163 || TREE_CODE (node) == FUNCTION_DECL))
6164 {
6165 error ("external linkage required for symbol %q+D because of "
6166 "%qE attribute", node, name);
6167 *no_add_attrs = true;
6168 }
6169
6170 /* A dllexport'd entity must have default visibility so that other
6171 program units (shared libraries or the main executable) can see
6172 it. A dllimport'd entity must have default visibility so that
6173 the linker knows that undefined references within this program
6174 unit can be resolved by the dynamic linker. */
6175 if (!*no_add_attrs)
6176 {
6177 if (DECL_VISIBILITY_SPECIFIED (node)
6178 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6179 error ("%qE implies default visibility, but %qD has already "
6180 "been declared with a different visibility",
6181 name, node);
6182 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6183 DECL_VISIBILITY_SPECIFIED (node) = 1;
6184 }
6185
6186 return NULL_TREE;
6187 }
6188
6189 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6190 \f
6191 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6192 of the various TYPE_QUAL values. */
6193
6194 static void
6195 set_type_quals (tree type, int type_quals)
6196 {
6197 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6198 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6199 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6200 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6201 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6202 }
6203
6204 /* Returns true iff unqualified CAND and BASE are equivalent. */
6205
6206 bool
6207 check_base_type (const_tree cand, const_tree base)
6208 {
6209 return (TYPE_NAME (cand) == TYPE_NAME (base)
6210 /* Apparently this is needed for Objective-C. */
6211 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6212 /* Check alignment. */
6213 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6214 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6215 TYPE_ATTRIBUTES (base)));
6216 }
6217
6218 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6219
6220 bool
6221 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6222 {
6223 return (TYPE_QUALS (cand) == type_quals
6224 && check_base_type (cand, base));
6225 }
6226
6227 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6228
6229 static bool
6230 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6231 {
6232 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6233 && TYPE_NAME (cand) == TYPE_NAME (base)
6234 /* Apparently this is needed for Objective-C. */
6235 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6236 /* Check alignment. */
6237 && TYPE_ALIGN (cand) == align
6238 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6239 TYPE_ATTRIBUTES (base)));
6240 }
6241
6242 /* This function checks to see if TYPE matches the size of one of the
6243 built-in atomic types, and returns that core atomic type. */
6244
6245 static tree
6246 find_atomic_core_type (tree type)
6247 {
6248 tree base_atomic_type;
6249
6250 /* Only handle complete types. */
6251 if (TYPE_SIZE (type) == NULL_TREE)
6252 return NULL_TREE;
6253
6254 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6255 switch (type_size)
6256 {
6257 case 8:
6258 base_atomic_type = atomicQI_type_node;
6259 break;
6260
6261 case 16:
6262 base_atomic_type = atomicHI_type_node;
6263 break;
6264
6265 case 32:
6266 base_atomic_type = atomicSI_type_node;
6267 break;
6268
6269 case 64:
6270 base_atomic_type = atomicDI_type_node;
6271 break;
6272
6273 case 128:
6274 base_atomic_type = atomicTI_type_node;
6275 break;
6276
6277 default:
6278 base_atomic_type = NULL_TREE;
6279 }
6280
6281 return base_atomic_type;
6282 }
6283
6284 /* Return a version of the TYPE, qualified as indicated by the
6285 TYPE_QUALS, if one exists. If no qualified version exists yet,
6286 return NULL_TREE. */
6287
6288 tree
6289 get_qualified_type (tree type, int type_quals)
6290 {
6291 tree t;
6292
6293 if (TYPE_QUALS (type) == type_quals)
6294 return type;
6295
6296 /* Search the chain of variants to see if there is already one there just
6297 like the one we need to have. If so, use that existing one. We must
6298 preserve the TYPE_NAME, since there is code that depends on this. */
6299 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6300 if (check_qualified_type (t, type, type_quals))
6301 return t;
6302
6303 return NULL_TREE;
6304 }
6305
6306 /* Like get_qualified_type, but creates the type if it does not
6307 exist. This function never returns NULL_TREE. */
6308
6309 tree
6310 build_qualified_type (tree type, int type_quals)
6311 {
6312 tree t;
6313
6314 /* See if we already have the appropriate qualified variant. */
6315 t = get_qualified_type (type, type_quals);
6316
6317 /* If not, build it. */
6318 if (!t)
6319 {
6320 t = build_variant_type_copy (type);
6321 set_type_quals (t, type_quals);
6322
6323 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6324 {
6325 /* See if this object can map to a basic atomic type. */
6326 tree atomic_type = find_atomic_core_type (type);
6327 if (atomic_type)
6328 {
6329 /* Ensure the alignment of this type is compatible with
6330 the required alignment of the atomic type. */
6331 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6332 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6333 }
6334 }
6335
6336 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6337 /* Propagate structural equality. */
6338 SET_TYPE_STRUCTURAL_EQUALITY (t);
6339 else if (TYPE_CANONICAL (type) != type)
6340 /* Build the underlying canonical type, since it is different
6341 from TYPE. */
6342 {
6343 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6344 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6345 }
6346 else
6347 /* T is its own canonical type. */
6348 TYPE_CANONICAL (t) = t;
6349
6350 }
6351
6352 return t;
6353 }
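
/* Illustrative sketch, not part of the original sources: a front end
   asking for a "const volatile int" variant might write

     tree cv_int = build_qualified_type (integer_type_node,
					 TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   and receive either an existing variant found by get_qualified_type
   or a freshly built one with TYPE_READONLY and TYPE_VOLATILE set.  */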
6354
6355 /* Create a variant of TYPE with alignment ALIGN. */
6356
6357 tree
6358 build_aligned_type (tree type, unsigned int align)
6359 {
6360 tree t;
6361
6362 if (TYPE_PACKED (type)
6363 || TYPE_ALIGN (type) == align)
6364 return type;
6365
6366 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6367 if (check_aligned_type (t, type, align))
6368 return t;
6369
6370 t = build_variant_type_copy (type);
6371 TYPE_ALIGN (t) = align;
6372
6373 return t;
6374 }
6375
6376 /* Create a new distinct copy of TYPE. The new type is made its own
6377 MAIN_VARIANT. If TYPE requires structural equality checks, the
6378 resulting type requires structural equality checks; otherwise, its
6379 TYPE_CANONICAL points to itself. */
6380
6381 tree
6382 build_distinct_type_copy (tree type)
6383 {
6384 tree t = copy_node (type);
6385
6386 TYPE_POINTER_TO (t) = 0;
6387 TYPE_REFERENCE_TO (t) = 0;
6388
6389 /* Set the canonical type either to a new equivalence class, or
6390 propagate the need for structural equality checks. */
6391 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6392 SET_TYPE_STRUCTURAL_EQUALITY (t);
6393 else
6394 TYPE_CANONICAL (t) = t;
6395
6396 /* Make it its own variant. */
6397 TYPE_MAIN_VARIANT (t) = t;
6398 TYPE_NEXT_VARIANT (t) = 0;
6399
6400 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6401 whose TREE_TYPE is not t. This can also happen in the Ada
6402 frontend when using subtypes. */
6403
6404 return t;
6405 }
6406
6407 /* Create a new variant of TYPE, equivalent but distinct. This is so
6408 the caller can modify it. TYPE_CANONICAL for the return type will
6409 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6410 are considered equal by the language itself (or that both types
6411 require structural equality checks). */
6412
6413 tree
6414 build_variant_type_copy (tree type)
6415 {
6416 tree t, m = TYPE_MAIN_VARIANT (type);
6417
6418 t = build_distinct_type_copy (type);
6419
6420 /* Since we're building a variant, assume that it is a non-semantic
6421 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6422 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6423
6424 /* Add the new type to the chain of variants of TYPE. */
6425 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6426 TYPE_NEXT_VARIANT (m) = t;
6427 TYPE_MAIN_VARIANT (t) = m;
6428
6429 return t;
6430 }
6431 \f
6432 /* Return true if the from trees in both tree maps are equal. */
6433
6434 int
6435 tree_map_base_eq (const void *va, const void *vb)
6436 {
6437 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6438 *const b = (const struct tree_map_base *) vb;
6439 return (a->from == b->from);
6440 }
6441
6442 /* Hash a from tree in a tree_map_base. */
6443
6444 unsigned int
6445 tree_map_base_hash (const void *item)
6446 {
6447 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6448 }
6449
6450 /* Return true if this tree map structure is marked for garbage collection
6451 purposes. We simply return true if the from tree is marked, so that this
6452 structure goes away when the from tree goes away. */
6453
6454 int
6455 tree_map_base_marked_p (const void *p)
6456 {
6457 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6458 }
6459
6460 /* Hash a from tree in a tree_map. */
6461
6462 unsigned int
6463 tree_map_hash (const void *item)
6464 {
6465 return (((const struct tree_map *) item)->hash);
6466 }
6467
6468 /* Hash a from tree in a tree_decl_map. */
6469
6470 unsigned int
6471 tree_decl_map_hash (const void *item)
6472 {
6473 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6474 }
6475
6476 /* Return the initialization priority for DECL. */
6477
6478 priority_type
6479 decl_init_priority_lookup (tree decl)
6480 {
6481 symtab_node *snode = symtab_node::get (decl);
6482
6483 if (!snode)
6484 return DEFAULT_INIT_PRIORITY;
6485 return
6486 snode->get_init_priority ();
6487 }
6488
6489 /* Return the finalization priority for DECL. */
6490
6491 priority_type
6492 decl_fini_priority_lookup (tree decl)
6493 {
6494 cgraph_node *node = cgraph_node::get (decl);
6495
6496 if (!node)
6497 return DEFAULT_INIT_PRIORITY;
6498 return
6499 node->get_fini_priority ();
6500 }
6501
6502 /* Set the initialization priority for DECL to PRIORITY. */
6503
6504 void
6505 decl_init_priority_insert (tree decl, priority_type priority)
6506 {
6507 struct symtab_node *snode;
6508
6509 if (priority == DEFAULT_INIT_PRIORITY)
6510 {
6511 snode = symtab_node::get (decl);
6512 if (!snode)
6513 return;
6514 }
6515 else if (TREE_CODE (decl) == VAR_DECL)
6516 snode = varpool_node::get_create (decl);
6517 else
6518 snode = cgraph_node::get_create (decl);
6519 snode->set_init_priority (priority);
6520 }
6521
6522 /* Set the finalization priority for DECL to PRIORITY. */
6523
6524 void
6525 decl_fini_priority_insert (tree decl, priority_type priority)
6526 {
6527 struct cgraph_node *node;
6528
6529 if (priority == DEFAULT_INIT_PRIORITY)
6530 {
6531 node = cgraph_node::get (decl);
6532 if (!node)
6533 return;
6534 }
6535 else
6536 node = cgraph_node::get_create (decl);
6537 node->set_fini_priority (priority);
6538 }
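
/* Illustrative sketch, not part of the original sources: a front end
   handling something like __attribute__ ((constructor (200))) on a
   hypothetical FNDECL could record the priority with

     decl_init_priority_insert (fndecl, 200);

   and later retrieve it via decl_init_priority_lookup (fndecl);
   passing DEFAULT_INIT_PRIORITY is a no-op for decls that do not yet
   have a symtab node.  */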
6539
6540 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6541
6542 static void
6543 print_debug_expr_statistics (void)
6544 {
6545 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6546 (long) htab_size (debug_expr_for_decl),
6547 (long) htab_elements (debug_expr_for_decl),
6548 htab_collisions (debug_expr_for_decl));
6549 }
6550
6551 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6552
6553 static void
6554 print_value_expr_statistics (void)
6555 {
6556 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6557 (long) htab_size (value_expr_for_decl),
6558 (long) htab_elements (value_expr_for_decl),
6559 htab_collisions (value_expr_for_decl));
6560 }
6561
6562 /* Look up a debug expression for FROM, and return it if we find one. */
6563
6564 tree
6565 decl_debug_expr_lookup (tree from)
6566 {
6567 struct tree_decl_map *h, in;
6568 in.base.from = from;
6569
6570 h = (struct tree_decl_map *)
6571 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6572 if (h)
6573 return h->to;
6574 return NULL_TREE;
6575 }
6576
6577 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6578
6579 void
6580 decl_debug_expr_insert (tree from, tree to)
6581 {
6582 struct tree_decl_map *h;
6583 void **loc;
6584
6585 h = ggc_alloc<tree_decl_map> ();
6586 h->base.from = from;
6587 h->to = to;
6588 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6589 INSERT);
6590 *(struct tree_decl_map **) loc = h;
6591 }
6592
6593 /* Look up a value expression for FROM, and return it if we find one. */
6594
6595 tree
6596 decl_value_expr_lookup (tree from)
6597 {
6598 struct tree_decl_map *h, in;
6599 in.base.from = from;
6600
6601 h = (struct tree_decl_map *)
6602 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6603 if (h)
6604 return h->to;
6605 return NULL_TREE;
6606 }
6607
6608 /* Insert a mapping FROM->TO in the value expression hashtable. */
6609
6610 void
6611 decl_value_expr_insert (tree from, tree to)
6612 {
6613 struct tree_decl_map *h;
6614 void **loc;
6615
6616 h = ggc_alloc<tree_decl_map> ();
6617 h->base.from = from;
6618 h->to = to;
6619 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6620 INSERT);
6621 *(struct tree_decl_map **) loc = h;
6622 }
6623
6624 /* Look up a vector of debug arguments for FROM, and return it if we
6625 find one. */
6626
6627 vec<tree, va_gc> **
6628 decl_debug_args_lookup (tree from)
6629 {
6630 struct tree_vec_map *h, in;
6631
6632 if (!DECL_HAS_DEBUG_ARGS_P (from))
6633 return NULL;
6634 gcc_checking_assert (debug_args_for_decl != NULL);
6635 in.base.from = from;
6636 h = (struct tree_vec_map *)
6637 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6638 if (h)
6639 return &h->to;
6640 return NULL;
6641 }
6642
6643 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6644 arguments hashtable. */
6645
6646 vec<tree, va_gc> **
6647 decl_debug_args_insert (tree from)
6648 {
6649 struct tree_vec_map *h;
6650 void **loc;
6651
6652 if (DECL_HAS_DEBUG_ARGS_P (from))
6653 return decl_debug_args_lookup (from);
6654 if (debug_args_for_decl == NULL)
6655 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6656 tree_vec_map_eq, 0);
6657 h = ggc_alloc<tree_vec_map> ();
6658 h->base.from = from;
6659 h->to = NULL;
6660 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6661 INSERT);
6662 *(struct tree_vec_map **) loc = h;
6663 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6664 return &h->to;
6665 }
6666
6667 /* Hashing of types so that we don't make duplicates.
6668 The entry point is `type_hash_canon'. */
6669
6670 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6671 with types in the TREE_VALUE slots), by adding the hash codes
6672 of the individual types. */
6673
6674 static void
6675 type_hash_list (const_tree list, inchash::hash &hstate)
6676 {
6677 const_tree tail;
6678
6679 for (tail = list; tail; tail = TREE_CHAIN (tail))
6680 if (TREE_VALUE (tail) != error_mark_node)
6681 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6682 }
6683
6684 /* These are the Hashtable callback functions. */
6685
6686 /* Returns true iff the types are equivalent. */
6687
6688 static int
6689 type_hash_eq (const void *va, const void *vb)
6690 {
6691 const struct type_hash *const a = (const struct type_hash *) va,
6692 *const b = (const struct type_hash *) vb;
6693
6694 /* First test the things that are the same for all types. */
6695 if (a->hash != b->hash
6696 || TREE_CODE (a->type) != TREE_CODE (b->type)
6697 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6698 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6699 TYPE_ATTRIBUTES (b->type))
6700 || (TREE_CODE (a->type) != COMPLEX_TYPE
6701 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6702 return 0;
6703
6704 /* Be careful about comparing arrays before and after the element type
6705 has been completed; don't compare TYPE_ALIGN unless both types are
6706 complete. */
6707 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6708 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6709 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6710 return 0;
6711
6712 switch (TREE_CODE (a->type))
6713 {
6714 case VOID_TYPE:
6715 case COMPLEX_TYPE:
6716 case POINTER_TYPE:
6717 case REFERENCE_TYPE:
6718 case NULLPTR_TYPE:
6719 return 1;
6720
6721 case VECTOR_TYPE:
6722 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6723
6724 case ENUMERAL_TYPE:
6725 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6726 && !(TYPE_VALUES (a->type)
6727 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6728 && TYPE_VALUES (b->type)
6729 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6730 && type_list_equal (TYPE_VALUES (a->type),
6731 TYPE_VALUES (b->type))))
6732 return 0;
6733
6734 /* ... fall through ... */
6735
6736 case INTEGER_TYPE:
6737 case REAL_TYPE:
6738 case BOOLEAN_TYPE:
6739 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6740 return false;
6741 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6742 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6743 TYPE_MAX_VALUE (b->type)))
6744 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6745 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6746 TYPE_MIN_VALUE (b->type))));
6747
6748 case FIXED_POINT_TYPE:
6749 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6750
6751 case OFFSET_TYPE:
6752 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6753
6754 case METHOD_TYPE:
6755 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6756 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6757 || (TYPE_ARG_TYPES (a->type)
6758 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6759 && TYPE_ARG_TYPES (b->type)
6760 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6761 && type_list_equal (TYPE_ARG_TYPES (a->type),
6762 TYPE_ARG_TYPES (b->type)))))
6763 break;
6764 return 0;
6765 case ARRAY_TYPE:
6766 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6767
6768 case RECORD_TYPE:
6769 case UNION_TYPE:
6770 case QUAL_UNION_TYPE:
6771 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6772 || (TYPE_FIELDS (a->type)
6773 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6774 && TYPE_FIELDS (b->type)
6775 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6776 && type_list_equal (TYPE_FIELDS (a->type),
6777 TYPE_FIELDS (b->type))));
6778
6779 case FUNCTION_TYPE:
6780 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6781 || (TYPE_ARG_TYPES (a->type)
6782 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6783 && TYPE_ARG_TYPES (b->type)
6784 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6785 && type_list_equal (TYPE_ARG_TYPES (a->type),
6786 TYPE_ARG_TYPES (b->type))))
6787 break;
6788 return 0;
6789
6790 default:
6791 return 0;
6792 }
6793
6794 if (lang_hooks.types.type_hash_eq != NULL)
6795 return lang_hooks.types.type_hash_eq (a->type, b->type);
6796
6797 return 1;
6798 }
6799
6800 /* Return the cached hash value. */
6801
6802 static hashval_t
6803 type_hash_hash (const void *item)
6804 {
6805 return ((const struct type_hash *) item)->hash;
6806 }
6807
6808 /* Given TYPE, and HASHCODE its hash code, return the canonical
6809 object for an identical type if one already exists.
6810 Otherwise, return TYPE, and record it as the canonical object.
6811
6812 To use this function, first create a type of the sort you want.
6813 Then compute its hash code from the fields of the type that
6814 make it different from other similar types.
6815 Then call this function and use the value. */
6816
6817 tree
6818 type_hash_canon (unsigned int hashcode, tree type)
6819 {
6820 type_hash in;
6821 void **loc;
6822
6823 /* The hash table only contains main variants, so ensure that's what we're
6824 being passed. */
6825 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6826
6827 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6828 must call that routine before comparing TYPE_ALIGNs. */
6829 layout_type (type);
6830
6831 in.hash = hashcode;
6832 in.type = type;
6833
6834 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6835 if (*loc)
6836 {
6837 tree t1 = ((type_hash *) *loc)->type;
6838 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6839 if (GATHER_STATISTICS)
6840 {
6841 tree_code_counts[(int) TREE_CODE (type)]--;
6842 tree_node_counts[(int) t_kind]--;
6843 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6844 }
6845 return t1;
6846 }
6847 else
6848 {
6849 struct type_hash *h;
6850
6851 h = ggc_alloc<type_hash> ();
6852 h->hash = hashcode;
6853 h->type = type;
6854 *loc = (void *)h;
6855
6856 return type;
6857 }
6858 }
6859
6860 /* See if the data pointed to by the type hash table is marked. We consider
6861 it marked if the type is marked or if a debug type number or symbol
6862 table entry has been made for the type. */
6863
6864 static int
6865 type_hash_marked_p (const void *p)
6866 {
6867 const_tree const type = ((const struct type_hash *) p)->type;
6868
6869 return ggc_marked_p (type);
6870 }
6871
6872 static void
6873 print_type_hash_statistics (void)
6874 {
6875 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6876 (long) htab_size (type_hash_table),
6877 (long) htab_elements (type_hash_table),
6878 htab_collisions (type_hash_table));
6879 }
6880
6881 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6882 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6883 by adding the hash codes of the individual attributes. */
6884
6885 static void
6886 attribute_hash_list (const_tree list, inchash::hash &hstate)
6887 {
6888 const_tree tail;
6889
6890 for (tail = list; tail; tail = TREE_CHAIN (tail))
6891 /* ??? Do we want to add in TREE_VALUE too? */
6892 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6893 }
6894
6895 /* Given two lists of attributes, return true if list L2 is
6896 equivalent to L1. */
6897
6898 int
6899 attribute_list_equal (const_tree l1, const_tree l2)
6900 {
6901 if (l1 == l2)
6902 return 1;
6903
6904 return attribute_list_contained (l1, l2)
6905 && attribute_list_contained (l2, l1);
6906 }
6907
6908 /* Given two lists of attributes, return true if list L2 is
6909 completely contained within L1. */
6910 /* ??? This would be faster if attribute names were stored in a canonicalized
6911 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6912 must be used to show these elements are equivalent (which they are). */
6913 /* ??? It's not clear that attributes with arguments will always be handled
6914 correctly. */
6915
6916 int
6917 attribute_list_contained (const_tree l1, const_tree l2)
6918 {
6919 const_tree t1, t2;
6920
6921 /* First check the obvious, maybe the lists are identical. */
6922 if (l1 == l2)
6923 return 1;
6924
6925 /* Maybe the lists are similar. */
6926 for (t1 = l1, t2 = l2;
6927 t1 != 0 && t2 != 0
6928 && get_attribute_name (t1) == get_attribute_name (t2)
6929 && TREE_VALUE (t1) == TREE_VALUE (t2);
6930 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6931 ;
6932
6933 /* Maybe the lists are equal. */
6934 if (t1 == 0 && t2 == 0)
6935 return 1;
6936
6937 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6938 {
6939 const_tree attr;
6940 /* This CONST_CAST is okay because lookup_attribute does not
6941 modify its argument and the return value is assigned to a
6942 const_tree. */
6943 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6944 CONST_CAST_TREE (l1));
6945 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6946 attr = lookup_ident_attribute (get_attribute_name (t2),
6947 TREE_CHAIN (attr)))
6948 ;
6949
6950 if (attr == NULL_TREE)
6951 return 0;
6952 }
6953
6954 return 1;
6955 }
6956
6957 /* Given two lists of types
6958 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6959 return 1 if the lists contain the same types in the same order.
6960 Also, the TREE_PURPOSEs must match. */
6961
6962 int
6963 type_list_equal (const_tree l1, const_tree l2)
6964 {
6965 const_tree t1, t2;
6966
6967 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6968 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6969 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6970 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6971 && (TREE_TYPE (TREE_PURPOSE (t1))
6972 == TREE_TYPE (TREE_PURPOSE (t2))))))
6973 return 0;
6974
6975 return t1 == t2;
6976 }
6977
6978 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6979 given by TYPE. If the argument list accepts variable arguments,
6980 then this function counts only the ordinary arguments. */
6981
6982 int
6983 type_num_arguments (const_tree type)
6984 {
6985 int i = 0;
6986 tree t;
6987
6988 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6989 /* If the function does not take a variable number of arguments,
6990 the last element in the list will have type `void'. */
6991 if (VOID_TYPE_P (TREE_VALUE (t)))
6992 break;
6993 else
6994 ++i;
6995
6996 return i;
6997 }
6998
6999 /* Nonzero if integer constants T1 and T2
7000 represent the same constant value. */
7001
7002 int
7003 tree_int_cst_equal (const_tree t1, const_tree t2)
7004 {
7005 if (t1 == t2)
7006 return 1;
7007
7008 if (t1 == 0 || t2 == 0)
7009 return 0;
7010
7011 if (TREE_CODE (t1) == INTEGER_CST
7012 && TREE_CODE (t2) == INTEGER_CST
7013 && wi::to_widest (t1) == wi::to_widest (t2))
7014 return 1;
7015
7016 return 0;
7017 }
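
/* Illustrative sketch, not part of the original sources: because the
   comparison goes through wi::to_widest, constants of different types
   but identical value compare equal, e.g.

     tree_int_cst_equal (build_int_cst (integer_type_node, 7),
			 build_int_cst (long_integer_type_node, 7))

   returns 1.  */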
7018
7019 /* Return true if T is an INTEGER_CST whose numerical value (extended
7020 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7021
7022 bool
7023 tree_fits_shwi_p (const_tree t)
7024 {
7025 return (t != NULL_TREE
7026 && TREE_CODE (t) == INTEGER_CST
7027 && wi::fits_shwi_p (wi::to_widest (t)));
7028 }
7029
7030 /* Return true if T is an INTEGER_CST whose numerical value (extended
7031 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7032
7033 bool
7034 tree_fits_uhwi_p (const_tree t)
7035 {
7036 return (t != NULL_TREE
7037 && TREE_CODE (t) == INTEGER_CST
7038 && wi::fits_uhwi_p (wi::to_widest (t)));
7039 }
7040
7041 /* T is an INTEGER_CST whose numerical value (extended according to
7042 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7043 HOST_WIDE_INT. */
7044
7045 HOST_WIDE_INT
7046 tree_to_shwi (const_tree t)
7047 {
7048 gcc_assert (tree_fits_shwi_p (t));
7049 return TREE_INT_CST_LOW (t);
7050 }
7051
7052 /* T is an INTEGER_CST whose numerical value (extended according to
7053 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7054 HOST_WIDE_INT. */
7055
7056 unsigned HOST_WIDE_INT
7057 tree_to_uhwi (const_tree t)
7058 {
7059 gcc_assert (tree_fits_uhwi_p (t));
7060 return TREE_INT_CST_LOW (t);
7061 }
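
/* Illustrative sketch, not part of the original sources: these
   accessors are meant to be guarded by the predicates above, e.g.

     if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
       {
	 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
	 ...
       }

   where TYPE is some complete type; calling tree_to_uhwi on a value
   that does not fit triggers the assertion.  */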
7062
7063 /* Return the most significant (sign) bit of T. */
7064
7065 int
7066 tree_int_cst_sign_bit (const_tree t)
7067 {
7068 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7069
7070 return wi::extract_uhwi (t, bitno, 1);
7071 }
7072
7073 /* Return an indication of the sign of the integer constant T.
7074 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7075 Note that -1 will never be returned if T's type is unsigned. */
7076
7077 int
7078 tree_int_cst_sgn (const_tree t)
7079 {
7080 if (wi::eq_p (t, 0))
7081 return 0;
7082 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7083 return 1;
7084 else if (wi::neg_p (t))
7085 return -1;
7086 else
7087 return 1;
7088 }
7089
7090 /* Return the minimum number of bits needed to represent VALUE in a
7091 signed or unsigned type; SGN says which. */
7092
7093 unsigned int
7094 tree_int_cst_min_precision (tree value, signop sgn)
7095 {
7096 /* If the value is negative, compute its negative minus 1. The latter
7097 adjustment is because the absolute value of the largest negative value
7098 is one larger than the largest positive value. This is equivalent to
7099 a bit-wise negation, so use that operation instead. */
7100
7101 if (tree_int_cst_sgn (value) < 0)
7102 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7103
7104 /* Return the number of bits needed, taking into account the fact
7105 that we need one more bit for a signed than for an unsigned type.
7106 If VALUE is 0 or -1, the minimum precision is 1 no matter
7107 whether SGN is SIGNED or UNSIGNED. */
7108
7109 if (integer_zerop (value))
7110 return 1;
7111 else
7112 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7113 }
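
/* Worked examples, not part of the original sources: for VALUE == 5
   the result is tree_floor_log2 (5) + 1 == 3 bits when SGN is UNSIGNED
   and 4 bits when SGN is SIGNED; for VALUE == -3 the bit-wise negation
   yields 2, so the result is 1 + 1 + 1 == 3 bits, matching the 3-bit
   signed range [-4, 3].  */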
7114
7115 /* Return truthvalue of whether T1 is the same tree structure as T2.
7116 Return 1 if they are the same.
7117 Return 0 if they are understandably different.
7118 Return -1 if either contains tree structure not understood by
7119 this function. */
7120
7121 int
7122 simple_cst_equal (const_tree t1, const_tree t2)
7123 {
7124 enum tree_code code1, code2;
7125 int cmp;
7126 int i;
7127
7128 if (t1 == t2)
7129 return 1;
7130 if (t1 == 0 || t2 == 0)
7131 return 0;
7132
7133 code1 = TREE_CODE (t1);
7134 code2 = TREE_CODE (t2);
7135
7136 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7137 {
7138 if (CONVERT_EXPR_CODE_P (code2)
7139 || code2 == NON_LVALUE_EXPR)
7140 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7141 else
7142 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7143 }
7144
7145 else if (CONVERT_EXPR_CODE_P (code2)
7146 || code2 == NON_LVALUE_EXPR)
7147 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7148
7149 if (code1 != code2)
7150 return 0;
7151
7152 switch (code1)
7153 {
7154 case INTEGER_CST:
7155 return wi::to_widest (t1) == wi::to_widest (t2);
7156
7157 case REAL_CST:
7158 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7159
7160 case FIXED_CST:
7161 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7162
7163 case STRING_CST:
7164 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7165 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7166 TREE_STRING_LENGTH (t1)));
7167
7168 case CONSTRUCTOR:
7169 {
7170 unsigned HOST_WIDE_INT idx;
7171 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7172 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7173
7174 if (vec_safe_length (v1) != vec_safe_length (v2))
7175 return false;
7176
7177 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7178 /* ??? Should we handle also fields here? */
7179 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7180 return false;
7181 return true;
7182 }
7183
7184 case SAVE_EXPR:
7185 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7186
7187 case CALL_EXPR:
7188 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7189 if (cmp <= 0)
7190 return cmp;
7191 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7192 return 0;
7193 {
7194 const_tree arg1, arg2;
7195 const_call_expr_arg_iterator iter1, iter2;
7196 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7197 arg2 = first_const_call_expr_arg (t2, &iter2);
7198 arg1 && arg2;
7199 arg1 = next_const_call_expr_arg (&iter1),
7200 arg2 = next_const_call_expr_arg (&iter2))
7201 {
7202 cmp = simple_cst_equal (arg1, arg2);
7203 if (cmp <= 0)
7204 return cmp;
7205 }
7206 return arg1 == arg2;
7207 }
7208
7209 case TARGET_EXPR:
7210 /* Special case: if either target is an unallocated VAR_DECL,
7211 it means that it's going to be unified with whatever the
7212 TARGET_EXPR is really supposed to initialize, so treat it
7213 as being equivalent to anything. */
7214 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7215 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7216 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7217 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7218 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7219 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7220 cmp = 1;
7221 else
7222 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7223
7224 if (cmp <= 0)
7225 return cmp;
7226
7227 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7228
7229 case WITH_CLEANUP_EXPR:
7230 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7231 if (cmp <= 0)
7232 return cmp;
7233
7234 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7235
7236 case COMPONENT_REF:
7237 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7238 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7239
7240 return 0;
7241
7242 case VAR_DECL:
7243 case PARM_DECL:
7244 case CONST_DECL:
7245 case FUNCTION_DECL:
7246 return 0;
7247
7248 default:
7249 break;
7250 }
7251
7252 /* This general rule works for most tree codes. All exceptions should be
7253 handled above. If this is a language-specific tree code, we can't
7254 trust what might be in the operand, so say we don't know
7255 the situation. */
7256 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7257 return -1;
7258
7259 switch (TREE_CODE_CLASS (code1))
7260 {
7261 case tcc_unary:
7262 case tcc_binary:
7263 case tcc_comparison:
7264 case tcc_expression:
7265 case tcc_reference:
7266 case tcc_statement:
7267 cmp = 1;
7268 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7269 {
7270 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7271 if (cmp <= 0)
7272 return cmp;
7273 }
7274
7275 return cmp;
7276
7277 default:
7278 return -1;
7279 }
7280 }
7281
7282 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7283 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7284 than U, respectively. */
7285
7286 int
7287 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7288 {
7289 if (tree_int_cst_sgn (t) < 0)
7290 return -1;
7291 else if (!tree_fits_uhwi_p (t))
7292 return 1;
7293 else if (TREE_INT_CST_LOW (t) == u)
7294 return 0;
7295 else if (TREE_INT_CST_LOW (t) < u)
7296 return -1;
7297 else
7298 return 1;
7299 }
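
/* Illustrative sketch, not part of the original sources: a typical use
   bounds-checks a tree constant against a host integer, e.g.

     if (compare_tree_int (DECL_SIZE (field), 64) <= 0)
       ...

   which holds whenever FIELD (a hypothetical FIELD_DECL with a known
   size) occupies at most 64 bits, without ever converting the constant
   to a host integer.  */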
7300
7301 /* Return true if SIZE represents a constant size that is in bounds of
7302 what the middle-end and the backend accept (covering not more than
7303 half of the address space). */
7304
7305 bool
7306 valid_constant_size_p (const_tree size)
7307 {
7308 if (! tree_fits_uhwi_p (size)
7309 || TREE_OVERFLOW (size)
7310 || tree_int_cst_sign_bit (size) != 0)
7311 return false;
7312 return true;
7313 }
7314
7315 /* Return the precision of the type, or for a complex or vector type the
7316 precision of the type of its elements. */
7317
7318 unsigned int
7319 element_precision (const_tree type)
7320 {
7321 enum tree_code code = TREE_CODE (type);
7322 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7323 type = TREE_TYPE (type);
7324
7325 return TYPE_PRECISION (type);
7326 }
7327
7328 /* Return true if CODE represents an associative tree code. Otherwise
7329 return false. */
7330 bool
7331 associative_tree_code (enum tree_code code)
7332 {
7333 switch (code)
7334 {
7335 case BIT_IOR_EXPR:
7336 case BIT_AND_EXPR:
7337 case BIT_XOR_EXPR:
7338 case PLUS_EXPR:
7339 case MULT_EXPR:
7340 case MIN_EXPR:
7341 case MAX_EXPR:
7342 return true;
7343
7344 default:
7345 break;
7346 }
7347 return false;
7348 }
7349
7350 /* Return true if CODE represents a commutative tree code. Otherwise
7351 return false. */
7352 bool
7353 commutative_tree_code (enum tree_code code)
7354 {
7355 switch (code)
7356 {
7357 case PLUS_EXPR:
7358 case MULT_EXPR:
7359 case MULT_HIGHPART_EXPR:
7360 case MIN_EXPR:
7361 case MAX_EXPR:
7362 case BIT_IOR_EXPR:
7363 case BIT_XOR_EXPR:
7364 case BIT_AND_EXPR:
7365 case NE_EXPR:
7366 case EQ_EXPR:
7367 case UNORDERED_EXPR:
7368 case ORDERED_EXPR:
7369 case UNEQ_EXPR:
7370 case LTGT_EXPR:
7371 case TRUTH_AND_EXPR:
7372 case TRUTH_XOR_EXPR:
7373 case TRUTH_OR_EXPR:
7374 case WIDEN_MULT_EXPR:
7375 case VEC_WIDEN_MULT_HI_EXPR:
7376 case VEC_WIDEN_MULT_LO_EXPR:
7377 case VEC_WIDEN_MULT_EVEN_EXPR:
7378 case VEC_WIDEN_MULT_ODD_EXPR:
7379 return true;
7380
7381 default:
7382 break;
7383 }
7384 return false;
7385 }
7386
7387 /* Return true if CODE represents a ternary tree code for which the
7388 first two operands are commutative. Otherwise return false. */
7389 bool
7390 commutative_ternary_tree_code (enum tree_code code)
7391 {
7392 switch (code)
7393 {
7394 case WIDEN_MULT_PLUS_EXPR:
7395 case WIDEN_MULT_MINUS_EXPR:
7396 case DOT_PROD_EXPR:
7397 case FMA_EXPR:
7398 return true;
7399
7400 default:
7401 break;
7402 }
7403 return false;
7404 }
7405
7406 namespace inchash
7407 {
7408
7409 /* Generate a hash value for an expression. This can be used iteratively
7410 by passing a previous result as the HSTATE argument.
7411
7412 This function is intended to produce the same hash for expressions which
7413 would compare equal using operand_equal_p. */
7414 void
7415 add_expr (const_tree t, inchash::hash &hstate)
7416 {
7417 int i;
7418 enum tree_code code;
7419 enum tree_code_class tclass;
7420
7421 if (t == NULL_TREE)
7422 {
7423 hstate.merge_hash (0);
7424 return;
7425 }
7426
7427 code = TREE_CODE (t);
7428
7429 switch (code)
7430 {
7431 /* Alas, constants aren't shared, so we can't rely on pointer
7432 identity. */
7433 case VOID_CST:
7434 hstate.merge_hash (0);
7435 return;
7436 case INTEGER_CST:
7437 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7438 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7439 return;
7440 case REAL_CST:
7441 {
7442 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7443 hstate.merge_hash (val2);
7444 return;
7445 }
7446 case FIXED_CST:
7447 {
7448 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7449 hstate.merge_hash (val2);
7450 return;
7451 }
7452 case STRING_CST:
7453 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7454 return;
7455 case COMPLEX_CST:
7456 inchash::add_expr (TREE_REALPART (t), hstate);
7457 inchash::add_expr (TREE_IMAGPART (t), hstate);
7458 return;
7459 case VECTOR_CST:
7460 {
7461 unsigned i;
7462 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7463 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7464 return;
7465 }
7466 case SSA_NAME:
7467 /* We can just compare by pointer. */
7468 hstate.add_wide_int (SSA_NAME_VERSION (t));
7469 return;
7470 case PLACEHOLDER_EXPR:
7471 /* The node itself doesn't matter. */
7472 return;
7473 case TREE_LIST:
7474 /* A list of expressions, for a CALL_EXPR or as the elements of a
7475 VECTOR_CST. */
7476 for (; t; t = TREE_CHAIN (t))
7477 inchash::add_expr (TREE_VALUE (t), hstate);
7478 return;
7479 case CONSTRUCTOR:
7480 {
7481 unsigned HOST_WIDE_INT idx;
7482 tree field, value;
7483 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7484 {
7485 inchash::add_expr (field, hstate);
7486 inchash::add_expr (value, hstate);
7487 }
7488 return;
7489 }
7490 case FUNCTION_DECL:
7491 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7492 Otherwise nodes that compare equal according to operand_equal_p might
7493 get different hash codes. However, don't do this for machine specific
7494 or front end builtins, since the function code is overloaded in those
7495 cases. */
7496 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7497 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7498 {
7499 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7500 code = TREE_CODE (t);
7501 }
7502 /* FALL THROUGH */
7503 default:
7504 tclass = TREE_CODE_CLASS (code);
7505
7506 if (tclass == tcc_declaration)
7507 {
7508 /* DECLs have a unique ID. */
7509 hstate.add_wide_int (DECL_UID (t));
7510 }
7511 else
7512 {
7513 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7514
7515 hstate.add_object (code);
7516
7517 /* Don't hash the type, that can lead to having nodes which
7518 compare equal according to operand_equal_p, but which
7519 have different hash codes. */
7520 if (CONVERT_EXPR_CODE_P (code)
7521 || code == NON_LVALUE_EXPR)
7522 {
7523 /* Make sure to include signedness in the hash computation. */
7524 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7525 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7526 }
7527
7528 else if (commutative_tree_code (code))
7529 {
7530 /* It's a commutative expression. We want to hash it the same
7531 however it appears. We do this by first hashing both operands
7532 and then rehashing based on the order of their independent
7533 hashes. */
7534 inchash::hash one, two;
7535 inchash::add_expr (TREE_OPERAND (t, 0), one);
7536 inchash::add_expr (TREE_OPERAND (t, 1), two);
7537 hstate.add_commutative (one, two);
7538 }
7539 else
7540 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7541 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7542 }
7543 return;
7544 }
7545 }
7546
7547 }
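
/* Illustrative sketch, not part of the original sources: a client that
   wants a hash consistent with operand_equal_p might write

     inchash::hash hstate;
     inchash::add_expr (expr, hstate);
     hashval_t h = hstate.end ();

   where EXPR is any GENERIC tree; the operands of a commutative
   expression contribute the same hash in either order.  */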
7548
7549 /* Constructors for pointer, array and function types.
7550 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7551 constructed by language-dependent code, not here.) */
7552
7553 /* Construct, lay out and return the type of pointers to TO_TYPE with
7554 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7555 reference all of memory. If such a type has already been
7556 constructed, reuse it. */
7557
7558 tree
7559 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7560 bool can_alias_all)
7561 {
7562 tree t;
7563
7564 if (to_type == error_mark_node)
7565 return error_mark_node;
7566
7567 /* If the pointed-to type has the may_alias attribute set, force
7568 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7569 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7570 can_alias_all = true;
7571
7572 /* In some cases, languages will have things that aren't a POINTER_TYPE
7573 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7574 In that case, return that type without regard to the rest of our
7575 operands.
7576
7577 ??? This is a kludge, but consistent with the way this function has
7578 always operated and there doesn't seem to be a good way to avoid this
7579 at the moment. */
7580 if (TYPE_POINTER_TO (to_type) != 0
7581 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7582 return TYPE_POINTER_TO (to_type);
7583
7584 /* First, if we already have a type for pointers to TO_TYPE and it's
7585 the proper mode, use it. */
7586 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7587 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7588 return t;
7589
7590 t = make_node (POINTER_TYPE);
7591
7592 TREE_TYPE (t) = to_type;
7593 SET_TYPE_MODE (t, mode);
7594 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7595 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7596 TYPE_POINTER_TO (to_type) = t;
7597
7598 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7599 SET_TYPE_STRUCTURAL_EQUALITY (t);
7600 else if (TYPE_CANONICAL (to_type) != to_type)
7601 TYPE_CANONICAL (t)
7602 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7603 mode, can_alias_all);
7604
7605 /* Lay out the type. This function has many callers that are concerned
7606 with expression-construction, and this simplifies them all. */
7607 layout_type (t);
7608
7609 return t;
7610 }
7611
7612 /* By default build pointers in ptr_mode. */
7613
7614 tree
7615 build_pointer_type (tree to_type)
7616 {
7617 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7618 : TYPE_ADDR_SPACE (to_type);
7619 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7620 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7621 }
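
/* Illustrative sketch, not part of the original sources: the common
   case is simply

     tree pchar = build_pointer_type (char_type_node);

   which returns the cached "char *" node on every call after the
   first, since build_pointer_type_for_mode walks the TYPE_NEXT_PTR_TO
   chain before building anything new.  */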
7622
7623 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7624
7625 tree
7626 build_reference_type_for_mode (tree to_type, machine_mode mode,
7627 bool can_alias_all)
7628 {
7629 tree t;
7630
7631 if (to_type == error_mark_node)
7632 return error_mark_node;
7633
7634 /* If the pointed-to type has the may_alias attribute set, force
7635 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7636 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7637 can_alias_all = true;
7638
7639 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7640 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7641 In that case, return that type without regard to the rest of our
7642 operands.
7643
7644 ??? This is a kludge, but consistent with the way this function has
7645 always operated and there doesn't seem to be a good way to avoid this
7646 at the moment. */
7647 if (TYPE_REFERENCE_TO (to_type) != 0
7648 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7649 return TYPE_REFERENCE_TO (to_type);
7650
7651 /* First, if we already have a type for pointers to TO_TYPE and it's
7652 the proper mode, use it. */
7653 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7654 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7655 return t;
7656
7657 t = make_node (REFERENCE_TYPE);
7658
7659 TREE_TYPE (t) = to_type;
7660 SET_TYPE_MODE (t, mode);
7661 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7662 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7663 TYPE_REFERENCE_TO (to_type) = t;
7664
7665 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7666 SET_TYPE_STRUCTURAL_EQUALITY (t);
7667 else if (TYPE_CANONICAL (to_type) != to_type)
7668 TYPE_CANONICAL (t)
7669 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7670 mode, can_alias_all);
7671
7672 layout_type (t);
7673
7674 return t;
7675 }
7676
7677
7678 /* Build the node for the type of references-to-TO_TYPE by default
7679 in ptr_mode. */
7680
7681 tree
7682 build_reference_type (tree to_type)
7683 {
7684 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7685 : TYPE_ADDR_SPACE (to_type);
7686 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7687 return build_reference_type_for_mode (to_type, pointer_mode, false);
7688 }
7689
7690 #define MAX_INT_CACHED_PREC \
7691 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7692 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7693
7694 /* Builds a signed or unsigned integer type of precision PRECISION.
7695 Used for C bitfields whose precision does not match that of
7696 built-in target types. */
7697 tree
7698 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7699 int unsignedp)
7700 {
7701 tree itype, ret;
7702
7703 if (unsignedp)
7704 unsignedp = MAX_INT_CACHED_PREC + 1;
7705
7706 if (precision <= MAX_INT_CACHED_PREC)
7707 {
7708 itype = nonstandard_integer_type_cache[precision + unsignedp];
7709 if (itype)
7710 return itype;
7711 }
7712
7713 itype = make_node (INTEGER_TYPE);
7714 TYPE_PRECISION (itype) = precision;
7715
7716 if (unsignedp)
7717 fixup_unsigned_type (itype);
7718 else
7719 fixup_signed_type (itype);
7720
7721 ret = itype;
7722 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7723 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7724 if (precision <= MAX_INT_CACHED_PREC)
7725 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7726
7727 return ret;
7728 }
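
/* Illustrative sketch, not part of the original sources: a 24-bit
   unsigned bit-field type can be obtained with

     tree u24 = build_nonstandard_integer_type (24, 1);

   repeated calls with the same arguments return the cached node as
   long as the precision does not exceed MAX_INT_CACHED_PREC.  */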
7729
7730 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7731 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7732 is true, reuse such a type that has already been constructed. */
7733
7734 static tree
7735 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7736 {
7737 tree itype = make_node (INTEGER_TYPE);
7738 inchash::hash hstate;
7739
7740 TREE_TYPE (itype) = type;
7741
7742 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7743 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7744
7745 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7746 SET_TYPE_MODE (itype, TYPE_MODE (type));
7747 TYPE_SIZE (itype) = TYPE_SIZE (type);
7748 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7749 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7750 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7751
7752 if (!shared)
7753 return itype;
7754
7755 if ((TYPE_MIN_VALUE (itype)
7756 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7757 || (TYPE_MAX_VALUE (itype)
7758 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7759 {
7760 /* Since we cannot reliably merge this type, we need to compare it using
7761 structural equality checks. */
7762 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7763 return itype;
7764 }
7765
7766 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7767 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7768 hstate.merge_hash (TYPE_HASH (type));
7769 itype = type_hash_canon (hstate.end (), itype);
7770
7771 return itype;
7772 }
7773
7774 /* Wrapper around build_range_type_1 with SHARED set to true. */
7775
7776 tree
7777 build_range_type (tree type, tree lowval, tree highval)
7778 {
7779 return build_range_type_1 (type, lowval, highval, true);
7780 }
7781
7782 /* Wrapper around build_range_type_1 with SHARED set to false. */
7783
7784 tree
7785 build_nonshared_range_type (tree type, tree lowval, tree highval)
7786 {
7787 return build_range_type_1 (type, lowval, highval, false);
7788 }
7789
7790 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7791 MAXVAL should be the maximum value in the domain
7792 (one less than the length of the array).
7793
7794 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7795 We don't enforce this limit; that is up to the caller (e.g. the language
front end).
7796 The limit exists because the result is a signed type and we don't handle
7797 sizes that use more than one HOST_WIDE_INT. */
7798
7799 tree
7800 build_index_type (tree maxval)
7801 {
7802 return build_range_type (sizetype, size_zero_node, maxval);
7803 }
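
/* Illustrative sketch, not part of the original sources (variable names are
   hypothetical): the domain for a 10-element array, i.e. the sizetype range
   [0, 9], and the matching array type can be built as

     tree domain = build_index_type (size_int (9));
     tree arr = build_array_type (integer_type_node, domain);

   build_array_type_nelts further below wraps exactly this pattern.  */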
7804
7805 /* Return true if the debug information for TYPE, a subtype, should be emitted
7806 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7807 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7808 debug info and doesn't reflect the source code. */
7809
7810 bool
7811 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7812 {
7813 tree base_type = TREE_TYPE (type), low, high;
7814
7815 /* Subrange types have a base type which is an integral type. */
7816 if (!INTEGRAL_TYPE_P (base_type))
7817 return false;
7818
7819 /* Get the real bounds of the subtype. */
7820 if (lang_hooks.types.get_subrange_bounds)
7821 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7822 else
7823 {
7824 low = TYPE_MIN_VALUE (type);
7825 high = TYPE_MAX_VALUE (type);
7826 }
7827
7828 /* If the type and its base type have the same representation and the same
7829 name, then the type is not a subrange but a copy of the base type. */
7830 if ((TREE_CODE (base_type) == INTEGER_TYPE
7831 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7832 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7833 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7834 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7835 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7836 return false;
7837
7838 if (lowval)
7839 *lowval = low;
7840 if (highval)
7841 *highval = high;
7842 return true;
7843 }
7844
7845 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7846 and number of elements specified by the range of values of INDEX_TYPE.
7847 If SHARED is true, reuse such a type that has already been constructed. */
7848
7849 static tree
7850 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7851 {
7852 tree t;
7853
7854 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7855 {
7856 error ("arrays of functions are not meaningful");
7857 elt_type = integer_type_node;
7858 }
7859
7860 t = make_node (ARRAY_TYPE);
7861 TREE_TYPE (t) = elt_type;
7862 TYPE_DOMAIN (t) = index_type;
7863 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7864 layout_type (t);
7865
7866 /* If the element type is incomplete at this point we get marked for
7867 structural equality. Do not record these types in the canonical
7868 type hashtable. */
7869 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7870 return t;
7871
7872 if (shared)
7873 {
7874 inchash::hash hstate;
7875 hstate.add_object (TYPE_HASH (elt_type));
7876 if (index_type)
7877 hstate.add_object (TYPE_HASH (index_type));
7878 t = type_hash_canon (hstate.end (), t);
7879 }
7880
7881 if (TYPE_CANONICAL (t) == t)
7882 {
7883 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7884 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7885 SET_TYPE_STRUCTURAL_EQUALITY (t);
7886 else if (TYPE_CANONICAL (elt_type) != elt_type
7887 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7888 TYPE_CANONICAL (t)
7889 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7890 index_type
7891 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7892 shared);
7893 }
7894
7895 return t;
7896 }
7897
7898 /* Wrapper around build_array_type_1 with SHARED set to true. */
7899
7900 tree
7901 build_array_type (tree elt_type, tree index_type)
7902 {
7903 return build_array_type_1 (elt_type, index_type, true);
7904 }
7905
7906 /* Wrapper around build_array_type_1 with SHARED set to false. */
7907
7908 tree
7909 build_nonshared_array_type (tree elt_type, tree index_type)
7910 {
7911 return build_array_type_1 (elt_type, index_type, false);
7912 }
7913
7914 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7915 sizetype. */
7916
7917 tree
7918 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7919 {
7920 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7921 }
7922
7923 /* Recursively examine the array element types of TYPE until a non-array
7924 element type is found, and return it. */
7925
7926 tree
7927 strip_array_types (tree type)
7928 {
7929 while (TREE_CODE (type) == ARRAY_TYPE)
7930 type = TREE_TYPE (type);
7931
7932 return type;
7933 }
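
/* Illustrative sketch, not part of the original sources (variable names are
   hypothetical): for a two-dimensional array type such as int[3][4],

     tree arr2d = build_array_type_nelts (build_array_type_nelts
                                            (integer_type_node, 4), 3);
     tree elt = strip_array_types (arr2d);

   ELT is integer_type_node, the innermost non-array element type.  */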
7934
7935 /* Computes the canonical argument types from the argument type list
7936 ARGTYPES.
7937
7938 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7939 on entry to this function, or if any of the ARGTYPES are
7940 structural.
7941
7942 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7943 true on entry to this function, or if any of the ARGTYPES are
7944 non-canonical.
7945
7946 Returns a canonical argument list, which may be ARGTYPES when the
7947 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7948 true) or would not differ from ARGTYPES. */
7949
7950 static tree
7951 maybe_canonicalize_argtypes (tree argtypes,
7952 bool *any_structural_p,
7953 bool *any_noncanonical_p)
7954 {
7955 tree arg;
7956 bool any_noncanonical_argtypes_p = false;
7957
7958 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7959 {
7960 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7961 /* Fail gracefully by stating that the type is structural. */
7962 *any_structural_p = true;
7963 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7964 *any_structural_p = true;
7965 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7966 || TREE_PURPOSE (arg))
7967 /* If the argument has a default argument, we consider it
7968 non-canonical even though the type itself is canonical.
7969 That way, different variants of function and method types
7970 with default arguments will all point to the variant with
7971 no defaults as their canonical type. */
7972 any_noncanonical_argtypes_p = true;
7973 }
7974
7975 if (*any_structural_p)
7976 return argtypes;
7977
7978 if (any_noncanonical_argtypes_p)
7979 {
7980 /* Build the canonical list of argument types. */
7981 tree canon_argtypes = NULL_TREE;
7982 bool is_void = false;
7983
7984 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7985 {
7986 if (arg == void_list_node)
7987 is_void = true;
7988 else
7989 canon_argtypes = tree_cons (NULL_TREE,
7990 TYPE_CANONICAL (TREE_VALUE (arg)),
7991 canon_argtypes);
7992 }
7993
7994 canon_argtypes = nreverse (canon_argtypes);
7995 if (is_void)
7996 canon_argtypes = chainon (canon_argtypes, void_list_node);
7997
7998 /* There is a non-canonical type. */
7999 *any_noncanonical_p = true;
8000 return canon_argtypes;
8001 }
8002
8003 /* The canonical argument types are the same as ARGTYPES. */
8004 return argtypes;
8005 }
8006
8007 /* Construct, lay out and return
8008 the type of functions returning type VALUE_TYPE
8009 given arguments of types ARG_TYPES.
8010 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8011 are data type nodes for the arguments of the function.
8012 If such a type has already been constructed, reuse it. */
8013
8014 tree
8015 build_function_type (tree value_type, tree arg_types)
8016 {
8017 tree t;
8018 inchash::hash hstate;
8019 bool any_structural_p, any_noncanonical_p;
8020 tree canon_argtypes;
8021
8022 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8023 {
8024 error ("function return type cannot be function");
8025 value_type = integer_type_node;
8026 }
8027
8028 /* Make a node of the sort we want. */
8029 t = make_node (FUNCTION_TYPE);
8030 TREE_TYPE (t) = value_type;
8031 TYPE_ARG_TYPES (t) = arg_types;
8032
8033 /* If we already have such a type, use the old one. */
8034 hstate.add_object (TYPE_HASH (value_type));
8035 type_hash_list (arg_types, hstate);
8036 t = type_hash_canon (hstate.end (), t);
8037
8038 /* Set up the canonical type. */
8039 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8040 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8041 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8042 &any_structural_p,
8043 &any_noncanonical_p);
8044 if (any_structural_p)
8045 SET_TYPE_STRUCTURAL_EQUALITY (t);
8046 else if (any_noncanonical_p)
8047 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8048 canon_argtypes);
8049
8050 if (!COMPLETE_TYPE_P (t))
8051 layout_type (t);
8052 return t;
8053 }
8054
8055 /* Build a function type. The RETURN_TYPE is the type returned by the
8056 function. If VAARGS is set, no void_type_node is appended to the
8057 list. ARGP must always be terminated by a NULL_TREE. */
8058
8059 static tree
8060 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8061 {
8062 tree t, args, last;
8063
8064 t = va_arg (argp, tree);
8065 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8066 args = tree_cons (NULL_TREE, t, args);
8067
8068 if (vaargs)
8069 {
8070 last = args;
8071 if (args != NULL_TREE)
8072 args = nreverse (args);
8073 gcc_assert (last != void_list_node);
8074 }
8075 else if (args == NULL_TREE)
8076 args = void_list_node;
8077 else
8078 {
8079 last = args;
8080 args = nreverse (args);
8081 TREE_CHAIN (last) = void_list_node;
8082 }
8083 args = build_function_type (return_type, args);
8084
8085 return args;
8086 }
8087
8088 /* Build a function type. The RETURN_TYPE is the type returned by the
8089 function. If additional arguments are provided, they are
8090 additional argument types. The list of argument types must always
8091 be terminated by NULL_TREE. */
8092
8093 tree
8094 build_function_type_list (tree return_type, ...)
8095 {
8096 tree args;
8097 va_list p;
8098
8099 va_start (p, return_type);
8100 args = build_function_type_list_1 (false, return_type, p);
8101 va_end (p);
8102 return args;
8103 }
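
/* Illustrative sketch, not part of the original sources (the variable name
   is hypothetical): the type of a function "int f (char *, double)" can be
   built with a single NULL_TREE-terminated call:

     tree fntype = build_function_type_list (integer_type_node,
                                             build_pointer_type (char_type_node),
                                             double_type_node,
                                             NULL_TREE);

   For a prototype ending in "...", use build_varargs_function_type_list
   below, which leaves off the trailing void_list_node.  */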
8104
8105 /* Build a variable argument function type. The RETURN_TYPE is the
8106 type returned by the function. If additional arguments are provided,
8107 they are additional argument types. The list of argument types must
8108 always be terminated by NULL_TREE. */
8109
8110 tree
8111 build_varargs_function_type_list (tree return_type, ...)
8112 {
8113 tree args;
8114 va_list p;
8115
8116 va_start (p, return_type);
8117 args = build_function_type_list_1 (true, return_type, p);
8118 va_end (p);
8119
8120 return args;
8121 }
8122
8123 /* Build a function type. RETURN_TYPE is the type returned by the
8124 function; VAARGS indicates whether the function takes varargs. The
8125 function takes N named arguments, the types of which are provided in
8126 ARG_TYPES. */
8127
8128 static tree
8129 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8130 tree *arg_types)
8131 {
8132 int i;
8133 tree t = vaargs ? NULL_TREE : void_list_node;
8134
8135 for (i = n - 1; i >= 0; i--)
8136 t = tree_cons (NULL_TREE, arg_types[i], t);
8137
8138 return build_function_type (return_type, t);
8139 }
8140
8141 /* Build a function type. RETURN_TYPE is the type returned by the
8142 function. The function takes N named arguments, the types of which
8143 are provided in ARG_TYPES. */
8144
8145 tree
8146 build_function_type_array (tree return_type, int n, tree *arg_types)
8147 {
8148 return build_function_type_array_1 (false, return_type, n, arg_types);
8149 }
8150
8151 /* Build a variable argument function type. RETURN_TYPE is the type
8152 returned by the function. The function takes N named arguments, the
8153 types of which are provided in ARG_TYPES. */
8154
8155 tree
8156 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8157 {
8158 return build_function_type_array_1 (true, return_type, n, arg_types);
8159 }
8160
8161 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8162 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8163 for the method. An implicit additional parameter (of type
8164 pointer-to-BASETYPE) is added to the ARGTYPES. */
8165
8166 tree
8167 build_method_type_directly (tree basetype,
8168 tree rettype,
8169 tree argtypes)
8170 {
8171 tree t;
8172 tree ptype;
8173 inchash::hash hstate;
8174 bool any_structural_p, any_noncanonical_p;
8175 tree canon_argtypes;
8176
8177 /* Make a node of the sort we want. */
8178 t = make_node (METHOD_TYPE);
8179
8180 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8181 TREE_TYPE (t) = rettype;
8182 ptype = build_pointer_type (basetype);
8183
8184 /* The actual arglist for this function includes a "hidden" argument
8185 which is "this". Put it into the list of argument types. */
8186 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8187 TYPE_ARG_TYPES (t) = argtypes;
8188
8189 /* If we already have such a type, use the old one. */
8190 hstate.add_object (TYPE_HASH (basetype));
8191 hstate.add_object (TYPE_HASH (rettype));
8192 type_hash_list (argtypes, hstate);
8193 t = type_hash_canon (hstate.end (), t);
8194
8195 /* Set up the canonical type. */
8196 any_structural_p
8197 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8198 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8199 any_noncanonical_p
8200 = (TYPE_CANONICAL (basetype) != basetype
8201 || TYPE_CANONICAL (rettype) != rettype);
8202 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8203 &any_structural_p,
8204 &any_noncanonical_p);
8205 if (any_structural_p)
8206 SET_TYPE_STRUCTURAL_EQUALITY (t);
8207 else if (any_noncanonical_p)
8208 TYPE_CANONICAL (t)
8209 = build_method_type_directly (TYPE_CANONICAL (basetype),
8210 TYPE_CANONICAL (rettype),
8211 canon_argtypes);
8212 if (!COMPLETE_TYPE_P (t))
8213 layout_type (t);
8214
8215 return t;
8216 }
8217
8218 /* Construct, lay out and return the type of methods belonging to class
8219 BASETYPE and whose arguments and values are described by TYPE.
8220 If that type exists already, reuse it.
8221 TYPE must be a FUNCTION_TYPE node. */
8222
8223 tree
8224 build_method_type (tree basetype, tree type)
8225 {
8226 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8227
8228 return build_method_type_directly (basetype,
8229 TREE_TYPE (type),
8230 TYPE_ARG_TYPES (type));
8231 }
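
/* Illustrative sketch, not part of the original sources (KLASS is assumed
   to be a RECORD_TYPE built elsewhere by the front end; variable names are
   hypothetical): the METHOD_TYPE of a member function taking a double and
   returning int, including the implicit "pointer to KLASS" first argument,
   can be derived from the plain FUNCTION_TYPE:

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);
     tree mtype = build_method_type (klass, fntype);  */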
8232
8233 /* Construct, lay out and return the type of offsets to a value
8234 of type TYPE, within an object of type BASETYPE.
8235 If a suitable offset type exists already, reuse it. */
8236
8237 tree
8238 build_offset_type (tree basetype, tree type)
8239 {
8240 tree t;
8241 inchash::hash hstate;
8242
8243 /* Make a node of the sort we want. */
8244 t = make_node (OFFSET_TYPE);
8245
8246 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8247 TREE_TYPE (t) = type;
8248
8249 /* If we already have such a type, use the old one. */
8250 hstate.add_object (TYPE_HASH (basetype));
8251 hstate.add_object (TYPE_HASH (type));
8252 t = type_hash_canon (hstate.end (), t);
8253
8254 if (!COMPLETE_TYPE_P (t))
8255 layout_type (t);
8256
8257 if (TYPE_CANONICAL (t) == t)
8258 {
8259 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8260 || TYPE_STRUCTURAL_EQUALITY_P (type))
8261 SET_TYPE_STRUCTURAL_EQUALITY (t);
8262 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8263 || TYPE_CANONICAL (type) != type)
8264 TYPE_CANONICAL (t)
8265 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8266 TYPE_CANONICAL (type));
8267 }
8268
8269 return t;
8270 }
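
/* Illustrative sketch, not part of the original sources (KLASS is assumed
   to be a RECORD_TYPE built elsewhere; the variable name is hypothetical):
   a C++-style pointer to data member such as "int KLASS::*" can be
   represented with an OFFSET_TYPE built roughly as

     tree ptrmem = build_offset_type (klass, integer_type_node);  */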
8271
8272 /* Create a complex type whose components are COMPONENT_TYPE. */
8273
8274 tree
8275 build_complex_type (tree component_type)
8276 {
8277 tree t;
8278 inchash::hash hstate;
8279
8280 gcc_assert (INTEGRAL_TYPE_P (component_type)
8281 || SCALAR_FLOAT_TYPE_P (component_type)
8282 || FIXED_POINT_TYPE_P (component_type));
8283
8284 /* Make a node of the sort we want. */
8285 t = make_node (COMPLEX_TYPE);
8286
8287 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8288
8289 /* If we already have such a type, use the old one. */
8290 hstate.add_object (TYPE_HASH (component_type));
8291 t = type_hash_canon (hstate.end (), t);
8292
8293 if (!COMPLETE_TYPE_P (t))
8294 layout_type (t);
8295
8296 if (TYPE_CANONICAL (t) == t)
8297 {
8298 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8299 SET_TYPE_STRUCTURAL_EQUALITY (t);
8300 else if (TYPE_CANONICAL (component_type) != component_type)
8301 TYPE_CANONICAL (t)
8302 = build_complex_type (TYPE_CANONICAL (component_type));
8303 }
8304
8305 /* We need to create a name, since complex is a fundamental type. */
8306 if (! TYPE_NAME (t))
8307 {
8308 const char *name;
8309 if (component_type == char_type_node)
8310 name = "complex char";
8311 else if (component_type == signed_char_type_node)
8312 name = "complex signed char";
8313 else if (component_type == unsigned_char_type_node)
8314 name = "complex unsigned char";
8315 else if (component_type == short_integer_type_node)
8316 name = "complex short int";
8317 else if (component_type == short_unsigned_type_node)
8318 name = "complex short unsigned int";
8319 else if (component_type == integer_type_node)
8320 name = "complex int";
8321 else if (component_type == unsigned_type_node)
8322 name = "complex unsigned int";
8323 else if (component_type == long_integer_type_node)
8324 name = "complex long int";
8325 else if (component_type == long_unsigned_type_node)
8326 name = "complex long unsigned int";
8327 else if (component_type == long_long_integer_type_node)
8328 name = "complex long long int";
8329 else if (component_type == long_long_unsigned_type_node)
8330 name = "complex long long unsigned int";
8331 else
8332 name = 0;
8333
8334 if (name != 0)
8335 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8336 get_identifier (name), t);
8337 }
8338
8339 return build_qualified_type (t, TYPE_QUALS (component_type));
8340 }
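
/* Illustrative sketch, not part of the original sources (the variable name
   is hypothetical): a complex type with "double" components, equivalent to
   the prebuilt complex_double_type_node, can be obtained as

     tree cdouble = build_complex_type (double_type_node);

   the type hash ensures that a second call returns the same node.  */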
8341
8342 /* If TYPE is a real or complex floating-point type and the target
8343 does not directly support arithmetic on TYPE then return the wider
8344 type to be used for arithmetic on TYPE. Otherwise, return
8345 NULL_TREE. */
8346
8347 tree
8348 excess_precision_type (tree type)
8349 {
8350 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8351 {
8352 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8353 switch (TREE_CODE (type))
8354 {
8355 case REAL_TYPE:
8356 switch (flt_eval_method)
8357 {
8358 case 1:
8359 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8360 return double_type_node;
8361 break;
8362 case 2:
8363 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8364 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8365 return long_double_type_node;
8366 break;
8367 default:
8368 gcc_unreachable ();
8369 }
8370 break;
8371 case COMPLEX_TYPE:
8372 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8373 return NULL_TREE;
8374 switch (flt_eval_method)
8375 {
8376 case 1:
8377 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8378 return complex_double_type_node;
8379 break;
8380 case 2:
8381 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8382 || (TYPE_MODE (TREE_TYPE (type))
8383 == TYPE_MODE (double_type_node)))
8384 return complex_long_double_type_node;
8385 break;
8386 default:
8387 gcc_unreachable ();
8388 }
8389 break;
8390 default:
8391 break;
8392 }
8393 }
8394 return NULL_TREE;
8395 }
8396 \f
8397 /* Return OP, stripped of any conversions to wider types as much as is safe.
8398 Converting the value back to OP's type makes a value equivalent to OP.
8399
8400 If FOR_TYPE is nonzero, we return a value which, if converted to
8401 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8402
8403 OP must have integer, real or enumeral type. Pointers are not allowed!
8404
8405 There are some cases where the obvious value we could return
8406 would regenerate to OP if converted to OP's type,
8407 but would not extend like OP to wider types.
8408 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8409 For example, if OP is (unsigned short)(signed char)-1,
8410 we avoid returning (signed char)-1 if FOR_TYPE is int,
8411 even though extending that to an unsigned short would regenerate OP,
8412 since the result of extending (signed char)-1 to (int)
8413 is different from (int) OP. */
8414
8415 tree
8416 get_unwidened (tree op, tree for_type)
8417 {
8418 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8419 tree type = TREE_TYPE (op);
8420 unsigned final_prec
8421 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8422 int uns
8423 = (for_type != 0 && for_type != type
8424 && final_prec > TYPE_PRECISION (type)
8425 && TYPE_UNSIGNED (type));
8426 tree win = op;
8427
8428 while (CONVERT_EXPR_P (op))
8429 {
8430 int bitschange;
8431
8432 /* TYPE_PRECISION on vector types has different meaning
8433 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8434 so avoid them here. */
8435 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8436 break;
8437
8438 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8439 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8440
8441 /* Truncations are many-one so cannot be removed,
8442 unless we are later going to truncate down even further. */
8443 if (bitschange < 0
8444 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8445 break;
8446
8447 /* See what's inside this conversion. If we decide to strip it,
8448 we will set WIN. */
8449 op = TREE_OPERAND (op, 0);
8450
8451 /* If we have not stripped any zero-extensions (uns is 0),
8452 we can strip any kind of extension.
8453 If we have previously stripped a zero-extension,
8454 only zero-extensions can safely be stripped.
8455 Any extension can be stripped if the bits it would produce
8456 are all going to be discarded later by truncating to FOR_TYPE. */
8457
8458 if (bitschange > 0)
8459 {
8460 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8461 win = op;
8462 /* TYPE_UNSIGNED says whether this is a zero-extension.
8463 Let's avoid computing it if it does not affect WIN
8464 and if UNS will not be needed again. */
8465 if ((uns
8466 || CONVERT_EXPR_P (op))
8467 && TYPE_UNSIGNED (TREE_TYPE (op)))
8468 {
8469 uns = 1;
8470 win = op;
8471 }
8472 }
8473 }
8474
8475 /* If we finally reach a constant, see if it fits in FOR_TYPE and
8476 in that case convert it. */
8477 if (for_type
8478 && TREE_CODE (win) == INTEGER_CST
8479 && TREE_TYPE (win) != for_type
8480 && int_fits_type_p (win, for_type))
8481 win = fold_convert (for_type, win);
8482
8483 return win;
8484 }
8485 \f
8486 /* Return OP or a simpler expression for a narrower value
8487 which can be sign-extended or zero-extended to give back OP.
8488 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8489 or 0 if the value should be sign-extended. */
8490
8491 tree
8492 get_narrower (tree op, int *unsignedp_ptr)
8493 {
8494 int uns = 0;
8495 int first = 1;
8496 tree win = op;
8497 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8498
8499 while (TREE_CODE (op) == NOP_EXPR)
8500 {
8501 int bitschange
8502 = (TYPE_PRECISION (TREE_TYPE (op))
8503 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8504
8505 /* Truncations are many-one so cannot be removed. */
8506 if (bitschange < 0)
8507 break;
8508
8509 /* See what's inside this conversion. If we decide to strip it,
8510 we will set WIN. */
8511
8512 if (bitschange > 0)
8513 {
8514 op = TREE_OPERAND (op, 0);
8515 /* An extension: the outermost one can be stripped,
8516 but remember whether it is zero or sign extension. */
8517 if (first)
8518 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8519 /* Otherwise, if a sign extension has been stripped,
8520 only sign extensions can now be stripped;
8521 if a zero extension has been stripped, only zero-extensions. */
8522 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8523 break;
8524 first = 0;
8525 }
8526 else /* bitschange == 0 */
8527 {
8528 /* A change in nominal type can always be stripped, but we must
8529 preserve the unsignedness. */
8530 if (first)
8531 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8532 first = 0;
8533 op = TREE_OPERAND (op, 0);
8534 /* Keep trying to narrow, but don't assign op to win if it
8535 would turn an integral type into something else. */
8536 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8537 continue;
8538 }
8539
8540 win = op;
8541 }
8542
8543 if (TREE_CODE (op) == COMPONENT_REF
8544 /* Since type_for_size always gives an integer type. */
8545 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8546 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8547 /* Ensure field is laid out already. */
8548 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8549 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8550 {
8551 unsigned HOST_WIDE_INT innerprec
8552 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8553 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8554 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8555 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8556
8557 /* We can get this structure field in a narrower type that fits it,
8558 but the resulting extension to its nominal type (a fullword type)
8559 must satisfy the same conditions as for other extensions.
8560
8561 Do this only for fields that are aligned (not bit-fields),
8562 because when bit-field insns will be used there is no
8563 advantage in doing this. */
8564
8565 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8566 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8567 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8568 && type != 0)
8569 {
8570 if (first)
8571 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8572 win = fold_convert (type, op);
8573 }
8574 }
8575
8576 *unsignedp_ptr = uns;
8577 return win;
8578 }
8579 \f
8580 /* Returns true if integer constant C has a value that is permissible
8581 for type TYPE (an INTEGER_TYPE). */
8582
8583 bool
8584 int_fits_type_p (const_tree c, const_tree type)
8585 {
8586 tree type_low_bound, type_high_bound;
8587 bool ok_for_low_bound, ok_for_high_bound;
8588 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8589
8590 retry:
8591 type_low_bound = TYPE_MIN_VALUE (type);
8592 type_high_bound = TYPE_MAX_VALUE (type);
8593
8594 /* If at least one bound of the type is a constant integer, we can check
8595 ourselves and maybe make a decision. If no such decision is possible, but
8596 this type is a subtype, try checking against that. Otherwise, use
8597 fits_to_tree_p, which checks against the precision.
8598
8599 Compute the status for each possibly constant bound, and return if we see
8600 one does not match. Use ok_for_xxx_bound for this purpose: it is false
8601 when it is unknown whether the constant fits (the bound is not constant)
8602 and true when the constant is known to fit that bound. */
8603
8604 /* Check if c >= type_low_bound. */
8605 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8606 {
8607 if (tree_int_cst_lt (c, type_low_bound))
8608 return false;
8609 ok_for_low_bound = true;
8610 }
8611 else
8612 ok_for_low_bound = false;
8613
8614 /* Check if c <= type_high_bound. */
8615 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8616 {
8617 if (tree_int_cst_lt (type_high_bound, c))
8618 return false;
8619 ok_for_high_bound = true;
8620 }
8621 else
8622 ok_for_high_bound = false;
8623
8624 /* If the constant fits both bounds, the result is known. */
8625 if (ok_for_low_bound && ok_for_high_bound)
8626 return true;
8627
8628 /* Perform some generic filtering which may allow making a decision
8629 even if the bounds are not constant. First, negative integers
8630 never fit in unsigned types. */
8631 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8632 return false;
8633
8634 /* Second, narrower types always fit in wider ones. */
8635 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8636 return true;
8637
8638 /* Third, unsigned integers with top bit set never fit signed types. */
8639 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8640 {
8641 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8642 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8643 {
8644 /* When a tree_cst is converted to a wide-int, the precision
8645 is taken from the type. However, if the precision of the
8646 mode underneath the type is smaller than that, it is
8647 possible that the value will not fit. The test below
8648 fails if any bit is set between the sign bit of the
8649 underlying mode and the top bit of the type. */
8650 if (wi::ne_p (wi::zext (c, prec - 1), c))
8651 return false;
8652 }
8653 else if (wi::neg_p (c))
8654 return false;
8655 }
8656
8657 /* If we haven't been able to decide at this point, there is nothing more we
8658 can check ourselves here. Look at the base type if we have one and it
8659 has the same precision. */
8660 if (TREE_CODE (type) == INTEGER_TYPE
8661 && TREE_TYPE (type) != 0
8662 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8663 {
8664 type = TREE_TYPE (type);
8665 goto retry;
8666 }
8667
8668 /* Or to fits_to_tree_p, if nothing else. */
8669 return wi::fits_to_tree_p (c, type);
8670 }
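
/* Illustrative sketch, not part of the original sources (variable names are
   hypothetical): whether the constant 300 is representable in "signed char"
   can be checked as

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, signed_char_type_node);

   FITS is false here, since both bounds of signed char are constant and 300
   exceeds its TYPE_MAX_VALUE.  */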
8671
8672 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8673 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8674 represented (assuming two's-complement arithmetic) within the bit
8675 precision of the type are returned instead. */
8676
8677 void
8678 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8679 {
8680 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8681 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8682 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8683 else
8684 {
8685 if (TYPE_UNSIGNED (type))
8686 mpz_set_ui (min, 0);
8687 else
8688 {
8689 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8690 wi::to_mpz (mn, min, SIGNED);
8691 }
8692 }
8693
8694 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8695 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8696 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8697 else
8698 {
8699 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8700 wi::to_mpz (mn, max, TYPE_SIGN (type));
8701 }
8702 }
8703
8704 /* Return true if VAR is an automatic variable defined in function FN. */
8705
8706 bool
8707 auto_var_in_fn_p (const_tree var, const_tree fn)
8708 {
8709 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8710 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8711 || TREE_CODE (var) == PARM_DECL)
8712 && ! TREE_STATIC (var))
8713 || TREE_CODE (var) == LABEL_DECL
8714 || TREE_CODE (var) == RESULT_DECL));
8715 }
8716
8717 /* Subprogram of following function. Called by walk_tree.
8718
8719 Return *TP if it is an automatic variable or parameter of the
8720 function passed in as DATA. */
8721
8722 static tree
8723 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8724 {
8725 tree fn = (tree) data;
8726
8727 if (TYPE_P (*tp))
8728 *walk_subtrees = 0;
8729
8730 else if (DECL_P (*tp)
8731 && auto_var_in_fn_p (*tp, fn))
8732 return *tp;
8733
8734 return NULL_TREE;
8735 }
8736
8737 /* Returns true if T is, contains, or refers to a type with variable
8738 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8739 arguments, but not the return type. If FN is nonzero, only return
8740 true if a modifier of the type or position of FN is a variable or
8741 parameter inside FN.
8742
8743 This concept is more general than that of C99 'variably modified types':
8744 in C99, a struct type is never variably modified because a VLA may not
8745 appear as a structure member. However, in GNU C, code like:
8746
8747 struct S { int i[f()]; };
8748
8749 is valid, and other languages may define similar constructs. */
8750
8751 bool
8752 variably_modified_type_p (tree type, tree fn)
8753 {
8754 tree t;
8755
8756 /* Test if T is either variable (if FN is zero) or an expression containing
8757 a variable in FN. If TYPE isn't gimplified, return true also if
8758 gimplify_one_sizepos would gimplify the expression into a local
8759 variable. */
8760 #define RETURN_TRUE_IF_VAR(T) \
8761 do { tree _t = (T); \
8762 if (_t != NULL_TREE \
8763 && _t != error_mark_node \
8764 && TREE_CODE (_t) != INTEGER_CST \
8765 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8766 && (!fn \
8767 || (!TYPE_SIZES_GIMPLIFIED (type) \
8768 && !is_gimple_sizepos (_t)) \
8769 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8770 return true; } while (0)
8771
8772 if (type == error_mark_node)
8773 return false;
8774
8775 /* If TYPE itself has variable size, it is variably modified. */
8776 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8777 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8778
8779 switch (TREE_CODE (type))
8780 {
8781 case POINTER_TYPE:
8782 case REFERENCE_TYPE:
8783 case VECTOR_TYPE:
8784 if (variably_modified_type_p (TREE_TYPE (type), fn))
8785 return true;
8786 break;
8787
8788 case FUNCTION_TYPE:
8789 case METHOD_TYPE:
8790 /* If TYPE is a function type, it is variably modified if the
8791 return type is variably modified. */
8792 if (variably_modified_type_p (TREE_TYPE (type), fn))
8793 return true;
8794 break;
8795
8796 case INTEGER_TYPE:
8797 case REAL_TYPE:
8798 case FIXED_POINT_TYPE:
8799 case ENUMERAL_TYPE:
8800 case BOOLEAN_TYPE:
8801 /* Scalar types are variably modified if their end points
8802 aren't constant. */
8803 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8804 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8805 break;
8806
8807 case RECORD_TYPE:
8808 case UNION_TYPE:
8809 case QUAL_UNION_TYPE:
8810 /* We can't see if any of the fields are variably-modified by the
8811 definition we normally use, since that would produce infinite
8812 recursion via pointers. */
8813 /* This is variably modified if some field's type is. */
8814 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8815 if (TREE_CODE (t) == FIELD_DECL)
8816 {
8817 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8818 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8819 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8820
8821 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8822 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8823 }
8824 break;
8825
8826 case ARRAY_TYPE:
8827 /* Do not call ourselves to avoid infinite recursion. This is
8828 variably modified if the element type is. */
8829 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8830 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8831 break;
8832
8833 default:
8834 break;
8835 }
8836
8837 /* The current language may have other cases to check, but in general,
8838 all other types are not variably modified. */
8839 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8840
8841 #undef RETURN_TRUE_IF_VAR
8842 }
8843
8844 /* Given a DECL or TYPE, return the scope in which it was declared, or
8845 NULL_TREE if there is no containing scope. */
8846
8847 tree
8848 get_containing_scope (const_tree t)
8849 {
8850 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8851 }
8852
8853 /* Return the innermost context enclosing DECL that is
8854 a FUNCTION_DECL, or zero if none. */
8855
8856 tree
8857 decl_function_context (const_tree decl)
8858 {
8859 tree context;
8860
8861 if (TREE_CODE (decl) == ERROR_MARK)
8862 return 0;
8863
8864 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8865 where we look up the function at runtime. Such functions always take
8866 a first argument of type 'pointer to real context'.
8867
8868 C++ should really be fixed to use DECL_CONTEXT for the real context,
8869 and use something else for the "virtual context". */
8870 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8871 context
8872 = TYPE_MAIN_VARIANT
8873 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8874 else
8875 context = DECL_CONTEXT (decl);
8876
8877 while (context && TREE_CODE (context) != FUNCTION_DECL)
8878 {
8879 if (TREE_CODE (context) == BLOCK)
8880 context = BLOCK_SUPERCONTEXT (context);
8881 else
8882 context = get_containing_scope (context);
8883 }
8884
8885 return context;
8886 }
8887
8888 /* Return the innermost context enclosing DECL that is
8889 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8890 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8891
8892 tree
8893 decl_type_context (const_tree decl)
8894 {
8895 tree context = DECL_CONTEXT (decl);
8896
8897 while (context)
8898 switch (TREE_CODE (context))
8899 {
8900 case NAMESPACE_DECL:
8901 case TRANSLATION_UNIT_DECL:
8902 return NULL_TREE;
8903
8904 case RECORD_TYPE:
8905 case UNION_TYPE:
8906 case QUAL_UNION_TYPE:
8907 return context;
8908
8909 case TYPE_DECL:
8910 case FUNCTION_DECL:
8911 context = DECL_CONTEXT (context);
8912 break;
8913
8914 case BLOCK:
8915 context = BLOCK_SUPERCONTEXT (context);
8916 break;
8917
8918 default:
8919 gcc_unreachable ();
8920 }
8921
8922 return NULL_TREE;
8923 }
8924
8925 /* CALL is a CALL_EXPR. Return the declaration for the function
8926 called, or NULL_TREE if the called function cannot be
8927 determined. */
8928
8929 tree
8930 get_callee_fndecl (const_tree call)
8931 {
8932 tree addr;
8933
8934 if (call == error_mark_node)
8935 return error_mark_node;
8936
8937 /* It's invalid to call this function with anything but a
8938 CALL_EXPR. */
8939 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8940
8941 /* The first operand to the CALL is the address of the function
8942 called. */
8943 addr = CALL_EXPR_FN (call);
8944
8945 /* If there is no function, return early. */
8946 if (addr == NULL_TREE)
8947 return NULL_TREE;
8948
8949 STRIP_NOPS (addr);
8950
8951 /* If this is a readonly function pointer, extract its initial value. */
8952 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8953 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8954 && DECL_INITIAL (addr))
8955 addr = DECL_INITIAL (addr);
8956
8957 /* If the address is just `&f' for some function `f', then we know
8958 that `f' is being called. */
8959 if (TREE_CODE (addr) == ADDR_EXPR
8960 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8961 return TREE_OPERAND (addr, 0);
8962
8963 /* We couldn't figure out what was being called. */
8964 return NULL_TREE;
8965 }
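
/* Illustrative sketch, not part of the original sources (variable names are
   hypothetical): a pass holding a CALL_EXPR can ask for the callee and, when
   it is known, inspect the declaration:

     tree fndecl = get_callee_fndecl (call);
     if (fndecl && DECL_BUILT_IN (fndecl))
       ... handle the known callee ...

   Indirect calls through a non-constant function pointer simply yield
   NULL_TREE.  */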
8966
8967 /* Print debugging information about tree nodes generated during the compile,
8968 and any language-specific information. */
8969
8970 void
8971 dump_tree_statistics (void)
8972 {
8973 if (GATHER_STATISTICS)
8974 {
8975 int i;
8976 int total_nodes, total_bytes;
8977 fprintf (stderr, "Kind Nodes Bytes\n");
8978 fprintf (stderr, "---------------------------------------\n");
8979 total_nodes = total_bytes = 0;
8980 for (i = 0; i < (int) all_kinds; i++)
8981 {
8982 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8983 tree_node_counts[i], tree_node_sizes[i]);
8984 total_nodes += tree_node_counts[i];
8985 total_bytes += tree_node_sizes[i];
8986 }
8987 fprintf (stderr, "---------------------------------------\n");
8988 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8989 fprintf (stderr, "---------------------------------------\n");
8990 fprintf (stderr, "Code Nodes\n");
8991 fprintf (stderr, "----------------------------\n");
8992 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8993 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8994 tree_code_counts[i]);
8995 fprintf (stderr, "----------------------------\n");
8996 ssanames_print_statistics ();
8997 phinodes_print_statistics ();
8998 }
8999 else
9000 fprintf (stderr, "(No per-node statistics)\n");
9001
9002 print_type_hash_statistics ();
9003 print_debug_expr_statistics ();
9004 print_value_expr_statistics ();
9005 lang_hooks.print_statistics ();
9006 }
9007 \f
9008 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9009
9010 /* Generate a crc32 of the high BITS bits of VALUE. */
9011
9012 static unsigned
9013 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9014 {
9015 unsigned ix;
9016
9017 for (ix = bits; ix--; value <<= 1)
9018 {
9019 unsigned feedback;
9020
9021 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9022 chksum <<= 1;
9023 chksum ^= feedback;
9024 }
9025 return chksum;
9026 }
9027
9028 /* Generate a crc32 of a 32-bit unsigned. */
9029
9030 unsigned
9031 crc32_unsigned (unsigned chksum, unsigned value)
9032 {
9033 return crc32_unsigned_bits (chksum, value, 32);
9034 }
9035
9036 /* Generate a crc32 of a byte. */
9037
9038 unsigned
9039 crc32_byte (unsigned chksum, char byte)
9040 {
9041 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9042 }
9043
9044 /* Generate a crc32 of a string. */
9045
9046 unsigned
9047 crc32_string (unsigned chksum, const char *string)
9048 {
9049 do
9050 {
9051 chksum = crc32_byte (chksum, *string);
9052 }
9053 while (*string++);
9054 return chksum;
9055 }
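
/* Illustrative sketch, not part of the original sources (the variable name
   is hypothetical): the helpers chain, so hashing a string followed by a
   32-bit value is simply

     unsigned chksum = crc32_string (0, "some_symbol");
     chksum = crc32_unsigned (chksum, 0xdeadbeef);

   get_file_function_name below uses crc32_string this way to mix a global
   object name into a link-unique symbol.  */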
9056
9057 /* P is a string that will be used in a symbol. Mask out any characters
9058 that are not valid in that context. */
9059
9060 void
9061 clean_symbol_name (char *p)
9062 {
9063 for (; *p; p++)
9064 if (! (ISALNUM (*p)
9065 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9066 || *p == '$'
9067 #endif
9068 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9069 || *p == '.'
9070 #endif
9071 ))
9072 *p = '_';
9073 }
9074
9075 /* Generate a name for a special-purpose function.
9076 The generated name may need to be unique across the whole link.
9077 Changes to this function may also require corresponding changes to
9078 xstrdup_mask_random.
9079 TYPE is some string to identify the purpose of this function to the
9080 linker or collect2; it must start with an uppercase letter,
9081 one of:
9082 I - for constructors
9083 D - for destructors
9084 N - for C++ anonymous namespaces
9085 F - for DWARF unwind frame information. */
9086
9087 tree
9088 get_file_function_name (const char *type)
9089 {
9090 char *buf;
9091 const char *p;
9092 char *q;
9093
9094 /* If we already have a name we know to be unique, just use that. */
9095 if (first_global_object_name)
9096 p = q = ASTRDUP (first_global_object_name);
9097 /* If the target is handling the constructors/destructors, they
9098 will be local to this file and the name is only necessary for
9099 debugging purposes.
9100 We also assign sub_I and sub_D suffixes to constructors called from
9101 the global static constructors. These are always local. */
9102 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9103 || (strncmp (type, "sub_", 4) == 0
9104 && (type[4] == 'I' || type[4] == 'D')))
9105 {
9106 const char *file = main_input_filename;
9107 if (! file)
9108 file = LOCATION_FILE (input_location);
9109 /* Just use the file's basename, because the full pathname
9110 might be quite long. */
9111 p = q = ASTRDUP (lbasename (file));
9112 }
9113 else
9114 {
9115 /* Otherwise, the name must be unique across the entire link.
9116 We don't have anything that we know to be unique to this translation
9117 unit, so use what we do have and throw in some randomness. */
9118 unsigned len;
9119 const char *name = weak_global_object_name;
9120 const char *file = main_input_filename;
9121
9122 if (! name)
9123 name = "";
9124 if (! file)
9125 file = LOCATION_FILE (input_location);
9126
9127 len = strlen (file);
9128 q = (char *) alloca (9 + 17 + len + 1);
9129 memcpy (q, file, len + 1);
9130
9131 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9132 crc32_string (0, name), get_random_seed (false));
9133
9134 p = q;
9135 }
9136
9137 clean_symbol_name (q);
9138 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9139 + strlen (type));
9140
9141 /* Set up the name of the file-level functions we may need.
9142 Use a global object (which is already required to be unique over
9143 the program) rather than the file name (which imposes extra
9144 constraints). */
9145 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9146
9147 return get_identifier (buf);
9148 }
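
/* Illustrative sketch, not part of the original sources (the variable name
   is hypothetical): a static-constructor function for this translation unit
   might be named via

     tree ctor_name = get_file_function_name ("I");

   giving an IDENTIFIER_NODE of the form "_GLOBAL__I_..." per
   FILE_FUNCTION_FORMAT, with characters not valid in symbols already
   replaced by clean_symbol_name.  */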
9149 \f
9150 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9151
9152 /* Complain that the tree code of NODE does not match the expected 0
9153 terminated list of trailing codes. The trailing code list can be
9154 empty, for a more vague error message. FILE, LINE, and FUNCTION
9155 are of the caller. */
9156
9157 void
9158 tree_check_failed (const_tree node, const char *file,
9159 int line, const char *function, ...)
9160 {
9161 va_list args;
9162 const char *buffer;
9163 unsigned length = 0;
9164 enum tree_code code;
9165
9166 va_start (args, function);
9167 while ((code = (enum tree_code) va_arg (args, int)))
9168 length += 4 + strlen (get_tree_code_name (code));
9169 va_end (args);
9170 if (length)
9171 {
9172 char *tmp;
9173 va_start (args, function);
9174 length += strlen ("expected ");
9175 buffer = tmp = (char *) alloca (length);
9176 length = 0;
9177 while ((code = (enum tree_code) va_arg (args, int)))
9178 {
9179 const char *prefix = length ? " or " : "expected ";
9180
9181 strcpy (tmp + length, prefix);
9182 length += strlen (prefix);
9183 strcpy (tmp + length, get_tree_code_name (code));
9184 length += strlen (get_tree_code_name (code));
9185 }
9186 va_end (args);
9187 }
9188 else
9189 buffer = "unexpected node";
9190
9191 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9192 buffer, get_tree_code_name (TREE_CODE (node)),
9193 function, trim_filename (file), line);
9194 }
9195
9196 /* Complain that the tree code of NODE does match the expected 0
9197 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9198 the caller. */
9199
9200 void
9201 tree_not_check_failed (const_tree node, const char *file,
9202 int line, const char *function, ...)
9203 {
9204 va_list args;
9205 char *buffer;
9206 unsigned length = 0;
9207 enum tree_code code;
9208
9209 va_start (args, function);
9210 while ((code = (enum tree_code) va_arg (args, int)))
9211 length += 4 + strlen (get_tree_code_name (code));
9212 va_end (args);
9213 va_start (args, function);
9214 buffer = (char *) alloca (length);
9215 length = 0;
9216 while ((code = (enum tree_code) va_arg (args, int)))
9217 {
9218 if (length)
9219 {
9220 strcpy (buffer + length, " or ");
9221 length += 4;
9222 }
9223 strcpy (buffer + length, get_tree_code_name (code));
9224 length += strlen (get_tree_code_name (code));
9225 }
9226 va_end (args);
9227
9228 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9229 buffer, get_tree_code_name (TREE_CODE (node)),
9230 function, trim_filename (file), line);
9231 }
9232
9233 /* Similar to tree_check_failed, except that we check for a class of tree
9234 code, given in CL. */
9235
9236 void
9237 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9238 const char *file, int line, const char *function)
9239 {
9240 internal_error
9241 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9242 TREE_CODE_CLASS_STRING (cl),
9243 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9244 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9245 }
9246
9247 /* Similar to tree_check_failed, except that instead of specifying a
9248 dozen codes, use the knowledge that they're all sequential. */
9249
9250 void
9251 tree_range_check_failed (const_tree node, const char *file, int line,
9252 const char *function, enum tree_code c1,
9253 enum tree_code c2)
9254 {
9255 char *buffer;
9256 unsigned length = 0;
9257 unsigned int c;
9258
9259 for (c = c1; c <= c2; ++c)
9260 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9261
9262 length += strlen ("expected ");
9263 buffer = (char *) alloca (length);
9264 length = 0;
9265
9266 for (c = c1; c <= c2; ++c)
9267 {
9268 const char *prefix = length ? " or " : "expected ";
9269
9270 strcpy (buffer + length, prefix);
9271 length += strlen (prefix);
9272 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9273 length += strlen (get_tree_code_name ((enum tree_code) c));
9274 }
9275
9276 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9277 buffer, get_tree_code_name (TREE_CODE (node)),
9278 function, trim_filename (file), line);
9279 }
9280
9281
9282 /* Similar to tree_check_failed, except that we check that a tree does
9283 not belong to the class of tree codes given in CL. */
9284
9285 void
9286 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9287 const char *file, int line, const char *function)
9288 {
9289 internal_error
9290 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9291 TREE_CODE_CLASS_STRING (cl),
9292 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9293 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9294 }
9295
9296
9297 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9298
9299 void
9300 omp_clause_check_failed (const_tree node, const char *file, int line,
9301 const char *function, enum omp_clause_code code)
9302 {
9303 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9304 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9305 function, trim_filename (file), line);
9306 }
9307
9308
9309 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9310
9311 void
9312 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9313 const char *function, enum omp_clause_code c1,
9314 enum omp_clause_code c2)
9315 {
9316 char *buffer;
9317 unsigned length = 0;
9318 unsigned int c;
9319
9320 for (c = c1; c <= c2; ++c)
9321 length += 4 + strlen (omp_clause_code_name[c]);
9322
9323 length += strlen ("expected ");
9324 buffer = (char *) alloca (length);
9325 length = 0;
9326
9327 for (c = c1; c <= c2; ++c)
9328 {
9329 const char *prefix = length ? " or " : "expected ";
9330
9331 strcpy (buffer + length, prefix);
9332 length += strlen (prefix);
9333 strcpy (buffer + length, omp_clause_code_name[c]);
9334 length += strlen (omp_clause_code_name[c]);
9335 }
9336
9337 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9338 buffer, omp_clause_code_name[TREE_CODE (node)],
9339 function, trim_filename (file), line);
9340 }
9341
9342
9343 #undef DEFTREESTRUCT
9344 #define DEFTREESTRUCT(VAL, NAME) NAME,
9345
9346 static const char *ts_enum_names[] = {
9347 #include "treestruct.def"
9348 };
9349 #undef DEFTREESTRUCT
9350
9351 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9352
9353 /* Similar to tree_class_check_failed, except that we check for
9354 whether CODE contains the tree structure identified by EN. */
9355
9356 void
9357 tree_contains_struct_check_failed (const_tree node,
9358 const enum tree_node_structure_enum en,
9359 const char *file, int line,
9360 const char *function)
9361 {
9362 internal_error
9363 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9364 TS_ENUM_NAME (en),
9365 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9366 }
9367
9368
9369 /* Similar to above, except that the check is for the bounds of a
9370 tree_int_cst's (dynamically sized) vector of elements. */
9371
9372 void
9373 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9374 const char *function)
9375 {
9376 internal_error
9377 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9378 idx + 1, len, function, trim_filename (file), line);
9379 }
9380
9381 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9382 (dynamically sized) vector. */
9383
9384 void
9385 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9386 const char *function)
9387 {
9388 internal_error
9389 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9390 idx + 1, len, function, trim_filename (file), line);
9391 }
9392
9393 /* Similar to above, except that the check is for the bounds of the operand
9394 vector of an expression node EXP. */
9395
9396 void
9397 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9398 int line, const char *function)
9399 {
9400 enum tree_code code = TREE_CODE (exp);
9401 internal_error
9402 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9403 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9404 function, trim_filename (file), line);
9405 }
9406
9407 /* Similar to above, except that the check is for the number of
9408 operands of an OMP_CLAUSE node. */
9409
9410 void
9411 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9412 int line, const char *function)
9413 {
9414 internal_error
9415 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9416 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9417 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9418 trim_filename (file), line);
9419 }
9420 #endif /* ENABLE_TREE_CHECKING */
9421 \f
9422 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9423 and mapped to the machine mode MODE. Initialize its fields and build
9424 the information necessary for debugging output. */
9425
9426 static tree
9427 make_vector_type (tree innertype, int nunits, machine_mode mode)
9428 {
9429 tree t;
9430 inchash::hash hstate;
9431
9432 t = make_node (VECTOR_TYPE);
9433 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9434 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9435 SET_TYPE_MODE (t, mode);
9436
9437 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9438 SET_TYPE_STRUCTURAL_EQUALITY (t);
9439 else if (TYPE_CANONICAL (innertype) != innertype
9440 || mode != VOIDmode)
9441 TYPE_CANONICAL (t)
9442 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9443
9444 layout_type (t);
9445
9446 hstate.add_wide_int (VECTOR_TYPE);
9447 hstate.add_wide_int (nunits);
9448 hstate.add_wide_int (mode);
9449 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9450 t = type_hash_canon (hstate.end (), t);
9451
9452 /* We have built a main variant, based on the main variant of the
9453 inner type. Use it to build the variant we return. */
9454 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9455 && TREE_TYPE (t) != innertype)
9456 return build_type_attribute_qual_variant (t,
9457 TYPE_ATTRIBUTES (innertype),
9458 TYPE_QUALS (innertype));
9459
9460 return t;
9461 }
9462
9463 static tree
9464 make_or_reuse_type (unsigned size, int unsignedp)
9465 {
9466 int i;
9467
9468 if (size == INT_TYPE_SIZE)
9469 return unsignedp ? unsigned_type_node : integer_type_node;
9470 if (size == CHAR_TYPE_SIZE)
9471 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9472 if (size == SHORT_TYPE_SIZE)
9473 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9474 if (size == LONG_TYPE_SIZE)
9475 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9476 if (size == LONG_LONG_TYPE_SIZE)
9477 return (unsignedp ? long_long_unsigned_type_node
9478 : long_long_integer_type_node);
9479
9480 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9481 if (size == int_n_data[i].bitsize
9482 && int_n_enabled_p[i])
9483 return (unsignedp ? int_n_trees[i].unsigned_type
9484 : int_n_trees[i].signed_type);
9485
9486 if (unsignedp)
9487 return make_unsigned_type (size);
9488 else
9489 return make_signed_type (size);
9490 }
9491
9492 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9493
9494 static tree
9495 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9496 {
9497 if (satp)
9498 {
9499 if (size == SHORT_FRACT_TYPE_SIZE)
9500 return unsignedp ? sat_unsigned_short_fract_type_node
9501 : sat_short_fract_type_node;
9502 if (size == FRACT_TYPE_SIZE)
9503 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9504 if (size == LONG_FRACT_TYPE_SIZE)
9505 return unsignedp ? sat_unsigned_long_fract_type_node
9506 : sat_long_fract_type_node;
9507 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9508 return unsignedp ? sat_unsigned_long_long_fract_type_node
9509 : sat_long_long_fract_type_node;
9510 }
9511 else
9512 {
9513 if (size == SHORT_FRACT_TYPE_SIZE)
9514 return unsignedp ? unsigned_short_fract_type_node
9515 : short_fract_type_node;
9516 if (size == FRACT_TYPE_SIZE)
9517 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9518 if (size == LONG_FRACT_TYPE_SIZE)
9519 return unsignedp ? unsigned_long_fract_type_node
9520 : long_fract_type_node;
9521 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9522 return unsignedp ? unsigned_long_long_fract_type_node
9523 : long_long_fract_type_node;
9524 }
9525
9526 return make_fract_type (size, unsignedp, satp);
9527 }
9528
9529 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9530
9531 static tree
9532 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9533 {
9534 if (satp)
9535 {
9536 if (size == SHORT_ACCUM_TYPE_SIZE)
9537 return unsignedp ? sat_unsigned_short_accum_type_node
9538 : sat_short_accum_type_node;
9539 if (size == ACCUM_TYPE_SIZE)
9540 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9541 if (size == LONG_ACCUM_TYPE_SIZE)
9542 return unsignedp ? sat_unsigned_long_accum_type_node
9543 : sat_long_accum_type_node;
9544 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9545 return unsignedp ? sat_unsigned_long_long_accum_type_node
9546 : sat_long_long_accum_type_node;
9547 }
9548 else
9549 {
9550 if (size == SHORT_ACCUM_TYPE_SIZE)
9551 return unsignedp ? unsigned_short_accum_type_node
9552 : short_accum_type_node;
9553 if (size == ACCUM_TYPE_SIZE)
9554 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9555 if (size == LONG_ACCUM_TYPE_SIZE)
9556 return unsignedp ? unsigned_long_accum_type_node
9557 : long_accum_type_node;
9558 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9559 return unsignedp ? unsigned_long_long_accum_type_node
9560 : long_long_accum_type_node;
9561 }
9562
9563 return make_accum_type (size, unsignedp, satp);
9564 }
9565
9566
9567 /* Create an atomic variant node for TYPE. This routine is called
9568 during initialization of data types to create the 5 basic atomic
9569 types. The generic build_variant_type function requires these to
9570 already be set up in order to function properly, so cannot be
9571 called from there. If ALIGN is non-zero, then ensure alignment is
9572 overridden to this value. */
9573
9574 static tree
9575 build_atomic_base (tree type, unsigned int align)
9576 {
9577 tree t;
9578
9579 /* Make sure it's not already registered. */
9580 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9581 return t;
9582
9583 t = build_variant_type_copy (type);
9584 set_type_quals (t, TYPE_QUAL_ATOMIC);
9585
9586 if (align)
9587 TYPE_ALIGN (t) = align;
9588
9589 return t;
9590 }
9591
9592 /* Create nodes for all integer types (and error_mark_node) using the sizes
9593 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9594 SHORT_DOUBLE specifies whether double should have the same precision
9595 as float. */
9596
9597 void
9598 build_common_tree_nodes (bool signed_char, bool short_double)
9599 {
9600 int i;
9601
9602 error_mark_node = make_node (ERROR_MARK);
9603 TREE_TYPE (error_mark_node) = error_mark_node;
9604
9605 initialize_sizetypes ();
9606
9607 /* Define both `signed char' and `unsigned char'. */
9608 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9609 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9610 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9611 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9612
9613 /* Define `char', which is like either `signed char' or `unsigned char'
9614 but not the same as either. */
9615 char_type_node
9616 = (signed_char
9617 ? make_signed_type (CHAR_TYPE_SIZE)
9618 : make_unsigned_type (CHAR_TYPE_SIZE));
9619 TYPE_STRING_FLAG (char_type_node) = 1;
9620
9621 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9622 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9623 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9624 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9625 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9626 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9627 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9628 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9629
9630 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9631 {
9632 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9633 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9634 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9635 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9636
9637 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9638 && int_n_enabled_p[i])
9639 {
9640 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9641 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9642 }
9643 }
9644
9645 /* Define a boolean type. This type only represents boolean values but
9646 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9647 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9648 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9649 TYPE_PRECISION (boolean_type_node) = 1;
9650 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9651
9652 /* Define what type to use for size_t. */
9653 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9654 size_type_node = unsigned_type_node;
9655 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9656 size_type_node = long_unsigned_type_node;
9657 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9658 size_type_node = long_long_unsigned_type_node;
9659 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9660 size_type_node = short_unsigned_type_node;
9661 else
9662 {
9663 int i;
9664
9665 size_type_node = NULL_TREE;
9666 for (i = 0; i < NUM_INT_N_ENTS; i++)
9667 if (int_n_enabled_p[i])
9668 {
9669 char name[50];
9670 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9671
9672 if (strcmp (name, SIZE_TYPE) == 0)
9673 {
9674 size_type_node = int_n_trees[i].unsigned_type;
9675 }
9676 }
9677 if (size_type_node == NULL_TREE)
9678 gcc_unreachable ();
9679 }
9680
9681 /* Fill in the rest of the sized types. Reuse existing type nodes
9682 when possible. */
9683 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9684 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9685 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9686 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9687 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9688
9689 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9690 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9691 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9692 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9693 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9694
9695 /* Don't call build_qualified_type for atomics. That routine does
9696 special processing for atomics, and until they are initialized
9697 it's better not to make that call.
9698
9699 Check to see if there is a target override for atomic types. */
9700
9701 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9702 targetm.atomic_align_for_mode (QImode));
9703 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9704 targetm.atomic_align_for_mode (HImode));
9705 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9706 targetm.atomic_align_for_mode (SImode));
9707 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9708 targetm.atomic_align_for_mode (DImode));
9709 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9710 targetm.atomic_align_for_mode (TImode));
9711
9712 access_public_node = get_identifier ("public");
9713 access_protected_node = get_identifier ("protected");
9714 access_private_node = get_identifier ("private");
9715
9716 /* Define these next since types below may use them. */
9717 integer_zero_node = build_int_cst (integer_type_node, 0);
9718 integer_one_node = build_int_cst (integer_type_node, 1);
9719 integer_three_node = build_int_cst (integer_type_node, 3);
9720 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9721
9722 size_zero_node = size_int (0);
9723 size_one_node = size_int (1);
9724 bitsize_zero_node = bitsize_int (0);
9725 bitsize_one_node = bitsize_int (1);
9726 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9727
9728 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9729 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9730
9731 void_type_node = make_node (VOID_TYPE);
9732 layout_type (void_type_node);
9733
9734 pointer_bounds_type_node = targetm.chkp_bound_type ();
9735
9736 /* We are not going to have real types in C with less than byte alignment,
9737 so we might as well not have any types that claim to have it. */
9738 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9739 TYPE_USER_ALIGN (void_type_node) = 0;
9740
9741 void_node = make_node (VOID_CST);
9742 TREE_TYPE (void_node) = void_type_node;
9743
9744 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9745 layout_type (TREE_TYPE (null_pointer_node));
9746
9747 ptr_type_node = build_pointer_type (void_type_node);
9748 const_ptr_type_node
9749 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9750 fileptr_type_node = ptr_type_node;
9751
9752 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9753
9754 float_type_node = make_node (REAL_TYPE);
9755 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9756 layout_type (float_type_node);
9757
9758 double_type_node = make_node (REAL_TYPE);
9759 if (short_double)
9760 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9761 else
9762 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9763 layout_type (double_type_node);
9764
9765 long_double_type_node = make_node (REAL_TYPE);
9766 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9767 layout_type (long_double_type_node);
9768
9769 float_ptr_type_node = build_pointer_type (float_type_node);
9770 double_ptr_type_node = build_pointer_type (double_type_node);
9771 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9772 integer_ptr_type_node = build_pointer_type (integer_type_node);
9773
9774 /* Fixed size integer types. */
9775 uint16_type_node = make_or_reuse_type (16, 1);
9776 uint32_type_node = make_or_reuse_type (32, 1);
9777 uint64_type_node = make_or_reuse_type (64, 1);
9778
9779 /* Decimal float types. */
9780 dfloat32_type_node = make_node (REAL_TYPE);
9781 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9782 layout_type (dfloat32_type_node);
9783 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9784 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9785
9786 dfloat64_type_node = make_node (REAL_TYPE);
9787 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9788 layout_type (dfloat64_type_node);
9789 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9790 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9791
9792 dfloat128_type_node = make_node (REAL_TYPE);
9793 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9794 layout_type (dfloat128_type_node);
9795 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9796 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9797
9798 complex_integer_type_node = build_complex_type (integer_type_node);
9799 complex_float_type_node = build_complex_type (float_type_node);
9800 complex_double_type_node = build_complex_type (double_type_node);
9801 complex_long_double_type_node = build_complex_type (long_double_type_node);
9802
9803 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9804 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9805 sat_ ## KIND ## _type_node = \
9806 make_sat_signed_ ## KIND ## _type (SIZE); \
9807 sat_unsigned_ ## KIND ## _type_node = \
9808 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9809 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9810 unsigned_ ## KIND ## _type_node = \
9811 make_unsigned_ ## KIND ## _type (SIZE);
9812
9813 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9814 sat_ ## WIDTH ## KIND ## _type_node = \
9815 make_sat_signed_ ## KIND ## _type (SIZE); \
9816 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9817 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9818 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9819 unsigned_ ## WIDTH ## KIND ## _type_node = \
9820 make_unsigned_ ## KIND ## _type (SIZE);
9821
9822 /* Make fixed-point type nodes based on four different widths. */
9823 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9824 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9825 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9826 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9827 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9828
9829 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9830 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9831 NAME ## _type_node = \
9832 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9833 u ## NAME ## _type_node = \
9834 make_or_reuse_unsigned_ ## KIND ## _type \
9835 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9836 sat_ ## NAME ## _type_node = \
9837 make_or_reuse_sat_signed_ ## KIND ## _type \
9838 (GET_MODE_BITSIZE (MODE ## mode)); \
9839 sat_u ## NAME ## _type_node = \
9840 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9841 (GET_MODE_BITSIZE (U ## MODE ## mode));
9842
9843 /* Fixed-point type and mode nodes. */
9844 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9845 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9846 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9847 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9848 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9849 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9850 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9851 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9852 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9853 MAKE_FIXED_MODE_NODE (accum, da, DA)
9854 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9855
9856 {
9857 tree t = targetm.build_builtin_va_list ();
9858
9859 /* Many back-ends define record types without setting TYPE_NAME.
9860 If we copied the record type here, we'd keep the original
9861 record type without a name. This breaks name mangling. So,
9862 don't copy record types and let c_common_nodes_and_builtins()
9863 declare the type to be __builtin_va_list. */
9864 if (TREE_CODE (t) != RECORD_TYPE)
9865 t = build_variant_type_copy (t);
9866
9867 va_list_type_node = t;
9868 }
9869 }
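
/* Illustrative sketch, not part of GCC and never compiled: once the
   function above has run, front ends and the middle end can lean on the
   shared nodes it created instead of rebuilding common types.  */
#if 0
static tree
example_use_common_nodes (void)
{
  /* integer_type_node and integer_one_node are now singletons.  */
  tree forty_two = build_int_cst (integer_type_node, 42);
  return build2 (PLUS_EXPR, integer_type_node, forty_two, integer_one_node);
}
#endif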
9870
9871 /* Modify DECL for given flags.
9872 TM_PURE attribute is set only on types, so the function will modify
9873 DECL's type when ECF_TM_PURE is used. */
9874
9875 void
9876 set_call_expr_flags (tree decl, int flags)
9877 {
9878 if (flags & ECF_NOTHROW)
9879 TREE_NOTHROW (decl) = 1;
9880 if (flags & ECF_CONST)
9881 TREE_READONLY (decl) = 1;
9882 if (flags & ECF_PURE)
9883 DECL_PURE_P (decl) = 1;
9884 if (flags & ECF_LOOPING_CONST_OR_PURE)
9885 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9886 if (flags & ECF_NOVOPS)
9887 DECL_IS_NOVOPS (decl) = 1;
9888 if (flags & ECF_NORETURN)
9889 TREE_THIS_VOLATILE (decl) = 1;
9890 if (flags & ECF_MALLOC)
9891 DECL_IS_MALLOC (decl) = 1;
9892 if (flags & ECF_RETURNS_TWICE)
9893 DECL_IS_RETURNS_TWICE (decl) = 1;
9894 if (flags & ECF_LEAF)
9895 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9896 NULL, DECL_ATTRIBUTES (decl));
9897 if ((flags & ECF_TM_PURE) && flag_tm)
9898 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9899 /* Looping const or pure is implied by noreturn.
9900 There is currently no way to declare looping const or looping pure alone. */
9901 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9902 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9903 }
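
/* Illustrative sketch, not part of GCC and never compiled: marking a
   hypothetical FNDECL as const, nothrow and leaf with the flags handled
   above.  */
#if 0
static void
example_set_call_expr_flags (tree fndecl)
{
  set_call_expr_flags (fndecl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);
}
#endif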
9904
9905
9906 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9907
9908 static void
9909 local_define_builtin (const char *name, tree type, enum built_in_function code,
9910 const char *library_name, int ecf_flags)
9911 {
9912 tree decl;
9913
9914 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9915 library_name, NULL_TREE);
9916 set_call_expr_flags (decl, ecf_flags);
9917
9918 set_builtin_decl (code, decl, true);
9919 }
9920
9921 /* Call this function after instantiating all builtins that the language
9922 front end cares about. This will build the rest of the builtins
9923 and internal functions that are relied upon by the tree optimizers and
9924 the middle-end. */
9925
9926 void
9927 build_common_builtin_nodes (void)
9928 {
9929 tree tmp, ftype;
9930 int ecf_flags;
9931
9932 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9933 {
9934 ftype = build_function_type (void_type_node, void_list_node);
9935 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9936 "__builtin_unreachable",
9937 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9938 | ECF_CONST);
9939 }
9940
9941 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9942 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9943 {
9944 ftype = build_function_type_list (ptr_type_node,
9945 ptr_type_node, const_ptr_type_node,
9946 size_type_node, NULL_TREE);
9947
9948 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9949 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9950 "memcpy", ECF_NOTHROW | ECF_LEAF);
9951 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9952 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9953 "memmove", ECF_NOTHROW | ECF_LEAF);
9954 }
9955
9956 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9957 {
9958 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9959 const_ptr_type_node, size_type_node,
9960 NULL_TREE);
9961 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9962 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9963 }
9964
9965 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9966 {
9967 ftype = build_function_type_list (ptr_type_node,
9968 ptr_type_node, integer_type_node,
9969 size_type_node, NULL_TREE);
9970 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9971 "memset", ECF_NOTHROW | ECF_LEAF);
9972 }
9973
9974 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9975 {
9976 ftype = build_function_type_list (ptr_type_node,
9977 size_type_node, NULL_TREE);
9978 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9979 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9980 }
9981
9982 ftype = build_function_type_list (ptr_type_node, size_type_node,
9983 size_type_node, NULL_TREE);
9984 local_define_builtin ("__builtin_alloca_with_align", ftype,
9985 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9986 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9987
9988 /* If we're checking the stack, `alloca' can throw. */
9989 if (flag_stack_check)
9990 {
9991 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9992 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9993 }
9994
9995 ftype = build_function_type_list (void_type_node,
9996 ptr_type_node, ptr_type_node,
9997 ptr_type_node, NULL_TREE);
9998 local_define_builtin ("__builtin_init_trampoline", ftype,
9999 BUILT_IN_INIT_TRAMPOLINE,
10000 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10001 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10002 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10003 "__builtin_init_heap_trampoline",
10004 ECF_NOTHROW | ECF_LEAF);
10005
10006 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10007 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10008 BUILT_IN_ADJUST_TRAMPOLINE,
10009 "__builtin_adjust_trampoline",
10010 ECF_CONST | ECF_NOTHROW);
10011
10012 ftype = build_function_type_list (void_type_node,
10013 ptr_type_node, ptr_type_node, NULL_TREE);
10014 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10015 BUILT_IN_NONLOCAL_GOTO,
10016 "__builtin_nonlocal_goto",
10017 ECF_NORETURN | ECF_NOTHROW);
10018
10019 ftype = build_function_type_list (void_type_node,
10020 ptr_type_node, ptr_type_node, NULL_TREE);
10021 local_define_builtin ("__builtin_setjmp_setup", ftype,
10022 BUILT_IN_SETJMP_SETUP,
10023 "__builtin_setjmp_setup", ECF_NOTHROW);
10024
10025 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10026 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10027 BUILT_IN_SETJMP_RECEIVER,
10028 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10029
10030 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10031 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10032 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10033
10034 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10035 local_define_builtin ("__builtin_stack_restore", ftype,
10036 BUILT_IN_STACK_RESTORE,
10037 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10038
10039 /* If there's a possibility that we might use the ARM EABI, build the
10040 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10041 if (targetm.arm_eabi_unwinder)
10042 {
10043 ftype = build_function_type_list (void_type_node, NULL_TREE);
10044 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10045 BUILT_IN_CXA_END_CLEANUP,
10046 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10047 }
10048
10049 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10050 local_define_builtin ("__builtin_unwind_resume", ftype,
10051 BUILT_IN_UNWIND_RESUME,
10052 ((targetm_common.except_unwind_info (&global_options)
10053 == UI_SJLJ)
10054 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10055 ECF_NORETURN);
10056
10057 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10058 {
10059 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10060 NULL_TREE);
10061 local_define_builtin ("__builtin_return_address", ftype,
10062 BUILT_IN_RETURN_ADDRESS,
10063 "__builtin_return_address",
10064 ECF_NOTHROW);
10065 }
10066
10067 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10068 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10069 {
10070 ftype = build_function_type_list (void_type_node, ptr_type_node,
10071 ptr_type_node, NULL_TREE);
10072 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10073 local_define_builtin ("__cyg_profile_func_enter", ftype,
10074 BUILT_IN_PROFILE_FUNC_ENTER,
10075 "__cyg_profile_func_enter", 0);
10076 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10077 local_define_builtin ("__cyg_profile_func_exit", ftype,
10078 BUILT_IN_PROFILE_FUNC_EXIT,
10079 "__cyg_profile_func_exit", 0);
10080 }
10081
10082 /* The exception object and filter values from the runtime. The argument
10083 must be zero before exception lowering, i.e. from the front end. After
10084 exception lowering, it will be the region number for the exception
10085 landing pad. These functions are PURE instead of CONST to prevent
10086 them from being hoisted past the exception edge that will initialize
10087 their values in the landing pad. */
10088 ftype = build_function_type_list (ptr_type_node,
10089 integer_type_node, NULL_TREE);
10090 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10091 /* Only use TM_PURE if we have TM language support. */
10092 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10093 ecf_flags |= ECF_TM_PURE;
10094 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10095 "__builtin_eh_pointer", ecf_flags);
10096
10097 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10098 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10099 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10100 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10101
10102 ftype = build_function_type_list (void_type_node,
10103 integer_type_node, integer_type_node,
10104 NULL_TREE);
10105 local_define_builtin ("__builtin_eh_copy_values", ftype,
10106 BUILT_IN_EH_COPY_VALUES,
10107 "__builtin_eh_copy_values", ECF_NOTHROW);
10108
10109 /* Complex multiplication and division. These are handled as builtins
10110 rather than optabs because emit_library_call_value doesn't support
10111 complex. Further, we can do slightly better with folding these
10112 beasties if the real and imaginary parts of the arguments are separate. */
10113 {
10114 int mode;
10115
10116 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10117 {
10118 char mode_name_buf[4], *q;
10119 const char *p;
10120 enum built_in_function mcode, dcode;
10121 tree type, inner_type;
10122 const char *prefix = "__";
10123
10124 if (targetm.libfunc_gnu_prefix)
10125 prefix = "__gnu_";
10126
10127 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10128 if (type == NULL)
10129 continue;
10130 inner_type = TREE_TYPE (type);
10131
10132 ftype = build_function_type_list (type, inner_type, inner_type,
10133 inner_type, inner_type, NULL_TREE);
10134
10135 mcode = ((enum built_in_function)
10136 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10137 dcode = ((enum built_in_function)
10138 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10139
10140 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10141 *q = TOLOWER (*p);
10142 *q = '\0';
10143
10144 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10145 NULL);
10146 local_define_builtin (built_in_names[mcode], ftype, mcode,
10147 built_in_names[mcode],
10148 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10149
10150 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10151 NULL);
10152 local_define_builtin (built_in_names[dcode], ftype, dcode,
10153 built_in_names[dcode],
10154 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10155 }
10156 }
10157
10158 init_internal_fns ();
10159 }
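
/* Illustrative sketch, not part of GCC and never compiled: once the
   function above has run, the middle end can fetch the builtins it
   registered and build calls to them.  DST, SRC and LEN are hypothetical
   operands supplied by the caller.  */
#if 0
static tree
example_call_builtin_memcpy (tree dst, tree src, tree len)
{
  tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
  return build_call_expr (fn, 3, dst, src, len);
}
#endif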
10160
10161 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10162 better way.
10163
10164 If we requested a pointer to a vector, build up the pointers that
10165 we stripped off while looking for the inner type. Similarly for
10166 return values from functions.
10167
10168 The argument TYPE is the top of the chain, and BOTTOM is the
10169 new type which we will point to. */
10170
10171 tree
10172 reconstruct_complex_type (tree type, tree bottom)
10173 {
10174 tree inner, outer;
10175
10176 if (TREE_CODE (type) == POINTER_TYPE)
10177 {
10178 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10179 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10180 TYPE_REF_CAN_ALIAS_ALL (type));
10181 }
10182 else if (TREE_CODE (type) == REFERENCE_TYPE)
10183 {
10184 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10185 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10186 TYPE_REF_CAN_ALIAS_ALL (type));
10187 }
10188 else if (TREE_CODE (type) == ARRAY_TYPE)
10189 {
10190 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10191 outer = build_array_type (inner, TYPE_DOMAIN (type));
10192 }
10193 else if (TREE_CODE (type) == FUNCTION_TYPE)
10194 {
10195 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10196 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10197 }
10198 else if (TREE_CODE (type) == METHOD_TYPE)
10199 {
10200 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10201 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10202 so we must compensate by getting rid of it. */
10203 outer
10204 = build_method_type_directly
10205 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10206 inner,
10207 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10208 }
10209 else if (TREE_CODE (type) == OFFSET_TYPE)
10210 {
10211 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10212 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10213 }
10214 else
10215 return bottom;
10216
10217 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10218 TYPE_QUALS (type));
10219 }
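
/* Illustrative sketch, not part of GCC and never compiled: rebuilding a
   "pointer to float" as a pointer to a 4-element float vector by
   substituting the inner type while keeping the outer pointer layer.  */
#if 0
static tree
example_reconstruct_complex_type (void)
{
  tree pfloat = build_pointer_type (float_type_node);
  tree v4sf = build_vector_type (float_type_node, 4);
  /* The result is a POINTER_TYPE whose pointed-to type is the vector.  */
  return reconstruct_complex_type (pfloat, v4sf);
}
#endif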
10220
10221 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10222 the inner type. */
10223 tree
10224 build_vector_type_for_mode (tree innertype, machine_mode mode)
10225 {
10226 int nunits;
10227
10228 switch (GET_MODE_CLASS (mode))
10229 {
10230 case MODE_VECTOR_INT:
10231 case MODE_VECTOR_FLOAT:
10232 case MODE_VECTOR_FRACT:
10233 case MODE_VECTOR_UFRACT:
10234 case MODE_VECTOR_ACCUM:
10235 case MODE_VECTOR_UACCUM:
10236 nunits = GET_MODE_NUNITS (mode);
10237 break;
10238
10239 case MODE_INT:
10240 /* Check that there are no leftover bits. */
10241 gcc_assert (GET_MODE_BITSIZE (mode)
10242 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10243
10244 nunits = GET_MODE_BITSIZE (mode)
10245 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10246 break;
10247
10248 default:
10249 gcc_unreachable ();
10250 }
10251
10252 return make_vector_type (innertype, nunits, mode);
10253 }
10254
10255 /* Similarly, but takes the inner type and number of units, which must be
10256 a power of two. */
10257
10258 tree
10259 build_vector_type (tree innertype, int nunits)
10260 {
10261 return make_vector_type (innertype, nunits, VOIDmode);
10262 }
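
/* Illustrative sketch, not part of GCC and never compiled: the usual ways
   a target or front end asks for a vector type.  V4SFmode is an
   assumption; it only exists on targets that define such a mode.  */
#if 0
static tree
example_build_vector_types (void)
{
  /* Four 32-bit integers; the mode is picked by layout_type.  */
  tree v4si = build_vector_type (intSI_type_node, 4);
  /* Or start from a specific vector mode the target provides.  */
  tree v4sf = build_vector_type_for_mode (float_type_node, V4SFmode);
  return v4si ? v4si : v4sf;
}
#endif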
10263
10264 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10265
10266 tree
10267 build_opaque_vector_type (tree innertype, int nunits)
10268 {
10269 tree t = make_vector_type (innertype, nunits, VOIDmode);
10270 tree cand;
10271 /* We always build the non-opaque variant before the opaque one,
10272 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10273 cand = TYPE_NEXT_VARIANT (t);
10274 if (cand
10275 && TYPE_VECTOR_OPAQUE (cand)
10276 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10277 return cand;
10278 /* Otherwise build a variant type and make sure to queue it after
10279 the non-opaque type. */
10280 cand = build_distinct_type_copy (t);
10281 TYPE_VECTOR_OPAQUE (cand) = true;
10282 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10283 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10284 TYPE_NEXT_VARIANT (t) = cand;
10285 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10286 return cand;
10287 }
10288
10289
10290 /* Given an initializer INIT, return TRUE if INIT is zero or some
10291 aggregate of zeros. Otherwise return FALSE. */
10292 bool
10293 initializer_zerop (const_tree init)
10294 {
10295 tree elt;
10296
10297 STRIP_NOPS (init);
10298
10299 switch (TREE_CODE (init))
10300 {
10301 case INTEGER_CST:
10302 return integer_zerop (init);
10303
10304 case REAL_CST:
10305 /* ??? Note that this is not correct for C4X float formats. There,
10306 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10307 negative exponent. */
10308 return real_zerop (init)
10309 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10310
10311 case FIXED_CST:
10312 return fixed_zerop (init);
10313
10314 case COMPLEX_CST:
10315 return integer_zerop (init)
10316 || (real_zerop (init)
10317 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10318 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10319
10320 case VECTOR_CST:
10321 {
10322 unsigned i;
10323 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10324 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10325 return false;
10326 return true;
10327 }
10328
10329 case CONSTRUCTOR:
10330 {
10331 unsigned HOST_WIDE_INT idx;
10332
10333 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10334 if (!initializer_zerop (elt))
10335 return false;
10336 return true;
10337 }
10338
10339 case STRING_CST:
10340 {
10341 int i;
10342
10343 /* We need to loop through all elements to handle cases like
10344 "\0" and "\0foobar". */
10345 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10346 if (TREE_STRING_POINTER (init)[i] != '\0')
10347 return false;
10348
10349 return true;
10350 }
10351
10352 default:
10353 return false;
10354 }
10355 }
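
/* Illustrative sketch, not part of GCC and never compiled: the predicate
   above accepts plain zeros as well as aggregates of zeros.  */
#if 0
static void
example_initializer_zerop (void)
{
  gcc_assert (initializer_zerop (build_int_cst (integer_type_node, 0)));
  gcc_assert (initializer_zerop (build_zero_cst (double_type_node)));
  gcc_assert (!initializer_zerop (integer_one_node));
}
#endif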
10356
10357 /* Check if vector VEC consists entirely of equal elements and
10358 that the number of elements corresponds to the type of VEC.
10359 The function returns the first element of the vector
10360 or NULL_TREE if the vector is not uniform. */
10361 tree
10362 uniform_vector_p (const_tree vec)
10363 {
10364 tree first, t;
10365 unsigned i;
10366
10367 if (vec == NULL_TREE)
10368 return NULL_TREE;
10369
10370 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10371
10372 if (TREE_CODE (vec) == VECTOR_CST)
10373 {
10374 first = VECTOR_CST_ELT (vec, 0);
10375 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10376 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10377 return NULL_TREE;
10378
10379 return first;
10380 }
10381
10382 else if (TREE_CODE (vec) == CONSTRUCTOR)
10383 {
10384 first = error_mark_node;
10385
10386 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10387 {
10388 if (i == 0)
10389 {
10390 first = t;
10391 continue;
10392 }
10393 if (!operand_equal_p (first, t, 0))
10394 return NULL_TREE;
10395 }
10396 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10397 return NULL_TREE;
10398
10399 return first;
10400 }
10401
10402 return NULL_TREE;
10403 }
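
/* Illustrative sketch, not part of GCC and never compiled: a VECTOR_CST
   built from a single value is uniform, so the helper above hands the
   element back.  */
#if 0
static void
example_uniform_vector_p (void)
{
  tree v4si = build_vector_type (intSI_type_node, 4);
  tree elt = build_int_cst (intSI_type_node, 7);
  tree splat = build_vector_from_val (v4si, elt);
  gcc_assert (tree_to_shwi (uniform_vector_p (splat)) == 7);
}
#endif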
10404
10405 /* Build an empty statement at location LOC. */
10406
10407 tree
10408 build_empty_stmt (location_t loc)
10409 {
10410 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10411 SET_EXPR_LOCATION (t, loc);
10412 return t;
10413 }
10414
10415
10416 /* Build an OpenMP clause with code CODE. LOC is the location of the
10417 clause. */
10418
10419 tree
10420 build_omp_clause (location_t loc, enum omp_clause_code code)
10421 {
10422 tree t;
10423 int size, length;
10424
10425 length = omp_clause_num_ops[code];
10426 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10427
10428 record_node_allocation_statistics (OMP_CLAUSE, size);
10429
10430 t = (tree) ggc_internal_alloc (size);
10431 memset (t, 0, size);
10432 TREE_SET_CODE (t, OMP_CLAUSE);
10433 OMP_CLAUSE_SET_CODE (t, code);
10434 OMP_CLAUSE_LOCATION (t) = loc;
10435
10436 return t;
10437 }
10438
10439 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10440 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10441 Except for the CODE and operand count field, other storage for the
10442 object is initialized to zeros. */
10443
10444 tree
10445 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10446 {
10447 tree t;
10448 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10449
10450 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10451 gcc_assert (len >= 1);
10452
10453 record_node_allocation_statistics (code, length);
10454
10455 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10456
10457 TREE_SET_CODE (t, code);
10458
10459 /* Can't use TREE_OPERAND to store the length because if checking is
10460 enabled, it will try to check the length before we store it. :-P */
10461 t->exp.operands[0] = build_int_cst (sizetype, len);
10462
10463 return t;
10464 }
10465
10466 /* Helper function for build_call_* functions; build a CALL_EXPR with
10467 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10468 the argument slots. */
10469
10470 static tree
10471 build_call_1 (tree return_type, tree fn, int nargs)
10472 {
10473 tree t;
10474
10475 t = build_vl_exp (CALL_EXPR, nargs + 3);
10476 TREE_TYPE (t) = return_type;
10477 CALL_EXPR_FN (t) = fn;
10478 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10479
10480 return t;
10481 }
10482
10483 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10484 FN and a null static chain slot. NARGS is the number of call arguments
10485 which are specified as "..." arguments. */
10486
10487 tree
10488 build_call_nary (tree return_type, tree fn, int nargs, ...)
10489 {
10490 tree ret;
10491 va_list args;
10492 va_start (args, nargs);
10493 ret = build_call_valist (return_type, fn, nargs, args);
10494 va_end (args);
10495 return ret;
10496 }
10497
10498 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10499 FN and a null static chain slot. NARGS is the number of call arguments
10500 which are specified as a va_list ARGS. */
10501
10502 tree
10503 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10504 {
10505 tree t;
10506 int i;
10507
10508 t = build_call_1 (return_type, fn, nargs);
10509 for (i = 0; i < nargs; i++)
10510 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10511 process_call_operands (t);
10512 return t;
10513 }
10514
10515 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10516 FN and a null static chain slot. NARGS is the number of call arguments
10517 which are specified as a tree array ARGS. */
10518
10519 tree
10520 build_call_array_loc (location_t loc, tree return_type, tree fn,
10521 int nargs, const tree *args)
10522 {
10523 tree t;
10524 int i;
10525
10526 t = build_call_1 (return_type, fn, nargs);
10527 for (i = 0; i < nargs; i++)
10528 CALL_EXPR_ARG (t, i) = args[i];
10529 process_call_operands (t);
10530 SET_EXPR_LOCATION (t, loc);
10531 return t;
10532 }
10533
10534 /* Like build_call_array, but takes a vec. */
10535
10536 tree
10537 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10538 {
10539 tree ret, t;
10540 unsigned int ix;
10541
10542 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10543 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10544 CALL_EXPR_ARG (ret, ix) = t;
10545 process_call_operands (ret);
10546 return ret;
10547 }
10548
10549 /* Conveniently construct a function call expression. FNDECL names the
10550 function to be called and N arguments are passed in the array
10551 ARGARRAY. */
10552
10553 tree
10554 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10555 {
10556 tree fntype = TREE_TYPE (fndecl);
10557 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10558
10559 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10560 }
10561
10562 /* Conveniently construct a function call expression. FNDECL names the
10563 function to be called and the arguments are passed in the vector
10564 VEC. */
10565
10566 tree
10567 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10568 {
10569 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10570 vec_safe_address (vec));
10571 }
10572
10573
10574 /* Conveniently construct a function call expression. FNDECL names the
10575 function to be called, N is the number of arguments, and the "..."
10576 parameters are the argument expressions. */
10577
10578 tree
10579 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10580 {
10581 va_list ap;
10582 tree *argarray = XALLOCAVEC (tree, n);
10583 int i;
10584
10585 va_start (ap, n);
10586 for (i = 0; i < n; i++)
10587 argarray[i] = va_arg (ap, tree);
10588 va_end (ap);
10589 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10590 }
10591
10592 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10593 varargs macros aren't supported by all bootstrap compilers. */
10594
10595 tree
10596 build_call_expr (tree fndecl, int n, ...)
10597 {
10598 va_list ap;
10599 tree *argarray = XALLOCAVEC (tree, n);
10600 int i;
10601
10602 va_start (ap, n);
10603 for (i = 0; i < n; i++)
10604 argarray[i] = va_arg (ap, tree);
10605 va_end (ap);
10606 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10607 }
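
/* Illustrative sketch, not part of GCC and never compiled: the convenience
   wrappers above in action, building a call to a hypothetical FNDECL with
   two caller-supplied arguments.  */
#if 0
static tree
example_build_call (tree fndecl, tree arg0, tree arg1)
{
  return build_call_expr_loc (UNKNOWN_LOCATION, fndecl, 2, arg0, arg1);
}
#endif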
10608
10609 /* Build an internal call expression. This is just like a CALL_EXPR, except
10610 its CALL_EXPR_FN is NULL. It will get gimplified later into a call to an
10611 internal function. */
10612
10613 tree
10614 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10615 tree type, int n, ...)
10616 {
10617 va_list ap;
10618 int i;
10619
10620 tree fn = build_call_1 (type, NULL_TREE, n);
10621 va_start (ap, n);
10622 for (i = 0; i < n; i++)
10623 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10624 va_end (ap);
10625 SET_EXPR_LOCATION (fn, loc);
10626 CALL_EXPR_IFN (fn) = ifn;
10627 return fn;
10628 }
10629
10630 /* Create a new constant string literal and return a char* pointer to it.
10631 The STRING_CST value is the LEN characters at STR. */
10632 tree
10633 build_string_literal (int len, const char *str)
10634 {
10635 tree t, elem, index, type;
10636
10637 t = build_string (len, str);
10638 elem = build_type_variant (char_type_node, 1, 0);
10639 index = build_index_type (size_int (len - 1));
10640 type = build_array_type (elem, index);
10641 TREE_TYPE (t) = type;
10642 TREE_CONSTANT (t) = 1;
10643 TREE_READONLY (t) = 1;
10644 TREE_STATIC (t) = 1;
10645
10646 type = build_pointer_type (elem);
10647 t = build1 (ADDR_EXPR, type,
10648 build4 (ARRAY_REF, elem,
10649 t, integer_zero_node, NULL_TREE, NULL_TREE));
10650 return t;
10651 }
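
/* Illustrative sketch, not part of GCC and never compiled: the literal
   builder above returns the address of the string, ready to be used as a
   "const char *"-style call argument.  */
#if 0
static tree
example_build_string_literal (void)
{
  const char *msg = "hello";
  /* Include the terminating NUL in the length.  */
  return build_string_literal (strlen (msg) + 1, msg);
}
#endif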
10652
10653
10654
10655 /* Return true if T (assumed to be a DECL) must be assigned a memory
10656 location. */
10657
10658 bool
10659 needs_to_live_in_memory (const_tree t)
10660 {
10661 return (TREE_ADDRESSABLE (t)
10662 || is_global_var (t)
10663 || (TREE_CODE (t) == RESULT_DECL
10664 && !DECL_BY_REFERENCE (t)
10665 && aggregate_value_p (t, current_function_decl)));
10666 }
10667
10668 /* Return the value of the constant X, sign-extended. */
10669
10670 HOST_WIDE_INT
10671 int_cst_value (const_tree x)
10672 {
10673 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10674 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10675
10676 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10677 gcc_assert (cst_and_fits_in_hwi (x));
10678
10679 if (bits < HOST_BITS_PER_WIDE_INT)
10680 {
10681 bool negative = ((val >> (bits - 1)) & 1) != 0;
10682 if (negative)
10683 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10684 else
10685 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10686 }
10687
10688 return val;
10689 }
10690
10691 /* If TYPE is an integral or pointer type, return an integer type with
10692 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10693 if TYPE is already an integer type of signedness UNSIGNEDP. */
10694
10695 tree
10696 signed_or_unsigned_type_for (int unsignedp, tree type)
10697 {
10698 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10699 return type;
10700
10701 if (TREE_CODE (type) == VECTOR_TYPE)
10702 {
10703 tree inner = TREE_TYPE (type);
10704 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10705 if (!inner2)
10706 return NULL_TREE;
10707 if (inner == inner2)
10708 return type;
10709 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10710 }
10711
10712 if (!INTEGRAL_TYPE_P (type)
10713 && !POINTER_TYPE_P (type)
10714 && TREE_CODE (type) != OFFSET_TYPE)
10715 return NULL_TREE;
10716
10717 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10718 }
10719
10720 /* If TYPE is an integral or pointer type, return an integer type with
10721 the same precision which is unsigned, or itself if TYPE is already an
10722 unsigned integer type. */
10723
10724 tree
10725 unsigned_type_for (tree type)
10726 {
10727 return signed_or_unsigned_type_for (1, type);
10728 }
10729
10730 /* If TYPE is an integral or pointer type, return an integer type with
10731 the same precision which is signed, or itself if TYPE is already a
10732 signed integer type. */
10733
10734 tree
10735 signed_type_for (tree type)
10736 {
10737 return signed_or_unsigned_type_for (0, type);
10738 }
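
/* Illustrative sketch, not part of GCC and never compiled: the wrappers
   above toggle signedness while preserving precision, and also handle
   pointer types.  */
#if 0
static void
example_signedness_wrappers (void)
{
  tree u = unsigned_type_for (integer_type_node);
  gcc_assert (TYPE_UNSIGNED (u)
	      && TYPE_PRECISION (u) == TYPE_PRECISION (integer_type_node));

  /* Pointers map to an unsigned integer type of the same precision.  */
  tree uptr = unsigned_type_for (ptr_type_node);
  gcc_assert (TYPE_PRECISION (uptr) == TYPE_PRECISION (ptr_type_node));
}
#endif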
10739
10740 /* If TYPE is a vector type, return a signed integer vector type with the
10741 same width and number of subparts. Otherwise return boolean_type_node. */
10742
10743 tree
10744 truth_type_for (tree type)
10745 {
10746 if (TREE_CODE (type) == VECTOR_TYPE)
10747 {
10748 tree elem = lang_hooks.types.type_for_size
10749 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10750 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10751 }
10752 else
10753 return boolean_type_node;
10754 }
10755
10756 /* Returns the largest value obtainable by casting something in INNER type to
10757 OUTER type. */
10758
10759 tree
10760 upper_bound_in_type (tree outer, tree inner)
10761 {
10762 unsigned int det = 0;
10763 unsigned oprec = TYPE_PRECISION (outer);
10764 unsigned iprec = TYPE_PRECISION (inner);
10765 unsigned prec;
10766
10767 /* Compute a unique number for every combination. */
10768 det |= (oprec > iprec) ? 4 : 0;
10769 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10770 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10771
10772 /* Determine the exponent to use. */
10773 switch (det)
10774 {
10775 case 0:
10776 case 1:
10777 /* oprec <= iprec, outer: signed, inner: don't care. */
10778 prec = oprec - 1;
10779 break;
10780 case 2:
10781 case 3:
10782 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10783 prec = oprec;
10784 break;
10785 case 4:
10786 /* oprec > iprec, outer: signed, inner: signed. */
10787 prec = iprec - 1;
10788 break;
10789 case 5:
10790 /* oprec > iprec, outer: signed, inner: unsigned. */
10791 prec = iprec;
10792 break;
10793 case 6:
10794 /* oprec > iprec, outer: unsigned, inner: signed. */
10795 prec = oprec;
10796 break;
10797 case 7:
10798 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10799 prec = iprec;
10800 break;
10801 default:
10802 gcc_unreachable ();
10803 }
10804
10805 return wide_int_to_tree (outer,
10806 wi::mask (prec, false, TYPE_PRECISION (outer)));
10807 }
10808
10809 /* Returns the smallest value obtainable by casting something in INNER type to
10810 OUTER type. */
10811
10812 tree
10813 lower_bound_in_type (tree outer, tree inner)
10814 {
10815 unsigned oprec = TYPE_PRECISION (outer);
10816 unsigned iprec = TYPE_PRECISION (inner);
10817
10818 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10819 and obtain 0. */
10820 if (TYPE_UNSIGNED (outer)
10821 /* If we are widening something of an unsigned type, OUTER type
10822 contains all values of INNER type. In particular, both INNER
10823 and OUTER types have zero in common. */
10824 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10825 return build_int_cst (outer, 0);
10826 else
10827 {
10828 /* If we are widening a signed type to another signed type, we
10829 want to obtain -2^(iprec-1). If we are keeping the
10830 precision or narrowing to a signed type, we want to obtain
10831 -2^(oprec-1). */
10832 unsigned prec = oprec > iprec ? iprec : oprec;
10833 return wide_int_to_tree (outer,
10834 wi::mask (prec - 1, true,
10835 TYPE_PRECISION (outer)));
10836 }
10837 }
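
/* Illustrative sketch, not part of GCC and never compiled: concrete bounds
   for a cast from 32-bit signed int to 8-bit unsigned char, assuming the
   usual precisions for those types.  */
#if 0
static void
example_cast_bounds (void)
{
  tree uchar = unsigned_char_type_node;	/* 8 bits, unsigned.  */
  tree sint = integer_type_node;	/* 32 bits, signed.  */
  /* Casting any int to unsigned char yields at most 255 ...  */
  gcc_assert (tree_to_uhwi (upper_bound_in_type (uchar, sint)) == 255);
  /* ... and at least 0, since the outer type is unsigned.  */
  gcc_assert (integer_zerop (lower_bound_in_type (uchar, sint)));
}
#endif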
10838
10839 /* Return nonzero if two operands that are suitable for PHI nodes are
10840 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10841 SSA_NAME or invariant. Note that this is strictly an optimization.
10842 That is, callers of this function can directly call operand_equal_p
10843 and get the same result, only slower. */
10844
10845 int
10846 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10847 {
10848 if (arg0 == arg1)
10849 return 1;
10850 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10851 return 0;
10852 return operand_equal_p (arg0, arg1, 0);
10853 }
10854
10855 /* Returns the number of trailing zeros in the binary representation of X. */
10856
10857 tree
10858 num_ending_zeros (const_tree x)
10859 {
10860 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10861 }
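
/* Illustrative sketch, not part of GCC and never compiled: 40 is 101000 in
   binary, so it has three trailing zeros.  */
#if 0
static void
example_num_ending_zeros (void)
{
  tree x = build_int_cst (integer_type_node, 40);
  gcc_assert (tree_to_shwi (num_ending_zeros (x)) == 3);
}
#endif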
10862
10863
10864 #define WALK_SUBTREE(NODE) \
10865 do \
10866 { \
10867 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10868 if (result) \
10869 return result; \
10870 } \
10871 while (0)
10872
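/* Illustrative sketch, not part of GCC and never compiled: a typical
   client of the tree walker defined below.  The callback is invoked on
   every sub-tree; clearing *WALK_SUBTREES prunes the walk and a non-NULL
   return value stops it early.  */
#if 0
static tree
count_call_exprs_r (tree *tp, int *walk_subtrees, void *data)
{
  int *count = (int *) data;
  if (TREE_CODE (*tp) == CALL_EXPR)
    (*count)++;
  if (TYPE_P (*tp))
    *walk_subtrees = 0;		/* Don't wander into types.  */
  return NULL_TREE;		/* Keep walking.  */
}

static int
count_call_exprs (tree expr)
{
  int count = 0;
  walk_tree_without_duplicates (&expr, count_call_exprs_r, &count);
  return count;
}
#endif
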
10873 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
10874 be walked whenever a type is seen in the tree. The rest of the operands and
10875 the return value are as for walk_tree. */
10876
10877 static tree
10878 walk_type_fields (tree type, walk_tree_fn func, void *data,
10879 hash_set<tree> *pset, walk_tree_lh lh)
10880 {
10881 tree result = NULL_TREE;
10882
10883 switch (TREE_CODE (type))
10884 {
10885 case POINTER_TYPE:
10886 case REFERENCE_TYPE:
10887 case VECTOR_TYPE:
10888 /* We have to worry about mutually recursive pointers. These can't
10889 be written in C. They can in Ada. It's pathological, but
10890 there's an ACATS test (c38102a) that checks it. Deal with this
10891 by checking if we're pointing to another pointer, that one
10892 points to another pointer, that one does too, and we have no htab.
10893 If so, get a hash table. We check three levels deep to avoid
10894 the cost of the hash table if we don't need one. */
10895 if (POINTER_TYPE_P (TREE_TYPE (type))
10896 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10897 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10898 && !pset)
10899 {
10900 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10901 func, data);
10902 if (result)
10903 return result;
10904
10905 break;
10906 }
10907
10908 /* ... fall through ... */
10909
10910 case COMPLEX_TYPE:
10911 WALK_SUBTREE (TREE_TYPE (type));
10912 break;
10913
10914 case METHOD_TYPE:
10915 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10916
10917 /* Fall through. */
10918
10919 case FUNCTION_TYPE:
10920 WALK_SUBTREE (TREE_TYPE (type));
10921 {
10922 tree arg;
10923
10924 /* We never want to walk into default arguments. */
10925 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10926 WALK_SUBTREE (TREE_VALUE (arg));
10927 }
10928 break;
10929
10930 case ARRAY_TYPE:
10931 /* Don't follow this node's type if it is a pointer, for fear that
10932 we'll have infinite recursion. If we have a PSET, then we
10933 need not fear. */
10934 if (pset
10935 || (!POINTER_TYPE_P (TREE_TYPE (type))
10936 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10937 WALK_SUBTREE (TREE_TYPE (type));
10938 WALK_SUBTREE (TYPE_DOMAIN (type));
10939 break;
10940
10941 case OFFSET_TYPE:
10942 WALK_SUBTREE (TREE_TYPE (type));
10943 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10944 break;
10945
10946 default:
10947 break;
10948 }
10949
10950 return NULL_TREE;
10951 }
10952
10953 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10954 called with the DATA and the address of each sub-tree. If FUNC returns a
10955 non-NULL value, the traversal is stopped, and the value returned by FUNC
10956 is returned. If PSET is non-NULL it is used to record the nodes visited,
10957 and to avoid visiting a node more than once. */
10958
10959 tree
10960 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10961 hash_set<tree> *pset, walk_tree_lh lh)
10962 {
10963 enum tree_code code;
10964 int walk_subtrees;
10965 tree result;
10966
10967 #define WALK_SUBTREE_TAIL(NODE) \
10968 do \
10969 { \
10970 tp = & (NODE); \
10971 goto tail_recurse; \
10972 } \
10973 while (0)
10974
10975 tail_recurse:
10976 /* Skip empty subtrees. */
10977 if (!*tp)
10978 return NULL_TREE;
10979
10980 /* Don't walk the same tree twice, if the user has requested
10981 that we avoid doing so. */
10982 if (pset && pset->add (*tp))
10983 return NULL_TREE;
10984
10985 /* Call the function. */
10986 walk_subtrees = 1;
10987 result = (*func) (tp, &walk_subtrees, data);
10988
10989 /* If we found something, return it. */
10990 if (result)
10991 return result;
10992
10993 code = TREE_CODE (*tp);
10994
10995 /* Even if we didn't, FUNC may have decided that there was nothing
10996 interesting below this point in the tree. */
10997 if (!walk_subtrees)
10998 {
10999 /* But we still need to check our siblings. */
11000 if (code == TREE_LIST)
11001 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11002 else if (code == OMP_CLAUSE)
11003 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11004 else
11005 return NULL_TREE;
11006 }
11007
11008 if (lh)
11009 {
11010 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11011 if (result || !walk_subtrees)
11012 return result;
11013 }
11014
11015 switch (code)
11016 {
11017 case ERROR_MARK:
11018 case IDENTIFIER_NODE:
11019 case INTEGER_CST:
11020 case REAL_CST:
11021 case FIXED_CST:
11022 case VECTOR_CST:
11023 case STRING_CST:
11024 case BLOCK:
11025 case PLACEHOLDER_EXPR:
11026 case SSA_NAME:
11027 case FIELD_DECL:
11028 case RESULT_DECL:
11029 /* None of these have subtrees other than those already walked
11030 above. */
11031 break;
11032
11033 case TREE_LIST:
11034 WALK_SUBTREE (TREE_VALUE (*tp));
11035 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11036 break;
11037
11038 case TREE_VEC:
11039 {
11040 int len = TREE_VEC_LENGTH (*tp);
11041
11042 if (len == 0)
11043 break;
11044
11045 /* Walk all elements but the first. */
11046 while (--len)
11047 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11048
11049 /* Now walk the first one as a tail call. */
11050 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11051 }
11052
11053 case COMPLEX_CST:
11054 WALK_SUBTREE (TREE_REALPART (*tp));
11055 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11056
11057 case CONSTRUCTOR:
11058 {
11059 unsigned HOST_WIDE_INT idx;
11060 constructor_elt *ce;
11061
11062 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11063 idx++)
11064 WALK_SUBTREE (ce->value);
11065 }
11066 break;
11067
11068 case SAVE_EXPR:
11069 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11070
11071 case BIND_EXPR:
11072 {
11073 tree decl;
11074 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11075 {
11076 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11077 into declarations that are just mentioned, rather than
11078 declared; they don't really belong to this part of the tree.
11079 And, we can see cycles: the initializer for a declaration
11080 can refer to the declaration itself. */
11081 WALK_SUBTREE (DECL_INITIAL (decl));
11082 WALK_SUBTREE (DECL_SIZE (decl));
11083 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11084 }
11085 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11086 }
11087
11088 case STATEMENT_LIST:
11089 {
11090 tree_stmt_iterator i;
11091 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11092 WALK_SUBTREE (*tsi_stmt_ptr (i));
11093 }
11094 break;
11095
11096 case OMP_CLAUSE:
11097 switch (OMP_CLAUSE_CODE (*tp))
11098 {
11099 case OMP_CLAUSE_PRIVATE:
11100 case OMP_CLAUSE_SHARED:
11101 case OMP_CLAUSE_FIRSTPRIVATE:
11102 case OMP_CLAUSE_COPYIN:
11103 case OMP_CLAUSE_COPYPRIVATE:
11104 case OMP_CLAUSE_FINAL:
11105 case OMP_CLAUSE_IF:
11106 case OMP_CLAUSE_NUM_THREADS:
11107 case OMP_CLAUSE_SCHEDULE:
11108 case OMP_CLAUSE_UNIFORM:
11109 case OMP_CLAUSE_DEPEND:
11110 case OMP_CLAUSE_NUM_TEAMS:
11111 case OMP_CLAUSE_THREAD_LIMIT:
11112 case OMP_CLAUSE_DEVICE:
11113 case OMP_CLAUSE_DIST_SCHEDULE:
11114 case OMP_CLAUSE_SAFELEN:
11115 case OMP_CLAUSE_SIMDLEN:
11116 case OMP_CLAUSE__LOOPTEMP_:
11117 case OMP_CLAUSE__SIMDUID_:
11118 case OMP_CLAUSE__CILK_FOR_COUNT_:
11119 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11120 /* FALLTHRU */
11121
11122 case OMP_CLAUSE_NOWAIT:
11123 case OMP_CLAUSE_ORDERED:
11124 case OMP_CLAUSE_DEFAULT:
11125 case OMP_CLAUSE_UNTIED:
11126 case OMP_CLAUSE_MERGEABLE:
11127 case OMP_CLAUSE_PROC_BIND:
11128 case OMP_CLAUSE_INBRANCH:
11129 case OMP_CLAUSE_NOTINBRANCH:
11130 case OMP_CLAUSE_FOR:
11131 case OMP_CLAUSE_PARALLEL:
11132 case OMP_CLAUSE_SECTIONS:
11133 case OMP_CLAUSE_TASKGROUP:
11134 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11135
11136 case OMP_CLAUSE_LASTPRIVATE:
11137 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11138 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11139 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11140
11141 case OMP_CLAUSE_COLLAPSE:
11142 {
11143 int i;
11144 for (i = 0; i < 3; i++)
11145 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11146 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11147 }
11148
11149 case OMP_CLAUSE_LINEAR:
11150 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11151 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11152 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11153 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11154
11155 case OMP_CLAUSE_ALIGNED:
11156 case OMP_CLAUSE_FROM:
11157 case OMP_CLAUSE_TO:
11158 case OMP_CLAUSE_MAP:
11159 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11160 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11161 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11162
11163 case OMP_CLAUSE_REDUCTION:
11164 {
11165 int i;
11166 for (i = 0; i < 4; i++)
11167 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11168 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11169 }
11170
11171 default:
11172 gcc_unreachable ();
11173 }
11174 break;
11175
11176 case TARGET_EXPR:
11177 {
11178 int i, len;
11179
11180 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11181 But, we only want to walk once. */
11182 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11183 for (i = 0; i < len; ++i)
11184 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11185 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11186 }
11187
11188 case DECL_EXPR:
11189 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11190 defining. We only want to walk into these fields of a type in this
11191 case and not in the general case of a mere reference to the type.
11192
11193 The criterion is as follows: if the field can be an expression, it
11194 must be walked only here. This should be in keeping with the fields
11195 that are directly gimplified in gimplify_type_sizes in order for the
11196 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11197 variable-sized types.
11198
11199 Note that DECLs get walked as part of processing the BIND_EXPR. */
11200 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11201 {
11202 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11203 if (TREE_CODE (*type_p) == ERROR_MARK)
11204 return NULL_TREE;
11205
11206 /* Call the function for the type. See if it returns anything or
11207 doesn't want us to continue. If we are to continue, walk both
11208 the normal fields and those for the declaration case. */
11209 result = (*func) (type_p, &walk_subtrees, data);
11210 if (result || !walk_subtrees)
11211 return result;
11212
11213 /* But do not walk a pointed-to type since it may itself need to
11214 be walked in the declaration case if it isn't anonymous. */
11215 if (!POINTER_TYPE_P (*type_p))
11216 {
11217 result = walk_type_fields (*type_p, func, data, pset, lh);
11218 if (result)
11219 return result;
11220 }
11221
11222 /* If this is a record type, also walk the fields. */
11223 if (RECORD_OR_UNION_TYPE_P (*type_p))
11224 {
11225 tree field;
11226
11227 for (field = TYPE_FIELDS (*type_p); field;
11228 field = DECL_CHAIN (field))
11229 {
11230 /* We'd like to look at the type of the field, but we can
11231 easily get infinite recursion. So assume it's pointed
11232 to elsewhere in the tree. Also, ignore things that
11233 aren't fields. */
11234 if (TREE_CODE (field) != FIELD_DECL)
11235 continue;
11236
11237 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11238 WALK_SUBTREE (DECL_SIZE (field));
11239 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11240 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11241 WALK_SUBTREE (DECL_QUALIFIER (field));
11242 }
11243 }
11244
11245 /* Same for scalar types. */
11246 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11247 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11248 || TREE_CODE (*type_p) == INTEGER_TYPE
11249 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11250 || TREE_CODE (*type_p) == REAL_TYPE)
11251 {
11252 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11253 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11254 }
11255
11256 WALK_SUBTREE (TYPE_SIZE (*type_p));
11257 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11258 }
11259 /* FALLTHRU */
11260
11261 default:
11262 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11263 {
11264 int i, len;
11265
11266 /* Walk over all the sub-trees of this operand. */
11267 len = TREE_OPERAND_LENGTH (*tp);
11268
11269 /* Go through the subtrees. We need to do this in forward order so
11270 that the scope of a FOR_EXPR is handled properly. */
11271 if (len)
11272 {
11273 for (i = 0; i < len - 1; ++i)
11274 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11275 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11276 }
11277 }
11278 /* If this is a type, walk the needed fields in the type. */
11279 else if (TYPE_P (*tp))
11280 return walk_type_fields (*tp, func, data, pset, lh);
11281 break;
11282 }
11283
11284 /* We didn't find what we were looking for. */
11285 return NULL_TREE;
11286
11287 #undef WALK_SUBTREE_TAIL
11288 }
11289 #undef WALK_SUBTREE
11290
11291 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11292
11293 tree
11294 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11295 walk_tree_lh lh)
11296 {
11297 tree result;
11298
11299 hash_set<tree> pset;
11300 result = walk_tree_1 (tp, func, data, &pset, lh);
11301 return result;
11302 }
11303
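/* Illustrative sketch (not built): a minimal walk_tree_fn callback showing
   how walk_tree_without_duplicates_1 is typically driven.  The callback
   name and the CALL_EXPR-counting purpose are invented for illustration;
   only the walk_tree_fn signature and the walk_tree_without_duplicates
   macro from tree.h are assumed.  */
#if 0
static tree
count_call_exprs_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(int *) data;
  /* Returning NULL_TREE keeps the walk going; returning *tp would stop the
     walk and propagate that node back to the caller.  */
  return NULL_TREE;
}

static int
count_call_exprs (tree t)
{
  int count = 0;
  walk_tree_without_duplicates (&t, count_call_exprs_r, &count);
  return count;
}
#endif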
11304
11305 tree
11306 tree_block (tree t)
11307 {
11308 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11309
11310 if (IS_EXPR_CODE_CLASS (c))
11311 return LOCATION_BLOCK (t->exp.locus);
11312 gcc_unreachable ();
11313 return NULL;
11314 }
11315
11316 void
11317 tree_set_block (tree t, tree b)
11318 {
11319 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11320
11321 if (IS_EXPR_CODE_CLASS (c))
11322 {
11323 if (b)
11324 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11325 else
11326 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11327 }
11328 else
11329 gcc_unreachable ();
11330 }
11331
11332 /* Create a nameless artificial label and put it in the current
11333 function context. The label has a location of LOC. Returns the
11334 newly created label. */
11335
11336 tree
11337 create_artificial_label (location_t loc)
11338 {
11339 tree lab = build_decl (loc,
11340 LABEL_DECL, NULL_TREE, void_type_node);
11341
11342 DECL_ARTIFICIAL (lab) = 1;
11343 DECL_IGNORED_P (lab) = 1;
11344 DECL_CONTEXT (lab) = current_function_decl;
11345 return lab;
11346 }
11347
11348 /* Given a tree, try to return a useful variable name that we can use
11349 to prefix a temporary that is being assigned the value of the tree.
11350 E.g., given <temp> = &A, return A. */
11351
11352 const char *
11353 get_name (tree t)
11354 {
11355 tree stripped_decl;
11356
11357 stripped_decl = t;
11358 STRIP_NOPS (stripped_decl);
11359 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11360 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11361 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11362 {
11363 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11364 if (!name)
11365 return NULL;
11366 return IDENTIFIER_POINTER (name);
11367 }
11368 else
11369 {
11370 switch (TREE_CODE (stripped_decl))
11371 {
11372 case ADDR_EXPR:
11373 return get_name (TREE_OPERAND (stripped_decl, 0));
11374 default:
11375 return NULL;
11376 }
11377 }
11378 }
11379
11380 /* Return true if FNTYPE has a variable argument list. */
11381
11382 bool
11383 stdarg_p (const_tree fntype)
11384 {
11385 function_args_iterator args_iter;
11386 tree n = NULL_TREE, t;
11387
11388 if (!fntype)
11389 return false;
11390
11391 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11392 {
11393 n = t;
11394 }
11395
11396 return n != NULL_TREE && n != void_type_node;
11397 }
11398
11399 /* Return true if FNTYPE has a prototype. */
11400
11401 bool
11402 prototype_p (tree fntype)
11403 {
11404 tree t;
11405
11406 gcc_assert (fntype != NULL_TREE);
11407
11408 t = TYPE_ARG_TYPES (fntype);
11409 return (t != NULL_TREE);
11410 }
11411
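/* Illustrative sketch (not built): how stdarg_p and prototype_p classify a
   couple of function types.  Only build_function_type_list and
   build_varargs_function_type_list from tree.h are assumed; the helper
   name is invented.  */
#if 0
static void
stdarg_p_example (void)
{
  /* void f (int) -- prototyped, fixed argument list.  */
  tree fixed = build_function_type_list (void_type_node, integer_type_node,
                                         NULL_TREE);
  /* void g (int, ...) -- prototyped, variable argument list.  */
  tree varargs = build_varargs_function_type_list (void_type_node,
                                                   integer_type_node,
                                                   NULL_TREE);
  gcc_assert (prototype_p (fixed) && !stdarg_p (fixed));
  gcc_assert (prototype_p (varargs) && stdarg_p (varargs));
}
#endif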
11412 /* If BLOCK is inlined from an __attribute__((__artificial__))
11413 routine, return a pointer to the location from which it has been
11414 called. */
11415 location_t *
11416 block_nonartificial_location (tree block)
11417 {
11418 location_t *ret = NULL;
11419
11420 while (block && TREE_CODE (block) == BLOCK
11421 && BLOCK_ABSTRACT_ORIGIN (block))
11422 {
11423 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11424
11425 while (TREE_CODE (ao) == BLOCK
11426 && BLOCK_ABSTRACT_ORIGIN (ao)
11427 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11428 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11429
11430 if (TREE_CODE (ao) == FUNCTION_DECL)
11431 {
11432 /* If AO is an artificial inline, point RET to the
11433 call site locus at which it has been inlined and continue
11434 the loop, in case AO's caller is also an artificial
11435 inline. */
11436 if (DECL_DECLARED_INLINE_P (ao)
11437 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11438 ret = &BLOCK_SOURCE_LOCATION (block);
11439 else
11440 break;
11441 }
11442 else if (TREE_CODE (ao) != BLOCK)
11443 break;
11444
11445 block = BLOCK_SUPERCONTEXT (block);
11446 }
11447 return ret;
11448 }
11449
11450
11451 /* If EXP is inlined from an __attribute__((__artificial__))
11452 function, return the location of the original call expression. */
11453
11454 location_t
11455 tree_nonartificial_location (tree exp)
11456 {
11457 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11458
11459 if (loc)
11460 return *loc;
11461 else
11462 return EXPR_LOCATION (exp);
11463 }
11464
11465
11466 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11467 nodes. */
11468
11469 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11470
11471 static hashval_t
11472 cl_option_hash_hash (const void *x)
11473 {
11474 const_tree const t = (const_tree) x;
11475 const char *p;
11476 size_t i;
11477 size_t len = 0;
11478 hashval_t hash = 0;
11479
11480 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11481 {
11482 p = (const char *)TREE_OPTIMIZATION (t);
11483 len = sizeof (struct cl_optimization);
11484 }
11485
11486 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11487 {
11488 p = (const char *)TREE_TARGET_OPTION (t);
11489 len = sizeof (struct cl_target_option);
11490 }
11491
11492 else
11493 gcc_unreachable ();
11494
11495 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11496 something else. */
11497 for (i = 0; i < len; i++)
11498 if (p[i])
11499 hash = (hash << 4) ^ ((i << 2) | p[i]);
11500
11501 return hash;
11502 }
11503
11504 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11505 TARGET_OPTION tree node) is the same as that given by *Y, a node of
11506 the same kind. */
11507
11508 static int
11509 cl_option_hash_eq (const void *x, const void *y)
11510 {
11511 const_tree const xt = (const_tree) x;
11512 const_tree const yt = (const_tree) y;
11513 const char *xp;
11514 const char *yp;
11515 size_t len;
11516
11517 if (TREE_CODE (xt) != TREE_CODE (yt))
11518 return 0;
11519
11520 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11521 {
11522 xp = (const char *)TREE_OPTIMIZATION (xt);
11523 yp = (const char *)TREE_OPTIMIZATION (yt);
11524 len = sizeof (struct cl_optimization);
11525 }
11526
11527 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11528 {
11529 xp = (const char *)TREE_TARGET_OPTION (xt);
11530 yp = (const char *)TREE_TARGET_OPTION (yt);
11531 len = sizeof (struct cl_target_option);
11532 }
11533
11534 else
11535 gcc_unreachable ();
11536
11537 return (memcmp (xp, yp, len) == 0);
11538 }
11539
11540 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11541
11542 tree
11543 build_optimization_node (struct gcc_options *opts)
11544 {
11545 tree t;
11546 void **slot;
11547
11548 /* Use the cache of optimization nodes. */
11549
11550 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11551 opts);
11552
11553 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11554 t = (tree) *slot;
11555 if (!t)
11556 {
11557 /* Insert this one into the hash table. */
11558 t = cl_optimization_node;
11559 *slot = t;
11560
11561 /* Make a new node for next time round. */
11562 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11563 }
11564
11565 return t;
11566 }
11567
11568 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11569
11570 tree
11571 build_target_option_node (struct gcc_options *opts)
11572 {
11573 tree t;
11574 void **slot;
11575
11576 /* Use the cache of target option nodes. */
11577
11578 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11579 opts);
11580
11581 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11582 t = (tree) *slot;
11583 if (!t)
11584 {
11585 /* Insert this one into the hash table. */
11586 t = cl_target_option_node;
11587 *slot = t;
11588
11589 /* Make a new node for next time round. */
11590 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11591 }
11592
11593 return t;
11594 }
11595
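/* Illustrative sketch (not built): both builders snapshot an options struct
   into a shared, hashed tree node, so identical option sets yield
   pointer-equal nodes.  Assumes the global_options variable from options.h;
   the helper name is invented.  */
#if 0
static void
option_node_sharing_example (void)
{
  tree opt1 = build_optimization_node (&global_options);
  tree opt2 = build_optimization_node (&global_options);
  tree tgt = build_target_option_node (&global_options);
  gcc_assert (opt1 == opt2);
  gcc_assert (TREE_CODE (tgt) == TARGET_OPTION_NODE);
}
#endif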
11596 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11597 Called through htab_traverse. */
11598
11599 static int
11600 prepare_target_option_node_for_pch (void **slot, void *)
11601 {
11602 tree node = (tree) *slot;
11603 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11604 TREE_TARGET_GLOBALS (node) = NULL;
11605 return 1;
11606 }
11607
11608 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11609 so that they aren't saved during PCH writing. */
11610
11611 void
11612 prepare_target_option_nodes_for_pch (void)
11613 {
11614 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11615 NULL);
11616 }
11617
11618 /* Determine the "ultimate origin" of a block. The block may be an inlined
11619 instance of an inlined instance of a block which is local to an inline
11620 function, so we have to trace all of the way back through the origin chain
11621 to find out what sort of node actually served as the original seed for the
11622 given block. */
11623
11624 tree
11625 block_ultimate_origin (const_tree block)
11626 {
11627 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11628
11629 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11630 we're trying to output the abstract instance of this function. */
11631 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11632 return NULL_TREE;
11633
11634 if (immediate_origin == NULL_TREE)
11635 return NULL_TREE;
11636 else
11637 {
11638 tree ret_val;
11639 tree lookahead = immediate_origin;
11640
11641 do
11642 {
11643 ret_val = lookahead;
11644 lookahead = (TREE_CODE (ret_val) == BLOCK
11645 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11646 }
11647 while (lookahead != NULL && lookahead != ret_val);
11648
11649 /* The block's abstract origin chain may not be the *ultimate* origin of
11650 the block. It could lead to a DECL that has an abstract origin set.
11651 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11652 will give us if it has one). Note that DECL's abstract origins are
11653 supposed to be the most distant ancestor (or so decl_ultimate_origin
11654 claims), so we don't need to loop following the DECL origins. */
11655 if (DECL_P (ret_val))
11656 return DECL_ORIGIN (ret_val);
11657
11658 return ret_val;
11659 }
11660 }
11661
11662 /* Return true iff conversion in EXP generates no instruction. Mark
11663 it inline so that we fully inline into the stripping functions even
11664 though we have two uses of this function. */
11665
11666 static inline bool
11667 tree_nop_conversion (const_tree exp)
11668 {
11669 tree outer_type, inner_type;
11670
11671 if (!CONVERT_EXPR_P (exp)
11672 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11673 return false;
11674 if (TREE_OPERAND (exp, 0) == error_mark_node)
11675 return false;
11676
11677 outer_type = TREE_TYPE (exp);
11678 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11679
11680 if (!inner_type)
11681 return false;
11682
11683 /* Use precision rather than machine mode when we can, which gives
11684 the correct answer even for submode (bit-field) types. */
11685 if ((INTEGRAL_TYPE_P (outer_type)
11686 || POINTER_TYPE_P (outer_type)
11687 || TREE_CODE (outer_type) == OFFSET_TYPE)
11688 && (INTEGRAL_TYPE_P (inner_type)
11689 || POINTER_TYPE_P (inner_type)
11690 || TREE_CODE (inner_type) == OFFSET_TYPE))
11691 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11692
11693 /* Otherwise fall back on comparing machine modes (e.g. for
11694 aggregate types, floats). */
11695 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11696 }
11697
11698 /* Return true iff conversion in EXP generates no instruction. Don't
11699 consider conversions changing the signedness. */
11700
11701 static bool
11702 tree_sign_nop_conversion (const_tree exp)
11703 {
11704 tree outer_type, inner_type;
11705
11706 if (!tree_nop_conversion (exp))
11707 return false;
11708
11709 outer_type = TREE_TYPE (exp);
11710 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11711
11712 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11713 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11714 }
11715
11716 /* Strip conversions from EXP according to tree_nop_conversion and
11717 return the resulting expression. */
11718
11719 tree
11720 tree_strip_nop_conversions (tree exp)
11721 {
11722 while (tree_nop_conversion (exp))
11723 exp = TREE_OPERAND (exp, 0);
11724 return exp;
11725 }
11726
11727 /* Strip conversions from EXP according to tree_sign_nop_conversion
11728 and return the resulting expression. */
11729
11730 tree
11731 tree_strip_sign_nop_conversions (tree exp)
11732 {
11733 while (tree_sign_nop_conversion (exp))
11734 exp = TREE_OPERAND (exp, 0);
11735 return exp;
11736 }
11737
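/* Illustrative sketch (not built): a signedness-only cast is stripped by
   tree_strip_nop_conversions but kept by tree_strip_sign_nop_conversions,
   while a widening cast is kept by both.  X stands for some expression of
   type unsigned int; the helper name is invented and the widening case
   assumes a target where long is wider than int.  */
#if 0
static void
nop_conversion_example (tree x /* an expression of type unsigned int */)
{
  /* Same precision, different signedness: stripped by
     tree_strip_nop_conversions, kept by tree_strip_sign_nop_conversions.  */
  tree as_int = fold_convert (integer_type_node, x);
  tree a = tree_strip_nop_conversions (as_int);       /* typically == x */
  tree b = tree_strip_sign_nop_conversions (as_int);  /* == as_int */

  /* Widening (long wider than int): not a nop conversion, kept by both.  */
  tree as_long = fold_convert (long_integer_type_node, x);
  tree c = tree_strip_nop_conversions (as_long);      /* == as_long */
  (void) a; (void) b; (void) c;
}
#endif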
11738 /* Strip any floating-point extensions from EXP and return the result. */
11739 tree
11740 strip_float_extensions (tree exp)
11741 {
11742 tree sub, expt, subt;
11743
11744 /* For a floating-point constant, look up the narrowest type that can hold
11745 it properly and handle it like (type)(narrowest_type)constant.
11746 This way we can optimize, for instance, a=a*2.0 where "a" is float
11747 but 2.0 is a double constant. */
11748 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11749 {
11750 REAL_VALUE_TYPE orig;
11751 tree type = NULL;
11752
11753 orig = TREE_REAL_CST (exp);
11754 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11755 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11756 type = float_type_node;
11757 else if (TYPE_PRECISION (TREE_TYPE (exp))
11758 > TYPE_PRECISION (double_type_node)
11759 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11760 type = double_type_node;
11761 if (type)
11762 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11763 }
11764
11765 if (!CONVERT_EXPR_P (exp))
11766 return exp;
11767
11768 sub = TREE_OPERAND (exp, 0);
11769 subt = TREE_TYPE (sub);
11770 expt = TREE_TYPE (exp);
11771
11772 if (!FLOAT_TYPE_P (subt))
11773 return exp;
11774
11775 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11776 return exp;
11777
11778 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11779 return exp;
11780
11781 return strip_float_extensions (sub);
11782 }
11783
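/* Illustrative sketch (not built): strip_float_extensions peels a widening
   float->double conversion off a float operand, and narrows a double
   REAL_CST such as 2.0 to float when the value is exactly representable.
   F stands for some expression of type float; the helper name is
   invented.  */
#if 0
static void
strip_float_extensions_example (tree f /* an expression of type float */)
{
  tree widened = fold_convert (double_type_node, f);
  tree stripped = strip_float_extensions (widened);   /* typically == f */

  tree two = build_real (double_type_node, dconst2);
  tree narrowed = strip_float_extensions (two);       /* float 2.0 */
  (void) stripped; (void) narrowed;
}
#endif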
11784 /* Strip out all handled components that produce invariant
11785 offsets. */
11786
11787 const_tree
11788 strip_invariant_refs (const_tree op)
11789 {
11790 while (handled_component_p (op))
11791 {
11792 switch (TREE_CODE (op))
11793 {
11794 case ARRAY_REF:
11795 case ARRAY_RANGE_REF:
11796 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11797 || TREE_OPERAND (op, 2) != NULL_TREE
11798 || TREE_OPERAND (op, 3) != NULL_TREE)
11799 return NULL;
11800 break;
11801
11802 case COMPONENT_REF:
11803 if (TREE_OPERAND (op, 2) != NULL_TREE)
11804 return NULL;
11805 break;
11806
11807 default:;
11808 }
11809 op = TREE_OPERAND (op, 0);
11810 }
11811
11812 return op;
11813 }
11814
11815 static GTY(()) tree gcc_eh_personality_decl;
11816
11817 /* Return the GCC personality function decl. */
11818
11819 tree
11820 lhd_gcc_personality (void)
11821 {
11822 if (!gcc_eh_personality_decl)
11823 gcc_eh_personality_decl = build_personality_function ("gcc");
11824 return gcc_eh_personality_decl;
11825 }
11826
11827 /* TARGET is a call target of a GIMPLE call statement
11828 (obtained by gimple_call_fn). Return true if it is an
11829 OBJ_TYPE_REF representing a virtual call to a C++ method.
11830 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11831 through a cast, where the middle-end devirtualization machinery
11832 can't apply.) */
11833
11834 bool
11835 virtual_method_call_p (tree target)
11836 {
11837 if (TREE_CODE (target) != OBJ_TYPE_REF)
11838 return false;
11839 target = TREE_TYPE (target);
11840 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11841 target = TREE_TYPE (target);
11842 if (TREE_CODE (target) == FUNCTION_TYPE)
11843 return false;
11844 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11845 return true;
11846 }
11847
11848 /* REF is an OBJ_TYPE_REF; return the class the reference corresponds to. */
11849
11850 tree
11851 obj_type_ref_class (tree ref)
11852 {
11853 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11854 ref = TREE_TYPE (ref);
11855 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11856 ref = TREE_TYPE (ref);
11857 /* We look for the type THIS points to. ObjC also builds
11858 OBJ_TYPE_REF with non-method calls; their first parameter
11859 ID, however, also corresponds to the class type. */
11860 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11861 || TREE_CODE (ref) == FUNCTION_TYPE);
11862 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11863 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11864 return TREE_TYPE (ref);
11865 }
11866
11867 /* Return true if T is in anonymous namespace. */
11868
11869 bool
11870 type_in_anonymous_namespace_p (const_tree t)
11871 {
11872 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11873 builtin types; those have CONTEXT NULL. */
11874 if (!TYPE_CONTEXT (t))
11875 return false;
11876 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11877 }
11878
11879 /* Try to find a base info of BINFO that would have its field decl at offset
11880 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11881 found, return it; otherwise return NULL_TREE. */
11882
11883 tree
11884 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11885 {
11886 tree type = BINFO_TYPE (binfo);
11887
11888 while (true)
11889 {
11890 HOST_WIDE_INT pos, size;
11891 tree fld;
11892 int i;
11893
11894 if (types_same_for_odr (type, expected_type))
11895 return binfo;
11896 if (offset < 0)
11897 return NULL_TREE;
11898
11899 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11900 {
11901 if (TREE_CODE (fld) != FIELD_DECL)
11902 continue;
11903
11904 pos = int_bit_position (fld);
11905 size = tree_to_uhwi (DECL_SIZE (fld));
11906 if (pos <= offset && (pos + size) > offset)
11907 break;
11908 }
11909 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11910 return NULL_TREE;
11911
11912 if (!DECL_ARTIFICIAL (fld))
11913 {
11914 binfo = TYPE_BINFO (TREE_TYPE (fld));
11915 if (!binfo)
11916 return NULL_TREE;
11917 }
11918 /* Offset 0 indicates the primary base, whose vtable contents are
11919 represented in the binfo for the derived class. */
11920 else if (offset != 0)
11921 {
11922 tree base_binfo, binfo2 = binfo;
11923
11924 /* Find the BINFO corresponding to FLD. This is made a bit harder
11925 by the fact that with virtual inheritance we may need to walk down
11926 the non-virtual inheritance chain. */
11927 while (true)
11928 {
11929 tree containing_binfo = NULL, found_binfo = NULL;
11930 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11931 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11932 {
11933 found_binfo = base_binfo;
11934 break;
11935 }
11936 else
11937 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11938 - tree_to_shwi (BINFO_OFFSET (binfo)))
11939 * BITS_PER_UNIT < pos
11940 /* Rule out types with no virtual methods, or we can get confused
11941 here by zero-sized bases. */
11942 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11943 && (!containing_binfo
11944 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11945 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11946 containing_binfo = base_binfo;
11947 if (found_binfo)
11948 {
11949 binfo = found_binfo;
11950 break;
11951 }
11952 if (!containing_binfo)
11953 return NULL_TREE;
11954 binfo2 = containing_binfo;
11955 }
11956 }
11957
11958 type = TREE_TYPE (fld);
11959 offset -= pos;
11960 }
11961 }
11962
11963 /* Returns true if X is a typedef decl. */
11964
11965 bool
11966 is_typedef_decl (tree x)
11967 {
11968 return (x && TREE_CODE (x) == TYPE_DECL
11969 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11970 }
11971
11972 /* Returns true iff TYPE is a type variant created for a typedef. */
11973
11974 bool
11975 typedef_variant_p (tree type)
11976 {
11977 return is_typedef_decl (TYPE_NAME (type));
11978 }
11979
11980 /* Warn about a use of an identifier which was marked deprecated. */
11981 void
11982 warn_deprecated_use (tree node, tree attr)
11983 {
11984 const char *msg;
11985
11986 if (node == 0 || !warn_deprecated_decl)
11987 return;
11988
11989 if (!attr)
11990 {
11991 if (DECL_P (node))
11992 attr = DECL_ATTRIBUTES (node);
11993 else if (TYPE_P (node))
11994 {
11995 tree decl = TYPE_STUB_DECL (node);
11996 if (decl)
11997 attr = lookup_attribute ("deprecated",
11998 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11999 }
12000 }
12001
12002 if (attr)
12003 attr = lookup_attribute ("deprecated", attr);
12004
12005 if (attr)
12006 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12007 else
12008 msg = NULL;
12009
12010 if (DECL_P (node))
12011 {
12012 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12013 if (msg)
12014 warning (OPT_Wdeprecated_declarations,
12015 "%qD is deprecated (declared at %r%s:%d%R): %s",
12016 node, "locus", xloc.file, xloc.line, msg);
12017 else
12018 warning (OPT_Wdeprecated_declarations,
12019 "%qD is deprecated (declared at %r%s:%d%R)",
12020 node, "locus", xloc.file, xloc.line);
12021 }
12022 else if (TYPE_P (node))
12023 {
12024 tree what = NULL_TREE;
12025 tree decl = TYPE_STUB_DECL (node);
12026
12027 if (TYPE_NAME (node))
12028 {
12029 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12030 what = TYPE_NAME (node);
12031 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12032 && DECL_NAME (TYPE_NAME (node)))
12033 what = DECL_NAME (TYPE_NAME (node));
12034 }
12035
12036 if (decl)
12037 {
12038 expanded_location xloc
12039 = expand_location (DECL_SOURCE_LOCATION (decl));
12040 if (what)
12041 {
12042 if (msg)
12043 warning (OPT_Wdeprecated_declarations,
12044 "%qE is deprecated (declared at %r%s:%d%R): %s",
12045 what, "locus", xloc.file, xloc.line, msg);
12046 else
12047 warning (OPT_Wdeprecated_declarations,
12048 "%qE is deprecated (declared at %r%s:%d%R)",
12049 what, "locus", xloc.file, xloc.line);
12050 }
12051 else
12052 {
12053 if (msg)
12054 warning (OPT_Wdeprecated_declarations,
12055 "type is deprecated (declared at %r%s:%d%R): %s",
12056 "locus", xloc.file, xloc.line, msg);
12057 else
12058 warning (OPT_Wdeprecated_declarations,
12059 "type is deprecated (declared at %r%s:%d%R)",
12060 "locus", xloc.file, xloc.line);
12061 }
12062 }
12063 else
12064 {
12065 if (what)
12066 {
12067 if (msg)
12068 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12069 what, msg);
12070 else
12071 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12072 }
12073 else
12074 {
12075 if (msg)
12076 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12077 msg);
12078 else
12079 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12080 }
12081 }
12082 }
12083 }
12084
12085 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12086 somewhere in it. */
12087
12088 bool
12089 contains_bitfld_component_ref_p (const_tree ref)
12090 {
12091 while (handled_component_p (ref))
12092 {
12093 if (TREE_CODE (ref) == COMPONENT_REF
12094 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12095 return true;
12096 ref = TREE_OPERAND (ref, 0);
12097 }
12098
12099 return false;
12100 }
12101
12102 /* Try to determine whether a TRY_CATCH expression can fall through.
12103 This is a subroutine of block_may_fallthru. */
12104
12105 static bool
12106 try_catch_may_fallthru (const_tree stmt)
12107 {
12108 tree_stmt_iterator i;
12109
12110 /* If the TRY block can fall through, the whole TRY_CATCH can
12111 fall through. */
12112 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12113 return true;
12114
12115 i = tsi_start (TREE_OPERAND (stmt, 1));
12116 switch (TREE_CODE (tsi_stmt (i)))
12117 {
12118 case CATCH_EXPR:
12119 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12120 catch expression and a body. The whole TRY_CATCH may fall
12121 through iff any of the catch bodies falls through. */
12122 for (; !tsi_end_p (i); tsi_next (&i))
12123 {
12124 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12125 return true;
12126 }
12127 return false;
12128
12129 case EH_FILTER_EXPR:
12130 /* The exception filter expression only matters if there is an
12131 exception. If the exception does not match EH_FILTER_TYPES,
12132 we will execute EH_FILTER_FAILURE, and we will fall through
12133 if that falls through. If the exception does match
12134 EH_FILTER_TYPES, the stack unwinder will continue up the
12135 stack, so we will not fall through. We don't know whether we
12136 will throw an exception which matches EH_FILTER_TYPES or not,
12137 so we just ignore EH_FILTER_TYPES and assume that we might
12138 throw an exception which doesn't match. */
12139 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12140
12141 default:
12142 /* This case represents statements to be executed when an
12143 exception occurs. Those statements are implicitly followed
12144 by a RESX statement to resume execution after the exception.
12145 So in this case the TRY_CATCH never falls through. */
12146 return false;
12147 }
12148 }
12149
12150 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12151 need not be 100% accurate; simply be conservative and return true if we
12152 don't know. This is used only to avoid stupidly generating extra code.
12153 If we're wrong, we'll just delete the extra code later. */
12154
12155 bool
12156 block_may_fallthru (const_tree block)
12157 {
12158 /* This CONST_CAST is okay because expr_last returns its argument
12159 unmodified and we assign it to a const_tree. */
12160 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12161
12162 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12163 {
12164 case GOTO_EXPR:
12165 case RETURN_EXPR:
12166 /* Easy cases. If the last statement of the block implies
12167 control transfer, then we can't fall through. */
12168 return false;
12169
12170 case SWITCH_EXPR:
12171 /* If SWITCH_LABELS is set, this is lowered, and represents a
12172 branch to a selected label and hence cannot fall through.
12173 Otherwise SWITCH_BODY is set, and the switch can fall
12174 through. */
12175 return SWITCH_LABELS (stmt) == NULL_TREE;
12176
12177 case COND_EXPR:
12178 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12179 return true;
12180 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12181
12182 case BIND_EXPR:
12183 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12184
12185 case TRY_CATCH_EXPR:
12186 return try_catch_may_fallthru (stmt);
12187
12188 case TRY_FINALLY_EXPR:
12189 /* The finally clause is always executed after the try clause,
12190 so if it does not fall through, then the try-finally will not
12191 fall through. Otherwise, if the try clause does not fall
12192 through, then when the finally clause falls through it will
12193 resume execution wherever the try clause was going. So the
12194 whole try-finally will only fall through if both the try
12195 clause and the finally clause fall through. */
12196 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12197 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12198
12199 case MODIFY_EXPR:
12200 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12201 stmt = TREE_OPERAND (stmt, 1);
12202 else
12203 return true;
12204 /* FALLTHRU */
12205
12206 case CALL_EXPR:
12207 /* Functions that do not return do not fall through. */
12208 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12209
12210 case CLEANUP_POINT_EXPR:
12211 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12212
12213 case TARGET_EXPR:
12214 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12215
12216 case ERROR_MARK:
12217 return true;
12218
12219 default:
12220 return lang_hooks.block_may_fallthru (stmt);
12221 }
12222 }
12223
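/* Illustrative sketch (not built): block_may_fallthru answers false only
   when the last statement clearly transfers control; anything it is unsure
   about conservatively falls through.  The helper name is invented.  */
#if 0
static void
block_may_fallthru_example (void)
{
  tree ret = build1 (RETURN_EXPR, void_type_node, NULL_TREE);
  gcc_assert (!block_may_fallthru (ret));             /* control transfer */
  gcc_assert (block_may_fallthru (error_mark_node));  /* conservative */
}
#endif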
12224 /* True if we are using EH to handle cleanups. */
12225 static bool using_eh_for_cleanups_flag = false;
12226
12227 /* This routine is called from front ends to indicate that EH should be
12228 used for cleanups. */
12229 void
12230 using_eh_for_cleanups (void)
12231 {
12232 using_eh_for_cleanups_flag = true;
12233 }
12234
12235 /* Query whether EH is used for cleanups. */
12236 bool
12237 using_eh_for_cleanups_p (void)
12238 {
12239 return using_eh_for_cleanups_flag;
12240 }
12241
12242 /* Wrapper around tree_code_name to ensure that the tree code is valid. */
12243 const char *
12244 get_tree_code_name (enum tree_code code)
12245 {
12246 const char *invalid = "<invalid tree code>";
12247
12248 if (code >= MAX_TREE_CODES)
12249 return invalid;
12250
12251 return tree_code_name[code];
12252 }
12253
12254 /* Drops the TREE_OVERFLOW flag from T. */
12255
12256 tree
12257 drop_tree_overflow (tree t)
12258 {
12259 gcc_checking_assert (TREE_OVERFLOW (t));
12260
12261 /* For tree codes with a sharing machinery re-build the result. */
12262 if (TREE_CODE (t) == INTEGER_CST)
12263 return wide_int_to_tree (TREE_TYPE (t), t);
12264
12265 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12266 and drop the flag. */
12267 t = copy_node (t);
12268 TREE_OVERFLOW (t) = 0;
12269 return t;
12270 }
12271
12272 /* Given a memory reference expression T, return its base address.
12273 The base address of a memory reference expression is the main
12274 object being referenced. For instance, the base address for
12275 'array[i].fld[j]' is 'array'. You can think of this as stripping
12276 away the offset part from a memory address.
12277
12278 This function calls handled_component_p to strip away all the inner
12279 parts of the memory reference until it reaches the base object. */
12280
12281 tree
12282 get_base_address (tree t)
12283 {
12284 while (handled_component_p (t))
12285 t = TREE_OPERAND (t, 0);
12286
12287 if ((TREE_CODE (t) == MEM_REF
12288 || TREE_CODE (t) == TARGET_MEM_REF)
12289 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12290 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12291
12292 /* ??? Either the alias oracle or all callers need to properly deal
12293 with WITH_SIZE_EXPRs before we can look through those. */
12294 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12295 return NULL_TREE;
12296
12297 return t;
12298 }
12299
12300 #include "gt-tree.h"