1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "hard-reg-set.h"
46 #include "input.h"
47 #include "function.h"
48 #include "obstack.h"
49 #include "toplev.h" /* get_random_seed */
50 #include "inchash.h"
51 #include "filenames.h"
52 #include "output.h"
53 #include "target.h"
54 #include "common/common-target.h"
55 #include "langhooks.h"
56 #include "tree-inline.h"
57 #include "tree-iterator.h"
58 #include "predict.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "basic-block.h"
62 #include "bitmap.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimple-iterator.h"
69 #include "gimplify.h"
70 #include "gimple-ssa.h"
71 #include "hash-map.h"
72 #include "plugin-api.h"
73 #include "ipa-ref.h"
74 #include "cgraph.h"
75 #include "tree-phinodes.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "expr.h"
79 #include "tree-dfa.h"
80 #include "params.h"
81 #include "tree-pass.h"
82 #include "langhooks-def.h"
83 #include "diagnostic.h"
84 #include "tree-diagnostic.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "intl.h"
89 #include "wide-int.h"
90 #include "builtins.h"
91
92 /* Tree code classes. */
93
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
95 #define END_OF_BASE_TREE_CODES tcc_exceptional,
96
97 const enum tree_code_class tree_code_type[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Table indexed by tree code giving number of expression
105 operands beyond the fixed part of the node structure.
106 Not used for types or decls. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
109 #define END_OF_BASE_TREE_CODES 0,
110
111 const unsigned char tree_code_length[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Names of tree components.
119 Used for printing out the tree and error messages. */
120 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
121 #define END_OF_BASE_TREE_CODES "@dummy",
122
123 static const char *const tree_code_name[] = {
124 #include "all-tree.def"
125 };
126
127 #undef DEFTREECODE
128 #undef END_OF_BASE_TREE_CODES
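/* Editorial sketch (not part of the original file): how a single
   DEFTREECODE entry in tree.def feeds the three tables built above.
   PLUS_EXPR is used only as a familiar example; the "example_*" names
   are hypothetical and the block is compiled out.  */
#if 0
/* Given the tree.def line
     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)
   the three inclusions of all-tree.def contribute one element to each
   table, equivalent to:  */
static const enum tree_code_class example_plus_expr_class = tcc_binary;
static const unsigned char example_plus_expr_length = 2;
static const char *const example_plus_expr_name = "plus_expr";
#endif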
129
130 /* Each tree code class has an associated string representation.
131 These must correspond to the tree_code_class entries. */
132
133 const char *const tree_code_class_strings[] =
134 {
135 "exceptional",
136 "constant",
137 "type",
138 "declaration",
139 "reference",
140 "comparison",
141 "unary",
142 "binary",
143 "statement",
144 "vl_exp",
145 "expression"
146 };
147
148 /* obstack.[ch] explicitly declined to prototype this. */
149 extern int _obstack_allocated_p (struct obstack *h, void *obj);
150
151 /* Statistics-gathering stuff. */
152
153 static int tree_code_counts[MAX_TREE_CODES];
154 int tree_node_counts[(int) all_kinds];
155 int tree_node_sizes[(int) all_kinds];
156
157 /* Keep in sync with tree.h:enum tree_node_kind. */
158 static const char * const tree_node_kind_names[] = {
159 "decls",
160 "types",
161 "blocks",
162 "stmts",
163 "refs",
164 "exprs",
165 "constants",
166 "identifiers",
167 "vecs",
168 "binfos",
169 "ssa names",
170 "constructors",
171 "random kinds",
172 "lang_decl kinds",
173 "lang_type kinds",
174 "omp clauses",
175 };
176
177 /* Unique id for next decl created. */
178 static GTY(()) int next_decl_uid;
179 /* Unique id for next type created. */
180 static GTY(()) int next_type_uid = 1;
181 /* Unique id for next debug decl created. Use negative numbers,
182 to catch erroneous uses. */
183 static GTY(()) int next_debug_decl_uid;
184
185 /* Since we cannot rehash a type after it is in the table, we have to
186 keep the hash code. */
187
188 struct GTY(()) type_hash {
189 unsigned long hash;
190 tree type;
191 };
192
193 /* Initial size of the hash table (rounded to next prime). */
194 #define TYPE_HASH_INITIAL_SIZE 1000
195
196 /* Now here is the hash table. When recording a type, it is added to
197 the slot whose index is the hash code. Note that the hash table is
198 used for several kinds of types (function types, array types and
199 array index range types, for now). While all these live in the
200 same table, they are completely independent, and the hash code is
201 computed differently for each of these. */
202
203 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
204 htab_t type_hash_table;
205
206 /* Hash table and temporary node for larger integer const values. */
207 static GTY (()) tree int_cst_node;
208 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
209 htab_t int_cst_hash_table;
210
211 /* Hash table for optimization flags and target option flags. Use the same
212 hash table for both sets of options. Nodes for building the current
213 optimization and target option nodes. The assumption is most of the time
214 the options created will already be in the hash table, so we avoid
215 allocating and freeing up a node repeatedly. */
216 static GTY (()) tree cl_optimization_node;
217 static GTY (()) tree cl_target_option_node;
218 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
219 htab_t cl_option_hash_table;
220
221 /* General tree->tree mapping structure for use in hash tables. */
222
223
224 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
225 htab_t debug_expr_for_decl;
226
227 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
228 htab_t value_expr_for_decl;
229
230 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
231 htab_t debug_args_for_decl;
232
233 static void set_type_quals (tree, int);
234 static int type_hash_eq (const void *, const void *);
235 static hashval_t type_hash_hash (const void *);
236 static hashval_t int_cst_hash_hash (const void *);
237 static int int_cst_hash_eq (const void *, const void *);
238 static hashval_t cl_option_hash_hash (const void *);
239 static int cl_option_hash_eq (const void *, const void *);
240 static void print_type_hash_statistics (void);
241 static void print_debug_expr_statistics (void);
242 static void print_value_expr_statistics (void);
243 static int type_hash_marked_p (const void *);
244 static void type_hash_list (const_tree, inchash::hash &);
245 static void attribute_hash_list (const_tree, inchash::hash &);
246
247 tree global_trees[TI_MAX];
248 tree integer_types[itk_none];
249
250 bool int_n_enabled_p[NUM_INT_N_ENTS];
251 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
252
253 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
254
255 /* Number of operands for each OpenMP clause. */
256 unsigned const char omp_clause_num_ops[] =
257 {
258 0, /* OMP_CLAUSE_ERROR */
259 1, /* OMP_CLAUSE_PRIVATE */
260 1, /* OMP_CLAUSE_SHARED */
261 1, /* OMP_CLAUSE_FIRSTPRIVATE */
262 2, /* OMP_CLAUSE_LASTPRIVATE */
263 4, /* OMP_CLAUSE_REDUCTION */
264 1, /* OMP_CLAUSE_COPYIN */
265 1, /* OMP_CLAUSE_COPYPRIVATE */
266 3, /* OMP_CLAUSE_LINEAR */
267 2, /* OMP_CLAUSE_ALIGNED */
268 1, /* OMP_CLAUSE_DEPEND */
269 1, /* OMP_CLAUSE_UNIFORM */
270 2, /* OMP_CLAUSE_FROM */
271 2, /* OMP_CLAUSE_TO */
272 2, /* OMP_CLAUSE_MAP */
273 1, /* OMP_CLAUSE__LOOPTEMP_ */
274 1, /* OMP_CLAUSE_IF */
275 1, /* OMP_CLAUSE_NUM_THREADS */
276 1, /* OMP_CLAUSE_SCHEDULE */
277 0, /* OMP_CLAUSE_NOWAIT */
278 0, /* OMP_CLAUSE_ORDERED */
279 0, /* OMP_CLAUSE_DEFAULT */
280 3, /* OMP_CLAUSE_COLLAPSE */
281 0, /* OMP_CLAUSE_UNTIED */
282 1, /* OMP_CLAUSE_FINAL */
283 0, /* OMP_CLAUSE_MERGEABLE */
284 1, /* OMP_CLAUSE_DEVICE */
285 1, /* OMP_CLAUSE_DIST_SCHEDULE */
286 0, /* OMP_CLAUSE_INBRANCH */
287 0, /* OMP_CLAUSE_NOTINBRANCH */
288 1, /* OMP_CLAUSE_NUM_TEAMS */
289 1, /* OMP_CLAUSE_THREAD_LIMIT */
290 0, /* OMP_CLAUSE_PROC_BIND */
291 1, /* OMP_CLAUSE_SAFELEN */
292 1, /* OMP_CLAUSE_SIMDLEN */
293 0, /* OMP_CLAUSE_FOR */
294 0, /* OMP_CLAUSE_PARALLEL */
295 0, /* OMP_CLAUSE_SECTIONS */
296 0, /* OMP_CLAUSE_TASKGROUP */
297 1, /* OMP_CLAUSE__SIMDUID_ */
298 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
299 };
300
301 const char * const omp_clause_code_name[] =
302 {
303 "error_clause",
304 "private",
305 "shared",
306 "firstprivate",
307 "lastprivate",
308 "reduction",
309 "copyin",
310 "copyprivate",
311 "linear",
312 "aligned",
313 "depend",
314 "uniform",
315 "from",
316 "to",
317 "map",
318 "_looptemp_",
319 "if",
320 "num_threads",
321 "schedule",
322 "nowait",
323 "ordered",
324 "default",
325 "collapse",
326 "untied",
327 "final",
328 "mergeable",
329 "device",
330 "dist_schedule",
331 "inbranch",
332 "notinbranch",
333 "num_teams",
334 "thread_limit",
335 "proc_bind",
336 "safelen",
337 "simdlen",
338 "for",
339 "parallel",
340 "sections",
341 "taskgroup",
342 "_simduid_",
343 "_Cilk_for_count_"
344 };
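/* Editorial sketch (not part of the original file): the table above is
   what makes OMP_CLAUSE nodes variable-sized.  The helper name below is
   hypothetical and the block is compiled out; it simply mirrors the
   OMP_CLAUSE case of tree_size further down.  */
#if 0
static size_t
example_omp_clause_size (enum omp_clause_code code)
{
  /* E.g. an OMP_CLAUSE_REDUCTION clause carries 4 operands, so it is
     larger than an operand-less clause such as OMP_CLAUSE_NOWAIT.  */
  return sizeof (struct tree_omp_clause)
         + (omp_clause_num_ops[code] - 1) * sizeof (tree);
}
#endif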
345
346
347 /* Return the tree node structure used by tree code CODE. */
348
349 static inline enum tree_node_structure_enum
350 tree_node_structure_for_code (enum tree_code code)
351 {
352 switch (TREE_CODE_CLASS (code))
353 {
354 case tcc_declaration:
355 {
356 switch (code)
357 {
358 case FIELD_DECL:
359 return TS_FIELD_DECL;
360 case PARM_DECL:
361 return TS_PARM_DECL;
362 case VAR_DECL:
363 return TS_VAR_DECL;
364 case LABEL_DECL:
365 return TS_LABEL_DECL;
366 case RESULT_DECL:
367 return TS_RESULT_DECL;
368 case DEBUG_EXPR_DECL:
369 return TS_DECL_WRTL;
370 case CONST_DECL:
371 return TS_CONST_DECL;
372 case TYPE_DECL:
373 return TS_TYPE_DECL;
374 case FUNCTION_DECL:
375 return TS_FUNCTION_DECL;
376 case TRANSLATION_UNIT_DECL:
377 return TS_TRANSLATION_UNIT_DECL;
378 default:
379 return TS_DECL_NON_COMMON;
380 }
381 }
382 case tcc_type:
383 return TS_TYPE_NON_COMMON;
384 case tcc_reference:
385 case tcc_comparison:
386 case tcc_unary:
387 case tcc_binary:
388 case tcc_expression:
389 case tcc_statement:
390 case tcc_vl_exp:
391 return TS_EXP;
392 default: /* tcc_constant and tcc_exceptional */
393 break;
394 }
395 switch (code)
396 {
397 /* tcc_constant cases. */
398 case VOID_CST: return TS_TYPED;
399 case INTEGER_CST: return TS_INT_CST;
400 case REAL_CST: return TS_REAL_CST;
401 case FIXED_CST: return TS_FIXED_CST;
402 case COMPLEX_CST: return TS_COMPLEX;
403 case VECTOR_CST: return TS_VECTOR;
404 case STRING_CST: return TS_STRING;
405 /* tcc_exceptional cases. */
406 case ERROR_MARK: return TS_COMMON;
407 case IDENTIFIER_NODE: return TS_IDENTIFIER;
408 case TREE_LIST: return TS_LIST;
409 case TREE_VEC: return TS_VEC;
410 case SSA_NAME: return TS_SSA_NAME;
411 case PLACEHOLDER_EXPR: return TS_COMMON;
412 case STATEMENT_LIST: return TS_STATEMENT_LIST;
413 case BLOCK: return TS_BLOCK;
414 case CONSTRUCTOR: return TS_CONSTRUCTOR;
415 case TREE_BINFO: return TS_BINFO;
416 case OMP_CLAUSE: return TS_OMP_CLAUSE;
417 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
418 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
419
420 default:
421 gcc_unreachable ();
422 }
423 }
424
425
426 /* Initialize tree_contains_struct to describe the hierarchy of tree
427 nodes. */
428
429 static void
430 initialize_tree_contains_struct (void)
431 {
432 unsigned i;
433
434 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
435 {
436 enum tree_code code;
437 enum tree_node_structure_enum ts_code;
438
439 code = (enum tree_code) i;
440 ts_code = tree_node_structure_for_code (code);
441
442 /* Mark the TS structure itself. */
443 tree_contains_struct[code][ts_code] = 1;
444
445 /* Mark all the structures that TS is derived from. */
446 switch (ts_code)
447 {
448 case TS_TYPED:
449 case TS_BLOCK:
450 MARK_TS_BASE (code);
451 break;
452
453 case TS_COMMON:
454 case TS_INT_CST:
455 case TS_REAL_CST:
456 case TS_FIXED_CST:
457 case TS_VECTOR:
458 case TS_STRING:
459 case TS_COMPLEX:
460 case TS_SSA_NAME:
461 case TS_CONSTRUCTOR:
462 case TS_EXP:
463 case TS_STATEMENT_LIST:
464 MARK_TS_TYPED (code);
465 break;
466
467 case TS_IDENTIFIER:
468 case TS_DECL_MINIMAL:
469 case TS_TYPE_COMMON:
470 case TS_LIST:
471 case TS_VEC:
472 case TS_BINFO:
473 case TS_OMP_CLAUSE:
474 case TS_OPTIMIZATION:
475 case TS_TARGET_OPTION:
476 MARK_TS_COMMON (code);
477 break;
478
479 case TS_TYPE_WITH_LANG_SPECIFIC:
480 MARK_TS_TYPE_COMMON (code);
481 break;
482
483 case TS_TYPE_NON_COMMON:
484 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
485 break;
486
487 case TS_DECL_COMMON:
488 MARK_TS_DECL_MINIMAL (code);
489 break;
490
491 case TS_DECL_WRTL:
492 case TS_CONST_DECL:
493 MARK_TS_DECL_COMMON (code);
494 break;
495
496 case TS_DECL_NON_COMMON:
497 MARK_TS_DECL_WITH_VIS (code);
498 break;
499
500 case TS_DECL_WITH_VIS:
501 case TS_PARM_DECL:
502 case TS_LABEL_DECL:
503 case TS_RESULT_DECL:
504 MARK_TS_DECL_WRTL (code);
505 break;
506
507 case TS_FIELD_DECL:
508 MARK_TS_DECL_COMMON (code);
509 break;
510
511 case TS_VAR_DECL:
512 MARK_TS_DECL_WITH_VIS (code);
513 break;
514
515 case TS_TYPE_DECL:
516 case TS_FUNCTION_DECL:
517 MARK_TS_DECL_NON_COMMON (code);
518 break;
519
520 case TS_TRANSLATION_UNIT_DECL:
521 MARK_TS_DECL_COMMON (code);
522 break;
523
524 default:
525 gcc_unreachable ();
526 }
527 }
528
529 /* Basic consistency checks for attributes used in fold. */
530 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
531 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
532 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
534 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
535 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
537 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
538 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
540 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
542 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
543 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
544 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
546 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
547 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
548 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
549 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
550 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
554 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
556 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
557 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
558 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
559 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
560 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
561 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
562 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
563 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
564 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
565 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
566 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
567 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
568 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
569 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
570 }
571
572
573 /* Init tree.c. */
574
575 void
576 init_ttree (void)
577 {
578 /* Initialize the hash table of types. */
579 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
580 type_hash_eq, 0);
581
582 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
583 tree_decl_map_eq, 0);
584
585 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
586 tree_decl_map_eq, 0);
587
588 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
589 int_cst_hash_eq, NULL);
590
591 int_cst_node = make_int_cst (1, 1);
592
593 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
594 cl_option_hash_eq, NULL);
595
596 cl_optimization_node = make_node (OPTIMIZATION_NODE);
597 cl_target_option_node = make_node (TARGET_OPTION_NODE);
598
599 /* Initialize the tree_contains_struct array. */
600 initialize_tree_contains_struct ();
601 lang_hooks.init_ts ();
602 }
603
604 \f
605 /* The name of the object as the assembler will see it (but before any
606 translations made by ASM_OUTPUT_LABELREF). Often this is the same
607 as DECL_NAME. It is an IDENTIFIER_NODE. */
608 tree
609 decl_assembler_name (tree decl)
610 {
611 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
612 lang_hooks.set_decl_assembler_name (decl);
613 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
614 }
615
616 /* When the target supports COMDAT groups, this indicates which group the
617 DECL is associated with. This can be either an IDENTIFIER_NODE or a
618 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
619 tree
620 decl_comdat_group (const_tree node)
621 {
622 struct symtab_node *snode = symtab_node::get (node);
623 if (!snode)
624 return NULL;
625 return snode->get_comdat_group ();
626 }
627
628 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
629 tree
630 decl_comdat_group_id (const_tree node)
631 {
632 struct symtab_node *snode = symtab_node::get (node);
633 if (!snode)
634 return NULL;
635 return snode->get_comdat_group_id ();
636 }
637
638 /* When the target supports named sections, return the section name of
639 NODE as a string, or NULL if it is in no section. */
640 const char *
641 decl_section_name (const_tree node)
642 {
643 struct symtab_node *snode = symtab_node::get (node);
644 if (!snode)
645 return NULL;
646 return snode->get_section ();
647 }
648
649 /* Set the section name of NODE to the string VALUE, or clear the
650 association when VALUE is NULL. */
651 void
652 set_decl_section_name (tree node, const char *value)
653 {
654 struct symtab_node *snode;
655
656 if (value == NULL)
657 {
658 snode = symtab_node::get (node);
659 if (!snode)
660 return;
661 }
662 else if (TREE_CODE (node) == VAR_DECL)
663 snode = varpool_node::get_create (node);
664 else
665 snode = cgraph_node::get_create (node);
666 snode->set_section (value);
667 }
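/* Editorial sketch (not part of the original file): typical use of the
   two section accessors above.  The section name ".mydata" and the
   function name are hypothetical; the block is compiled out.  */
#if 0
static void
example_place_in_section (tree var)
{
  /* Associate VAR with a named section (creating the varpool/cgraph node
     if needed); passing NULL instead clears the association.  */
  set_decl_section_name (var, ".mydata");

  /* Read it back; decl_section_name returns NULL for decls that are in
     no section.  */
  const char *name = decl_section_name (var);
  gcc_assert (name != NULL && strcmp (name, ".mydata") == 0);
}
#endif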
668
669 /* Return TLS model of a variable NODE. */
670 enum tls_model
671 decl_tls_model (const_tree node)
672 {
673 struct varpool_node *snode = varpool_node::get (node);
674 if (!snode)
675 return TLS_MODEL_NONE;
676 return snode->tls_model;
677 }
678
679 /* Set TLS model of variable NODE to MODEL. */
680 void
681 set_decl_tls_model (tree node, enum tls_model model)
682 {
683 struct varpool_node *vnode;
684
685 if (model == TLS_MODEL_NONE)
686 {
687 vnode = varpool_node::get (node);
688 if (!vnode)
689 return;
690 }
691 else
692 vnode = varpool_node::get_create (node);
693 vnode->tls_model = model;
694 }
695
696 /* Compute the number of bytes occupied by a tree with code CODE.
697 This function cannot be used for nodes that have variable sizes,
698 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
699 size_t
700 tree_code_size (enum tree_code code)
701 {
702 switch (TREE_CODE_CLASS (code))
703 {
704 case tcc_declaration: /* A decl node */
705 {
706 switch (code)
707 {
708 case FIELD_DECL:
709 return sizeof (struct tree_field_decl);
710 case PARM_DECL:
711 return sizeof (struct tree_parm_decl);
712 case VAR_DECL:
713 return sizeof (struct tree_var_decl);
714 case LABEL_DECL:
715 return sizeof (struct tree_label_decl);
716 case RESULT_DECL:
717 return sizeof (struct tree_result_decl);
718 case CONST_DECL:
719 return sizeof (struct tree_const_decl);
720 case TYPE_DECL:
721 return sizeof (struct tree_type_decl);
722 case FUNCTION_DECL:
723 return sizeof (struct tree_function_decl);
724 case DEBUG_EXPR_DECL:
725 return sizeof (struct tree_decl_with_rtl);
726 case TRANSLATION_UNIT_DECL:
727 return sizeof (struct tree_translation_unit_decl);
728 case NAMESPACE_DECL:
729 case IMPORTED_DECL:
730 case NAMELIST_DECL:
731 return sizeof (struct tree_decl_non_common);
732 default:
733 return lang_hooks.tree_size (code);
734 }
735 }
736
737 case tcc_type: /* a type node */
738 return sizeof (struct tree_type_non_common);
739
740 case tcc_reference: /* a reference */
741 case tcc_expression: /* an expression */
742 case tcc_statement: /* an expression with side effects */
743 case tcc_comparison: /* a comparison expression */
744 case tcc_unary: /* a unary arithmetic expression */
745 case tcc_binary: /* a binary arithmetic expression */
746 return (sizeof (struct tree_exp)
747 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
748
749 case tcc_constant: /* a constant */
750 switch (code)
751 {
752 case VOID_CST: return sizeof (struct tree_typed);
753 case INTEGER_CST: gcc_unreachable ();
754 case REAL_CST: return sizeof (struct tree_real_cst);
755 case FIXED_CST: return sizeof (struct tree_fixed_cst);
756 case COMPLEX_CST: return sizeof (struct tree_complex);
757 case VECTOR_CST: return sizeof (struct tree_vector);
758 case STRING_CST: gcc_unreachable ();
759 default:
760 return lang_hooks.tree_size (code);
761 }
762
763 case tcc_exceptional: /* something random, like an identifier. */
764 switch (code)
765 {
766 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
767 case TREE_LIST: return sizeof (struct tree_list);
768
769 case ERROR_MARK:
770 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
771
772 case TREE_VEC:
773 case OMP_CLAUSE: gcc_unreachable ();
774
775 case SSA_NAME: return sizeof (struct tree_ssa_name);
776
777 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
778 case BLOCK: return sizeof (struct tree_block);
779 case CONSTRUCTOR: return sizeof (struct tree_constructor);
780 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
781 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
782
783 default:
784 return lang_hooks.tree_size (code);
785 }
786
787 default:
788 gcc_unreachable ();
789 }
790 }
791
792 /* Compute the number of bytes occupied by NODE. This routine only
793 looks at TREE_CODE, except for those nodes that have variable sizes. */
794 size_t
795 tree_size (const_tree node)
796 {
797 const enum tree_code code = TREE_CODE (node);
798 switch (code)
799 {
800 case INTEGER_CST:
801 return (sizeof (struct tree_int_cst)
802 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
803
804 case TREE_BINFO:
805 return (offsetof (struct tree_binfo, base_binfos)
806 + vec<tree, va_gc>
807 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
808
809 case TREE_VEC:
810 return (sizeof (struct tree_vec)
811 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
812
813 case VECTOR_CST:
814 return (sizeof (struct tree_vector)
815 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
816
817 case STRING_CST:
818 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
819
820 case OMP_CLAUSE:
821 return (sizeof (struct tree_omp_clause)
822 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
823 * sizeof (tree));
824
825 default:
826 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
827 return (sizeof (struct tree_exp)
828 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
829 else
830 return tree_code_size (code);
831 }
832 }
833
834 /* Record interesting allocation statistics for a tree node with CODE
835 and LENGTH. */
836
837 static void
838 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
839 size_t length ATTRIBUTE_UNUSED)
840 {
841 enum tree_code_class type = TREE_CODE_CLASS (code);
842 tree_node_kind kind;
843
844 if (!GATHER_STATISTICS)
845 return;
846
847 switch (type)
848 {
849 case tcc_declaration: /* A decl node */
850 kind = d_kind;
851 break;
852
853 case tcc_type: /* a type node */
854 kind = t_kind;
855 break;
856
857 case tcc_statement: /* an expression with side effects */
858 kind = s_kind;
859 break;
860
861 case tcc_reference: /* a reference */
862 kind = r_kind;
863 break;
864
865 case tcc_expression: /* an expression */
866 case tcc_comparison: /* a comparison expression */
867 case tcc_unary: /* a unary arithmetic expression */
868 case tcc_binary: /* a binary arithmetic expression */
869 kind = e_kind;
870 break;
871
872 case tcc_constant: /* a constant */
873 kind = c_kind;
874 break;
875
876 case tcc_exceptional: /* something random, like an identifier. */
877 switch (code)
878 {
879 case IDENTIFIER_NODE:
880 kind = id_kind;
881 break;
882
883 case TREE_VEC:
884 kind = vec_kind;
885 break;
886
887 case TREE_BINFO:
888 kind = binfo_kind;
889 break;
890
891 case SSA_NAME:
892 kind = ssa_name_kind;
893 break;
894
895 case BLOCK:
896 kind = b_kind;
897 break;
898
899 case CONSTRUCTOR:
900 kind = constr_kind;
901 break;
902
903 case OMP_CLAUSE:
904 kind = omp_clause_kind;
905 break;
906
907 default:
908 kind = x_kind;
909 break;
910 }
911 break;
912
913 case tcc_vl_exp:
914 kind = e_kind;
915 break;
916
917 default:
918 gcc_unreachable ();
919 }
920
921 tree_code_counts[(int) code]++;
922 tree_node_counts[(int) kind]++;
923 tree_node_sizes[(int) kind] += length;
924 }
925
926 /* Allocate and return a new UID from the DECL_UID namespace. */
927
928 int
929 allocate_decl_uid (void)
930 {
931 return next_decl_uid++;
932 }
933
934 /* Return a newly allocated node of code CODE. For decl and type
935 nodes, some other fields are initialized. The rest of the node is
936 initialized to zero. This function cannot be used for TREE_VEC,
937 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
938 tree_code_size.
939
940 Achoo! I got a code in the node. */
941
942 tree
943 make_node_stat (enum tree_code code MEM_STAT_DECL)
944 {
945 tree t;
946 enum tree_code_class type = TREE_CODE_CLASS (code);
947 size_t length = tree_code_size (code);
948
949 record_node_allocation_statistics (code, length);
950
951 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
952 TREE_SET_CODE (t, code);
953
954 switch (type)
955 {
956 case tcc_statement:
957 TREE_SIDE_EFFECTS (t) = 1;
958 break;
959
960 case tcc_declaration:
961 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
962 {
963 if (code == FUNCTION_DECL)
964 {
965 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
966 DECL_MODE (t) = FUNCTION_MODE;
967 }
968 else
969 DECL_ALIGN (t) = 1;
970 }
971 DECL_SOURCE_LOCATION (t) = input_location;
972 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
973 DECL_UID (t) = --next_debug_decl_uid;
974 else
975 {
976 DECL_UID (t) = allocate_decl_uid ();
977 SET_DECL_PT_UID (t, -1);
978 }
979 if (TREE_CODE (t) == LABEL_DECL)
980 LABEL_DECL_UID (t) = -1;
981
982 break;
983
984 case tcc_type:
985 TYPE_UID (t) = next_type_uid++;
986 TYPE_ALIGN (t) = BITS_PER_UNIT;
987 TYPE_USER_ALIGN (t) = 0;
988 TYPE_MAIN_VARIANT (t) = t;
989 TYPE_CANONICAL (t) = t;
990
991 /* Default to no attributes for type, but let target change that. */
992 TYPE_ATTRIBUTES (t) = NULL_TREE;
993 targetm.set_default_type_attributes (t);
994
995 /* We have not yet computed the alias set for this type. */
996 TYPE_ALIAS_SET (t) = -1;
997 break;
998
999 case tcc_constant:
1000 TREE_CONSTANT (t) = 1;
1001 break;
1002
1003 case tcc_expression:
1004 switch (code)
1005 {
1006 case INIT_EXPR:
1007 case MODIFY_EXPR:
1008 case VA_ARG_EXPR:
1009 case PREDECREMENT_EXPR:
1010 case PREINCREMENT_EXPR:
1011 case POSTDECREMENT_EXPR:
1012 case POSTINCREMENT_EXPR:
1013 /* All of these have side-effects, no matter what their
1014 operands are. */
1015 TREE_SIDE_EFFECTS (t) = 1;
1016 break;
1017
1018 default:
1019 break;
1020 }
1021 break;
1022
1023 default:
1024 /* Other classes need no special treatment. */
1025 break;
1026 }
1027
1028 return t;
1029 }
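/* Editorial sketch (not part of the original file): what the defaults
   established by make_node_stat look like from a caller's point of view.
   The function name is hypothetical and the block is compiled out.  */
#if 0
static tree
example_make_raw_type (void)
{
  tree t = make_node (INTEGER_TYPE);

  /* Fields initialized by the tcc_type case above.  */
  gcc_assert (TYPE_MAIN_VARIANT (t) == t);
  gcc_assert (TYPE_CANONICAL (t) == t);
  gcc_assert (TYPE_ALIGN (t) == BITS_PER_UNIT);
  gcc_assert (TYPE_ALIAS_SET (t) == -1);

  /* The caller still has to fill in precision, bounds and layout,
     e.g. via fixup_signed_type / layout_type.  */
  return t;
}
#endif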
1030 \f
1031 /* Return a new node with the same contents as NODE except that its
1032 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1033
1034 tree
1035 copy_node_stat (tree node MEM_STAT_DECL)
1036 {
1037 tree t;
1038 enum tree_code code = TREE_CODE (node);
1039 size_t length;
1040
1041 gcc_assert (code != STATEMENT_LIST);
1042
1043 length = tree_size (node);
1044 record_node_allocation_statistics (code, length);
1045 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1046 memcpy (t, node, length);
1047
1048 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1049 TREE_CHAIN (t) = 0;
1050 TREE_ASM_WRITTEN (t) = 0;
1051 TREE_VISITED (t) = 0;
1052
1053 if (TREE_CODE_CLASS (code) == tcc_declaration)
1054 {
1055 if (code == DEBUG_EXPR_DECL)
1056 DECL_UID (t) = --next_debug_decl_uid;
1057 else
1058 {
1059 DECL_UID (t) = allocate_decl_uid ();
1060 if (DECL_PT_UID_SET_P (node))
1061 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1062 }
1063 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1064 && DECL_HAS_VALUE_EXPR_P (node))
1065 {
1066 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1067 DECL_HAS_VALUE_EXPR_P (t) = 1;
1068 }
1069 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1070 if (TREE_CODE (node) == VAR_DECL)
1071 {
1072 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1073 t->decl_with_vis.symtab_node = NULL;
1074 }
1075 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1076 {
1077 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1078 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1079 }
1080 if (TREE_CODE (node) == FUNCTION_DECL)
1081 {
1082 DECL_STRUCT_FUNCTION (t) = NULL;
1083 t->decl_with_vis.symtab_node = NULL;
1084 }
1085 }
1086 else if (TREE_CODE_CLASS (code) == tcc_type)
1087 {
1088 TYPE_UID (t) = next_type_uid++;
1089 /* The following is so that the debug code for
1090 the copy is different from the original type.
1091 The two statements usually duplicate each other
1092 (because they clear fields of the same union),
1093 but the optimizer should catch that. */
1094 TYPE_SYMTAB_POINTER (t) = 0;
1095 TYPE_SYMTAB_ADDRESS (t) = 0;
1096
1097 /* Do not copy the values cache. */
1098 if (TYPE_CACHED_VALUES_P (t))
1099 {
1100 TYPE_CACHED_VALUES_P (t) = 0;
1101 TYPE_CACHED_VALUES (t) = NULL_TREE;
1102 }
1103 }
1104
1105 return t;
1106 }
1107
1108 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1109 For example, this can copy a list made of TREE_LIST nodes. */
1110
1111 tree
1112 copy_list (tree list)
1113 {
1114 tree head;
1115 tree prev, next;
1116
1117 if (list == 0)
1118 return 0;
1119
1120 head = prev = copy_node (list);
1121 next = TREE_CHAIN (list);
1122 while (next)
1123 {
1124 TREE_CHAIN (prev) = copy_node (next);
1125 prev = TREE_CHAIN (prev);
1126 next = TREE_CHAIN (next);
1127 }
1128 return head;
1129 }
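/* Editorial sketch (not part of the original file): copy_list duplicates
   the spine of a TREE_LIST chain but shares the values hanging off it.
   The function name is hypothetical and the block is compiled out.  */
#if 0
static void
example_copy_list (void)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);
  tree orig = tree_cons (NULL_TREE, one,
                         build_tree_list (NULL_TREE, two));
  tree copy = copy_list (orig);

  /* Fresh TREE_LIST nodes ...  */
  gcc_assert (copy != orig && TREE_CHAIN (copy) != TREE_CHAIN (orig));
  /* ... pointing at the original values.  */
  gcc_assert (TREE_VALUE (copy) == one
              && TREE_VALUE (TREE_CHAIN (copy)) == two);
}
#endif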
1130
1131 \f
1132 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1133 INTEGER_CST with value CST and type TYPE. */
1134
1135 static unsigned int
1136 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1137 {
1138 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1139 /* We need an extra zero HWI if CST is an unsigned integer with its
1140 upper bit set, and if CST occupies a whole number of HWIs. */
1141 if (TYPE_UNSIGNED (type)
1142 && wi::neg_p (cst)
1143 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1144 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1145 return cst.get_len ();
1146 }
1147
1148 /* Return a new INTEGER_CST with value CST and type TYPE. */
1149
1150 static tree
1151 build_new_int_cst (tree type, const wide_int &cst)
1152 {
1153 unsigned int len = cst.get_len ();
1154 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1155 tree nt = make_int_cst (len, ext_len);
1156
1157 if (len < ext_len)
1158 {
1159 --ext_len;
1160 TREE_INT_CST_ELT (nt, ext_len) = 0;
1161 for (unsigned int i = len; i < ext_len; ++i)
1162 TREE_INT_CST_ELT (nt, i) = -1;
1163 }
1164 else if (TYPE_UNSIGNED (type)
1165 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1166 {
1167 len--;
1168 TREE_INT_CST_ELT (nt, len)
1169 = zext_hwi (cst.elt (len),
1170 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1171 }
1172
1173 for (unsigned int i = 0; i < len; i++)
1174 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1175 TREE_TYPE (nt) = type;
1176 return nt;
1177 }
1178
1179 /* Create an INT_CST node with value LOW sign-extended to TYPE. */
1180
1181 tree
1182 build_int_cst (tree type, HOST_WIDE_INT low)
1183 {
1184 /* Support legacy code. */
1185 if (!type)
1186 type = integer_type_node;
1187
1188 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1189 }
1190
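/* Likewise, but with CST treated as an unsigned value (zero extended). */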
1191 tree
1192 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1193 {
1194 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1195 }
1196
1197 /* Create an INT_CST node with value LOW sign-extended to TYPE. */
1198
1199 tree
1200 build_int_cst_type (tree type, HOST_WIDE_INT low)
1201 {
1202 gcc_assert (type);
1203 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1204 }
1205
1206 /* Construct a tree of type TYPE with the value given by CST. The signedness
1207 of CST is assumed to be the same as the signedness of TYPE. */
1208
1209 tree
1210 double_int_to_tree (tree type, double_int cst)
1211 {
1212 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1213 }
1214
1215 /* Force the wide_int CST to the range of the type TYPE by sign or
1216 zero extending it. OVERFLOWABLE indicates whether we are interested
1217 in overflow of the value: when >0 we are only interested in signed
1218 overflow, while for <0 we are interested in any overflow. OVERFLOWED
1219 indicates whether overflow has already occurred. We force the
1220 result's value to be within the range of TYPE (by setting to 0 or 1
1221 all the bits outside the type's range). We set TREE_OVERFLOW if
1222 OVERFLOWED is nonzero,
1223 or OVERFLOWABLE is >0 and signed overflow occurs,
1224 or OVERFLOWABLE is <0 and any overflow occurs.
1225 We return a new tree node for the extended wide_int. The node is
1226 shared if no overflow flags are set. */
1227
1228
1229
1230 tree
1231 force_fit_type (tree type, const wide_int_ref &cst,
1232 int overflowable, bool overflowed)
1233 {
1234 signop sign = TYPE_SIGN (type);
1235
1236 /* If we need to set overflow flags, return a new unshared node. */
1237 if (overflowed || !wi::fits_to_tree_p (cst, type))
1238 {
1239 if (overflowed
1240 || overflowable < 0
1241 || (overflowable > 0 && sign == SIGNED))
1242 {
1243 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1244 tree t = build_new_int_cst (type, tmp);
1245 TREE_OVERFLOW (t) = 1;
1246 return t;
1247 }
1248 }
1249
1250 /* Else build a shared node. */
1251 return wide_int_to_tree (type, cst);
1252 }
1253
1254 /* These are the hash table functions for the hash table of INTEGER_CST
1255 nodes. */
1256
1257 /* Return the hash code of X, an INTEGER_CST. */
1258
1259 static hashval_t
1260 int_cst_hash_hash (const void *x)
1261 {
1262 const_tree const t = (const_tree) x;
1263 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1264 int i;
1265
1266 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1267 code ^= TREE_INT_CST_ELT (t, i);
1268
1269 return code;
1270 }
1271
1272 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1273 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1274
1275 static int
1276 int_cst_hash_eq (const void *x, const void *y)
1277 {
1278 const_tree const xt = (const_tree) x;
1279 const_tree const yt = (const_tree) y;
1280
1281 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1282 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1283 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1284 return false;
1285
1286 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1287 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1288 return false;
1289
1290 return true;
1291 }
1292
1293 /* Create an INT_CST node of TYPE and value CST.
1294 The returned node is always shared. For small integers we use a
1295 per-type vector cache, for larger ones we use a single hash table.
1296 The value is extended from its precision according to the sign of
1297 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1298 the upper bits and ensures that hashing and value equality based
1299 upon the underlying HOST_WIDE_INTs works without masking. */
1300
1301 tree
1302 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1303 {
1304 tree t;
1305 int ix = -1;
1306 int limit = 0;
1307
1308 gcc_assert (type);
1309 unsigned int prec = TYPE_PRECISION (type);
1310 signop sgn = TYPE_SIGN (type);
1311
1312 /* Verify that everything is canonical. */
1313 int l = pcst.get_len ();
1314 if (l > 1)
1315 {
1316 if (pcst.elt (l - 1) == 0)
1317 gcc_checking_assert (pcst.elt (l - 2) < 0);
1318 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1319 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1320 }
1321
1322 wide_int cst = wide_int::from (pcst, prec, sgn);
1323 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1324
1325 if (ext_len == 1)
1326 {
1327 /* We just need to store a single HOST_WIDE_INT. */
1328 HOST_WIDE_INT hwi;
1329 if (TYPE_UNSIGNED (type))
1330 hwi = cst.to_uhwi ();
1331 else
1332 hwi = cst.to_shwi ();
1333
1334 switch (TREE_CODE (type))
1335 {
1336 case NULLPTR_TYPE:
1337 gcc_assert (hwi == 0);
1338 /* Fallthru. */
1339
1340 case POINTER_TYPE:
1341 case REFERENCE_TYPE:
1342 case POINTER_BOUNDS_TYPE:
1343 /* Cache NULL pointer and zero bounds. */
1344 if (hwi == 0)
1345 {
1346 limit = 1;
1347 ix = 0;
1348 }
1349 break;
1350
1351 case BOOLEAN_TYPE:
1352 /* Cache false or true. */
1353 limit = 2;
1354 if (hwi < 2)
1355 ix = hwi;
1356 break;
1357
1358 case INTEGER_TYPE:
1359 case OFFSET_TYPE:
1360 if (TYPE_SIGN (type) == UNSIGNED)
1361 {
1362 /* Cache [0, N). */
1363 limit = INTEGER_SHARE_LIMIT;
1364 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1365 ix = hwi;
1366 }
1367 else
1368 {
1369 /* Cache [-1, N). */
1370 limit = INTEGER_SHARE_LIMIT + 1;
1371 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1372 ix = hwi + 1;
1373 }
1374 break;
1375
1376 case ENUMERAL_TYPE:
1377 break;
1378
1379 default:
1380 gcc_unreachable ();
1381 }
1382
1383 if (ix >= 0)
1384 {
1385 /* Look for it in the type's vector of small shared ints. */
1386 if (!TYPE_CACHED_VALUES_P (type))
1387 {
1388 TYPE_CACHED_VALUES_P (type) = 1;
1389 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1390 }
1391
1392 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1393 if (t)
1394 /* Make sure no one is clobbering the shared constant. */
1395 gcc_checking_assert (TREE_TYPE (t) == type
1396 && TREE_INT_CST_NUNITS (t) == 1
1397 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1398 && TREE_INT_CST_EXT_NUNITS (t) == 1
1399 && TREE_INT_CST_ELT (t, 0) == hwi);
1400 else
1401 {
1402 /* Create a new shared int. */
1403 t = build_new_int_cst (type, cst);
1404 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1405 }
1406 }
1407 else
1408 {
1409 /* Use the cache of larger shared ints, using int_cst_node as
1410 a temporary. */
1411 void **slot;
1412
1413 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1414 TREE_TYPE (int_cst_node) = type;
1415
1416 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1417 t = (tree) *slot;
1418 if (!t)
1419 {
1420 /* Insert this one into the hash table. */
1421 t = int_cst_node;
1422 *slot = t;
1423 /* Make a new node for next time round. */
1424 int_cst_node = make_int_cst (1, 1);
1425 }
1426 }
1427 }
1428 else
1429 {
1430 /* The value either hashes properly or we drop it on the floor
1431 for the gc to take care of. There will not be enough of them
1432 to worry about. */
1433 void **slot;
1434
1435 tree nt = build_new_int_cst (type, cst);
1436 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1437 t = (tree) *slot;
1438 if (!t)
1439 {
1440 /* Insert this one into the hash table. */
1441 t = nt;
1442 *slot = t;
1443 }
1444 }
1445
1446 return t;
1447 }
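/* Editorial sketch (not part of the original file): the observable effect
   of the sharing scheme implemented by wide_int_to_tree above.  The
   function name is hypothetical and the block is compiled out.  */
#if 0
static void
example_int_cst_sharing (void)
{
  /* Small values come from the per-type TYPE_CACHED_VALUES vector, so
     repeated requests yield the very same node.  */
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  gcc_assert (a == b);

  /* Values beyond INTEGER_SHARE_LIMIT are shared through
     int_cst_hash_table instead, with the same result.  */
  tree c = build_int_cst (integer_type_node, 123456789);
  tree d = build_int_cst (integer_type_node, 123456789);
  gcc_assert (c == d);
}
#endif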
1448
1449 void
1450 cache_integer_cst (tree t)
1451 {
1452 tree type = TREE_TYPE (t);
1453 int ix = -1;
1454 int limit = 0;
1455 int prec = TYPE_PRECISION (type);
1456
1457 gcc_assert (!TREE_OVERFLOW (t));
1458
1459 switch (TREE_CODE (type))
1460 {
1461 case NULLPTR_TYPE:
1462 gcc_assert (integer_zerop (t));
1463 /* Fallthru. */
1464
1465 case POINTER_TYPE:
1466 case REFERENCE_TYPE:
1467 /* Cache NULL pointer. */
1468 if (integer_zerop (t))
1469 {
1470 limit = 1;
1471 ix = 0;
1472 }
1473 break;
1474
1475 case BOOLEAN_TYPE:
1476 /* Cache false or true. */
1477 limit = 2;
1478 if (wi::ltu_p (t, 2))
1479 ix = TREE_INT_CST_ELT (t, 0);
1480 break;
1481
1482 case INTEGER_TYPE:
1483 case OFFSET_TYPE:
1484 if (TYPE_UNSIGNED (type))
1485 {
1486 /* Cache 0..N */
1487 limit = INTEGER_SHARE_LIMIT;
1488
1489 /* This is a little hokey, but if the prec is smaller than
1490 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1491 obvious test will not get the correct answer. */
1492 if (prec < HOST_BITS_PER_WIDE_INT)
1493 {
1494 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1495 ix = tree_to_uhwi (t);
1496 }
1497 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1498 ix = tree_to_uhwi (t);
1499 }
1500 else
1501 {
1502 /* Cache -1..N */
1503 limit = INTEGER_SHARE_LIMIT + 1;
1504
1505 if (integer_minus_onep (t))
1506 ix = 0;
1507 else if (!wi::neg_p (t))
1508 {
1509 if (prec < HOST_BITS_PER_WIDE_INT)
1510 {
1511 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1512 ix = tree_to_shwi (t) + 1;
1513 }
1514 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1515 ix = tree_to_shwi (t) + 1;
1516 }
1517 }
1518 break;
1519
1520 case ENUMERAL_TYPE:
1521 break;
1522
1523 default:
1524 gcc_unreachable ();
1525 }
1526
1527 if (ix >= 0)
1528 {
1529 /* Look for it in the type's vector of small shared ints. */
1530 if (!TYPE_CACHED_VALUES_P (type))
1531 {
1532 TYPE_CACHED_VALUES_P (type) = 1;
1533 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1534 }
1535
1536 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1537 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1538 }
1539 else
1540 {
1541 /* Use the cache of larger shared ints. */
1542 void **slot;
1543
1544 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1545 /* If there is already an entry for the number, verify it's the
1546 same. */
1547 if (*slot)
1548 gcc_assert (wi::eq_p (tree (*slot), t));
1549 else
1550 /* Otherwise insert this one into the hash table. */
1551 *slot = t;
1552 }
1553 }
1554
1555
1556 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1557 and the rest are zeros. */
1558
1559 tree
1560 build_low_bits_mask (tree type, unsigned bits)
1561 {
1562 gcc_assert (bits <= TYPE_PRECISION (type));
1563
1564 return wide_int_to_tree (type, wi::mask (bits, false,
1565 TYPE_PRECISION (type)));
1566 }
1567
1568 /* Checks that X is an integer constant that can be expressed in an (unsigned)
1569 HOST_WIDE_INT without loss of precision. */
1570
1571 bool
1572 cst_and_fits_in_hwi (const_tree x)
1573 {
1574 if (TREE_CODE (x) != INTEGER_CST)
1575 return false;
1576
1577 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1578 return false;
1579
1580 return TREE_INT_CST_NUNITS (x) == 1;
1581 }
1582
1583 /* Build a newly constructed VECTOR_CST node with space for LEN elements. */
1584
1585 tree
1586 make_vector_stat (unsigned len MEM_STAT_DECL)
1587 {
1588 tree t;
1589 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1590
1591 record_node_allocation_statistics (VECTOR_CST, length);
1592
1593 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1594
1595 TREE_SET_CODE (t, VECTOR_CST);
1596 TREE_CONSTANT (t) = 1;
1597
1598 return t;
1599 }
1600
1601 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1602 are in the array pointed to by VALS. */
1603
1604 tree
1605 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1606 {
1607 int over = 0;
1608 unsigned cnt = 0;
1609 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1610 TREE_TYPE (v) = type;
1611
1612 /* Iterate through elements and check for overflow. */
1613 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1614 {
1615 tree value = vals[cnt];
1616
1617 VECTOR_CST_ELT (v, cnt) = value;
1618
1619 /* Don't crash if we get an address constant. */
1620 if (!CONSTANT_CLASS_P (value))
1621 continue;
1622
1623 over |= TREE_OVERFLOW (value);
1624 }
1625
1626 TREE_OVERFLOW (v) = over;
1627 return v;
1628 }
1629
1630 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1631 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1632
1633 tree
1634 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1635 {
1636 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1637 unsigned HOST_WIDE_INT idx;
1638 tree value;
1639
1640 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1641 vec[idx] = value;
1642 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1643 vec[idx] = build_zero_cst (TREE_TYPE (type));
1644
1645 return build_vector (type, vec);
1646 }
1647
1648 /* Build a vector of type VECTYPE where all the elements are SCs. */
1649 tree
1650 build_vector_from_val (tree vectype, tree sc)
1651 {
1652 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1653
1654 if (sc == error_mark_node)
1655 return sc;
1656
1657 /* Verify that the vector type is suitable for SC. Note that there
1658 is some inconsistency in the type-system with respect to restrict
1659 qualifications of pointers. Vector types always have a main-variant
1660 element type and the qualification is applied to the vector-type.
1661 So TREE_TYPE (vector-type) does not return a properly qualified
1662 vector element-type. */
1663 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1664 TREE_TYPE (vectype)));
1665
1666 if (CONSTANT_CLASS_P (sc))
1667 {
1668 tree *v = XALLOCAVEC (tree, nunits);
1669 for (i = 0; i < nunits; ++i)
1670 v[i] = sc;
1671 return build_vector (vectype, v);
1672 }
1673 else
1674 {
1675 vec<constructor_elt, va_gc> *v;
1676 vec_alloc (v, nunits);
1677 for (i = 0; i < nunits; ++i)
1678 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1679 return build_constructor (vectype, v);
1680 }
1681 }
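/* Editorial sketch (not part of the original file): building a vector
   constant with every lane equal to the same value.  The function name
   and the lane count of 4 are hypothetical; the block is compiled out.  */
#if 0
static tree
example_splat_vector (void)
{
  tree lane_type = integer_type_node;
  tree vec_type = build_vector_type (lane_type, 4);

  /* A constant element yields a VECTOR_CST; a non-constant element
     would yield a CONSTRUCTOR instead (see build_vector_from_val).  */
  return build_vector_from_val (vec_type, build_int_cst (lane_type, 1));
}
#endif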
1682
1683 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1684 are in the vec pointed to by VALS. */
1685 tree
1686 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1687 {
1688 tree c = make_node (CONSTRUCTOR);
1689 unsigned int i;
1690 constructor_elt *elt;
1691 bool constant_p = true;
1692 bool side_effects_p = false;
1693
1694 TREE_TYPE (c) = type;
1695 CONSTRUCTOR_ELTS (c) = vals;
1696
1697 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1698 {
1699 /* Mostly ctors will have elts that don't have side-effects, so
1700 the usual case is to scan all the elements. Hence a single
1701 loop for both const and side effects, rather than one loop
1702 each (with early outs). */
1703 if (!TREE_CONSTANT (elt->value))
1704 constant_p = false;
1705 if (TREE_SIDE_EFFECTS (elt->value))
1706 side_effects_p = true;
1707 }
1708
1709 TREE_SIDE_EFFECTS (c) = side_effects_p;
1710 TREE_CONSTANT (c) = constant_p;
1711
1712 return c;
1713 }
1714
1715 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1716 INDEX and VALUE. */
1717 tree
1718 build_constructor_single (tree type, tree index, tree value)
1719 {
1720 vec<constructor_elt, va_gc> *v;
1721 constructor_elt elt = {index, value};
1722
1723 vec_alloc (v, 1);
1724 v->quick_push (elt);
1725
1726 return build_constructor (type, v);
1727 }
1728
1729
1730 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1731 are in a list pointed to by VALS. */
1732 tree
1733 build_constructor_from_list (tree type, tree vals)
1734 {
1735 tree t;
1736 vec<constructor_elt, va_gc> *v = NULL;
1737
1738 if (vals)
1739 {
1740 vec_alloc (v, list_length (vals));
1741 for (t = vals; t; t = TREE_CHAIN (t))
1742 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1743 }
1744
1745 return build_constructor (type, v);
1746 }
1747
1748 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1749 of elements, provided as index/value pairs. */
1750
1751 tree
1752 build_constructor_va (tree type, int nelts, ...)
1753 {
1754 vec<constructor_elt, va_gc> *v = NULL;
1755 va_list p;
1756
1757 va_start (p, nelts);
1758 vec_alloc (v, nelts);
1759 while (nelts--)
1760 {
1761 tree index = va_arg (p, tree);
1762 tree value = va_arg (p, tree);
1763 CONSTRUCTOR_APPEND_ELT (v, index, value);
1764 }
1765 va_end (p);
1766 return build_constructor (type, v);
1767 }
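/* Editorial sketch (not part of the original file): building a small
   array initializer with explicit index/value pairs.  The function name
   is hypothetical and the block is compiled out; for a RECORD_TYPE the
   indexes would be FIELD_DECLs rather than INTEGER_CSTs.  */
#if 0
static tree
example_array_initializer (tree array_type)
{
  /* Equivalent of { [0] = 10, [1] = 20 }.  */
  return build_constructor_va (array_type, 2,
                               build_int_cst (integer_type_node, 0),
                               build_int_cst (integer_type_node, 10),
                               build_int_cst (integer_type_node, 1),
                               build_int_cst (integer_type_node, 20));
}
#endif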
1768
1769 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1770
1771 tree
1772 build_fixed (tree type, FIXED_VALUE_TYPE f)
1773 {
1774 tree v;
1775 FIXED_VALUE_TYPE *fp;
1776
1777 v = make_node (FIXED_CST);
1778 fp = ggc_alloc<fixed_value> ();
1779 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1780
1781 TREE_TYPE (v) = type;
1782 TREE_FIXED_CST_PTR (v) = fp;
1783 return v;
1784 }
1785
1786 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1787
1788 tree
1789 build_real (tree type, REAL_VALUE_TYPE d)
1790 {
1791 tree v;
1792 REAL_VALUE_TYPE *dp;
1793 int overflow = 0;
1794
1795 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1796 Consider doing it via real_convert now. */
1797
1798 v = make_node (REAL_CST);
1799 dp = ggc_alloc<real_value> ();
1800 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1801
1802 TREE_TYPE (v) = type;
1803 TREE_REAL_CST_PTR (v) = dp;
1804 TREE_OVERFLOW (v) = overflow;
1805 return v;
1806 }
1807
1808 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1809 node I, converted for the floating-point type TYPE (which may be NULL). */
1810
1811 REAL_VALUE_TYPE
1812 real_value_from_int_cst (const_tree type, const_tree i)
1813 {
1814 REAL_VALUE_TYPE d;
1815
1816 /* Clear all bits of the real value type so that we can later do
1817 bitwise comparisons to see if two values are the same. */
1818 memset (&d, 0, sizeof d);
1819
1820 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1821 TYPE_SIGN (TREE_TYPE (i)));
1822 return d;
1823 }
1824
1825 /* Given a tree representing an integer constant I, return a tree
1826 representing the same value as a floating-point constant of type TYPE. */
1827
1828 tree
1829 build_real_from_int_cst (tree type, const_tree i)
1830 {
1831 tree v;
1832 int overflow = TREE_OVERFLOW (i);
1833
1834 v = build_real (type, real_value_from_int_cst (type, i));
1835
1836 TREE_OVERFLOW (v) |= overflow;
1837 return v;
1838 }
1839
1840 /* Return a newly constructed STRING_CST node whose value is
1841 the LEN characters at STR.
1842 Note that for a C string literal, LEN should include the trailing NUL.
1843 The TREE_TYPE is not initialized. */
1844
1845 tree
1846 build_string (int len, const char *str)
1847 {
1848 tree s;
1849 size_t length;
1850
1851 /* Do not waste bytes provided by padding of struct tree_string. */
1852 length = len + offsetof (struct tree_string, str) + 1;
1853
1854 record_node_allocation_statistics (STRING_CST, length);
1855
1856 s = (tree) ggc_internal_alloc (length);
1857
1858 memset (s, 0, sizeof (struct tree_typed));
1859 TREE_SET_CODE (s, STRING_CST);
1860 TREE_CONSTANT (s) = 1;
1861 TREE_STRING_LENGTH (s) = len;
1862 memcpy (s->string.str, str, len);
1863 s->string.str[len] = '\0';
1864
1865 return s;
1866 }
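/* Editorial sketch (not part of the original file): building a STRING_CST
   for a C string literal.  The names are hypothetical and the block is
   compiled out.  */
#if 0
static tree
example_string_cst (void)
{
  const char *str = "hello";
  int len = (int) strlen (str) + 1;	/* Include the trailing NUL.  */
  tree s = build_string (len, str);

  /* build_string leaves TREE_TYPE uninitialized; give the constant an
     array-of-char type covering LEN elements.  */
  TREE_TYPE (s) = build_array_type (char_type_node,
                                    build_index_type (size_int (len - 1)));
  return s;
}
#endif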
1867
1868 /* Return a newly constructed COMPLEX_CST node whose value is
1869 specified by the real and imaginary parts REAL and IMAG.
1870 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1871 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1872
1873 tree
1874 build_complex (tree type, tree real, tree imag)
1875 {
1876 tree t = make_node (COMPLEX_CST);
1877
1878 TREE_REALPART (t) = real;
1879 TREE_IMAGPART (t) = imag;
1880 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1881 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1882 return t;
1883 }
1884
1885 /* Return a constant of arithmetic type TYPE which is the
1886 multiplicative identity of the set TYPE. */
1887
1888 tree
1889 build_one_cst (tree type)
1890 {
1891 switch (TREE_CODE (type))
1892 {
1893 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1894 case POINTER_TYPE: case REFERENCE_TYPE:
1895 case OFFSET_TYPE:
1896 return build_int_cst (type, 1);
1897
1898 case REAL_TYPE:
1899 return build_real (type, dconst1);
1900
1901 case FIXED_POINT_TYPE:
1902 /* We can only generate 1 for accum types. */
1903 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1904 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1905
1906 case VECTOR_TYPE:
1907 {
1908 tree scalar = build_one_cst (TREE_TYPE (type));
1909
1910 return build_vector_from_val (type, scalar);
1911 }
1912
1913 case COMPLEX_TYPE:
1914 return build_complex (type,
1915 build_one_cst (TREE_TYPE (type)),
1916 build_zero_cst (TREE_TYPE (type)));
1917
1918 default:
1919 gcc_unreachable ();
1920 }
1921 }
1922
1923 /* Return an integer of type TYPE whose value has all bits set (within its
1924 precision), or a complex or vector whose subparts are such integers. */
1925
1926 tree
1927 build_all_ones_cst (tree type)
1928 {
1929 if (TREE_CODE (type) == COMPLEX_TYPE)
1930 {
1931 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1932 return build_complex (type, scalar, scalar);
1933 }
1934 else
1935 return build_minus_one_cst (type);
1936 }
1937
1938 /* Return a constant of arithmetic type TYPE which is the
1939 opposite of the multiplicative identity of the set TYPE. */
1940
1941 tree
1942 build_minus_one_cst (tree type)
1943 {
1944 switch (TREE_CODE (type))
1945 {
1946 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1947 case POINTER_TYPE: case REFERENCE_TYPE:
1948 case OFFSET_TYPE:
1949 return build_int_cst (type, -1);
1950
1951 case REAL_TYPE:
1952 return build_real (type, dconstm1);
1953
1954 case FIXED_POINT_TYPE:
1955 /* We can only generate -1 for accum types. */
1956 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1957 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1958 TYPE_MODE (type)));
1959
1960 case VECTOR_TYPE:
1961 {
1962 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1963
1964 return build_vector_from_val (type, scalar);
1965 }
1966
1967 case COMPLEX_TYPE:
1968 return build_complex (type,
1969 build_minus_one_cst (TREE_TYPE (type)),
1970 build_zero_cst (TREE_TYPE (type)));
1971
1972 default:
1973 gcc_unreachable ();
1974 }
1975 }
1976
1977 /* Build 0 constant of type TYPE. This is used by constructor folding
1978 and thus the constant should be represented in memory by
1979 zero(es). */
1980
1981 tree
1982 build_zero_cst (tree type)
1983 {
1984 switch (TREE_CODE (type))
1985 {
1986 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1987 case POINTER_TYPE: case REFERENCE_TYPE:
1988 case OFFSET_TYPE: case NULLPTR_TYPE:
1989 return build_int_cst (type, 0);
1990
1991 case REAL_TYPE:
1992 return build_real (type, dconst0);
1993
1994 case FIXED_POINT_TYPE:
1995 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1996
1997 case VECTOR_TYPE:
1998 {
1999 tree scalar = build_zero_cst (TREE_TYPE (type));
2000
2001 return build_vector_from_val (type, scalar);
2002 }
2003
2004 case COMPLEX_TYPE:
2005 {
2006 tree zero = build_zero_cst (TREE_TYPE (type));
2007
2008 return build_complex (type, zero, zero);
2009 }
2010
2011 default:
2012 if (!AGGREGATE_TYPE_P (type))
2013 return fold_convert (type, integer_zero_node);
2014 return build_constructor (type, NULL);
2015 }
2016 }
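
/* Illustrative sketch (editorial addition, not part of the original
   sources): how the build_*_cst helpers above compose for non-scalar
   types.  For a complex type the "one" constant is 1 + 0i, built from
   the scalar helpers:

	tree ctype = build_complex_type (double_type_node);
	tree one   = build_one_cst (ctype);	   // COMPLEX_CST 1.0 + 0.0i
	tree zero  = build_zero_cst (ctype);	   // COMPLEX_CST 0.0 + 0.0i
	tree mone  = build_minus_one_cst (ctype);  // COMPLEX_CST -1.0 + 0.0i

   For vector types the scalar constant is splatted across all elements
   via build_vector_from_val.  */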
2017
2018
2019 /* Build a BINFO with LEN language slots. */
2020
2021 tree
2022 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2023 {
2024 tree t;
2025 size_t length = (offsetof (struct tree_binfo, base_binfos)
2026 + vec<tree, va_gc>::embedded_size (base_binfos));
2027
2028 record_node_allocation_statistics (TREE_BINFO, length);
2029
2030 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2031
2032 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2033
2034 TREE_SET_CODE (t, TREE_BINFO);
2035
2036 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2037
2038 return t;
2039 }
2040
2041 /* Create a CASE_LABEL_EXPR tree node and return it. */
2042
2043 tree
2044 build_case_label (tree low_value, tree high_value, tree label_decl)
2045 {
2046 tree t = make_node (CASE_LABEL_EXPR);
2047
2048 TREE_TYPE (t) = void_type_node;
2049 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2050
2051 CASE_LOW (t) = low_value;
2052 CASE_HIGH (t) = high_value;
2053 CASE_LABEL (t) = label_decl;
2054 CASE_CHAIN (t) = NULL_TREE;
2055
2056 return t;
2057 }
2058
2059 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2060 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2061 The latter determines the length of the HOST_WIDE_INT vector. */
2062
2063 tree
2064 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2065 {
2066 tree t;
2067 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2068 + sizeof (struct tree_int_cst));
2069
2070 gcc_assert (len);
2071 record_node_allocation_statistics (INTEGER_CST, length);
2072
2073 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2074
2075 TREE_SET_CODE (t, INTEGER_CST);
2076 TREE_INT_CST_NUNITS (t) = len;
2077 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2078 /* to_offset can only be applied to trees that are offset_int-sized
2079 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2080 must be exactly the precision of offset_int and so LEN is correct. */
2081 if (ext_len <= OFFSET_INT_ELTS)
2082 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2083 else
2084 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2085
2086 TREE_CONSTANT (t) = 1;
2087
2088 return t;
2089 }
2090
2091 /* Build a newly constructed TREE_VEC node of length LEN. */
2092
2093 tree
2094 make_tree_vec_stat (int len MEM_STAT_DECL)
2095 {
2096 tree t;
2097 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2098
2099 record_node_allocation_statistics (TREE_VEC, length);
2100
2101 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2102
2103 TREE_SET_CODE (t, TREE_VEC);
2104 TREE_VEC_LENGTH (t) = len;
2105
2106 return t;
2107 }
2108
2109 /* Grow a TREE_VEC node to new length LEN. */
2110
2111 tree
2112 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2113 {
2114 gcc_assert (TREE_CODE (v) == TREE_VEC);
2115
2116 int oldlen = TREE_VEC_LENGTH (v);
2117 gcc_assert (len > oldlen);
2118
2119 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2120 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2121
2122 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2123
2124 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2125
2126 TREE_VEC_LENGTH (v) = len;
2127
2128 return v;
2129 }
2130 \f
2131 /* Return 1 if EXPR is the integer constant zero or a complex constant
2132 of zero. */
2133
2134 int
2135 integer_zerop (const_tree expr)
2136 {
2137 STRIP_NOPS (expr);
2138
2139 switch (TREE_CODE (expr))
2140 {
2141 case INTEGER_CST:
2142 return wi::eq_p (expr, 0);
2143 case COMPLEX_CST:
2144 return (integer_zerop (TREE_REALPART (expr))
2145 && integer_zerop (TREE_IMAGPART (expr)));
2146 case VECTOR_CST:
2147 {
2148 unsigned i;
2149 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2150 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2151 return false;
2152 return true;
2153 }
2154 default:
2155 return false;
2156 }
2157 }
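
/* Illustrative sketch (editorial addition): integer_zerop looks through
   conversions and recurses into COMPLEX_CST and VECTOR_CST nodes, so
   all of the following hold:

	integer_zerop (build_int_cst (integer_type_node, 0));	    // 1
	integer_zerop (build_zero_cst
			 (build_complex_type (integer_type_node))); // 1
	integer_zerop (build_int_cst (integer_type_node, 2));	    // 0
   */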
2158
2159 /* Return 1 if EXPR is the integer constant one or the corresponding
2160 complex constant. */
2161
2162 int
2163 integer_onep (const_tree expr)
2164 {
2165 STRIP_NOPS (expr);
2166
2167 switch (TREE_CODE (expr))
2168 {
2169 case INTEGER_CST:
2170 return wi::eq_p (wi::to_widest (expr), 1);
2171 case COMPLEX_CST:
2172 return (integer_onep (TREE_REALPART (expr))
2173 && integer_zerop (TREE_IMAGPART (expr)));
2174 case VECTOR_CST:
2175 {
2176 unsigned i;
2177 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2178 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2179 return false;
2180 return true;
2181 }
2182 default:
2183 return false;
2184 }
2185 }
2186
2187 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2188 return 1 if every piece is the integer constant one. */
2189
2190 int
2191 integer_each_onep (const_tree expr)
2192 {
2193 STRIP_NOPS (expr);
2194
2195 if (TREE_CODE (expr) == COMPLEX_CST)
2196 return (integer_onep (TREE_REALPART (expr))
2197 && integer_onep (TREE_IMAGPART (expr)));
2198 else
2199 return integer_onep (expr);
2200 }
2201
2202 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2203 it contains, or a complex or vector whose subparts are such integers. */
2204
2205 int
2206 integer_all_onesp (const_tree expr)
2207 {
2208 STRIP_NOPS (expr);
2209
2210 if (TREE_CODE (expr) == COMPLEX_CST
2211 && integer_all_onesp (TREE_REALPART (expr))
2212 && integer_all_onesp (TREE_IMAGPART (expr)))
2213 return 1;
2214
2215 else if (TREE_CODE (expr) == VECTOR_CST)
2216 {
2217 unsigned i;
2218 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2219 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2220 return 0;
2221 return 1;
2222 }
2223
2224 else if (TREE_CODE (expr) != INTEGER_CST)
2225 return 0;
2226
2227 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2228 }
2229
2230 /* Return 1 if EXPR is the integer constant minus one. */
2231
2232 int
2233 integer_minus_onep (const_tree expr)
2234 {
2235 STRIP_NOPS (expr);
2236
2237 if (TREE_CODE (expr) == COMPLEX_CST)
2238 return (integer_all_onesp (TREE_REALPART (expr))
2239 && integer_zerop (TREE_IMAGPART (expr)));
2240 else
2241 return integer_all_onesp (expr);
2242 }
2243
2244 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2245 one bit on). */
2246
2247 int
2248 integer_pow2p (const_tree expr)
2249 {
2250 STRIP_NOPS (expr);
2251
2252 if (TREE_CODE (expr) == COMPLEX_CST
2253 && integer_pow2p (TREE_REALPART (expr))
2254 && integer_zerop (TREE_IMAGPART (expr)))
2255 return 1;
2256
2257 if (TREE_CODE (expr) != INTEGER_CST)
2258 return 0;
2259
2260 return wi::popcount (expr) == 1;
2261 }
2262
2263 /* Return 1 if EXPR is an integer constant other than zero or a
2264 complex constant other than zero. */
2265
2266 int
2267 integer_nonzerop (const_tree expr)
2268 {
2269 STRIP_NOPS (expr);
2270
2271 return ((TREE_CODE (expr) == INTEGER_CST
2272 && !wi::eq_p (expr, 0))
2273 || (TREE_CODE (expr) == COMPLEX_CST
2274 && (integer_nonzerop (TREE_REALPART (expr))
2275 || integer_nonzerop (TREE_IMAGPART (expr)))));
2276 }
2277
2278 /* Return 1 if EXPR is the fixed-point constant zero. */
2279
2280 int
2281 fixed_zerop (const_tree expr)
2282 {
2283 return (TREE_CODE (expr) == FIXED_CST
2284 && TREE_FIXED_CST (expr).data.is_zero ());
2285 }
2286
2287 /* Return the power of two represented by a tree node known to be a
2288 power of two. */
2289
2290 int
2291 tree_log2 (const_tree expr)
2292 {
2293 STRIP_NOPS (expr);
2294
2295 if (TREE_CODE (expr) == COMPLEX_CST)
2296 return tree_log2 (TREE_REALPART (expr));
2297
2298 return wi::exact_log2 (expr);
2299 }
2300
2301 /* Similar, but return the largest integer Y such that 2 ** Y is less
2302 than or equal to EXPR. */
2303
2304 int
2305 tree_floor_log2 (const_tree expr)
2306 {
2307 STRIP_NOPS (expr);
2308
2309 if (TREE_CODE (expr) == COMPLEX_CST)
2310 return tree_log2 (TREE_REALPART (expr));
2311
2312 return wi::floor_log2 (expr);
2313 }
2314
2315 /* Return the number of known trailing zero bits in EXPR, or, if the value of
2316 EXPR is known to be zero, the precision of its type. */
2317
2318 unsigned int
2319 tree_ctz (const_tree expr)
2320 {
2321 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2322 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2323 return 0;
2324
2325 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2326 switch (TREE_CODE (expr))
2327 {
2328 case INTEGER_CST:
2329 ret1 = wi::ctz (expr);
2330 return MIN (ret1, prec);
2331 case SSA_NAME:
2332 ret1 = wi::ctz (get_nonzero_bits (expr));
2333 return MIN (ret1, prec);
2334 case PLUS_EXPR:
2335 case MINUS_EXPR:
2336 case BIT_IOR_EXPR:
2337 case BIT_XOR_EXPR:
2338 case MIN_EXPR:
2339 case MAX_EXPR:
2340 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2341 if (ret1 == 0)
2342 return ret1;
2343 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2344 return MIN (ret1, ret2);
2345 case POINTER_PLUS_EXPR:
2346 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2347 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2348 /* The second operand is sizetype, which could in theory be
2349 wider than the pointer's precision. Make sure we never
2350 return more than prec. */
2351 ret2 = MIN (ret2, prec);
2352 return MIN (ret1, ret2);
2353 case BIT_AND_EXPR:
2354 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2355 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2356 return MAX (ret1, ret2);
2357 case MULT_EXPR:
2358 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2359 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2360 return MIN (ret1 + ret2, prec);
2361 case LSHIFT_EXPR:
2362 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2363 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2364 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2365 {
2366 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2367 return MIN (ret1 + ret2, prec);
2368 }
2369 return ret1;
2370 case RSHIFT_EXPR:
2371 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2372 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2373 {
2374 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2375 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2376 if (ret1 > ret2)
2377 return ret1 - ret2;
2378 }
2379 return 0;
2380 case TRUNC_DIV_EXPR:
2381 case CEIL_DIV_EXPR:
2382 case FLOOR_DIV_EXPR:
2383 case ROUND_DIV_EXPR:
2384 case EXACT_DIV_EXPR:
2385 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2386 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2387 {
2388 int l = tree_log2 (TREE_OPERAND (expr, 1));
2389 if (l >= 0)
2390 {
2391 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2392 ret2 = l;
2393 if (ret1 > ret2)
2394 return ret1 - ret2;
2395 }
2396 }
2397 return 0;
2398 CASE_CONVERT:
2399 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2400 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2401 ret1 = prec;
2402 return MIN (ret1, prec);
2403 case SAVE_EXPR:
2404 return tree_ctz (TREE_OPERAND (expr, 0));
2405 case COND_EXPR:
2406 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2407 if (ret1 == 0)
2408 return 0;
2409 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2410 return MIN (ret1, ret2);
2411 case COMPOUND_EXPR:
2412 return tree_ctz (TREE_OPERAND (expr, 1));
2413 case ADDR_EXPR:
2414 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2415 if (ret1 > BITS_PER_UNIT)
2416 {
2417 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2418 return MIN (ret1, prec);
2419 }
2420 return 0;
2421 default:
2422 return 0;
2423 }
2424 }
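
/* Illustrative sketch (editorial addition): tree_ctz propagates known
   trailing-zero counts through simple arithmetic.  Assuming X is a
   hypothetical SSA_NAME of type int with no nonzero-bits information
   recorded:

	tree four = build_int_cst (integer_type_node, 4);
	tree prod = build2 (MULT_EXPR, integer_type_node, x, four);
	// tree_ctz (four) == 2 and tree_ctz (x) == 0,
	// so tree_ctz (prod) == MIN (0 + 2, prec) == 2.
   */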
2425
2426 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2427 decimal float constants, so don't return 1 for them. */
2428
2429 int
2430 real_zerop (const_tree expr)
2431 {
2432 STRIP_NOPS (expr);
2433
2434 switch (TREE_CODE (expr))
2435 {
2436 case REAL_CST:
2437 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2438 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2439 case COMPLEX_CST:
2440 return real_zerop (TREE_REALPART (expr))
2441 && real_zerop (TREE_IMAGPART (expr));
2442 case VECTOR_CST:
2443 {
2444 unsigned i;
2445 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2446 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2447 return false;
2448 return true;
2449 }
2450 default:
2451 return false;
2452 }
2453 }
2454
2455 /* Return 1 if EXPR is the real constant one in real or complex form.
2456 Trailing zeroes matter for decimal float constants, so don't return
2457 1 for them. */
2458
2459 int
2460 real_onep (const_tree expr)
2461 {
2462 STRIP_NOPS (expr);
2463
2464 switch (TREE_CODE (expr))
2465 {
2466 case REAL_CST:
2467 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2468 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2469 case COMPLEX_CST:
2470 return real_onep (TREE_REALPART (expr))
2471 && real_zerop (TREE_IMAGPART (expr));
2472 case VECTOR_CST:
2473 {
2474 unsigned i;
2475 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2476 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2477 return false;
2478 return true;
2479 }
2480 default:
2481 return false;
2482 }
2483 }
2484
2485 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2486 matter for decimal float constants, so don't return 1 for them. */
2487
2488 int
2489 real_minus_onep (const_tree expr)
2490 {
2491 STRIP_NOPS (expr);
2492
2493 switch (TREE_CODE (expr))
2494 {
2495 case REAL_CST:
2496 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2497 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2498 case COMPLEX_CST:
2499 return real_minus_onep (TREE_REALPART (expr))
2500 && real_zerop (TREE_IMAGPART (expr));
2501 case VECTOR_CST:
2502 {
2503 unsigned i;
2504 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2505 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2506 return false;
2507 return true;
2508 }
2509 default:
2510 return false;
2511 }
2512 }
2513
2514 /* Nonzero if EXP is a constant or a cast of a constant. */
2515
2516 int
2517 really_constant_p (const_tree exp)
2518 {
2519 /* This is not quite the same as STRIP_NOPS. It does more. */
2520 while (CONVERT_EXPR_P (exp)
2521 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2522 exp = TREE_OPERAND (exp, 0);
2523 return TREE_CONSTANT (exp);
2524 }
2525 \f
2526 /* Return first list element whose TREE_VALUE is ELEM.
2527 Return 0 if ELEM is not in LIST. */
2528
2529 tree
2530 value_member (tree elem, tree list)
2531 {
2532 while (list)
2533 {
2534 if (elem == TREE_VALUE (list))
2535 return list;
2536 list = TREE_CHAIN (list);
2537 }
2538 return NULL_TREE;
2539 }
2540
2541 /* Return first list element whose TREE_PURPOSE is ELEM.
2542 Return 0 if ELEM is not in LIST. */
2543
2544 tree
2545 purpose_member (const_tree elem, tree list)
2546 {
2547 while (list)
2548 {
2549 if (elem == TREE_PURPOSE (list))
2550 return list;
2551 list = TREE_CHAIN (list);
2552 }
2553 return NULL_TREE;
2554 }
2555
2556 /* Return true if ELEM is in V. */
2557
2558 bool
2559 vec_member (const_tree elem, vec<tree, va_gc> *v)
2560 {
2561 unsigned ix;
2562 tree t;
2563 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2564 if (elem == t)
2565 return true;
2566 return false;
2567 }
2568
2569 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2570 NULL_TREE if there is no such element. */
2571
2572 tree
2573 chain_index (int idx, tree chain)
2574 {
2575 for (; chain && idx > 0; --idx)
2576 chain = TREE_CHAIN (chain);
2577 return chain;
2578 }
2579
2580 /* Return nonzero if ELEM is part of the chain CHAIN. */
2581
2582 int
2583 chain_member (const_tree elem, const_tree chain)
2584 {
2585 while (chain)
2586 {
2587 if (elem == chain)
2588 return 1;
2589 chain = DECL_CHAIN (chain);
2590 }
2591
2592 return 0;
2593 }
2594
2595 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2596 We expect a null pointer to mark the end of the chain.
2597 This is the Lisp primitive `length'. */
2598
2599 int
2600 list_length (const_tree t)
2601 {
2602 const_tree p = t;
2603 #ifdef ENABLE_TREE_CHECKING
2604 const_tree q = t;
2605 #endif
2606 int len = 0;
2607
2608 while (p)
2609 {
2610 p = TREE_CHAIN (p);
2611 #ifdef ENABLE_TREE_CHECKING
2612 if (len % 2)
2613 q = TREE_CHAIN (q);
2614 gcc_assert (p != q);
2615 #endif
2616 len++;
2617 }
2618
2619 return len;
2620 }
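
/* Illustrative sketch (editorial addition): counting a three-element
   TREE_LIST chain built with the list constructors defined later in
   this file:

	tree l = tree_cons (NULL_TREE, integer_zero_node,
			    tree_cons (NULL_TREE, integer_one_node,
				       build_tree_list (NULL_TREE,
							integer_zero_node)));
	// list_length (l) == 3

   With ENABLE_TREE_CHECKING the second pointer Q above advances at half
   the speed of P, so a cycle in the chain trips the assert instead of
   looping forever.  */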
2621
2622 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2623 UNION_TYPE TYPE, or NULL_TREE if none. */
2624
2625 tree
2626 first_field (const_tree type)
2627 {
2628 tree t = TYPE_FIELDS (type);
2629 while (t && TREE_CODE (t) != FIELD_DECL)
2630 t = TREE_CHAIN (t);
2631 return t;
2632 }
2633
2634 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2635 by modifying the last node in chain 1 to point to chain 2.
2636 This is the Lisp primitive `nconc'. */
2637
2638 tree
2639 chainon (tree op1, tree op2)
2640 {
2641 tree t1;
2642
2643 if (!op1)
2644 return op2;
2645 if (!op2)
2646 return op1;
2647
2648 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2649 continue;
2650 TREE_CHAIN (t1) = op2;
2651
2652 #ifdef ENABLE_TREE_CHECKING
2653 {
2654 tree t2;
2655 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2656 gcc_assert (t2 != t1);
2657 }
2658 #endif
2659
2660 return op1;
2661 }
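
/* Illustrative sketch (editorial addition): chainon appends
   destructively, like Lisp nconc, so the first chain is modified in
   place:

	tree a = build_tree_list (NULL_TREE, integer_zero_node);
	tree b = build_tree_list (NULL_TREE, integer_one_node);
	tree c = chainon (a, b);
	// c == a, TREE_CHAIN (a) == b, list_length (c) == 2

   Passing the same chain as both arguments would create a cycle; the
   ENABLE_TREE_CHECKING loop above asserts against exactly that.  */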
2662
2663 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2664
2665 tree
2666 tree_last (tree chain)
2667 {
2668 tree next;
2669 if (chain)
2670 while ((next = TREE_CHAIN (chain)))
2671 chain = next;
2672 return chain;
2673 }
2674
2675 /* Reverse the order of elements in the chain T,
2676 and return the new head of the chain (old last element). */
2677
2678 tree
2679 nreverse (tree t)
2680 {
2681 tree prev = 0, decl, next;
2682 for (decl = t; decl; decl = next)
2683 {
2684 /* We shouldn't be using this function to reverse BLOCK chains; we
2685 have blocks_nreverse for that. */
2686 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2687 next = TREE_CHAIN (decl);
2688 TREE_CHAIN (decl) = prev;
2689 prev = decl;
2690 }
2691 return prev;
2692 }
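
/* Illustrative sketch (editorial addition): nreverse relinks the
   existing TREE_CHAIN pointers rather than copying nodes:

	tree l = tree_cons (NULL_TREE, integer_zero_node,
			    build_tree_list (NULL_TREE, integer_one_node));
	tree r = nreverse (l);
	// r is the old last node, TREE_CHAIN (r) == l,
	// and TREE_CHAIN (l) == NULL_TREE

   so callers must use the returned node, not the old head, as the new
   start of the list.  */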
2693 \f
2694 /* Return a newly created TREE_LIST node whose
2695 purpose and value fields are PARM and VALUE. */
2696
2697 tree
2698 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2699 {
2700 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2701 TREE_PURPOSE (t) = parm;
2702 TREE_VALUE (t) = value;
2703 return t;
2704 }
2705
2706 /* Build a chain of TREE_LIST nodes from a vector. */
2707
2708 tree
2709 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2710 {
2711 tree ret = NULL_TREE;
2712 tree *pp = &ret;
2713 unsigned int i;
2714 tree t;
2715 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2716 {
2717 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2718 pp = &TREE_CHAIN (*pp);
2719 }
2720 return ret;
2721 }
2722
2723 /* Return a newly created TREE_LIST node whose
2724 purpose and value fields are PURPOSE and VALUE
2725 and whose TREE_CHAIN is CHAIN. */
2726
2727 tree
2728 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2729 {
2730 tree node;
2731
2732 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2733 memset (node, 0, sizeof (struct tree_common));
2734
2735 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2736
2737 TREE_SET_CODE (node, TREE_LIST);
2738 TREE_CHAIN (node) = chain;
2739 TREE_PURPOSE (node) = purpose;
2740 TREE_VALUE (node) = value;
2741 return node;
2742 }
2743
2744 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2745 trees. */
2746
2747 vec<tree, va_gc> *
2748 ctor_to_vec (tree ctor)
2749 {
2750 vec<tree, va_gc> *vec;
2751 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2752 unsigned int ix;
2753 tree val;
2754
2755 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2756 vec->quick_push (val);
2757
2758 return vec;
2759 }
2760 \f
2761 /* Return the size nominally occupied by an object of type TYPE
2762 when it resides in memory. The value is measured in units of bytes,
2763 and its data type is that normally used for type sizes
2764 (which is the first type created by make_signed_type or
2765 make_unsigned_type). */
2766
2767 tree
2768 size_in_bytes (const_tree type)
2769 {
2770 tree t;
2771
2772 if (type == error_mark_node)
2773 return integer_zero_node;
2774
2775 type = TYPE_MAIN_VARIANT (type);
2776 t = TYPE_SIZE_UNIT (type);
2777
2778 if (t == 0)
2779 {
2780 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2781 return size_zero_node;
2782 }
2783
2784 return t;
2785 }
2786
2787 /* Return the size of TYPE (in bytes) as a wide integer
2788 or return -1 if the size can vary or is larger than an integer. */
2789
2790 HOST_WIDE_INT
2791 int_size_in_bytes (const_tree type)
2792 {
2793 tree t;
2794
2795 if (type == error_mark_node)
2796 return 0;
2797
2798 type = TYPE_MAIN_VARIANT (type);
2799 t = TYPE_SIZE_UNIT (type);
2800
2801 if (t && tree_fits_uhwi_p (t))
2802 return TREE_INT_CST_LOW (t);
2803 else
2804 return -1;
2805 }
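
/* Illustrative sketch (editorial addition): for complete, fixed-size
   types int_size_in_bytes returns the byte size directly; e.g. on a
   target where int is 32 bits:

	int_size_in_bytes (integer_type_node);	// 4

   For variable-sized types (e.g. the type of a C99 VLA) TYPE_SIZE_UNIT
   is not a compile-time constant and the function returns -1.  */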
2806
2807 /* Return the maximum size of TYPE (in bytes) as a wide integer
2808 or return -1 if the size can vary or is larger than an integer. */
2809
2810 HOST_WIDE_INT
2811 max_int_size_in_bytes (const_tree type)
2812 {
2813 HOST_WIDE_INT size = -1;
2814 tree size_tree;
2815
2816 /* If this is an array type, check for a possible MAX_SIZE attached. */
2817
2818 if (TREE_CODE (type) == ARRAY_TYPE)
2819 {
2820 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2821
2822 if (size_tree && tree_fits_uhwi_p (size_tree))
2823 size = tree_to_uhwi (size_tree);
2824 }
2825
2826 /* If we still haven't been able to get a size, see if the language
2827 can compute a maximum size. */
2828
2829 if (size == -1)
2830 {
2831 size_tree = lang_hooks.types.max_size (type);
2832
2833 if (size_tree && tree_fits_uhwi_p (size_tree))
2834 size = tree_to_uhwi (size_tree);
2835 }
2836
2837 return size;
2838 }
2839 \f
2840 /* Return the bit position of FIELD, in bits from the start of the record.
2841 This is a tree of type bitsizetype. */
2842
2843 tree
2844 bit_position (const_tree field)
2845 {
2846 return bit_from_pos (DECL_FIELD_OFFSET (field),
2847 DECL_FIELD_BIT_OFFSET (field));
2848 }
2849 \f
2850 /* Return the byte position of FIELD, in bytes from the start of the record.
2851 This is a tree of type sizetype. */
2852
2853 tree
2854 byte_position (const_tree field)
2855 {
2856 return byte_from_pos (DECL_FIELD_OFFSET (field),
2857 DECL_FIELD_BIT_OFFSET (field));
2858 }
2859
2860 /* Likewise, but return as an integer. It must be representable in
2861 that way (since it could be a signed value, we don't have the
2862 option of returning -1 like int_size_in_bytes can). */
2863
2864 HOST_WIDE_INT
2865 int_byte_position (const_tree field)
2866 {
2867 return tree_to_shwi (byte_position (field));
2868 }
2869 \f
2870 /* Return the strictest alignment, in bits, that T is known to have. */
2871
2872 unsigned int
2873 expr_align (const_tree t)
2874 {
2875 unsigned int align0, align1;
2876
2877 switch (TREE_CODE (t))
2878 {
2879 CASE_CONVERT: case NON_LVALUE_EXPR:
2880 /* If we have conversions, we know that the alignment of the
2881 object must meet each of the alignments of the types. */
2882 align0 = expr_align (TREE_OPERAND (t, 0));
2883 align1 = TYPE_ALIGN (TREE_TYPE (t));
2884 return MAX (align0, align1);
2885
2886 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2887 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2888 case CLEANUP_POINT_EXPR:
2889 /* These don't change the alignment of an object. */
2890 return expr_align (TREE_OPERAND (t, 0));
2891
2892 case COND_EXPR:
2893 /* The best we can do is say that the alignment is the least aligned
2894 of the two arms. */
2895 align0 = expr_align (TREE_OPERAND (t, 1));
2896 align1 = expr_align (TREE_OPERAND (t, 2));
2897 return MIN (align0, align1);
2898
2899 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2900 meaningfully, it's always 1. */
2901 case LABEL_DECL: case CONST_DECL:
2902 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2903 case FUNCTION_DECL:
2904 gcc_assert (DECL_ALIGN (t) != 0);
2905 return DECL_ALIGN (t);
2906
2907 default:
2908 break;
2909 }
2910
2911 /* Otherwise take the alignment from that of the type. */
2912 return TYPE_ALIGN (TREE_TYPE (t));
2913 }
2914 \f
2915 /* Return, as a tree node, the number of elements for TYPE (which is an
2916 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2917
2918 tree
2919 array_type_nelts (const_tree type)
2920 {
2921 tree index_type, min, max;
2922
2923 /* If they did it with unspecified bounds, then we should have already
2924 given an error about it before we got here. */
2925 if (! TYPE_DOMAIN (type))
2926 return error_mark_node;
2927
2928 index_type = TYPE_DOMAIN (type);
2929 min = TYPE_MIN_VALUE (index_type);
2930 max = TYPE_MAX_VALUE (index_type);
2931
2932 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2933 if (!max)
2934 return error_mark_node;
2935
2936 return (integer_zerop (min)
2937 ? max
2938 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2939 }
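
/* Illustrative sketch (editorial addition): for an array type with
   domain [0, 9] (the C type "int a[10]") array_type_nelts returns the
   constant 9, i.e. the element count minus one:

	tree domain = build_index_type (size_int (9));
	tree atype  = build_array_type (integer_type_node, domain);
	// array_type_nelts (atype) is an INTEGER_CST of value 9

   Callers that want the element count itself must add one back.  */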
2940 \f
2941 /* If arg is static -- a reference to an object in static storage -- then
2942 return the object. This is not the same as the C meaning of `static'.
2943 If arg isn't static, return NULL. */
2944
2945 tree
2946 staticp (tree arg)
2947 {
2948 switch (TREE_CODE (arg))
2949 {
2950 case FUNCTION_DECL:
2951 /* Nested functions are static, even though taking their address will
2952 involve a trampoline as we unnest the nested function and create
2953 the trampoline on the tree level. */
2954 return arg;
2955
2956 case VAR_DECL:
2957 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2958 && ! DECL_THREAD_LOCAL_P (arg)
2959 && ! DECL_DLLIMPORT_P (arg)
2960 ? arg : NULL);
2961
2962 case CONST_DECL:
2963 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2964 ? arg : NULL);
2965
2966 case CONSTRUCTOR:
2967 return TREE_STATIC (arg) ? arg : NULL;
2968
2969 case LABEL_DECL:
2970 case STRING_CST:
2971 return arg;
2972
2973 case COMPONENT_REF:
2974 /* If the thing being referenced is not a field, then it is
2975 something language specific. */
2976 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2977
2978 /* If we are referencing a bitfield, we can't evaluate an
2979 ADDR_EXPR at compile time and so it isn't a constant. */
2980 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2981 return NULL;
2982
2983 return staticp (TREE_OPERAND (arg, 0));
2984
2985 case BIT_FIELD_REF:
2986 return NULL;
2987
2988 case INDIRECT_REF:
2989 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2990
2991 case ARRAY_REF:
2992 case ARRAY_RANGE_REF:
2993 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2994 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2995 return staticp (TREE_OPERAND (arg, 0));
2996 else
2997 return NULL;
2998
2999 case COMPOUND_LITERAL_EXPR:
3000 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3001
3002 default:
3003 return NULL;
3004 }
3005 }
3006
3007 \f
3008
3009
3010 /* Return whether OP is a DECL whose address is function-invariant. */
3011
3012 bool
3013 decl_address_invariant_p (const_tree op)
3014 {
3015 /* The conditions below are slightly less strict than the one in
3016 staticp. */
3017
3018 switch (TREE_CODE (op))
3019 {
3020 case PARM_DECL:
3021 case RESULT_DECL:
3022 case LABEL_DECL:
3023 case FUNCTION_DECL:
3024 return true;
3025
3026 case VAR_DECL:
3027 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3028 || DECL_THREAD_LOCAL_P (op)
3029 || DECL_CONTEXT (op) == current_function_decl
3030 || decl_function_context (op) == current_function_decl)
3031 return true;
3032 break;
3033
3034 case CONST_DECL:
3035 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3036 || decl_function_context (op) == current_function_decl)
3037 return true;
3038 break;
3039
3040 default:
3041 break;
3042 }
3043
3044 return false;
3045 }
3046
3047 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3048
3049 bool
3050 decl_address_ip_invariant_p (const_tree op)
3051 {
3052 /* The conditions below are slightly less strict than the one in
3053 staticp. */
3054
3055 switch (TREE_CODE (op))
3056 {
3057 case LABEL_DECL:
3058 case FUNCTION_DECL:
3059 case STRING_CST:
3060 return true;
3061
3062 case VAR_DECL:
3063 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3064 && !DECL_DLLIMPORT_P (op))
3065 || DECL_THREAD_LOCAL_P (op))
3066 return true;
3067 break;
3068
3069 case CONST_DECL:
3070 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3071 return true;
3072 break;
3073
3074 default:
3075 break;
3076 }
3077
3078 return false;
3079 }
3080
3081
3082 /* Return true if T is function-invariant (internal function, does
3083 not handle arithmetic; that's handled in skip_simple_arithmetic and
3084 tree_invariant_p). */
3085
3086 static bool tree_invariant_p (tree t);
3087
3088 static bool
3089 tree_invariant_p_1 (tree t)
3090 {
3091 tree op;
3092
3093 if (TREE_CONSTANT (t)
3094 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3095 return true;
3096
3097 switch (TREE_CODE (t))
3098 {
3099 case SAVE_EXPR:
3100 return true;
3101
3102 case ADDR_EXPR:
3103 op = TREE_OPERAND (t, 0);
3104 while (handled_component_p (op))
3105 {
3106 switch (TREE_CODE (op))
3107 {
3108 case ARRAY_REF:
3109 case ARRAY_RANGE_REF:
3110 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3111 || TREE_OPERAND (op, 2) != NULL_TREE
3112 || TREE_OPERAND (op, 3) != NULL_TREE)
3113 return false;
3114 break;
3115
3116 case COMPONENT_REF:
3117 if (TREE_OPERAND (op, 2) != NULL_TREE)
3118 return false;
3119 break;
3120
3121 default:;
3122 }
3123 op = TREE_OPERAND (op, 0);
3124 }
3125
3126 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3127
3128 default:
3129 break;
3130 }
3131
3132 return false;
3133 }
3134
3135 /* Return true if T is function-invariant. */
3136
3137 static bool
3138 tree_invariant_p (tree t)
3139 {
3140 tree inner = skip_simple_arithmetic (t);
3141 return tree_invariant_p_1 (inner);
3142 }
3143
3144 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3145 Do this to any expression which may be used in more than one place,
3146 but must be evaluated only once.
3147
3148 Normally, expand_expr would reevaluate the expression each time.
3149 Calling save_expr produces something that is evaluated and recorded
3150 the first time expand_expr is called on it. Subsequent calls to
3151 expand_expr just reuse the recorded value.
3152
3153 The call to expand_expr that generates code that actually computes
3154 the value is the first call *at compile time*. Subsequent calls
3155 *at compile time* generate code to use the saved value.
3156 This produces correct result provided that *at run time* control
3157 always flows through the insns made by the first expand_expr
3158 before reaching the other places where the save_expr was evaluated.
3159 You, the caller of save_expr, must make sure this is so.
3160
3161 Constants, and certain read-only nodes, are returned with no
3162 SAVE_EXPR because that is safe. Expressions containing placeholders
3163 are not touched; see tree.def for an explanation of what these
3164 are used for. */
3165
3166 tree
3167 save_expr (tree expr)
3168 {
3169 tree t = fold (expr);
3170 tree inner;
3171
3172 /* If the tree evaluates to a constant, then we don't want to hide that
3173 fact (i.e. this allows further folding, and direct checks for constants).
3174 However, a read-only object that has side effects cannot be bypassed.
3175 Since it is no problem to reevaluate literals, we just return the
3176 literal node. */
3177 inner = skip_simple_arithmetic (t);
3178 if (TREE_CODE (inner) == ERROR_MARK)
3179 return inner;
3180
3181 if (tree_invariant_p_1 (inner))
3182 return t;
3183
3184 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3185 it means that the size or offset of some field of an object depends on
3186 the value within another field.
3187
3188 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3189 and some variable since it would then need to be both evaluated once and
3190 evaluated more than once. Front-ends must assure this case cannot
3191 happen by surrounding any such subexpressions in their own SAVE_EXPR
3192 and forcing evaluation at the proper time. */
3193 if (contains_placeholder_p (inner))
3194 return t;
3195
3196 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3197 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3198
3199 /* This expression might be placed ahead of a jump to ensure that the
3200 value was computed on both sides of the jump. So make sure it isn't
3201 eliminated as dead. */
3202 TREE_SIDE_EFFECTS (t) = 1;
3203 return t;
3204 }
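
/* Illustrative sketch (editorial addition): a caller that needs to use
   an expression twice without evaluating it twice wraps it once.  ARG
   below is a hypothetical tree that may have side effects:

	tree saved = save_expr (arg);
	tree sum = build2 (PLUS_EXPR, TREE_TYPE (saved), saved, saved);
	// ARG is evaluated only once at run time

   If ARG is already constant or invariant, save_expr just returns it
   (possibly folded) without adding a SAVE_EXPR node.  */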
3205
3206 /* Look inside EXPR into any simple arithmetic operations. Return the
3207 outermost non-arithmetic or non-invariant node. */
3208
3209 tree
3210 skip_simple_arithmetic (tree expr)
3211 {
3212 /* We don't care about whether this can be used as an lvalue in this
3213 context. */
3214 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3215 expr = TREE_OPERAND (expr, 0);
3216
3217 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3218 a constant, it will be more efficient to not make another SAVE_EXPR since
3219 it will allow better simplification and GCSE will be able to merge the
3220 computations if they actually occur. */
3221 while (true)
3222 {
3223 if (UNARY_CLASS_P (expr))
3224 expr = TREE_OPERAND (expr, 0);
3225 else if (BINARY_CLASS_P (expr))
3226 {
3227 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3228 expr = TREE_OPERAND (expr, 0);
3229 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3230 expr = TREE_OPERAND (expr, 1);
3231 else
3232 break;
3233 }
3234 else
3235 break;
3236 }
3237
3238 return expr;
3239 }
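
/* Illustrative sketch (editorial addition): skip_simple_arithmetic digs
   through invariant unary/binary arithmetic to reach the interesting
   operand.  FNDECL below is a hypothetical FUNCTION_DECL:

	tree call = build_call_expr (fndecl, 0);	// "f ()"
	tree sum  = build2 (PLUS_EXPR, integer_type_node, call,
			    build_int_cst (integer_type_node, 4));
	// skip_simple_arithmetic (sum) == call

   save_expr uses this so that an expression like SAVE_EXPR <f ()> + 4
   is not wrapped in a second SAVE_EXPR, since the inner SAVE_EXPR is
   already invariant.  */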
3240
3241 /* Look inside EXPR into simple arithmetic operations involving constants.
3242 Return the outermost non-arithmetic or non-constant node. */
3243
3244 tree
3245 skip_simple_constant_arithmetic (tree expr)
3246 {
3247 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3248 expr = TREE_OPERAND (expr, 0);
3249
3250 while (true)
3251 {
3252 if (UNARY_CLASS_P (expr))
3253 expr = TREE_OPERAND (expr, 0);
3254 else if (BINARY_CLASS_P (expr))
3255 {
3256 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3257 expr = TREE_OPERAND (expr, 0);
3258 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3259 expr = TREE_OPERAND (expr, 1);
3260 else
3261 break;
3262 }
3263 else
3264 break;
3265 }
3266
3267 return expr;
3268 }
3269
3270 /* Return which tree structure is used by T. */
3271
3272 enum tree_node_structure_enum
3273 tree_node_structure (const_tree t)
3274 {
3275 const enum tree_code code = TREE_CODE (t);
3276 return tree_node_structure_for_code (code);
3277 }
3278
3279 /* Set various status flags when building a CALL_EXPR object T. */
3280
3281 static void
3282 process_call_operands (tree t)
3283 {
3284 bool side_effects = TREE_SIDE_EFFECTS (t);
3285 bool read_only = false;
3286 int i = call_expr_flags (t);
3287
3288 /* Calls have side-effects, except those to const or pure functions. */
3289 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3290 side_effects = true;
3291 /* Propagate TREE_READONLY of arguments for const functions. */
3292 if (i & ECF_CONST)
3293 read_only = true;
3294
3295 if (!side_effects || read_only)
3296 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3297 {
3298 tree op = TREE_OPERAND (t, i);
3299 if (op && TREE_SIDE_EFFECTS (op))
3300 side_effects = true;
3301 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3302 read_only = false;
3303 }
3304
3305 TREE_SIDE_EFFECTS (t) = side_effects;
3306 TREE_READONLY (t) = read_only;
3307 }
3308 \f
3309 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3310 size or offset that depends on a field within a record. */
3311
3312 bool
3313 contains_placeholder_p (const_tree exp)
3314 {
3315 enum tree_code code;
3316
3317 if (!exp)
3318 return 0;
3319
3320 code = TREE_CODE (exp);
3321 if (code == PLACEHOLDER_EXPR)
3322 return 1;
3323
3324 switch (TREE_CODE_CLASS (code))
3325 {
3326 case tcc_reference:
3327 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3328 position computations since they will be converted into a
3329 WITH_RECORD_EXPR involving the reference, which we assume
3330 here will be valid. */
3331 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3332
3333 case tcc_exceptional:
3334 if (code == TREE_LIST)
3335 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3336 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3337 break;
3338
3339 case tcc_unary:
3340 case tcc_binary:
3341 case tcc_comparison:
3342 case tcc_expression:
3343 switch (code)
3344 {
3345 case COMPOUND_EXPR:
3346 /* Ignoring the first operand isn't quite right, but works best. */
3347 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3348
3349 case COND_EXPR:
3350 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3351 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3352 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3353
3354 case SAVE_EXPR:
3355 /* The save_expr function never wraps anything containing
3356 a PLACEHOLDER_EXPR. */
3357 return 0;
3358
3359 default:
3360 break;
3361 }
3362
3363 switch (TREE_CODE_LENGTH (code))
3364 {
3365 case 1:
3366 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3367 case 2:
3368 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3369 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3370 default:
3371 return 0;
3372 }
3373
3374 case tcc_vl_exp:
3375 switch (code)
3376 {
3377 case CALL_EXPR:
3378 {
3379 const_tree arg;
3380 const_call_expr_arg_iterator iter;
3381 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3382 if (CONTAINS_PLACEHOLDER_P (arg))
3383 return 1;
3384 return 0;
3385 }
3386 default:
3387 return 0;
3388 }
3389
3390 default:
3391 return 0;
3392 }
3393 return 0;
3394 }
3395
3396 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3397 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3398 field positions. */
3399
3400 static bool
3401 type_contains_placeholder_1 (const_tree type)
3402 {
3403 /* If the size contains a placeholder or the parent type (component type in
3404 the case of arrays) involves a placeholder, this type does. */
3405 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3406 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3407 || (!POINTER_TYPE_P (type)
3408 && TREE_TYPE (type)
3409 && type_contains_placeholder_p (TREE_TYPE (type))))
3410 return true;
3411
3412 /* Now do type-specific checks. Note that the last part of the check above
3413 greatly limits what we have to do below. */
3414 switch (TREE_CODE (type))
3415 {
3416 case VOID_TYPE:
3417 case POINTER_BOUNDS_TYPE:
3418 case COMPLEX_TYPE:
3419 case ENUMERAL_TYPE:
3420 case BOOLEAN_TYPE:
3421 case POINTER_TYPE:
3422 case OFFSET_TYPE:
3423 case REFERENCE_TYPE:
3424 case METHOD_TYPE:
3425 case FUNCTION_TYPE:
3426 case VECTOR_TYPE:
3427 case NULLPTR_TYPE:
3428 return false;
3429
3430 case INTEGER_TYPE:
3431 case REAL_TYPE:
3432 case FIXED_POINT_TYPE:
3433 /* Here we just check the bounds. */
3434 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3435 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3436
3437 case ARRAY_TYPE:
3438 /* We have already checked the component type above, so just check the
3439 domain type. */
3440 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3441
3442 case RECORD_TYPE:
3443 case UNION_TYPE:
3444 case QUAL_UNION_TYPE:
3445 {
3446 tree field;
3447
3448 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3449 if (TREE_CODE (field) == FIELD_DECL
3450 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3451 || (TREE_CODE (type) == QUAL_UNION_TYPE
3452 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3453 || type_contains_placeholder_p (TREE_TYPE (field))))
3454 return true;
3455
3456 return false;
3457 }
3458
3459 default:
3460 gcc_unreachable ();
3461 }
3462 }
3463
3464 /* Wrapper around above function used to cache its result. */
3465
3466 bool
3467 type_contains_placeholder_p (tree type)
3468 {
3469 bool result;
3470
3471 /* If the contains_placeholder_bits field has been initialized,
3472 then we know the answer. */
3473 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3474 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3475
3476 /* Indicate that we've seen this type node, and the answer is false.
3477 This is what we want to return if we run into recursion via fields. */
3478 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3479
3480 /* Compute the real value. */
3481 result = type_contains_placeholder_1 (type);
3482
3483 /* Store the real value. */
3484 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3485
3486 return result;
3487 }
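
/* Illustrative sketch (editorial addition): the cache above uses the
   TYPE_CONTAINS_PLACEHOLDER_INTERNAL field as a tri-state: 0 means not
   computed yet, 1 means computed and false, 2 means computed and true.
   So for any type node TYPE:

	bool first  = type_contains_placeholder_p (type);  // walks and caches
	bool second = type_contains_placeholder_p (type);  // reads the cache
	// first == second
   */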
3488 \f
3489 /* Push tree EXP onto vector QUEUE if it is not already present. */
3490
3491 static void
3492 push_without_duplicates (tree exp, vec<tree> *queue)
3493 {
3494 unsigned int i;
3495 tree iter;
3496
3497 FOR_EACH_VEC_ELT (*queue, i, iter)
3498 if (simple_cst_equal (iter, exp) == 1)
3499 break;
3500
3501 if (!iter)
3502 queue->safe_push (exp);
3503 }
3504
3505 /* Given a tree EXP, find all occurrences of references to fields
3506 in a PLACEHOLDER_EXPR and place them in vector REFS without
3507 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3508 we assume here that EXP contains only arithmetic expressions
3509 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3510 argument list. */
3511
3512 void
3513 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3514 {
3515 enum tree_code code = TREE_CODE (exp);
3516 tree inner;
3517 int i;
3518
3519 /* We handle TREE_LIST and COMPONENT_REF separately. */
3520 if (code == TREE_LIST)
3521 {
3522 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3523 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3524 }
3525 else if (code == COMPONENT_REF)
3526 {
3527 for (inner = TREE_OPERAND (exp, 0);
3528 REFERENCE_CLASS_P (inner);
3529 inner = TREE_OPERAND (inner, 0))
3530 ;
3531
3532 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3533 push_without_duplicates (exp, refs);
3534 else
3535 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3536 }
3537 else
3538 switch (TREE_CODE_CLASS (code))
3539 {
3540 case tcc_constant:
3541 break;
3542
3543 case tcc_declaration:
3544 /* Variables allocated to static storage can stay. */
3545 if (!TREE_STATIC (exp))
3546 push_without_duplicates (exp, refs);
3547 break;
3548
3549 case tcc_expression:
3550 /* This is the pattern built in ada/make_aligning_type. */
3551 if (code == ADDR_EXPR
3552 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3553 {
3554 push_without_duplicates (exp, refs);
3555 break;
3556 }
3557
3558 /* Fall through... */
3559
3560 case tcc_exceptional:
3561 case tcc_unary:
3562 case tcc_binary:
3563 case tcc_comparison:
3564 case tcc_reference:
3565 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3566 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3567 break;
3568
3569 case tcc_vl_exp:
3570 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3571 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3572 break;
3573
3574 default:
3575 gcc_unreachable ();
3576 }
3577 }
3578
3579 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3580 return a tree with all occurrences of references to F in a
3581 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3582 CONST_DECLs. Note that we assume here that EXP contains only
3583 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3584 occurring only in their argument list. */
3585
3586 tree
3587 substitute_in_expr (tree exp, tree f, tree r)
3588 {
3589 enum tree_code code = TREE_CODE (exp);
3590 tree op0, op1, op2, op3;
3591 tree new_tree;
3592
3593 /* We handle TREE_LIST and COMPONENT_REF separately. */
3594 if (code == TREE_LIST)
3595 {
3596 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3597 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3598 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3599 return exp;
3600
3601 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3602 }
3603 else if (code == COMPONENT_REF)
3604 {
3605 tree inner;
3606
3607 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3608 and it is the right field, replace it with R. */
3609 for (inner = TREE_OPERAND (exp, 0);
3610 REFERENCE_CLASS_P (inner);
3611 inner = TREE_OPERAND (inner, 0))
3612 ;
3613
3614 /* The field. */
3615 op1 = TREE_OPERAND (exp, 1);
3616
3617 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3618 return r;
3619
3620 /* If this expression hasn't been completed yet, leave it alone. */
3621 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3622 return exp;
3623
3624 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3625 if (op0 == TREE_OPERAND (exp, 0))
3626 return exp;
3627
3628 new_tree
3629 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3630 }
3631 else
3632 switch (TREE_CODE_CLASS (code))
3633 {
3634 case tcc_constant:
3635 return exp;
3636
3637 case tcc_declaration:
3638 if (exp == f)
3639 return r;
3640 else
3641 return exp;
3642
3643 case tcc_expression:
3644 if (exp == f)
3645 return r;
3646
3647 /* Fall through... */
3648
3649 case tcc_exceptional:
3650 case tcc_unary:
3651 case tcc_binary:
3652 case tcc_comparison:
3653 case tcc_reference:
3654 switch (TREE_CODE_LENGTH (code))
3655 {
3656 case 0:
3657 return exp;
3658
3659 case 1:
3660 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3661 if (op0 == TREE_OPERAND (exp, 0))
3662 return exp;
3663
3664 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3665 break;
3666
3667 case 2:
3668 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3669 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3670
3671 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3672 return exp;
3673
3674 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3675 break;
3676
3677 case 3:
3678 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3679 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3680 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3681
3682 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3683 && op2 == TREE_OPERAND (exp, 2))
3684 return exp;
3685
3686 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3687 break;
3688
3689 case 4:
3690 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3691 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3692 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3693 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3694
3695 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3696 && op2 == TREE_OPERAND (exp, 2)
3697 && op3 == TREE_OPERAND (exp, 3))
3698 return exp;
3699
3700 new_tree
3701 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3702 break;
3703
3704 default:
3705 gcc_unreachable ();
3706 }
3707 break;
3708
3709 case tcc_vl_exp:
3710 {
3711 int i;
3712
3713 new_tree = NULL_TREE;
3714
3715 /* If we are trying to replace F with a constant, inline back
3716 functions that do nothing other than compute a value from
3717 the arguments they are passed. This makes it possible to
3718 partially or entirely fold the replacement expression. */
3719 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3720 {
3721 tree t = maybe_inline_call_in_expr (exp);
3722 if (t)
3723 return SUBSTITUTE_IN_EXPR (t, f, r);
3724 }
3725
3726 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3727 {
3728 tree op = TREE_OPERAND (exp, i);
3729 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3730 if (new_op != op)
3731 {
3732 if (!new_tree)
3733 new_tree = copy_node (exp);
3734 TREE_OPERAND (new_tree, i) = new_op;
3735 }
3736 }
3737
3738 if (new_tree)
3739 {
3740 new_tree = fold (new_tree);
3741 if (TREE_CODE (new_tree) == CALL_EXPR)
3742 process_call_operands (new_tree);
3743 }
3744 else
3745 return exp;
3746 }
3747 break;
3748
3749 default:
3750 gcc_unreachable ();
3751 }
3752
3753 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3754
3755 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3756 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3757
3758 return new_tree;
3759 }
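
/* Illustrative sketch (editorial addition): given a hypothetical
   self-referential size expression SIZE that contains a COMPONENT_REF
   of the FIELD_DECL LEN_FIELD applied to a PLACEHOLDER_EXPR, a front
   end can plug in a concrete length with

	tree fixed = substitute_in_expr (size, len_field,
					 build_int_cst (sizetype, 16));

   The result is SIZE with every such reference replaced by 16 and the
   containing expressions re-folded.  */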
3760
3761 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3762 for it within OBJ, a tree that is an object or a chain of references. */
3763
3764 tree
3765 substitute_placeholder_in_expr (tree exp, tree obj)
3766 {
3767 enum tree_code code = TREE_CODE (exp);
3768 tree op0, op1, op2, op3;
3769 tree new_tree;
3770
3771 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3772 in the chain of OBJ. */
3773 if (code == PLACEHOLDER_EXPR)
3774 {
3775 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3776 tree elt;
3777
3778 for (elt = obj; elt != 0;
3779 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3780 || TREE_CODE (elt) == COND_EXPR)
3781 ? TREE_OPERAND (elt, 1)
3782 : (REFERENCE_CLASS_P (elt)
3783 || UNARY_CLASS_P (elt)
3784 || BINARY_CLASS_P (elt)
3785 || VL_EXP_CLASS_P (elt)
3786 || EXPRESSION_CLASS_P (elt))
3787 ? TREE_OPERAND (elt, 0) : 0))
3788 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3789 return elt;
3790
3791 for (elt = obj; elt != 0;
3792 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3793 || TREE_CODE (elt) == COND_EXPR)
3794 ? TREE_OPERAND (elt, 1)
3795 : (REFERENCE_CLASS_P (elt)
3796 || UNARY_CLASS_P (elt)
3797 || BINARY_CLASS_P (elt)
3798 || VL_EXP_CLASS_P (elt)
3799 || EXPRESSION_CLASS_P (elt))
3800 ? TREE_OPERAND (elt, 0) : 0))
3801 if (POINTER_TYPE_P (TREE_TYPE (elt))
3802 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3803 == need_type))
3804 return fold_build1 (INDIRECT_REF, need_type, elt);
3805
3806 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3807 survives until RTL generation, there will be an error. */
3808 return exp;
3809 }
3810
3811 /* TREE_LIST is special because we need to look at TREE_VALUE
3812 and TREE_CHAIN, not TREE_OPERANDS. */
3813 else if (code == TREE_LIST)
3814 {
3815 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3816 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3817 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3818 return exp;
3819
3820 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3821 }
3822 else
3823 switch (TREE_CODE_CLASS (code))
3824 {
3825 case tcc_constant:
3826 case tcc_declaration:
3827 return exp;
3828
3829 case tcc_exceptional:
3830 case tcc_unary:
3831 case tcc_binary:
3832 case tcc_comparison:
3833 case tcc_expression:
3834 case tcc_reference:
3835 case tcc_statement:
3836 switch (TREE_CODE_LENGTH (code))
3837 {
3838 case 0:
3839 return exp;
3840
3841 case 1:
3842 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3843 if (op0 == TREE_OPERAND (exp, 0))
3844 return exp;
3845
3846 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3847 break;
3848
3849 case 2:
3850 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3851 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3852
3853 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3854 return exp;
3855
3856 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3857 break;
3858
3859 case 3:
3860 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3861 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3862 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3863
3864 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3865 && op2 == TREE_OPERAND (exp, 2))
3866 return exp;
3867
3868 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3869 break;
3870
3871 case 4:
3872 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3873 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3874 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3875 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3876
3877 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3878 && op2 == TREE_OPERAND (exp, 2)
3879 && op3 == TREE_OPERAND (exp, 3))
3880 return exp;
3881
3882 new_tree
3883 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3884 break;
3885
3886 default:
3887 gcc_unreachable ();
3888 }
3889 break;
3890
3891 case tcc_vl_exp:
3892 {
3893 int i;
3894
3895 new_tree = NULL_TREE;
3896
3897 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3898 {
3899 tree op = TREE_OPERAND (exp, i);
3900 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3901 if (new_op != op)
3902 {
3903 if (!new_tree)
3904 new_tree = copy_node (exp);
3905 TREE_OPERAND (new_tree, i) = new_op;
3906 }
3907 }
3908
3909 if (new_tree)
3910 {
3911 new_tree = fold (new_tree);
3912 if (TREE_CODE (new_tree) == CALL_EXPR)
3913 process_call_operands (new_tree);
3914 }
3915 else
3916 return exp;
3917 }
3918 break;
3919
3920 default:
3921 gcc_unreachable ();
3922 }
3923
3924 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3925
3926 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3927 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3928
3929 return new_tree;
3930 }
3931 \f
3932
3933 /* Subroutine of stabilize_reference; this is called for subtrees of
3934 references. Any expression with side-effects must be put in a SAVE_EXPR
3935 to ensure that it is only evaluated once.
3936
3937 We don't put SAVE_EXPR nodes around everything, because assigning very
3938 simple expressions to temporaries causes us to miss good opportunities
3939 for optimizations. Among other things, the opportunity to fold in the
3940 addition of a constant into an addressing mode often gets lost, e.g.
3941 "y[i+1] += x;". In general, we take the approach that we should not make
3942 an assignment unless we are forced into it - i.e., that any non-side effect
3943 operator should be allowed, and that cse should take care of coalescing
3944 multiple utterances of the same expression should that prove fruitful. */
3945
3946 static tree
3947 stabilize_reference_1 (tree e)
3948 {
3949 tree result;
3950 enum tree_code code = TREE_CODE (e);
3951
3952 /* We cannot ignore const expressions because it might be a reference
3953 to a const array whose index contains side-effects. But we can
3954 ignore things that are actually constant or that have already been
3955 handled by this function. */
3956
3957 if (tree_invariant_p (e))
3958 return e;
3959
3960 switch (TREE_CODE_CLASS (code))
3961 {
3962 case tcc_exceptional:
3963 case tcc_type:
3964 case tcc_declaration:
3965 case tcc_comparison:
3966 case tcc_statement:
3967 case tcc_expression:
3968 case tcc_reference:
3969 case tcc_vl_exp:
3970 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3971 so that it will only be evaluated once. */
3972 /* The reference (r) and comparison (<) classes could be handled as
3973 below, but it is generally faster to only evaluate them once. */
3974 if (TREE_SIDE_EFFECTS (e))
3975 return save_expr (e);
3976 return e;
3977
3978 case tcc_constant:
3979 /* Constants need no processing. In fact, we should never reach
3980 here. */
3981 return e;
3982
3983 case tcc_binary:
3984 /* Division is slow and tends to be compiled with jumps,
3985 especially the division by powers of 2 that is often
3986 found inside of an array reference. So do it just once. */
3987 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3988 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3989 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3990 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3991 return save_expr (e);
3992 /* Recursively stabilize each operand. */
3993 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3994 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3995 break;
3996
3997 case tcc_unary:
3998 /* Recursively stabilize each operand. */
3999 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4000 break;
4001
4002 default:
4003 gcc_unreachable ();
4004 }
4005
4006 TREE_TYPE (result) = TREE_TYPE (e);
4007 TREE_READONLY (result) = TREE_READONLY (e);
4008 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4009 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4010
4011 return result;
4012 }
4013
4014 /* Stabilize a reference so that we can use it any number of times
4015 without causing its operands to be evaluated more than once.
4016 Returns the stabilized reference. This works by means of save_expr,
4017 so see the caveats in the comments about save_expr.
4018
4019 Also allows conversion expressions whose operands are references.
4020 Any other kind of expression is returned unchanged. */
4021
4022 tree
4023 stabilize_reference (tree ref)
4024 {
4025 tree result;
4026 enum tree_code code = TREE_CODE (ref);
4027
4028 switch (code)
4029 {
4030 case VAR_DECL:
4031 case PARM_DECL:
4032 case RESULT_DECL:
4033 /* No action is needed in this case. */
4034 return ref;
4035
4036 CASE_CONVERT:
4037 case FLOAT_EXPR:
4038 case FIX_TRUNC_EXPR:
4039 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4040 break;
4041
4042 case INDIRECT_REF:
4043 result = build_nt (INDIRECT_REF,
4044 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4045 break;
4046
4047 case COMPONENT_REF:
4048 result = build_nt (COMPONENT_REF,
4049 stabilize_reference (TREE_OPERAND (ref, 0)),
4050 TREE_OPERAND (ref, 1), NULL_TREE);
4051 break;
4052
4053 case BIT_FIELD_REF:
4054 result = build_nt (BIT_FIELD_REF,
4055 stabilize_reference (TREE_OPERAND (ref, 0)),
4056 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4057 break;
4058
4059 case ARRAY_REF:
4060 result = build_nt (ARRAY_REF,
4061 stabilize_reference (TREE_OPERAND (ref, 0)),
4062 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4063 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4064 break;
4065
4066 case ARRAY_RANGE_REF:
4067 result = build_nt (ARRAY_RANGE_REF,
4068 stabilize_reference (TREE_OPERAND (ref, 0)),
4069 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4070 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4071 break;
4072
4073 case COMPOUND_EXPR:
4074 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4075 it wouldn't be ignored. This matters when dealing with
4076 volatiles. */
4077 return stabilize_reference_1 (ref);
4078
4079 /* If arg isn't a kind of lvalue we recognize, make no change.
4080 Caller should recognize the error for an invalid lvalue. */
4081 default:
4082 return ref;
4083
4084 case ERROR_MARK:
4085 return error_mark_node;
4086 }
4087
4088 TREE_TYPE (result) = TREE_TYPE (ref);
4089 TREE_READONLY (result) = TREE_READONLY (ref);
4090 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4091 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4092
4093 return result;
4094 }
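/* Illustrative sketch, added for exposition and not part of the original
   sources: a front end lowering a compound assignment such as
   "a.b[i++] += 1" could stabilize the (hypothetical) lvalue tree REF once
   and then reuse it, e.g.

     tree lhs = stabilize_reference (ref);
     tree one = build_int_cst (TREE_TYPE (lhs), 1);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, one);
     tree asgn = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);

   The SAVE_EXPR that stabilize_reference_1 wraps around the side-effecting
   index guarantees that "i++" is evaluated only once even though LHS is
   used twice.  */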
4095 \f
4096 /* Low-level constructors for expressions. */
4097
4098 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
4099 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4100
4101 void
4102 recompute_tree_invariant_for_addr_expr (tree t)
4103 {
4104 tree node;
4105 bool tc = true, se = false;
4106
4107 /* We started out assuming this address is both invariant and constant, and
4108 that it has no side effects. Now go down any handled components and see if
4109 any of them involve offsets that are either non-constant or non-invariant.
4110 Also check for side-effects.
4111
4112 ??? Note that this code makes no attempt to deal with the case where
4113 taking the address of something causes a copy due to misalignment. */
4114
4115 #define UPDATE_FLAGS(NODE) \
4116 do { tree _node = (NODE); \
4117 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4118 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4119
4120 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4121 node = TREE_OPERAND (node, 0))
4122 {
4123 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4124 array reference (probably made temporarily by the G++ front end),
4125 so ignore all the operands. */
4126 if ((TREE_CODE (node) == ARRAY_REF
4127 || TREE_CODE (node) == ARRAY_RANGE_REF)
4128 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4129 {
4130 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4131 if (TREE_OPERAND (node, 2))
4132 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4133 if (TREE_OPERAND (node, 3))
4134 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4135 }
4136 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4137 FIELD_DECL, apparently. The G++ front end can put something else
4138 there, at least temporarily. */
4139 else if (TREE_CODE (node) == COMPONENT_REF
4140 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4141 {
4142 if (TREE_OPERAND (node, 2))
4143 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4144 }
4145 }
4146
4147 node = lang_hooks.expr_to_decl (node, &tc, &se);
4148
4149 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4150 the address, since &(*a)->b is a form of addition. If it's a constant, the
4151 address is constant too. If it's a decl, its address is constant if the
4152 decl is static. Everything else is not constant and, furthermore,
4153 taking the address of a volatile variable is not volatile. */
4154 if (TREE_CODE (node) == INDIRECT_REF
4155 || TREE_CODE (node) == MEM_REF)
4156 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4157 else if (CONSTANT_CLASS_P (node))
4158 ;
4159 else if (DECL_P (node))
4160 tc &= (staticp (node) != NULL_TREE);
4161 else
4162 {
4163 tc = false;
4164 se |= TREE_SIDE_EFFECTS (node);
4165 }
4166
4167
4168 TREE_CONSTANT (t) = tc;
4169 TREE_SIDE_EFFECTS (t) = se;
4170 #undef UPDATE_FLAGS
4171 }
4172
4173 /* Build an expression of code CODE, data type TYPE, and operands as
4174 specified. Expressions and reference nodes can be created this way.
4175 Constants, decls, types and misc nodes cannot be.
4176
4177 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4178 enough for all extant tree codes. */
4179
4180 tree
4181 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4182 {
4183 tree t;
4184
4185 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4186
4187 t = make_node_stat (code PASS_MEM_STAT);
4188 TREE_TYPE (t) = tt;
4189
4190 return t;
4191 }
4192
4193 tree
4194 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4195 {
4196 int length = sizeof (struct tree_exp);
4197 tree t;
4198
4199 record_node_allocation_statistics (code, length);
4200
4201 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4202
4203 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4204
4205 memset (t, 0, sizeof (struct tree_common));
4206
4207 TREE_SET_CODE (t, code);
4208
4209 TREE_TYPE (t) = type;
4210 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4211 TREE_OPERAND (t, 0) = node;
4212 if (node && !TYPE_P (node))
4213 {
4214 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4215 TREE_READONLY (t) = TREE_READONLY (node);
4216 }
4217
4218 if (TREE_CODE_CLASS (code) == tcc_statement)
4219 TREE_SIDE_EFFECTS (t) = 1;
4220 else switch (code)
4221 {
4222 case VA_ARG_EXPR:
4223 /* All of these have side-effects, no matter what their
4224 operands are. */
4225 TREE_SIDE_EFFECTS (t) = 1;
4226 TREE_READONLY (t) = 0;
4227 break;
4228
4229 case INDIRECT_REF:
4230 /* Whether a dereference is readonly has nothing to do with whether
4231 its operand is readonly. */
4232 TREE_READONLY (t) = 0;
4233 break;
4234
4235 case ADDR_EXPR:
4236 if (node)
4237 recompute_tree_invariant_for_addr_expr (t);
4238 break;
4239
4240 default:
4241 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4242 && node && !TYPE_P (node)
4243 && TREE_CONSTANT (node))
4244 TREE_CONSTANT (t) = 1;
4245 if (TREE_CODE_CLASS (code) == tcc_reference
4246 && node && TREE_THIS_VOLATILE (node))
4247 TREE_THIS_VOLATILE (t) = 1;
4248 break;
4249 }
4250
4251 return t;
4252 }
4253
4254 #define PROCESS_ARG(N) \
4255 do { \
4256 TREE_OPERAND (t, N) = arg##N; \
4257 if (arg##N && !TYPE_P (arg##N)) \
4258 { \
4259 if (TREE_SIDE_EFFECTS (arg##N)) \
4260 side_effects = 1; \
4261 if (!TREE_READONLY (arg##N) \
4262 && !CONSTANT_CLASS_P (arg##N)) \
4263 (void) (read_only = 0); \
4264 if (!TREE_CONSTANT (arg##N)) \
4265 (void) (constant = 0); \
4266 } \
4267 } while (0)
4268
4269 tree
4270 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4271 {
4272 bool constant, read_only, side_effects;
4273 tree t;
4274
4275 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4276
4277 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4278 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4279 /* When sizetype precision doesn't match that of pointers
4280 we need to be able to build explicit extensions or truncations
4281 of the offset argument. */
4282 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4283 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4284 && TREE_CODE (arg1) == INTEGER_CST);
4285
4286 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4287 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4288 && ptrofftype_p (TREE_TYPE (arg1)));
4289
4290 t = make_node_stat (code PASS_MEM_STAT);
4291 TREE_TYPE (t) = tt;
4292
4293 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4294 result based on those same flags for the arguments. But if the
4295 arguments aren't really even `tree' expressions, we shouldn't be trying
4296 to do this. */
4297
4298 /* Expressions without side effects may be constant if their
4299 arguments are as well. */
4300 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4301 || TREE_CODE_CLASS (code) == tcc_binary);
4302 read_only = 1;
4303 side_effects = TREE_SIDE_EFFECTS (t);
4304
4305 PROCESS_ARG (0);
4306 PROCESS_ARG (1);
4307
4308 TREE_READONLY (t) = read_only;
4309 TREE_CONSTANT (t) = constant;
4310 TREE_SIDE_EFFECTS (t) = side_effects;
4311 TREE_THIS_VOLATILE (t)
4312 = (TREE_CODE_CLASS (code) == tcc_reference
4313 && arg0 && TREE_THIS_VOLATILE (arg0));
4314
4315 return t;
4316 }
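/* Example, illustrative only and not from the original sources: building
   the expression "x + 4" for a hypothetical integer VAR_DECL X with the
   build2 interface defined above:

     tree four = build_int_cst (integer_type_node, 4);
     tree sum = build2 (PLUS_EXPR, integer_type_node, x, four);

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of SUM are then
   derived from the operands by PROCESS_ARG as shown above.  */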
4317
4318
4319 tree
4320 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4321 tree arg2 MEM_STAT_DECL)
4322 {
4323 bool constant, read_only, side_effects;
4324 tree t;
4325
4326 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4327 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4328
4329 t = make_node_stat (code PASS_MEM_STAT);
4330 TREE_TYPE (t) = tt;
4331
4332 read_only = 1;
4333
4334 /* As a special exception, if COND_EXPR has NULL branches, we
4335 assume that it is a gimple statement and always consider
4336 it to have side effects. */
4337 if (code == COND_EXPR
4338 && tt == void_type_node
4339 && arg1 == NULL_TREE
4340 && arg2 == NULL_TREE)
4341 side_effects = true;
4342 else
4343 side_effects = TREE_SIDE_EFFECTS (t);
4344
4345 PROCESS_ARG (0);
4346 PROCESS_ARG (1);
4347 PROCESS_ARG (2);
4348
4349 if (code == COND_EXPR)
4350 TREE_READONLY (t) = read_only;
4351
4352 TREE_SIDE_EFFECTS (t) = side_effects;
4353 TREE_THIS_VOLATILE (t)
4354 = (TREE_CODE_CLASS (code) == tcc_reference
4355 && arg0 && TREE_THIS_VOLATILE (arg0));
4356
4357 return t;
4358 }
4359
4360 tree
4361 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4362 tree arg2, tree arg3 MEM_STAT_DECL)
4363 {
4364 bool constant, read_only, side_effects;
4365 tree t;
4366
4367 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4368
4369 t = make_node_stat (code PASS_MEM_STAT);
4370 TREE_TYPE (t) = tt;
4371
4372 side_effects = TREE_SIDE_EFFECTS (t);
4373
4374 PROCESS_ARG (0);
4375 PROCESS_ARG (1);
4376 PROCESS_ARG (2);
4377 PROCESS_ARG (3);
4378
4379 TREE_SIDE_EFFECTS (t) = side_effects;
4380 TREE_THIS_VOLATILE (t)
4381 = (TREE_CODE_CLASS (code) == tcc_reference
4382 && arg0 && TREE_THIS_VOLATILE (arg0));
4383
4384 return t;
4385 }
4386
4387 tree
4388 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4389 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4390 {
4391 bool constant, read_only, side_effects;
4392 tree t;
4393
4394 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4395
4396 t = make_node_stat (code PASS_MEM_STAT);
4397 TREE_TYPE (t) = tt;
4398
4399 side_effects = TREE_SIDE_EFFECTS (t);
4400
4401 PROCESS_ARG (0);
4402 PROCESS_ARG (1);
4403 PROCESS_ARG (2);
4404 PROCESS_ARG (3);
4405 PROCESS_ARG (4);
4406
4407 TREE_SIDE_EFFECTS (t) = side_effects;
4408 TREE_THIS_VOLATILE (t)
4409 = (TREE_CODE_CLASS (code) == tcc_reference
4410 && arg0 && TREE_THIS_VOLATILE (arg0));
4411
4412 return t;
4413 }
4414
4415 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4416 on the pointer PTR. */
4417
4418 tree
4419 build_simple_mem_ref_loc (location_t loc, tree ptr)
4420 {
4421 HOST_WIDE_INT offset = 0;
4422 tree ptype = TREE_TYPE (ptr);
4423 tree tem;
4424 /* For convenience allow addresses that collapse to a simple base
4425 and offset. */
4426 if (TREE_CODE (ptr) == ADDR_EXPR
4427 && (handled_component_p (TREE_OPERAND (ptr, 0))
4428 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4429 {
4430 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4431 gcc_assert (ptr);
4432 ptr = build_fold_addr_expr (ptr);
4433 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4434 }
4435 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4436 ptr, build_int_cst (ptype, offset));
4437 SET_EXPR_LOCATION (tem, loc);
4438 return tem;
4439 }
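/* Illustrative sketch (not part of the original file): for a hypothetical
   pointer decl or SSA name PTR of type "int *",

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   yields a MEM_REF of type "int" with a zero offset, i.e. the equivalent
   of the C expression "*ptr".  */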
4440
4441 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4442
4443 offset_int
4444 mem_ref_offset (const_tree t)
4445 {
4446 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4447 }
4448
4449 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4450 offsetted by OFFSET units. */
4451
4452 tree
4453 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4454 {
4455 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4456 build_fold_addr_expr (base),
4457 build_int_cst (ptr_type_node, offset));
4458 tree addr = build1 (ADDR_EXPR, type, ref);
4459 recompute_tree_invariant_for_addr_expr (addr);
4460 return addr;
4461 }
4462
4463 /* Similar to the build<N> functions above, except don't specify the
4464 TREE_TYPE and leave the TREE_SIDE_EFFECTS as 0.
4465 It is permissible for arguments to be null,
4466 or even garbage if their values do not matter. */
4467
4468 tree
4469 build_nt (enum tree_code code, ...)
4470 {
4471 tree t;
4472 int length;
4473 int i;
4474 va_list p;
4475
4476 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4477
4478 va_start (p, code);
4479
4480 t = make_node (code);
4481 length = TREE_CODE_LENGTH (code);
4482
4483 for (i = 0; i < length; i++)
4484 TREE_OPERAND (t, i) = va_arg (p, tree);
4485
4486 va_end (p);
4487 return t;
4488 }
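/* Example, added for exposition only: stabilize_reference above uses
   build_nt to create "raw" nodes whose type and flags are filled in
   afterwards.  A direct call on hypothetical OBJECT and FIELD trees,

     tree t = build_nt (COMPONENT_REF, object, field, NULL_TREE);

   leaves TREE_TYPE and TREE_SIDE_EFFECTS of T unset, unlike build3.  */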
4489
4490 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4491 tree vec. */
4492
4493 tree
4494 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4495 {
4496 tree ret, t;
4497 unsigned int ix;
4498
4499 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4500 CALL_EXPR_FN (ret) = fn;
4501 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4502 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4503 CALL_EXPR_ARG (ret, ix) = t;
4504 return ret;
4505 }
4506 \f
4507 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4508 We do NOT enter this node in any sort of symbol table.
4509
4510 LOC is the location of the decl.
4511
4512 layout_decl is used to set up the decl's storage layout.
4513 Other slots are initialized to 0 or null pointers. */
4514
4515 tree
4516 build_decl_stat (location_t loc, enum tree_code code, tree name,
4517 tree type MEM_STAT_DECL)
4518 {
4519 tree t;
4520
4521 t = make_node_stat (code PASS_MEM_STAT);
4522 DECL_SOURCE_LOCATION (t) = loc;
4523
4524 /* if (type == error_mark_node)
4525 type = integer_type_node; */
4526 /* That is not done, deliberately, so that having error_mark_node
4527 as the type can suppress useless errors in the use of this variable. */
4528
4529 DECL_NAME (t) = name;
4530 TREE_TYPE (t) = type;
4531
4532 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4533 layout_decl (t, 0);
4534
4535 return t;
4536 }
4537
4538 /* Builds and returns a function declaration with NAME and TYPE. */
4539
4540 tree
4541 build_fn_decl (const char *name, tree type)
4542 {
4543 tree id = get_identifier (name);
4544 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4545
4546 DECL_EXTERNAL (decl) = 1;
4547 TREE_PUBLIC (decl) = 1;
4548 DECL_ARTIFICIAL (decl) = 1;
4549 TREE_NOTHROW (decl) = 1;
4550
4551 return decl;
4552 }
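/* Example usage, illustrative and not part of the original sources:
   declaring an external "void __my_runtime_hook (void)" helper, where the
   name is purely hypothetical:

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__my_runtime_hook", fntype);

   The resulting decl is external, public, artificial and nothrow, as set
   above.  */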
4553
4554 vec<tree, va_gc> *all_translation_units;
4555
4556 /* Builds a new translation-unit decl with name NAME, queues it in the
4557 global list of translation-unit decls and returns it. */
4558
4559 tree
4560 build_translation_unit_decl (tree name)
4561 {
4562 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4563 name, NULL_TREE);
4564 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4565 vec_safe_push (all_translation_units, tu);
4566 return tu;
4567 }
4568
4569 \f
4570 /* BLOCK nodes are used to represent the structure of binding contours
4571 and declarations, once those contours have been exited and their contents
4572 compiled. This information is used for outputting debugging info. */
4573
4574 tree
4575 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4576 {
4577 tree block = make_node (BLOCK);
4578
4579 BLOCK_VARS (block) = vars;
4580 BLOCK_SUBBLOCKS (block) = subblocks;
4581 BLOCK_SUPERCONTEXT (block) = supercontext;
4582 BLOCK_CHAIN (block) = chain;
4583 return block;
4584 }
4585
4586 \f
4587 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4588
4589 LOC is the location to use in tree T. */
4590
4591 void
4592 protected_set_expr_location (tree t, location_t loc)
4593 {
4594 if (CAN_HAVE_LOCATION_P (t))
4595 SET_EXPR_LOCATION (t, loc);
4596 }
4597 \f
4598 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4599 is ATTRIBUTE. */
4600
4601 tree
4602 build_decl_attribute_variant (tree ddecl, tree attribute)
4603 {
4604 DECL_ATTRIBUTES (ddecl) = attribute;
4605 return ddecl;
4606 }
4607
4608 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4609 is ATTRIBUTE and its qualifiers are QUALS.
4610
4611 Record such modified types already made so we don't make duplicates. */
4612
4613 tree
4614 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4615 {
4616 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4617 {
4618 inchash::hash hstate;
4619 tree ntype;
4620 int i;
4621 tree t;
4622 enum tree_code code = TREE_CODE (ttype);
4623
4624 /* Building a distinct copy of a tagged type is inappropriate; it
4625 causes breakage in code that expects there to be a one-to-one
4626 relationship between a struct and its fields.
4627 build_duplicate_type is another solution (as used in
4628 handle_transparent_union_attribute), but that doesn't play well
4629 with the stronger C++ type identity model. */
4630 if (TREE_CODE (ttype) == RECORD_TYPE
4631 || TREE_CODE (ttype) == UNION_TYPE
4632 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4633 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4634 {
4635 warning (OPT_Wattributes,
4636 "ignoring attributes applied to %qT after definition",
4637 TYPE_MAIN_VARIANT (ttype));
4638 return build_qualified_type (ttype, quals);
4639 }
4640
4641 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4642 ntype = build_distinct_type_copy (ttype);
4643
4644 TYPE_ATTRIBUTES (ntype) = attribute;
4645
4646 hstate.add_int (code);
4647 if (TREE_TYPE (ntype))
4648 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4649 attribute_hash_list (attribute, hstate);
4650
4651 switch (TREE_CODE (ntype))
4652 {
4653 case FUNCTION_TYPE:
4654 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4655 break;
4656 case ARRAY_TYPE:
4657 if (TYPE_DOMAIN (ntype))
4658 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4659 break;
4660 case INTEGER_TYPE:
4661 t = TYPE_MAX_VALUE (ntype);
4662 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4663 hstate.add_object (TREE_INT_CST_ELT (t, i));
4664 break;
4665 case REAL_TYPE:
4666 case FIXED_POINT_TYPE:
4667 {
4668 unsigned int precision = TYPE_PRECISION (ntype);
4669 hstate.add_object (precision);
4670 }
4671 break;
4672 default:
4673 break;
4674 }
4675
4676 ntype = type_hash_canon (hstate.end(), ntype);
4677
4678 /* If the target-dependent attributes make NTYPE different from
4679 its canonical type, we will need to use structural equality
4680 checks for this type. */
4681 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4682 || !comp_type_attributes (ntype, ttype))
4683 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4684 else if (TYPE_CANONICAL (ntype) == ntype)
4685 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4686
4687 ttype = build_qualified_type (ntype, quals);
4688 }
4689 else if (TYPE_QUALS (ttype) != quals)
4690 ttype = build_qualified_type (ttype, quals);
4691
4692 return ttype;
4693 }
4694
4695 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4696 the same. */
4697
4698 static bool
4699 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4700 {
4701 tree cl1, cl2;
4702 for (cl1 = clauses1, cl2 = clauses2;
4703 cl1 && cl2;
4704 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4705 {
4706 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4707 return false;
4708 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4709 {
4710 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4711 OMP_CLAUSE_DECL (cl2)) != 1)
4712 return false;
4713 }
4714 switch (OMP_CLAUSE_CODE (cl1))
4715 {
4716 case OMP_CLAUSE_ALIGNED:
4717 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4718 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4719 return false;
4720 break;
4721 case OMP_CLAUSE_LINEAR:
4722 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4723 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4724 return false;
4725 break;
4726 case OMP_CLAUSE_SIMDLEN:
4727 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4728 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4729 return false;
4730 default:
4731 break;
4732 }
4733 }
4734 return true;
4735 }
4736
4737 /* Compare two constructor-element-type constants. Return true if the lists
4738 are known to be equal; otherwise return false. */
4739
4740 static bool
4741 simple_cst_list_equal (const_tree l1, const_tree l2)
4742 {
4743 while (l1 != NULL_TREE && l2 != NULL_TREE)
4744 {
4745 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4746 return false;
4747
4748 l1 = TREE_CHAIN (l1);
4749 l2 = TREE_CHAIN (l2);
4750 }
4751
4752 return l1 == l2;
4753 }
4754
4755 /* Compare two attributes for their value identity. Return true if the
4756 attribute values are known to be equal; otherwise return false.
4757 */
4758
4759 static bool
4760 attribute_value_equal (const_tree attr1, const_tree attr2)
4761 {
4762 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4763 return true;
4764
4765 if (TREE_VALUE (attr1) != NULL_TREE
4766 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4767 && TREE_VALUE (attr2) != NULL
4768 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4769 return (simple_cst_list_equal (TREE_VALUE (attr1),
4770 TREE_VALUE (attr2)) == 1);
4771
4772 if ((flag_openmp || flag_openmp_simd)
4773 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4774 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4775 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4776 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4777 TREE_VALUE (attr2));
4778
4779 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4780 }
4781
4782 /* Return 0 if the attributes for two types are incompatible, 1 if they
4783 are compatible, and 2 if they are nearly compatible (which causes a
4784 warning to be generated). */
4785 int
4786 comp_type_attributes (const_tree type1, const_tree type2)
4787 {
4788 const_tree a1 = TYPE_ATTRIBUTES (type1);
4789 const_tree a2 = TYPE_ATTRIBUTES (type2);
4790 const_tree a;
4791
4792 if (a1 == a2)
4793 return 1;
4794 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4795 {
4796 const struct attribute_spec *as;
4797 const_tree attr;
4798
4799 as = lookup_attribute_spec (get_attribute_name (a));
4800 if (!as || as->affects_type_identity == false)
4801 continue;
4802
4803 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4804 if (!attr || !attribute_value_equal (a, attr))
4805 break;
4806 }
4807 if (!a)
4808 {
4809 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4810 {
4811 const struct attribute_spec *as;
4812
4813 as = lookup_attribute_spec (get_attribute_name (a));
4814 if (!as || as->affects_type_identity == false)
4815 continue;
4816
4817 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4818 break;
4819 /* We don't need to compare trees again, as we did this
4820 already in the first loop. */
4821 }
4822 /* All attributes affecting type identity are equal, so
4823 there is no need to call the target hook for comparison. */
4824 if (!a)
4825 return 1;
4826 }
4827 /* As some type combinations - like the default calling convention - might
4828 still be compatible, we have to call the target hook to get the final result. */
4829 return targetm.comp_type_attributes (type1, type2);
4830 }
4831
4832 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4833 is ATTRIBUTE.
4834
4835 Record such modified types already made so we don't make duplicates. */
4836
4837 tree
4838 build_type_attribute_variant (tree ttype, tree attribute)
4839 {
4840 return build_type_attribute_qual_variant (ttype, attribute,
4841 TYPE_QUALS (ttype));
4842 }
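/* Illustrative sketch, added for exposition: attaching a "may_alias"
   attribute to an existing type TYPE (a hypothetical variable here) might
   look like

     tree attr = tree_cons (get_identifier ("may_alias"), NULL_TREE,
			    TYPE_ATTRIBUTES (type));
     tree newtype = build_type_attribute_variant (type, attr);

   build_type_attribute_qual_variant then hashes the new type so that
   repeated requests for the same attribute/qualifier combination can
   share one node.  */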
4843
4844
4845 /* Reset the expression *EXPR_P, a size or position.
4846
4847 ??? We could reset all non-constant sizes or positions. But it's cheap
4848 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4849
4850 We need to reset self-referential sizes or positions because they cannot
4851 be gimplified and thus can contain a CALL_EXPR after the gimplification
4852 is finished, which will run afoul of LTO streaming. And they need to be
4853 reset to something essentially dummy but not constant, so as to preserve
4854 the properties of the object they are attached to. */
4855
4856 static inline void
4857 free_lang_data_in_one_sizepos (tree *expr_p)
4858 {
4859 tree expr = *expr_p;
4860 if (CONTAINS_PLACEHOLDER_P (expr))
4861 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4862 }
4863
4864
4865 /* Reset all the fields in a binfo node BINFO. We only keep
4866 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4867
4868 static void
4869 free_lang_data_in_binfo (tree binfo)
4870 {
4871 unsigned i;
4872 tree t;
4873
4874 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4875
4876 BINFO_VIRTUALS (binfo) = NULL_TREE;
4877 BINFO_BASE_ACCESSES (binfo) = NULL;
4878 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4879 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4880
4881 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4882 free_lang_data_in_binfo (t);
4883 }
4884
4885
4886 /* Reset all language specific information still present in TYPE. */
4887
4888 static void
4889 free_lang_data_in_type (tree type)
4890 {
4891 gcc_assert (TYPE_P (type));
4892
4893 /* Give the FE a chance to remove its own data first. */
4894 lang_hooks.free_lang_data (type);
4895
4896 TREE_LANG_FLAG_0 (type) = 0;
4897 TREE_LANG_FLAG_1 (type) = 0;
4898 TREE_LANG_FLAG_2 (type) = 0;
4899 TREE_LANG_FLAG_3 (type) = 0;
4900 TREE_LANG_FLAG_4 (type) = 0;
4901 TREE_LANG_FLAG_5 (type) = 0;
4902 TREE_LANG_FLAG_6 (type) = 0;
4903
4904 if (TREE_CODE (type) == FUNCTION_TYPE)
4905 {
4906 /* Remove the const and volatile qualifiers from arguments. The
4907 C++ front end removes them, but the C front end does not,
4908 leading to false ODR violation errors when merging two
4909 instances of the same function signature compiled by
4910 different front ends. */
4911 tree p;
4912
4913 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4914 {
4915 tree arg_type = TREE_VALUE (p);
4916
4917 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4918 {
4919 int quals = TYPE_QUALS (arg_type)
4920 & ~TYPE_QUAL_CONST
4921 & ~TYPE_QUAL_VOLATILE;
4922 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4923 free_lang_data_in_type (TREE_VALUE (p));
4924 }
4925 }
4926 }
4927
4928 /* Remove members that are not actually FIELD_DECLs from the field
4929 list of an aggregate. These occur in C++. */
4930 if (RECORD_OR_UNION_TYPE_P (type))
4931 {
4932 tree prev, member;
4933
4934 /* Note that TYPE_FIELDS can be shared across distinct
4935 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4936 to be removed, we cannot set its TREE_CHAIN to NULL.
4937 Otherwise, we would not be able to find all the other fields
4938 in the other instances of this TREE_TYPE.
4939
4940 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4941 prev = NULL_TREE;
4942 member = TYPE_FIELDS (type);
4943 while (member)
4944 {
4945 if (TREE_CODE (member) == FIELD_DECL
4946 || TREE_CODE (member) == TYPE_DECL)
4947 {
4948 if (prev)
4949 TREE_CHAIN (prev) = member;
4950 else
4951 TYPE_FIELDS (type) = member;
4952 prev = member;
4953 }
4954
4955 member = TREE_CHAIN (member);
4956 }
4957
4958 if (prev)
4959 TREE_CHAIN (prev) = NULL_TREE;
4960 else
4961 TYPE_FIELDS (type) = NULL_TREE;
4962
4963 TYPE_METHODS (type) = NULL_TREE;
4964 if (TYPE_BINFO (type))
4965 free_lang_data_in_binfo (TYPE_BINFO (type));
4966 }
4967 else
4968 {
4969 /* For non-aggregate types, clear out the language slot (which
4970 overloads TYPE_BINFO). */
4971 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4972
4973 if (INTEGRAL_TYPE_P (type)
4974 || SCALAR_FLOAT_TYPE_P (type)
4975 || FIXED_POINT_TYPE_P (type))
4976 {
4977 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4978 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4979 }
4980 }
4981
4982 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4983 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4984
4985 if (TYPE_CONTEXT (type)
4986 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4987 {
4988 tree ctx = TYPE_CONTEXT (type);
4989 do
4990 {
4991 ctx = BLOCK_SUPERCONTEXT (ctx);
4992 }
4993 while (ctx && TREE_CODE (ctx) == BLOCK);
4994 TYPE_CONTEXT (type) = ctx;
4995 }
4996 }
4997
4998
4999 /* Return true if DECL may need an assembler name to be set. */
5000
5001 static inline bool
5002 need_assembler_name_p (tree decl)
5003 {
5004 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5005 merging. */
5006 if (flag_lto_odr_type_mering
5007 && TREE_CODE (decl) == TYPE_DECL
5008 && DECL_NAME (decl)
5009 && decl == TYPE_NAME (TREE_TYPE (decl))
5010 && !is_lang_specific (TREE_TYPE (decl))
5011 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5012 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5013 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5014 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5015 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5016 if (TREE_CODE (decl) != FUNCTION_DECL
5017 && TREE_CODE (decl) != VAR_DECL)
5018 return false;
5019
5020 /* If DECL already has its assembler name set, it does not need a
5021 new one. */
5022 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5023 || DECL_ASSEMBLER_NAME_SET_P (decl))
5024 return false;
5025
5026 /* Abstract decls do not need an assembler name. */
5027 if (DECL_ABSTRACT_P (decl))
5028 return false;
5029
5030 /* For VAR_DECLs, only static, public and external symbols need an
5031 assembler name. */
5032 if (TREE_CODE (decl) == VAR_DECL
5033 && !TREE_STATIC (decl)
5034 && !TREE_PUBLIC (decl)
5035 && !DECL_EXTERNAL (decl))
5036 return false;
5037
5038 if (TREE_CODE (decl) == FUNCTION_DECL)
5039 {
5040 /* Do not set assembler name on builtins. Allow RTL expansion to
5041 decide whether to expand inline or via a regular call. */
5042 if (DECL_BUILT_IN (decl)
5043 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5044 return false;
5045
5046 /* Functions represented in the callgraph need an assembler name. */
5047 if (cgraph_node::get (decl) != NULL)
5048 return true;
5049
5050 /* Unused and not public functions don't need an assembler name. */
5051 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5052 return false;
5053 }
5054
5055 return true;
5056 }
5057
5058
5059 /* Reset all language specific information still present in symbol
5060 DECL. */
5061
5062 static void
5063 free_lang_data_in_decl (tree decl)
5064 {
5065 gcc_assert (DECL_P (decl));
5066
5067 /* Give the FE a chance to remove its own data first. */
5068 lang_hooks.free_lang_data (decl);
5069
5070 TREE_LANG_FLAG_0 (decl) = 0;
5071 TREE_LANG_FLAG_1 (decl) = 0;
5072 TREE_LANG_FLAG_2 (decl) = 0;
5073 TREE_LANG_FLAG_3 (decl) = 0;
5074 TREE_LANG_FLAG_4 (decl) = 0;
5075 TREE_LANG_FLAG_5 (decl) = 0;
5076 TREE_LANG_FLAG_6 (decl) = 0;
5077
5078 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5079 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5080 if (TREE_CODE (decl) == FIELD_DECL)
5081 {
5082 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5083 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5084 DECL_QUALIFIER (decl) = NULL_TREE;
5085 }
5086
5087 if (TREE_CODE (decl) == FUNCTION_DECL)
5088 {
5089 struct cgraph_node *node;
5090 if (!(node = cgraph_node::get (decl))
5091 || (!node->definition && !node->clones))
5092 {
5093 if (node)
5094 node->release_body ();
5095 else
5096 {
5097 release_function_body (decl);
5098 DECL_ARGUMENTS (decl) = NULL;
5099 DECL_RESULT (decl) = NULL;
5100 DECL_INITIAL (decl) = error_mark_node;
5101 }
5102 }
5103 if (gimple_has_body_p (decl))
5104 {
5105 tree t;
5106
5107 /* If DECL has a gimple body, then the context for its
5108 arguments must be DECL. Otherwise, it doesn't really
5109 matter, as we will not be emitting any code for DECL. In
5110 general, there may be other instances of DECL created by
5111 the front end and since PARM_DECLs are generally shared,
5112 their DECL_CONTEXT changes as the replicas of DECL are
5113 created. The only time where DECL_CONTEXT is important
5114 is for the FUNCTION_DECLs that have a gimple body (since
5115 the PARM_DECL will be used in the function's body). */
5116 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5117 DECL_CONTEXT (t) = decl;
5118 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5119 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5120 = target_option_default_node;
5121 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5122 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5123 = optimization_default_node;
5124 }
5125
5126 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5127 At this point, it is not needed anymore. */
5128 DECL_SAVED_TREE (decl) = NULL_TREE;
5129
5130 /* Clear the abstract origin if it refers to a method. Otherwise
5131 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5132 origin will not be output correctly. */
5133 if (DECL_ABSTRACT_ORIGIN (decl)
5134 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5135 && RECORD_OR_UNION_TYPE_P
5136 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5137 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5138
5139 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5140 DECL_VINDEX referring to itself into a vtable slot number as it
5141 should. Happens with functions that are copied and then forgotten
5142 about. Just clear it, it won't matter anymore. */
5143 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5144 DECL_VINDEX (decl) = NULL_TREE;
5145 }
5146 else if (TREE_CODE (decl) == VAR_DECL)
5147 {
5148 if ((DECL_EXTERNAL (decl)
5149 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5150 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5151 DECL_INITIAL (decl) = NULL_TREE;
5152 }
5153 else if (TREE_CODE (decl) == TYPE_DECL
5154 || TREE_CODE (decl) == FIELD_DECL)
5155 DECL_INITIAL (decl) = NULL_TREE;
5156 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5157 && DECL_INITIAL (decl)
5158 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5159 {
5160 /* Strip builtins from the translation-unit BLOCK. We still have targets
5161 without builtin_decl_explicit support, and builtins are shared
5162 nodes, so we can't use TREE_CHAIN in multiple lists. */
5163 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5164 while (*nextp)
5165 {
5166 tree var = *nextp;
5167 if (TREE_CODE (var) == FUNCTION_DECL
5168 && DECL_BUILT_IN (var))
5169 *nextp = TREE_CHAIN (var);
5170 else
5171 nextp = &TREE_CHAIN (var);
5172 }
5173 }
5174 }
5175
5176
5177 /* Data used when collecting DECLs and TYPEs for language data removal. */
5178
5179 struct free_lang_data_d
5180 {
5181 /* Worklist to avoid excessive recursion. */
5182 vec<tree> worklist;
5183
5184 /* Set of traversed objects. Used to avoid duplicate visits. */
5185 hash_set<tree> *pset;
5186
5187 /* Array of symbols to process with free_lang_data_in_decl. */
5188 vec<tree> decls;
5189
5190 /* Array of types to process with free_lang_data_in_type. */
5191 vec<tree> types;
5192 };
5193
5194
5195 /* Save all language fields needed to generate proper debug information
5196 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5197
5198 static void
5199 save_debug_info_for_decl (tree t)
5200 {
5201 /*struct saved_debug_info_d *sdi;*/
5202
5203 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5204
5205 /* FIXME. Partial implementation for saving debug info removed. */
5206 }
5207
5208
5209 /* Save all language fields needed to generate proper debug information
5210 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5211
5212 static void
5213 save_debug_info_for_type (tree t)
5214 {
5215 /*struct saved_debug_info_d *sdi;*/
5216
5217 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5218
5219 /* FIXME. Partial implementation for saving debug info removed. */
5220 }
5221
5222
5223 /* Add type or decl T to one of the list of tree nodes that need their
5224 language data removed. The lists are held inside FLD. */
5225
5226 static void
5227 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5228 {
5229 if (DECL_P (t))
5230 {
5231 fld->decls.safe_push (t);
5232 if (debug_info_level > DINFO_LEVEL_TERSE)
5233 save_debug_info_for_decl (t);
5234 }
5235 else if (TYPE_P (t))
5236 {
5237 fld->types.safe_push (t);
5238 if (debug_info_level > DINFO_LEVEL_TERSE)
5239 save_debug_info_for_type (t);
5240 }
5241 else
5242 gcc_unreachable ();
5243 }
5244
5245 /* Push tree node T into FLD->WORKLIST. */
5246
5247 static inline void
5248 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5249 {
5250 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5251 fld->worklist.safe_push ((t));
5252 }
5253
5254
5255 /* Operand callback helper for free_lang_data_in_node. *TP is the
5256 subtree operand being considered. */
5257
5258 static tree
5259 find_decls_types_r (tree *tp, int *ws, void *data)
5260 {
5261 tree t = *tp;
5262 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5263
5264 if (TREE_CODE (t) == TREE_LIST)
5265 return NULL_TREE;
5266
5267 /* Language specific nodes will be removed, so there is no need
5268 to gather anything under them. */
5269 if (is_lang_specific (t))
5270 {
5271 *ws = 0;
5272 return NULL_TREE;
5273 }
5274
5275 if (DECL_P (t))
5276 {
5277 /* Note that walk_tree does not traverse every possible field in
5278 decls, so we have to do our own traversals here. */
5279 add_tree_to_fld_list (t, fld);
5280
5281 fld_worklist_push (DECL_NAME (t), fld);
5282 fld_worklist_push (DECL_CONTEXT (t), fld);
5283 fld_worklist_push (DECL_SIZE (t), fld);
5284 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5285
5286 /* We are going to remove everything under DECL_INITIAL for
5287 TYPE_DECLs. No point walking them. */
5288 if (TREE_CODE (t) != TYPE_DECL)
5289 fld_worklist_push (DECL_INITIAL (t), fld);
5290
5291 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5292 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5293
5294 if (TREE_CODE (t) == FUNCTION_DECL)
5295 {
5296 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5297 fld_worklist_push (DECL_RESULT (t), fld);
5298 }
5299 else if (TREE_CODE (t) == TYPE_DECL)
5300 {
5301 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5302 }
5303 else if (TREE_CODE (t) == FIELD_DECL)
5304 {
5305 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5306 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5307 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5308 fld_worklist_push (DECL_FCONTEXT (t), fld);
5309 }
5310
5311 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5312 && DECL_HAS_VALUE_EXPR_P (t))
5313 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5314
5315 if (TREE_CODE (t) != FIELD_DECL
5316 && TREE_CODE (t) != TYPE_DECL)
5317 fld_worklist_push (TREE_CHAIN (t), fld);
5318 *ws = 0;
5319 }
5320 else if (TYPE_P (t))
5321 {
5322 /* Note that walk_tree does not traverse every possible field in
5323 types, so we have to do our own traversals here. */
5324 add_tree_to_fld_list (t, fld);
5325
5326 if (!RECORD_OR_UNION_TYPE_P (t))
5327 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5328 fld_worklist_push (TYPE_SIZE (t), fld);
5329 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5330 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5331 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5332 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5333 fld_worklist_push (TYPE_NAME (t), fld);
5334 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5335 them and thus do not want to reach unused pointer types
5336 this way. */
5337 if (!POINTER_TYPE_P (t))
5338 fld_worklist_push (TYPE_MINVAL (t), fld);
5339 if (!RECORD_OR_UNION_TYPE_P (t))
5340 fld_worklist_push (TYPE_MAXVAL (t), fld);
5341 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5342 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5343 do not want to reach unused variants this way. */
5344 if (TYPE_CONTEXT (t))
5345 {
5346 tree ctx = TYPE_CONTEXT (t);
5347 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5348 So push that instead. */
5349 while (ctx && TREE_CODE (ctx) == BLOCK)
5350 ctx = BLOCK_SUPERCONTEXT (ctx);
5351 fld_worklist_push (ctx, fld);
5352 }
5353 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5354 want to reach unused types this way. */
5355
5356 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5357 {
5358 unsigned i;
5359 tree tem;
5360 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5361 fld_worklist_push (TREE_TYPE (tem), fld);
5362 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5363 if (tem
5364 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5365 && TREE_CODE (tem) == TREE_LIST)
5366 do
5367 {
5368 fld_worklist_push (TREE_VALUE (tem), fld);
5369 tem = TREE_CHAIN (tem);
5370 }
5371 while (tem);
5372 }
5373 if (RECORD_OR_UNION_TYPE_P (t))
5374 {
5375 tree tem;
5376 /* Push all TYPE_FIELDS - interesting and uninteresting entries
5377 can be interleaved. */
5378 tem = TYPE_FIELDS (t);
5379 while (tem)
5380 {
5381 if (TREE_CODE (tem) == FIELD_DECL
5382 || TREE_CODE (tem) == TYPE_DECL)
5383 fld_worklist_push (tem, fld);
5384 tem = TREE_CHAIN (tem);
5385 }
5386 }
5387
5388 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5389 *ws = 0;
5390 }
5391 else if (TREE_CODE (t) == BLOCK)
5392 {
5393 tree tem;
5394 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5395 fld_worklist_push (tem, fld);
5396 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5397 fld_worklist_push (tem, fld);
5398 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5399 }
5400
5401 if (TREE_CODE (t) != IDENTIFIER_NODE
5402 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5403 fld_worklist_push (TREE_TYPE (t), fld);
5404
5405 return NULL_TREE;
5406 }
5407
5408
5409 /* Find decls and types in T. */
5410
5411 static void
5412 find_decls_types (tree t, struct free_lang_data_d *fld)
5413 {
5414 while (1)
5415 {
5416 if (!fld->pset->contains (t))
5417 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5418 if (fld->worklist.is_empty ())
5419 break;
5420 t = fld->worklist.pop ();
5421 }
5422 }
5423
5424 /* Translate all the types in LIST into the corresponding runtime
5425 types. */
5426
5427 static tree
5428 get_eh_types_for_runtime (tree list)
5429 {
5430 tree head, prev;
5431
5432 if (list == NULL_TREE)
5433 return NULL_TREE;
5434
5435 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5436 prev = head;
5437 list = TREE_CHAIN (list);
5438 while (list)
5439 {
5440 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5441 TREE_CHAIN (prev) = n;
5442 prev = TREE_CHAIN (prev);
5443 list = TREE_CHAIN (list);
5444 }
5445
5446 return head;
5447 }
5448
5449
5450 /* Find decls and types referenced in EH region R and store them in
5451 FLD->DECLS and FLD->TYPES. */
5452
5453 static void
5454 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5455 {
5456 switch (r->type)
5457 {
5458 case ERT_CLEANUP:
5459 break;
5460
5461 case ERT_TRY:
5462 {
5463 eh_catch c;
5464
5465 /* The types referenced in each catch must first be changed to the
5466 EH types used at runtime. This removes references to FE types
5467 in the region. */
5468 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5469 {
5470 c->type_list = get_eh_types_for_runtime (c->type_list);
5471 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5472 }
5473 }
5474 break;
5475
5476 case ERT_ALLOWED_EXCEPTIONS:
5477 r->u.allowed.type_list
5478 = get_eh_types_for_runtime (r->u.allowed.type_list);
5479 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5480 break;
5481
5482 case ERT_MUST_NOT_THROW:
5483 walk_tree (&r->u.must_not_throw.failure_decl,
5484 find_decls_types_r, fld, fld->pset);
5485 break;
5486 }
5487 }
5488
5489
5490 /* Find decls and types referenced in cgraph node N and store them in
5491 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5492 look for *every* kind of DECL and TYPE node reachable from N,
5493 including those embedded inside types and decls (i.e., TYPE_DECLs,
5494 NAMESPACE_DECLs, etc). */
5495
5496 static void
5497 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5498 {
5499 basic_block bb;
5500 struct function *fn;
5501 unsigned ix;
5502 tree t;
5503
5504 find_decls_types (n->decl, fld);
5505
5506 if (!gimple_has_body_p (n->decl))
5507 return;
5508
5509 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5510
5511 fn = DECL_STRUCT_FUNCTION (n->decl);
5512
5513 /* Traverse locals. */
5514 FOR_EACH_LOCAL_DECL (fn, ix, t)
5515 find_decls_types (t, fld);
5516
5517 /* Traverse EH regions in FN. */
5518 {
5519 eh_region r;
5520 FOR_ALL_EH_REGION_FN (r, fn)
5521 find_decls_types_in_eh_region (r, fld);
5522 }
5523
5524 /* Traverse every statement in FN. */
5525 FOR_EACH_BB_FN (bb, fn)
5526 {
5527 gimple_stmt_iterator si;
5528 unsigned i;
5529
5530 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5531 {
5532 gimple phi = gsi_stmt (si);
5533
5534 for (i = 0; i < gimple_phi_num_args (phi); i++)
5535 {
5536 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5537 find_decls_types (*arg_p, fld);
5538 }
5539 }
5540
5541 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5542 {
5543 gimple stmt = gsi_stmt (si);
5544
5545 if (is_gimple_call (stmt))
5546 find_decls_types (gimple_call_fntype (stmt), fld);
5547
5548 for (i = 0; i < gimple_num_ops (stmt); i++)
5549 {
5550 tree arg = gimple_op (stmt, i);
5551 find_decls_types (arg, fld);
5552 }
5553 }
5554 }
5555 }
5556
5557
5558 /* Find decls and types referenced in varpool node N and store them in
5559 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5560 look for *every* kind of DECL and TYPE node reachable from N,
5561 including those embedded inside types and decls (i.e., TYPE_DECLs,
5562 NAMESPACE_DECLs, etc). */
5563
5564 static void
5565 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5566 {
5567 find_decls_types (v->decl, fld);
5568 }
5569
5570 /* If T needs an assembler name, have one created for it. */
5571
5572 void
5573 assign_assembler_name_if_neeeded (tree t)
5574 {
5575 if (need_assembler_name_p (t))
5576 {
5577 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5578 diagnostics that use input_location to show locus
5579 information. The problem here is that, at this point,
5580 input_location is generally anchored to the end of the file
5581 (since the parser is long gone), so we don't have a good
5582 position to pin it to.
5583
5584 To alleviate this problem, this uses the location of T's
5585 declaration. Examples of this are
5586 testsuite/g++.dg/template/cond2.C and
5587 testsuite/g++.dg/template/pr35240.C. */
5588 location_t saved_location = input_location;
5589 input_location = DECL_SOURCE_LOCATION (t);
5590
5591 decl_assembler_name (t);
5592
5593 input_location = saved_location;
5594 }
5595 }
5596
5597
5598 /* Free language specific information for every operand and expression
5599 in every node of the call graph. This process operates in three stages:
5600
5601 1- Every callgraph node and varpool node is traversed looking for
5602 decls and types embedded in them. This is a more exhaustive
5603 search than that done by find_referenced_vars, because it will
5604 also collect individual fields, decls embedded in types, etc.
5605
5606 2- All the decls found are sent to free_lang_data_in_decl.
5607
5608 3- All the types found are sent to free_lang_data_in_type.
5609
5610 The ordering between decls and types is important because
5611 free_lang_data_in_decl sets assembler names, which includes
5612 mangling. So types cannot be freed up until assembler names have
5613 been set up. */
5614
5615 static void
5616 free_lang_data_in_cgraph (void)
5617 {
5618 struct cgraph_node *n;
5619 varpool_node *v;
5620 struct free_lang_data_d fld;
5621 tree t;
5622 unsigned i;
5623 alias_pair *p;
5624
5625 /* Initialize sets and arrays to store referenced decls and types. */
5626 fld.pset = new hash_set<tree>;
5627 fld.worklist.create (0);
5628 fld.decls.create (100);
5629 fld.types.create (100);
5630
5631 /* Find decls and types in the body of every function in the callgraph. */
5632 FOR_EACH_FUNCTION (n)
5633 find_decls_types_in_node (n, &fld);
5634
5635 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5636 find_decls_types (p->decl, &fld);
5637
5638 /* Find decls and types in every varpool symbol. */
5639 FOR_EACH_VARIABLE (v)
5640 find_decls_types_in_var (v, &fld);
5641
5642 /* Set the assembler name on every decl found. We need to do this
5643 now because free_lang_data_in_decl will invalidate data needed
5644 for mangling, which would break mangling on interdependent decls. */
5645 FOR_EACH_VEC_ELT (fld.decls, i, t)
5646 assign_assembler_name_if_neeeded (t);
5647
5648 /* Traverse every decl found freeing its language data. */
5649 FOR_EACH_VEC_ELT (fld.decls, i, t)
5650 free_lang_data_in_decl (t);
5651
5652 /* Traverse every type found freeing its language data. */
5653 FOR_EACH_VEC_ELT (fld.types, i, t)
5654 free_lang_data_in_type (t);
5655
5656 delete fld.pset;
5657 fld.worklist.release ();
5658 fld.decls.release ();
5659 fld.types.release ();
5660 }
5661
5662
5663 /* Free resources that are used by the front end but are not needed once it is done. */
5664
5665 static unsigned
5666 free_lang_data (void)
5667 {
5668 unsigned i;
5669
5670 /* If we are the LTO frontend we have freed lang-specific data already. */
5671 if (in_lto_p
5672 || !flag_generate_lto)
5673 return 0;
5674
5675 /* Allocate and assign alias sets to the standard integer types
5676 while the slots are still set up the way the front ends generated them. */
5677 for (i = 0; i < itk_none; ++i)
5678 if (integer_types[i])
5679 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5680
5681 /* Traverse the IL resetting language specific information for
5682 operands, expressions, etc. */
5683 free_lang_data_in_cgraph ();
5684
5685 /* Create gimple variants for common types. */
5686 ptrdiff_type_node = integer_type_node;
5687 fileptr_type_node = ptr_type_node;
5688
5689 /* Reset some langhooks. Do not reset types_compatible_p, it may
5690 still be used indirectly via the get_alias_set langhook. */
5691 lang_hooks.dwarf_name = lhd_dwarf_name;
5692 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5693 /* We do not want the default decl_assembler_name implementation.
5694 Rather, once we have fixed everything, we want a wrapper around it
5695 asserting that all non-local symbols already got their assembler
5696 name and that only produces assembler names for local symbols. Or,
5697 better, make sure we never call decl_assembler_name on local symbols
5698 and devise a separate, middle-end private scheme for it. */
5699
5700 /* Reset diagnostic machinery. */
5701 tree_diagnostics_defaults (global_dc);
5702
5703 return 0;
5704 }
5705
5706
5707 namespace {
5708
5709 const pass_data pass_data_ipa_free_lang_data =
5710 {
5711 SIMPLE_IPA_PASS, /* type */
5712 "*free_lang_data", /* name */
5713 OPTGROUP_NONE, /* optinfo_flags */
5714 TV_IPA_FREE_LANG_DATA, /* tv_id */
5715 0, /* properties_required */
5716 0, /* properties_provided */
5717 0, /* properties_destroyed */
5718 0, /* todo_flags_start */
5719 0, /* todo_flags_finish */
5720 };
5721
5722 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5723 {
5724 public:
5725 pass_ipa_free_lang_data (gcc::context *ctxt)
5726 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5727 {}
5728
5729 /* opt_pass methods: */
5730 virtual unsigned int execute (function *) { return free_lang_data (); }
5731
5732 }; // class pass_ipa_free_lang_data
5733
5734 } // anon namespace
5735
5736 simple_ipa_opt_pass *
5737 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5738 {
5739 return new pass_ipa_free_lang_data (ctxt);
5740 }
5741
5742 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5743 ATTR_NAME. Also used internally by remove_attribute(). */
5744 bool
5745 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5746 {
5747 size_t ident_len = IDENTIFIER_LENGTH (ident);
5748
5749 if (ident_len == attr_len)
5750 {
5751 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5752 return true;
5753 }
5754 else if (ident_len == attr_len + 4)
5755 {
5756 /* There is the possibility that ATTR is 'text' and IDENT is
5757 '__text__'. */
5758 const char *p = IDENTIFIER_POINTER (ident);
5759 if (p[0] == '_' && p[1] == '_'
5760 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5761 && strncmp (attr_name, p + 2, attr_len) == 0)
5762 return true;
5763 }
5764
5765 return false;
5766 }
5767
5768 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5769 of ATTR_NAME, and LIST is not NULL_TREE. */
5770 tree
5771 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5772 {
5773 while (list)
5774 {
5775 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5776
5777 if (ident_len == attr_len)
5778 {
5779 if (!strcmp (attr_name,
5780 IDENTIFIER_POINTER (get_attribute_name (list))))
5781 break;
5782 }
5783 /* TODO: If we made sure that attributes were stored in the
5784 canonical form without '__...__' (i.e., as in 'text' as opposed
5785 to '__text__') then we could avoid the following case. */
5786 else if (ident_len == attr_len + 4)
5787 {
5788 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5789 if (p[0] == '_' && p[1] == '_'
5790 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5791 && strncmp (attr_name, p + 2, attr_len) == 0)
5792 break;
5793 }
5794 list = TREE_CHAIN (list);
5795 }
5796
5797 return list;
5798 }
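/* Example, illustrative only: the plain and underscored spellings are
   treated as the same attribute, so with a hypothetical type TYPE both

     lookup_attribute ("packed", TYPE_ATTRIBUTES (type))
     lookup_attribute ("packed",
		       tree_cons (get_identifier ("__packed__"),
				  NULL_TREE, NULL_TREE))

   find a match.  lookup_attribute is the wrapper (in tree.h) that computes
   the string length of "packed" and calls private_lookup_attribute.  */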
5799
5800 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5801 return a pointer to the first list element whose attribute name
5802 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5803 '__text__'). */
5804
5805 tree
5806 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5807 tree list)
5808 {
5809 while (list)
5810 {
5811 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5812
5813 if (attr_len > ident_len)
5814 {
5815 list = TREE_CHAIN (list);
5816 continue;
5817 }
5818
5819 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5820
5821 if (strncmp (attr_name, p, attr_len) == 0)
5822 break;
5823
5824 /* TODO: If we made sure that attributes were stored in the
5825 canonical form without '__...__' (i.e., as in 'text' as opposed
5826 to '__text__') then we could avoid the following case. */
5827 if (p[0] == '_' && p[1] == '_'
5828 && strncmp (attr_name, p + 2, attr_len) == 0)
5829 break;
5830
5831 list = TREE_CHAIN (list);
5832 }
5833
5834 return list;
5835 }
5836
5837
5838 /* A variant of lookup_attribute() that can be used with an identifier
5839 as the first argument, and where the identifier can be either
5840 'text' or '__text__'.
5841
5842 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5843 return a pointer to the attribute's list element if the attribute
5844 is part of the list, or NULL_TREE if not found. If the attribute
5845 appears more than once, this only returns the first occurrence; the
5846 TREE_CHAIN of the return value should be passed back in if further
5847 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5848 can be in the form 'text' or '__text__'. */
5849 static tree
5850 lookup_ident_attribute (tree attr_identifier, tree list)
5851 {
5852 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5853
5854 while (list)
5855 {
5856 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5857 == IDENTIFIER_NODE);
5858
5859 /* Identifiers can be compared directly for equality. */
5860 if (attr_identifier == get_attribute_name (list))
5861 break;
5862
5863 /* If they are not equal, they may still be one in the form
5864 'text' while the other one is in the form '__text__'. TODO:
5865 If we were storing attributes in normalized 'text' form, then
5866 this could all go away and we could take full advantage of
5867 the fact that we're comparing identifiers. :-) */
5868 {
5869 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5870 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5871
5872 if (ident_len == attr_len + 4)
5873 {
5874 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5875 const char *q = IDENTIFIER_POINTER (attr_identifier);
5876 if (p[0] == '_' && p[1] == '_'
5877 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5878 && strncmp (q, p + 2, attr_len) == 0)
5879 break;
5880 }
5881 else if (ident_len + 4 == attr_len)
5882 {
5883 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5884 const char *q = IDENTIFIER_POINTER (attr_identifier);
5885 if (q[0] == '_' && q[1] == '_'
5886 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5887 && strncmp (q + 2, p, ident_len) == 0)
5888 break;
5889 }
5890 }
5891 list = TREE_CHAIN (list);
5892 }
5893
5894 return list;
5895 }
5896
5897 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5898 modified list. */
5899
5900 tree
5901 remove_attribute (const char *attr_name, tree list)
5902 {
5903 tree *p;
5904 size_t attr_len = strlen (attr_name);
5905
5906 gcc_checking_assert (attr_name[0] != '_');
5907
5908 for (p = &list; *p; )
5909 {
5910 tree l = *p;
5911 /* TODO: If we were storing attributes in normalized form, here
5912 we could use a simple strcmp(). */
5913 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5914 *p = TREE_CHAIN (l);
5915 else
5916 p = &TREE_CHAIN (l);
5917 }
5918
5919 return list;
5920 }
5921
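/* An illustrative sketch, assuming some decl DECL that already carries a
   'deprecated' attribute; dropping it looks like the dllimport removal
   done in merge_dllimport_decl_attributes below:

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));  */
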
5922 /* Return an attribute list that is the union of A1 and A2. */
5923
5924 tree
5925 merge_attributes (tree a1, tree a2)
5926 {
5927 tree attributes;
5928
5929 /* Either one unset? Take the set one. */
5930
5931 if ((attributes = a1) == 0)
5932 attributes = a2;
5933
5934 /* One that completely contains the other? Take it. */
5935
5936 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5937 {
5938 if (attribute_list_contained (a2, a1))
5939 attributes = a2;
5940 else
5941 {
5942 /* Pick the longest list, and hang the other list's entries onto it. */
5943
5944 if (list_length (a1) < list_length (a2))
5945 attributes = a2, a2 = a1;
5946
5947 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5948 {
5949 tree a;
5950 for (a = lookup_ident_attribute (get_attribute_name (a2),
5951 attributes);
5952 a != NULL_TREE && !attribute_value_equal (a, a2);
5953 a = lookup_ident_attribute (get_attribute_name (a2),
5954 TREE_CHAIN (a)))
5955 ;
5956 if (a == NULL_TREE)
5957 {
5958 a1 = copy_node (a2);
5959 TREE_CHAIN (a1) = attributes;
5960 attributes = a1;
5961 }
5962 }
5963 }
5964 }
5965 return attributes;
5966 }
5967
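/* An illustrative sketch with hypothetical lists: an attribute present
   in both inputs is kept only once in the union.

     tree a1 = tree_cons (get_identifier ("used"), NULL_TREE, NULL_TREE);
     tree a2 = tree_cons (get_identifier ("cold"), NULL_TREE, a1);
     tree merged = merge_attributes (a1, a2);

   MERGED then lists 'cold' and a single 'used'.  */
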
5968 /* Given types T1 and T2, merge their attributes and return
5969 the result. */
5970
5971 tree
5972 merge_type_attributes (tree t1, tree t2)
5973 {
5974 return merge_attributes (TYPE_ATTRIBUTES (t1),
5975 TYPE_ATTRIBUTES (t2));
5976 }
5977
5978 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5979 the result. */
5980
5981 tree
5982 merge_decl_attributes (tree olddecl, tree newdecl)
5983 {
5984 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5985 DECL_ATTRIBUTES (newdecl));
5986 }
5987
5988 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5989
5990 /* Specialization of merge_decl_attributes for various Windows targets.
5991
5992 This handles the following situation:
5993
5994 __declspec (dllimport) int foo;
5995 int foo;
5996
5997 The second instance of `foo' nullifies the dllimport. */
5998
5999 tree
6000 merge_dllimport_decl_attributes (tree old, tree new_tree)
6001 {
6002 tree a;
6003 int delete_dllimport_p = 1;
6004
6005 /* What we need to do here is remove dllimport from `old' if it doesn't
6006 appear in `new'. dllimport behaves like extern: if a declaration is
6007 marked dllimport and a definition appears later, then the object
6008 is not dllimport'd. We also remove a `new' dllimport if the old list
6009 contains dllexport: dllexport always overrides dllimport, regardless
6010 of the order of declaration. */
6011 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6012 delete_dllimport_p = 0;
6013 else if (DECL_DLLIMPORT_P (new_tree)
6014 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6015 {
6016 DECL_DLLIMPORT_P (new_tree) = 0;
6017 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6018 "dllimport ignored", new_tree);
6019 }
6020 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6021 {
6022 /* Warn about overriding a symbol that has already been used, e.g.:
6023 extern int __attribute__ ((dllimport)) foo;
6024 int* bar () {return &foo;}
6025 int foo;
6026 */
6027 if (TREE_USED (old))
6028 {
6029 warning (0, "%q+D redeclared without dllimport attribute "
6030 "after being referenced with dll linkage", new_tree);
6031 /* If we have used a variable's address with dllimport linkage,
6032 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6033 decl may already have had TREE_CONSTANT computed.
6034 We still remove the attribute so that assembler code refers
6035 to '&foo' rather than '_imp__foo'. */
6036 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6037 DECL_DLLIMPORT_P (new_tree) = 1;
6038 }
6039
6040 /* Let an inline definition silently override the external reference,
6041 but otherwise warn about attribute inconsistency. */
6042 else if (TREE_CODE (new_tree) == VAR_DECL
6043 || !DECL_DECLARED_INLINE_P (new_tree))
6044 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6045 "previous dllimport ignored", new_tree);
6046 }
6047 else
6048 delete_dllimport_p = 0;
6049
6050 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6051
6052 if (delete_dllimport_p)
6053 a = remove_attribute ("dllimport", a);
6054
6055 return a;
6056 }
6057
6058 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6059 struct attribute_spec.handler. */
6060
6061 tree
6062 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6063 bool *no_add_attrs)
6064 {
6065 tree node = *pnode;
6066 bool is_dllimport;
6067
6068 /* These attributes may apply to structure and union types being created,
6069 but otherwise should pass to the declaration involved. */
6070 if (!DECL_P (node))
6071 {
6072 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6073 | (int) ATTR_FLAG_ARRAY_NEXT))
6074 {
6075 *no_add_attrs = true;
6076 return tree_cons (name, args, NULL_TREE);
6077 }
6078 if (TREE_CODE (node) == RECORD_TYPE
6079 || TREE_CODE (node) == UNION_TYPE)
6080 {
6081 node = TYPE_NAME (node);
6082 if (!node)
6083 return NULL_TREE;
6084 }
6085 else
6086 {
6087 warning (OPT_Wattributes, "%qE attribute ignored",
6088 name);
6089 *no_add_attrs = true;
6090 return NULL_TREE;
6091 }
6092 }
6093
6094 if (TREE_CODE (node) != FUNCTION_DECL
6095 && TREE_CODE (node) != VAR_DECL
6096 && TREE_CODE (node) != TYPE_DECL)
6097 {
6098 *no_add_attrs = true;
6099 warning (OPT_Wattributes, "%qE attribute ignored",
6100 name);
6101 return NULL_TREE;
6102 }
6103
6104 if (TREE_CODE (node) == TYPE_DECL
6105 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6106 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6107 {
6108 *no_add_attrs = true;
6109 warning (OPT_Wattributes, "%qE attribute ignored",
6110 name);
6111 return NULL_TREE;
6112 }
6113
6114 is_dllimport = is_attribute_p ("dllimport", name);
6115
6116 /* Report error on dllimport ambiguities seen now before they cause
6117 any damage. */
6118 if (is_dllimport)
6119 {
6120 /* Honor any target-specific overrides. */
6121 if (!targetm.valid_dllimport_attribute_p (node))
6122 *no_add_attrs = true;
6123
6124 else if (TREE_CODE (node) == FUNCTION_DECL
6125 && DECL_DECLARED_INLINE_P (node))
6126 {
6127 warning (OPT_Wattributes, "inline function %q+D declared as "
6128 " dllimport: attribute ignored", node);
6129 *no_add_attrs = true;
6130 }
6131 /* Like MS, treat definition of dllimported variables and
6132 non-inlined functions on declaration as syntax errors. */
6133 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6134 {
6135 error ("function %q+D definition is marked dllimport", node);
6136 *no_add_attrs = true;
6137 }
6138
6139 else if (TREE_CODE (node) == VAR_DECL)
6140 {
6141 if (DECL_INITIAL (node))
6142 {
6143 error ("variable %q+D definition is marked dllimport",
6144 node);
6145 *no_add_attrs = true;
6146 }
6147
6148 /* `extern' needn't be specified with dllimport.
6149 Specify `extern' now and hope for the best. Sigh. */
6150 DECL_EXTERNAL (node) = 1;
6151 /* Also, implicitly give global scope to dllimport'd variables
6152 declared within a function, unless they are declared static. */
6153 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6154 TREE_PUBLIC (node) = 1;
6155 }
6156
6157 if (!*no_add_attrs)
6158 DECL_DLLIMPORT_P (node) = 1;
6159 }
6160 else if (TREE_CODE (node) == FUNCTION_DECL
6161 && DECL_DECLARED_INLINE_P (node)
6162 && flag_keep_inline_dllexport)
6163 /* An exported function, even if inline, must be emitted. */
6164 DECL_EXTERNAL (node) = 0;
6165
6166 /* Report error if symbol is not accessible at global scope. */
6167 if (!TREE_PUBLIC (node)
6168 && (TREE_CODE (node) == VAR_DECL
6169 || TREE_CODE (node) == FUNCTION_DECL))
6170 {
6171 error ("external linkage required for symbol %q+D because of "
6172 "%qE attribute", node, name);
6173 *no_add_attrs = true;
6174 }
6175
6176 /* A dllexport'd entity must have default visibility so that other
6177 program units (shared libraries or the main executable) can see
6178 it. A dllimport'd entity must have default visibility so that
6179 the linker knows that undefined references within this program
6180 unit can be resolved by the dynamic linker. */
6181 if (!*no_add_attrs)
6182 {
6183 if (DECL_VISIBILITY_SPECIFIED (node)
6184 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6185 error ("%qE implies default visibility, but %qD has already "
6186 "been declared with a different visibility",
6187 name, node);
6188 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6189 DECL_VISIBILITY_SPECIFIED (node) = 1;
6190 }
6191
6192 return NULL_TREE;
6193 }
6194
6195 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6196 \f
6197 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6198 of the various TYPE_QUAL values. */
6199
6200 static void
6201 set_type_quals (tree type, int type_quals)
6202 {
6203 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6204 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6205 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6206 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6207 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6208 }
6209
6210 /* Returns true iff unqualified CAND and BASE are equivalent. */
6211
6212 bool
6213 check_base_type (const_tree cand, const_tree base)
6214 {
6215 return (TYPE_NAME (cand) == TYPE_NAME (base)
6216 /* Apparently this is needed for Objective-C. */
6217 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6218 /* Check alignment. */
6219 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6220 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6221 TYPE_ATTRIBUTES (base)));
6222 }
6223
6224 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6225
6226 bool
6227 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6228 {
6229 return (TYPE_QUALS (cand) == type_quals
6230 && check_base_type (cand, base));
6231 }
6232
6233 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6234
6235 static bool
6236 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6237 {
6238 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6239 && TYPE_NAME (cand) == TYPE_NAME (base)
6240 /* Apparently this is needed for Objective-C. */
6241 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6242 /* Check alignment. */
6243 && TYPE_ALIGN (cand) == align
6244 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6245 TYPE_ATTRIBUTES (base)));
6246 }
6247
6248 /* This function checks to see if TYPE matches the size of one of the
6249 built-in atomic types, and returns that core atomic type. */
6250
6251 static tree
6252 find_atomic_core_type (tree type)
6253 {
6254 tree base_atomic_type;
6255
6256 /* Only handle complete types. */
6257 if (TYPE_SIZE (type) == NULL_TREE)
6258 return NULL_TREE;
6259
6260 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6261 switch (type_size)
6262 {
6263 case 8:
6264 base_atomic_type = atomicQI_type_node;
6265 break;
6266
6267 case 16:
6268 base_atomic_type = atomicHI_type_node;
6269 break;
6270
6271 case 32:
6272 base_atomic_type = atomicSI_type_node;
6273 break;
6274
6275 case 64:
6276 base_atomic_type = atomicDI_type_node;
6277 break;
6278
6279 case 128:
6280 base_atomic_type = atomicTI_type_node;
6281 break;
6282
6283 default:
6284 base_atomic_type = NULL_TREE;
6285 }
6286
6287 return base_atomic_type;
6288 }
6289
6290 /* Return a version of the TYPE, qualified as indicated by the
6291 TYPE_QUALS, if one exists. If no qualified version exists yet,
6292 return NULL_TREE. */
6293
6294 tree
6295 get_qualified_type (tree type, int type_quals)
6296 {
6297 tree t;
6298
6299 if (TYPE_QUALS (type) == type_quals)
6300 return type;
6301
6302 /* Search the chain of variants to see if there is already one there just
6303 like the one we need to have. If so, use that existing one. We must
6304 preserve the TYPE_NAME, since there is code that depends on this. */
6305 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6306 if (check_qualified_type (t, type, type_quals))
6307 return t;
6308
6309 return NULL_TREE;
6310 }
6311
6312 /* Like get_qualified_type, but creates the type if it does not
6313 exist. This function never returns NULL_TREE. */
6314
6315 tree
6316 build_qualified_type (tree type, int type_quals)
6317 {
6318 tree t;
6319
6320 /* See if we already have the appropriate qualified variant. */
6321 t = get_qualified_type (type, type_quals);
6322
6323 /* If not, build it. */
6324 if (!t)
6325 {
6326 t = build_variant_type_copy (type);
6327 set_type_quals (t, type_quals);
6328
6329 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6330 {
6331 /* See if this object can map to a basic atomic type. */
6332 tree atomic_type = find_atomic_core_type (type);
6333 if (atomic_type)
6334 {
6335 /* Ensure the alignment of this type is compatible with
6336 the required alignment of the atomic type. */
6337 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6338 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6339 }
6340 }
6341
6342 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6343 /* Propagate structural equality. */
6344 SET_TYPE_STRUCTURAL_EQUALITY (t);
6345 else if (TYPE_CANONICAL (type) != type)
6346 /* Build the underlying canonical type, since it is different
6347 from TYPE. */
6348 {
6349 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6350 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6351 }
6352 else
6353 /* T is its own canonical type. */
6354 TYPE_CANONICAL (t) = t;
6355
6356 }
6357
6358 return t;
6359 }
6360
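/* An illustrative sketch: qualified variants share the main variant of
   the type they qualify.

     tree cint = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     gcc_assert (TYPE_MAIN_VARIANT (cint) == integer_type_node);
     gcc_assert (TYPE_READONLY (cint));  */
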
6361 /* Create a variant of TYPE with alignment ALIGN. */
6362
6363 tree
6364 build_aligned_type (tree type, unsigned int align)
6365 {
6366 tree t;
6367
6368 if (TYPE_PACKED (type)
6369 || TYPE_ALIGN (type) == align)
6370 return type;
6371
6372 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6373 if (check_aligned_type (t, type, align))
6374 return t;
6375
6376 t = build_variant_type_copy (type);
6377 TYPE_ALIGN (t) = align;
6378
6379 return t;
6380 }
6381
6382 /* Create a new distinct copy of TYPE. The new type is made its own
6383 MAIN_VARIANT. If TYPE requires structural equality checks, the
6384 resulting type requires structural equality checks; otherwise, its
6385 TYPE_CANONICAL points to itself. */
6386
6387 tree
6388 build_distinct_type_copy (tree type)
6389 {
6390 tree t = copy_node (type);
6391
6392 TYPE_POINTER_TO (t) = 0;
6393 TYPE_REFERENCE_TO (t) = 0;
6394
6395 /* Set the canonical type either to a new equivalence class, or
6396 propagate the need for structural equality checks. */
6397 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6398 SET_TYPE_STRUCTURAL_EQUALITY (t);
6399 else
6400 TYPE_CANONICAL (t) = t;
6401
6402 /* Make it its own variant. */
6403 TYPE_MAIN_VARIANT (t) = t;
6404 TYPE_NEXT_VARIANT (t) = 0;
6405
6406 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6407 whose TREE_TYPE is not t; this happens, for example, in the Ada
6408 front end when using subtypes. */
6409
6410 return t;
6411 }
6412
6413 /* Create a new variant of TYPE, equivalent but distinct. This is so
6414 the caller can modify it. TYPE_CANONICAL for the return type will
6415 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6416 are considered equal by the language itself (or that both types
6417 require structural equality checks). */
6418
6419 tree
6420 build_variant_type_copy (tree type)
6421 {
6422 tree t, m = TYPE_MAIN_VARIANT (type);
6423
6424 t = build_distinct_type_copy (type);
6425
6426 /* Since we're building a variant, assume that it is a non-semantic
6427 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6428 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6429
6430 /* Add the new type to the chain of variants of TYPE. */
6431 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6432 TYPE_NEXT_VARIANT (m) = t;
6433 TYPE_MAIN_VARIANT (t) = m;
6434
6435 return t;
6436 }
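
/* An illustrative sketch of the difference between the two copies:
   build_distinct_type_copy starts a new main variant, while
   build_variant_type_copy stays on the original one.

     tree d = build_distinct_type_copy (integer_type_node);
     tree v = build_variant_type_copy (integer_type_node);
     gcc_assert (TYPE_MAIN_VARIANT (d) == d);
     gcc_assert (TYPE_MAIN_VARIANT (v) == integer_type_node);  */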
6437 \f
6438 /* Return true if the FROM trees in both tree maps are equal. */
6439
6440 int
6441 tree_map_base_eq (const void *va, const void *vb)
6442 {
6443 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6444 *const b = (const struct tree_map_base *) vb;
6445 return (a->from == b->from);
6446 }
6447
6448 /* Hash a from tree in a tree_map_base. */
6449
6450 unsigned int
6451 tree_map_base_hash (const void *item)
6452 {
6453 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6454 }
6455
6456 /* Return true if this tree map structure is marked for garbage collection
6457 purposes. We simply return true if the from tree is marked, so that this
6458 structure goes away when the from tree goes away. */
6459
6460 int
6461 tree_map_base_marked_p (const void *p)
6462 {
6463 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6464 }
6465
6466 /* Hash a from tree in a tree_map. */
6467
6468 unsigned int
6469 tree_map_hash (const void *item)
6470 {
6471 return (((const struct tree_map *) item)->hash);
6472 }
6473
6474 /* Hash a from tree in a tree_decl_map. */
6475
6476 unsigned int
6477 tree_decl_map_hash (const void *item)
6478 {
6479 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6480 }
6481
6482 /* Return the initialization priority for DECL. */
6483
6484 priority_type
6485 decl_init_priority_lookup (tree decl)
6486 {
6487 symtab_node *snode = symtab_node::get (decl);
6488
6489 if (!snode)
6490 return DEFAULT_INIT_PRIORITY;
6491 return snode->get_init_priority ();
6493 }
6494
6495 /* Return the finalization priority for DECL. */
6496
6497 priority_type
6498 decl_fini_priority_lookup (tree decl)
6499 {
6500 cgraph_node *node = cgraph_node::get (decl);
6501
6502 if (!node)
6503 return DEFAULT_INIT_PRIORITY;
6504 return node->get_fini_priority ();
6506 }
6507
6508 /* Set the initialization priority for DECL to PRIORITY. */
6509
6510 void
6511 decl_init_priority_insert (tree decl, priority_type priority)
6512 {
6513 struct symtab_node *snode;
6514
6515 if (priority == DEFAULT_INIT_PRIORITY)
6516 {
6517 snode = symtab_node::get (decl);
6518 if (!snode)
6519 return;
6520 }
6521 else if (TREE_CODE (decl) == VAR_DECL)
6522 snode = varpool_node::get_create (decl);
6523 else
6524 snode = cgraph_node::get_create (decl);
6525 snode->set_init_priority (priority);
6526 }
6527
6528 /* Set the finalization priority for DECL to PRIORITY. */
6529
6530 void
6531 decl_fini_priority_insert (tree decl, priority_type priority)
6532 {
6533 struct cgraph_node *node;
6534
6535 if (priority == DEFAULT_INIT_PRIORITY)
6536 {
6537 node = cgraph_node::get (decl);
6538 if (!node)
6539 return;
6540 }
6541 else
6542 node = cgraph_node::get_create (decl);
6543 node->set_fini_priority (priority);
6544 }
6545
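/* An illustrative sketch, assuming some global VAR_DECL decl: a
   non-default priority is stored on the symbol table node and read
   back through the lookup routine above.

     decl_init_priority_insert (decl, 100);
     gcc_assert (decl_init_priority_lookup (decl) == 100);  */
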
6546 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6547
6548 static void
6549 print_debug_expr_statistics (void)
6550 {
6551 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6552 (long) htab_size (debug_expr_for_decl),
6553 (long) htab_elements (debug_expr_for_decl),
6554 htab_collisions (debug_expr_for_decl));
6555 }
6556
6557 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6558
6559 static void
6560 print_value_expr_statistics (void)
6561 {
6562 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6563 (long) htab_size (value_expr_for_decl),
6564 (long) htab_elements (value_expr_for_decl),
6565 htab_collisions (value_expr_for_decl));
6566 }
6567
6568 /* Lookup a debug expression for FROM, and return it if we find one. */
6569
6570 tree
6571 decl_debug_expr_lookup (tree from)
6572 {
6573 struct tree_decl_map *h, in;
6574 in.base.from = from;
6575
6576 h = (struct tree_decl_map *)
6577 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6578 if (h)
6579 return h->to;
6580 return NULL_TREE;
6581 }
6582
6583 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6584
6585 void
6586 decl_debug_expr_insert (tree from, tree to)
6587 {
6588 struct tree_decl_map *h;
6589 void **loc;
6590
6591 h = ggc_alloc<tree_decl_map> ();
6592 h->base.from = from;
6593 h->to = to;
6594 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6595 INSERT);
6596 *(struct tree_decl_map **) loc = h;
6597 }
6598
6599 /* Lookup a value expression for FROM, and return it if we find one. */
6600
6601 tree
6602 decl_value_expr_lookup (tree from)
6603 {
6604 struct tree_decl_map *h, in;
6605 in.base.from = from;
6606
6607 h = (struct tree_decl_map *)
6608 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6609 if (h)
6610 return h->to;
6611 return NULL_TREE;
6612 }
6613
6614 /* Insert a mapping FROM->TO in the value expression hashtable. */
6615
6616 void
6617 decl_value_expr_insert (tree from, tree to)
6618 {
6619 struct tree_decl_map *h;
6620 void **loc;
6621
6622 h = ggc_alloc<tree_decl_map> ();
6623 h->base.from = from;
6624 h->to = to;
6625 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6626 INSERT);
6627 *(struct tree_decl_map **) loc = h;
6628 }
6629
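/* An illustrative sketch, assuming some VAR_DECL var and expression
   expr: the insert/lookup pair round-trips the mapping.

     decl_value_expr_insert (var, expr);
     gcc_assert (decl_value_expr_lookup (var) == expr);  */
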
6630 /* Lookup a vector of debug arguments for FROM, and return it if we
6631 find one. */
6632
6633 vec<tree, va_gc> **
6634 decl_debug_args_lookup (tree from)
6635 {
6636 struct tree_vec_map *h, in;
6637
6638 if (!DECL_HAS_DEBUG_ARGS_P (from))
6639 return NULL;
6640 gcc_checking_assert (debug_args_for_decl != NULL);
6641 in.base.from = from;
6642 h = (struct tree_vec_map *)
6643 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6644 if (h)
6645 return &h->to;
6646 return NULL;
6647 }
6648
6649 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6650 arguments hashtable. */
6651
6652 vec<tree, va_gc> **
6653 decl_debug_args_insert (tree from)
6654 {
6655 struct tree_vec_map *h;
6656 void **loc;
6657
6658 if (DECL_HAS_DEBUG_ARGS_P (from))
6659 return decl_debug_args_lookup (from);
6660 if (debug_args_for_decl == NULL)
6661 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6662 tree_vec_map_eq, 0);
6663 h = ggc_alloc<tree_vec_map> ();
6664 h->base.from = from;
6665 h->to = NULL;
6666 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6667 INSERT);
6668 *(struct tree_vec_map **) loc = h;
6669 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6670 return &h->to;
6671 }
6672
6673 /* Hashing of types so that we don't make duplicates.
6674 The entry point is `type_hash_canon'. */
6675
6676 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6677 with types in the TREE_VALUE slots), by adding the hash codes
6678 of the individual types. */
6679
6680 static void
6681 type_hash_list (const_tree list, inchash::hash &hstate)
6682 {
6683 const_tree tail;
6684
6685 for (tail = list; tail; tail = TREE_CHAIN (tail))
6686 if (TREE_VALUE (tail) != error_mark_node)
6687 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6688 }
6689
6690 /* These are the Hashtable callback functions. */
6691
6692 /* Returns true iff the types are equivalent. */
6693
6694 static int
6695 type_hash_eq (const void *va, const void *vb)
6696 {
6697 const struct type_hash *const a = (const struct type_hash *) va,
6698 *const b = (const struct type_hash *) vb;
6699
6700 /* First test the things that are the same for all types. */
6701 if (a->hash != b->hash
6702 || TREE_CODE (a->type) != TREE_CODE (b->type)
6703 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6704 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6705 TYPE_ATTRIBUTES (b->type))
6706 || (TREE_CODE (a->type) != COMPLEX_TYPE
6707 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6708 return 0;
6709
6710 /* Be careful about comparing arrays before and after the element type
6711 has been completed; don't compare TYPE_ALIGN unless both types are
6712 complete. */
6713 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6714 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6715 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6716 return 0;
6717
6718 switch (TREE_CODE (a->type))
6719 {
6720 case VOID_TYPE:
6721 case COMPLEX_TYPE:
6722 case POINTER_TYPE:
6723 case REFERENCE_TYPE:
6724 case NULLPTR_TYPE:
6725 return 1;
6726
6727 case VECTOR_TYPE:
6728 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6729
6730 case ENUMERAL_TYPE:
6731 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6732 && !(TYPE_VALUES (a->type)
6733 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6734 && TYPE_VALUES (b->type)
6735 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6736 && type_list_equal (TYPE_VALUES (a->type),
6737 TYPE_VALUES (b->type))))
6738 return 0;
6739
6740 /* ... fall through ... */
6741
6742 case INTEGER_TYPE:
6743 case REAL_TYPE:
6744 case BOOLEAN_TYPE:
6745 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6746 return false;
6747 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6748 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6749 TYPE_MAX_VALUE (b->type)))
6750 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6751 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6752 TYPE_MIN_VALUE (b->type))));
6753
6754 case FIXED_POINT_TYPE:
6755 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6756
6757 case OFFSET_TYPE:
6758 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6759
6760 case METHOD_TYPE:
6761 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6762 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6763 || (TYPE_ARG_TYPES (a->type)
6764 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6765 && TYPE_ARG_TYPES (b->type)
6766 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6767 && type_list_equal (TYPE_ARG_TYPES (a->type),
6768 TYPE_ARG_TYPES (b->type)))))
6769 break;
6770 return 0;
6771 case ARRAY_TYPE:
6772 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6773
6774 case RECORD_TYPE:
6775 case UNION_TYPE:
6776 case QUAL_UNION_TYPE:
6777 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6778 || (TYPE_FIELDS (a->type)
6779 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6780 && TYPE_FIELDS (b->type)
6781 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6782 && type_list_equal (TYPE_FIELDS (a->type),
6783 TYPE_FIELDS (b->type))));
6784
6785 case FUNCTION_TYPE:
6786 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6787 || (TYPE_ARG_TYPES (a->type)
6788 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6789 && TYPE_ARG_TYPES (b->type)
6790 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6791 && type_list_equal (TYPE_ARG_TYPES (a->type),
6792 TYPE_ARG_TYPES (b->type))))
6793 break;
6794 return 0;
6795
6796 default:
6797 return 0;
6798 }
6799
6800 if (lang_hooks.types.type_hash_eq != NULL)
6801 return lang_hooks.types.type_hash_eq (a->type, b->type);
6802
6803 return 1;
6804 }
6805
6806 /* Return the cached hash value. */
6807
6808 static hashval_t
6809 type_hash_hash (const void *item)
6810 {
6811 return ((const struct type_hash *) item)->hash;
6812 }
6813
6814 /* Given TYPE, and HASHCODE its hash code, return the canonical
6815 object for an identical type if one already exists.
6816 Otherwise, return TYPE, and record it as the canonical object.
6817
6818 To use this function, first create a type of the sort you want.
6819 Then compute its hash code from the fields of the type that
6820 make it different from other similar types.
6821 Then call this function and use the value. */
6822
6823 tree
6824 type_hash_canon (unsigned int hashcode, tree type)
6825 {
6826 type_hash in;
6827 void **loc;
6828
6829 /* The hash table only contains main variants, so ensure that's what we're
6830 being passed. */
6831 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6832
6833 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6834 must call that routine before comparing TYPE_ALIGNs. */
6835 layout_type (type);
6836
6837 in.hash = hashcode;
6838 in.type = type;
6839
6840 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6841 if (*loc)
6842 {
6843 tree t1 = ((type_hash *) *loc)->type;
6844 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6845 if (GATHER_STATISTICS)
6846 {
6847 tree_code_counts[(int) TREE_CODE (type)]--;
6848 tree_node_counts[(int) t_kind]--;
6849 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6850 }
6851 return t1;
6852 }
6853 else
6854 {
6855 struct type_hash *h;
6856
6857 h = ggc_alloc<type_hash> ();
6858 h->hash = hashcode;
6859 h->type = type;
6860 *loc = (void *)h;
6861
6862 return type;
6863 }
6864 }
6865
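/* An illustrative, condensed sketch of the protocol described above,
   along the lines of build_range_type_1 later in this file, assuming a
   freshly built INTEGER_TYPE itype: hash the distinguishing fields,
   then canonicalize.

     inchash::hash hstate;
     inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
     hstate.merge_hash (TYPE_HASH (TREE_TYPE (itype)));
     itype = type_hash_canon (hstate.end (), itype);  */
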
6866 /* See if the data pointed to by the type hash table is marked. We consider
6867 it marked if the type itself is marked, so that the entry goes away
6868 when the type does. */
6869
6870 static int
6871 type_hash_marked_p (const void *p)
6872 {
6873 const_tree const type = ((const struct type_hash *) p)->type;
6874
6875 return ggc_marked_p (type);
6876 }
6877
6878 static void
6879 print_type_hash_statistics (void)
6880 {
6881 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6882 (long) htab_size (type_hash_table),
6883 (long) htab_elements (type_hash_table),
6884 htab_collisions (type_hash_table));
6885 }
6886
6887 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6888 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6889 by adding the hash codes of the individual attributes. */
6890
6891 static void
6892 attribute_hash_list (const_tree list, inchash::hash &hstate)
6893 {
6894 const_tree tail;
6895
6896 for (tail = list; tail; tail = TREE_CHAIN (tail))
6897 /* ??? Do we want to add in TREE_VALUE too? */
6898 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6899 }
6900
6901 /* Given two lists of attributes, return true if list l2 is
6902 equivalent to l1. */
6903
6904 int
6905 attribute_list_equal (const_tree l1, const_tree l2)
6906 {
6907 if (l1 == l2)
6908 return 1;
6909
6910 return attribute_list_contained (l1, l2)
6911 && attribute_list_contained (l2, l1);
6912 }
6913
6914 /* Given two lists of attributes, return true if list L2 is
6915 completely contained within L1. */
6916 /* ??? This would be faster if attribute names were stored in a canonicalized
6917 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6918 must be used to show these elements are equivalent (which they are). */
6919 /* ??? It's not clear that attributes with arguments will always be handled
6920 correctly. */
6921
6922 int
6923 attribute_list_contained (const_tree l1, const_tree l2)
6924 {
6925 const_tree t1, t2;
6926
6927 /* First check the obvious, maybe the lists are identical. */
6928 if (l1 == l2)
6929 return 1;
6930
6931 /* Maybe the lists are similar. */
6932 for (t1 = l1, t2 = l2;
6933 t1 != 0 && t2 != 0
6934 && get_attribute_name (t1) == get_attribute_name (t2)
6935 && TREE_VALUE (t1) == TREE_VALUE (t2);
6936 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6937 ;
6938
6939 /* Maybe the lists are equal. */
6940 if (t1 == 0 && t2 == 0)
6941 return 1;
6942
6943 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6944 {
6945 const_tree attr;
6946 /* This CONST_CAST is okay because lookup_attribute does not
6947 modify its argument and the return value is assigned to a
6948 const_tree. */
6949 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6950 CONST_CAST_TREE (l1));
6951 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6952 attr = lookup_ident_attribute (get_attribute_name (t2),
6953 TREE_CHAIN (attr)))
6954 ;
6955
6956 if (attr == NULL_TREE)
6957 return 0;
6958 }
6959
6960 return 1;
6961 }
6962
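/* An illustrative example with hypothetical lists: if L1 names the
   attributes 'used' and 'cold' (both without arguments) and L2 names
   just 'cold', then attribute_list_contained (l1, l2) is 1,
   attribute_list_contained (l2, l1) is 0, and therefore
   attribute_list_equal (l1, l2) is 0.  */
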
6963 /* Given two lists of types
6964 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6965 return 1 if the lists contain the same types in the same order.
6966 Also, the TREE_PURPOSEs must match. */
6967
6968 int
6969 type_list_equal (const_tree l1, const_tree l2)
6970 {
6971 const_tree t1, t2;
6972
6973 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6974 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6975 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6976 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6977 && (TREE_TYPE (TREE_PURPOSE (t1))
6978 == TREE_TYPE (TREE_PURPOSE (t2))))))
6979 return 0;
6980
6981 return t1 == t2;
6982 }
6983
6984 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6985 given by TYPE. If the argument list accepts variable arguments,
6986 then this function counts only the ordinary arguments. */
6987
6988 int
6989 type_num_arguments (const_tree type)
6990 {
6991 int i = 0;
6992 tree t;
6993
6994 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6995 /* If the function does not take a variable number of arguments,
6996 the last element in the list will have type `void'. */
6997 if (VOID_TYPE_P (TREE_VALUE (t)))
6998 break;
6999 else
7000 ++i;
7001
7002 return i;
7003 }
7004
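/* An illustrative example: for a prototype like int f (int, char *),
   TYPE_ARG_TYPES lists int, char * and a trailing void node, so
   type_num_arguments returns 2; for a varargs int g (int, ...) the
   list has no terminating void node and the result is 1.  */
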
7005 /* Nonzero if integer constants T1 and T2
7006 represent the same constant value. */
7007
7008 int
7009 tree_int_cst_equal (const_tree t1, const_tree t2)
7010 {
7011 if (t1 == t2)
7012 return 1;
7013
7014 if (t1 == 0 || t2 == 0)
7015 return 0;
7016
7017 if (TREE_CODE (t1) == INTEGER_CST
7018 && TREE_CODE (t2) == INTEGER_CST
7019 && wi::to_widest (t1) == wi::to_widest (t2))
7020 return 1;
7021
7022 return 0;
7023 }
7024
7025 /* Return true if T is an INTEGER_CST whose numerical value (extended
7026 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7027
7028 bool
7029 tree_fits_shwi_p (const_tree t)
7030 {
7031 return (t != NULL_TREE
7032 && TREE_CODE (t) == INTEGER_CST
7033 && wi::fits_shwi_p (wi::to_widest (t)));
7034 }
7035
7036 /* Return true if T is an INTEGER_CST whose numerical value (extended
7037 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7038
7039 bool
7040 tree_fits_uhwi_p (const_tree t)
7041 {
7042 return (t != NULL_TREE
7043 && TREE_CODE (t) == INTEGER_CST
7044 && wi::fits_uhwi_p (wi::to_widest (t)));
7045 }
7046
7047 /* T is an INTEGER_CST whose numerical value (extended according to
7048 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7049 HOST_WIDE_INT. */
7050
7051 HOST_WIDE_INT
7052 tree_to_shwi (const_tree t)
7053 {
7054 gcc_assert (tree_fits_shwi_p (t));
7055 return TREE_INT_CST_LOW (t);
7056 }
7057
7058 /* T is an INTEGER_CST whose numerical value (extended according to
7059 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7060 HOST_WIDE_INT. */
7061
7062 unsigned HOST_WIDE_INT
7063 tree_to_uhwi (const_tree t)
7064 {
7065 gcc_assert (tree_fits_uhwi_p (t));
7066 return TREE_INT_CST_LOW (t);
7067 }
7068
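/* An illustrative sketch: the fits/to pairs above guard the narrowing
   conversion, e.g. for a complete type TYPE:

     if (tree_fits_uhwi_p (TYPE_SIZE (type)))
       {
         unsigned HOST_WIDE_INT bits = tree_to_uhwi (TYPE_SIZE (type));
         ...
       }  */
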
7069 /* Return the most significant (sign) bit of T. */
7070
7071 int
7072 tree_int_cst_sign_bit (const_tree t)
7073 {
7074 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7075
7076 return wi::extract_uhwi (t, bitno, 1);
7077 }
7078
7079 /* Return an indication of the sign of the integer constant T.
7080 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7081 Note that -1 will never be returned if T's type is unsigned. */
7082
7083 int
7084 tree_int_cst_sgn (const_tree t)
7085 {
7086 if (wi::eq_p (t, 0))
7087 return 0;
7088 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7089 return 1;
7090 else if (wi::neg_p (t))
7091 return -1;
7092 else
7093 return 1;
7094 }
7095
7096 /* Return the minimum number of bits needed to represent VALUE in a
7097 signed or unsigned type, SGN says which. */
7098
7099 unsigned int
7100 tree_int_cst_min_precision (tree value, signop sgn)
7101 {
7102 /* If the value is negative, compute its negative minus 1. The latter
7103 adjustment is because the absolute value of the largest negative value
7104 is one larger than the largest positive value. This is equivalent to
7105 a bit-wise negation, so use that operation instead. */
7106
7107 if (tree_int_cst_sgn (value) < 0)
7108 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7109
7110 /* Return the number of bits needed, taking into account the fact
7111 that we need one more bit for a signed than unsigned type.
7112 If VALUE is 0 or -1, the minimum precision is 1 no matter
7113 whether SGN is SIGNED or UNSIGNED. */
7114
7115 if (integer_zerop (value))
7116 return 1;
7117 else
7118 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7119 }
7120
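/* Worked examples for the function above: for VALUE 5 the result is 3
   when SGN is UNSIGNED (binary 101) and 4 when SGN is SIGNED (one extra
   sign bit); for a signed VALUE of -1 the result is 1, because the
   BIT_NOT_EXPR above turns it into 0.  */
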
7121 /* Return truthvalue of whether T1 is the same tree structure as T2.
7122 Return 1 if they are the same.
7123 Return 0 if they are understandably different.
7124 Return -1 if either contains tree structure not understood by
7125 this function. */
7126
7127 int
7128 simple_cst_equal (const_tree t1, const_tree t2)
7129 {
7130 enum tree_code code1, code2;
7131 int cmp;
7132 int i;
7133
7134 if (t1 == t2)
7135 return 1;
7136 if (t1 == 0 || t2 == 0)
7137 return 0;
7138
7139 code1 = TREE_CODE (t1);
7140 code2 = TREE_CODE (t2);
7141
7142 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7143 {
7144 if (CONVERT_EXPR_CODE_P (code2)
7145 || code2 == NON_LVALUE_EXPR)
7146 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7147 else
7148 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7149 }
7150
7151 else if (CONVERT_EXPR_CODE_P (code2)
7152 || code2 == NON_LVALUE_EXPR)
7153 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7154
7155 if (code1 != code2)
7156 return 0;
7157
7158 switch (code1)
7159 {
7160 case INTEGER_CST:
7161 return wi::to_widest (t1) == wi::to_widest (t2);
7162
7163 case REAL_CST:
7164 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7165
7166 case FIXED_CST:
7167 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7168
7169 case STRING_CST:
7170 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7171 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7172 TREE_STRING_LENGTH (t1)));
7173
7174 case CONSTRUCTOR:
7175 {
7176 unsigned HOST_WIDE_INT idx;
7177 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7178 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7179
7180 if (vec_safe_length (v1) != vec_safe_length (v2))
7181 return false;
7182
7183 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7184 /* ??? Should we also handle fields here? */
7185 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7186 return false;
7187 return true;
7188 }
7189
7190 case SAVE_EXPR:
7191 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7192
7193 case CALL_EXPR:
7194 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7195 if (cmp <= 0)
7196 return cmp;
7197 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7198 return 0;
7199 {
7200 const_tree arg1, arg2;
7201 const_call_expr_arg_iterator iter1, iter2;
7202 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7203 arg2 = first_const_call_expr_arg (t2, &iter2);
7204 arg1 && arg2;
7205 arg1 = next_const_call_expr_arg (&iter1),
7206 arg2 = next_const_call_expr_arg (&iter2))
7207 {
7208 cmp = simple_cst_equal (arg1, arg2);
7209 if (cmp <= 0)
7210 return cmp;
7211 }
7212 return arg1 == arg2;
7213 }
7214
7215 case TARGET_EXPR:
7216 /* Special case: if either target is an unallocated VAR_DECL,
7217 it means that it's going to be unified with whatever the
7218 TARGET_EXPR is really supposed to initialize, so treat it
7219 as being equivalent to anything. */
7220 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7221 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7222 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7223 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7224 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7225 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7226 cmp = 1;
7227 else
7228 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7229
7230 if (cmp <= 0)
7231 return cmp;
7232
7233 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7234
7235 case WITH_CLEANUP_EXPR:
7236 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7237 if (cmp <= 0)
7238 return cmp;
7239
7240 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7241
7242 case COMPONENT_REF:
7243 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7244 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7245
7246 return 0;
7247
7248 case VAR_DECL:
7249 case PARM_DECL:
7250 case CONST_DECL:
7251 case FUNCTION_DECL:
7252 return 0;
7253
7254 default:
7255 break;
7256 }
7257
7258 /* This general rule works for most tree codes. All exceptions should be
7259 handled above. If this is a language-specific tree code, we can't
7260 trust what might be in the operand, so say we don't know
7261 the situation. */
7262 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7263 return -1;
7264
7265 switch (TREE_CODE_CLASS (code1))
7266 {
7267 case tcc_unary:
7268 case tcc_binary:
7269 case tcc_comparison:
7270 case tcc_expression:
7271 case tcc_reference:
7272 case tcc_statement:
7273 cmp = 1;
7274 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7275 {
7276 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7277 if (cmp <= 0)
7278 return cmp;
7279 }
7280
7281 return cmp;
7282
7283 default:
7284 return -1;
7285 }
7286 }
7287
7288 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7289 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7290 than U, respectively. */
7291
7292 int
7293 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7294 {
7295 if (tree_int_cst_sgn (t) < 0)
7296 return -1;
7297 else if (!tree_fits_uhwi_p (t))
7298 return 1;
7299 else if (TREE_INT_CST_LOW (t) == u)
7300 return 0;
7301 else if (TREE_INT_CST_LOW (t) < u)
7302 return -1;
7303 else
7304 return 1;
7305 }
7306
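/* An illustrative sketch, assuming some INTEGER_CST bound: comparing a
   tree constant against a host integer without worrying about overflow:

     if (compare_tree_int (bound, 255) <= 0)
       ... bound is known to be at most 255 ...  */
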
7307 /* Return true if SIZE represents a constant size that is in bounds of
7308 what the middle-end and the backend accept (covering not more than
7309 half of the address-space). */
7310
7311 bool
7312 valid_constant_size_p (const_tree size)
7313 {
7314 if (! tree_fits_uhwi_p (size)
7315 || TREE_OVERFLOW (size)
7316 || tree_int_cst_sign_bit (size) != 0)
7317 return false;
7318 return true;
7319 }
7320
7321 /* Return the precision of the type, or for a complex or vector type the
7322 precision of the type of its elements. */
7323
7324 unsigned int
7325 element_precision (const_tree type)
7326 {
7327 enum tree_code code = TREE_CODE (type);
7328 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7329 type = TREE_TYPE (type);
7330
7331 return TYPE_PRECISION (type);
7332 }
7333
7334 /* Return true if CODE represents an associative tree code. Otherwise
7335 return false. */
7336 bool
7337 associative_tree_code (enum tree_code code)
7338 {
7339 switch (code)
7340 {
7341 case BIT_IOR_EXPR:
7342 case BIT_AND_EXPR:
7343 case BIT_XOR_EXPR:
7344 case PLUS_EXPR:
7345 case MULT_EXPR:
7346 case MIN_EXPR:
7347 case MAX_EXPR:
7348 return true;
7349
7350 default:
7351 break;
7352 }
7353 return false;
7354 }
7355
7356 /* Return true if CODE represents a commutative tree code. Otherwise
7357 return false. */
7358 bool
7359 commutative_tree_code (enum tree_code code)
7360 {
7361 switch (code)
7362 {
7363 case PLUS_EXPR:
7364 case MULT_EXPR:
7365 case MULT_HIGHPART_EXPR:
7366 case MIN_EXPR:
7367 case MAX_EXPR:
7368 case BIT_IOR_EXPR:
7369 case BIT_XOR_EXPR:
7370 case BIT_AND_EXPR:
7371 case NE_EXPR:
7372 case EQ_EXPR:
7373 case UNORDERED_EXPR:
7374 case ORDERED_EXPR:
7375 case UNEQ_EXPR:
7376 case LTGT_EXPR:
7377 case TRUTH_AND_EXPR:
7378 case TRUTH_XOR_EXPR:
7379 case TRUTH_OR_EXPR:
7380 case WIDEN_MULT_EXPR:
7381 case VEC_WIDEN_MULT_HI_EXPR:
7382 case VEC_WIDEN_MULT_LO_EXPR:
7383 case VEC_WIDEN_MULT_EVEN_EXPR:
7384 case VEC_WIDEN_MULT_ODD_EXPR:
7385 return true;
7386
7387 default:
7388 break;
7389 }
7390 return false;
7391 }
7392
7393 /* Return true if CODE represents a ternary tree code for which the
7394 first two operands are commutative. Otherwise return false. */
7395 bool
7396 commutative_ternary_tree_code (enum tree_code code)
7397 {
7398 switch (code)
7399 {
7400 case WIDEN_MULT_PLUS_EXPR:
7401 case WIDEN_MULT_MINUS_EXPR:
7402 case DOT_PROD_EXPR:
7403 case FMA_EXPR:
7404 return true;
7405
7406 default:
7407 break;
7408 }
7409 return false;
7410 }
7411
7412 namespace inchash
7413 {
7414
7415 /* Generate a hash value for an expression. This can be used iteratively
7416 by passing a previous result as the HSTATE argument.
7417
7418 This function is intended to produce the same hash for expressions which
7419 would compare equal using operand_equal_p. */
7420 void
7421 add_expr (const_tree t, inchash::hash &hstate)
7422 {
7423 int i;
7424 enum tree_code code;
7425 enum tree_code_class tclass;
7426
7427 if (t == NULL_TREE)
7428 {
7429 hstate.merge_hash (0);
7430 return;
7431 }
7432
7433 code = TREE_CODE (t);
7434
7435 switch (code)
7436 {
7437 /* Alas, constants aren't shared, so we can't rely on pointer
7438 identity. */
7439 case VOID_CST:
7440 hstate.merge_hash (0);
7441 return;
7442 case INTEGER_CST:
7443 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7444 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7445 return;
7446 case REAL_CST:
7447 {
7448 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7449 hstate.merge_hash (val2);
7450 return;
7451 }
7452 case FIXED_CST:
7453 {
7454 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7455 hstate.merge_hash (val2);
7456 return;
7457 }
7458 case STRING_CST:
7459 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7460 return;
7461 case COMPLEX_CST:
7462 inchash::add_expr (TREE_REALPART (t), hstate);
7463 inchash::add_expr (TREE_IMAGPART (t), hstate);
7464 return;
7465 case VECTOR_CST:
7466 {
7467 unsigned i;
7468 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7469 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7470 return;
7471 }
7472 case SSA_NAME:
7473 /* We can just compare by pointer. */
7474 hstate.add_wide_int (SSA_NAME_VERSION (t));
7475 return;
7476 case PLACEHOLDER_EXPR:
7477 /* The node itself doesn't matter. */
7478 return;
7479 case TREE_LIST:
7480 /* A list of expressions, for a CALL_EXPR or as the elements of a
7481 VECTOR_CST. */
7482 for (; t; t = TREE_CHAIN (t))
7483 inchash::add_expr (TREE_VALUE (t), hstate);
7484 return;
7485 case CONSTRUCTOR:
7486 {
7487 unsigned HOST_WIDE_INT idx;
7488 tree field, value;
7489 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7490 {
7491 inchash::add_expr (field, hstate);
7492 inchash::add_expr (value, hstate);
7493 }
7494 return;
7495 }
7496 case FUNCTION_DECL:
7497 /* When referring to a built-in FUNCTION_DECL, use the __builtin_ form.
7498 Otherwise nodes that compare equal according to operand_equal_p might
7499 get different hash codes. However, don't do this for machine specific
7500 or front end builtins, since the function code is overloaded in those
7501 cases. */
7502 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7503 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7504 {
7505 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7506 code = TREE_CODE (t);
7507 }
7508 /* FALL THROUGH */
7509 default:
7510 tclass = TREE_CODE_CLASS (code);
7511
7512 if (tclass == tcc_declaration)
7513 {
7514 /* DECLs have a unique ID. */
7515 hstate.add_wide_int (DECL_UID (t));
7516 }
7517 else
7518 {
7519 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7520
7521 hstate.add_object (code);
7522
7523 /* Don't hash the type, that can lead to having nodes which
7524 compare equal according to operand_equal_p, but which
7525 have different hash codes. */
7526 if (CONVERT_EXPR_CODE_P (code)
7527 || code == NON_LVALUE_EXPR)
7528 {
7529 /* Make sure to include signedness in the hash computation. */
7530 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7531 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7532 }
7533
7534 else if (commutative_tree_code (code))
7535 {
7536 /* It's a commutative expression. We want to hash it the same
7537 however it appears. We do this by first hashing both operands
7538 and then rehashing based on the order of their independent
7539 hashes. */
7540 inchash::hash one, two;
7541 inchash::add_expr (TREE_OPERAND (t, 0), one);
7542 inchash::add_expr (TREE_OPERAND (t, 1), two);
7543 hstate.add_commutative (one, two);
7544 }
7545 else
7546 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7547 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7548 }
7549 return;
7550 }
7551 }
7552
7553 }
7554
7555 /* Constructors for pointer, array and function types.
7556 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7557 constructed by language-dependent code, not here.) */
7558
7559 /* Construct, lay out and return the type of pointers to TO_TYPE with
7560 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7561 reference all of memory. If such a type has already been
7562 constructed, reuse it. */
7563
7564 tree
7565 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7566 bool can_alias_all)
7567 {
7568 tree t;
7569
7570 if (to_type == error_mark_node)
7571 return error_mark_node;
7572
7573 /* If the pointed-to type has the may_alias attribute set, force
7574 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7575 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7576 can_alias_all = true;
7577
7578 /* In some cases, languages will have things that aren't a POINTER_TYPE
7579 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7580 In that case, return that type without regard to the rest of our
7581 operands.
7582
7583 ??? This is a kludge, but consistent with the way this function has
7584 always operated and there doesn't seem to be a good way to avoid this
7585 at the moment. */
7586 if (TYPE_POINTER_TO (to_type) != 0
7587 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7588 return TYPE_POINTER_TO (to_type);
7589
7590 /* First, if we already have a type for pointers to TO_TYPE and it's
7591 the proper mode, use it. */
7592 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7593 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7594 return t;
7595
7596 t = make_node (POINTER_TYPE);
7597
7598 TREE_TYPE (t) = to_type;
7599 SET_TYPE_MODE (t, mode);
7600 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7601 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7602 TYPE_POINTER_TO (to_type) = t;
7603
7604 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7605 SET_TYPE_STRUCTURAL_EQUALITY (t);
7606 else if (TYPE_CANONICAL (to_type) != to_type)
7607 TYPE_CANONICAL (t)
7608 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7609 mode, can_alias_all);
7610
7611 /* Lay out the type. This function has many callers that are concerned
7612 with expression-construction, and this simplifies them all. */
7613 layout_type (t);
7614
7615 return t;
7616 }
7617
7618 /* By default build pointers in ptr_mode. */
7619
7620 tree
7621 build_pointer_type (tree to_type)
7622 {
7623 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7624 : TYPE_ADDR_SPACE (to_type);
7625 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7626 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7627 }
7628
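/* An illustrative sketch: repeated requests for the same pointer type
   return the shared node recorded in TYPE_POINTER_TO.

     tree p1 = build_pointer_type (integer_type_node);
     tree p2 = build_pointer_type (integer_type_node);
     gcc_assert (p1 == p2 && TREE_TYPE (p1) == integer_type_node);  */
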
7629 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7630
7631 tree
7632 build_reference_type_for_mode (tree to_type, machine_mode mode,
7633 bool can_alias_all)
7634 {
7635 tree t;
7636
7637 if (to_type == error_mark_node)
7638 return error_mark_node;
7639
7640 /* If the pointed-to type has the may_alias attribute set, force
7641 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7642 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7643 can_alias_all = true;
7644
7645 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7646 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7647 In that case, return that type without regard to the rest of our
7648 operands.
7649
7650 ??? This is a kludge, but consistent with the way this function has
7651 always operated and there doesn't seem to be a good way to avoid this
7652 at the moment. */
7653 if (TYPE_REFERENCE_TO (to_type) != 0
7654 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7655 return TYPE_REFERENCE_TO (to_type);
7656
7657 /* First, if we already have a type for pointers to TO_TYPE and it's
7658 the proper mode, use it. */
7659 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7660 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7661 return t;
7662
7663 t = make_node (REFERENCE_TYPE);
7664
7665 TREE_TYPE (t) = to_type;
7666 SET_TYPE_MODE (t, mode);
7667 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7668 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7669 TYPE_REFERENCE_TO (to_type) = t;
7670
7671 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7672 SET_TYPE_STRUCTURAL_EQUALITY (t);
7673 else if (TYPE_CANONICAL (to_type) != to_type)
7674 TYPE_CANONICAL (t)
7675 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7676 mode, can_alias_all);
7677
7678 layout_type (t);
7679
7680 return t;
7681 }
7682
7683
7684 /* Build the node for the type of references-to-TO_TYPE by default
7685 in ptr_mode. */
7686
7687 tree
7688 build_reference_type (tree to_type)
7689 {
7690 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7691 : TYPE_ADDR_SPACE (to_type);
7692 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7693 return build_reference_type_for_mode (to_type, pointer_mode, false);
7694 }
7695
7696 #define MAX_INT_CACHED_PREC \
7697 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7698 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7699
7700 /* Builds a signed or unsigned integer type of precision PRECISION.
7701 Used for C bitfields whose precision does not match that of
7702 built-in target types. */
7703 tree
7704 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7705 int unsignedp)
7706 {
7707 tree itype, ret;
7708
7709 if (unsignedp)
7710 unsignedp = MAX_INT_CACHED_PREC + 1;
7711
7712 if (precision <= MAX_INT_CACHED_PREC)
7713 {
7714 itype = nonstandard_integer_type_cache[precision + unsignedp];
7715 if (itype)
7716 return itype;
7717 }
7718
7719 itype = make_node (INTEGER_TYPE);
7720 TYPE_PRECISION (itype) = precision;
7721
7722 if (unsignedp)
7723 fixup_unsigned_type (itype);
7724 else
7725 fixup_signed_type (itype);
7726
7727 ret = itype;
7728 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7729 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7730 if (precision <= MAX_INT_CACHED_PREC)
7731 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7732
7733 return ret;
7734 }
7735
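/* An illustrative sketch: a 24-bit unsigned bit-field type; a second
   identical request is served from the cache above.

     tree u24 = build_nonstandard_integer_type (24, 1);
     gcc_assert (TYPE_PRECISION (u24) == 24 && TYPE_UNSIGNED (u24));  */
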
7736 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7737 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7738 is true, reuse such a type that has already been constructed. */
7739
7740 static tree
7741 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7742 {
7743 tree itype = make_node (INTEGER_TYPE);
7744 inchash::hash hstate;
7745
7746 TREE_TYPE (itype) = type;
7747
7748 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7749 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7750
7751 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7752 SET_TYPE_MODE (itype, TYPE_MODE (type));
7753 TYPE_SIZE (itype) = TYPE_SIZE (type);
7754 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7755 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7756 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7757
7758 if (!shared)
7759 return itype;
7760
7761 if ((TYPE_MIN_VALUE (itype)
7762 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7763 || (TYPE_MAX_VALUE (itype)
7764 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7765 {
7766 /* Since we cannot reliably merge this type, we need to compare it using
7767 structural equality checks. */
7768 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7769 return itype;
7770 }
7771
7772 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7773 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7774 hstate.merge_hash (TYPE_HASH (type));
7775 itype = type_hash_canon (hstate.end (), itype);
7776
7777 return itype;
7778 }
7779
7780 /* Wrapper around build_range_type_1 with SHARED set to true. */
7781
7782 tree
7783 build_range_type (tree type, tree lowval, tree highval)
7784 {
7785 return build_range_type_1 (type, lowval, highval, true);
7786 }
7787
7788 /* Wrapper around build_range_type_1 with SHARED set to false. */
7789
7790 tree
7791 build_nonshared_range_type (tree type, tree lowval, tree highval)
7792 {
7793 return build_range_type_1 (type, lowval, highval, false);
7794 }
7795
7796 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7797 MAXVAL should be the maximum value in the domain
7798 (one less than the length of the array).
7799
7800 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7801 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7802 The limit exists because the result is a signed type and we don't handle
7803 sizes that use more than one HOST_WIDE_INT. */
7804
7805 tree
7806 build_index_type (tree maxval)
7807 {
7808 return build_range_type (sizetype, size_zero_node, maxval);
7809 }
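
/* Illustrative sketch, not part of the original source: the domain of a
   ten-element array runs from 0 to 9, so its index type is built with a
   MAXVAL of 9.  */

static ATTRIBUTE_UNUSED void
example_index_type (void)
{
  tree domain = build_index_type (size_int (9));

  gcc_assert (integer_zerop (TYPE_MIN_VALUE (domain))
              && tree_to_uhwi (TYPE_MAX_VALUE (domain)) == 9);
}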
7810
7811 /* Return true if the debug information for TYPE, a subtype, should be emitted
7812 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7813 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7814 debug info and doesn't reflect the source code. */
7815
7816 bool
7817 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7818 {
7819 tree base_type = TREE_TYPE (type), low, high;
7820
7821 /* Subrange types have a base type which is an integral type. */
7822 if (!INTEGRAL_TYPE_P (base_type))
7823 return false;
7824
7825 /* Get the real bounds of the subtype. */
7826 if (lang_hooks.types.get_subrange_bounds)
7827 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7828 else
7829 {
7830 low = TYPE_MIN_VALUE (type);
7831 high = TYPE_MAX_VALUE (type);
7832 }
7833
7834 /* If the type and its base type have the same representation and the same
7835 name, then the type is not a subrange but a copy of the base type. */
7836 if ((TREE_CODE (base_type) == INTEGER_TYPE
7837 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7838 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7839 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7840 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7841 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7842 return false;
7843
7844 if (lowval)
7845 *lowval = low;
7846 if (highval)
7847 *highval = high;
7848 return true;
7849 }
7850
7851 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7852 and number of elements specified by the range of values of INDEX_TYPE.
7853 If SHARED is true, reuse such a type that has already been constructed. */
7854
7855 static tree
7856 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7857 {
7858 tree t;
7859
7860 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7861 {
7862 error ("arrays of functions are not meaningful");
7863 elt_type = integer_type_node;
7864 }
7865
7866 t = make_node (ARRAY_TYPE);
7867 TREE_TYPE (t) = elt_type;
7868 TYPE_DOMAIN (t) = index_type;
7869 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7870 layout_type (t);
7871
7872 /* If the element type is incomplete at this point we get marked for
7873 structural equality. Do not record these types in the canonical
7874 type hashtable. */
7875 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7876 return t;
7877
7878 if (shared)
7879 {
7880 inchash::hash hstate;
7881 hstate.add_object (TYPE_HASH (elt_type));
7882 if (index_type)
7883 hstate.add_object (TYPE_HASH (index_type));
7884 t = type_hash_canon (hstate.end (), t);
7885 }
7886
7887 if (TYPE_CANONICAL (t) == t)
7888 {
7889 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7890 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7891 SET_TYPE_STRUCTURAL_EQUALITY (t);
7892 else if (TYPE_CANONICAL (elt_type) != elt_type
7893 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7894 TYPE_CANONICAL (t)
7895 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7896 index_type
7897 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7898 shared);
7899 }
7900
7901 return t;
7902 }
7903
7904 /* Wrapper around build_array_type_1 with SHARED set to true. */
7905
7906 tree
7907 build_array_type (tree elt_type, tree index_type)
7908 {
7909 return build_array_type_1 (elt_type, index_type, true);
7910 }
7911
7912 /* Wrapper around build_array_type_1 with SHARED set to false. */
7913
7914 tree
7915 build_nonshared_array_type (tree elt_type, tree index_type)
7916 {
7917 return build_array_type_1 (elt_type, index_type, false);
7918 }
7919
7920 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7921 sizetype. */
7922
7923 tree
7924 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7925 {
7926 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7927 }
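
/* Illustrative sketch, not part of the original source: int[4] built either
   from the element count or from an explicit index type; both requests hash
   to the same shared ARRAY_TYPE node.  */

static ATTRIBUTE_UNUSED void
example_array_type_nelts (void)
{
  tree a1 = build_array_type_nelts (integer_type_node, 4);
  tree a2 = build_array_type (integer_type_node,
                              build_index_type (size_int (3)));

  gcc_assert (a1 == a2 && TREE_CODE (a1) == ARRAY_TYPE);
}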
7928
7929 /* Recursively examines the array elements of TYPE, until a non-array
7930 element type is found. */
7931
7932 tree
7933 strip_array_types (tree type)
7934 {
7935 while (TREE_CODE (type) == ARRAY_TYPE)
7936 type = TREE_TYPE (type);
7937
7938 return type;
7939 }
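
/* Illustrative sketch, not part of the original source: peeling a
   two-dimensional array type, here int[2][3], down to its scalar element
   type.  */

static ATTRIBUTE_UNUSED void
example_strip_array_types (void)
{
  tree inner = build_array_type_nelts (integer_type_node, 3);
  tree outer = build_array_type_nelts (inner, 2);

  gcc_assert (strip_array_types (outer) == integer_type_node);
}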
7940
7941 /* Computes the canonical argument types from the argument type list
7942 ARGTYPES.
7943
7944 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7945 on entry to this function, or if any of the ARGTYPES are
7946 structural.
7947
7948 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7949 true on entry to this function, or if any of the ARGTYPES are
7950 non-canonical.
7951
7952 Returns a canonical argument list, which may be ARGTYPES when the
7953 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7954 true) or would not differ from ARGTYPES. */
7955
7956 static tree
7957 maybe_canonicalize_argtypes (tree argtypes,
7958 bool *any_structural_p,
7959 bool *any_noncanonical_p)
7960 {
7961 tree arg;
7962 bool any_noncanonical_argtypes_p = false;
7963
7964 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7965 {
7966 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7967 /* Fail gracefully by stating that the type is structural. */
7968 *any_structural_p = true;
7969 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7970 *any_structural_p = true;
7971 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7972 || TREE_PURPOSE (arg))
7973 /* If the argument has a default argument, we consider it
7974 non-canonical even though the type itself is canonical.
7975 That way, different variants of function and method types
7976 with default arguments will all point to the variant with
7977 no defaults as their canonical type. */
7978 any_noncanonical_argtypes_p = true;
7979 }
7980
7981 if (*any_structural_p)
7982 return argtypes;
7983
7984 if (any_noncanonical_argtypes_p)
7985 {
7986 /* Build the canonical list of argument types. */
7987 tree canon_argtypes = NULL_TREE;
7988 bool is_void = false;
7989
7990 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7991 {
7992 if (arg == void_list_node)
7993 is_void = true;
7994 else
7995 canon_argtypes = tree_cons (NULL_TREE,
7996 TYPE_CANONICAL (TREE_VALUE (arg)),
7997 canon_argtypes);
7998 }
7999
8000 canon_argtypes = nreverse (canon_argtypes);
8001 if (is_void)
8002 canon_argtypes = chainon (canon_argtypes, void_list_node);
8003
8004 /* There is a non-canonical type. */
8005 *any_noncanonical_p = true;
8006 return canon_argtypes;
8007 }
8008
8009 /* The canonical argument types are the same as ARGTYPES. */
8010 return argtypes;
8011 }
8012
8013 /* Construct, lay out and return
8014 the type of functions returning type VALUE_TYPE
8015 given arguments of types ARG_TYPES.
8016 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8017 are data type nodes for the arguments of the function.
8018 If such a type has already been constructed, reuse it. */
8019
8020 tree
8021 build_function_type (tree value_type, tree arg_types)
8022 {
8023 tree t;
8024 inchash::hash hstate;
8025 bool any_structural_p, any_noncanonical_p;
8026 tree canon_argtypes;
8027
8028 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8029 {
8030 error ("function return type cannot be function");
8031 value_type = integer_type_node;
8032 }
8033
8034 /* Make a node of the sort we want. */
8035 t = make_node (FUNCTION_TYPE);
8036 TREE_TYPE (t) = value_type;
8037 TYPE_ARG_TYPES (t) = arg_types;
8038
8039 /* If we already have such a type, use the old one. */
8040 hstate.add_object (TYPE_HASH (value_type));
8041 type_hash_list (arg_types, hstate);
8042 t = type_hash_canon (hstate.end (), t);
8043
8044 /* Set up the canonical type. */
8045 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8046 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8047 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8048 &any_structural_p,
8049 &any_noncanonical_p);
8050 if (any_structural_p)
8051 SET_TYPE_STRUCTURAL_EQUALITY (t);
8052 else if (any_noncanonical_p)
8053 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8054 canon_argtypes);
8055
8056 if (!COMPLETE_TYPE_P (t))
8057 layout_type (t);
8058 return t;
8059 }
8060
8061 /* Build a function type. The RETURN_TYPE is the type returned by the
8062 function. If VAARGS is set, no void_type_node is appended to the
8063 list. ARGP must always be terminated by a NULL_TREE. */
8064
8065 static tree
8066 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8067 {
8068 tree t, args, last;
8069
8070 t = va_arg (argp, tree);
8071 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8072 args = tree_cons (NULL_TREE, t, args);
8073
8074 if (vaargs)
8075 {
8076 last = args;
8077 if (args != NULL_TREE)
8078 args = nreverse (args);
8079 gcc_assert (last != void_list_node);
8080 }
8081 else if (args == NULL_TREE)
8082 args = void_list_node;
8083 else
8084 {
8085 last = args;
8086 args = nreverse (args);
8087 TREE_CHAIN (last) = void_list_node;
8088 }
8089 args = build_function_type (return_type, args);
8090
8091 return args;
8092 }
8093
8094 /* Build a function type. The RETURN_TYPE is the type returned by the
8095 function. If additional arguments are provided, they are
8096 additional argument types. The list of argument types must always
8097 be terminated by NULL_TREE. */
8098
8099 tree
8100 build_function_type_list (tree return_type, ...)
8101 {
8102 tree args;
8103 va_list p;
8104
8105 va_start (p, return_type);
8106 args = build_function_type_list_1 (false, return_type, p);
8107 va_end (p);
8108 return args;
8109 }
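
/* Illustrative sketch, not part of the original source: the type of
   "int f (int, double)" built from a NULL_TREE-terminated argument list.
   Because VAARGS is false, void_list_node is appended to mark the fixed
   argument list.  */

static ATTRIBUTE_UNUSED void
example_function_type_list (void)
{
  tree fntype = build_function_type_list (integer_type_node,
                                          integer_type_node,
                                          double_type_node,
                                          NULL_TREE);

  gcc_assert (TREE_CODE (fntype) == FUNCTION_TYPE
              && TREE_TYPE (fntype) == integer_type_node);
}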
8110
8111 /* Build a variable argument function type. The RETURN_TYPE is the
8112 type returned by the function. If additional arguments are provided,
8113 they are additional argument types. The list of argument types must
8114 always be terminated by NULL_TREE. */
8115
8116 tree
8117 build_varargs_function_type_list (tree return_type, ...)
8118 {
8119 tree args;
8120 va_list p;
8121
8122 va_start (p, return_type);
8123 args = build_function_type_list_1 (true, return_type, p);
8124 va_end (p);
8125
8126 return args;
8127 }
8128
8129 /* Build a function type. RETURN_TYPE is the type returned by the
8130 function; VAARGS indicates whether the function takes varargs. The
8131 function takes N named arguments, the types of which are provided in
8132 ARG_TYPES. */
8133
8134 static tree
8135 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8136 tree *arg_types)
8137 {
8138 int i;
8139 tree t = vaargs ? NULL_TREE : void_list_node;
8140
8141 for (i = n - 1; i >= 0; i--)
8142 t = tree_cons (NULL_TREE, arg_types[i], t);
8143
8144 return build_function_type (return_type, t);
8145 }
8146
8147 /* Build a function type. RETURN_TYPE is the type returned by the
8148 function. The function takes N named arguments, the types of which
8149 are provided in ARG_TYPES. */
8150
8151 tree
8152 build_function_type_array (tree return_type, int n, tree *arg_types)
8153 {
8154 return build_function_type_array_1 (false, return_type, n, arg_types);
8155 }
8156
8157 /* Build a variable argument function type. RETURN_TYPE is the type
8158 returned by the function. The function takes N named arguments, the
8159 types of which are provided in ARG_TYPES. */
8160
8161 tree
8162 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8163 {
8164 return build_function_type_array_1 (true, return_type, n, arg_types);
8165 }
8166
8167 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8168 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8169 for the method. An implicit additional parameter (of type
8170 pointer-to-BASETYPE) is added to the ARGTYPES. */
8171
8172 tree
8173 build_method_type_directly (tree basetype,
8174 tree rettype,
8175 tree argtypes)
8176 {
8177 tree t;
8178 tree ptype;
8179 inchash::hash hstate;
8180 bool any_structural_p, any_noncanonical_p;
8181 tree canon_argtypes;
8182
8183 /* Make a node of the sort we want. */
8184 t = make_node (METHOD_TYPE);
8185
8186 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8187 TREE_TYPE (t) = rettype;
8188 ptype = build_pointer_type (basetype);
8189
8190 /* The actual arglist for this function includes a "hidden" argument
8191 which is "this". Put it into the list of argument types. */
8192 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8193 TYPE_ARG_TYPES (t) = argtypes;
8194
8195 /* If we already have such a type, use the old one. */
8196 hstate.add_object (TYPE_HASH (basetype));
8197 hstate.add_object (TYPE_HASH (rettype));
8198 type_hash_list (argtypes, hstate);
8199 t = type_hash_canon (hstate.end (), t);
8200
8201 /* Set up the canonical type. */
8202 any_structural_p
8203 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8204 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8205 any_noncanonical_p
8206 = (TYPE_CANONICAL (basetype) != basetype
8207 || TYPE_CANONICAL (rettype) != rettype);
8208 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8209 &any_structural_p,
8210 &any_noncanonical_p);
8211 if (any_structural_p)
8212 SET_TYPE_STRUCTURAL_EQUALITY (t);
8213 else if (any_noncanonical_p)
8214 TYPE_CANONICAL (t)
8215 = build_method_type_directly (TYPE_CANONICAL (basetype),
8216 TYPE_CANONICAL (rettype),
8217 canon_argtypes);
8218 if (!COMPLETE_TYPE_P (t))
8219 layout_type (t);
8220
8221 return t;
8222 }
8223
8224 /* Construct, lay out and return the type of methods belonging to class
8225 BASETYPE and whose arguments and values are described by TYPE.
8226 If that type exists already, reuse it.
8227 TYPE must be a FUNCTION_TYPE node. */
8228
8229 tree
8230 build_method_type (tree basetype, tree type)
8231 {
8232 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8233
8234 return build_method_type_directly (basetype,
8235 TREE_TYPE (type),
8236 TYPE_ARG_TYPES (type));
8237 }
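
/* Illustrative sketch, not part of the original source (the class type here
   is a bare, hypothetical RECORD_TYPE): wrapping a FUNCTION_TYPE into a
   METHOD_TYPE prepends the implicit "this" argument, a pointer to the base
   type, to TYPE_ARG_TYPES.  */

static ATTRIBUTE_UNUSED void
example_method_type (void)
{
  tree klass = make_node (RECORD_TYPE);
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  tree mtype = build_method_type (klass, fntype);
  tree this_type = TREE_VALUE (TYPE_ARG_TYPES (mtype));

  gcc_assert (TREE_CODE (mtype) == METHOD_TYPE
              && TREE_TYPE (this_type) == klass);
}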
8238
8239 /* Construct, lay out and return the type of offsets to a value
8240 of type TYPE, within an object of type BASETYPE.
8241 If a suitable offset type exists already, reuse it. */
8242
8243 tree
8244 build_offset_type (tree basetype, tree type)
8245 {
8246 tree t;
8247 inchash::hash hstate;
8248
8249 /* Make a node of the sort we want. */
8250 t = make_node (OFFSET_TYPE);
8251
8252 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8253 TREE_TYPE (t) = type;
8254
8255 /* If we already have such a type, use the old one. */
8256 hstate.add_object (TYPE_HASH (basetype));
8257 hstate.add_object (TYPE_HASH (type));
8258 t = type_hash_canon (hstate.end (), t);
8259
8260 if (!COMPLETE_TYPE_P (t))
8261 layout_type (t);
8262
8263 if (TYPE_CANONICAL (t) == t)
8264 {
8265 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8266 || TYPE_STRUCTURAL_EQUALITY_P (type))
8267 SET_TYPE_STRUCTURAL_EQUALITY (t);
8268 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8269 || TYPE_CANONICAL (type) != type)
8270 TYPE_CANONICAL (t)
8271 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8272 TYPE_CANONICAL (type));
8273 }
8274
8275 return t;
8276 }
8277
8278 /* Create a complex type whose components are COMPONENT_TYPE. */
8279
8280 tree
8281 build_complex_type (tree component_type)
8282 {
8283 tree t;
8284 inchash::hash hstate;
8285
8286 gcc_assert (INTEGRAL_TYPE_P (component_type)
8287 || SCALAR_FLOAT_TYPE_P (component_type)
8288 || FIXED_POINT_TYPE_P (component_type));
8289
8290 /* Make a node of the sort we want. */
8291 t = make_node (COMPLEX_TYPE);
8292
8293 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8294
8295 /* If we already have such a type, use the old one. */
8296 hstate.add_object (TYPE_HASH (component_type));
8297 t = type_hash_canon (hstate.end (), t);
8298
8299 if (!COMPLETE_TYPE_P (t))
8300 layout_type (t);
8301
8302 if (TYPE_CANONICAL (t) == t)
8303 {
8304 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8305 SET_TYPE_STRUCTURAL_EQUALITY (t);
8306 else if (TYPE_CANONICAL (component_type) != component_type)
8307 TYPE_CANONICAL (t)
8308 = build_complex_type (TYPE_CANONICAL (component_type));
8309 }
8310
8311 /* We need to create a name, since complex is a fundamental type. */
8312 if (! TYPE_NAME (t))
8313 {
8314 const char *name;
8315 if (component_type == char_type_node)
8316 name = "complex char";
8317 else if (component_type == signed_char_type_node)
8318 name = "complex signed char";
8319 else if (component_type == unsigned_char_type_node)
8320 name = "complex unsigned char";
8321 else if (component_type == short_integer_type_node)
8322 name = "complex short int";
8323 else if (component_type == short_unsigned_type_node)
8324 name = "complex short unsigned int";
8325 else if (component_type == integer_type_node)
8326 name = "complex int";
8327 else if (component_type == unsigned_type_node)
8328 name = "complex unsigned int";
8329 else if (component_type == long_integer_type_node)
8330 name = "complex long int";
8331 else if (component_type == long_unsigned_type_node)
8332 name = "complex long unsigned int";
8333 else if (component_type == long_long_integer_type_node)
8334 name = "complex long long int";
8335 else if (component_type == long_long_unsigned_type_node)
8336 name = "complex long long unsigned int";
8337 else
8338 name = 0;
8339
8340 if (name != 0)
8341 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8342 get_identifier (name), t);
8343 }
8344
8345 return build_qualified_type (t, TYPE_QUALS (component_type));
8346 }
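
/* Illustrative sketch, not part of the original source: the component type
   is recorded in TREE_TYPE, and repeated requests for the same component
   reuse the hashed COMPLEX_TYPE node.  */

static ATTRIBUTE_UNUSED void
example_complex_type (void)
{
  tree c1 = build_complex_type (double_type_node);
  tree c2 = build_complex_type (double_type_node);

  gcc_assert (c1 == c2
              && TREE_CODE (c1) == COMPLEX_TYPE
              && TREE_TYPE (c1) == double_type_node);
}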
8347
8348 /* If TYPE is a real or complex floating-point type and the target
8349 does not directly support arithmetic on TYPE then return the wider
8350 type to be used for arithmetic on TYPE. Otherwise, return
8351 NULL_TREE. */
8352
8353 tree
8354 excess_precision_type (tree type)
8355 {
8356 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8357 {
8358 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8359 switch (TREE_CODE (type))
8360 {
8361 case REAL_TYPE:
8362 switch (flt_eval_method)
8363 {
8364 case 1:
8365 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8366 return double_type_node;
8367 break;
8368 case 2:
8369 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8370 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8371 return long_double_type_node;
8372 break;
8373 default:
8374 gcc_unreachable ();
8375 }
8376 break;
8377 case COMPLEX_TYPE:
8378 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8379 return NULL_TREE;
8380 switch (flt_eval_method)
8381 {
8382 case 1:
8383 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8384 return complex_double_type_node;
8385 break;
8386 case 2:
8387 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8388 || (TYPE_MODE (TREE_TYPE (type))
8389 == TYPE_MODE (double_type_node)))
8390 return complex_long_double_type_node;
8391 break;
8392 default:
8393 gcc_unreachable ();
8394 }
8395 break;
8396 default:
8397 break;
8398 }
8399 }
8400 return NULL_TREE;
8401 }
8402 \f
8403 /* Return OP, stripped of any conversions to wider types as much as is safe.
8404 Converting the value back to OP's type makes a value equivalent to OP.
8405
8406 If FOR_TYPE is nonzero, we return a value which, if converted to
8407 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8408
8409 OP must have integer, real or enumeral type. Pointers are not allowed!
8410
8411 There are some cases where the obvious value we could return
8412 would regenerate to OP if converted to OP's type,
8413 but would not extend like OP to wider types.
8414 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8415 For example, if OP is (unsigned short)(signed char)-1,
8416 we avoid returning (signed char)-1 if FOR_TYPE is int,
8417 even though extending that to an unsigned short would regenerate OP,
8418 since the result of extending (signed char)-1 to (int)
8419 is different from (int) OP. */
8420
8421 tree
8422 get_unwidened (tree op, tree for_type)
8423 {
8424 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8425 tree type = TREE_TYPE (op);
8426 unsigned final_prec
8427 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8428 int uns
8429 = (for_type != 0 && for_type != type
8430 && final_prec > TYPE_PRECISION (type)
8431 && TYPE_UNSIGNED (type));
8432 tree win = op;
8433
8434 while (CONVERT_EXPR_P (op))
8435 {
8436 int bitschange;
8437
8438 /* TYPE_PRECISION on vector types has different meaning
8439 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8440 so avoid them here. */
8441 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8442 break;
8443
8444 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8445 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8446
8447 /* Truncations are many-one so cannot be removed.
8448 Unless we are later going to truncate down even farther. */
8449 if (bitschange < 0
8450 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8451 break;
8452
8453 /* See what's inside this conversion. If we decide to strip it,
8454 we will set WIN. */
8455 op = TREE_OPERAND (op, 0);
8456
8457 /* If we have not stripped any zero-extensions (uns is 0),
8458 we can strip any kind of extension.
8459 If we have previously stripped a zero-extension,
8460 only zero-extensions can safely be stripped.
8461 Any extension can be stripped if the bits it would produce
8462 are all going to be discarded later by truncating to FOR_TYPE. */
8463
8464 if (bitschange > 0)
8465 {
8466 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8467 win = op;
8468 /* TYPE_UNSIGNED says whether this is a zero-extension.
8469 Let's avoid computing it if it does not affect WIN
8470 and if UNS will not be needed again. */
8471 if ((uns
8472 || CONVERT_EXPR_P (op))
8473 && TYPE_UNSIGNED (TREE_TYPE (op)))
8474 {
8475 uns = 1;
8476 win = op;
8477 }
8478 }
8479 }
8480
8481 /* If we finally reach a constant see if it fits in for_type and
8482 in that case convert it. */
8483 if (for_type
8484 && TREE_CODE (win) == INTEGER_CST
8485 && TREE_TYPE (win) != for_type
8486 && int_fits_type_p (win, for_type))
8487 win = fold_convert (for_type, win);
8488
8489 return win;
8490 }
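
/* Illustrative sketch, not part of the original source: a small constant of
   type int fits in unsigned char, so asking for it unwidened with that
   FOR_TYPE hands back the constant converted to the narrower type.  */

static ATTRIBUTE_UNUSED void
example_get_unwidened (void)
{
  tree wide = build_int_cst (integer_type_node, 7);
  tree narrow = get_unwidened (wide, unsigned_char_type_node);

  gcc_assert (TREE_CODE (narrow) == INTEGER_CST
              && TREE_TYPE (narrow) == unsigned_char_type_node
              && tree_to_uhwi (narrow) == 7);
}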
8491 \f
8492 /* Return OP or a simpler expression for a narrower value
8493 which can be sign-extended or zero-extended to give back OP.
8494 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8495 or 0 if the value should be sign-extended. */
8496
8497 tree
8498 get_narrower (tree op, int *unsignedp_ptr)
8499 {
8500 int uns = 0;
8501 int first = 1;
8502 tree win = op;
8503 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8504
8505 while (TREE_CODE (op) == NOP_EXPR)
8506 {
8507 int bitschange
8508 = (TYPE_PRECISION (TREE_TYPE (op))
8509 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8510
8511 /* Truncations are many-one so cannot be removed. */
8512 if (bitschange < 0)
8513 break;
8514
8515 /* See what's inside this conversion. If we decide to strip it,
8516 we will set WIN. */
8517
8518 if (bitschange > 0)
8519 {
8520 op = TREE_OPERAND (op, 0);
8521 /* An extension: the outermost one can be stripped,
8522 but remember whether it is zero or sign extension. */
8523 if (first)
8524 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8525 /* Otherwise, if a sign extension has been stripped,
8526 only sign extensions can now be stripped;
8527 if a zero extension has been stripped, only zero-extensions. */
8528 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8529 break;
8530 first = 0;
8531 }
8532 else /* bitschange == 0 */
8533 {
8534 /* A change in nominal type can always be stripped, but we must
8535 preserve the unsignedness. */
8536 if (first)
8537 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8538 first = 0;
8539 op = TREE_OPERAND (op, 0);
8540 /* Keep trying to narrow, but don't assign op to win if it
8541 would turn an integral type into something else. */
8542 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8543 continue;
8544 }
8545
8546 win = op;
8547 }
8548
8549 if (TREE_CODE (op) == COMPONENT_REF
8550 /* Since type_for_size always gives an integer type. */
8551 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8552 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8553 /* Ensure field is laid out already. */
8554 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8555 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8556 {
8557 unsigned HOST_WIDE_INT innerprec
8558 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8559 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8560 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8561 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8562
8563 /* We can get this structure field in a narrower type that fits it,
8564 but the resulting extension to its nominal type (a fullword type)
8565 must satisfy the same conditions as for other extensions.
8566
8567 Do this only for fields that are aligned (not bit-fields),
8568 because when bit-field insns will be used there is no
8569 advantage in doing this. */
8570
8571 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8572 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8573 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8574 && type != 0)
8575 {
8576 if (first)
8577 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8578 win = fold_convert (type, op);
8579 }
8580 }
8581
8582 *unsignedp_ptr = uns;
8583 return win;
8584 }
8585 \f
8586 /* Returns true if integer constant C has a value that is permissible
8587 for type TYPE (an INTEGER_TYPE). */
8588
8589 bool
8590 int_fits_type_p (const_tree c, const_tree type)
8591 {
8592 tree type_low_bound, type_high_bound;
8593 bool ok_for_low_bound, ok_for_high_bound;
8594 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8595
8596 retry:
8597 type_low_bound = TYPE_MIN_VALUE (type);
8598 type_high_bound = TYPE_MAX_VALUE (type);
8599
8600 /* If at least one bound of the type is a constant integer, we can check
8601 ourselves and maybe make a decision. If no such decision is possible, but
8602 this type is a subtype, try checking against that. Otherwise, use
8603 fits_to_tree_p, which checks against the precision.
8604
8605 Compute the status for each possibly constant bound, and return if we see
8606 one does not match. Use ok_for_xxx_bound for this purpose: it is true
8607 only when the corresponding bound is constant and the constant C is
8608 known to fit it. */
8609
8610 /* Check if c >= type_low_bound. */
8611 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8612 {
8613 if (tree_int_cst_lt (c, type_low_bound))
8614 return false;
8615 ok_for_low_bound = true;
8616 }
8617 else
8618 ok_for_low_bound = false;
8619
8620 /* Check if c <= type_high_bound. */
8621 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8622 {
8623 if (tree_int_cst_lt (type_high_bound, c))
8624 return false;
8625 ok_for_high_bound = true;
8626 }
8627 else
8628 ok_for_high_bound = false;
8629
8630 /* If the constant fits both bounds, the result is known. */
8631 if (ok_for_low_bound && ok_for_high_bound)
8632 return true;
8633
8634 /* Perform some generic filtering which may allow making a decision
8635 even if the bounds are not constant. First, negative integers
8636 never fit in unsigned types. */
8637 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8638 return false;
8639
8640 /* Second, narrower types always fit in wider ones. */
8641 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8642 return true;
8643
8644 /* Third, unsigned integers with top bit set never fit signed types. */
8645 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8646 {
8647 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8648 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8649 {
8650 /* When a tree_cst is converted to a wide-int, the precision
8651 is taken from the type. However, if the precision of the
8652 mode underneath the type is smaller than that, it is
8653 possible that the value will not fit. The test below
8654 fails if any bit is set between the sign bit of the
8655 underlying mode and the top bit of the type. */
8656 if (wi::ne_p (wi::zext (c, prec - 1), c))
8657 return false;
8658 }
8659 else if (wi::neg_p (c))
8660 return false;
8661 }
8662
8663 /* If we haven't been able to decide at this point, there is nothing more we
8664 can check ourselves here. Look at the base type if we have one and it
8665 has the same precision. */
8666 if (TREE_CODE (type) == INTEGER_TYPE
8667 && TREE_TYPE (type) != 0
8668 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8669 {
8670 type = TREE_TYPE (type);
8671 goto retry;
8672 }
8673
8674 /* Or to fits_to_tree_p, if nothing else. */
8675 return wi::fits_to_tree_p (c, type);
8676 }
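
/* Illustrative sketch, not part of the original source, assuming a target
   where unsigned char is 8 bits wide: 255 is the largest representable
   value, so 255 fits and 256 does not.  */

static ATTRIBUTE_UNUSED void
example_int_fits_type_p (void)
{
  tree fits = build_int_cst (integer_type_node, 255);
  tree overflows = build_int_cst (integer_type_node, 256);

  gcc_assert (int_fits_type_p (fits, unsigned_char_type_node)
              && !int_fits_type_p (overflows, unsigned_char_type_node));
}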
8677
8678 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8679 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8680 represented (assuming two's-complement arithmetic) within the bit
8681 precision of the type are returned instead. */
8682
8683 void
8684 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8685 {
8686 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8687 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8688 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8689 else
8690 {
8691 if (TYPE_UNSIGNED (type))
8692 mpz_set_ui (min, 0);
8693 else
8694 {
8695 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8696 wi::to_mpz (mn, min, SIGNED);
8697 }
8698 }
8699
8700 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8701 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8702 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8703 else
8704 {
8705 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8706 wi::to_mpz (mn, max, TYPE_SIGN (type));
8707 }
8708 }
8709
8710 /* Return true if VAR is an automatic variable defined in function FN. */
8711
8712 bool
8713 auto_var_in_fn_p (const_tree var, const_tree fn)
8714 {
8715 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8716 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8717 || TREE_CODE (var) == PARM_DECL)
8718 && ! TREE_STATIC (var))
8719 || TREE_CODE (var) == LABEL_DECL
8720 || TREE_CODE (var) == RESULT_DECL));
8721 }
8722
8723 /* Subprogram of following function. Called by walk_tree.
8724
8725 Return *TP if it is an automatic variable or parameter of the
8726 function passed in as DATA. */
8727
8728 static tree
8729 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8730 {
8731 tree fn = (tree) data;
8732
8733 if (TYPE_P (*tp))
8734 *walk_subtrees = 0;
8735
8736 else if (DECL_P (*tp)
8737 && auto_var_in_fn_p (*tp, fn))
8738 return *tp;
8739
8740 return NULL_TREE;
8741 }
8742
8743 /* Returns true if T is, contains, or refers to a type with variable
8744 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8745 arguments, but not the return type. If FN is nonzero, only return
8746 true if a modifier of the type or position of FN is a variable or
8747 parameter inside FN.
8748
8749 This concept is more general than that of C99 'variably modified types':
8750 in C99, a struct type is never variably modified because a VLA may not
8751 appear as a structure member. However, in GNU C code like:
8752
8753 struct S { int i[f()]; };
8754
8755 is valid, and other languages may define similar constructs. */
8756
8757 bool
8758 variably_modified_type_p (tree type, tree fn)
8759 {
8760 tree t;
8761
8762 /* Test if T is either variable (if FN is zero) or an expression containing
8763 a variable in FN. If TYPE isn't gimplified, return true also if
8764 gimplify_one_sizepos would gimplify the expression into a local
8765 variable. */
8766 #define RETURN_TRUE_IF_VAR(T) \
8767 do { tree _t = (T); \
8768 if (_t != NULL_TREE \
8769 && _t != error_mark_node \
8770 && TREE_CODE (_t) != INTEGER_CST \
8771 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8772 && (!fn \
8773 || (!TYPE_SIZES_GIMPLIFIED (type) \
8774 && !is_gimple_sizepos (_t)) \
8775 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8776 return true; } while (0)
8777
8778 if (type == error_mark_node)
8779 return false;
8780
8781 /* If TYPE itself has variable size, it is variably modified. */
8782 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8783 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8784
8785 switch (TREE_CODE (type))
8786 {
8787 case POINTER_TYPE:
8788 case REFERENCE_TYPE:
8789 case VECTOR_TYPE:
8790 if (variably_modified_type_p (TREE_TYPE (type), fn))
8791 return true;
8792 break;
8793
8794 case FUNCTION_TYPE:
8795 case METHOD_TYPE:
8796 /* If TYPE is a function type, it is variably modified if the
8797 return type is variably modified. */
8798 if (variably_modified_type_p (TREE_TYPE (type), fn))
8799 return true;
8800 break;
8801
8802 case INTEGER_TYPE:
8803 case REAL_TYPE:
8804 case FIXED_POINT_TYPE:
8805 case ENUMERAL_TYPE:
8806 case BOOLEAN_TYPE:
8807 /* Scalar types are variably modified if their end points
8808 aren't constant. */
8809 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8810 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8811 break;
8812
8813 case RECORD_TYPE:
8814 case UNION_TYPE:
8815 case QUAL_UNION_TYPE:
8816 /* We can't see if any of the fields are variably-modified by the
8817 definition we normally use, since that would produce infinite
8818 recursion via pointers. */
8819 /* This is variably modified if some field's type is. */
8820 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8821 if (TREE_CODE (t) == FIELD_DECL)
8822 {
8823 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8824 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8825 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8826
8827 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8828 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8829 }
8830 break;
8831
8832 case ARRAY_TYPE:
8833 /* Do not call ourselves to avoid infinite recursion. This is
8834 variably modified if the element type is. */
8835 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8836 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8837 break;
8838
8839 default:
8840 break;
8841 }
8842
8843 /* The current language may have other cases to check, but in general,
8844 all other types are not variably modified. */
8845 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8846
8847 #undef RETURN_TRUE_IF_VAR
8848 }
8849
8850 /* Given a DECL or TYPE, return the scope in which it was declared, or
8851 NULL_TREE if there is no containing scope. */
8852
8853 tree
8854 get_containing_scope (const_tree t)
8855 {
8856 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8857 }
8858
8859 /* Return the innermost context enclosing DECL that is
8860 a FUNCTION_DECL, or zero if none. */
8861
8862 tree
8863 decl_function_context (const_tree decl)
8864 {
8865 tree context;
8866
8867 if (TREE_CODE (decl) == ERROR_MARK)
8868 return 0;
8869
8870 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8871 where we look up the function at runtime. Such functions always take
8872 a first argument of type 'pointer to real context'.
8873
8874 C++ should really be fixed to use DECL_CONTEXT for the real context,
8875 and use something else for the "virtual context". */
8876 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8877 context
8878 = TYPE_MAIN_VARIANT
8879 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8880 else
8881 context = DECL_CONTEXT (decl);
8882
8883 while (context && TREE_CODE (context) != FUNCTION_DECL)
8884 {
8885 if (TREE_CODE (context) == BLOCK)
8886 context = BLOCK_SUPERCONTEXT (context);
8887 else
8888 context = get_containing_scope (context);
8889 }
8890
8891 return context;
8892 }
8893
8894 /* Return the innermost context enclosing DECL that is
8895 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8896 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8897
8898 tree
8899 decl_type_context (const_tree decl)
8900 {
8901 tree context = DECL_CONTEXT (decl);
8902
8903 while (context)
8904 switch (TREE_CODE (context))
8905 {
8906 case NAMESPACE_DECL:
8907 case TRANSLATION_UNIT_DECL:
8908 return NULL_TREE;
8909
8910 case RECORD_TYPE:
8911 case UNION_TYPE:
8912 case QUAL_UNION_TYPE:
8913 return context;
8914
8915 case TYPE_DECL:
8916 case FUNCTION_DECL:
8917 context = DECL_CONTEXT (context);
8918 break;
8919
8920 case BLOCK:
8921 context = BLOCK_SUPERCONTEXT (context);
8922 break;
8923
8924 default:
8925 gcc_unreachable ();
8926 }
8927
8928 return NULL_TREE;
8929 }
8930
8931 /* CALL is a CALL_EXPR. Return the declaration for the function
8932 called, or NULL_TREE if the called function cannot be
8933 determined. */
8934
8935 tree
8936 get_callee_fndecl (const_tree call)
8937 {
8938 tree addr;
8939
8940 if (call == error_mark_node)
8941 return error_mark_node;
8942
8943 /* It's invalid to call this function with anything but a
8944 CALL_EXPR. */
8945 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8946
8947 /* The first operand to the CALL is the address of the function
8948 called. */
8949 addr = CALL_EXPR_FN (call);
8950
8951 /* If there is no function, return early. */
8952 if (addr == NULL_TREE)
8953 return NULL_TREE;
8954
8955 STRIP_NOPS (addr);
8956
8957 /* If this is a readonly function pointer, extract its initial value. */
8958 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8959 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8960 && DECL_INITIAL (addr))
8961 addr = DECL_INITIAL (addr);
8962
8963 /* If the address is just `&f' for some function `f', then we know
8964 that `f' is being called. */
8965 if (TREE_CODE (addr) == ADDR_EXPR
8966 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8967 return TREE_OPERAND (addr, 0);
8968
8969 /* We couldn't figure out what was being called. */
8970 return NULL_TREE;
8971 }
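
/* Illustrative sketch, not part of the original source (FNDECL stands for
   any FUNCTION_DECL already at hand): a direct call carries an ADDR_EXPR of
   the callee, so get_callee_fndecl recovers the declaration; a call through
   a non-constant function pointer would yield NULL_TREE instead.  */

static ATTRIBUTE_UNUSED void
example_get_callee_fndecl (tree fndecl)
{
  tree fnaddr = build1 (ADDR_EXPR,
                        build_pointer_type (TREE_TYPE (fndecl)), fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fndecl)), fnaddr, 0);

  gcc_assert (get_callee_fndecl (call) == fndecl);
}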
8972
8973 /* Print debugging information about tree nodes generated during the compile,
8974 and any language-specific information. */
8975
8976 void
8977 dump_tree_statistics (void)
8978 {
8979 if (GATHER_STATISTICS)
8980 {
8981 int i;
8982 int total_nodes, total_bytes;
8983 fprintf (stderr, "Kind Nodes Bytes\n");
8984 fprintf (stderr, "---------------------------------------\n");
8985 total_nodes = total_bytes = 0;
8986 for (i = 0; i < (int) all_kinds; i++)
8987 {
8988 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8989 tree_node_counts[i], tree_node_sizes[i]);
8990 total_nodes += tree_node_counts[i];
8991 total_bytes += tree_node_sizes[i];
8992 }
8993 fprintf (stderr, "---------------------------------------\n");
8994 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8995 fprintf (stderr, "---------------------------------------\n");
8996 fprintf (stderr, "Code Nodes\n");
8997 fprintf (stderr, "----------------------------\n");
8998 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8999 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9000 tree_code_counts[i]);
9001 fprintf (stderr, "----------------------------\n");
9002 ssanames_print_statistics ();
9003 phinodes_print_statistics ();
9004 }
9005 else
9006 fprintf (stderr, "(No per-node statistics)\n");
9007
9008 print_type_hash_statistics ();
9009 print_debug_expr_statistics ();
9010 print_value_expr_statistics ();
9011 lang_hooks.print_statistics ();
9012 }
9013 \f
9014 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9015
9016 /* Fold the most-significant BITS bits of VALUE into the crc32 CHKSUM. */
9017
9018 static unsigned
9019 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9020 {
9021 unsigned ix;
9022
9023 for (ix = bits; ix--; value <<= 1)
9024 {
9025 unsigned feedback;
9026
9027 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9028 chksum <<= 1;
9029 chksum ^= feedback;
9030 }
9031 return chksum;
9032 }
9033
9034 /* Generate a crc32 of a 32-bit unsigned. */
9035
9036 unsigned
9037 crc32_unsigned (unsigned chksum, unsigned value)
9038 {
9039 return crc32_unsigned_bits (chksum, value, 32);
9040 }
9041
9042 /* Generate a crc32 of a byte. */
9043
9044 unsigned
9045 crc32_byte (unsigned chksum, char byte)
9046 {
9047 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9048 }
9049
9050 /* Generate a crc32 of a string. */
9051
9052 unsigned
9053 crc32_string (unsigned chksum, const char *string)
9054 {
9055 do
9056 {
9057 chksum = crc32_byte (chksum, *string);
9058 }
9059 while (*string++);
9060 return chksum;
9061 }
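
/* Illustrative sketch, not part of the original source: chaining the crc32
   helpers the way get_file_function_name does, folding a string and then a
   32-bit value into one running checksum.  */

static ATTRIBUTE_UNUSED unsigned
example_crc32_chain (const char *name, unsigned salt)
{
  unsigned chksum = crc32_string (0, name);
  return crc32_unsigned (chksum, salt);
}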
9062
9063 /* P is a string that will be used in a symbol. Mask out any characters
9064 that are not valid in that context. */
9065
9066 void
9067 clean_symbol_name (char *p)
9068 {
9069 for (; *p; p++)
9070 if (! (ISALNUM (*p)
9071 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9072 || *p == '$'
9073 #endif
9074 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9075 || *p == '.'
9076 #endif
9077 ))
9078 *p = '_';
9079 }
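
/* Illustrative sketch, not part of the original source: the basename of a
   source file is flattened into an identifier-safe string; the '-' always
   becomes '_', and the '.' is also replaced when NO_DOT_IN_LABEL is
   defined.  */

static ATTRIBUTE_UNUSED void
example_clean_symbol_name (void)
{
  char buf[] = "foo-bar.c";

  clean_symbol_name (buf);
  gcc_assert (buf[3] == '_');
}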
9080
9081 /* Generate a name for a special-purpose function.
9082 The generated name may need to be unique across the whole link.
9083 Changes to this function may also require corresponding changes to
9084 xstrdup_mask_random.
9085 TYPE is some string to identify the purpose of this function to the
9086 linker or collect2; it must start with an uppercase letter,
9087 one of:
9088 I - for constructors
9089 D - for destructors
9090 N - for C++ anonymous namespaces
9091 F - for DWARF unwind frame information. */
9092
9093 tree
9094 get_file_function_name (const char *type)
9095 {
9096 char *buf;
9097 const char *p;
9098 char *q;
9099
9100 /* If we already have a name we know to be unique, just use that. */
9101 if (first_global_object_name)
9102 p = q = ASTRDUP (first_global_object_name);
9103 /* If the target is handling the constructors/destructors, they
9104 will be local to this file and the name is only necessary for
9105 debugging purposes.
9106 We also assign sub_I and sub_D suffixes to constructors called from
9107 the global static constructors. These are always local. */
9108 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9109 || (strncmp (type, "sub_", 4) == 0
9110 && (type[4] == 'I' || type[4] == 'D')))
9111 {
9112 const char *file = main_input_filename;
9113 if (! file)
9114 file = LOCATION_FILE (input_location);
9115 /* Just use the file's basename, because the full pathname
9116 might be quite long. */
9117 p = q = ASTRDUP (lbasename (file));
9118 }
9119 else
9120 {
9121 /* Otherwise, the name must be unique across the entire link.
9122 We don't have anything that we know to be unique to this translation
9123 unit, so use what we do have and throw in some randomness. */
9124 unsigned len;
9125 const char *name = weak_global_object_name;
9126 const char *file = main_input_filename;
9127
9128 if (! name)
9129 name = "";
9130 if (! file)
9131 file = LOCATION_FILE (input_location);
9132
9133 len = strlen (file);
9134 q = (char *) alloca (9 + 17 + len + 1);
9135 memcpy (q, file, len + 1);
9136
9137 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9138 crc32_string (0, name), get_random_seed (false));
9139
9140 p = q;
9141 }
9142
9143 clean_symbol_name (q);
9144 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9145 + strlen (type));
9146
9147 /* Set up the name of the file-level functions we may need.
9148 Use a global object (which is already required to be unique over
9149 the program) rather than the file name (which imposes extra
9150 constraints). */
9151 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9152
9153 return get_identifier (buf);
9154 }
9155 \f
9156 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9157
9158 /* Complain that the tree code of NODE does not match the expected 0
9159 terminated list of trailing codes. The trailing code list can be
9160 empty, for a more vague error message. FILE, LINE, and FUNCTION
9161 are of the caller. */
9162
9163 void
9164 tree_check_failed (const_tree node, const char *file,
9165 int line, const char *function, ...)
9166 {
9167 va_list args;
9168 const char *buffer;
9169 unsigned length = 0;
9170 enum tree_code code;
9171
9172 va_start (args, function);
9173 while ((code = (enum tree_code) va_arg (args, int)))
9174 length += 4 + strlen (get_tree_code_name (code));
9175 va_end (args);
9176 if (length)
9177 {
9178 char *tmp;
9179 va_start (args, function);
9180 length += strlen ("expected ");
9181 buffer = tmp = (char *) alloca (length);
9182 length = 0;
9183 while ((code = (enum tree_code) va_arg (args, int)))
9184 {
9185 const char *prefix = length ? " or " : "expected ";
9186
9187 strcpy (tmp + length, prefix);
9188 length += strlen (prefix);
9189 strcpy (tmp + length, get_tree_code_name (code));
9190 length += strlen (get_tree_code_name (code));
9191 }
9192 va_end (args);
9193 }
9194 else
9195 buffer = "unexpected node";
9196
9197 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9198 buffer, get_tree_code_name (TREE_CODE (node)),
9199 function, trim_filename (file), line);
9200 }
9201
9202 /* Complain that the tree code of NODE does match the expected 0
9203 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9204 the caller. */
9205
9206 void
9207 tree_not_check_failed (const_tree node, const char *file,
9208 int line, const char *function, ...)
9209 {
9210 va_list args;
9211 char *buffer;
9212 unsigned length = 0;
9213 enum tree_code code;
9214
9215 va_start (args, function);
9216 while ((code = (enum tree_code) va_arg (args, int)))
9217 length += 4 + strlen (get_tree_code_name (code));
9218 va_end (args);
9219 va_start (args, function);
9220 buffer = (char *) alloca (length);
9221 length = 0;
9222 while ((code = (enum tree_code) va_arg (args, int)))
9223 {
9224 if (length)
9225 {
9226 strcpy (buffer + length, " or ");
9227 length += 4;
9228 }
9229 strcpy (buffer + length, get_tree_code_name (code));
9230 length += strlen (get_tree_code_name (code));
9231 }
9232 va_end (args);
9233
9234 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9235 buffer, get_tree_code_name (TREE_CODE (node)),
9236 function, trim_filename (file), line);
9237 }
9238
9239 /* Similar to tree_check_failed, except that we check for a class of tree
9240 code, given in CL. */
9241
9242 void
9243 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9244 const char *file, int line, const char *function)
9245 {
9246 internal_error
9247 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9248 TREE_CODE_CLASS_STRING (cl),
9249 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9250 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9251 }
9252
9253 /* Similar to tree_check_failed, except that instead of specifying a
9254 dozen codes, use the knowledge that they're all sequential. */
9255
9256 void
9257 tree_range_check_failed (const_tree node, const char *file, int line,
9258 const char *function, enum tree_code c1,
9259 enum tree_code c2)
9260 {
9261 char *buffer;
9262 unsigned length = 0;
9263 unsigned int c;
9264
9265 for (c = c1; c <= c2; ++c)
9266 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9267
9268 length += strlen ("expected ");
9269 buffer = (char *) alloca (length);
9270 length = 0;
9271
9272 for (c = c1; c <= c2; ++c)
9273 {
9274 const char *prefix = length ? " or " : "expected ";
9275
9276 strcpy (buffer + length, prefix);
9277 length += strlen (prefix);
9278 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9279 length += strlen (get_tree_code_name ((enum tree_code) c));
9280 }
9281
9282 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9283 buffer, get_tree_code_name (TREE_CODE (node)),
9284 function, trim_filename (file), line);
9285 }
9286
9287
9288 /* Similar to tree_check_failed, except that we check that a tree does
9289 not belong to the class of tree codes given in CL. */
9290
9291 void
9292 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9293 const char *file, int line, const char *function)
9294 {
9295 internal_error
9296 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9297 TREE_CODE_CLASS_STRING (cl),
9298 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9299 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9300 }
9301
9302
9303 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9304
9305 void
9306 omp_clause_check_failed (const_tree node, const char *file, int line,
9307 const char *function, enum omp_clause_code code)
9308 {
9309 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9310 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9311 function, trim_filename (file), line);
9312 }
9313
9314
9315 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9316
9317 void
9318 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9319 const char *function, enum omp_clause_code c1,
9320 enum omp_clause_code c2)
9321 {
9322 char *buffer;
9323 unsigned length = 0;
9324 unsigned int c;
9325
9326 for (c = c1; c <= c2; ++c)
9327 length += 4 + strlen (omp_clause_code_name[c]);
9328
9329 length += strlen ("expected ");
9330 buffer = (char *) alloca (length);
9331 length = 0;
9332
9333 for (c = c1; c <= c2; ++c)
9334 {
9335 const char *prefix = length ? " or " : "expected ";
9336
9337 strcpy (buffer + length, prefix);
9338 length += strlen (prefix);
9339 strcpy (buffer + length, omp_clause_code_name[c]);
9340 length += strlen (omp_clause_code_name[c]);
9341 }
9342
9343 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9344 buffer, omp_clause_code_name[TREE_CODE (node)],
9345 function, trim_filename (file), line);
9346 }
9347
9348
9349 #undef DEFTREESTRUCT
9350 #define DEFTREESTRUCT(VAL, NAME) NAME,
9351
9352 static const char *ts_enum_names[] = {
9353 #include "treestruct.def"
9354 };
9355 #undef DEFTREESTRUCT
9356
9357 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9358
9359 /* Similar to tree_class_check_failed, except that we check for
9360 whether CODE contains the tree structure identified by EN. */
9361
9362 void
9363 tree_contains_struct_check_failed (const_tree node,
9364 const enum tree_node_structure_enum en,
9365 const char *file, int line,
9366 const char *function)
9367 {
9368 internal_error
9369 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9370 TS_ENUM_NAME (en),
9371 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9372 }
9373
9374
9375 /* Similar to above, except that the check is for the bounds of a
9376 TREE_INT_CST's (dynamically sized) vector of elements. */
9377
9378 void
9379 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9380 const char *function)
9381 {
9382 internal_error
9383 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9384 idx + 1, len, function, trim_filename (file), line);
9385 }
9386
9387 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9388 (dynamically sized) vector. */
9389
9390 void
9391 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9392 const char *function)
9393 {
9394 internal_error
9395 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9396 idx + 1, len, function, trim_filename (file), line);
9397 }
9398
9399 /* Similar to above, except that the check is for the bounds of the operand
9400 vector of an expression node EXP. */
9401
9402 void
9403 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9404 int line, const char *function)
9405 {
9406 enum tree_code code = TREE_CODE (exp);
9407 internal_error
9408 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9409 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9410 function, trim_filename (file), line);
9411 }
9412
9413 /* Similar to above, except that the check is for the number of
9414 operands of an OMP_CLAUSE node. */
9415
9416 void
9417 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9418 int line, const char *function)
9419 {
9420 internal_error
9421 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9422 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9423 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9424 trim_filename (file), line);
9425 }
9426 #endif /* ENABLE_TREE_CHECKING */
9427 \f
9428 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9429 and mapped to the machine mode MODE. Initialize its fields and build
9430 the information necessary for debugging output. */
9431
9432 static tree
9433 make_vector_type (tree innertype, int nunits, machine_mode mode)
9434 {
9435 tree t;
9436 inchash::hash hstate;
9437
9438 t = make_node (VECTOR_TYPE);
9439 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9440 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9441 SET_TYPE_MODE (t, mode);
9442
9443 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9444 SET_TYPE_STRUCTURAL_EQUALITY (t);
9445 else if (TYPE_CANONICAL (innertype) != innertype
9446 || mode != VOIDmode)
9447 TYPE_CANONICAL (t)
9448 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9449
9450 layout_type (t);
9451
9452 hstate.add_wide_int (VECTOR_TYPE);
9453 hstate.add_wide_int (nunits);
9454 hstate.add_wide_int (mode);
9455 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9456 t = type_hash_canon (hstate.end (), t);
9457
9458 /* We have built a main variant, based on the main variant of the
9459 inner type. Use it to build the variant we return. */
9460 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9461 && TREE_TYPE (t) != innertype)
9462 return build_type_attribute_qual_variant (t,
9463 TYPE_ATTRIBUTES (innertype),
9464 TYPE_QUALS (innertype));
9465
9466 return t;
9467 }
9468
9469 static tree
9470 make_or_reuse_type (unsigned size, int unsignedp)
9471 {
9472 int i;
9473
9474 if (size == INT_TYPE_SIZE)
9475 return unsignedp ? unsigned_type_node : integer_type_node;
9476 if (size == CHAR_TYPE_SIZE)
9477 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9478 if (size == SHORT_TYPE_SIZE)
9479 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9480 if (size == LONG_TYPE_SIZE)
9481 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9482 if (size == LONG_LONG_TYPE_SIZE)
9483 return (unsignedp ? long_long_unsigned_type_node
9484 : long_long_integer_type_node);
9485
9486 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9487 if (size == int_n_data[i].bitsize
9488 && int_n_enabled_p[i])
9489 return (unsignedp ? int_n_trees[i].unsigned_type
9490 : int_n_trees[i].signed_type);
9491
9492 if (unsignedp)
9493 return make_unsigned_type (size);
9494 else
9495 return make_signed_type (size);
9496 }
9497
9498 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9499
9500 static tree
9501 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9502 {
9503 if (satp)
9504 {
9505 if (size == SHORT_FRACT_TYPE_SIZE)
9506 return unsignedp ? sat_unsigned_short_fract_type_node
9507 : sat_short_fract_type_node;
9508 if (size == FRACT_TYPE_SIZE)
9509 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9510 if (size == LONG_FRACT_TYPE_SIZE)
9511 return unsignedp ? sat_unsigned_long_fract_type_node
9512 : sat_long_fract_type_node;
9513 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9514 return unsignedp ? sat_unsigned_long_long_fract_type_node
9515 : sat_long_long_fract_type_node;
9516 }
9517 else
9518 {
9519 if (size == SHORT_FRACT_TYPE_SIZE)
9520 return unsignedp ? unsigned_short_fract_type_node
9521 : short_fract_type_node;
9522 if (size == FRACT_TYPE_SIZE)
9523 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9524 if (size == LONG_FRACT_TYPE_SIZE)
9525 return unsignedp ? unsigned_long_fract_type_node
9526 : long_fract_type_node;
9527 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9528 return unsignedp ? unsigned_long_long_fract_type_node
9529 : long_long_fract_type_node;
9530 }
9531
9532 return make_fract_type (size, unsignedp, satp);
9533 }
9534
9535 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9536
9537 static tree
9538 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9539 {
9540 if (satp)
9541 {
9542 if (size == SHORT_ACCUM_TYPE_SIZE)
9543 return unsignedp ? sat_unsigned_short_accum_type_node
9544 : sat_short_accum_type_node;
9545 if (size == ACCUM_TYPE_SIZE)
9546 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9547 if (size == LONG_ACCUM_TYPE_SIZE)
9548 return unsignedp ? sat_unsigned_long_accum_type_node
9549 : sat_long_accum_type_node;
9550 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9551 return unsignedp ? sat_unsigned_long_long_accum_type_node
9552 : sat_long_long_accum_type_node;
9553 }
9554 else
9555 {
9556 if (size == SHORT_ACCUM_TYPE_SIZE)
9557 return unsignedp ? unsigned_short_accum_type_node
9558 : short_accum_type_node;
9559 if (size == ACCUM_TYPE_SIZE)
9560 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9561 if (size == LONG_ACCUM_TYPE_SIZE)
9562 return unsignedp ? unsigned_long_accum_type_node
9563 : long_accum_type_node;
9564 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9565 return unsignedp ? unsigned_long_long_accum_type_node
9566 : long_long_accum_type_node;
9567 }
9568
9569 return make_accum_type (size, unsignedp, satp);
9570 }
9571
9572
9573 /* Create an atomic variant node for TYPE. This routine is called
9574 during initialization of data types to create the 5 basic atomic
9575 types. The generic build_variant_type function requires these to
9576 already be set up in order to function properly, so it cannot be
9577 called from there. If ALIGN is non-zero, then ensure the alignment is
9578 overridden to this value. */
9579
9580 static tree
9581 build_atomic_base (tree type, unsigned int align)
9582 {
9583 tree t;
9584
9585 /* Make sure it's not already registered. */
9586 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9587 return t;
9588
9589 t = build_variant_type_copy (type);
9590 set_type_quals (t, TYPE_QUAL_ATOMIC);
9591
9592 if (align)
9593 TYPE_ALIGN (t) = align;
9594
9595 return t;
9596 }
9597
9598 /* Create nodes for all integer types (and error_mark_node) using the sizes
9599 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9600 SHORT_DOUBLE specifies whether double should be of the same precision
9601 as float. */
9602
9603 void
9604 build_common_tree_nodes (bool signed_char, bool short_double)
9605 {
9606 int i;
9607
9608 error_mark_node = make_node (ERROR_MARK);
9609 TREE_TYPE (error_mark_node) = error_mark_node;
9610
9611 initialize_sizetypes ();
9612
9613 /* Define both `signed char' and `unsigned char'. */
9614 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9615 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9616 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9617 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9618
9619 /* Define `char', which is like either `signed char' or `unsigned char'
9620 but not the same as either. */
9621 char_type_node
9622 = (signed_char
9623 ? make_signed_type (CHAR_TYPE_SIZE)
9624 : make_unsigned_type (CHAR_TYPE_SIZE));
9625 TYPE_STRING_FLAG (char_type_node) = 1;
9626
9627 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9628 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9629 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9630 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9631 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9632 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9633 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9634 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9635
9636 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9637 {
9638 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9639 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9640 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9641 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9642
9643 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9644 && int_n_enabled_p[i])
9645 {
9646 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9647 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9648 }
9649 }
9650
9651 /* Define a boolean type. This type only represents boolean values but
9652 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9653 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9654 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9655 TYPE_PRECISION (boolean_type_node) = 1;
9656 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9657
9658 /* Define what type to use for size_t. */
9659 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9660 size_type_node = unsigned_type_node;
9661 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9662 size_type_node = long_unsigned_type_node;
9663 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9664 size_type_node = long_long_unsigned_type_node;
9665 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9666 size_type_node = short_unsigned_type_node;
9667 else
9668 {
9669 int i;
9670
9671 size_type_node = NULL_TREE;
9672 for (i = 0; i < NUM_INT_N_ENTS; i++)
9673 if (int_n_enabled_p[i])
9674 {
9675 char name[50];
9676 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9677
9678 if (strcmp (name, SIZE_TYPE) == 0)
9679 {
9680 size_type_node = int_n_trees[i].unsigned_type;
9681 }
9682 }
9683 if (size_type_node == NULL_TREE)
9684 gcc_unreachable ();
9685 }
9686
9687 /* Fill in the rest of the sized types. Reuse existing type nodes
9688 when possible. */
9689 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9690 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9691 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9692 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9693 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9694
9695 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9696 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9697 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9698 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9699 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9700
9701 /* Don't call build_qualified_type for atomics. That routine does
9702 special processing for atomics, and until they are initialized
9703 it's better not to make that call.
9704
9705 Check to see if there is a target override for atomic types. */
9706
9707 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9708 targetm.atomic_align_for_mode (QImode));
9709 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9710 targetm.atomic_align_for_mode (HImode));
9711 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9712 targetm.atomic_align_for_mode (SImode));
9713 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9714 targetm.atomic_align_for_mode (DImode));
9715 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9716 targetm.atomic_align_for_mode (TImode));
9717
9718 access_public_node = get_identifier ("public");
9719 access_protected_node = get_identifier ("protected");
9720 access_private_node = get_identifier ("private");
9721
9722 /* Define these next since types below may use them. */
9723 integer_zero_node = build_int_cst (integer_type_node, 0);
9724 integer_one_node = build_int_cst (integer_type_node, 1);
9725 integer_three_node = build_int_cst (integer_type_node, 3);
9726 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9727
9728 size_zero_node = size_int (0);
9729 size_one_node = size_int (1);
9730 bitsize_zero_node = bitsize_int (0);
9731 bitsize_one_node = bitsize_int (1);
9732 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9733
9734 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9735 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9736
9737 void_type_node = make_node (VOID_TYPE);
9738 layout_type (void_type_node);
9739
9740 pointer_bounds_type_node = targetm.chkp_bound_type ();
9741
9742 /* We are not going to have real types in C with less than byte alignment,
9743 so we might as well not have any types that claim to have it. */
9744 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9745 TYPE_USER_ALIGN (void_type_node) = 0;
9746
9747 void_node = make_node (VOID_CST);
9748 TREE_TYPE (void_node) = void_type_node;
9749
9750 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9751 layout_type (TREE_TYPE (null_pointer_node));
9752
9753 ptr_type_node = build_pointer_type (void_type_node);
9754 const_ptr_type_node
9755 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9756 fileptr_type_node = ptr_type_node;
9757
9758 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9759
9760 float_type_node = make_node (REAL_TYPE);
9761 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9762 layout_type (float_type_node);
9763
9764 double_type_node = make_node (REAL_TYPE);
9765 if (short_double)
9766 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9767 else
9768 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9769 layout_type (double_type_node);
9770
9771 long_double_type_node = make_node (REAL_TYPE);
9772 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9773 layout_type (long_double_type_node);
9774
9775 float_ptr_type_node = build_pointer_type (float_type_node);
9776 double_ptr_type_node = build_pointer_type (double_type_node);
9777 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9778 integer_ptr_type_node = build_pointer_type (integer_type_node);
9779
9780 /* Fixed size integer types. */
9781 uint16_type_node = make_or_reuse_type (16, 1);
9782 uint32_type_node = make_or_reuse_type (32, 1);
9783 uint64_type_node = make_or_reuse_type (64, 1);
9784
9785 /* Decimal float types. */
9786 dfloat32_type_node = make_node (REAL_TYPE);
9787 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9788 layout_type (dfloat32_type_node);
9789 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9790 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9791
9792 dfloat64_type_node = make_node (REAL_TYPE);
9793 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9794 layout_type (dfloat64_type_node);
9795 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9796 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9797
9798 dfloat128_type_node = make_node (REAL_TYPE);
9799 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9800 layout_type (dfloat128_type_node);
9801 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9802 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9803
9804 complex_integer_type_node = build_complex_type (integer_type_node);
9805 complex_float_type_node = build_complex_type (float_type_node);
9806 complex_double_type_node = build_complex_type (double_type_node);
9807 complex_long_double_type_node = build_complex_type (long_double_type_node);
9808
9809 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9810 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9811 sat_ ## KIND ## _type_node = \
9812 make_sat_signed_ ## KIND ## _type (SIZE); \
9813 sat_unsigned_ ## KIND ## _type_node = \
9814 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9815 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9816 unsigned_ ## KIND ## _type_node = \
9817 make_unsigned_ ## KIND ## _type (SIZE);
9818
9819 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9820 sat_ ## WIDTH ## KIND ## _type_node = \
9821 make_sat_signed_ ## KIND ## _type (SIZE); \
9822 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9823 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9824 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9825 unsigned_ ## WIDTH ## KIND ## _type_node = \
9826 make_unsigned_ ## KIND ## _type (SIZE);
9827
9828 /* Make fixed-point type nodes based on four different widths. */
9829 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9830 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9831 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9832 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9833 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9834
9835 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9836 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9837 NAME ## _type_node = \
9838 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9839 u ## NAME ## _type_node = \
9840 make_or_reuse_unsigned_ ## KIND ## _type \
9841 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9842 sat_ ## NAME ## _type_node = \
9843 make_or_reuse_sat_signed_ ## KIND ## _type \
9844 (GET_MODE_BITSIZE (MODE ## mode)); \
9845 sat_u ## NAME ## _type_node = \
9846 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9847 (GET_MODE_BITSIZE (U ## MODE ## mode));
9848
9849 /* Fixed-point type and mode nodes. */
9850 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9851 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9852 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9853 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9854 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9855 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9856 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9857 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9858 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9859 MAKE_FIXED_MODE_NODE (accum, da, DA)
9860 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9861
9862 {
9863 tree t = targetm.build_builtin_va_list ();
9864
9865 /* Many back-ends define record types without setting TYPE_NAME.
9866 If we copied the record type here, we'd keep the original
9867 record type without a name. This breaks name mangling. So,
9868 don't copy record types and let c_common_nodes_and_builtins()
9869 declare the type to be __builtin_va_list. */
9870 if (TREE_CODE (t) != RECORD_TYPE)
9871 t = build_variant_type_copy (t);
9872
9873 va_list_type_node = t;
9874 }
9875 }
9876
9877 /* Modify DECL for given flags.
9878 TM_PURE attribute is set only on types, so the function will modify
9879 DECL's type when ECF_TM_PURE is used. */
9880
9881 void
9882 set_call_expr_flags (tree decl, int flags)
9883 {
9884 if (flags & ECF_NOTHROW)
9885 TREE_NOTHROW (decl) = 1;
9886 if (flags & ECF_CONST)
9887 TREE_READONLY (decl) = 1;
9888 if (flags & ECF_PURE)
9889 DECL_PURE_P (decl) = 1;
9890 if (flags & ECF_LOOPING_CONST_OR_PURE)
9891 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9892 if (flags & ECF_NOVOPS)
9893 DECL_IS_NOVOPS (decl) = 1;
9894 if (flags & ECF_NORETURN)
9895 TREE_THIS_VOLATILE (decl) = 1;
9896 if (flags & ECF_MALLOC)
9897 DECL_IS_MALLOC (decl) = 1;
9898 if (flags & ECF_RETURNS_TWICE)
9899 DECL_IS_RETURNS_TWICE (decl) = 1;
9900 if (flags & ECF_LEAF)
9901 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9902 NULL, DECL_ATTRIBUTES (decl));
9903 if ((flags & ECF_TM_PURE) && flag_tm)
9904 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9905 /* Looping const or pure is implied by noreturn.
9906 There is currently no way to declare looping const or looping pure alone. */
9907 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9908 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9909 }
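/* Usage sketch (illustrative, not part of the original source): a front
   end that has already built a FUNCTION_DECL -- call it FNDECL, a
   hypothetical name -- can mark it as never throwing and as a leaf
   function with

       set_call_expr_flags (fndecl, ECF_NOTHROW | ECF_LEAF);

   which sets TREE_NOTHROW and attaches the "leaf" attribute exactly as
   implemented above.  */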
9910
9911
9912 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9913
9914 static void
9915 local_define_builtin (const char *name, tree type, enum built_in_function code,
9916 const char *library_name, int ecf_flags)
9917 {
9918 tree decl;
9919
9920 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9921 library_name, NULL_TREE);
9922 set_call_expr_flags (decl, ecf_flags);
9923
9924 set_builtin_decl (code, decl, true);
9925 }
9926
9927 /* Call this function after instantiating all builtins that the language
9928 front end cares about. This will build the rest of the builtins
9929 and internal functions that are relied upon by the tree optimizers and
9930 the middle-end. */
9931
9932 void
9933 build_common_builtin_nodes (void)
9934 {
9935 tree tmp, ftype;
9936 int ecf_flags;
9937
9938 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9939 {
9940 ftype = build_function_type (void_type_node, void_list_node);
9941 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9942 "__builtin_unreachable",
9943 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9944 | ECF_CONST);
9945 }
9946
9947 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9948 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9949 {
9950 ftype = build_function_type_list (ptr_type_node,
9951 ptr_type_node, const_ptr_type_node,
9952 size_type_node, NULL_TREE);
9953
9954 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9955 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9956 "memcpy", ECF_NOTHROW | ECF_LEAF);
9957 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9958 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9959 "memmove", ECF_NOTHROW | ECF_LEAF);
9960 }
9961
9962 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9963 {
9964 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9965 const_ptr_type_node, size_type_node,
9966 NULL_TREE);
9967 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9968 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9969 }
9970
9971 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9972 {
9973 ftype = build_function_type_list (ptr_type_node,
9974 ptr_type_node, integer_type_node,
9975 size_type_node, NULL_TREE);
9976 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9977 "memset", ECF_NOTHROW | ECF_LEAF);
9978 }
9979
9980 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9981 {
9982 ftype = build_function_type_list (ptr_type_node,
9983 size_type_node, NULL_TREE);
9984 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9985 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9986 }
9987
9988 ftype = build_function_type_list (ptr_type_node, size_type_node,
9989 size_type_node, NULL_TREE);
9990 local_define_builtin ("__builtin_alloca_with_align", ftype,
9991 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9992 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9993
9994 /* If we're checking the stack, `alloca' can throw. */
9995 if (flag_stack_check)
9996 {
9997 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9998 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9999 }
10000
10001 ftype = build_function_type_list (void_type_node,
10002 ptr_type_node, ptr_type_node,
10003 ptr_type_node, NULL_TREE);
10004 local_define_builtin ("__builtin_init_trampoline", ftype,
10005 BUILT_IN_INIT_TRAMPOLINE,
10006 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10007 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10008 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10009 "__builtin_init_heap_trampoline",
10010 ECF_NOTHROW | ECF_LEAF);
10011
10012 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10013 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10014 BUILT_IN_ADJUST_TRAMPOLINE,
10015 "__builtin_adjust_trampoline",
10016 ECF_CONST | ECF_NOTHROW);
10017
10018 ftype = build_function_type_list (void_type_node,
10019 ptr_type_node, ptr_type_node, NULL_TREE);
10020 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10021 BUILT_IN_NONLOCAL_GOTO,
10022 "__builtin_nonlocal_goto",
10023 ECF_NORETURN | ECF_NOTHROW);
10024
10025 ftype = build_function_type_list (void_type_node,
10026 ptr_type_node, ptr_type_node, NULL_TREE);
10027 local_define_builtin ("__builtin_setjmp_setup", ftype,
10028 BUILT_IN_SETJMP_SETUP,
10029 "__builtin_setjmp_setup", ECF_NOTHROW);
10030
10031 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10032 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10033 BUILT_IN_SETJMP_RECEIVER,
10034 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10035
10036 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10037 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10038 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10039
10040 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10041 local_define_builtin ("__builtin_stack_restore", ftype,
10042 BUILT_IN_STACK_RESTORE,
10043 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10044
10045 /* If there's a possibility that we might use the ARM EABI, build the
10046 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10047 if (targetm.arm_eabi_unwinder)
10048 {
10049 ftype = build_function_type_list (void_type_node, NULL_TREE);
10050 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10051 BUILT_IN_CXA_END_CLEANUP,
10052 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10053 }
10054
10055 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10056 local_define_builtin ("__builtin_unwind_resume", ftype,
10057 BUILT_IN_UNWIND_RESUME,
10058 ((targetm_common.except_unwind_info (&global_options)
10059 == UI_SJLJ)
10060 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10061 ECF_NORETURN);
10062
10063 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10064 {
10065 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10066 NULL_TREE);
10067 local_define_builtin ("__builtin_return_address", ftype,
10068 BUILT_IN_RETURN_ADDRESS,
10069 "__builtin_return_address",
10070 ECF_NOTHROW);
10071 }
10072
10073 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10074 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10075 {
10076 ftype = build_function_type_list (void_type_node, ptr_type_node,
10077 ptr_type_node, NULL_TREE);
10078 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10079 local_define_builtin ("__cyg_profile_func_enter", ftype,
10080 BUILT_IN_PROFILE_FUNC_ENTER,
10081 "__cyg_profile_func_enter", 0);
10082 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10083 local_define_builtin ("__cyg_profile_func_exit", ftype,
10084 BUILT_IN_PROFILE_FUNC_EXIT,
10085 "__cyg_profile_func_exit", 0);
10086 }
10087
10088 /* The exception object and filter values from the runtime. The argument
10089 must be zero before exception lowering, i.e. from the front end. After
10090 exception lowering, it will be the region number for the exception
10091 landing pad. These functions are PURE instead of CONST to prevent
10092 them from being hoisted past the exception edge that will initialize
10093 their values in the landing pad. */
10094 ftype = build_function_type_list (ptr_type_node,
10095 integer_type_node, NULL_TREE);
10096 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10097 /* Only use TM_PURE if we have TM language support. */
10098 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10099 ecf_flags |= ECF_TM_PURE;
10100 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10101 "__builtin_eh_pointer", ecf_flags);
10102
10103 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10104 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10105 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10106 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10107
10108 ftype = build_function_type_list (void_type_node,
10109 integer_type_node, integer_type_node,
10110 NULL_TREE);
10111 local_define_builtin ("__builtin_eh_copy_values", ftype,
10112 BUILT_IN_EH_COPY_VALUES,
10113 "__builtin_eh_copy_values", ECF_NOTHROW);
10114
10115 /* Complex multiplication and division. These are handled as builtins
10116 rather than optabs because emit_library_call_value doesn't support
10117 complex. Further, we can do slightly better with folding these
10118 beasties if the real and imaginary parts of the arguments are separate. */
10119 {
10120 int mode;
10121
10122 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10123 {
10124 char mode_name_buf[4], *q;
10125 const char *p;
10126 enum built_in_function mcode, dcode;
10127 tree type, inner_type;
10128 const char *prefix = "__";
10129
10130 if (targetm.libfunc_gnu_prefix)
10131 prefix = "__gnu_";
10132
10133 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10134 if (type == NULL)
10135 continue;
10136 inner_type = TREE_TYPE (type);
10137
10138 ftype = build_function_type_list (type, inner_type, inner_type,
10139 inner_type, inner_type, NULL_TREE);
10140
10141 mcode = ((enum built_in_function)
10142 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10143 dcode = ((enum built_in_function)
10144 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10145
10146 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10147 *q = TOLOWER (*p);
10148 *q = '\0';
10149
10150 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10151 NULL);
10152 local_define_builtin (built_in_names[mcode], ftype, mcode,
10153 built_in_names[mcode],
10154 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10155
10156 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10157 NULL);
10158 local_define_builtin (built_in_names[dcode], ftype, dcode,
10159 built_in_names[dcode],
10160 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10161 }
10162 }
10163
10164 init_internal_fns ();
10165 }
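/* Worked example (not part of the original source): for SCmode, i.e.
   complex float, the loop above lower-cases the mode name to "sc" and
   registers "__mulsc3" and "__divsc3" (or "__gnu_mulsc3" and
   "__gnu_divsc3" when targetm.libfunc_gnu_prefix is set), the usual
   libgcc entry points for complex float multiplication and division.  */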
10166
10167 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10168 better way.
10169
10170 If we requested a pointer to a vector, build up the pointers that
10171 we stripped off while looking for the inner type. Similarly for
10172 return values from functions.
10173
10174 The argument TYPE is the top of the chain, and BOTTOM is the
10175 new type which we will point to. */
10176
10177 tree
10178 reconstruct_complex_type (tree type, tree bottom)
10179 {
10180 tree inner, outer;
10181
10182 if (TREE_CODE (type) == POINTER_TYPE)
10183 {
10184 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10185 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10186 TYPE_REF_CAN_ALIAS_ALL (type));
10187 }
10188 else if (TREE_CODE (type) == REFERENCE_TYPE)
10189 {
10190 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10191 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10192 TYPE_REF_CAN_ALIAS_ALL (type));
10193 }
10194 else if (TREE_CODE (type) == ARRAY_TYPE)
10195 {
10196 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10197 outer = build_array_type (inner, TYPE_DOMAIN (type));
10198 }
10199 else if (TREE_CODE (type) == FUNCTION_TYPE)
10200 {
10201 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10202 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10203 }
10204 else if (TREE_CODE (type) == METHOD_TYPE)
10205 {
10206 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10207 /* The build_method_type_directly() routine prepends 'this' to the argument
10208 list, so we must compensate by getting rid of it. */
10209 outer
10210 = build_method_type_directly
10211 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10212 inner,
10213 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10214 }
10215 else if (TREE_CODE (type) == OFFSET_TYPE)
10216 {
10217 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10218 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10219 }
10220 else
10221 return bottom;
10222
10223 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10224 TYPE_QUALS (type));
10225 }
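/* Illustrative sketch (not part of the original source): if TYPE is a
   plain 'float *' pointer type and BOTTOM is a vector type such as one
   built by build_vector_type (float_type_node, 4), the result is a
   pointer to that vector type, with TYPE's qualifiers and attributes
   re-applied by build_type_attribute_qual_variant.  */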
10226
10227 /* Returns a vector tree node given an integer or vector machine mode and
10228 the inner type. */
10229 tree
10230 build_vector_type_for_mode (tree innertype, machine_mode mode)
10231 {
10232 int nunits;
10233
10234 switch (GET_MODE_CLASS (mode))
10235 {
10236 case MODE_VECTOR_INT:
10237 case MODE_VECTOR_FLOAT:
10238 case MODE_VECTOR_FRACT:
10239 case MODE_VECTOR_UFRACT:
10240 case MODE_VECTOR_ACCUM:
10241 case MODE_VECTOR_UACCUM:
10242 nunits = GET_MODE_NUNITS (mode);
10243 break;
10244
10245 case MODE_INT:
10246 /* Check that there are no leftover bits. */
10247 gcc_assert (GET_MODE_BITSIZE (mode)
10248 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10249
10250 nunits = GET_MODE_BITSIZE (mode)
10251 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10252 break;
10253
10254 default:
10255 gcc_unreachable ();
10256 }
10257
10258 return make_vector_type (innertype, nunits, mode);
10259 }
10260
10261 /* Similarly, but takes the inner type and number of units, which must be
10262 a power of two. */
10263
10264 tree
10265 build_vector_type (tree innertype, int nunits)
10266 {
10267 return make_vector_type (innertype, nunits, VOIDmode);
10268 }
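/* For example (a sketch, not part of the original source), a
   four-element vector of the target's float type can be requested with

       tree v4sf = build_vector_type (float_type_node, 4);

   Passing VOIDmode lets layout_type choose a suitable vector mode, and
   the hash-consing in make_vector_type means repeated requests return
   the same main variant.  */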
10269
10270 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10271
10272 tree
10273 build_opaque_vector_type (tree innertype, int nunits)
10274 {
10275 tree t = make_vector_type (innertype, nunits, VOIDmode);
10276 tree cand;
10277 /* We always build the non-opaque variant before the opaque one,
10278 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10279 cand = TYPE_NEXT_VARIANT (t);
10280 if (cand
10281 && TYPE_VECTOR_OPAQUE (cand)
10282 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10283 return cand;
10284 /* Otherwise build a variant type and make sure to queue it after
10285 the non-opaque type. */
10286 cand = build_distinct_type_copy (t);
10287 TYPE_VECTOR_OPAQUE (cand) = true;
10288 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10289 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10290 TYPE_NEXT_VARIANT (t) = cand;
10291 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10292 return cand;
10293 }
10294
10295
10296 /* Given an initializer INIT, return TRUE if INIT is zero or some
10297 aggregate of zeros. Otherwise return FALSE. */
10298 bool
10299 initializer_zerop (const_tree init)
10300 {
10301 tree elt;
10302
10303 STRIP_NOPS (init);
10304
10305 switch (TREE_CODE (init))
10306 {
10307 case INTEGER_CST:
10308 return integer_zerop (init);
10309
10310 case REAL_CST:
10311 /* ??? Note that this is not correct for C4X float formats. There,
10312 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10313 negative exponent. */
10314 return real_zerop (init)
10315 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10316
10317 case FIXED_CST:
10318 return fixed_zerop (init);
10319
10320 case COMPLEX_CST:
10321 return integer_zerop (init)
10322 || (real_zerop (init)
10323 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10324 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10325
10326 case VECTOR_CST:
10327 {
10328 unsigned i;
10329 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10330 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10331 return false;
10332 return true;
10333 }
10334
10335 case CONSTRUCTOR:
10336 {
10337 unsigned HOST_WIDE_INT idx;
10338
10339 if (TREE_CLOBBER_P (init))
10340 return false;
10341 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10342 if (!initializer_zerop (elt))
10343 return false;
10344 return true;
10345 }
10346
10347 case STRING_CST:
10348 {
10349 int i;
10350
10351 /* We need to loop through all elements to handle cases like
10352 "\0" and "\0foobar". */
10353 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10354 if (TREE_STRING_POINTER (init)[i] != '\0')
10355 return false;
10356
10357 return true;
10358 }
10359
10360 default:
10361 return false;
10362 }
10363 }
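/* A minimal usage sketch (not part of the original source): deciding
   whether a static variable's initializer is all zeros, e.g. so it can
   be placed in .bss.  DECL is a hypothetical VAR_DECL.

       if (DECL_INITIAL (decl) == NULL_TREE
           || initializer_zerop (DECL_INITIAL (decl)))
         ...

   Note that a CONSTRUCTOR clobber is deliberately not treated as zero
   above.  */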
10364
10365 /* Check whether the vector VEC consists of all equal elements and
10366 whether the number of elements corresponds to the type of VEC.
10367 Return the first element of the vector,
10368 or NULL_TREE if the vector is not uniform. */
10369 tree
10370 uniform_vector_p (const_tree vec)
10371 {
10372 tree first, t;
10373 unsigned i;
10374
10375 if (vec == NULL_TREE)
10376 return NULL_TREE;
10377
10378 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10379
10380 if (TREE_CODE (vec) == VECTOR_CST)
10381 {
10382 first = VECTOR_CST_ELT (vec, 0);
10383 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10384 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10385 return NULL_TREE;
10386
10387 return first;
10388 }
10389
10390 else if (TREE_CODE (vec) == CONSTRUCTOR)
10391 {
10392 first = error_mark_node;
10393
10394 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10395 {
10396 if (i == 0)
10397 {
10398 first = t;
10399 continue;
10400 }
10401 if (!operand_equal_p (first, t, 0))
10402 return NULL_TREE;
10403 }
10404 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10405 return NULL_TREE;
10406
10407 return first;
10408 }
10409
10410 return NULL_TREE;
10411 }
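/* Example sketch (not part of the original source): a splat constant
   built with

       tree elt = build_int_cst (integer_type_node, 7);
       tree vec = build_vector_from_val (build_vector_type (integer_type_node, 4),
                                         elt);

   makes uniform_vector_p (vec) return the (equal) first element, whereas
   a VECTOR_CST or CONSTRUCTOR with any differing element yields
   NULL_TREE.  */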
10412
10413 /* Build an empty statement at location LOC. */
10414
10415 tree
10416 build_empty_stmt (location_t loc)
10417 {
10418 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10419 SET_EXPR_LOCATION (t, loc);
10420 return t;
10421 }
10422
10423
10424 /* Build an OpenMP clause with code CODE. LOC is the location of the
10425 clause. */
10426
10427 tree
10428 build_omp_clause (location_t loc, enum omp_clause_code code)
10429 {
10430 tree t;
10431 int size, length;
10432
10433 length = omp_clause_num_ops[code];
10434 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10435
10436 record_node_allocation_statistics (OMP_CLAUSE, size);
10437
10438 t = (tree) ggc_internal_alloc (size);
10439 memset (t, 0, size);
10440 TREE_SET_CODE (t, OMP_CLAUSE);
10441 OMP_CLAUSE_SET_CODE (t, code);
10442 OMP_CLAUSE_LOCATION (t) = loc;
10443
10444 return t;
10445 }
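/* Sketch (not part of the original source): building a "private (x)"
   clause at location LOC for a hypothetical VAR_DECL X:

       tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
       OMP_CLAUSE_DECL (c) = x;

   The clause is then linked onto its construct through
   OMP_CLAUSE_CHAIN.  */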
10446
10447 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10448 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10449 Except for the CODE and operand count field, other storage for the
10450 object is initialized to zeros. */
10451
10452 tree
10453 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10454 {
10455 tree t;
10456 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10457
10458 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10459 gcc_assert (len >= 1);
10460
10461 record_node_allocation_statistics (code, length);
10462
10463 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10464
10465 TREE_SET_CODE (t, code);
10466
10467 /* Can't use TREE_OPERAND to store the length because if checking is
10468 enabled, it will try to check the length before we store it. :-P */
10469 t->exp.operands[0] = build_int_cst (sizetype, len);
10470
10471 return t;
10472 }
10473
10474 /* Helper function for build_call_* functions; build a CALL_EXPR with
10475 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10476 the argument slots. */
10477
10478 static tree
10479 build_call_1 (tree return_type, tree fn, int nargs)
10480 {
10481 tree t;
10482
10483 t = build_vl_exp (CALL_EXPR, nargs + 3);
10484 TREE_TYPE (t) = return_type;
10485 CALL_EXPR_FN (t) = fn;
10486 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10487
10488 return t;
10489 }
10490
10491 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10492 FN and a null static chain slot. NARGS is the number of call arguments
10493 which are specified as "..." arguments. */
10494
10495 tree
10496 build_call_nary (tree return_type, tree fn, int nargs, ...)
10497 {
10498 tree ret;
10499 va_list args;
10500 va_start (args, nargs);
10501 ret = build_call_valist (return_type, fn, nargs, args);
10502 va_end (args);
10503 return ret;
10504 }
10505
10506 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10507 FN and a null static chain slot. NARGS is the number of call arguments
10508 which are specified as a va_list ARGS. */
10509
10510 tree
10511 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10512 {
10513 tree t;
10514 int i;
10515
10516 t = build_call_1 (return_type, fn, nargs);
10517 for (i = 0; i < nargs; i++)
10518 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10519 process_call_operands (t);
10520 return t;
10521 }
10522
10523 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10524 FN and a null static chain slot. NARGS is the number of call arguments
10525 which are specified as a tree array ARGS. */
10526
10527 tree
10528 build_call_array_loc (location_t loc, tree return_type, tree fn,
10529 int nargs, const tree *args)
10530 {
10531 tree t;
10532 int i;
10533
10534 t = build_call_1 (return_type, fn, nargs);
10535 for (i = 0; i < nargs; i++)
10536 CALL_EXPR_ARG (t, i) = args[i];
10537 process_call_operands (t);
10538 SET_EXPR_LOCATION (t, loc);
10539 return t;
10540 }
10541
10542 /* Like build_call_array, but takes a vec. */
10543
10544 tree
10545 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10546 {
10547 tree ret, t;
10548 unsigned int ix;
10549
10550 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10551 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10552 CALL_EXPR_ARG (ret, ix) = t;
10553 process_call_operands (ret);
10554 return ret;
10555 }
10556
10557 /* Conveniently construct a function call expression. FNDECL names the
10558 function to be called and N arguments are passed in the array
10559 ARGARRAY. */
10560
10561 tree
10562 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10563 {
10564 tree fntype = TREE_TYPE (fndecl);
10565 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10566
10567 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10568 }
10569
10570 /* Conveniently construct a function call expression. FNDECL names the
10571 function to be called and the arguments are passed in the vector
10572 VEC. */
10573
10574 tree
10575 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10576 {
10577 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10578 vec_safe_address (vec));
10579 }
10580
10581
10582 /* Conveniently construct a function call expression. FNDECL names the
10583 function to be called, N is the number of arguments, and the "..."
10584 parameters are the argument expressions. */
10585
10586 tree
10587 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10588 {
10589 va_list ap;
10590 tree *argarray = XALLOCAVEC (tree, n);
10591 int i;
10592
10593 va_start (ap, n);
10594 for (i = 0; i < n; i++)
10595 argarray[i] = va_arg (ap, tree);
10596 va_end (ap);
10597 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10598 }
10599
10600 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10601 varargs macros aren't supported by all bootstrap compilers. */
10602
10603 tree
10604 build_call_expr (tree fndecl, int n, ...)
10605 {
10606 va_list ap;
10607 tree *argarray = XALLOCAVEC (tree, n);
10608 int i;
10609
10610 va_start (ap, n);
10611 for (i = 0; i < n; i++)
10612 argarray[i] = va_arg (ap, tree);
10613 va_end (ap);
10614 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10615 }
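/* Usage sketch (not part of the original source): emitting a call to the
   memcpy builtin registered by build_common_builtin_nodes above, where
   DST, SRC and NBYTES are hypothetical trees of the appropriate pointer
   and size types:

       tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
       tree call = build_call_expr_loc (loc, fn, 3, dst, src, nbytes);

   Because this goes through fold_builtin_call_array, the result may be a
   folded expression rather than a CALL_EXPR.  */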
10616
10617 /* Build an internal call expression. This is just like CALL_EXPR, except
10618 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10619 internal function call. */
10620
10621 tree
10622 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10623 tree type, int n, ...)
10624 {
10625 va_list ap;
10626 int i;
10627
10628 tree fn = build_call_1 (type, NULL_TREE, n);
10629 va_start (ap, n);
10630 for (i = 0; i < n; i++)
10631 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10632 va_end (ap);
10633 SET_EXPR_LOCATION (fn, loc);
10634 CALL_EXPR_IFN (fn) = ifn;
10635 return fn;
10636 }
10637
10638 /* Create a new constant string literal and return a char* pointer to it.
10639 The STRING_CST value is the LEN characters at STR. */
10640 tree
10641 build_string_literal (int len, const char *str)
10642 {
10643 tree t, elem, index, type;
10644
10645 t = build_string (len, str);
10646 elem = build_type_variant (char_type_node, 1, 0);
10647 index = build_index_type (size_int (len - 1));
10648 type = build_array_type (elem, index);
10649 TREE_TYPE (t) = type;
10650 TREE_CONSTANT (t) = 1;
10651 TREE_READONLY (t) = 1;
10652 TREE_STATIC (t) = 1;
10653
10654 type = build_pointer_type (elem);
10655 t = build1 (ADDR_EXPR, type,
10656 build4 (ARRAY_REF, elem,
10657 t, integer_zero_node, NULL_TREE, NULL_TREE));
10658 return t;
10659 }
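/* For example (sketch, not part of the original source):

       tree fmt = build_string_literal (sizeof "%d\n", "%d\n");

   produces an ADDR_EXPR of pointer-to-const-char type that can be passed
   directly as the format argument of a call built with
   build_call_expr.  */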
10660
10661
10662
10663 /* Return true if T (assumed to be a DECL) must be assigned a memory
10664 location. */
10665
10666 bool
10667 needs_to_live_in_memory (const_tree t)
10668 {
10669 return (TREE_ADDRESSABLE (t)
10670 || is_global_var (t)
10671 || (TREE_CODE (t) == RESULT_DECL
10672 && !DECL_BY_REFERENCE (t)
10673 && aggregate_value_p (t, current_function_decl)));
10674 }
10675
10676 /* Return the value of the constant X, sign-extended from the precision of its type. */
10677
10678 HOST_WIDE_INT
10679 int_cst_value (const_tree x)
10680 {
10681 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10682 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10683
10684 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10685 gcc_assert (cst_and_fits_in_hwi (x));
10686
10687 if (bits < HOST_BITS_PER_WIDE_INT)
10688 {
10689 bool negative = ((val >> (bits - 1)) & 1) != 0;
10690 if (negative)
10691 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10692 else
10693 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10694 }
10695
10696 return val;
10697 }
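/* Worked example (not part of the original source): for an 8-bit
   constant whose value bits are 0xff, the sign-extension above yields
   the HOST_WIDE_INT -1 regardless of whether the constant's type is
   signed or unsigned, since the extension is done purely from the
   type's precision.  */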
10698
10699 /* If TYPE is an integral or pointer type, return an integer type with
10700 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10701 if TYPE is already an integer type of signedness UNSIGNEDP. */
10702
10703 tree
10704 signed_or_unsigned_type_for (int unsignedp, tree type)
10705 {
10706 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10707 return type;
10708
10709 if (TREE_CODE (type) == VECTOR_TYPE)
10710 {
10711 tree inner = TREE_TYPE (type);
10712 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10713 if (!inner2)
10714 return NULL_TREE;
10715 if (inner == inner2)
10716 return type;
10717 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10718 }
10719
10720 if (!INTEGRAL_TYPE_P (type)
10721 && !POINTER_TYPE_P (type)
10722 && TREE_CODE (type) != OFFSET_TYPE)
10723 return NULL_TREE;
10724
10725 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10726 }
10727
10728 /* If TYPE is an integral or pointer type, return an integer type with
10729 the same precision which is unsigned, or itself if TYPE is already an
10730 unsigned integer type. */
10731
10732 tree
10733 unsigned_type_for (tree type)
10734 {
10735 return signed_or_unsigned_type_for (1, type);
10736 }
10737
10738 /* If TYPE is an integral or pointer type, return an integer type with
10739 the same precision which is signed, or itself if TYPE is already a
10740 signed integer type. */
10741
10742 tree
10743 signed_type_for (tree type)
10744 {
10745 return signed_or_unsigned_type_for (0, type);
10746 }
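/* Example sketch (not part of the original source):

       tree u = unsigned_type_for (long_integer_type_node);

   returns an unsigned integer type of the same precision via
   build_nonstandard_integer_type, and unsigned_type_for (ptr_type_node)
   likewise yields an unsigned integer type as wide as a pointer; for
   vector types the transformation is applied to the element type.  */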
10747
10748 /* If TYPE is a vector type, return a signed integer vector type with the
10749 same width and number of subparts. Otherwise return boolean_type_node. */
10750
10751 tree
10752 truth_type_for (tree type)
10753 {
10754 if (TREE_CODE (type) == VECTOR_TYPE)
10755 {
10756 tree elem = lang_hooks.types.type_for_size
10757 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10758 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10759 }
10760 else
10761 return boolean_type_node;
10762 }
10763
10764 /* Returns the largest value obtainable by casting something in INNER type to
10765 OUTER type. */
10766
10767 tree
10768 upper_bound_in_type (tree outer, tree inner)
10769 {
10770 unsigned int det = 0;
10771 unsigned oprec = TYPE_PRECISION (outer);
10772 unsigned iprec = TYPE_PRECISION (inner);
10773 unsigned prec;
10774
10775 /* Compute a unique number for every combination. */
10776 det |= (oprec > iprec) ? 4 : 0;
10777 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10778 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10779
10780 /* Determine the exponent to use. */
10781 switch (det)
10782 {
10783 case 0:
10784 case 1:
10785 /* oprec <= iprec, outer: signed, inner: don't care. */
10786 prec = oprec - 1;
10787 break;
10788 case 2:
10789 case 3:
10790 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10791 prec = oprec;
10792 break;
10793 case 4:
10794 /* oprec > iprec, outer: signed, inner: signed. */
10795 prec = iprec - 1;
10796 break;
10797 case 5:
10798 /* oprec > iprec, outer: signed, inner: unsigned. */
10799 prec = iprec;
10800 break;
10801 case 6:
10802 /* oprec > iprec, outer: unsigned, inner: signed. */
10803 prec = oprec;
10804 break;
10805 case 7:
10806 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10807 prec = iprec;
10808 break;
10809 default:
10810 gcc_unreachable ();
10811 }
10812
10813 return wide_int_to_tree (outer,
10814 wi::mask (prec, false, TYPE_PRECISION (outer)));
10815 }
10816
10817 /* Returns the smallest value obtainable by casting something in INNER type to
10818 OUTER type. */
10819
10820 tree
10821 lower_bound_in_type (tree outer, tree inner)
10822 {
10823 unsigned oprec = TYPE_PRECISION (outer);
10824 unsigned iprec = TYPE_PRECISION (inner);
10825
10826 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10827 and obtain 0. */
10828 if (TYPE_UNSIGNED (outer)
10829 /* If we are widening something of an unsigned type, OUTER type
10830 contains all values of INNER type. In particular, both INNER
10831 and OUTER types have zero in common. */
10832 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10833 return build_int_cst (outer, 0);
10834 else
10835 {
10836 /* If we are widening a signed type to another signed type, we
10837 want to obtain -2^(iprec-1). If we are keeping the
10838 precision or narrowing to a signed type, we want to obtain
10839 -2^(oprec-1). */
10840 unsigned prec = oprec > iprec ? iprec : oprec;
10841 return wide_int_to_tree (outer,
10842 wi::mask (prec - 1, true,
10843 TYPE_PRECISION (outer)));
10844 }
10845 }
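/* Worked example (not part of the original source): casting a signed
   char to a 32-bit signed int can yield at most 127 (case 4 above,
   prec = iprec - 1) and at least -128, while casting a 32-bit signed int
   to an unsigned char can yield anything from 0 (OUTER unsigned) up to
   255 (case 2, prec = oprec).  */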
10846
10847 /* Return nonzero if two operands that are suitable for PHI nodes are
10848 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10849 SSA_NAME or invariant. Note that this is strictly an optimization.
10850 That is, callers of this function can directly call operand_equal_p
10851 and get the same result, only slower. */
10852
10853 int
10854 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10855 {
10856 if (arg0 == arg1)
10857 return 1;
10858 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10859 return 0;
10860 return operand_equal_p (arg0, arg1, 0);
10861 }
10862
10863 /* Returns the number of zeros at the end of the binary representation of X. */
10864
10865 tree
10866 num_ending_zeros (const_tree x)
10867 {
10868 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10869 }
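/* For instance (not part of the original source), for X equal to
   build_int_cst (integer_type_node, 40) -- binary 101000 -- the result
   is build_int_cst (integer_type_node, 3), since wi::ctz counts the
   three trailing zero bits.  */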
10870
10871
10872 #define WALK_SUBTREE(NODE) \
10873 do \
10874 { \
10875 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10876 if (result) \
10877 return result; \
10878 } \
10879 while (0)
10880
10881 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10882 to be walked whenever a type is seen in the tree. The rest of the operands
10883 and the return value are as for walk_tree. */
10884
10885 static tree
10886 walk_type_fields (tree type, walk_tree_fn func, void *data,
10887 hash_set<tree> *pset, walk_tree_lh lh)
10888 {
10889 tree result = NULL_TREE;
10890
10891 switch (TREE_CODE (type))
10892 {
10893 case POINTER_TYPE:
10894 case REFERENCE_TYPE:
10895 case VECTOR_TYPE:
10896 /* We have to worry about mutually recursive pointers. These can't
10897 be written in C. They can in Ada. It's pathological, but
10898 there's an ACATS test (c38102a) that checks it. Deal with this
10899 by checking if we're pointing to another pointer, that one
10900 points to another pointer, that one does too, and we have no htab.
10901 If so, get a hash table. We check three levels deep to avoid
10902 the cost of the hash table if we don't need one. */
10903 if (POINTER_TYPE_P (TREE_TYPE (type))
10904 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10905 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10906 && !pset)
10907 {
10908 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10909 func, data);
10910 if (result)
10911 return result;
10912
10913 break;
10914 }
10915
10916 /* ... fall through ... */
10917
10918 case COMPLEX_TYPE:
10919 WALK_SUBTREE (TREE_TYPE (type));
10920 break;
10921
10922 case METHOD_TYPE:
10923 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10924
10925 /* Fall through. */
10926
10927 case FUNCTION_TYPE:
10928 WALK_SUBTREE (TREE_TYPE (type));
10929 {
10930 tree arg;
10931
10932 /* We never want to walk into default arguments. */
10933 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10934 WALK_SUBTREE (TREE_VALUE (arg));
10935 }
10936 break;
10937
10938 case ARRAY_TYPE:
10939 /* Don't follow this node's type if it is a pointer, for fear that
10940 we'll have infinite recursion. If we have a PSET, then we
10941 need not fear. */
10942 if (pset
10943 || (!POINTER_TYPE_P (TREE_TYPE (type))
10944 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10945 WALK_SUBTREE (TREE_TYPE (type));
10946 WALK_SUBTREE (TYPE_DOMAIN (type));
10947 break;
10948
10949 case OFFSET_TYPE:
10950 WALK_SUBTREE (TREE_TYPE (type));
10951 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10952 break;
10953
10954 default:
10955 break;
10956 }
10957
10958 return NULL_TREE;
10959 }
10960
10961 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10962 called with the DATA and the address of each sub-tree. If FUNC returns a
10963 non-NULL value, the traversal is stopped, and the value returned by FUNC
10964 is returned. If PSET is non-NULL it is used to record the nodes visited,
10965 and to avoid visiting a node more than once. */
10966
10967 tree
10968 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10969 hash_set<tree> *pset, walk_tree_lh lh)
10970 {
10971 enum tree_code code;
10972 int walk_subtrees;
10973 tree result;
10974
10975 #define WALK_SUBTREE_TAIL(NODE) \
10976 do \
10977 { \
10978 tp = & (NODE); \
10979 goto tail_recurse; \
10980 } \
10981 while (0)
10982
10983 tail_recurse:
10984 /* Skip empty subtrees. */
10985 if (!*tp)
10986 return NULL_TREE;
10987
10988 /* Don't walk the same tree twice, if the user has requested
10989 that we avoid doing so. */
10990 if (pset && pset->add (*tp))
10991 return NULL_TREE;
10992
10993 /* Call the function. */
10994 walk_subtrees = 1;
10995 result = (*func) (tp, &walk_subtrees, data);
10996
10997 /* If we found something, return it. */
10998 if (result)
10999 return result;
11000
11001 code = TREE_CODE (*tp);
11002
11003 /* Even if we didn't, FUNC may have decided that there was nothing
11004 interesting below this point in the tree. */
11005 if (!walk_subtrees)
11006 {
11007 /* But we still need to check our siblings. */
11008 if (code == TREE_LIST)
11009 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11010 else if (code == OMP_CLAUSE)
11011 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11012 else
11013 return NULL_TREE;
11014 }
11015
11016 if (lh)
11017 {
11018 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11019 if (result || !walk_subtrees)
11020 return result;
11021 }
11022
11023 switch (code)
11024 {
11025 case ERROR_MARK:
11026 case IDENTIFIER_NODE:
11027 case INTEGER_CST:
11028 case REAL_CST:
11029 case FIXED_CST:
11030 case VECTOR_CST:
11031 case STRING_CST:
11032 case BLOCK:
11033 case PLACEHOLDER_EXPR:
11034 case SSA_NAME:
11035 case FIELD_DECL:
11036 case RESULT_DECL:
11037 /* None of these have subtrees other than those already walked
11038 above. */
11039 break;
11040
11041 case TREE_LIST:
11042 WALK_SUBTREE (TREE_VALUE (*tp));
11043 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11044 break;
11045
11046 case TREE_VEC:
11047 {
11048 int len = TREE_VEC_LENGTH (*tp);
11049
11050 if (len == 0)
11051 break;
11052
11053 /* Walk all elements but the first. */
11054 while (--len)
11055 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11056
11057 /* Now walk the first one as a tail call. */
11058 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11059 }
11060
11061 case COMPLEX_CST:
11062 WALK_SUBTREE (TREE_REALPART (*tp));
11063 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11064
11065 case CONSTRUCTOR:
11066 {
11067 unsigned HOST_WIDE_INT idx;
11068 constructor_elt *ce;
11069
11070 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11071 idx++)
11072 WALK_SUBTREE (ce->value);
11073 }
11074 break;
11075
11076 case SAVE_EXPR:
11077 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11078
11079 case BIND_EXPR:
11080 {
11081 tree decl;
11082 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11083 {
11084 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11085 into declarations that are just mentioned, rather than
11086 declared; they don't really belong to this part of the tree.
11087 And, we can see cycles: the initializer for a declaration
11088 can refer to the declaration itself. */
11089 WALK_SUBTREE (DECL_INITIAL (decl));
11090 WALK_SUBTREE (DECL_SIZE (decl));
11091 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11092 }
11093 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11094 }
11095
11096 case STATEMENT_LIST:
11097 {
11098 tree_stmt_iterator i;
11099 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11100 WALK_SUBTREE (*tsi_stmt_ptr (i));
11101 }
11102 break;
11103
11104 case OMP_CLAUSE:
11105 switch (OMP_CLAUSE_CODE (*tp))
11106 {
11107 case OMP_CLAUSE_PRIVATE:
11108 case OMP_CLAUSE_SHARED:
11109 case OMP_CLAUSE_FIRSTPRIVATE:
11110 case OMP_CLAUSE_COPYIN:
11111 case OMP_CLAUSE_COPYPRIVATE:
11112 case OMP_CLAUSE_FINAL:
11113 case OMP_CLAUSE_IF:
11114 case OMP_CLAUSE_NUM_THREADS:
11115 case OMP_CLAUSE_SCHEDULE:
11116 case OMP_CLAUSE_UNIFORM:
11117 case OMP_CLAUSE_DEPEND:
11118 case OMP_CLAUSE_NUM_TEAMS:
11119 case OMP_CLAUSE_THREAD_LIMIT:
11120 case OMP_CLAUSE_DEVICE:
11121 case OMP_CLAUSE_DIST_SCHEDULE:
11122 case OMP_CLAUSE_SAFELEN:
11123 case OMP_CLAUSE_SIMDLEN:
11124 case OMP_CLAUSE__LOOPTEMP_:
11125 case OMP_CLAUSE__SIMDUID_:
11126 case OMP_CLAUSE__CILK_FOR_COUNT_:
11127 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11128 /* FALLTHRU */
11129
11130 case OMP_CLAUSE_NOWAIT:
11131 case OMP_CLAUSE_ORDERED:
11132 case OMP_CLAUSE_DEFAULT:
11133 case OMP_CLAUSE_UNTIED:
11134 case OMP_CLAUSE_MERGEABLE:
11135 case OMP_CLAUSE_PROC_BIND:
11136 case OMP_CLAUSE_INBRANCH:
11137 case OMP_CLAUSE_NOTINBRANCH:
11138 case OMP_CLAUSE_FOR:
11139 case OMP_CLAUSE_PARALLEL:
11140 case OMP_CLAUSE_SECTIONS:
11141 case OMP_CLAUSE_TASKGROUP:
11142 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11143
11144 case OMP_CLAUSE_LASTPRIVATE:
11145 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11146 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11147 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11148
11149 case OMP_CLAUSE_COLLAPSE:
11150 {
11151 int i;
11152 for (i = 0; i < 3; i++)
11153 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11154 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11155 }
11156
11157 case OMP_CLAUSE_LINEAR:
11158 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11159 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11160 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11161 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11162
11163 case OMP_CLAUSE_ALIGNED:
11164 case OMP_CLAUSE_FROM:
11165 case OMP_CLAUSE_TO:
11166 case OMP_CLAUSE_MAP:
11167 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11168 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11169 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11170
11171 case OMP_CLAUSE_REDUCTION:
11172 {
11173 int i;
11174 for (i = 0; i < 4; i++)
11175 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11176 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11177 }
11178
11179 default:
11180 gcc_unreachable ();
11181 }
11182 break;
11183
11184 case TARGET_EXPR:
11185 {
11186 int i, len;
11187
11188 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11189 But we only want to walk them once. */
11190 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11191 for (i = 0; i < len; ++i)
11192 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11193 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11194 }
11195
11196 case DECL_EXPR:
11197 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11198 defining. We only want to walk into these fields of a type in this
11199 case and not in the general case of a mere reference to the type.
11200
11201 The criterion is as follows: if the field can be an expression, it
11202 must be walked only here. This should be in keeping with the fields
11203 that are directly gimplified in gimplify_type_sizes in order for the
11204 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11205 variable-sized types.
11206
11207 Note that DECLs get walked as part of processing the BIND_EXPR. */
11208 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11209 {
11210 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11211 if (TREE_CODE (*type_p) == ERROR_MARK)
11212 return NULL_TREE;
11213
11214 /* Call the function for the type. See if it returns anything or
11215 doesn't want us to continue. If we are to continue, walk both
11216 the normal fields and those for the declaration case. */
11217 result = (*func) (type_p, &walk_subtrees, data);
11218 if (result || !walk_subtrees)
11219 return result;
11220
11221 /* But do not walk a pointed-to type since it may itself need to
11222 be walked in the declaration case if it isn't anonymous. */
11223 if (!POINTER_TYPE_P (*type_p))
11224 {
11225 result = walk_type_fields (*type_p, func, data, pset, lh);
11226 if (result)
11227 return result;
11228 }
11229
11230 /* If this is a record type, also walk the fields. */
11231 if (RECORD_OR_UNION_TYPE_P (*type_p))
11232 {
11233 tree field;
11234
11235 for (field = TYPE_FIELDS (*type_p); field;
11236 field = DECL_CHAIN (field))
11237 {
11238 /* We'd like to look at the type of the field, but we can
11239 easily get infinite recursion. So assume it's pointed
11240 to elsewhere in the tree. Also, ignore things that
11241 aren't fields. */
11242 if (TREE_CODE (field) != FIELD_DECL)
11243 continue;
11244
11245 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11246 WALK_SUBTREE (DECL_SIZE (field));
11247 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11248 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11249 WALK_SUBTREE (DECL_QUALIFIER (field));
11250 }
11251 }
11252
11253 /* Same for scalar types. */
11254 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11255 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11256 || TREE_CODE (*type_p) == INTEGER_TYPE
11257 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11258 || TREE_CODE (*type_p) == REAL_TYPE)
11259 {
11260 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11261 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11262 }
11263
11264 WALK_SUBTREE (TYPE_SIZE (*type_p));
11265 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11266 }
11267 /* FALLTHRU */
11268
11269 default:
11270 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11271 {
11272 int i, len;
11273
11274 /* Walk over all the sub-trees of this operand. */
11275 len = TREE_OPERAND_LENGTH (*tp);
11276
11277 /* Go through the subtrees. We need to do this in forward order so
11278 that the scope of a FOR_EXPR is handled properly. */
11279 if (len)
11280 {
11281 for (i = 0; i < len - 1; ++i)
11282 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11283 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11284 }
11285 }
11286 /* If this is a type, walk the needed fields in the type. */
11287 else if (TYPE_P (*tp))
11288 return walk_type_fields (*tp, func, data, pset, lh);
11289 break;
11290 }
11291
11292 /* We didn't find what we were looking for. */
11293 return NULL_TREE;
11294
11295 #undef WALK_SUBTREE_TAIL
11296 }
11297 #undef WALK_SUBTREE
11298
11299 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11300
11301 tree
11302 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11303 walk_tree_lh lh)
11304 {
11305 tree result;
11306
11307 hash_set<tree> pset;
11308 result = walk_tree_1 (tp, func, data, &pset, lh);
11309 return result;
11310 }
11311
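
/* A minimal illustrative sketch (kept under #if 0, not built): a
   walk_tree_fn callback that counts CALL_EXPRs in an expression.  The
   names count_calls_r and count_calls are hypothetical and not part of
   this file.  */
#if 0
static tree
count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(unsigned *) data;
  return NULL_TREE;	/* NULL_TREE means keep walking.  */
}

static unsigned
count_calls (tree expr)
{
  unsigned n = 0;
  /* The duplicate-free variant visits each shared subtree only once.  */
  walk_tree_without_duplicates (&expr, count_calls_r, &n);
  return n;
}
#endif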
11312
11313 tree
11314 tree_block (tree t)
11315 {
11316 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11317
11318 if (IS_EXPR_CODE_CLASS (c))
11319 return LOCATION_BLOCK (t->exp.locus);
11320 gcc_unreachable ();
11321 return NULL;
11322 }
11323
11324 void
11325 tree_set_block (tree t, tree b)
11326 {
11327 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11328
11329 if (IS_EXPR_CODE_CLASS (c))
11330 {
11331 if (b)
11332 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11333 else
11334 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11335 }
11336 else
11337 gcc_unreachable ();
11338 }
11339
11340 /* Create a nameless artificial label and put it in the current
11341 function context. The label has a location of LOC. Returns the
11342 newly created label. */
11343
11344 tree
11345 create_artificial_label (location_t loc)
11346 {
11347 tree lab = build_decl (loc,
11348 LABEL_DECL, NULL_TREE, void_type_node);
11349
11350 DECL_ARTIFICIAL (lab) = 1;
11351 DECL_IGNORED_P (lab) = 1;
11352 DECL_CONTEXT (lab) = current_function_decl;
11353 return lab;
11354 }
11355
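
/* A minimal illustrative sketch (kept under #if 0, not built): lowering
   passes typically create such labels for synthesized control flow,
   e.g. a join point with no user-visible source location.  */
#if 0
tree join_label = create_artificial_label (UNKNOWN_LOCATION);
#endif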
11356 /* Given a tree, try to return a useful variable name that we can use
11357 to prefix a temporary that is being assigned the value of the tree.
11358 E.g. given <temp> = &A, return A. */
11359
11360 const char *
11361 get_name (tree t)
11362 {
11363 tree stripped_decl;
11364
11365 stripped_decl = t;
11366 STRIP_NOPS (stripped_decl);
11367 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11368 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11369 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11370 {
11371 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11372 if (!name)
11373 return NULL;
11374 return IDENTIFIER_POINTER (name);
11375 }
11376 else
11377 {
11378 switch (TREE_CODE (stripped_decl))
11379 {
11380 case ADDR_EXPR:
11381 return get_name (TREE_OPERAND (stripped_decl, 0));
11382 default:
11383 return NULL;
11384 }
11385 }
11386 }
11387
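
/* A minimal illustrative sketch (kept under #if 0, not built): name a
   temporary after the value it receives, so that <tmp> = &a yields a
   temporary prefixed with "a".  The helper name is hypothetical.  */
#if 0
static tree
make_named_tmp (tree rhs)
{
  /* get_name may return NULL; create_tmp_var then falls back to a
     generic prefix.  */
  return create_tmp_var (TREE_TYPE (rhs), get_name (rhs));
}
#endif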
11388 /* Return true if FNTYPE has a variable argument list. */
11389
11390 bool
11391 stdarg_p (const_tree fntype)
11392 {
11393 function_args_iterator args_iter;
11394 tree n = NULL_TREE, t;
11395
11396 if (!fntype)
11397 return false;
11398
11399 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11400 {
11401 n = t;
11402 }
11403
11404 return n != NULL_TREE && n != void_type_node;
11405 }
11406
11407 /* Return true if FNTYPE has a prototype. */
11408
11409 bool
11410 prototype_p (tree fntype)
11411 {
11412 tree t;
11413
11414 gcc_assert (fntype != NULL_TREE);
11415
11416 t = TYPE_ARG_TYPES (fntype);
11417 return (t != NULL_TREE);
11418 }
11419
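
/* A minimal illustrative sketch (kept under #if 0, not built): a
   printf-like declaration "int printf (const char *, ...)" is both
   prototyped and variadic, so both predicates hold for its type.
   The helper and its FNDECL parameter are hypothetical.  */
#if 0
static bool
prototyped_variadic_p (tree fndecl)
{
  tree fntype = TREE_TYPE (fndecl);
  return prototype_p (fntype) && stdarg_p (fntype);
}
#endif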
11420 /* If BLOCK is inlined from an __attribute__((__artificial__))
11421 routine, return a pointer to the location from which it has been
11422 called. */
11423 location_t *
11424 block_nonartificial_location (tree block)
11425 {
11426 location_t *ret = NULL;
11427
11428 while (block && TREE_CODE (block) == BLOCK
11429 && BLOCK_ABSTRACT_ORIGIN (block))
11430 {
11431 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11432
11433 while (TREE_CODE (ao) == BLOCK
11434 && BLOCK_ABSTRACT_ORIGIN (ao)
11435 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11436 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11437
11438 if (TREE_CODE (ao) == FUNCTION_DECL)
11439 {
11440 /* If AO is an artificial inline, point RET to the
11441 call site locus at which it has been inlined and continue
11442 the loop, in case AO's caller is also an artificial
11443 inline. */
11444 if (DECL_DECLARED_INLINE_P (ao)
11445 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11446 ret = &BLOCK_SOURCE_LOCATION (block);
11447 else
11448 break;
11449 }
11450 else if (TREE_CODE (ao) != BLOCK)
11451 break;
11452
11453 block = BLOCK_SUPERCONTEXT (block);
11454 }
11455 return ret;
11456 }
11457
11458
11459 /* If EXP is inlined from an __attribute__((__artificial__))
11460 function, return the location of the original call expression. */
11461
11462 location_t
11463 tree_nonartificial_location (tree exp)
11464 {
11465 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11466
11467 if (loc)
11468 return *loc;
11469 else
11470 return EXPR_LOCATION (exp);
11471 }
11472
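
/* A minimal illustrative sketch (kept under #if 0, not built): a warning
   about code inlined from an always_inline artificial wrapper (e.g. a
   fortified string helper) is better reported at the user's call site.
   The helper and its EXP parameter are hypothetical.  */
#if 0
static void
warn_at_user_call_site (tree exp)
{
  warning_at (tree_nonartificial_location (exp), 0,
	      "problem in code expanded from an artificial inline");
}
#endif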
11473
11474 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11475 and TARGET_OPTION_NODE nodes. */
11476
11477 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11478
11479 static hashval_t
11480 cl_option_hash_hash (const void *x)
11481 {
11482 const_tree const t = (const_tree) x;
11483 const char *p;
11484 size_t i;
11485 size_t len = 0;
11486 hashval_t hash = 0;
11487
11488 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11489 {
11490 p = (const char *)TREE_OPTIMIZATION (t);
11491 len = sizeof (struct cl_optimization);
11492 }
11493
11494 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11495 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11496
11497 else
11498 gcc_unreachable ();
11499
11500 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11501 something else. */
11502 for (i = 0; i < len; i++)
11503 if (p[i])
11504 hash = (hash << 4) ^ ((i << 2) | p[i]);
11505
11506 return hash;
11507 }
11508
11509 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11510 TARGET_OPTION tree node) is the same as that given by *Y, another
11511 node of the same kind. */
11512
11513 static int
11514 cl_option_hash_eq (const void *x, const void *y)
11515 {
11516 const_tree const xt = (const_tree) x;
11517 const_tree const yt = (const_tree) y;
11518 const char *xp;
11519 const char *yp;
11520 size_t len;
11521
11522 if (TREE_CODE (xt) != TREE_CODE (yt))
11523 return 0;
11524
11525 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11526 {
11527 xp = (const char *)TREE_OPTIMIZATION (xt);
11528 yp = (const char *)TREE_OPTIMIZATION (yt);
11529 len = sizeof (struct cl_optimization);
11530 }
11531
11532 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11533 {
11534 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11535 TREE_TARGET_OPTION (yt));
11536 }
11537
11538 else
11539 gcc_unreachable ();
11540
11541 return (memcmp (xp, yp, len) == 0);
11542 }
11543
11544 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11545
11546 tree
11547 build_optimization_node (struct gcc_options *opts)
11548 {
11549 tree t;
11550 void **slot;
11551
11552 /* Use the cache of optimization nodes. */
11553
11554 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11555 opts);
11556
11557 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11558 t = (tree) *slot;
11559 if (!t)
11560 {
11561 /* Insert this one into the hash table. */
11562 t = cl_optimization_node;
11563 *slot = t;
11564
11565 /* Make a new node for next time round. */
11566 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11567 }
11568
11569 return t;
11570 }
11571
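
/* A minimal illustrative sketch (kept under #if 0, not built): snapshot
   the current global optimization options into a shared node, e.g. to
   record them on a function declaration.  The helper and its FNDECL
   parameter are hypothetical.  */
#if 0
static void
record_current_optimization_options (tree fndecl)
{
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (&global_options);
}
#endif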
11572 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11573
11574 tree
11575 build_target_option_node (struct gcc_options *opts)
11576 {
11577 tree t;
11578 void **slot;
11579
11580 /* Use the cache of target option nodes. */
11581
11582 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11583 opts);
11584
11585 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11586 t = (tree) *slot;
11587 if (!t)
11588 {
11589 /* Insert this one into the hash table. */
11590 t = cl_target_option_node;
11591 *slot = t;
11592
11593 /* Make a new node for next time round. */
11594 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11595 }
11596
11597 return t;
11598 }
11599
11600 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11601 Called through htab_traverse. */
11602
11603 static int
11604 prepare_target_option_node_for_pch (void **slot, void *)
11605 {
11606 tree node = (tree) *slot;
11607 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11608 TREE_TARGET_GLOBALS (node) = NULL;
11609 return 1;
11610 }
11611
11612 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11613 so that they aren't saved during PCH writing. */
11614
11615 void
11616 prepare_target_option_nodes_for_pch (void)
11617 {
11618 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11619 NULL);
11620 }
11621
11622 /* Determine the "ultimate origin" of a block. The block may be an inlined
11623 instance of an inlined instance of a block which is local to an inline
11624 function, so we have to trace all of the way back through the origin chain
11625 to find out what sort of node actually served as the original seed for the
11626 given block. */
11627
11628 tree
11629 block_ultimate_origin (const_tree block)
11630 {
11631 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11632
11633 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11634 we're trying to output the abstract instance of this function. */
11635 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11636 return NULL_TREE;
11637
11638 if (immediate_origin == NULL_TREE)
11639 return NULL_TREE;
11640 else
11641 {
11642 tree ret_val;
11643 tree lookahead = immediate_origin;
11644
11645 do
11646 {
11647 ret_val = lookahead;
11648 lookahead = (TREE_CODE (ret_val) == BLOCK
11649 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11650 }
11651 while (lookahead != NULL && lookahead != ret_val);
11652
11653 /* The block's abstract origin chain may not be the *ultimate* origin of
11654 the block. It could lead to a DECL that has an abstract origin set.
11655 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11656 will give us if it has one). Note that DECL's abstract origins are
11657 supposed to be the most distant ancestor (or so decl_ultimate_origin
11658 claims), so we don't need to loop following the DECL origins. */
11659 if (DECL_P (ret_val))
11660 return DECL_ORIGIN (ret_val);
11661
11662 return ret_val;
11663 }
11664 }
11665
11666 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11667 no instruction. */
11668
11669 bool
11670 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11671 {
11672 /* Use precision rather than machine mode when we can, which gives
11673 the correct answer even for submode (bit-field) types. */
11674 if ((INTEGRAL_TYPE_P (outer_type)
11675 || POINTER_TYPE_P (outer_type)
11676 || TREE_CODE (outer_type) == OFFSET_TYPE)
11677 && (INTEGRAL_TYPE_P (inner_type)
11678 || POINTER_TYPE_P (inner_type)
11679 || TREE_CODE (inner_type) == OFFSET_TYPE))
11680 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11681
11682 /* Otherwise fall back on comparing machine modes (e.g. for
11683 aggregate types, floats). */
11684 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11685 }
11686
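
/* A minimal illustrative sketch (kept under #if 0, not built): by the
   precision rule above, a cast between int and unsigned int is a no-op,
   while a cast from int to short is not on typical targets where they
   differ in precision.  */
#if 0
static void
nop_conversion_examples (void)
{
  bool b1 = tree_nop_conversion_p (unsigned_type_node, integer_type_node);
  bool b2 = tree_nop_conversion_p (short_integer_type_node,
				   integer_type_node);
  gcc_assert (b1 && !b2);
}
#endif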
11687 /* Return true iff conversion in EXP generates no instruction. Mark
11688 it inline so that we fully inline into the stripping functions even
11689 though we have two uses of this function. */
11690
11691 static inline bool
11692 tree_nop_conversion (const_tree exp)
11693 {
11694 tree outer_type, inner_type;
11695
11696 if (!CONVERT_EXPR_P (exp)
11697 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11698 return false;
11699 if (TREE_OPERAND (exp, 0) == error_mark_node)
11700 return false;
11701
11702 outer_type = TREE_TYPE (exp);
11703 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11704
11705 if (!inner_type)
11706 return false;
11707
11708 return tree_nop_conversion_p (outer_type, inner_type);
11709 }
11710
11711 /* Return true iff conversion in EXP generates no instruction. Don't
11712 consider conversions changing the signedness. */
11713
11714 static bool
11715 tree_sign_nop_conversion (const_tree exp)
11716 {
11717 tree outer_type, inner_type;
11718
11719 if (!tree_nop_conversion (exp))
11720 return false;
11721
11722 outer_type = TREE_TYPE (exp);
11723 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11724
11725 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11726 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11727 }
11728
11729 /* Strip conversions from EXP according to tree_nop_conversion and
11730 return the resulting expression. */
11731
11732 tree
11733 tree_strip_nop_conversions (tree exp)
11734 {
11735 while (tree_nop_conversion (exp))
11736 exp = TREE_OPERAND (exp, 0);
11737 return exp;
11738 }
11739
11740 /* Strip conversions from EXP according to tree_sign_nop_conversion
11741 and return the resulting expression. */
11742
11743 tree
11744 tree_strip_sign_nop_conversions (tree exp)
11745 {
11746 while (tree_sign_nop_conversion (exp))
11747 exp = TREE_OPERAND (exp, 0);
11748 return exp;
11749 }
11750
11751 /* Strip any floating point extensions from EXP. */
11752 tree
11753 strip_float_extensions (tree exp)
11754 {
11755 tree sub, expt, subt;
11756
11757 /* For a floating point constant, look up the narrowest type that can hold
11758 it properly and handle it like (type)(narrowest_type)constant.
11759 This way we can optimize, for instance, a = a*2.0 where "a" is float
11760 but 2.0 is a double constant. */
11761 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11762 {
11763 REAL_VALUE_TYPE orig;
11764 tree type = NULL;
11765
11766 orig = TREE_REAL_CST (exp);
11767 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11768 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11769 type = float_type_node;
11770 else if (TYPE_PRECISION (TREE_TYPE (exp))
11771 > TYPE_PRECISION (double_type_node)
11772 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11773 type = double_type_node;
11774 if (type)
11775 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11776 }
11777
11778 if (!CONVERT_EXPR_P (exp))
11779 return exp;
11780
11781 sub = TREE_OPERAND (exp, 0);
11782 subt = TREE_TYPE (sub);
11783 expt = TREE_TYPE (exp);
11784
11785 if (!FLOAT_TYPE_P (subt))
11786 return exp;
11787
11788 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11789 return exp;
11790
11791 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11792 return exp;
11793
11794 return strip_float_extensions (sub);
11795 }
11796
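
/* A minimal illustrative sketch (kept under #if 0, not built): for the
   source expression a = a * 2.0 with "a" of type float, stripping float
   extensions from both multiplication operands yields "a" itself and a
   constant representable as float, so the multiply can be done in float.
   The helper and its MULT parameter (a MULT_EXPR whose operands were
   promoted to double) are hypothetical.  */
#if 0
static void
narrow_mult_operands (tree mult, tree *op0, tree *op1)
{
  *op0 = strip_float_extensions (TREE_OPERAND (mult, 0));
  *op1 = strip_float_extensions (TREE_OPERAND (mult, 1));
}
#endif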
11797 /* Strip out all handled components that produce invariant
11798 offsets. */
11799
11800 const_tree
11801 strip_invariant_refs (const_tree op)
11802 {
11803 while (handled_component_p (op))
11804 {
11805 switch (TREE_CODE (op))
11806 {
11807 case ARRAY_REF:
11808 case ARRAY_RANGE_REF:
11809 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11810 || TREE_OPERAND (op, 2) != NULL_TREE
11811 || TREE_OPERAND (op, 3) != NULL_TREE)
11812 return NULL;
11813 break;
11814
11815 case COMPONENT_REF:
11816 if (TREE_OPERAND (op, 2) != NULL_TREE)
11817 return NULL;
11818 break;
11819
11820 default:;
11821 }
11822 op = TREE_OPERAND (op, 0);
11823 }
11824
11825 return op;
11826 }
11827
11828 static GTY(()) tree gcc_eh_personality_decl;
11829
11830 /* Return the GCC personality function decl. */
11831
11832 tree
11833 lhd_gcc_personality (void)
11834 {
11835 if (!gcc_eh_personality_decl)
11836 gcc_eh_personality_decl = build_personality_function ("gcc");
11837 return gcc_eh_personality_decl;
11838 }
11839
11840 /* TARGET is a call target of a GIMPLE call statement
11841 (obtained by gimple_call_fn). Return true if it is an
11842 OBJ_TYPE_REF representing a virtual call to a C++ method,
11843 as opposed to an OBJ_TYPE_REF representing ObjC calls
11844 through a cast, where the middle-end devirtualization machinery
11845 can't apply. */
11846
11847 bool
11848 virtual_method_call_p (tree target)
11849 {
11850 if (TREE_CODE (target) != OBJ_TYPE_REF)
11851 return false;
11852 target = TREE_TYPE (target);
11853 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11854 target = TREE_TYPE (target);
11855 if (TREE_CODE (target) == FUNCTION_TYPE)
11856 return false;
11857 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11858 return true;
11859 }
11860
11861 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11862
11863 tree
11864 obj_type_ref_class (tree ref)
11865 {
11866 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11867 ref = TREE_TYPE (ref);
11868 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11869 ref = TREE_TYPE (ref);
11870 /* We look for the type THIS points to. ObjC also builds
11871 OBJ_TYPE_REF with non-method calls; their first parameter
11872 ID, however, also corresponds to the class type. */
11873 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11874 || TREE_CODE (ref) == FUNCTION_TYPE);
11875 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11876 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11877 return TREE_TYPE (ref);
11878 }
11879
11880 /* Return true if T is in an anonymous namespace. */
11881
11882 bool
11883 type_in_anonymous_namespace_p (const_tree t)
11884 {
11885 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11886 builtin types; those have CONTEXT NULL. */
11887 if (!TYPE_CONTEXT (t))
11888 return false;
11889 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11890 }
11891
11892 /* Try to find a base info of BINFO that would have its field decl at offset
11893 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11894 found, return it; otherwise return NULL_TREE. */
11895
11896 tree
11897 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11898 {
11899 tree type = BINFO_TYPE (binfo);
11900
11901 while (true)
11902 {
11903 HOST_WIDE_INT pos, size;
11904 tree fld;
11905 int i;
11906
11907 if (types_same_for_odr (type, expected_type))
11908 return binfo;
11909 if (offset < 0)
11910 return NULL_TREE;
11911
11912 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11913 {
11914 if (TREE_CODE (fld) != FIELD_DECL)
11915 continue;
11916
11917 pos = int_bit_position (fld);
11918 size = tree_to_uhwi (DECL_SIZE (fld));
11919 if (pos <= offset && (pos + size) > offset)
11920 break;
11921 }
11922 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11923 return NULL_TREE;
11924
11925 if (!DECL_ARTIFICIAL (fld))
11926 {
11927 binfo = TYPE_BINFO (TREE_TYPE (fld));
11928 if (!binfo)
11929 return NULL_TREE;
11930 }
11931 /* Offset 0 indicates the primary base, whose vtable contents are
11932 represented in the binfo for the derived class. */
11933 else if (offset != 0)
11934 {
11935 tree base_binfo, binfo2 = binfo;
11936
11937 /* Find the BINFO corresponding to FLD. This is made a bit harder
11938 by the fact that in virtual inheritance we may need to walk down
11939 the non-virtual inheritance chain. */
11940 while (true)
11941 {
11942 tree containing_binfo = NULL, found_binfo = NULL;
11943 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11944 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11945 {
11946 found_binfo = base_binfo;
11947 break;
11948 }
11949 else
11950 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11951 - tree_to_shwi (BINFO_OFFSET (binfo)))
11952 * BITS_PER_UNIT < pos
11953 /* Rule out types with no virtual methods, or we can get confused
11954 here by zero-sized bases. */
11955 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11956 && (!containing_binfo
11957 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11958 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11959 containing_binfo = base_binfo;
11960 if (found_binfo)
11961 {
11962 binfo = found_binfo;
11963 break;
11964 }
11965 if (!containing_binfo)
11966 return NULL_TREE;
11967 binfo2 = containing_binfo;
11968 }
11969 }
11970
11971 type = TREE_TYPE (fld);
11972 offset -= pos;
11973 }
11974 }
11975
11976 /* Returns true if X is a typedef decl. */
11977
11978 bool
11979 is_typedef_decl (tree x)
11980 {
11981 return (x && TREE_CODE (x) == TYPE_DECL
11982 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11983 }
11984
11985 /* Returns true iff TYPE is a type variant created for a typedef. */
11986
11987 bool
11988 typedef_variant_p (tree type)
11989 {
11990 return is_typedef_decl (TYPE_NAME (type));
11991 }
11992
11993 /* Warn about a use of an identifier which was marked deprecated. */
11994 void
11995 warn_deprecated_use (tree node, tree attr)
11996 {
11997 const char *msg;
11998
11999 if (node == 0 || !warn_deprecated_decl)
12000 return;
12001
12002 if (!attr)
12003 {
12004 if (DECL_P (node))
12005 attr = DECL_ATTRIBUTES (node);
12006 else if (TYPE_P (node))
12007 {
12008 tree decl = TYPE_STUB_DECL (node);
12009 if (decl)
12010 attr = lookup_attribute ("deprecated",
12011 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12012 }
12013 }
12014
12015 if (attr)
12016 attr = lookup_attribute ("deprecated", attr);
12017
12018 if (attr)
12019 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12020 else
12021 msg = NULL;
12022
12023 if (DECL_P (node))
12024 {
12025 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12026 if (msg)
12027 warning (OPT_Wdeprecated_declarations,
12028 "%qD is deprecated (declared at %r%s:%d%R): %s",
12029 node, "locus", xloc.file, xloc.line, msg);
12030 else
12031 warning (OPT_Wdeprecated_declarations,
12032 "%qD is deprecated (declared at %r%s:%d%R)",
12033 node, "locus", xloc.file, xloc.line);
12034 }
12035 else if (TYPE_P (node))
12036 {
12037 tree what = NULL_TREE;
12038 tree decl = TYPE_STUB_DECL (node);
12039
12040 if (TYPE_NAME (node))
12041 {
12042 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12043 what = TYPE_NAME (node);
12044 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12045 && DECL_NAME (TYPE_NAME (node)))
12046 what = DECL_NAME (TYPE_NAME (node));
12047 }
12048
12049 if (decl)
12050 {
12051 expanded_location xloc
12052 = expand_location (DECL_SOURCE_LOCATION (decl));
12053 if (what)
12054 {
12055 if (msg)
12056 warning (OPT_Wdeprecated_declarations,
12057 "%qE is deprecated (declared at %r%s:%d%R): %s",
12058 what, "locus", xloc.file, xloc.line, msg);
12059 else
12060 warning (OPT_Wdeprecated_declarations,
12061 "%qE is deprecated (declared at %r%s:%d%R)",
12062 what, "locus", xloc.file, xloc.line);
12063 }
12064 else
12065 {
12066 if (msg)
12067 warning (OPT_Wdeprecated_declarations,
12068 "type is deprecated (declared at %r%s:%d%R): %s",
12069 "locus", xloc.file, xloc.line, msg);
12070 else
12071 warning (OPT_Wdeprecated_declarations,
12072 "type is deprecated (declared at %r%s:%d%R)",
12073 "locus", xloc.file, xloc.line);
12074 }
12075 }
12076 else
12077 {
12078 if (what)
12079 {
12080 if (msg)
12081 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12082 what, msg);
12083 else
12084 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12085 }
12086 else
12087 {
12088 if (msg)
12089 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12090 msg);
12091 else
12092 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12093 }
12094 }
12095 }
12096 }
12097
12098 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12099 somewhere in it. */
12100
12101 bool
12102 contains_bitfld_component_ref_p (const_tree ref)
12103 {
12104 while (handled_component_p (ref))
12105 {
12106 if (TREE_CODE (ref) == COMPONENT_REF
12107 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12108 return true;
12109 ref = TREE_OPERAND (ref, 0);
12110 }
12111
12112 return false;
12113 }
12114
12115 /* Try to determine whether a TRY_CATCH expression can fall through.
12116 This is a subroutine of block_may_fallthru. */
12117
12118 static bool
12119 try_catch_may_fallthru (const_tree stmt)
12120 {
12121 tree_stmt_iterator i;
12122
12123 /* If the TRY block can fall through, the whole TRY_CATCH can
12124 fall through. */
12125 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12126 return true;
12127
12128 i = tsi_start (TREE_OPERAND (stmt, 1));
12129 switch (TREE_CODE (tsi_stmt (i)))
12130 {
12131 case CATCH_EXPR:
12132 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12133 catch expression and a body. The whole TRY_CATCH may fall
12134 through iff any of the catch bodies falls through. */
12135 for (; !tsi_end_p (i); tsi_next (&i))
12136 {
12137 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12138 return true;
12139 }
12140 return false;
12141
12142 case EH_FILTER_EXPR:
12143 /* The exception filter expression only matters if there is an
12144 exception. If the exception does not match EH_FILTER_TYPES,
12145 we will execute EH_FILTER_FAILURE, and we will fall through
12146 if that falls through. If the exception does match
12147 EH_FILTER_TYPES, the stack unwinder will continue up the
12148 stack, so we will not fall through. We don't know whether we
12149 will throw an exception which matches EH_FILTER_TYPES or not,
12150 so we just ignore EH_FILTER_TYPES and assume that we might
12151 throw an exception which doesn't match. */
12152 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12153
12154 default:
12155 /* This case represents statements to be executed when an
12156 exception occurs. Those statements are implicitly followed
12157 by a RESX statement to resume execution after the exception.
12158 So in this case the TRY_CATCH never falls through. */
12159 return false;
12160 }
12161 }
12162
12163 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12164 need not be 100% accurate; simply be conservative and return true if we
12165 don't know. This is used only to avoid stupidly generating extra code.
12166 If we're wrong, we'll just delete the extra code later. */
12167
12168 bool
12169 block_may_fallthru (const_tree block)
12170 {
12171 /* This CONST_CAST is okay because expr_last returns its argument
12172 unmodified and we assign it to a const_tree. */
12173 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12174
12175 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12176 {
12177 case GOTO_EXPR:
12178 case RETURN_EXPR:
12179 /* Easy cases. If the last statement of the block implies
12180 control transfer, then we can't fall through. */
12181 return false;
12182
12183 case SWITCH_EXPR:
12184 /* If SWITCH_LABELS is set, this is lowered, and represents a
12185 branch to a selected label and hence cannot fall through.
12186 Otherwise SWITCH_BODY is set, and the switch can fall
12187 through. */
12188 return SWITCH_LABELS (stmt) == NULL_TREE;
12189
12190 case COND_EXPR:
12191 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12192 return true;
12193 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12194
12195 case BIND_EXPR:
12196 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12197
12198 case TRY_CATCH_EXPR:
12199 return try_catch_may_fallthru (stmt);
12200
12201 case TRY_FINALLY_EXPR:
12202 /* The finally clause is always executed after the try clause,
12203 so if it does not fall through, then the try-finally will not
12204 fall through. Otherwise, if the try clause does not fall
12205 through, then when the finally clause falls through it will
12206 resume execution wherever the try clause was going. So the
12207 whole try-finally will only fall through if both the try
12208 clause and the finally clause fall through. */
12209 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12210 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12211
12212 case MODIFY_EXPR:
12213 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12214 stmt = TREE_OPERAND (stmt, 1);
12215 else
12216 return true;
12217 /* FALLTHRU */
12218
12219 case CALL_EXPR:
12220 /* Functions that do not return do not fall through. */
12221 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12222
12223 case CLEANUP_POINT_EXPR:
12224 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12225
12226 case TARGET_EXPR:
12227 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12228
12229 case ERROR_MARK:
12230 return true;
12231
12232 default:
12233 return lang_hooks.block_may_fallthru (stmt);
12234 }
12235 }
12236
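
/* A minimal illustrative sketch (kept under #if 0, not built): a body
   such as
     if (c) return 1; else x = 2;
   may fall through via the else arm, so a front end keeps whatever
   statements follow it; a body ending in return or goto does not.
   The helper and its BODY parameter are hypothetical.  */
#if 0
static bool
keep_following_code_p (tree body)
{
  return block_may_fallthru (body);
}
#endif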
12237 /* True if we are using EH to handle cleanups. */
12238 static bool using_eh_for_cleanups_flag = false;
12239
12240 /* This routine is called from front ends to indicate that EH should be used for
12241 cleanups. */
12242 void
12243 using_eh_for_cleanups (void)
12244 {
12245 using_eh_for_cleanups_flag = true;
12246 }
12247
12248 /* Query whether EH is used for cleanups. */
12249 bool
12250 using_eh_for_cleanups_p (void)
12251 {
12252 return using_eh_for_cleanups_flag;
12253 }
12254
12255 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12256 const char *
12257 get_tree_code_name (enum tree_code code)
12258 {
12259 const char *invalid = "<invalid tree code>";
12260
12261 if (code >= MAX_TREE_CODES)
12262 return invalid;
12263
12264 return tree_code_name[code];
12265 }
12266
12267 /* Drops the TREE_OVERFLOW flag from T. */
12268
12269 tree
12270 drop_tree_overflow (tree t)
12271 {
12272 gcc_checking_assert (TREE_OVERFLOW (t));
12273
12274 /* For tree codes with a sharing machinery re-build the result. */
12275 if (TREE_CODE (t) == INTEGER_CST)
12276 return wide_int_to_tree (TREE_TYPE (t), t);
12277
12278 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12279 and drop the flag. */
12280 t = copy_node (t);
12281 TREE_OVERFLOW (t) = 0;
12282 return t;
12283 }
12284
12285 /* Given a memory reference expression T, return its base address.
12286 The base address of a memory reference expression is the main
12287 object being referenced. For instance, the base address for
12288 'array[i].fld[j]' is 'array'. You can think of this as stripping
12289 away the offset part from a memory address.
12290
12291 This function calls handled_component_p to strip away all the inner
12292 parts of the memory reference until it reaches the base object. */
12293
12294 tree
12295 get_base_address (tree t)
12296 {
12297 while (handled_component_p (t))
12298 t = TREE_OPERAND (t, 0);
12299
12300 if ((TREE_CODE (t) == MEM_REF
12301 || TREE_CODE (t) == TARGET_MEM_REF)
12302 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12303 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12304
12305 /* ??? Either the alias oracle or all callers need to properly deal
12306 with WITH_SIZE_EXPRs before we can look through those. */
12307 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12308 return NULL_TREE;
12309
12310 return t;
12311 }
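
/* A minimal illustrative sketch (kept under #if 0, not built): for a
   reference such as array[i].fld[j], get_base_address peels the
   ARRAY_REFs and the COMPONENT_REF and returns the VAR_DECL for
   "array".  The helper and its REF parameters are hypothetical.  */
#if 0
static bool
ref_bases_match_p (tree ref1, tree ref2)
{
  tree base1 = get_base_address (ref1);
  tree base2 = get_base_address (ref2);
  return base1 && base2 && operand_equal_p (base1, base2, 0);
}
#endif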
12312
12313 #include "gt-tree.h"