1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "hard-reg-set.h"
46 #include "input.h"
47 #include "function.h"
48 #include "obstack.h"
49 #include "toplev.h" /* get_random_seed */
50 #include "inchash.h"
51 #include "filenames.h"
52 #include "output.h"
53 #include "target.h"
54 #include "common/common-target.h"
55 #include "langhooks.h"
56 #include "tree-inline.h"
57 #include "tree-iterator.h"
58 #include "predict.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "basic-block.h"
62 #include "bitmap.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimple-iterator.h"
69 #include "gimplify.h"
70 #include "gimple-ssa.h"
71 #include "hash-map.h"
72 #include "plugin-api.h"
73 #include "ipa-ref.h"
74 #include "cgraph.h"
75 #include "tree-phinodes.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "expr.h"
79 #include "tree-dfa.h"
80 #include "params.h"
81 #include "tree-pass.h"
82 #include "langhooks-def.h"
83 #include "diagnostic.h"
84 #include "tree-diagnostic.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "intl.h"
89 #include "wide-int.h"
90 #include "builtins.h"
91
92 /* Tree code classes. */
93
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
95 #define END_OF_BASE_TREE_CODES tcc_exceptional,
96
97 const enum tree_code_class tree_code_type[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Table indexed by tree code giving number of expression
105 operands beyond the fixed part of the node structure.
106 Not used for types or decls. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
109 #define END_OF_BASE_TREE_CODES 0,
110
111 const unsigned char tree_code_length[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Names of tree components.
119 Used for printing out the tree and error messages. */
120 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
121 #define END_OF_BASE_TREE_CODES "@dummy",
122
123 static const char *const tree_code_name[] = {
124 #include "all-tree.def"
125 };
126
127 #undef DEFTREECODE
128 #undef END_OF_BASE_TREE_CODES
129
130 /* Each tree code class has an associated string representation.
131 These must correspond to the tree_code_class entries. */
132
133 const char *const tree_code_class_strings[] =
134 {
135 "exceptional",
136 "constant",
137 "type",
138 "declaration",
139 "reference",
140 "comparison",
141 "unary",
142 "binary",
143 "statement",
144 "vl_exp",
145 "expression"
146 };
147
148 /* obstack.[ch] explicitly declined to prototype this. */
149 extern int _obstack_allocated_p (struct obstack *h, void *obj);
150
151 /* Statistics-gathering stuff. */
152
153 static int tree_code_counts[MAX_TREE_CODES];
154 int tree_node_counts[(int) all_kinds];
155 int tree_node_sizes[(int) all_kinds];
156
157 /* Keep in sync with tree.h:enum tree_node_kind. */
158 static const char * const tree_node_kind_names[] = {
159 "decls",
160 "types",
161 "blocks",
162 "stmts",
163 "refs",
164 "exprs",
165 "constants",
166 "identifiers",
167 "vecs",
168 "binfos",
169 "ssa names",
170 "constructors",
171 "random kinds",
172 "lang_decl kinds",
173 "lang_type kinds",
174 "omp clauses",
175 };
176
177 /* Unique id for next decl created. */
178 static GTY(()) int next_decl_uid;
179 /* Unique id for next type created. */
180 static GTY(()) int next_type_uid = 1;
181 /* Unique id for next debug decl created. Use negative numbers,
182 to catch erroneous uses. */
183 static GTY(()) int next_debug_decl_uid;
184
185 /* Since we cannot rehash a type after it is in the table, we have to
186 keep the hash code. */
187
188 struct GTY(()) type_hash {
189 unsigned long hash;
190 tree type;
191 };
192
193 /* Initial size of the hash table (rounded to next prime). */
194 #define TYPE_HASH_INITIAL_SIZE 1000
195
196 /* Now here is the hash table. When recording a type, it is added to
197 the slot whose index is the hash code. Note that the hash table is
198 used for several kinds of types (function types, array types and
199 array index range types, for now). While all these live in the
200 same table, they are completely independent, and the hash code is
201 computed differently for each of these. */
202
203 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
204 htab_t type_hash_table;
205
206 /* Hash table and temporary node for larger integer const values. */
207 static GTY (()) tree int_cst_node;
208 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
209 htab_t int_cst_hash_table;
210
211 /* Hash table for optimization flags and target option flags. Use the same
212 hash table for both sets of options. Nodes for building the current
213 optimization and target option nodes. The assumption is most of the time
214 the options created will already be in the hash table, so we avoid
 215 allocating and freeing up a node repeatedly. */
216 static GTY (()) tree cl_optimization_node;
217 static GTY (()) tree cl_target_option_node;
218 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
219 htab_t cl_option_hash_table;
220
221 /* General tree->tree mapping structure for use in hash tables. */
222
223
224 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
225 htab_t debug_expr_for_decl;
226
227 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
228 htab_t value_expr_for_decl;
229
230 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
231 htab_t debug_args_for_decl;
232
233 static void set_type_quals (tree, int);
234 static int type_hash_eq (const void *, const void *);
235 static hashval_t type_hash_hash (const void *);
236 static hashval_t int_cst_hash_hash (const void *);
237 static int int_cst_hash_eq (const void *, const void *);
238 static hashval_t cl_option_hash_hash (const void *);
239 static int cl_option_hash_eq (const void *, const void *);
240 static void print_type_hash_statistics (void);
241 static void print_debug_expr_statistics (void);
242 static void print_value_expr_statistics (void);
243 static int type_hash_marked_p (const void *);
244 static void type_hash_list (const_tree, inchash::hash &);
245 static void attribute_hash_list (const_tree, inchash::hash &);
246
247 tree global_trees[TI_MAX];
248 tree integer_types[itk_none];
249
250 bool int_n_enabled_p[NUM_INT_N_ENTS];
251 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
252
253 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
254
255 /* Number of operands for each OpenMP clause. */
256 unsigned const char omp_clause_num_ops[] =
257 {
258 0, /* OMP_CLAUSE_ERROR */
259 1, /* OMP_CLAUSE_PRIVATE */
260 1, /* OMP_CLAUSE_SHARED */
261 1, /* OMP_CLAUSE_FIRSTPRIVATE */
262 2, /* OMP_CLAUSE_LASTPRIVATE */
263 4, /* OMP_CLAUSE_REDUCTION */
264 1, /* OMP_CLAUSE_COPYIN */
265 1, /* OMP_CLAUSE_COPYPRIVATE */
266 3, /* OMP_CLAUSE_LINEAR */
267 2, /* OMP_CLAUSE_ALIGNED */
268 1, /* OMP_CLAUSE_DEPEND */
269 1, /* OMP_CLAUSE_UNIFORM */
270 2, /* OMP_CLAUSE_FROM */
271 2, /* OMP_CLAUSE_TO */
272 2, /* OMP_CLAUSE_MAP */
273 1, /* OMP_CLAUSE__LOOPTEMP_ */
274 1, /* OMP_CLAUSE_IF */
275 1, /* OMP_CLAUSE_NUM_THREADS */
276 1, /* OMP_CLAUSE_SCHEDULE */
277 0, /* OMP_CLAUSE_NOWAIT */
278 0, /* OMP_CLAUSE_ORDERED */
279 0, /* OMP_CLAUSE_DEFAULT */
280 3, /* OMP_CLAUSE_COLLAPSE */
281 0, /* OMP_CLAUSE_UNTIED */
282 1, /* OMP_CLAUSE_FINAL */
283 0, /* OMP_CLAUSE_MERGEABLE */
284 1, /* OMP_CLAUSE_DEVICE */
285 1, /* OMP_CLAUSE_DIST_SCHEDULE */
286 0, /* OMP_CLAUSE_INBRANCH */
287 0, /* OMP_CLAUSE_NOTINBRANCH */
288 1, /* OMP_CLAUSE_NUM_TEAMS */
289 1, /* OMP_CLAUSE_THREAD_LIMIT */
290 0, /* OMP_CLAUSE_PROC_BIND */
291 1, /* OMP_CLAUSE_SAFELEN */
292 1, /* OMP_CLAUSE_SIMDLEN */
293 0, /* OMP_CLAUSE_FOR */
294 0, /* OMP_CLAUSE_PARALLEL */
295 0, /* OMP_CLAUSE_SECTIONS */
296 0, /* OMP_CLAUSE_TASKGROUP */
297 1, /* OMP_CLAUSE__SIMDUID_ */
298 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
299 };
300
301 const char * const omp_clause_code_name[] =
302 {
303 "error_clause",
304 "private",
305 "shared",
306 "firstprivate",
307 "lastprivate",
308 "reduction",
309 "copyin",
310 "copyprivate",
311 "linear",
312 "aligned",
313 "depend",
314 "uniform",
315 "from",
316 "to",
317 "map",
318 "_looptemp_",
319 "if",
320 "num_threads",
321 "schedule",
322 "nowait",
323 "ordered",
324 "default",
325 "collapse",
326 "untied",
327 "final",
328 "mergeable",
329 "device",
330 "dist_schedule",
331 "inbranch",
332 "notinbranch",
333 "num_teams",
334 "thread_limit",
335 "proc_bind",
336 "safelen",
337 "simdlen",
338 "for",
339 "parallel",
340 "sections",
341 "taskgroup",
342 "_simduid_",
343 "_Cilk_for_count_"
344 };
345
346
347 /* Return the tree node structure used by tree code CODE. */
348
349 static inline enum tree_node_structure_enum
350 tree_node_structure_for_code (enum tree_code code)
351 {
352 switch (TREE_CODE_CLASS (code))
353 {
354 case tcc_declaration:
355 {
356 switch (code)
357 {
358 case FIELD_DECL:
359 return TS_FIELD_DECL;
360 case PARM_DECL:
361 return TS_PARM_DECL;
362 case VAR_DECL:
363 return TS_VAR_DECL;
364 case LABEL_DECL:
365 return TS_LABEL_DECL;
366 case RESULT_DECL:
367 return TS_RESULT_DECL;
368 case DEBUG_EXPR_DECL:
369 return TS_DECL_WRTL;
370 case CONST_DECL:
371 return TS_CONST_DECL;
372 case TYPE_DECL:
373 return TS_TYPE_DECL;
374 case FUNCTION_DECL:
375 return TS_FUNCTION_DECL;
376 case TRANSLATION_UNIT_DECL:
377 return TS_TRANSLATION_UNIT_DECL;
378 default:
379 return TS_DECL_NON_COMMON;
380 }
381 }
382 case tcc_type:
383 return TS_TYPE_NON_COMMON;
384 case tcc_reference:
385 case tcc_comparison:
386 case tcc_unary:
387 case tcc_binary:
388 case tcc_expression:
389 case tcc_statement:
390 case tcc_vl_exp:
391 return TS_EXP;
392 default: /* tcc_constant and tcc_exceptional */
393 break;
394 }
395 switch (code)
396 {
397 /* tcc_constant cases. */
398 case VOID_CST: return TS_TYPED;
399 case INTEGER_CST: return TS_INT_CST;
400 case REAL_CST: return TS_REAL_CST;
401 case FIXED_CST: return TS_FIXED_CST;
402 case COMPLEX_CST: return TS_COMPLEX;
403 case VECTOR_CST: return TS_VECTOR;
404 case STRING_CST: return TS_STRING;
405 /* tcc_exceptional cases. */
406 case ERROR_MARK: return TS_COMMON;
407 case IDENTIFIER_NODE: return TS_IDENTIFIER;
408 case TREE_LIST: return TS_LIST;
409 case TREE_VEC: return TS_VEC;
410 case SSA_NAME: return TS_SSA_NAME;
411 case PLACEHOLDER_EXPR: return TS_COMMON;
412 case STATEMENT_LIST: return TS_STATEMENT_LIST;
413 case BLOCK: return TS_BLOCK;
414 case CONSTRUCTOR: return TS_CONSTRUCTOR;
415 case TREE_BINFO: return TS_BINFO;
416 case OMP_CLAUSE: return TS_OMP_CLAUSE;
417 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
418 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
419
420 default:
421 gcc_unreachable ();
422 }
423 }
424
425
426 /* Initialize tree_contains_struct to describe the hierarchy of tree
427 nodes. */
428
429 static void
430 initialize_tree_contains_struct (void)
431 {
432 unsigned i;
433
434 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
435 {
436 enum tree_code code;
437 enum tree_node_structure_enum ts_code;
438
439 code = (enum tree_code) i;
440 ts_code = tree_node_structure_for_code (code);
441
442 /* Mark the TS structure itself. */
443 tree_contains_struct[code][ts_code] = 1;
444
445 /* Mark all the structures that TS is derived from. */
446 switch (ts_code)
447 {
448 case TS_TYPED:
449 case TS_BLOCK:
450 MARK_TS_BASE (code);
451 break;
452
453 case TS_COMMON:
454 case TS_INT_CST:
455 case TS_REAL_CST:
456 case TS_FIXED_CST:
457 case TS_VECTOR:
458 case TS_STRING:
459 case TS_COMPLEX:
460 case TS_SSA_NAME:
461 case TS_CONSTRUCTOR:
462 case TS_EXP:
463 case TS_STATEMENT_LIST:
464 MARK_TS_TYPED (code);
465 break;
466
467 case TS_IDENTIFIER:
468 case TS_DECL_MINIMAL:
469 case TS_TYPE_COMMON:
470 case TS_LIST:
471 case TS_VEC:
472 case TS_BINFO:
473 case TS_OMP_CLAUSE:
474 case TS_OPTIMIZATION:
475 case TS_TARGET_OPTION:
476 MARK_TS_COMMON (code);
477 break;
478
479 case TS_TYPE_WITH_LANG_SPECIFIC:
480 MARK_TS_TYPE_COMMON (code);
481 break;
482
483 case TS_TYPE_NON_COMMON:
484 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
485 break;
486
487 case TS_DECL_COMMON:
488 MARK_TS_DECL_MINIMAL (code);
489 break;
490
491 case TS_DECL_WRTL:
492 case TS_CONST_DECL:
493 MARK_TS_DECL_COMMON (code);
494 break;
495
496 case TS_DECL_NON_COMMON:
497 MARK_TS_DECL_WITH_VIS (code);
498 break;
499
500 case TS_DECL_WITH_VIS:
501 case TS_PARM_DECL:
502 case TS_LABEL_DECL:
503 case TS_RESULT_DECL:
504 MARK_TS_DECL_WRTL (code);
505 break;
506
507 case TS_FIELD_DECL:
508 MARK_TS_DECL_COMMON (code);
509 break;
510
511 case TS_VAR_DECL:
512 MARK_TS_DECL_WITH_VIS (code);
513 break;
514
515 case TS_TYPE_DECL:
516 case TS_FUNCTION_DECL:
517 MARK_TS_DECL_NON_COMMON (code);
518 break;
519
520 case TS_TRANSLATION_UNIT_DECL:
521 MARK_TS_DECL_COMMON (code);
522 break;
523
524 default:
525 gcc_unreachable ();
526 }
527 }
528
529 /* Basic consistency checks for attributes used in fold. */
530 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
531 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
532 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
534 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
535 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
537 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
538 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
540 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
542 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
543 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
544 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
546 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
547 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
548 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
549 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
550 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
554 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
556 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
557 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
558 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
559 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
560 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
561 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
562 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
563 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
564 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
565 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
566 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
567 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
568 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
569 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
570 }
571
572
573 /* Init tree.c. */
574
575 void
576 init_ttree (void)
577 {
578 /* Initialize the hash table of types. */
579 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
580 type_hash_eq, 0);
581
582 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
583 tree_decl_map_eq, 0);
584
585 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
586 tree_decl_map_eq, 0);
587
588 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
589 int_cst_hash_eq, NULL);
590
591 int_cst_node = make_int_cst (1, 1);
592
593 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
594 cl_option_hash_eq, NULL);
595
596 cl_optimization_node = make_node (OPTIMIZATION_NODE);
597 cl_target_option_node = make_node (TARGET_OPTION_NODE);
598
599 /* Initialize the tree_contains_struct array. */
600 initialize_tree_contains_struct ();
601 lang_hooks.init_ts ();
602 }
603
604 \f
605 /* The name of the object as the assembler will see it (but before any
606 translations made by ASM_OUTPUT_LABELREF). Often this is the same
607 as DECL_NAME. It is an IDENTIFIER_NODE. */
608 tree
609 decl_assembler_name (tree decl)
610 {
611 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
612 lang_hooks.set_decl_assembler_name (decl);
613 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
614 }
615
616 /* When the target supports COMDAT groups, this indicates which group the
617 DECL is associated with. This can be either an IDENTIFIER_NODE or a
618 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
619 tree
620 decl_comdat_group (const_tree node)
621 {
622 struct symtab_node *snode = symtab_node::get (node);
623 if (!snode)
624 return NULL;
625 return snode->get_comdat_group ();
626 }
627
628 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
629 tree
630 decl_comdat_group_id (const_tree node)
631 {
632 struct symtab_node *snode = symtab_node::get (node);
633 if (!snode)
634 return NULL;
635 return snode->get_comdat_group_id ();
636 }
637
 638 /* When the target supports named sections, return the name of the section
 639 that NODE is placed in as a string, or NULL if it is in no section. */
640 const char *
641 decl_section_name (const_tree node)
642 {
643 struct symtab_node *snode = symtab_node::get (node);
644 if (!snode)
645 return NULL;
646 return snode->get_section ();
647 }
648
 649 /* Set the section name of NODE to the string VALUE, or clear its
 650 section if VALUE is NULL. */
651 void
652 set_decl_section_name (tree node, const char *value)
653 {
654 struct symtab_node *snode;
655
656 if (value == NULL)
657 {
658 snode = symtab_node::get (node);
659 if (!snode)
660 return;
661 }
662 else if (TREE_CODE (node) == VAR_DECL)
663 snode = varpool_node::get_create (node);
664 else
665 snode = cgraph_node::get_create (node);
666 snode->set_section (value);
667 }
668
669 /* Return TLS model of a variable NODE. */
670 enum tls_model
671 decl_tls_model (const_tree node)
672 {
673 struct varpool_node *snode = varpool_node::get (node);
674 if (!snode)
675 return TLS_MODEL_NONE;
676 return snode->tls_model;
677 }
678
679 /* Set TLS model of variable NODE to MODEL. */
680 void
681 set_decl_tls_model (tree node, enum tls_model model)
682 {
683 struct varpool_node *vnode;
684
685 if (model == TLS_MODEL_NONE)
686 {
687 vnode = varpool_node::get (node);
688 if (!vnode)
689 return;
690 }
691 else
692 vnode = varpool_node::get_create (node);
693 vnode->tls_model = model;
694 }
695
696 /* Compute the number of bytes occupied by a tree with code CODE.
697 This function cannot be used for nodes that have variable sizes,
698 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
699 size_t
700 tree_code_size (enum tree_code code)
701 {
702 switch (TREE_CODE_CLASS (code))
703 {
704 case tcc_declaration: /* A decl node */
705 {
706 switch (code)
707 {
708 case FIELD_DECL:
709 return sizeof (struct tree_field_decl);
710 case PARM_DECL:
711 return sizeof (struct tree_parm_decl);
712 case VAR_DECL:
713 return sizeof (struct tree_var_decl);
714 case LABEL_DECL:
715 return sizeof (struct tree_label_decl);
716 case RESULT_DECL:
717 return sizeof (struct tree_result_decl);
718 case CONST_DECL:
719 return sizeof (struct tree_const_decl);
720 case TYPE_DECL:
721 return sizeof (struct tree_type_decl);
722 case FUNCTION_DECL:
723 return sizeof (struct tree_function_decl);
724 case DEBUG_EXPR_DECL:
725 return sizeof (struct tree_decl_with_rtl);
726 case TRANSLATION_UNIT_DECL:
727 return sizeof (struct tree_translation_unit_decl);
728 case NAMESPACE_DECL:
729 case IMPORTED_DECL:
730 case NAMELIST_DECL:
731 return sizeof (struct tree_decl_non_common);
732 default:
733 return lang_hooks.tree_size (code);
734 }
735 }
736
737 case tcc_type: /* a type node */
738 return sizeof (struct tree_type_non_common);
739
740 case tcc_reference: /* a reference */
741 case tcc_expression: /* an expression */
742 case tcc_statement: /* an expression with side effects */
743 case tcc_comparison: /* a comparison expression */
744 case tcc_unary: /* a unary arithmetic expression */
745 case tcc_binary: /* a binary arithmetic expression */
746 return (sizeof (struct tree_exp)
747 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
748
749 case tcc_constant: /* a constant */
750 switch (code)
751 {
752 case VOID_CST: return sizeof (struct tree_typed);
753 case INTEGER_CST: gcc_unreachable ();
754 case REAL_CST: return sizeof (struct tree_real_cst);
755 case FIXED_CST: return sizeof (struct tree_fixed_cst);
756 case COMPLEX_CST: return sizeof (struct tree_complex);
757 case VECTOR_CST: return sizeof (struct tree_vector);
758 case STRING_CST: gcc_unreachable ();
759 default:
760 return lang_hooks.tree_size (code);
761 }
762
763 case tcc_exceptional: /* something random, like an identifier. */
764 switch (code)
765 {
766 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
767 case TREE_LIST: return sizeof (struct tree_list);
768
769 case ERROR_MARK:
770 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
771
772 case TREE_VEC:
773 case OMP_CLAUSE: gcc_unreachable ();
774
775 case SSA_NAME: return sizeof (struct tree_ssa_name);
776
777 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
778 case BLOCK: return sizeof (struct tree_block);
779 case CONSTRUCTOR: return sizeof (struct tree_constructor);
780 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
781 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
782
783 default:
784 return lang_hooks.tree_size (code);
785 }
786
787 default:
788 gcc_unreachable ();
789 }
790 }
791
792 /* Compute the number of bytes occupied by NODE. This routine only
793 looks at TREE_CODE, except for those nodes that have variable sizes. */
794 size_t
795 tree_size (const_tree node)
796 {
797 const enum tree_code code = TREE_CODE (node);
798 switch (code)
799 {
800 case INTEGER_CST:
801 return (sizeof (struct tree_int_cst)
802 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
803
804 case TREE_BINFO:
805 return (offsetof (struct tree_binfo, base_binfos)
806 + vec<tree, va_gc>
807 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
808
809 case TREE_VEC:
810 return (sizeof (struct tree_vec)
811 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
812
813 case VECTOR_CST:
814 return (sizeof (struct tree_vector)
815 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
816
817 case STRING_CST:
818 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
819
820 case OMP_CLAUSE:
821 return (sizeof (struct tree_omp_clause)
822 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
823 * sizeof (tree));
824
825 default:
826 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
827 return (sizeof (struct tree_exp)
828 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
829 else
830 return tree_code_size (code);
831 }
832 }
833
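/* A minimal sketch of the difference between the two routines, assuming
   VEC is an existing TREE_VEC node with three elements:

     tree_code_size (PLUS_EXPR)
       == sizeof (struct tree_exp) + sizeof (tree);
     tree_size (vec)
       == sizeof (struct tree_vec) + 2 * sizeof (tree);

   For variable-sized codes such as TREE_VEC, INTEGER_CST and STRING_CST
   only tree_size gives a meaningful answer.  */
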
834 /* Record interesting allocation statistics for a tree node with CODE
835 and LENGTH. */
836
837 static void
838 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
839 size_t length ATTRIBUTE_UNUSED)
840 {
841 enum tree_code_class type = TREE_CODE_CLASS (code);
842 tree_node_kind kind;
843
844 if (!GATHER_STATISTICS)
845 return;
846
847 switch (type)
848 {
849 case tcc_declaration: /* A decl node */
850 kind = d_kind;
851 break;
852
853 case tcc_type: /* a type node */
854 kind = t_kind;
855 break;
856
857 case tcc_statement: /* an expression with side effects */
858 kind = s_kind;
859 break;
860
861 case tcc_reference: /* a reference */
862 kind = r_kind;
863 break;
864
865 case tcc_expression: /* an expression */
866 case tcc_comparison: /* a comparison expression */
867 case tcc_unary: /* a unary arithmetic expression */
868 case tcc_binary: /* a binary arithmetic expression */
869 kind = e_kind;
870 break;
871
872 case tcc_constant: /* a constant */
873 kind = c_kind;
874 break;
875
876 case tcc_exceptional: /* something random, like an identifier. */
877 switch (code)
878 {
879 case IDENTIFIER_NODE:
880 kind = id_kind;
881 break;
882
883 case TREE_VEC:
884 kind = vec_kind;
885 break;
886
887 case TREE_BINFO:
888 kind = binfo_kind;
889 break;
890
891 case SSA_NAME:
892 kind = ssa_name_kind;
893 break;
894
895 case BLOCK:
896 kind = b_kind;
897 break;
898
899 case CONSTRUCTOR:
900 kind = constr_kind;
901 break;
902
903 case OMP_CLAUSE:
904 kind = omp_clause_kind;
905 break;
906
907 default:
908 kind = x_kind;
909 break;
910 }
911 break;
912
913 case tcc_vl_exp:
914 kind = e_kind;
915 break;
916
917 default:
918 gcc_unreachable ();
919 }
920
921 tree_code_counts[(int) code]++;
922 tree_node_counts[(int) kind]++;
923 tree_node_sizes[(int) kind] += length;
924 }
925
926 /* Allocate and return a new UID from the DECL_UID namespace. */
927
928 int
929 allocate_decl_uid (void)
930 {
931 return next_decl_uid++;
932 }
933
934 /* Return a newly allocated node of code CODE. For decl and type
935 nodes, some other fields are initialized. The rest of the node is
936 initialized to zero. This function cannot be used for TREE_VEC,
937 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
938 tree_code_size.
939
940 Achoo! I got a code in the node. */
941
942 tree
943 make_node_stat (enum tree_code code MEM_STAT_DECL)
944 {
945 tree t;
946 enum tree_code_class type = TREE_CODE_CLASS (code);
947 size_t length = tree_code_size (code);
948
949 record_node_allocation_statistics (code, length);
950
951 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
952 TREE_SET_CODE (t, code);
953
954 switch (type)
955 {
956 case tcc_statement:
957 TREE_SIDE_EFFECTS (t) = 1;
958 break;
959
960 case tcc_declaration:
961 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
962 {
963 if (code == FUNCTION_DECL)
964 {
965 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
966 DECL_MODE (t) = FUNCTION_MODE;
967 }
968 else
969 DECL_ALIGN (t) = 1;
970 }
971 DECL_SOURCE_LOCATION (t) = input_location;
972 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
973 DECL_UID (t) = --next_debug_decl_uid;
974 else
975 {
976 DECL_UID (t) = allocate_decl_uid ();
977 SET_DECL_PT_UID (t, -1);
978 }
979 if (TREE_CODE (t) == LABEL_DECL)
980 LABEL_DECL_UID (t) = -1;
981
982 break;
983
984 case tcc_type:
985 TYPE_UID (t) = next_type_uid++;
986 TYPE_ALIGN (t) = BITS_PER_UNIT;
987 TYPE_USER_ALIGN (t) = 0;
988 TYPE_MAIN_VARIANT (t) = t;
989 TYPE_CANONICAL (t) = t;
990
991 /* Default to no attributes for type, but let target change that. */
992 TYPE_ATTRIBUTES (t) = NULL_TREE;
993 targetm.set_default_type_attributes (t);
994
995 /* We have not yet computed the alias set for this type. */
996 TYPE_ALIAS_SET (t) = -1;
997 break;
998
999 case tcc_constant:
1000 TREE_CONSTANT (t) = 1;
1001 break;
1002
1003 case tcc_expression:
1004 switch (code)
1005 {
1006 case INIT_EXPR:
1007 case MODIFY_EXPR:
1008 case VA_ARG_EXPR:
1009 case PREDECREMENT_EXPR:
1010 case PREINCREMENT_EXPR:
1011 case POSTDECREMENT_EXPR:
1012 case POSTINCREMENT_EXPR:
1013 /* All of these have side-effects, no matter what their
1014 operands are. */
1015 TREE_SIDE_EFFECTS (t) = 1;
1016 break;
1017
1018 default:
1019 break;
1020 }
1021 break;
1022
1023 default:
1024 /* Other classes need no special treatment. */
1025 break;
1026 }
1027
1028 return t;
1029 }
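
/* A minimal sketch of make_node in use, assuming the middle end has been
   initialized; a freshly built type starts out as its own main variant
   and canonical type, aligned to BITS_PER_UNIT:

     tree t = make_node (RECORD_TYPE);
     gcc_assert (TYPE_MAIN_VARIANT (t) == t && TYPE_CANONICAL (t) == t);
     gcc_assert (TYPE_ALIGN (t) == BITS_PER_UNIT);

   Variable-sized codes (TREE_VEC, INTEGER_CST, OMP_CLAUSE) must go
   through their dedicated constructors instead.  */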
1030 \f
1031 /* Return a new node with the same contents as NODE except that its
1032 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1033
1034 tree
1035 copy_node_stat (tree node MEM_STAT_DECL)
1036 {
1037 tree t;
1038 enum tree_code code = TREE_CODE (node);
1039 size_t length;
1040
1041 gcc_assert (code != STATEMENT_LIST);
1042
1043 length = tree_size (node);
1044 record_node_allocation_statistics (code, length);
1045 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1046 memcpy (t, node, length);
1047
1048 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1049 TREE_CHAIN (t) = 0;
1050 TREE_ASM_WRITTEN (t) = 0;
1051 TREE_VISITED (t) = 0;
1052
1053 if (TREE_CODE_CLASS (code) == tcc_declaration)
1054 {
1055 if (code == DEBUG_EXPR_DECL)
1056 DECL_UID (t) = --next_debug_decl_uid;
1057 else
1058 {
1059 DECL_UID (t) = allocate_decl_uid ();
1060 if (DECL_PT_UID_SET_P (node))
1061 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1062 }
1063 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1064 && DECL_HAS_VALUE_EXPR_P (node))
1065 {
1066 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1067 DECL_HAS_VALUE_EXPR_P (t) = 1;
1068 }
 1069 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1070 if (TREE_CODE (node) == VAR_DECL)
1071 {
1072 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1073 t->decl_with_vis.symtab_node = NULL;
1074 }
1075 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1076 {
1077 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1078 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1079 }
1080 if (TREE_CODE (node) == FUNCTION_DECL)
1081 {
1082 DECL_STRUCT_FUNCTION (t) = NULL;
1083 t->decl_with_vis.symtab_node = NULL;
1084 }
1085 }
1086 else if (TREE_CODE_CLASS (code) == tcc_type)
1087 {
1088 TYPE_UID (t) = next_type_uid++;
1089 /* The following is so that the debug code for
1090 the copy is different from the original type.
1091 The two statements usually duplicate each other
1092 (because they clear fields of the same union),
1093 but the optimizer should catch that. */
1094 TYPE_SYMTAB_POINTER (t) = 0;
1095 TYPE_SYMTAB_ADDRESS (t) = 0;
1096
1097 /* Do not copy the values cache. */
1098 if (TYPE_CACHED_VALUES_P (t))
1099 {
1100 TYPE_CACHED_VALUES_P (t) = 0;
1101 TYPE_CACHED_VALUES (t) = NULL_TREE;
1102 }
1103 }
1104
1105 return t;
1106 }
1107
1108 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1109 For example, this can copy a list made of TREE_LIST nodes. */
1110
1111 tree
1112 copy_list (tree list)
1113 {
1114 tree head;
1115 tree prev, next;
1116
1117 if (list == 0)
1118 return 0;
1119
1120 head = prev = copy_node (list);
1121 next = TREE_CHAIN (list);
1122 while (next)
1123 {
1124 TREE_CHAIN (prev) = copy_node (next);
1125 prev = TREE_CHAIN (prev);
1126 next = TREE_CHAIN (next);
1127 }
1128 return head;
1129 }
1130
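/* A minimal sketch of copy_list, assuming OLD_ATTRS is a non-empty
   TREE_LIST chain: the TREE_LIST cells themselves are fresh copies,
   while the TREE_PURPOSE and TREE_VALUE fields still point at the
   original trees:

     tree new_attrs = copy_list (old_attrs);
     gcc_assert (new_attrs != old_attrs
		 && TREE_VALUE (new_attrs) == TREE_VALUE (old_attrs));  */
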
1131 \f
1132 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1133 INTEGER_CST with value CST and type TYPE. */
1134
1135 static unsigned int
1136 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1137 {
1138 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1139 /* We need an extra zero HWI if CST is an unsigned integer with its
1140 upper bit set, and if CST occupies a whole number of HWIs. */
1141 if (TYPE_UNSIGNED (type)
1142 && wi::neg_p (cst)
1143 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1144 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1145 return cst.get_len ();
1146 }
1147
1148 /* Return a new INTEGER_CST with value CST and type TYPE. */
1149
1150 static tree
1151 build_new_int_cst (tree type, const wide_int &cst)
1152 {
1153 unsigned int len = cst.get_len ();
1154 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1155 tree nt = make_int_cst (len, ext_len);
1156
1157 if (len < ext_len)
1158 {
1159 --ext_len;
1160 TREE_INT_CST_ELT (nt, ext_len) = 0;
1161 for (unsigned int i = len; i < ext_len; ++i)
1162 TREE_INT_CST_ELT (nt, i) = -1;
1163 }
1164 else if (TYPE_UNSIGNED (type)
1165 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1166 {
1167 len--;
1168 TREE_INT_CST_ELT (nt, len)
1169 = zext_hwi (cst.elt (len),
1170 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1171 }
1172
1173 for (unsigned int i = 0; i < len; i++)
1174 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1175 TREE_TYPE (nt) = type;
1176 return nt;
1177 }
1178
1179 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1180
1181 tree
1182 build_int_cst (tree type, HOST_WIDE_INT low)
1183 {
1184 /* Support legacy code. */
1185 if (!type)
1186 type = integer_type_node;
1187
1188 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1189 }
1190
1191 tree
1192 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1193 {
1194 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1195 }
1196
1197 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1198
1199 tree
1200 build_int_cst_type (tree type, HOST_WIDE_INT low)
1201 {
1202 gcc_assert (type);
1203 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1204 }
1205
 1206 /* Construct a tree of type TYPE with the value given by CST. The
 1207 signedness of CST is assumed to be the same as that of TYPE. */
1208
1209 tree
1210 double_int_to_tree (tree type, double_int cst)
1211 {
1212 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1213 }
1214
 1215 /* Force the wide_int CST into the range of the type TYPE by sign- or
 1216 zero-extending it. OVERFLOWABLE indicates whether we care about
 1217 overflow of the value: when it is >0 we are only interested in
 1218 signed overflow, when it is <0 we are interested in any overflow.
 1219 OVERFLOWED indicates whether overflow has already occurred. We
 1220 force CST's value to be within the range of TYPE (by setting to 0
 1221 or 1 all the bits outside the type's range). We set TREE_OVERFLOW
 1222 on the returned node if
 1223 OVERFLOWED is nonzero,
 1224 or OVERFLOWABLE is >0 and signed overflow occurs,
 1225 or OVERFLOWABLE is <0 and any overflow occurs.
 1226 We return a new tree node for the extended wide_int. The node
 1227 is shared if no overflow flags are set. */
1228
1229
1230 tree
1231 force_fit_type (tree type, const wide_int_ref &cst,
1232 int overflowable, bool overflowed)
1233 {
1234 signop sign = TYPE_SIGN (type);
1235
1236 /* If we need to set overflow flags, return a new unshared node. */
1237 if (overflowed || !wi::fits_to_tree_p (cst, type))
1238 {
1239 if (overflowed
1240 || overflowable < 0
1241 || (overflowable > 0 && sign == SIGNED))
1242 {
1243 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1244 tree t = build_new_int_cst (type, tmp);
1245 TREE_OVERFLOW (t) = 1;
1246 return t;
1247 }
1248 }
1249
1250 /* Else build a shared node. */
1251 return wide_int_to_tree (type, cst);
1252 }
1253
1254 /* These are the hash table functions for the hash table of INTEGER_CST
1255 nodes of a sizetype. */
1256
 1257 /* Return the hash code of X, an INTEGER_CST. */
1258
1259 static hashval_t
1260 int_cst_hash_hash (const void *x)
1261 {
1262 const_tree const t = (const_tree) x;
1263 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1264 int i;
1265
1266 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1267 code ^= TREE_INT_CST_ELT (t, i);
1268
1269 return code;
1270 }
1271
1272 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
 1273 is the same as that given by *Y, also an INTEGER_CST. */
1274
1275 static int
1276 int_cst_hash_eq (const void *x, const void *y)
1277 {
1278 const_tree const xt = (const_tree) x;
1279 const_tree const yt = (const_tree) y;
1280
1281 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1282 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1283 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1284 return false;
1285
1286 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1287 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1288 return false;
1289
1290 return true;
1291 }
1292
1293 /* Create an INT_CST node of TYPE and value CST.
1294 The returned node is always shared. For small integers we use a
1295 per-type vector cache, for larger ones we use a single hash table.
1296 The value is extended from its precision according to the sign of
1297 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1298 the upper bits and ensures that hashing and value equality based
1299 upon the underlying HOST_WIDE_INTs works without masking. */
1300
1301 tree
1302 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1303 {
1304 tree t;
1305 int ix = -1;
1306 int limit = 0;
1307
1308 gcc_assert (type);
1309 unsigned int prec = TYPE_PRECISION (type);
1310 signop sgn = TYPE_SIGN (type);
1311
1312 /* Verify that everything is canonical. */
1313 int l = pcst.get_len ();
1314 if (l > 1)
1315 {
1316 if (pcst.elt (l - 1) == 0)
1317 gcc_checking_assert (pcst.elt (l - 2) < 0);
1318 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1319 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1320 }
1321
1322 wide_int cst = wide_int::from (pcst, prec, sgn);
1323 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1324
1325 if (ext_len == 1)
1326 {
1327 /* We just need to store a single HOST_WIDE_INT. */
1328 HOST_WIDE_INT hwi;
1329 if (TYPE_UNSIGNED (type))
1330 hwi = cst.to_uhwi ();
1331 else
1332 hwi = cst.to_shwi ();
1333
1334 switch (TREE_CODE (type))
1335 {
1336 case NULLPTR_TYPE:
1337 gcc_assert (hwi == 0);
1338 /* Fallthru. */
1339
1340 case POINTER_TYPE:
1341 case REFERENCE_TYPE:
1342 case POINTER_BOUNDS_TYPE:
1343 /* Cache NULL pointer and zero bounds. */
1344 if (hwi == 0)
1345 {
1346 limit = 1;
1347 ix = 0;
1348 }
1349 break;
1350
1351 case BOOLEAN_TYPE:
1352 /* Cache false or true. */
1353 limit = 2;
1354 if (hwi < 2)
1355 ix = hwi;
1356 break;
1357
1358 case INTEGER_TYPE:
1359 case OFFSET_TYPE:
1360 if (TYPE_SIGN (type) == UNSIGNED)
1361 {
1362 /* Cache [0, N). */
1363 limit = INTEGER_SHARE_LIMIT;
1364 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1365 ix = hwi;
1366 }
1367 else
1368 {
1369 /* Cache [-1, N). */
1370 limit = INTEGER_SHARE_LIMIT + 1;
1371 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1372 ix = hwi + 1;
1373 }
1374 break;
1375
1376 case ENUMERAL_TYPE:
1377 break;
1378
1379 default:
1380 gcc_unreachable ();
1381 }
1382
1383 if (ix >= 0)
1384 {
1385 /* Look for it in the type's vector of small shared ints. */
1386 if (!TYPE_CACHED_VALUES_P (type))
1387 {
1388 TYPE_CACHED_VALUES_P (type) = 1;
1389 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1390 }
1391
1392 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1393 if (t)
1394 /* Make sure no one is clobbering the shared constant. */
1395 gcc_checking_assert (TREE_TYPE (t) == type
1396 && TREE_INT_CST_NUNITS (t) == 1
1397 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1398 && TREE_INT_CST_EXT_NUNITS (t) == 1
1399 && TREE_INT_CST_ELT (t, 0) == hwi);
1400 else
1401 {
1402 /* Create a new shared int. */
1403 t = build_new_int_cst (type, cst);
1404 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1405 }
1406 }
1407 else
1408 {
1409 /* Use the cache of larger shared ints, using int_cst_node as
1410 a temporary. */
1411 void **slot;
1412
1413 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1414 TREE_TYPE (int_cst_node) = type;
1415
1416 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1417 t = (tree) *slot;
1418 if (!t)
1419 {
1420 /* Insert this one into the hash table. */
1421 t = int_cst_node;
1422 *slot = t;
1423 /* Make a new node for next time round. */
1424 int_cst_node = make_int_cst (1, 1);
1425 }
1426 }
1427 }
1428 else
1429 {
1430 /* The value either hashes properly or we drop it on the floor
1431 for the gc to take care of. There will not be enough of them
1432 to worry about. */
1433 void **slot;
1434
1435 tree nt = build_new_int_cst (type, cst);
1436 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1437 t = (tree) *slot;
1438 if (!t)
1439 {
1440 /* Insert this one into the hash table. */
1441 t = nt;
1442 *slot = t;
1443 }
1444 }
1445
1446 return t;
1447 }
1448
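/* A minimal sketch of the sharing guarantee described above, assuming
   the standard integer types have been set up: small constants of the
   same type and value come back as the very same node, so pointer
   comparison is enough:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);
     gcc_assert (a == b);

   Values outside the small per-type cache are still uniquified, via the
   int_cst_hash_table path.  */
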
1449 void
1450 cache_integer_cst (tree t)
1451 {
1452 tree type = TREE_TYPE (t);
1453 int ix = -1;
1454 int limit = 0;
1455 int prec = TYPE_PRECISION (type);
1456
1457 gcc_assert (!TREE_OVERFLOW (t));
1458
1459 switch (TREE_CODE (type))
1460 {
1461 case NULLPTR_TYPE:
1462 gcc_assert (integer_zerop (t));
1463 /* Fallthru. */
1464
1465 case POINTER_TYPE:
1466 case REFERENCE_TYPE:
1467 /* Cache NULL pointer. */
1468 if (integer_zerop (t))
1469 {
1470 limit = 1;
1471 ix = 0;
1472 }
1473 break;
1474
1475 case BOOLEAN_TYPE:
1476 /* Cache false or true. */
1477 limit = 2;
1478 if (wi::ltu_p (t, 2))
1479 ix = TREE_INT_CST_ELT (t, 0);
1480 break;
1481
1482 case INTEGER_TYPE:
1483 case OFFSET_TYPE:
1484 if (TYPE_UNSIGNED (type))
1485 {
1486 /* Cache 0..N */
1487 limit = INTEGER_SHARE_LIMIT;
1488
 1489 /* This is a little hokey, but if the prec is smaller than
1490 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1491 obvious test will not get the correct answer. */
1492 if (prec < HOST_BITS_PER_WIDE_INT)
1493 {
1494 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1495 ix = tree_to_uhwi (t);
1496 }
1497 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1498 ix = tree_to_uhwi (t);
1499 }
1500 else
1501 {
1502 /* Cache -1..N */
1503 limit = INTEGER_SHARE_LIMIT + 1;
1504
1505 if (integer_minus_onep (t))
1506 ix = 0;
1507 else if (!wi::neg_p (t))
1508 {
1509 if (prec < HOST_BITS_PER_WIDE_INT)
1510 {
1511 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1512 ix = tree_to_shwi (t) + 1;
1513 }
1514 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1515 ix = tree_to_shwi (t) + 1;
1516 }
1517 }
1518 break;
1519
1520 case ENUMERAL_TYPE:
1521 break;
1522
1523 default:
1524 gcc_unreachable ();
1525 }
1526
1527 if (ix >= 0)
1528 {
1529 /* Look for it in the type's vector of small shared ints. */
1530 if (!TYPE_CACHED_VALUES_P (type))
1531 {
1532 TYPE_CACHED_VALUES_P (type) = 1;
1533 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1534 }
1535
1536 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1537 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1538 }
1539 else
1540 {
1541 /* Use the cache of larger shared ints. */
1542 void **slot;
1543
1544 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1545 /* If there is already an entry for the number verify it's the
1546 same. */
1547 if (*slot)
1548 gcc_assert (wi::eq_p (tree (*slot), t));
1549 else
1550 /* Otherwise insert this one into the hash table. */
1551 *slot = t;
1552 }
1553 }
1554
1555
1556 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1557 and the rest are zeros. */
1558
1559 tree
1560 build_low_bits_mask (tree type, unsigned bits)
1561 {
1562 gcc_assert (bits <= TYPE_PRECISION (type));
1563
1564 return wide_int_to_tree (type, wi::mask (bits, false,
1565 TYPE_PRECISION (type)));
1566 }
1567
 1568 /* Return true if X is an integer constant that can be expressed in an
 1569 (unsigned) HOST_WIDE_INT without loss of precision. */
1570
1571 bool
1572 cst_and_fits_in_hwi (const_tree x)
1573 {
1574 if (TREE_CODE (x) != INTEGER_CST)
1575 return false;
1576
1577 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1578 return false;
1579
1580 return TREE_INT_CST_NUNITS (x) == 1;
1581 }
1582
 1583 /* Build a newly constructed VECTOR_CST node with room for LEN elements. */
1584
1585 tree
1586 make_vector_stat (unsigned len MEM_STAT_DECL)
1587 {
1588 tree t;
1589 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1590
1591 record_node_allocation_statistics (VECTOR_CST, length);
1592
1593 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1594
1595 TREE_SET_CODE (t, VECTOR_CST);
1596 TREE_CONSTANT (t) = 1;
1597
1598 return t;
1599 }
1600
 1601 /* Return a new VECTOR_CST node whose type is TYPE and whose values
 1602 are given by the array VALS, one per vector element. */
1603
1604 tree
1605 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1606 {
1607 int over = 0;
1608 unsigned cnt = 0;
1609 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1610 TREE_TYPE (v) = type;
1611
1612 /* Iterate through elements and check for overflow. */
1613 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1614 {
1615 tree value = vals[cnt];
1616
1617 VECTOR_CST_ELT (v, cnt) = value;
1618
1619 /* Don't crash if we get an address constant. */
1620 if (!CONSTANT_CLASS_P (value))
1621 continue;
1622
1623 over |= TREE_OVERFLOW (value);
1624 }
1625
1626 TREE_OVERFLOW (v) = over;
1627 return v;
1628 }
1629
1630 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1631 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1632
1633 tree
1634 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1635 {
1636 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1637 unsigned HOST_WIDE_INT idx;
1638 tree value;
1639
1640 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1641 vec[idx] = value;
1642 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1643 vec[idx] = build_zero_cst (TREE_TYPE (type));
1644
1645 return build_vector (type, vec);
1646 }
1647
1648 /* Build a vector of type VECTYPE where all the elements are SCs. */
1649 tree
1650 build_vector_from_val (tree vectype, tree sc)
1651 {
1652 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1653
1654 if (sc == error_mark_node)
1655 return sc;
1656
1657 /* Verify that the vector type is suitable for SC. Note that there
1658 is some inconsistency in the type-system with respect to restrict
1659 qualifications of pointers. Vector types always have a main-variant
1660 element type and the qualification is applied to the vector-type.
1661 So TREE_TYPE (vector-type) does not return a properly qualified
1662 vector element-type. */
1663 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1664 TREE_TYPE (vectype)));
1665
1666 if (CONSTANT_CLASS_P (sc))
1667 {
1668 tree *v = XALLOCAVEC (tree, nunits);
1669 for (i = 0; i < nunits; ++i)
1670 v[i] = sc;
1671 return build_vector (vectype, v);
1672 }
1673 else
1674 {
1675 vec<constructor_elt, va_gc> *v;
1676 vec_alloc (v, nunits);
1677 for (i = 0; i < nunits; ++i)
1678 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1679 return build_constructor (vectype, v);
1680 }
1681 }
1682
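/* A minimal sketch, assuming V4SI_TYPE is an existing four-element
   integer vector type: splatting a constant scalar yields a VECTOR_CST,
   while a non-constant scalar yields a CONSTRUCTOR instead:

     tree ones
       = build_vector_from_val (v4si_type,
				build_one_cst (TREE_TYPE (v4si_type)));
     gcc_assert (TREE_CODE (ones) == VECTOR_CST);  */
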
1683 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1684 are in the vec pointed to by VALS. */
1685 tree
1686 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1687 {
1688 tree c = make_node (CONSTRUCTOR);
1689 unsigned int i;
1690 constructor_elt *elt;
1691 bool constant_p = true;
1692 bool side_effects_p = false;
1693
1694 TREE_TYPE (c) = type;
1695 CONSTRUCTOR_ELTS (c) = vals;
1696
1697 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1698 {
1699 /* Mostly ctors will have elts that don't have side-effects, so
1700 the usual case is to scan all the elements. Hence a single
1701 loop for both const and side effects, rather than one loop
1702 each (with early outs). */
1703 if (!TREE_CONSTANT (elt->value))
1704 constant_p = false;
1705 if (TREE_SIDE_EFFECTS (elt->value))
1706 side_effects_p = true;
1707 }
1708
1709 TREE_SIDE_EFFECTS (c) = side_effects_p;
1710 TREE_CONSTANT (c) = constant_p;
1711
1712 return c;
1713 }
1714
1715 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1716 INDEX and VALUE. */
1717 tree
1718 build_constructor_single (tree type, tree index, tree value)
1719 {
1720 vec<constructor_elt, va_gc> *v;
1721 constructor_elt elt = {index, value};
1722
1723 vec_alloc (v, 1);
1724 v->quick_push (elt);
1725
1726 return build_constructor (type, v);
1727 }
1728
1729
1730 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1731 are in a list pointed to by VALS. */
1732 tree
1733 build_constructor_from_list (tree type, tree vals)
1734 {
1735 tree t;
1736 vec<constructor_elt, va_gc> *v = NULL;
1737
1738 if (vals)
1739 {
1740 vec_alloc (v, list_length (vals));
1741 for (t = vals; t; t = TREE_CHAIN (t))
1742 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1743 }
1744
1745 return build_constructor (type, v);
1746 }
1747
1748 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1749 of elements, provided as index/value pairs. */
1750
1751 tree
1752 build_constructor_va (tree type, int nelts, ...)
1753 {
1754 vec<constructor_elt, va_gc> *v = NULL;
1755 va_list p;
1756
1757 va_start (p, nelts);
1758 vec_alloc (v, nelts);
1759 while (nelts--)
1760 {
1761 tree index = va_arg (p, tree);
1762 tree value = va_arg (p, tree);
1763 CONSTRUCTOR_APPEND_ELT (v, index, value);
1764 }
1765 va_end (p);
1766 return build_constructor (type, v);
1767 }
1768
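/* A minimal sketch, assuming ARRAY_TYPE, ELT0 and ELT1 are existing
   trees and the elements are constants: each element is passed as an
   index/value pair, with NULL_TREE as the index meaning the next
   position:

     tree ctor = build_constructor_va (array_type, 2,
				       NULL_TREE, elt0,
				       NULL_TREE, elt1);
     gcc_assert (TREE_CONSTANT (ctor));  */
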
1769 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1770
1771 tree
1772 build_fixed (tree type, FIXED_VALUE_TYPE f)
1773 {
1774 tree v;
1775 FIXED_VALUE_TYPE *fp;
1776
1777 v = make_node (FIXED_CST);
1778 fp = ggc_alloc<fixed_value> ();
1779 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1780
1781 TREE_TYPE (v) = type;
1782 TREE_FIXED_CST_PTR (v) = fp;
1783 return v;
1784 }
1785
1786 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1787
1788 tree
1789 build_real (tree type, REAL_VALUE_TYPE d)
1790 {
1791 tree v;
1792 REAL_VALUE_TYPE *dp;
1793 int overflow = 0;
1794
1795 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1796 Consider doing it via real_convert now. */
1797
1798 v = make_node (REAL_CST);
1799 dp = ggc_alloc<real_value> ();
1800 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1801
1802 TREE_TYPE (v) = type;
1803 TREE_REAL_CST_PTR (v) = dp;
1804 TREE_OVERFLOW (v) = overflow;
1805 return v;
1806 }
1807
 1808 /* Return a REAL_VALUE_TYPE whose value is the integer value of the
 1809 INTEGER_CST node I, converted to the floating-point mode of TYPE. */
1810
1811 REAL_VALUE_TYPE
1812 real_value_from_int_cst (const_tree type, const_tree i)
1813 {
1814 REAL_VALUE_TYPE d;
1815
1816 /* Clear all bits of the real value type so that we can later do
1817 bitwise comparisons to see if two values are the same. */
1818 memset (&d, 0, sizeof d);
1819
1820 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1821 TYPE_SIGN (TREE_TYPE (i)));
1822 return d;
1823 }
1824
1825 /* Given a tree representing an integer constant I, return a tree
1826 representing the same value as a floating-point constant of type TYPE. */
1827
1828 tree
1829 build_real_from_int_cst (tree type, const_tree i)
1830 {
1831 tree v;
1832 int overflow = TREE_OVERFLOW (i);
1833
1834 v = build_real (type, real_value_from_int_cst (type, i));
1835
1836 TREE_OVERFLOW (v) |= overflow;
1837 return v;
1838 }
1839
1840 /* Return a newly constructed STRING_CST node whose value is
1841 the LEN characters at STR.
1842 Note that for a C string literal, LEN should include the trailing NUL.
1843 The TREE_TYPE is not initialized. */
1844
1845 tree
1846 build_string (int len, const char *str)
1847 {
1848 tree s;
1849 size_t length;
1850
1851 /* Do not waste bytes provided by padding of struct tree_string. */
1852 length = len + offsetof (struct tree_string, str) + 1;
1853
1854 record_node_allocation_statistics (STRING_CST, length);
1855
1856 s = (tree) ggc_internal_alloc (length);
1857
1858 memset (s, 0, sizeof (struct tree_typed));
1859 TREE_SET_CODE (s, STRING_CST);
1860 TREE_CONSTANT (s) = 1;
1861 TREE_STRING_LENGTH (s) = len;
1862 memcpy (s->string.str, str, len);
1863 s->string.str[len] = '\0';
1864
1865 return s;
1866 }
1867
1868 /* Return a newly constructed COMPLEX_CST node whose value is
1869 specified by the real and imaginary parts REAL and IMAG.
1870 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1871 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1872
1873 tree
1874 build_complex (tree type, tree real, tree imag)
1875 {
1876 tree t = make_node (COMPLEX_CST);
1877
1878 TREE_REALPART (t) = real;
1879 TREE_IMAGPART (t) = imag;
1880 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1881 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1882 return t;
1883 }
1884
1885 /* Return a constant of arithmetic type TYPE which is the
1886 multiplicative identity of the set TYPE. */
1887
1888 tree
1889 build_one_cst (tree type)
1890 {
1891 switch (TREE_CODE (type))
1892 {
1893 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1894 case POINTER_TYPE: case REFERENCE_TYPE:
1895 case OFFSET_TYPE:
1896 return build_int_cst (type, 1);
1897
1898 case REAL_TYPE:
1899 return build_real (type, dconst1);
1900
1901 case FIXED_POINT_TYPE:
1902 /* We can only generate 1 for accum types. */
1903 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1904 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1905
1906 case VECTOR_TYPE:
1907 {
1908 tree scalar = build_one_cst (TREE_TYPE (type));
1909
1910 return build_vector_from_val (type, scalar);
1911 }
1912
1913 case COMPLEX_TYPE:
1914 return build_complex (type,
1915 build_one_cst (TREE_TYPE (type)),
1916 build_zero_cst (TREE_TYPE (type)));
1917
1918 default:
1919 gcc_unreachable ();
1920 }
1921 }
1922
1923 /* Return an integer of type TYPE containing all 1's in as much precision as
1924 it contains, or a complex or vector whose subparts are such integers. */
1925
1926 tree
1927 build_all_ones_cst (tree type)
1928 {
1929 if (TREE_CODE (type) == COMPLEX_TYPE)
1930 {
1931 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1932 return build_complex (type, scalar, scalar);
1933 }
1934 else
1935 return build_minus_one_cst (type);
1936 }
1937
1938 /* Return a constant of arithmetic type TYPE which is the
1939 opposite of the multiplicative identity of the set TYPE. */
1940
1941 tree
1942 build_minus_one_cst (tree type)
1943 {
1944 switch (TREE_CODE (type))
1945 {
1946 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1947 case POINTER_TYPE: case REFERENCE_TYPE:
1948 case OFFSET_TYPE:
1949 return build_int_cst (type, -1);
1950
1951 case REAL_TYPE:
1952 return build_real (type, dconstm1);
1953
1954 case FIXED_POINT_TYPE:
1955 /* We can only generate 1 for accum types. */
1956 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1957 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1958 TYPE_MODE (type)));
1959
1960 case VECTOR_TYPE:
1961 {
1962 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1963
1964 return build_vector_from_val (type, scalar);
1965 }
1966
1967 case COMPLEX_TYPE:
1968 return build_complex (type,
1969 build_minus_one_cst (TREE_TYPE (type)),
1970 build_zero_cst (TREE_TYPE (type)));
1971
1972 default:
1973 gcc_unreachable ();
1974 }
1975 }
1976
1977 /* Build 0 constant of type TYPE. This is used by constructor folding
1978 and thus the constant should be represented in memory by
1979 zero(es). */
1980
1981 tree
1982 build_zero_cst (tree type)
1983 {
1984 switch (TREE_CODE (type))
1985 {
1986 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1987 case POINTER_TYPE: case REFERENCE_TYPE:
1988 case OFFSET_TYPE: case NULLPTR_TYPE:
1989 return build_int_cst (type, 0);
1990
1991 case REAL_TYPE:
1992 return build_real (type, dconst0);
1993
1994 case FIXED_POINT_TYPE:
1995 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1996
1997 case VECTOR_TYPE:
1998 {
1999 tree scalar = build_zero_cst (TREE_TYPE (type));
2000
2001 return build_vector_from_val (type, scalar);
2002 }
2003
2004 case COMPLEX_TYPE:
2005 {
2006 tree zero = build_zero_cst (TREE_TYPE (type));
2007
2008 return build_complex (type, zero, zero);
2009 }
2010
2011 default:
2012 if (!AGGREGATE_TYPE_P (type))
2013 return fold_convert (type, integer_zero_node);
2014 return build_constructor (type, NULL);
2015 }
2016 }
2017
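/* Illustrative usage (a sketch, not part of the original source): the
   *_cst builders above recurse through complex and vector element types,
   so a single call handles compound types as well; the local names are
   hypothetical.

     tree zero = build_zero_cst (integer_type_node);
     tree one  = build_one_cst (complex_double_type_node);
     tree mone = build_minus_one_cst (double_type_node);

   Here ZERO is the integer constant 0, ONE is the complex constant
   1.0 + 0.0i built via build_complex, and MONE is the REAL_CST -1.0.  */
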
2018
2019 /* Build a BINFO with LEN language slots. */
2020
2021 tree
2022 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2023 {
2024 tree t;
2025 size_t length = (offsetof (struct tree_binfo, base_binfos)
2026 + vec<tree, va_gc>::embedded_size (base_binfos));
2027
2028 record_node_allocation_statistics (TREE_BINFO, length);
2029
2030 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2031
2032 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2033
2034 TREE_SET_CODE (t, TREE_BINFO);
2035
2036 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2037
2038 return t;
2039 }
2040
2041 /* Create a CASE_LABEL_EXPR tree node and return it. */
2042
2043 tree
2044 build_case_label (tree low_value, tree high_value, tree label_decl)
2045 {
2046 tree t = make_node (CASE_LABEL_EXPR);
2047
2048 TREE_TYPE (t) = void_type_node;
2049 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2050
2051 CASE_LOW (t) = low_value;
2052 CASE_HIGH (t) = high_value;
2053 CASE_LABEL (t) = label_decl;
2054 CASE_CHAIN (t) = NULL_TREE;
2055
2056 return t;
2057 }
2058
2059 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2060 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2061 The latter determines the length of the HOST_WIDE_INT vector. */
2062
2063 tree
2064 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2065 {
2066 tree t;
2067 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2068 + sizeof (struct tree_int_cst));
2069
2070 gcc_assert (len);
2071 record_node_allocation_statistics (INTEGER_CST, length);
2072
2073 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2074
2075 TREE_SET_CODE (t, INTEGER_CST);
2076 TREE_INT_CST_NUNITS (t) = len;
2077 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2078 /* to_offset can only be applied to trees that are offset_int-sized
2079 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2080 must be exactly the precision of offset_int and so LEN is correct. */
2081 if (ext_len <= OFFSET_INT_ELTS)
2082 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2083 else
2084 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2085
2086 TREE_CONSTANT (t) = 1;
2087
2088 return t;
2089 }
2090
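/* Worked example (illustrative, not from the original source): on a host
   where HOST_WIDE_INT is 64 bits, a call with EXT_LEN == 2 allocates
   sizeof (struct tree_int_cst) + 1 * sizeof (HOST_WIDE_INT) bytes, i.e.
   one HOST_WIDE_INT element beyond the one embedded in the structure.  */
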
2091 /* Build a newly constructed TREE_VEC node of length LEN. */
2092
2093 tree
2094 make_tree_vec_stat (int len MEM_STAT_DECL)
2095 {
2096 tree t;
2097 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2098
2099 record_node_allocation_statistics (TREE_VEC, length);
2100
2101 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2102
2103 TREE_SET_CODE (t, TREE_VEC);
2104 TREE_VEC_LENGTH (t) = len;
2105
2106 return t;
2107 }
2108
2109 /* Grow a TREE_VEC node to new length LEN. */
2110
2111 tree
2112 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2113 {
2114 gcc_assert (TREE_CODE (v) == TREE_VEC);
2115
2116 int oldlen = TREE_VEC_LENGTH (v);
2117 gcc_assert (len > oldlen);
2118
2119 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2120 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2121
2122 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2123
2124 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2125
2126 TREE_VEC_LENGTH (v) = len;
2127
2128 return v;
2129 }
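/* Illustrative usage (a sketch, not part of the original source); callers
   normally reach these through the make_tree_vec / grow_tree_vec wrappers,
   and "v" is a hypothetical local.

     tree v = make_tree_vec (3);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     v = grow_tree_vec (v, 5);

   After the last call TREE_VEC_LENGTH (v) is 5; the vector may have been
   reallocated, so the returned pointer must be used.  */
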
2130 \f
2131 /* Return 1 if EXPR is the integer constant zero or a complex constant
2132 of zero. */
2133
2134 int
2135 integer_zerop (const_tree expr)
2136 {
2137 STRIP_NOPS (expr);
2138
2139 switch (TREE_CODE (expr))
2140 {
2141 case INTEGER_CST:
2142 return wi::eq_p (expr, 0);
2143 case COMPLEX_CST:
2144 return (integer_zerop (TREE_REALPART (expr))
2145 && integer_zerop (TREE_IMAGPART (expr)));
2146 case VECTOR_CST:
2147 {
2148 unsigned i;
2149 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2150 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2151 return false;
2152 return true;
2153 }
2154 default:
2155 return false;
2156 }
2157 }
2158
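/* Illustrative values (a sketch, not part of the original source):
   integer_zerop looks through nops and recurses into complex and vector
   constants, so both of these return 1:

     integer_zerop (build_int_cst (integer_type_node, 0));
     integer_zerop (build_zero_cst (vector_type));

   where "vector_type" stands for any integral VECTOR_TYPE.  */
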
2159 /* Return 1 if EXPR is the integer constant one or the corresponding
2160 complex constant. */
2161
2162 int
2163 integer_onep (const_tree expr)
2164 {
2165 STRIP_NOPS (expr);
2166
2167 switch (TREE_CODE (expr))
2168 {
2169 case INTEGER_CST:
2170 return wi::eq_p (wi::to_widest (expr), 1);
2171 case COMPLEX_CST:
2172 return (integer_onep (TREE_REALPART (expr))
2173 && integer_zerop (TREE_IMAGPART (expr)));
2174 case VECTOR_CST:
2175 {
2176 unsigned i;
2177 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2178 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2179 return false;
2180 return true;
2181 }
2182 default:
2183 return false;
2184 }
2185 }
2186
2187 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2188 return 1 if every piece is the integer constant one. */
2189
2190 int
2191 integer_each_onep (const_tree expr)
2192 {
2193 STRIP_NOPS (expr);
2194
2195 if (TREE_CODE (expr) == COMPLEX_CST)
2196 return (integer_onep (TREE_REALPART (expr))
2197 && integer_onep (TREE_IMAGPART (expr)));
2198 else
2199 return integer_onep (expr);
2200 }
2201
2202 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2203 it contains, or a complex or vector whose subparts are such integers. */
2204
2205 int
2206 integer_all_onesp (const_tree expr)
2207 {
2208 STRIP_NOPS (expr);
2209
2210 if (TREE_CODE (expr) == COMPLEX_CST
2211 && integer_all_onesp (TREE_REALPART (expr))
2212 && integer_all_onesp (TREE_IMAGPART (expr)))
2213 return 1;
2214
2215 else if (TREE_CODE (expr) == VECTOR_CST)
2216 {
2217 unsigned i;
2218 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2219 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2220 return 0;
2221 return 1;
2222 }
2223
2224 else if (TREE_CODE (expr) != INTEGER_CST)
2225 return 0;
2226
2227 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2228 }
2229
2230 /* Return 1 if EXPR is the integer constant minus one. */
2231
2232 int
2233 integer_minus_onep (const_tree expr)
2234 {
2235 STRIP_NOPS (expr);
2236
2237 if (TREE_CODE (expr) == COMPLEX_CST)
2238 return (integer_all_onesp (TREE_REALPART (expr))
2239 && integer_zerop (TREE_IMAGPART (expr)));
2240 else
2241 return integer_all_onesp (expr);
2242 }
2243
2244 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2245 one bit on). */
2246
2247 int
2248 integer_pow2p (const_tree expr)
2249 {
2250 STRIP_NOPS (expr);
2251
2252 if (TREE_CODE (expr) == COMPLEX_CST
2253 && integer_pow2p (TREE_REALPART (expr))
2254 && integer_zerop (TREE_IMAGPART (expr)))
2255 return 1;
2256
2257 if (TREE_CODE (expr) != INTEGER_CST)
2258 return 0;
2259
2260 return wi::popcount (expr) == 1;
2261 }
2262
2263 /* Return 1 if EXPR is an integer constant other than zero or a
2264 complex constant other than zero. */
2265
2266 int
2267 integer_nonzerop (const_tree expr)
2268 {
2269 STRIP_NOPS (expr);
2270
2271 return ((TREE_CODE (expr) == INTEGER_CST
2272 && !wi::eq_p (expr, 0))
2273 || (TREE_CODE (expr) == COMPLEX_CST
2274 && (integer_nonzerop (TREE_REALPART (expr))
2275 || integer_nonzerop (TREE_IMAGPART (expr)))));
2276 }
2277
2278 /* Return 1 if EXPR is the integer constant one. For vector,
2279 return 1 if every piece is the integer constant minus one
2280 (representing the value TRUE). */
2281
2282 int
2283 integer_truep (const_tree expr)
2284 {
2285 STRIP_NOPS (expr);
2286
2287 if (TREE_CODE (expr) == VECTOR_CST)
2288 return integer_all_onesp (expr);
2289 return integer_onep (expr);
2290 }
2291
2292 /* Return 1 if EXPR is the fixed-point constant zero. */
2293
2294 int
2295 fixed_zerop (const_tree expr)
2296 {
2297 return (TREE_CODE (expr) == FIXED_CST
2298 && TREE_FIXED_CST (expr).data.is_zero ());
2299 }
2300
2301 /* Return the power of two represented by a tree node known to be a
2302 power of two. */
2303
2304 int
2305 tree_log2 (const_tree expr)
2306 {
2307 STRIP_NOPS (expr);
2308
2309 if (TREE_CODE (expr) == COMPLEX_CST)
2310 return tree_log2 (TREE_REALPART (expr));
2311
2312 return wi::exact_log2 (expr);
2313 }
2314
2315 /* Similar, but return the largest integer Y such that 2 ** Y is less
2316 than or equal to EXPR. */
2317
2318 int
2319 tree_floor_log2 (const_tree expr)
2320 {
2321 STRIP_NOPS (expr);
2322
2323 if (TREE_CODE (expr) == COMPLEX_CST)
2324 return tree_log2 (TREE_REALPART (expr));
2325
2326 return wi::floor_log2 (expr);
2327 }
2328
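/* Illustrative values (not part of the original source): for
   build_int_cst (integer_type_node, 8), tree_log2 returns 3; for the
   constant 10, tree_log2 returns -1 (not a power of two) while
   tree_floor_log2 returns 3.  */
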
2329 /* Return number of known trailing zero bits in EXPR, or, if the value of
2330 EXPR is known to be zero, the precision of its type. */
2331
2332 unsigned int
2333 tree_ctz (const_tree expr)
2334 {
2335 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2336 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2337 return 0;
2338
2339 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2340 switch (TREE_CODE (expr))
2341 {
2342 case INTEGER_CST:
2343 ret1 = wi::ctz (expr);
2344 return MIN (ret1, prec);
2345 case SSA_NAME:
2346 ret1 = wi::ctz (get_nonzero_bits (expr));
2347 return MIN (ret1, prec);
2348 case PLUS_EXPR:
2349 case MINUS_EXPR:
2350 case BIT_IOR_EXPR:
2351 case BIT_XOR_EXPR:
2352 case MIN_EXPR:
2353 case MAX_EXPR:
2354 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2355 if (ret1 == 0)
2356 return ret1;
2357 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2358 return MIN (ret1, ret2);
2359 case POINTER_PLUS_EXPR:
2360 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2361 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2362 /* Second operand is sizetype, which could in theory be
2363 wider than the pointer's precision. Make sure we never
2364 return more than prec. */
2365 ret2 = MIN (ret2, prec);
2366 return MIN (ret1, ret2);
2367 case BIT_AND_EXPR:
2368 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2369 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2370 return MAX (ret1, ret2);
2371 case MULT_EXPR:
2372 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2373 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2374 return MIN (ret1 + ret2, prec);
2375 case LSHIFT_EXPR:
2376 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2377 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2378 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2379 {
2380 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2381 return MIN (ret1 + ret2, prec);
2382 }
2383 return ret1;
2384 case RSHIFT_EXPR:
2385 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2386 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2387 {
2388 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2389 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2390 if (ret1 > ret2)
2391 return ret1 - ret2;
2392 }
2393 return 0;
2394 case TRUNC_DIV_EXPR:
2395 case CEIL_DIV_EXPR:
2396 case FLOOR_DIV_EXPR:
2397 case ROUND_DIV_EXPR:
2398 case EXACT_DIV_EXPR:
2399 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2400 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2401 {
2402 int l = tree_log2 (TREE_OPERAND (expr, 1));
2403 if (l >= 0)
2404 {
2405 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2406 ret2 = l;
2407 if (ret1 > ret2)
2408 return ret1 - ret2;
2409 }
2410 }
2411 return 0;
2412 CASE_CONVERT:
2413 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2414 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2415 ret1 = prec;
2416 return MIN (ret1, prec);
2417 case SAVE_EXPR:
2418 return tree_ctz (TREE_OPERAND (expr, 0));
2419 case COND_EXPR:
2420 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2421 if (ret1 == 0)
2422 return 0;
2423 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2424 return MIN (ret1, ret2);
2425 case COMPOUND_EXPR:
2426 return tree_ctz (TREE_OPERAND (expr, 1));
2427 case ADDR_EXPR:
2428 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2429 if (ret1 > BITS_PER_UNIT)
2430 {
2431 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2432 return MIN (ret1, prec);
2433 }
2434 return 0;
2435 default:
2436 return 0;
2437 }
2438 }
2439
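/* Illustrative values (not part of the original source): for the constant
   8, tree_ctz returns 3; for X * 4 it returns tree_ctz (X) + 2 capped at
   the precision; and for X << 3 with a constant shift count it returns
   tree_ctz (X) + 3, again capped at the precision.  */
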
2440 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2441 decimal float constants, so don't return 1 for them. */
2442
2443 int
2444 real_zerop (const_tree expr)
2445 {
2446 STRIP_NOPS (expr);
2447
2448 switch (TREE_CODE (expr))
2449 {
2450 case REAL_CST:
2451 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2452 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2453 case COMPLEX_CST:
2454 return real_zerop (TREE_REALPART (expr))
2455 && real_zerop (TREE_IMAGPART (expr));
2456 case VECTOR_CST:
2457 {
2458 unsigned i;
2459 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2460 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2461 return false;
2462 return true;
2463 }
2464 default:
2465 return false;
2466 }
2467 }
2468
2469 /* Return 1 if EXPR is the real constant one in real or complex form.
2470 Trailing zeroes matter for decimal float constants, so don't return
2471 1 for them. */
2472
2473 int
2474 real_onep (const_tree expr)
2475 {
2476 STRIP_NOPS (expr);
2477
2478 switch (TREE_CODE (expr))
2479 {
2480 case REAL_CST:
2481 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2482 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2483 case COMPLEX_CST:
2484 return real_onep (TREE_REALPART (expr))
2485 && real_zerop (TREE_IMAGPART (expr));
2486 case VECTOR_CST:
2487 {
2488 unsigned i;
2489 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2490 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2491 return false;
2492 return true;
2493 }
2494 default:
2495 return false;
2496 }
2497 }
2498
2499 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2500 matter for decimal float constants, so don't return 1 for them. */
2501
2502 int
2503 real_minus_onep (const_tree expr)
2504 {
2505 STRIP_NOPS (expr);
2506
2507 switch (TREE_CODE (expr))
2508 {
2509 case REAL_CST:
2510 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2511 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2512 case COMPLEX_CST:
2513 return real_minus_onep (TREE_REALPART (expr))
2514 && real_zerop (TREE_IMAGPART (expr));
2515 case VECTOR_CST:
2516 {
2517 unsigned i;
2518 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2519 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2520 return false;
2521 return true;
2522 }
2523 default:
2524 return false;
2525 }
2526 }
2527
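/* Illustrative usage (a sketch, not part of the original source):

     real_onep (build_real (double_type_node, dconst1))

   returns 1, as does real_onep of the complex constant 1.0 + 0.0i or of a
   vector whose elements are all 1.0; for a decimal float 1.0 it returns 0,
   since trailing zeroes are significant there.  */
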
2528 /* Nonzero if EXP is a constant or a cast of a constant. */
2529
2530 int
2531 really_constant_p (const_tree exp)
2532 {
2533 /* This is not quite the same as STRIP_NOPS. It does more. */
2534 while (CONVERT_EXPR_P (exp)
2535 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2536 exp = TREE_OPERAND (exp, 0);
2537 return TREE_CONSTANT (exp);
2538 }
2539 \f
2540 /* Return first list element whose TREE_VALUE is ELEM.
2541 Return 0 if ELEM is not in LIST. */
2542
2543 tree
2544 value_member (tree elem, tree list)
2545 {
2546 while (list)
2547 {
2548 if (elem == TREE_VALUE (list))
2549 return list;
2550 list = TREE_CHAIN (list);
2551 }
2552 return NULL_TREE;
2553 }
2554
2555 /* Return first list element whose TREE_PURPOSE is ELEM.
2556 Return 0 if ELEM is not in LIST. */
2557
2558 tree
2559 purpose_member (const_tree elem, tree list)
2560 {
2561 while (list)
2562 {
2563 if (elem == TREE_PURPOSE (list))
2564 return list;
2565 list = TREE_CHAIN (list);
2566 }
2567 return NULL_TREE;
2568 }
2569
2570 /* Return true if ELEM is in V. */
2571
2572 bool
2573 vec_member (const_tree elem, vec<tree, va_gc> *v)
2574 {
2575 unsigned ix;
2576 tree t;
2577 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2578 if (elem == t)
2579 return true;
2580 return false;
2581 }
2582
2583 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2584 NULL_TREE if CHAIN does not have that many elements. */
2585
2586 tree
2587 chain_index (int idx, tree chain)
2588 {
2589 for (; chain && idx > 0; --idx)
2590 chain = TREE_CHAIN (chain);
2591 return chain;
2592 }
2593
2594 /* Return nonzero if ELEM is part of the chain CHAIN. */
2595
2596 int
2597 chain_member (const_tree elem, const_tree chain)
2598 {
2599 while (chain)
2600 {
2601 if (elem == chain)
2602 return 1;
2603 chain = DECL_CHAIN (chain);
2604 }
2605
2606 return 0;
2607 }
2608
2609 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2610 We expect a null pointer to mark the end of the chain.
2611 This is the Lisp primitive `length'. */
2612
2613 int
2614 list_length (const_tree t)
2615 {
2616 const_tree p = t;
2617 #ifdef ENABLE_TREE_CHECKING
2618 const_tree q = t;
2619 #endif
2620 int len = 0;
2621
2622 while (p)
2623 {
2624 p = TREE_CHAIN (p);
2625 #ifdef ENABLE_TREE_CHECKING
2626 if (len % 2)
2627 q = TREE_CHAIN (q);
2628 gcc_assert (p != q);
2629 #endif
2630 len++;
2631 }
2632
2633 return len;
2634 }
2635
2636 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2637 UNION_TYPE TYPE, or NULL_TREE if none. */
2638
2639 tree
2640 first_field (const_tree type)
2641 {
2642 tree t = TYPE_FIELDS (type);
2643 while (t && TREE_CODE (t) != FIELD_DECL)
2644 t = TREE_CHAIN (t);
2645 return t;
2646 }
2647
2648 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2649 by modifying the last node in chain 1 to point to chain 2.
2650 This is the Lisp primitive `nconc'. */
2651
2652 tree
2653 chainon (tree op1, tree op2)
2654 {
2655 tree t1;
2656
2657 if (!op1)
2658 return op2;
2659 if (!op2)
2660 return op1;
2661
2662 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2663 continue;
2664 TREE_CHAIN (t1) = op2;
2665
2666 #ifdef ENABLE_TREE_CHECKING
2667 {
2668 tree t2;
2669 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2670 gcc_assert (t2 != t1);
2671 }
2672 #endif
2673
2674 return op1;
2675 }
2676
2677 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2678
2679 tree
2680 tree_last (tree chain)
2681 {
2682 tree next;
2683 if (chain)
2684 while ((next = TREE_CHAIN (chain)))
2685 chain = next;
2686 return chain;
2687 }
2688
2689 /* Reverse the order of elements in the chain T,
2690 and return the new head of the chain (old last element). */
2691
2692 tree
2693 nreverse (tree t)
2694 {
2695 tree prev = 0, decl, next;
2696 for (decl = t; decl; decl = next)
2697 {
2698 /* We shouldn't be using this function to reverse BLOCK chains; we
2699 have blocks_nreverse for that. */
2700 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2701 next = TREE_CHAIN (decl);
2702 TREE_CHAIN (decl) = prev;
2703 prev = decl;
2704 }
2705 return prev;
2706 }
2707 \f
2708 /* Return a newly created TREE_LIST node whose
2709 purpose and value fields are PARM and VALUE. */
2710
2711 tree
2712 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2713 {
2714 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2715 TREE_PURPOSE (t) = parm;
2716 TREE_VALUE (t) = value;
2717 return t;
2718 }
2719
2720 /* Build a chain of TREE_LIST nodes from a vector. */
2721
2722 tree
2723 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2724 {
2725 tree ret = NULL_TREE;
2726 tree *pp = &ret;
2727 unsigned int i;
2728 tree t;
2729 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2730 {
2731 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2732 pp = &TREE_CHAIN (*pp);
2733 }
2734 return ret;
2735 }
2736
2737 /* Return a newly created TREE_LIST node whose
2738 purpose and value fields are PURPOSE and VALUE
2739 and whose TREE_CHAIN is CHAIN. */
2740
2741 tree
2742 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2743 {
2744 tree node;
2745
2746 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2747 memset (node, 0, sizeof (struct tree_common));
2748
2749 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2750
2751 TREE_SET_CODE (node, TREE_LIST);
2752 TREE_CHAIN (node) = chain;
2753 TREE_PURPOSE (node) = purpose;
2754 TREE_VALUE (node) = value;
2755 return node;
2756 }
2757
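/* Illustrative usage (a sketch, not part of the original source): TREE_LIST
   chains are often built by consing onto the front and reversing at the
   end; FIRST and SECOND stand for arbitrary trees and "list" is a
   hypothetical local.

     tree list = NULL_TREE;
     list = tree_cons (NULL_TREE, first, list);
     list = tree_cons (NULL_TREE, second, list);
     list = nreverse (list);

   After nreverse, FIRST is at the head and list_length (list) is 2.  */
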
2758 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2759 trees. */
2760
2761 vec<tree, va_gc> *
2762 ctor_to_vec (tree ctor)
2763 {
2764 vec<tree, va_gc> *vec;
2765 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2766 unsigned int ix;
2767 tree val;
2768
2769 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2770 vec->quick_push (val);
2771
2772 return vec;
2773 }
2774 \f
2775 /* Return the size nominally occupied by an object of type TYPE
2776 when it resides in memory. The value is measured in units of bytes,
2777 and its data type is that normally used for type sizes
2778 (which is the first type created by make_signed_type or
2779 make_unsigned_type). */
2780
2781 tree
2782 size_in_bytes (const_tree type)
2783 {
2784 tree t;
2785
2786 if (type == error_mark_node)
2787 return integer_zero_node;
2788
2789 type = TYPE_MAIN_VARIANT (type);
2790 t = TYPE_SIZE_UNIT (type);
2791
2792 if (t == 0)
2793 {
2794 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2795 return size_zero_node;
2796 }
2797
2798 return t;
2799 }
2800
2801 /* Return the size of TYPE (in bytes) as a wide integer
2802 or return -1 if the size can vary or is larger than an integer. */
2803
2804 HOST_WIDE_INT
2805 int_size_in_bytes (const_tree type)
2806 {
2807 tree t;
2808
2809 if (type == error_mark_node)
2810 return 0;
2811
2812 type = TYPE_MAIN_VARIANT (type);
2813 t = TYPE_SIZE_UNIT (type);
2814
2815 if (t && tree_fits_uhwi_p (t))
2816 return TREE_INT_CST_LOW (t);
2817 else
2818 return -1;
2819 }
2820
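/* Illustrative values (not part of the original source): for a complete
   32-bit integer type, int_size_in_bytes returns 4; for an incomplete or
   variable-sized type it returns -1 and the caller must handle that case,
   e.g. by falling back to the tree returned by size_in_bytes.  */
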
2821 /* Return the maximum size of TYPE (in bytes) as a wide integer
2822 or return -1 if the size can vary or is larger than an integer. */
2823
2824 HOST_WIDE_INT
2825 max_int_size_in_bytes (const_tree type)
2826 {
2827 HOST_WIDE_INT size = -1;
2828 tree size_tree;
2829
2830 /* If this is an array type, check for a possible MAX_SIZE attached. */
2831
2832 if (TREE_CODE (type) == ARRAY_TYPE)
2833 {
2834 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2835
2836 if (size_tree && tree_fits_uhwi_p (size_tree))
2837 size = tree_to_uhwi (size_tree);
2838 }
2839
2840 /* If we still haven't been able to get a size, see if the language
2841 can compute a maximum size. */
2842
2843 if (size == -1)
2844 {
2845 size_tree = lang_hooks.types.max_size (type);
2846
2847 if (size_tree && tree_fits_uhwi_p (size_tree))
2848 size = tree_to_uhwi (size_tree);
2849 }
2850
2851 return size;
2852 }
2853 \f
2854 /* Return the bit position of FIELD, in bits from the start of the record.
2855 This is a tree of type bitsizetype. */
2856
2857 tree
2858 bit_position (const_tree field)
2859 {
2860 return bit_from_pos (DECL_FIELD_OFFSET (field),
2861 DECL_FIELD_BIT_OFFSET (field));
2862 }
2863 \f
2864 /* Return the byte position of FIELD, in bytes from the start of the record.
2865 This is a tree of type sizetype. */
2866
2867 tree
2868 byte_position (const_tree field)
2869 {
2870 return byte_from_pos (DECL_FIELD_OFFSET (field),
2871 DECL_FIELD_BIT_OFFSET (field));
2872 }
2873
2874 /* Likewise, but return as an integer. It must be representable in
2875 that way (since it could be a signed value, we don't have the
2876 option of returning -1 like int_size_in_bytes can). */
2877
2878 HOST_WIDE_INT
2879 int_byte_position (const_tree field)
2880 {
2881 return tree_to_shwi (byte_position (field));
2882 }
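/* Illustrative relation (not part of the original source): both positions
   are derived from DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET, so for a
   byte-aligned FIELD_DECL F, bit_position (F) equals byte_position (F)
   scaled by BITS_PER_UNIT.  */
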
2883 \f
2884 /* Return the strictest alignment, in bits, that T is known to have. */
2885
2886 unsigned int
2887 expr_align (const_tree t)
2888 {
2889 unsigned int align0, align1;
2890
2891 switch (TREE_CODE (t))
2892 {
2893 CASE_CONVERT: case NON_LVALUE_EXPR:
2894 /* If we have conversions, we know that the alignment of the
2895 object must meet each of the alignments of the types. */
2896 align0 = expr_align (TREE_OPERAND (t, 0));
2897 align1 = TYPE_ALIGN (TREE_TYPE (t));
2898 return MAX (align0, align1);
2899
2900 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2901 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2902 case CLEANUP_POINT_EXPR:
2903 /* These don't change the alignment of an object. */
2904 return expr_align (TREE_OPERAND (t, 0));
2905
2906 case COND_EXPR:
2907 /* The best we can do is say that the alignment is the least aligned
2908 of the two arms. */
2909 align0 = expr_align (TREE_OPERAND (t, 1));
2910 align1 = expr_align (TREE_OPERAND (t, 2));
2911 return MIN (align0, align1);
2912
2913 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2914 meaningfully; it's always 1. */
2915 case LABEL_DECL: case CONST_DECL:
2916 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2917 case FUNCTION_DECL:
2918 gcc_assert (DECL_ALIGN (t) != 0);
2919 return DECL_ALIGN (t);
2920
2921 default:
2922 break;
2923 }
2924
2925 /* Otherwise take the alignment from that of the type. */
2926 return TYPE_ALIGN (TREE_TYPE (t));
2927 }
2928 \f
2929 /* Return, as a tree node, the number of elements for TYPE (which is an
2930 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2931
2932 tree
2933 array_type_nelts (const_tree type)
2934 {
2935 tree index_type, min, max;
2936
2937 /* If they did it with unspecified bounds, then we should have already
2938 given an error about it before we got here. */
2939 if (! TYPE_DOMAIN (type))
2940 return error_mark_node;
2941
2942 index_type = TYPE_DOMAIN (type);
2943 min = TYPE_MIN_VALUE (index_type);
2944 max = TYPE_MAX_VALUE (index_type);
2945
2946 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2947 if (!max)
2948 return error_mark_node;
2949
2950 return (integer_zerop (min)
2951 ? max
2952 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2953 }
2954 \f
2955 /* If arg is static -- a reference to an object in static storage -- then
2956 return the object. This is not the same as the C meaning of `static'.
2957 If arg isn't static, return NULL. */
2958
2959 tree
2960 staticp (tree arg)
2961 {
2962 switch (TREE_CODE (arg))
2963 {
2964 case FUNCTION_DECL:
2965 /* Nested functions are static, even though taking their address will
2966 involve a trampoline as we unnest the nested function and create
2967 the trampoline on the tree level. */
2968 return arg;
2969
2970 case VAR_DECL:
2971 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2972 && ! DECL_THREAD_LOCAL_P (arg)
2973 && ! DECL_DLLIMPORT_P (arg)
2974 ? arg : NULL);
2975
2976 case CONST_DECL:
2977 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2978 ? arg : NULL);
2979
2980 case CONSTRUCTOR:
2981 return TREE_STATIC (arg) ? arg : NULL;
2982
2983 case LABEL_DECL:
2984 case STRING_CST:
2985 return arg;
2986
2987 case COMPONENT_REF:
2988 /* If the thing being referenced is not a field, then it is
2989 something language specific. */
2990 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2991
2992 /* If we are referencing a bitfield, we can't evaluate an
2993 ADDR_EXPR at compile time and so it isn't a constant. */
2994 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2995 return NULL;
2996
2997 return staticp (TREE_OPERAND (arg, 0));
2998
2999 case BIT_FIELD_REF:
3000 return NULL;
3001
3002 case INDIRECT_REF:
3003 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3004
3005 case ARRAY_REF:
3006 case ARRAY_RANGE_REF:
3007 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3008 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3009 return staticp (TREE_OPERAND (arg, 0));
3010 else
3011 return NULL;
3012
3013 case COMPOUND_LITERAL_EXPR:
3014 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3015
3016 default:
3017 return NULL;
3018 }
3019 }
3020
3021 \f
3022
3023
3024 /* Return whether OP is a DECL whose address is function-invariant. */
3025
3026 bool
3027 decl_address_invariant_p (const_tree op)
3028 {
3029 /* The conditions below are slightly less strict than the one in
3030 staticp. */
3031
3032 switch (TREE_CODE (op))
3033 {
3034 case PARM_DECL:
3035 case RESULT_DECL:
3036 case LABEL_DECL:
3037 case FUNCTION_DECL:
3038 return true;
3039
3040 case VAR_DECL:
3041 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3042 || DECL_THREAD_LOCAL_P (op)
3043 || DECL_CONTEXT (op) == current_function_decl
3044 || decl_function_context (op) == current_function_decl)
3045 return true;
3046 break;
3047
3048 case CONST_DECL:
3049 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3050 || decl_function_context (op) == current_function_decl)
3051 return true;
3052 break;
3053
3054 default:
3055 break;
3056 }
3057
3058 return false;
3059 }
3060
3061 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3062
3063 bool
3064 decl_address_ip_invariant_p (const_tree op)
3065 {
3066 /* The conditions below are slightly less strict than the one in
3067 staticp. */
3068
3069 switch (TREE_CODE (op))
3070 {
3071 case LABEL_DECL:
3072 case FUNCTION_DECL:
3073 case STRING_CST:
3074 return true;
3075
3076 case VAR_DECL:
3077 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3078 && !DECL_DLLIMPORT_P (op))
3079 || DECL_THREAD_LOCAL_P (op))
3080 return true;
3081 break;
3082
3083 case CONST_DECL:
3084 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3085 return true;
3086 break;
3087
3088 default:
3089 break;
3090 }
3091
3092 return false;
3093 }
3094
3095
3096 /* Return true if T is function-invariant (internal function, does
3097 not handle arithmetic; that's handled in skip_simple_arithmetic and
3098 tree_invariant_p). */
3099
3100 static bool tree_invariant_p (tree t);
3101
3102 static bool
3103 tree_invariant_p_1 (tree t)
3104 {
3105 tree op;
3106
3107 if (TREE_CONSTANT (t)
3108 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3109 return true;
3110
3111 switch (TREE_CODE (t))
3112 {
3113 case SAVE_EXPR:
3114 return true;
3115
3116 case ADDR_EXPR:
3117 op = TREE_OPERAND (t, 0);
3118 while (handled_component_p (op))
3119 {
3120 switch (TREE_CODE (op))
3121 {
3122 case ARRAY_REF:
3123 case ARRAY_RANGE_REF:
3124 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3125 || TREE_OPERAND (op, 2) != NULL_TREE
3126 || TREE_OPERAND (op, 3) != NULL_TREE)
3127 return false;
3128 break;
3129
3130 case COMPONENT_REF:
3131 if (TREE_OPERAND (op, 2) != NULL_TREE)
3132 return false;
3133 break;
3134
3135 default:;
3136 }
3137 op = TREE_OPERAND (op, 0);
3138 }
3139
3140 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3141
3142 default:
3143 break;
3144 }
3145
3146 return false;
3147 }
3148
3149 /* Return true if T is function-invariant. */
3150
3151 static bool
3152 tree_invariant_p (tree t)
3153 {
3154 tree inner = skip_simple_arithmetic (t);
3155 return tree_invariant_p_1 (inner);
3156 }
3157
3158 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3159 Do this to any expression which may be used in more than one place,
3160 but must be evaluated only once.
3161
3162 Normally, expand_expr would reevaluate the expression each time.
3163 Calling save_expr produces something that is evaluated and recorded
3164 the first time expand_expr is called on it. Subsequent calls to
3165 expand_expr just reuse the recorded value.
3166
3167 The call to expand_expr that generates code that actually computes
3168 the value is the first call *at compile time*. Subsequent calls
3169 *at compile time* generate code to use the saved value.
3170 This produces correct result provided that *at run time* control
3171 always flows through the insns made by the first expand_expr
3172 before reaching the other places where the save_expr was evaluated.
3173 You, the caller of save_expr, must make sure this is so.
3174
3175 Constants, and certain read-only nodes, are returned with no
3176 SAVE_EXPR because that is safe. Expressions containing placeholders
3177 are not touched; see tree.def for an explanation of what these
3178 are used for. */
3179
3180 tree
3181 save_expr (tree expr)
3182 {
3183 tree t = fold (expr);
3184 tree inner;
3185
3186 /* If the tree evaluates to a constant, then we don't want to hide that
3187 fact (i.e. this allows further folding, and direct checks for constants).
3188 However, a read-only object that has side effects cannot be bypassed.
3189 Since it is no problem to reevaluate literals, we just return the
3190 literal node. */
3191 inner = skip_simple_arithmetic (t);
3192 if (TREE_CODE (inner) == ERROR_MARK)
3193 return inner;
3194
3195 if (tree_invariant_p_1 (inner))
3196 return t;
3197
3198 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3199 it means that the size or offset of some field of an object depends on
3200 the value within another field.
3201
3202 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3203 and some variable since it would then need to be both evaluated once and
3204 evaluated more than once. Front-ends must assure this case cannot
3205 happen by surrounding any such subexpressions in their own SAVE_EXPR
3206 and forcing evaluation at the proper time. */
3207 if (contains_placeholder_p (inner))
3208 return t;
3209
3210 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3211 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3212
3213 /* This expression might be placed ahead of a jump to ensure that the
3214 value was computed on both sides of the jump. So make sure it isn't
3215 eliminated as dead. */
3216 TREE_SIDE_EFFECTS (t) = 1;
3217 return t;
3218 }
3219
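/* Illustrative usage (a sketch, not part of the original source): wrapping
   a subexpression that will be used twice; X stands for an arbitrary
   expression tree.

     tree saved = save_expr (x);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (saved), saved, saved);

   Both operands of SUM share the single SAVE_EXPR, so X is evaluated only
   once at run time, subject to the control-flow caveat described above.  */
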
3220 /* Look inside EXPR into any simple arithmetic operations. Return the
3221 outermost non-arithmetic or non-invariant node. */
3222
3223 tree
3224 skip_simple_arithmetic (tree expr)
3225 {
3226 /* We don't care about whether this can be used as an lvalue in this
3227 context. */
3228 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3229 expr = TREE_OPERAND (expr, 0);
3230
3231 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3232 a constant, it will be more efficient to not make another SAVE_EXPR since
3233 it will allow better simplification and GCSE will be able to merge the
3234 computations if they actually occur. */
3235 while (true)
3236 {
3237 if (UNARY_CLASS_P (expr))
3238 expr = TREE_OPERAND (expr, 0);
3239 else if (BINARY_CLASS_P (expr))
3240 {
3241 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3242 expr = TREE_OPERAND (expr, 0);
3243 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3244 expr = TREE_OPERAND (expr, 1);
3245 else
3246 break;
3247 }
3248 else
3249 break;
3250 }
3251
3252 return expr;
3253 }
3254
3255 /* Look inside EXPR into simple arithmetic operations involving constants.
3256 Return the outermost non-arithmetic or non-constant node. */
3257
3258 tree
3259 skip_simple_constant_arithmetic (tree expr)
3260 {
3261 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3262 expr = TREE_OPERAND (expr, 0);
3263
3264 while (true)
3265 {
3266 if (UNARY_CLASS_P (expr))
3267 expr = TREE_OPERAND (expr, 0);
3268 else if (BINARY_CLASS_P (expr))
3269 {
3270 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3271 expr = TREE_OPERAND (expr, 0);
3272 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3273 expr = TREE_OPERAND (expr, 1);
3274 else
3275 break;
3276 }
3277 else
3278 break;
3279 }
3280
3281 return expr;
3282 }
3283
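/* Illustrative behavior (not part of the original source): given a tree for
   (x + 1) * 2 where only the literals are constant, the loop above steps
   through the MULT_EXPR and then the PLUS_EXPR and returns the tree for x,
   the outermost non-arithmetic, non-constant node.  */
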
3284 /* Return which tree structure is used by T. */
3285
3286 enum tree_node_structure_enum
3287 tree_node_structure (const_tree t)
3288 {
3289 const enum tree_code code = TREE_CODE (t);
3290 return tree_node_structure_for_code (code);
3291 }
3292
3293 /* Set various status flags when building a CALL_EXPR object T. */
3294
3295 static void
3296 process_call_operands (tree t)
3297 {
3298 bool side_effects = TREE_SIDE_EFFECTS (t);
3299 bool read_only = false;
3300 int i = call_expr_flags (t);
3301
3302 /* Calls have side-effects, except those to const or pure functions. */
3303 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3304 side_effects = true;
3305 /* Propagate TREE_READONLY of arguments for const functions. */
3306 if (i & ECF_CONST)
3307 read_only = true;
3308
3309 if (!side_effects || read_only)
3310 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3311 {
3312 tree op = TREE_OPERAND (t, i);
3313 if (op && TREE_SIDE_EFFECTS (op))
3314 side_effects = true;
3315 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3316 read_only = false;
3317 }
3318
3319 TREE_SIDE_EFFECTS (t) = side_effects;
3320 TREE_READONLY (t) = read_only;
3321 }
3322 \f
3323 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3324 size or offset that depends on a field within a record. */
3325
3326 bool
3327 contains_placeholder_p (const_tree exp)
3328 {
3329 enum tree_code code;
3330
3331 if (!exp)
3332 return 0;
3333
3334 code = TREE_CODE (exp);
3335 if (code == PLACEHOLDER_EXPR)
3336 return 1;
3337
3338 switch (TREE_CODE_CLASS (code))
3339 {
3340 case tcc_reference:
3341 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3342 position computations since they will be converted into a
3343 WITH_RECORD_EXPR involving the reference, which we assume
3344 here will be valid. */
3345 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3346
3347 case tcc_exceptional:
3348 if (code == TREE_LIST)
3349 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3350 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3351 break;
3352
3353 case tcc_unary:
3354 case tcc_binary:
3355 case tcc_comparison:
3356 case tcc_expression:
3357 switch (code)
3358 {
3359 case COMPOUND_EXPR:
3360 /* Ignoring the first operand isn't quite right, but works best. */
3361 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3362
3363 case COND_EXPR:
3364 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3365 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3366 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3367
3368 case SAVE_EXPR:
3369 /* The save_expr function never wraps anything containing
3370 a PLACEHOLDER_EXPR. */
3371 return 0;
3372
3373 default:
3374 break;
3375 }
3376
3377 switch (TREE_CODE_LENGTH (code))
3378 {
3379 case 1:
3380 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3381 case 2:
3382 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3383 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3384 default:
3385 return 0;
3386 }
3387
3388 case tcc_vl_exp:
3389 switch (code)
3390 {
3391 case CALL_EXPR:
3392 {
3393 const_tree arg;
3394 const_call_expr_arg_iterator iter;
3395 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3396 if (CONTAINS_PLACEHOLDER_P (arg))
3397 return 1;
3398 return 0;
3399 }
3400 default:
3401 return 0;
3402 }
3403
3404 default:
3405 return 0;
3406 }
3407 return 0;
3408 }
3409
3410 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3411 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3412 field positions. */
3413
3414 static bool
3415 type_contains_placeholder_1 (const_tree type)
3416 {
3417 /* If the size contains a placeholder or the parent type (component type in
3418 the case of arrays) involves a placeholder, this type does. */
3419 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3420 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3421 || (!POINTER_TYPE_P (type)
3422 && TREE_TYPE (type)
3423 && type_contains_placeholder_p (TREE_TYPE (type))))
3424 return true;
3425
3426 /* Now do type-specific checks. Note that the last part of the check above
3427 greatly limits what we have to do below. */
3428 switch (TREE_CODE (type))
3429 {
3430 case VOID_TYPE:
3431 case POINTER_BOUNDS_TYPE:
3432 case COMPLEX_TYPE:
3433 case ENUMERAL_TYPE:
3434 case BOOLEAN_TYPE:
3435 case POINTER_TYPE:
3436 case OFFSET_TYPE:
3437 case REFERENCE_TYPE:
3438 case METHOD_TYPE:
3439 case FUNCTION_TYPE:
3440 case VECTOR_TYPE:
3441 case NULLPTR_TYPE:
3442 return false;
3443
3444 case INTEGER_TYPE:
3445 case REAL_TYPE:
3446 case FIXED_POINT_TYPE:
3447 /* Here we just check the bounds. */
3448 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3449 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3450
3451 case ARRAY_TYPE:
3452 /* We have already checked the component type above, so just check the
3453 domain type. */
3454 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3455
3456 case RECORD_TYPE:
3457 case UNION_TYPE:
3458 case QUAL_UNION_TYPE:
3459 {
3460 tree field;
3461
3462 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3463 if (TREE_CODE (field) == FIELD_DECL
3464 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3465 || (TREE_CODE (type) == QUAL_UNION_TYPE
3466 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3467 || type_contains_placeholder_p (TREE_TYPE (field))))
3468 return true;
3469
3470 return false;
3471 }
3472
3473 default:
3474 gcc_unreachable ();
3475 }
3476 }
3477
3478 /* Wrapper around above function used to cache its result. */
3479
3480 bool
3481 type_contains_placeholder_p (tree type)
3482 {
3483 bool result;
3484
3485 /* If the contains_placeholder_bits field has been initialized,
3486 then we know the answer. */
3487 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3488 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3489
3490 /* Indicate that we've seen this type node, and the answer is false.
3491 This is what we want to return if we run into recursion via fields. */
3492 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3493
3494 /* Compute the real value. */
3495 result = type_contains_placeholder_1 (type);
3496
3497 /* Store the real value. */
3498 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3499
3500 return result;
3501 }
3502 \f
3503 /* Push tree EXP onto vector QUEUE if it is not already present. */
3504
3505 static void
3506 push_without_duplicates (tree exp, vec<tree> *queue)
3507 {
3508 unsigned int i;
3509 tree iter;
3510
3511 FOR_EACH_VEC_ELT (*queue, i, iter)
3512 if (simple_cst_equal (iter, exp) == 1)
3513 break;
3514
3515 if (!iter)
3516 queue->safe_push (exp);
3517 }
3518
3519 /* Given a tree EXP, find all occurrences of references to fields
3520 in a PLACEHOLDER_EXPR and place them in vector REFS without
3521 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3522 we assume here that EXP contains only arithmetic expressions
3523 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3524 argument list. */
3525
3526 void
3527 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3528 {
3529 enum tree_code code = TREE_CODE (exp);
3530 tree inner;
3531 int i;
3532
3533 /* We handle TREE_LIST and COMPONENT_REF separately. */
3534 if (code == TREE_LIST)
3535 {
3536 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3537 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3538 }
3539 else if (code == COMPONENT_REF)
3540 {
3541 for (inner = TREE_OPERAND (exp, 0);
3542 REFERENCE_CLASS_P (inner);
3543 inner = TREE_OPERAND (inner, 0))
3544 ;
3545
3546 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3547 push_without_duplicates (exp, refs);
3548 else
3549 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3550 }
3551 else
3552 switch (TREE_CODE_CLASS (code))
3553 {
3554 case tcc_constant:
3555 break;
3556
3557 case tcc_declaration:
3558 /* Variables allocated to static storage can stay. */
3559 if (!TREE_STATIC (exp))
3560 push_without_duplicates (exp, refs);
3561 break;
3562
3563 case tcc_expression:
3564 /* This is the pattern built in ada/make_aligning_type. */
3565 if (code == ADDR_EXPR
3566 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3567 {
3568 push_without_duplicates (exp, refs);
3569 break;
3570 }
3571
3572 /* Fall through... */
3573
3574 case tcc_exceptional:
3575 case tcc_unary:
3576 case tcc_binary:
3577 case tcc_comparison:
3578 case tcc_reference:
3579 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3580 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3581 break;
3582
3583 case tcc_vl_exp:
3584 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3585 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3586 break;
3587
3588 default:
3589 gcc_unreachable ();
3590 }
3591 }
3592
3593 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3594 return a tree with all occurrences of references to F in a
3595 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3596 CONST_DECLs. Note that we assume here that EXP contains only
3597 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3598 occurring only in their argument list. */
3599
3600 tree
3601 substitute_in_expr (tree exp, tree f, tree r)
3602 {
3603 enum tree_code code = TREE_CODE (exp);
3604 tree op0, op1, op2, op3;
3605 tree new_tree;
3606
3607 /* We handle TREE_LIST and COMPONENT_REF separately. */
3608 if (code == TREE_LIST)
3609 {
3610 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3611 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3612 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3613 return exp;
3614
3615 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3616 }
3617 else if (code == COMPONENT_REF)
3618 {
3619 tree inner;
3620
3621 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3622 and it is the right field, replace it with R. */
3623 for (inner = TREE_OPERAND (exp, 0);
3624 REFERENCE_CLASS_P (inner);
3625 inner = TREE_OPERAND (inner, 0))
3626 ;
3627
3628 /* The field. */
3629 op1 = TREE_OPERAND (exp, 1);
3630
3631 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3632 return r;
3633
3634 /* If this expression hasn't been completed yet, leave it alone. */
3635 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3636 return exp;
3637
3638 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3639 if (op0 == TREE_OPERAND (exp, 0))
3640 return exp;
3641
3642 new_tree
3643 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3644 }
3645 else
3646 switch (TREE_CODE_CLASS (code))
3647 {
3648 case tcc_constant:
3649 return exp;
3650
3651 case tcc_declaration:
3652 if (exp == f)
3653 return r;
3654 else
3655 return exp;
3656
3657 case tcc_expression:
3658 if (exp == f)
3659 return r;
3660
3661 /* Fall through... */
3662
3663 case tcc_exceptional:
3664 case tcc_unary:
3665 case tcc_binary:
3666 case tcc_comparison:
3667 case tcc_reference:
3668 switch (TREE_CODE_LENGTH (code))
3669 {
3670 case 0:
3671 return exp;
3672
3673 case 1:
3674 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3675 if (op0 == TREE_OPERAND (exp, 0))
3676 return exp;
3677
3678 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3679 break;
3680
3681 case 2:
3682 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3683 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3684
3685 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3686 return exp;
3687
3688 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3689 break;
3690
3691 case 3:
3692 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3693 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3694 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3695
3696 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3697 && op2 == TREE_OPERAND (exp, 2))
3698 return exp;
3699
3700 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3701 break;
3702
3703 case 4:
3704 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3705 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3706 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3707 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3708
3709 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3710 && op2 == TREE_OPERAND (exp, 2)
3711 && op3 == TREE_OPERAND (exp, 3))
3712 return exp;
3713
3714 new_tree
3715 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3716 break;
3717
3718 default:
3719 gcc_unreachable ();
3720 }
3721 break;
3722
3723 case tcc_vl_exp:
3724 {
3725 int i;
3726
3727 new_tree = NULL_TREE;
3728
3729 /* If we are trying to replace F with a constant, inline back
3730 functions which do nothing other than compute a value from
3731 the arguments they are passed. This makes it possible to
3732 fold partially or entirely the replacement expression. */
3733 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3734 {
3735 tree t = maybe_inline_call_in_expr (exp);
3736 if (t)
3737 return SUBSTITUTE_IN_EXPR (t, f, r);
3738 }
3739
3740 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3741 {
3742 tree op = TREE_OPERAND (exp, i);
3743 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3744 if (new_op != op)
3745 {
3746 if (!new_tree)
3747 new_tree = copy_node (exp);
3748 TREE_OPERAND (new_tree, i) = new_op;
3749 }
3750 }
3751
3752 if (new_tree)
3753 {
3754 new_tree = fold (new_tree);
3755 if (TREE_CODE (new_tree) == CALL_EXPR)
3756 process_call_operands (new_tree);
3757 }
3758 else
3759 return exp;
3760 }
3761 break;
3762
3763 default:
3764 gcc_unreachable ();
3765 }
3766
3767 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3768
3769 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3770 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3771
3772 return new_tree;
3773 }
3774
3775 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3776 for it within OBJ, a tree that is an object or a chain of references. */
3777
3778 tree
3779 substitute_placeholder_in_expr (tree exp, tree obj)
3780 {
3781 enum tree_code code = TREE_CODE (exp);
3782 tree op0, op1, op2, op3;
3783 tree new_tree;
3784
3785 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3786 in the chain of OBJ. */
3787 if (code == PLACEHOLDER_EXPR)
3788 {
3789 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3790 tree elt;
3791
3792 for (elt = obj; elt != 0;
3793 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3794 || TREE_CODE (elt) == COND_EXPR)
3795 ? TREE_OPERAND (elt, 1)
3796 : (REFERENCE_CLASS_P (elt)
3797 || UNARY_CLASS_P (elt)
3798 || BINARY_CLASS_P (elt)
3799 || VL_EXP_CLASS_P (elt)
3800 || EXPRESSION_CLASS_P (elt))
3801 ? TREE_OPERAND (elt, 0) : 0))
3802 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3803 return elt;
3804
3805 for (elt = obj; elt != 0;
3806 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3807 || TREE_CODE (elt) == COND_EXPR)
3808 ? TREE_OPERAND (elt, 1)
3809 : (REFERENCE_CLASS_P (elt)
3810 || UNARY_CLASS_P (elt)
3811 || BINARY_CLASS_P (elt)
3812 || VL_EXP_CLASS_P (elt)
3813 || EXPRESSION_CLASS_P (elt))
3814 ? TREE_OPERAND (elt, 0) : 0))
3815 if (POINTER_TYPE_P (TREE_TYPE (elt))
3816 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3817 == need_type))
3818 return fold_build1 (INDIRECT_REF, need_type, elt);
3819
3820 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3821 survives until RTL generation, there will be an error. */
3822 return exp;
3823 }
3824
3825 /* TREE_LIST is special because we need to look at TREE_VALUE
3826 and TREE_CHAIN, not TREE_OPERANDS. */
3827 else if (code == TREE_LIST)
3828 {
3829 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3830 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3831 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3832 return exp;
3833
3834 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3835 }
3836 else
3837 switch (TREE_CODE_CLASS (code))
3838 {
3839 case tcc_constant:
3840 case tcc_declaration:
3841 return exp;
3842
3843 case tcc_exceptional:
3844 case tcc_unary:
3845 case tcc_binary:
3846 case tcc_comparison:
3847 case tcc_expression:
3848 case tcc_reference:
3849 case tcc_statement:
3850 switch (TREE_CODE_LENGTH (code))
3851 {
3852 case 0:
3853 return exp;
3854
3855 case 1:
3856 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3857 if (op0 == TREE_OPERAND (exp, 0))
3858 return exp;
3859
3860 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3861 break;
3862
3863 case 2:
3864 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3865 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3866
3867 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3868 return exp;
3869
3870 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3871 break;
3872
3873 case 3:
3874 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3875 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3876 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3877
3878 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3879 && op2 == TREE_OPERAND (exp, 2))
3880 return exp;
3881
3882 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3883 break;
3884
3885 case 4:
3886 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3887 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3888 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3889 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3890
3891 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3892 && op2 == TREE_OPERAND (exp, 2)
3893 && op3 == TREE_OPERAND (exp, 3))
3894 return exp;
3895
3896 new_tree
3897 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3898 break;
3899
3900 default:
3901 gcc_unreachable ();
3902 }
3903 break;
3904
3905 case tcc_vl_exp:
3906 {
3907 int i;
3908
3909 new_tree = NULL_TREE;
3910
3911 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3912 {
3913 tree op = TREE_OPERAND (exp, i);
3914 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3915 if (new_op != op)
3916 {
3917 if (!new_tree)
3918 new_tree = copy_node (exp);
3919 TREE_OPERAND (new_tree, i) = new_op;
3920 }
3921 }
3922
3923 if (new_tree)
3924 {
3925 new_tree = fold (new_tree);
3926 if (TREE_CODE (new_tree) == CALL_EXPR)
3927 process_call_operands (new_tree);
3928 }
3929 else
3930 return exp;
3931 }
3932 break;
3933
3934 default:
3935 gcc_unreachable ();
3936 }
3937
3938 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3939
3940 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3941 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3942
3943 return new_tree;
3944 }
3945 \f
3946
3947 /* Subroutine of stabilize_reference; this is called for subtrees of
3948 references. Any expression with side-effects must be put in a SAVE_EXPR
3949 to ensure that it is only evaluated once.
3950
3951 We don't put SAVE_EXPR nodes around everything, because assigning very
3952 simple expressions to temporaries causes us to miss good opportunities
3953 for optimizations. Among other things, the opportunity to fold in the
3954 addition of a constant into an addressing mode often gets lost, e.g.
3955 "y[i+1] += x;". In general, we take the approach that we should not make
3956 an assignment unless we are forced into it - i.e., that any non-side effect
3957 operator should be allowed, and that cse should take care of coalescing
3958 multiple utterances of the same expression should that prove fruitful. */
3959
3960 static tree
3961 stabilize_reference_1 (tree e)
3962 {
3963 tree result;
3964 enum tree_code code = TREE_CODE (e);
3965
3966 /* We cannot ignore const expressions because it might be a reference
3967 to a const array whose index contains side-effects. But we can
3968 ignore things that are actually constant or that have already been
3969 handled by this function. */
3970
3971 if (tree_invariant_p (e))
3972 return e;
3973
3974 switch (TREE_CODE_CLASS (code))
3975 {
3976 case tcc_exceptional:
3977 case tcc_type:
3978 case tcc_declaration:
3979 case tcc_comparison:
3980 case tcc_statement:
3981 case tcc_expression:
3982 case tcc_reference:
3983 case tcc_vl_exp:
3984 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3985 so that it will only be evaluated once. */
3986 /* The reference (r) and comparison (<) classes could be handled as
3987 below, but it is generally faster to only evaluate them once. */
3988 if (TREE_SIDE_EFFECTS (e))
3989 return save_expr (e);
3990 return e;
3991
3992 case tcc_constant:
3993 /* Constants need no processing. In fact, we should never reach
3994 here. */
3995 return e;
3996
3997 case tcc_binary:
3998 /* Division is slow and tends to be compiled with jumps,
3999 especially the division by powers of 2 that is often
4000 found inside of an array reference. So do it just once. */
4001 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4002 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4003 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4004 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4005 return save_expr (e);
4006 /* Recursively stabilize each operand. */
4007 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4008 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4009 break;
4010
4011 case tcc_unary:
4012 /* Recursively stabilize each operand. */
4013 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4014 break;
4015
4016 default:
4017 gcc_unreachable ();
4018 }
4019
4020 TREE_TYPE (result) = TREE_TYPE (e);
4021 TREE_READONLY (result) = TREE_READONLY (e);
4022 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4023 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4024
4025 return result;
4026 }
4027
4028 /* Stabilize a reference so that we can use it any number of times
4029 without causing its operands to be evaluated more than once.
4030 Returns the stabilized reference. This works by means of save_expr,
4031 so see the caveats in the comments about save_expr.
4032
4033 Also allows conversion expressions whose operands are references.
4034 Any other kind of expression is returned unchanged. */
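
/* For example, a front end expanding a compound assignment such as
   "a[i ()] += 1" might do something along these lines (a sketch only;
   REF is whatever ARRAY_REF tree the caller already has):

     tree lhs = stabilize_reference (ref);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs,
                        build_int_cst (TREE_TYPE (lhs), 1));
     tree assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);

   The side-effecting index inside REF gets wrapped in a SAVE_EXPR, so LHS
   can appear on both sides of the assignment without calling i () twice.  */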
4035
4036 tree
4037 stabilize_reference (tree ref)
4038 {
4039 tree result;
4040 enum tree_code code = TREE_CODE (ref);
4041
4042 switch (code)
4043 {
4044 case VAR_DECL:
4045 case PARM_DECL:
4046 case RESULT_DECL:
4047 /* No action is needed in this case. */
4048 return ref;
4049
4050 CASE_CONVERT:
4051 case FLOAT_EXPR:
4052 case FIX_TRUNC_EXPR:
4053 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4054 break;
4055
4056 case INDIRECT_REF:
4057 result = build_nt (INDIRECT_REF,
4058 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4059 break;
4060
4061 case COMPONENT_REF:
4062 result = build_nt (COMPONENT_REF,
4063 stabilize_reference (TREE_OPERAND (ref, 0)),
4064 TREE_OPERAND (ref, 1), NULL_TREE);
4065 break;
4066
4067 case BIT_FIELD_REF:
4068 result = build_nt (BIT_FIELD_REF,
4069 stabilize_reference (TREE_OPERAND (ref, 0)),
4070 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4071 break;
4072
4073 case ARRAY_REF:
4074 result = build_nt (ARRAY_REF,
4075 stabilize_reference (TREE_OPERAND (ref, 0)),
4076 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4077 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4078 break;
4079
4080 case ARRAY_RANGE_REF:
4081 result = build_nt (ARRAY_RANGE_REF,
4082 stabilize_reference (TREE_OPERAND (ref, 0)),
4083 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4084 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4085 break;
4086
4087 case COMPOUND_EXPR:
4088 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4089 it wouldn't be ignored. This matters when dealing with
4090 volatiles. */
4091 return stabilize_reference_1 (ref);
4092
4093 /* If arg isn't a kind of lvalue we recognize, make no change.
4094 Caller should recognize the error for an invalid lvalue. */
4095 default:
4096 return ref;
4097
4098 case ERROR_MARK:
4099 return error_mark_node;
4100 }
4101
4102 TREE_TYPE (result) = TREE_TYPE (ref);
4103 TREE_READONLY (result) = TREE_READONLY (ref);
4104 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4105 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4106
4107 return result;
4108 }
4109 \f
4110 /* Low-level constructors for expressions. */
4111
4112 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4113 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
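
/* For example, given &a[i] where "a" is a static array and "i" is a local
   variable, the non-constant index clears TREE_CONSTANT on the ADDR_EXPR,
   whereas &a[3] stays TREE_CONSTANT; a call in the index additionally sets
   TREE_SIDE_EFFECTS.  */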
4114
4115 void
4116 recompute_tree_invariant_for_addr_expr (tree t)
4117 {
4118 tree node;
4119 bool tc = true, se = false;
4120
4121   /* We started out assuming this address is both invariant and constant, and
4122      that it does not have side effects.  Now go down any handled components and see if
4123 any of them involve offsets that are either non-constant or non-invariant.
4124 Also check for side-effects.
4125
4126 ??? Note that this code makes no attempt to deal with the case where
4127 taking the address of something causes a copy due to misalignment. */
4128
4129 #define UPDATE_FLAGS(NODE) \
4130 do { tree _node = (NODE); \
4131 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4132 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4133
4134 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4135 node = TREE_OPERAND (node, 0))
4136 {
4137 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4138 array reference (probably made temporarily by the G++ front end),
4139 so ignore all the operands. */
4140 if ((TREE_CODE (node) == ARRAY_REF
4141 || TREE_CODE (node) == ARRAY_RANGE_REF)
4142 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4143 {
4144 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4145 if (TREE_OPERAND (node, 2))
4146 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4147 if (TREE_OPERAND (node, 3))
4148 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4149 }
4150 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4151 FIELD_DECL, apparently. The G++ front end can put something else
4152 there, at least temporarily. */
4153 else if (TREE_CODE (node) == COMPONENT_REF
4154 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4155 {
4156 if (TREE_OPERAND (node, 2))
4157 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4158 }
4159 }
4160
4161 node = lang_hooks.expr_to_decl (node, &tc, &se);
4162
4163 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4164 the address, since &(*a)->b is a form of addition. If it's a constant, the
4165 address is constant too. If it's a decl, its address is constant if the
4166 decl is static. Everything else is not constant and, furthermore,
4167 taking the address of a volatile variable is not volatile. */
4168 if (TREE_CODE (node) == INDIRECT_REF
4169 || TREE_CODE (node) == MEM_REF)
4170 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4171 else if (CONSTANT_CLASS_P (node))
4172 ;
4173 else if (DECL_P (node))
4174 tc &= (staticp (node) != NULL_TREE);
4175 else
4176 {
4177 tc = false;
4178 se |= TREE_SIDE_EFFECTS (node);
4179 }
4180
4181
4182 TREE_CONSTANT (t) = tc;
4183 TREE_SIDE_EFFECTS (t) = se;
4184 #undef UPDATE_FLAGS
4185 }
4186
4187 /* Build an expression of code CODE, data type TYPE, and operands as
4188 specified. Expressions and reference nodes can be created this way.
4189 Constants, decls, types and misc nodes cannot be.
4190
4191    We define 6 non-variadic functions, from 0 to 5 arguments.  This is
4192 enough for all extant tree codes. */
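
/* A hypothetical sketch of typical use, where PRED, EXPR and TYPE stand for
   trees the caller already has:

     tree neg  = build1 (NEGATE_EXPR, type, expr);
     tree cond = build3 (COND_EXPR, type, pred, expr, neg);

   build1 normally copies TREE_SIDE_EFFECTS and TREE_READONLY from its
   operand; build2 through build5 accumulate those flags via PROCESS_ARG
   below.  */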
4193
4194 tree
4195 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4196 {
4197 tree t;
4198
4199 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4200
4201 t = make_node_stat (code PASS_MEM_STAT);
4202 TREE_TYPE (t) = tt;
4203
4204 return t;
4205 }
4206
4207 tree
4208 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4209 {
4210 int length = sizeof (struct tree_exp);
4211 tree t;
4212
4213 record_node_allocation_statistics (code, length);
4214
4215 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4216
4217 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4218
4219 memset (t, 0, sizeof (struct tree_common));
4220
4221 TREE_SET_CODE (t, code);
4222
4223 TREE_TYPE (t) = type;
4224 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4225 TREE_OPERAND (t, 0) = node;
4226 if (node && !TYPE_P (node))
4227 {
4228 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4229 TREE_READONLY (t) = TREE_READONLY (node);
4230 }
4231
4232 if (TREE_CODE_CLASS (code) == tcc_statement)
4233 TREE_SIDE_EFFECTS (t) = 1;
4234 else switch (code)
4235 {
4236 case VA_ARG_EXPR:
4237 /* All of these have side-effects, no matter what their
4238 operands are. */
4239 TREE_SIDE_EFFECTS (t) = 1;
4240 TREE_READONLY (t) = 0;
4241 break;
4242
4243 case INDIRECT_REF:
4244 /* Whether a dereference is readonly has nothing to do with whether
4245 its operand is readonly. */
4246 TREE_READONLY (t) = 0;
4247 break;
4248
4249 case ADDR_EXPR:
4250 if (node)
4251 recompute_tree_invariant_for_addr_expr (t);
4252 break;
4253
4254 default:
4255 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4256 && node && !TYPE_P (node)
4257 && TREE_CONSTANT (node))
4258 TREE_CONSTANT (t) = 1;
4259 if (TREE_CODE_CLASS (code) == tcc_reference
4260 && node && TREE_THIS_VOLATILE (node))
4261 TREE_THIS_VOLATILE (t) = 1;
4262 break;
4263 }
4264
4265 return t;
4266 }
4267
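/* Helper for build2_stat and the other build<N>_stat functions below: store
   operand N into T and fold that operand's TREE_SIDE_EFFECTS, TREE_READONLY
   and TREE_CONSTANT flags into the local side_effects, read_only and
   constant accumulators.  */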
4268 #define PROCESS_ARG(N) \
4269 do { \
4270 TREE_OPERAND (t, N) = arg##N; \
4271     if (arg##N && !TYPE_P (arg##N))		\
4272 { \
4273 if (TREE_SIDE_EFFECTS (arg##N)) \
4274 side_effects = 1; \
4275 if (!TREE_READONLY (arg##N) \
4276 && !CONSTANT_CLASS_P (arg##N)) \
4277 (void) (read_only = 0); \
4278 if (!TREE_CONSTANT (arg##N)) \
4279 (void) (constant = 0); \
4280 } \
4281 } while (0)
4282
4283 tree
4284 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4285 {
4286 bool constant, read_only, side_effects;
4287 tree t;
4288
4289 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4290
4291 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4292 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4293 /* When sizetype precision doesn't match that of pointers
4294 we need to be able to build explicit extensions or truncations
4295 of the offset argument. */
4296 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4297 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4298 && TREE_CODE (arg1) == INTEGER_CST);
4299
4300 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4301 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4302 && ptrofftype_p (TREE_TYPE (arg1)));
4303
4304 t = make_node_stat (code PASS_MEM_STAT);
4305 TREE_TYPE (t) = tt;
4306
4307 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4308 result based on those same flags for the arguments. But if the
4309 arguments aren't really even `tree' expressions, we shouldn't be trying
4310 to do this. */
4311
4312 /* Expressions without side effects may be constant if their
4313 arguments are as well. */
4314 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4315 || TREE_CODE_CLASS (code) == tcc_binary);
4316 read_only = 1;
4317 side_effects = TREE_SIDE_EFFECTS (t);
4318
4319 PROCESS_ARG (0);
4320 PROCESS_ARG (1);
4321
4322 TREE_READONLY (t) = read_only;
4323 TREE_CONSTANT (t) = constant;
4324 TREE_SIDE_EFFECTS (t) = side_effects;
4325 TREE_THIS_VOLATILE (t)
4326 = (TREE_CODE_CLASS (code) == tcc_reference
4327 && arg0 && TREE_THIS_VOLATILE (arg0));
4328
4329 return t;
4330 }
4331
4332
4333 tree
4334 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4335 tree arg2 MEM_STAT_DECL)
4336 {
4337 bool constant, read_only, side_effects;
4338 tree t;
4339
4340 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4341 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4342
4343 t = make_node_stat (code PASS_MEM_STAT);
4344 TREE_TYPE (t) = tt;
4345
4346 read_only = 1;
4347
4348 /* As a special exception, if COND_EXPR has NULL branches, we
4349 assume that it is a gimple statement and always consider
4350 it to have side effects. */
4351 if (code == COND_EXPR
4352 && tt == void_type_node
4353 && arg1 == NULL_TREE
4354 && arg2 == NULL_TREE)
4355 side_effects = true;
4356 else
4357 side_effects = TREE_SIDE_EFFECTS (t);
4358
4359 PROCESS_ARG (0);
4360 PROCESS_ARG (1);
4361 PROCESS_ARG (2);
4362
4363 if (code == COND_EXPR)
4364 TREE_READONLY (t) = read_only;
4365
4366 TREE_SIDE_EFFECTS (t) = side_effects;
4367 TREE_THIS_VOLATILE (t)
4368 = (TREE_CODE_CLASS (code) == tcc_reference
4369 && arg0 && TREE_THIS_VOLATILE (arg0));
4370
4371 return t;
4372 }
4373
4374 tree
4375 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4376 tree arg2, tree arg3 MEM_STAT_DECL)
4377 {
4378 bool constant, read_only, side_effects;
4379 tree t;
4380
4381 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4382
4383 t = make_node_stat (code PASS_MEM_STAT);
4384 TREE_TYPE (t) = tt;
4385
4386 side_effects = TREE_SIDE_EFFECTS (t);
4387
4388 PROCESS_ARG (0);
4389 PROCESS_ARG (1);
4390 PROCESS_ARG (2);
4391 PROCESS_ARG (3);
4392
4393 TREE_SIDE_EFFECTS (t) = side_effects;
4394 TREE_THIS_VOLATILE (t)
4395 = (TREE_CODE_CLASS (code) == tcc_reference
4396 && arg0 && TREE_THIS_VOLATILE (arg0));
4397
4398 return t;
4399 }
4400
4401 tree
4402 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4403 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4404 {
4405 bool constant, read_only, side_effects;
4406 tree t;
4407
4408 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4409
4410 t = make_node_stat (code PASS_MEM_STAT);
4411 TREE_TYPE (t) = tt;
4412
4413 side_effects = TREE_SIDE_EFFECTS (t);
4414
4415 PROCESS_ARG (0);
4416 PROCESS_ARG (1);
4417 PROCESS_ARG (2);
4418 PROCESS_ARG (3);
4419 PROCESS_ARG (4);
4420
4421 TREE_SIDE_EFFECTS (t) = side_effects;
4422 TREE_THIS_VOLATILE (t)
4423 = (TREE_CODE_CLASS (code) == tcc_reference
4424 && arg0 && TREE_THIS_VOLATILE (arg0));
4425
4426 return t;
4427 }
4428
4429 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4430 on the pointer PTR. */
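
/* E.g. for a pointer P of type "int *", the result is a MEM_REF of type
   "int" with a zero offset, i.e. the equivalent of *P; if P has the form
   &a.b, the base address and constant byte offset are folded into the
   MEM_REF instead.  */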
4431
4432 tree
4433 build_simple_mem_ref_loc (location_t loc, tree ptr)
4434 {
4435 HOST_WIDE_INT offset = 0;
4436 tree ptype = TREE_TYPE (ptr);
4437 tree tem;
4438 /* For convenience allow addresses that collapse to a simple base
4439 and offset. */
4440 if (TREE_CODE (ptr) == ADDR_EXPR
4441 && (handled_component_p (TREE_OPERAND (ptr, 0))
4442 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4443 {
4444 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4445 gcc_assert (ptr);
4446 ptr = build_fold_addr_expr (ptr);
4447 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4448 }
4449 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4450 ptr, build_int_cst (ptype, offset));
4451 SET_EXPR_LOCATION (tem, loc);
4452 return tem;
4453 }
4454
4455 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4456
4457 offset_int
4458 mem_ref_offset (const_tree t)
4459 {
4460 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4461 }
4462
4463 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4464 offsetted by OFFSET units. */
4465
4466 tree
4467 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4468 {
4469 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4470 build_fold_addr_expr (base),
4471 build_int_cst (ptr_type_node, offset));
4472 tree addr = build1 (ADDR_EXPR, type, ref);
4473 recompute_tree_invariant_for_addr_expr (addr);
4474 return addr;
4475 }
4476
4477 /* Similar to the build<N> functions above, except don't specify the TREE_TYPE
4478 and leave the TREE_SIDE_EFFECTS as 0.
4479 It is permissible for arguments to be null,
4480 or even garbage if their values do not matter. */
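
/* For instance, stabilize_reference above uses it along the lines of

     build_nt (COMPONENT_REF, op0, field, NULL_TREE);

   and then fills in TREE_TYPE and the flag bits itself afterwards.  */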
4481
4482 tree
4483 build_nt (enum tree_code code, ...)
4484 {
4485 tree t;
4486 int length;
4487 int i;
4488 va_list p;
4489
4490 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4491
4492 va_start (p, code);
4493
4494 t = make_node (code);
4495 length = TREE_CODE_LENGTH (code);
4496
4497 for (i = 0; i < length; i++)
4498 TREE_OPERAND (t, i) = va_arg (p, tree);
4499
4500 va_end (p);
4501 return t;
4502 }
4503
4504 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4505 tree vec. */
4506
4507 tree
4508 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4509 {
4510 tree ret, t;
4511 unsigned int ix;
4512
4513 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4514 CALL_EXPR_FN (ret) = fn;
4515 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4516 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4517 CALL_EXPR_ARG (ret, ix) = t;
4518 return ret;
4519 }
4520 \f
4521 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4522 We do NOT enter this node in any sort of symbol table.
4523
4524 LOC is the location of the decl.
4525
4526 layout_decl is used to set up the decl's storage layout.
4527 Other slots are initialized to 0 or null pointers. */
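
/* A hypothetical example creating a local integer variable (the name "tmp"
   and location LOC are illustrative only):

     tree var = build_decl (loc, VAR_DECL, get_identifier ("tmp"),
                            integer_type_node);

   layout_decl then fills in DECL_SIZE, DECL_SIZE_UNIT, DECL_ALIGN and
   DECL_MODE for it.  */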
4528
4529 tree
4530 build_decl_stat (location_t loc, enum tree_code code, tree name,
4531 tree type MEM_STAT_DECL)
4532 {
4533 tree t;
4534
4535 t = make_node_stat (code PASS_MEM_STAT);
4536 DECL_SOURCE_LOCATION (t) = loc;
4537
4538 /* if (type == error_mark_node)
4539 type = integer_type_node; */
4540 /* That is not done, deliberately, so that having error_mark_node
4541 as the type can suppress useless errors in the use of this variable. */
4542
4543 DECL_NAME (t) = name;
4544 TREE_TYPE (t) = type;
4545
4546 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4547 layout_decl (t, 0);
4548
4549 return t;
4550 }
4551
4552 /* Builds and returns a function declaration with name NAME and type TYPE.  */
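
/* A minimal sketch of a possible use (the name "__my_helper" and the use
   of build_function_type_list are illustrative only):

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("__my_helper", fntype);

   The result is an external, public, artificial, nothrow FUNCTION_DECL.  */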
4553
4554 tree
4555 build_fn_decl (const char *name, tree type)
4556 {
4557 tree id = get_identifier (name);
4558 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4559
4560 DECL_EXTERNAL (decl) = 1;
4561 TREE_PUBLIC (decl) = 1;
4562 DECL_ARTIFICIAL (decl) = 1;
4563 TREE_NOTHROW (decl) = 1;
4564
4565 return decl;
4566 }
4567
4568 vec<tree, va_gc> *all_translation_units;
4569
4570 /* Builds a new translation-unit decl with name NAME, queues it in the
4571 global list of translation-unit decls and returns it. */
4572
4573 tree
4574 build_translation_unit_decl (tree name)
4575 {
4576 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4577 name, NULL_TREE);
4578 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4579 vec_safe_push (all_translation_units, tu);
4580 return tu;
4581 }
4582
4583 \f
4584 /* BLOCK nodes are used to represent the structure of binding contours
4585 and declarations, once those contours have been exited and their contents
4586 compiled. This information is used for outputting debugging info. */
4587
4588 tree
4589 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4590 {
4591 tree block = make_node (BLOCK);
4592
4593 BLOCK_VARS (block) = vars;
4594 BLOCK_SUBBLOCKS (block) = subblocks;
4595 BLOCK_SUPERCONTEXT (block) = supercontext;
4596 BLOCK_CHAIN (block) = chain;
4597 return block;
4598 }
4599
4600 \f
4601 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4602
4603 LOC is the location to use in tree T. */
4604
4605 void
4606 protected_set_expr_location (tree t, location_t loc)
4607 {
4608 if (CAN_HAVE_LOCATION_P (t))
4609 SET_EXPR_LOCATION (t, loc);
4610 }
4611 \f
4612 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4613 is ATTRIBUTE. */
4614
4615 tree
4616 build_decl_attribute_variant (tree ddecl, tree attribute)
4617 {
4618 DECL_ATTRIBUTES (ddecl) = attribute;
4619 return ddecl;
4620 }
4621
4622 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4623 is ATTRIBUTE and its qualifiers are QUALS.
4624
4625 Record such modified types already made so we don't make duplicates. */
4626
4627 tree
4628 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4629 {
4630 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4631 {
4632 inchash::hash hstate;
4633 tree ntype;
4634 int i;
4635 tree t;
4636 enum tree_code code = TREE_CODE (ttype);
4637
4638 /* Building a distinct copy of a tagged type is inappropriate; it
4639 causes breakage in code that expects there to be a one-to-one
4640 relationship between a struct and its fields.
4641 build_duplicate_type is another solution (as used in
4642 handle_transparent_union_attribute), but that doesn't play well
4643 with the stronger C++ type identity model. */
4644 if (TREE_CODE (ttype) == RECORD_TYPE
4645 || TREE_CODE (ttype) == UNION_TYPE
4646 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4647 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4648 {
4649 warning (OPT_Wattributes,
4650 "ignoring attributes applied to %qT after definition",
4651 TYPE_MAIN_VARIANT (ttype));
4652 return build_qualified_type (ttype, quals);
4653 }
4654
4655 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4656 ntype = build_distinct_type_copy (ttype);
4657
4658 TYPE_ATTRIBUTES (ntype) = attribute;
4659
4660 hstate.add_int (code);
4661 if (TREE_TYPE (ntype))
4662 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4663 attribute_hash_list (attribute, hstate);
4664
4665 switch (TREE_CODE (ntype))
4666 {
4667 case FUNCTION_TYPE:
4668 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4669 break;
4670 case ARRAY_TYPE:
4671 if (TYPE_DOMAIN (ntype))
4672 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4673 break;
4674 case INTEGER_TYPE:
4675 t = TYPE_MAX_VALUE (ntype);
4676 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4677 hstate.add_object (TREE_INT_CST_ELT (t, i));
4678 break;
4679 case REAL_TYPE:
4680 case FIXED_POINT_TYPE:
4681 {
4682 unsigned int precision = TYPE_PRECISION (ntype);
4683 hstate.add_object (precision);
4684 }
4685 break;
4686 default:
4687 break;
4688 }
4689
4690 ntype = type_hash_canon (hstate.end(), ntype);
4691
4692 /* If the target-dependent attributes make NTYPE different from
4693 its canonical type, we will need to use structural equality
4694 checks for this type. */
4695 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4696 || !comp_type_attributes (ntype, ttype))
4697 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4698 else if (TYPE_CANONICAL (ntype) == ntype)
4699 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4700
4701 ttype = build_qualified_type (ntype, quals);
4702 }
4703 else if (TYPE_QUALS (ttype) != quals)
4704 ttype = build_qualified_type (ttype, quals);
4705
4706 return ttype;
4707 }
4708
4709 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4710 the same. */
4711
4712 static bool
4713 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4714 {
4715 tree cl1, cl2;
4716 for (cl1 = clauses1, cl2 = clauses2;
4717 cl1 && cl2;
4718 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4719 {
4720 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4721 return false;
4722 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4723 {
4724 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4725 OMP_CLAUSE_DECL (cl2)) != 1)
4726 return false;
4727 }
4728 switch (OMP_CLAUSE_CODE (cl1))
4729 {
4730 case OMP_CLAUSE_ALIGNED:
4731 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4732 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4733 return false;
4734 break;
4735 case OMP_CLAUSE_LINEAR:
4736 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4737 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4738 return false;
4739 break;
4740 case OMP_CLAUSE_SIMDLEN:
4741 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4742 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4743 return false;
4744 default:
4745 break;
4746 }
4747 }
4748 return true;
4749 }
4750
4751 /* Compare two constructor-element-type constants.  Return true if the lists
4752    are known to be equal; otherwise return false.  */
4753
4754 static bool
4755 simple_cst_list_equal (const_tree l1, const_tree l2)
4756 {
4757 while (l1 != NULL_TREE && l2 != NULL_TREE)
4758 {
4759 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4760 return false;
4761
4762 l1 = TREE_CHAIN (l1);
4763 l2 = TREE_CHAIN (l2);
4764 }
4765
4766 return l1 == l2;
4767 }
4768
4769 /* Compare two attributes for their value identity. Return true if the
4770 attribute values are known to be equal; otherwise return false.
4771 */
4772
4773 static bool
4774 attribute_value_equal (const_tree attr1, const_tree attr2)
4775 {
4776 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4777 return true;
4778
4779 if (TREE_VALUE (attr1) != NULL_TREE
4780 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4781 && TREE_VALUE (attr2) != NULL
4782 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4783 return (simple_cst_list_equal (TREE_VALUE (attr1),
4784 TREE_VALUE (attr2)) == 1);
4785
4786 if ((flag_openmp || flag_openmp_simd)
4787 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4788 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4789 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4790 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4791 TREE_VALUE (attr2));
4792
4793 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4794 }
4795
4796 /* Return 0 if the attributes for two types are incompatible, 1 if they
4797 are compatible, and 2 if they are nearly compatible (which causes a
4798 warning to be generated). */
4799 int
4800 comp_type_attributes (const_tree type1, const_tree type2)
4801 {
4802 const_tree a1 = TYPE_ATTRIBUTES (type1);
4803 const_tree a2 = TYPE_ATTRIBUTES (type2);
4804 const_tree a;
4805
4806 if (a1 == a2)
4807 return 1;
4808 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4809 {
4810 const struct attribute_spec *as;
4811 const_tree attr;
4812
4813 as = lookup_attribute_spec (get_attribute_name (a));
4814 if (!as || as->affects_type_identity == false)
4815 continue;
4816
4817 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4818 if (!attr || !attribute_value_equal (a, attr))
4819 break;
4820 }
4821 if (!a)
4822 {
4823 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4824 {
4825 const struct attribute_spec *as;
4826
4827 as = lookup_attribute_spec (get_attribute_name (a));
4828 if (!as || as->affects_type_identity == false)
4829 continue;
4830
4831 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4832 break;
4833 /* We don't need to compare trees again, as we did this
4834 already in first loop. */
4835 }
4836 /* All types - affecting identity - are equal, so
4837 there is no need to call target hook for comparison. */
4838 if (!a)
4839 return 1;
4840 }
4841 /* As some type combinations - like default calling-convention - might
4842 be compatible, we have to call the target hook to get the final result. */
4843 return targetm.comp_type_attributes (type1, type2);
4844 }
4845
4846 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4847 is ATTRIBUTE.
4848
4849 Record such modified types already made so we don't make duplicates. */
4850
4851 tree
4852 build_type_attribute_variant (tree ttype, tree attribute)
4853 {
4854 return build_type_attribute_qual_variant (ttype, attribute,
4855 TYPE_QUALS (ttype));
4856 }
4857
4858
4859 /* Reset the expression *EXPR_P, a size or position.
4860
4861 ??? We could reset all non-constant sizes or positions. But it's cheap
4862 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4863
4864 We need to reset self-referential sizes or positions because they cannot
4865 be gimplified and thus can contain a CALL_EXPR after the gimplification
4866 is finished, which will run afoul of LTO streaming. And they need to be
4867 reset to something essentially dummy but not constant, so as to preserve
4868 the properties of the object they are attached to. */
4869
4870 static inline void
4871 free_lang_data_in_one_sizepos (tree *expr_p)
4872 {
4873 tree expr = *expr_p;
4874 if (CONTAINS_PLACEHOLDER_P (expr))
4875 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4876 }
4877
4878
4879 /* Reset all the fields in a binfo node BINFO. We only keep
4880 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4881
4882 static void
4883 free_lang_data_in_binfo (tree binfo)
4884 {
4885 unsigned i;
4886 tree t;
4887
4888 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4889
4890 BINFO_VIRTUALS (binfo) = NULL_TREE;
4891 BINFO_BASE_ACCESSES (binfo) = NULL;
4892 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4893 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4894
4895 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4896 free_lang_data_in_binfo (t);
4897 }
4898
4899
4900 /* Reset all language specific information still present in TYPE. */
4901
4902 static void
4903 free_lang_data_in_type (tree type)
4904 {
4905 gcc_assert (TYPE_P (type));
4906
4907 /* Give the FE a chance to remove its own data first. */
4908 lang_hooks.free_lang_data (type);
4909
4910 TREE_LANG_FLAG_0 (type) = 0;
4911 TREE_LANG_FLAG_1 (type) = 0;
4912 TREE_LANG_FLAG_2 (type) = 0;
4913 TREE_LANG_FLAG_3 (type) = 0;
4914 TREE_LANG_FLAG_4 (type) = 0;
4915 TREE_LANG_FLAG_5 (type) = 0;
4916 TREE_LANG_FLAG_6 (type) = 0;
4917
4918 if (TREE_CODE (type) == FUNCTION_TYPE)
4919 {
4920 /* Remove the const and volatile qualifiers from arguments. The
4921 C++ front end removes them, but the C front end does not,
4922 leading to false ODR violation errors when merging two
4923 instances of the same function signature compiled by
4924 different front ends. */
4925 tree p;
4926
4927 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4928 {
4929 tree arg_type = TREE_VALUE (p);
4930
4931 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4932 {
4933 int quals = TYPE_QUALS (arg_type)
4934 & ~TYPE_QUAL_CONST
4935 & ~TYPE_QUAL_VOLATILE;
4936 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4937 free_lang_data_in_type (TREE_VALUE (p));
4938 }
4939 }
4940 }
4941
4942 /* Remove members that are not actually FIELD_DECLs from the field
4943 list of an aggregate. These occur in C++. */
4944 if (RECORD_OR_UNION_TYPE_P (type))
4945 {
4946 tree prev, member;
4947
4948 /* Note that TYPE_FIELDS can be shared across distinct
4949 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4950 to be removed, we cannot set its TREE_CHAIN to NULL.
4951 Otherwise, we would not be able to find all the other fields
4952 in the other instances of this TREE_TYPE.
4953
4954 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4955 prev = NULL_TREE;
4956 member = TYPE_FIELDS (type);
4957 while (member)
4958 {
4959 if (TREE_CODE (member) == FIELD_DECL
4960 || TREE_CODE (member) == TYPE_DECL)
4961 {
4962 if (prev)
4963 TREE_CHAIN (prev) = member;
4964 else
4965 TYPE_FIELDS (type) = member;
4966 prev = member;
4967 }
4968
4969 member = TREE_CHAIN (member);
4970 }
4971
4972 if (prev)
4973 TREE_CHAIN (prev) = NULL_TREE;
4974 else
4975 TYPE_FIELDS (type) = NULL_TREE;
4976
4977 TYPE_METHODS (type) = NULL_TREE;
4978 if (TYPE_BINFO (type))
4979 free_lang_data_in_binfo (TYPE_BINFO (type));
4980 }
4981 else
4982 {
4983 /* For non-aggregate types, clear out the language slot (which
4984 overloads TYPE_BINFO). */
4985 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4986
4987 if (INTEGRAL_TYPE_P (type)
4988 || SCALAR_FLOAT_TYPE_P (type)
4989 || FIXED_POINT_TYPE_P (type))
4990 {
4991 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4992 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4993 }
4994 }
4995
4996 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4997 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4998
4999 if (TYPE_CONTEXT (type)
5000 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5001 {
5002 tree ctx = TYPE_CONTEXT (type);
5003 do
5004 {
5005 ctx = BLOCK_SUPERCONTEXT (ctx);
5006 }
5007 while (ctx && TREE_CODE (ctx) == BLOCK);
5008 TYPE_CONTEXT (type) = ctx;
5009 }
5010 }
5011
5012
5013 /* Return true if DECL may need an assembler name to be set. */
5014
5015 static inline bool
5016 need_assembler_name_p (tree decl)
5017 {
5018 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5019 merging. */
5020 if (flag_lto_odr_type_mering
5021 && TREE_CODE (decl) == TYPE_DECL
5022 && DECL_NAME (decl)
5023 && decl == TYPE_NAME (TREE_TYPE (decl))
5024 && !is_lang_specific (TREE_TYPE (decl))
5025 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5026 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5027 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5028 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5029 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5030 if (TREE_CODE (decl) != FUNCTION_DECL
5031 && TREE_CODE (decl) != VAR_DECL)
5032 return false;
5033
5034 /* If DECL already has its assembler name set, it does not need a
5035 new one. */
5036 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5037 || DECL_ASSEMBLER_NAME_SET_P (decl))
5038 return false;
5039
5040 /* Abstract decls do not need an assembler name. */
5041 if (DECL_ABSTRACT_P (decl))
5042 return false;
5043
5044 /* For VAR_DECLs, only static, public and external symbols need an
5045 assembler name. */
5046 if (TREE_CODE (decl) == VAR_DECL
5047 && !TREE_STATIC (decl)
5048 && !TREE_PUBLIC (decl)
5049 && !DECL_EXTERNAL (decl))
5050 return false;
5051
5052 if (TREE_CODE (decl) == FUNCTION_DECL)
5053 {
5054 /* Do not set assembler name on builtins. Allow RTL expansion to
5055 decide whether to expand inline or via a regular call. */
5056 if (DECL_BUILT_IN (decl)
5057 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5058 return false;
5059
5060 /* Functions represented in the callgraph need an assembler name. */
5061 if (cgraph_node::get (decl) != NULL)
5062 return true;
5063
5064 /* Unused and not public functions don't need an assembler name. */
5065 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5066 return false;
5067 }
5068
5069 return true;
5070 }
5071
5072
5073 /* Reset all language specific information still present in symbol
5074 DECL. */
5075
5076 static void
5077 free_lang_data_in_decl (tree decl)
5078 {
5079 gcc_assert (DECL_P (decl));
5080
5081 /* Give the FE a chance to remove its own data first. */
5082 lang_hooks.free_lang_data (decl);
5083
5084 TREE_LANG_FLAG_0 (decl) = 0;
5085 TREE_LANG_FLAG_1 (decl) = 0;
5086 TREE_LANG_FLAG_2 (decl) = 0;
5087 TREE_LANG_FLAG_3 (decl) = 0;
5088 TREE_LANG_FLAG_4 (decl) = 0;
5089 TREE_LANG_FLAG_5 (decl) = 0;
5090 TREE_LANG_FLAG_6 (decl) = 0;
5091
5092 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5093 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5094 if (TREE_CODE (decl) == FIELD_DECL)
5095 {
5096 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5097 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5098 DECL_QUALIFIER (decl) = NULL_TREE;
5099 }
5100
5101 if (TREE_CODE (decl) == FUNCTION_DECL)
5102 {
5103 struct cgraph_node *node;
5104 if (!(node = cgraph_node::get (decl))
5105 || (!node->definition && !node->clones))
5106 {
5107 if (node)
5108 node->release_body ();
5109 else
5110 {
5111 release_function_body (decl);
5112 DECL_ARGUMENTS (decl) = NULL;
5113 DECL_RESULT (decl) = NULL;
5114 DECL_INITIAL (decl) = error_mark_node;
5115 }
5116 }
5117 if (gimple_has_body_p (decl))
5118 {
5119 tree t;
5120
5121 /* If DECL has a gimple body, then the context for its
5122 arguments must be DECL. Otherwise, it doesn't really
5123 matter, as we will not be emitting any code for DECL. In
5124 general, there may be other instances of DECL created by
5125 the front end and since PARM_DECLs are generally shared,
5126 their DECL_CONTEXT changes as the replicas of DECL are
5127 created. The only time where DECL_CONTEXT is important
5128 is for the FUNCTION_DECLs that have a gimple body (since
5129 the PARM_DECL will be used in the function's body). */
5130 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5131 DECL_CONTEXT (t) = decl;
5132 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5133 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5134 = target_option_default_node;
5135 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5136 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5137 = optimization_default_node;
5138 }
5139
5140 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5141 At this point, it is not needed anymore. */
5142 DECL_SAVED_TREE (decl) = NULL_TREE;
5143
5144 /* Clear the abstract origin if it refers to a method. Otherwise
5145 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5146 origin will not be output correctly. */
5147 if (DECL_ABSTRACT_ORIGIN (decl)
5148 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5149 && RECORD_OR_UNION_TYPE_P
5150 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5151 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5152
5153 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5154 DECL_VINDEX referring to itself into a vtable slot number as it
5155 should. Happens with functions that are copied and then forgotten
5156 about. Just clear it, it won't matter anymore. */
5157 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5158 DECL_VINDEX (decl) = NULL_TREE;
5159 }
5160 else if (TREE_CODE (decl) == VAR_DECL)
5161 {
5162 if ((DECL_EXTERNAL (decl)
5163 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5164 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5165 DECL_INITIAL (decl) = NULL_TREE;
5166 }
5167 else if (TREE_CODE (decl) == TYPE_DECL
5168 || TREE_CODE (decl) == FIELD_DECL)
5169 DECL_INITIAL (decl) = NULL_TREE;
5170 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5171 && DECL_INITIAL (decl)
5172 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5173 {
5174 /* Strip builtins from the translation-unit BLOCK. We still have targets
5175 without builtin_decl_explicit support and also builtins are shared
5176 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5177 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5178 while (*nextp)
5179 {
5180 tree var = *nextp;
5181 if (TREE_CODE (var) == FUNCTION_DECL
5182 && DECL_BUILT_IN (var))
5183 *nextp = TREE_CHAIN (var);
5184 else
5185 nextp = &TREE_CHAIN (var);
5186 }
5187 }
5188 }
5189
5190
5191 /* Data used when collecting DECLs and TYPEs for language data removal. */
5192
5193 struct free_lang_data_d
5194 {
5195 /* Worklist to avoid excessive recursion. */
5196 vec<tree> worklist;
5197
5198 /* Set of traversed objects. Used to avoid duplicate visits. */
5199 hash_set<tree> *pset;
5200
5201 /* Array of symbols to process with free_lang_data_in_decl. */
5202 vec<tree> decls;
5203
5204 /* Array of types to process with free_lang_data_in_type. */
5205 vec<tree> types;
5206 };
5207
5208
5209 /* Save all language fields needed to generate proper debug information
5210 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5211
5212 static void
5213 save_debug_info_for_decl (tree t)
5214 {
5215 /*struct saved_debug_info_d *sdi;*/
5216
5217 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5218
5219 /* FIXME. Partial implementation for saving debug info removed. */
5220 }
5221
5222
5223 /* Save all language fields needed to generate proper debug information
5224 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5225
5226 static void
5227 save_debug_info_for_type (tree t)
5228 {
5229 /*struct saved_debug_info_d *sdi;*/
5230
5231 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5232
5233 /* FIXME. Partial implementation for saving debug info removed. */
5234 }
5235
5236
5237 /* Add type or decl T to one of the list of tree nodes that need their
5238 language data removed. The lists are held inside FLD. */
5239
5240 static void
5241 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5242 {
5243 if (DECL_P (t))
5244 {
5245 fld->decls.safe_push (t);
5246 if (debug_info_level > DINFO_LEVEL_TERSE)
5247 save_debug_info_for_decl (t);
5248 }
5249 else if (TYPE_P (t))
5250 {
5251 fld->types.safe_push (t);
5252 if (debug_info_level > DINFO_LEVEL_TERSE)
5253 save_debug_info_for_type (t);
5254 }
5255 else
5256 gcc_unreachable ();
5257 }
5258
5259 /* Push tree node T into FLD->WORKLIST. */
5260
5261 static inline void
5262 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5263 {
5264 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5265 fld->worklist.safe_push ((t));
5266 }
5267
5268
5269 /* Operand callback helper for free_lang_data_in_node. *TP is the
5270 subtree operand being considered. */
5271
5272 static tree
5273 find_decls_types_r (tree *tp, int *ws, void *data)
5274 {
5275 tree t = *tp;
5276 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5277
5278 if (TREE_CODE (t) == TREE_LIST)
5279 return NULL_TREE;
5280
5281 /* Language specific nodes will be removed, so there is no need
5282 to gather anything under them. */
5283 if (is_lang_specific (t))
5284 {
5285 *ws = 0;
5286 return NULL_TREE;
5287 }
5288
5289 if (DECL_P (t))
5290 {
5291 /* Note that walk_tree does not traverse every possible field in
5292 decls, so we have to do our own traversals here. */
5293 add_tree_to_fld_list (t, fld);
5294
5295 fld_worklist_push (DECL_NAME (t), fld);
5296 fld_worklist_push (DECL_CONTEXT (t), fld);
5297 fld_worklist_push (DECL_SIZE (t), fld);
5298 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5299
5300 /* We are going to remove everything under DECL_INITIAL for
5301 TYPE_DECLs. No point walking them. */
5302 if (TREE_CODE (t) != TYPE_DECL)
5303 fld_worklist_push (DECL_INITIAL (t), fld);
5304
5305 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5306 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5307
5308 if (TREE_CODE (t) == FUNCTION_DECL)
5309 {
5310 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5311 fld_worklist_push (DECL_RESULT (t), fld);
5312 }
5313 else if (TREE_CODE (t) == TYPE_DECL)
5314 {
5315 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5316 }
5317 else if (TREE_CODE (t) == FIELD_DECL)
5318 {
5319 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5320 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5321 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5322 fld_worklist_push (DECL_FCONTEXT (t), fld);
5323 }
5324
5325 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5326 && DECL_HAS_VALUE_EXPR_P (t))
5327 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5328
5329 if (TREE_CODE (t) != FIELD_DECL
5330 && TREE_CODE (t) != TYPE_DECL)
5331 fld_worklist_push (TREE_CHAIN (t), fld);
5332 *ws = 0;
5333 }
5334 else if (TYPE_P (t))
5335 {
5336 /* Note that walk_tree does not traverse every possible field in
5337 types, so we have to do our own traversals here. */
5338 add_tree_to_fld_list (t, fld);
5339
5340 if (!RECORD_OR_UNION_TYPE_P (t))
5341 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5342 fld_worklist_push (TYPE_SIZE (t), fld);
5343 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5344 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5345 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5346 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5347 fld_worklist_push (TYPE_NAME (t), fld);
5348       /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
5349          them and thus do not want to reach unused pointer types
5350          this way.  */
5351 if (!POINTER_TYPE_P (t))
5352 fld_worklist_push (TYPE_MINVAL (t), fld);
5353 if (!RECORD_OR_UNION_TYPE_P (t))
5354 fld_worklist_push (TYPE_MAXVAL (t), fld);
5355 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5356       /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
5357          do not want to reach unused variants this way.  */
5358 if (TYPE_CONTEXT (t))
5359 {
5360 tree ctx = TYPE_CONTEXT (t);
5361 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5362 So push that instead. */
5363 while (ctx && TREE_CODE (ctx) == BLOCK)
5364 ctx = BLOCK_SUPERCONTEXT (ctx);
5365 fld_worklist_push (ctx, fld);
5366 }
5367       /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do not
5368          want to reach unused types this way.  */
5369
5370 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5371 {
5372 unsigned i;
5373 tree tem;
5374 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5375 fld_worklist_push (TREE_TYPE (tem), fld);
5376 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5377 if (tem
5378 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5379 && TREE_CODE (tem) == TREE_LIST)
5380 do
5381 {
5382 fld_worklist_push (TREE_VALUE (tem), fld);
5383 tem = TREE_CHAIN (tem);
5384 }
5385 while (tem);
5386 }
5387 if (RECORD_OR_UNION_TYPE_P (t))
5388 {
5389 tree tem;
5390           /* Push all TYPE_FIELDS; interesting and uninteresting entries
5391              can be interleaved.  */
5392 tem = TYPE_FIELDS (t);
5393 while (tem)
5394 {
5395 if (TREE_CODE (tem) == FIELD_DECL
5396 || TREE_CODE (tem) == TYPE_DECL)
5397 fld_worklist_push (tem, fld);
5398 tem = TREE_CHAIN (tem);
5399 }
5400 }
5401
5402 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5403 *ws = 0;
5404 }
5405 else if (TREE_CODE (t) == BLOCK)
5406 {
5407 tree tem;
5408 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5409 fld_worklist_push (tem, fld);
5410 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5411 fld_worklist_push (tem, fld);
5412 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5413 }
5414
5415 if (TREE_CODE (t) != IDENTIFIER_NODE
5416 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5417 fld_worklist_push (TREE_TYPE (t), fld);
5418
5419 return NULL_TREE;
5420 }
5421
5422
5423 /* Find decls and types in T. */
5424
5425 static void
5426 find_decls_types (tree t, struct free_lang_data_d *fld)
5427 {
5428 while (1)
5429 {
5430 if (!fld->pset->contains (t))
5431 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5432 if (fld->worklist.is_empty ())
5433 break;
5434 t = fld->worklist.pop ();
5435 }
5436 }
5437
5438 /* Translate all the types in LIST into the corresponding runtime
5439    types.  */
5440
5441 static tree
5442 get_eh_types_for_runtime (tree list)
5443 {
5444 tree head, prev;
5445
5446 if (list == NULL_TREE)
5447 return NULL_TREE;
5448
5449 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5450 prev = head;
5451 list = TREE_CHAIN (list);
5452 while (list)
5453 {
5454 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5455 TREE_CHAIN (prev) = n;
5456 prev = TREE_CHAIN (prev);
5457 list = TREE_CHAIN (list);
5458 }
5459
5460 return head;
5461 }
5462
5463
5464 /* Find decls and types referenced in EH region R and store them in
5465 FLD->DECLS and FLD->TYPES. */
5466
5467 static void
5468 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5469 {
5470 switch (r->type)
5471 {
5472 case ERT_CLEANUP:
5473 break;
5474
5475 case ERT_TRY:
5476 {
5477 eh_catch c;
5478
5479 /* The types referenced in each catch must first be changed to the
5480 EH types used at runtime. This removes references to FE types
5481 in the region. */
5482 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5483 {
5484 c->type_list = get_eh_types_for_runtime (c->type_list);
5485 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5486 }
5487 }
5488 break;
5489
5490 case ERT_ALLOWED_EXCEPTIONS:
5491 r->u.allowed.type_list
5492 = get_eh_types_for_runtime (r->u.allowed.type_list);
5493 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5494 break;
5495
5496 case ERT_MUST_NOT_THROW:
5497 walk_tree (&r->u.must_not_throw.failure_decl,
5498 find_decls_types_r, fld, fld->pset);
5499 break;
5500 }
5501 }
5502
5503
5504 /* Find decls and types referenced in cgraph node N and store them in
5505 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5506 look for *every* kind of DECL and TYPE node reachable from N,
5507    including those embedded inside types and decls (i.e., TYPE_DECLs,
5508 NAMESPACE_DECLs, etc). */
5509
5510 static void
5511 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5512 {
5513 basic_block bb;
5514 struct function *fn;
5515 unsigned ix;
5516 tree t;
5517
5518 find_decls_types (n->decl, fld);
5519
5520 if (!gimple_has_body_p (n->decl))
5521 return;
5522
5523 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5524
5525 fn = DECL_STRUCT_FUNCTION (n->decl);
5526
5527 /* Traverse locals. */
5528 FOR_EACH_LOCAL_DECL (fn, ix, t)
5529 find_decls_types (t, fld);
5530
5531 /* Traverse EH regions in FN. */
5532 {
5533 eh_region r;
5534 FOR_ALL_EH_REGION_FN (r, fn)
5535 find_decls_types_in_eh_region (r, fld);
5536 }
5537
5538 /* Traverse every statement in FN. */
5539 FOR_EACH_BB_FN (bb, fn)
5540 {
5541 gimple_stmt_iterator si;
5542 unsigned i;
5543
5544 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5545 {
5546 gimple phi = gsi_stmt (si);
5547
5548 for (i = 0; i < gimple_phi_num_args (phi); i++)
5549 {
5550 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5551 find_decls_types (*arg_p, fld);
5552 }
5553 }
5554
5555 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5556 {
5557 gimple stmt = gsi_stmt (si);
5558
5559 if (is_gimple_call (stmt))
5560 find_decls_types (gimple_call_fntype (stmt), fld);
5561
5562 for (i = 0; i < gimple_num_ops (stmt); i++)
5563 {
5564 tree arg = gimple_op (stmt, i);
5565 find_decls_types (arg, fld);
5566 }
5567 }
5568 }
5569 }
5570
5571
5572 /* Find decls and types referenced in varpool node N and store them in
5573 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5574 look for *every* kind of DECL and TYPE node reachable from N,
5575    including those embedded inside types and decls (i.e., TYPE_DECLs,
5576 NAMESPACE_DECLs, etc). */
5577
5578 static void
5579 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5580 {
5581 find_decls_types (v->decl, fld);
5582 }
5583
5584 /* If T needs an assembler name, have one created for it. */
5585
5586 void
5587 assign_assembler_name_if_neeeded (tree t)
5588 {
5589 if (need_assembler_name_p (t))
5590 {
5591 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5592 diagnostics that use input_location to show locus
5593 information. The problem here is that, at this point,
5594 input_location is generally anchored to the end of the file
5595 (since the parser is long gone), so we don't have a good
5596 position to pin it to.
5597
5598 To alleviate this problem, this uses the location of T's
5599 declaration. Examples of this are
5600 testsuite/g++.dg/template/cond2.C and
5601 testsuite/g++.dg/template/pr35240.C. */
5602 location_t saved_location = input_location;
5603 input_location = DECL_SOURCE_LOCATION (t);
5604
5605 decl_assembler_name (t);
5606
5607 input_location = saved_location;
5608 }
5609 }
5610
5611
5612 /* Free language specific information for every operand and expression
5613 in every node of the call graph. This process operates in three stages:
5614
5615 1- Every callgraph node and varpool node is traversed looking for
5616 decls and types embedded in them. This is a more exhaustive
5617 search than that done by find_referenced_vars, because it will
5618 also collect individual fields, decls embedded in types, etc.
5619
5620 2- All the decls found are sent to free_lang_data_in_decl.
5621
5622 3- All the types found are sent to free_lang_data_in_type.
5623
5624 The ordering between decls and types is important because
5625 free_lang_data_in_decl sets assembler names, which includes
5626 mangling. So types cannot be freed up until assembler names have
5627 been set up. */
5628
5629 static void
5630 free_lang_data_in_cgraph (void)
5631 {
5632 struct cgraph_node *n;
5633 varpool_node *v;
5634 struct free_lang_data_d fld;
5635 tree t;
5636 unsigned i;
5637 alias_pair *p;
5638
5639 /* Initialize sets and arrays to store referenced decls and types. */
5640 fld.pset = new hash_set<tree>;
5641 fld.worklist.create (0);
5642 fld.decls.create (100);
5643 fld.types.create (100);
5644
5645 /* Find decls and types in the body of every function in the callgraph. */
5646 FOR_EACH_FUNCTION (n)
5647 find_decls_types_in_node (n, &fld);
5648
5649 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5650 find_decls_types (p->decl, &fld);
5651
5652 /* Find decls and types in every varpool symbol. */
5653 FOR_EACH_VARIABLE (v)
5654 find_decls_types_in_var (v, &fld);
5655
5656 /* Set the assembler name on every decl found. We need to do this
5657 now because free_lang_data_in_decl will invalidate data needed
5658 for mangling. This breaks mangling on interdependent decls. */
5659 FOR_EACH_VEC_ELT (fld.decls, i, t)
5660 assign_assembler_name_if_neeeded (t);
5661
5662 /* Traverse every decl found freeing its language data. */
5663 FOR_EACH_VEC_ELT (fld.decls, i, t)
5664 free_lang_data_in_decl (t);
5665
5666 /* Traverse every type found freeing its language data. */
5667 FOR_EACH_VEC_ELT (fld.types, i, t)
5668 free_lang_data_in_type (t);
5669
5670 delete fld.pset;
5671 fld.worklist.release ();
5672 fld.decls.release ();
5673 fld.types.release ();
5674 }
5675
5676
5677 /* Free resources used by the front end but not needed once it is done.  */
5678
5679 static unsigned
5680 free_lang_data (void)
5681 {
5682 unsigned i;
5683
5684 /* If we are the LTO frontend we have freed lang-specific data already. */
5685 if (in_lto_p
5686 || !flag_generate_lto)
5687 return 0;
5688
5689 /* Allocate and assign alias sets to the standard integer types
5690      while the slots still hold the types the front ends generated.  */
5691 for (i = 0; i < itk_none; ++i)
5692 if (integer_types[i])
5693 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5694
5695 /* Traverse the IL resetting language specific information for
5696 operands, expressions, etc. */
5697 free_lang_data_in_cgraph ();
5698
5699 /* Create gimple variants for common types. */
5700 ptrdiff_type_node = integer_type_node;
5701 fileptr_type_node = ptr_type_node;
5702
5703 /* Reset some langhooks. Do not reset types_compatible_p, it may
5704 still be used indirectly via the get_alias_set langhook. */
5705 lang_hooks.dwarf_name = lhd_dwarf_name;
5706 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5707 /* We do not want the default decl_assembler_name implementation,
5708 rather if we have fixed everything we want a wrapper around it
5709 asserting that all non-local symbols already got their assembler
5710 name and only produce assembler names for local symbols. Or rather
5711 make sure we never call decl_assembler_name on local symbols and
5712 devise a separate, middle-end private scheme for it. */
5713
5714 /* Reset diagnostic machinery. */
5715 tree_diagnostics_defaults (global_dc);
5716
5717 return 0;
5718 }
5719
5720
5721 namespace {
5722
5723 const pass_data pass_data_ipa_free_lang_data =
5724 {
5725 SIMPLE_IPA_PASS, /* type */
5726 "*free_lang_data", /* name */
5727 OPTGROUP_NONE, /* optinfo_flags */
5728 TV_IPA_FREE_LANG_DATA, /* tv_id */
5729 0, /* properties_required */
5730 0, /* properties_provided */
5731 0, /* properties_destroyed */
5732 0, /* todo_flags_start */
5733 0, /* todo_flags_finish */
5734 };
5735
5736 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5737 {
5738 public:
5739 pass_ipa_free_lang_data (gcc::context *ctxt)
5740 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5741 {}
5742
5743 /* opt_pass methods: */
5744 virtual unsigned int execute (function *) { return free_lang_data (); }
5745
5746 }; // class pass_ipa_free_lang_data
5747
5748 } // anon namespace
5749
5750 simple_ipa_opt_pass *
5751 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5752 {
5753 return new pass_ipa_free_lang_data (ctxt);
5754 }
5755
5756 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5757 ATTR_NAME. Also used internally by remove_attribute(). */
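
/* For instance, private_is_attribute_p ("packed", 6, ident) matches both
   the identifier "packed" and the identifier "__packed__"; the latter is
   handled by the ident_len == attr_len + 4 case below.  */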
5758 bool
5759 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5760 {
5761 size_t ident_len = IDENTIFIER_LENGTH (ident);
5762
5763 if (ident_len == attr_len)
5764 {
5765 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5766 return true;
5767 }
5768 else if (ident_len == attr_len + 4)
5769 {
5770 /* There is the possibility that ATTR is 'text' and IDENT is
5771 '__text__'. */
5772 const char *p = IDENTIFIER_POINTER (ident);
5773 if (p[0] == '_' && p[1] == '_'
5774 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5775 && strncmp (attr_name, p + 2, attr_len) == 0)
5776 return true;
5777 }
5778
5779 return false;
5780 }
5781
5782 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5783 of ATTR_NAME, and LIST is not NULL_TREE. */
5784 tree
5785 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5786 {
5787 while (list)
5788 {
5789 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5790
5791 if (ident_len == attr_len)
5792 {
5793 if (!strcmp (attr_name,
5794 IDENTIFIER_POINTER (get_attribute_name (list))))
5795 break;
5796 }
5797 /* TODO: If we made sure that attributes were stored in the
5798          canonical form without '__...__' (i.e., as in 'text' as opposed
5799 to '__text__') then we could avoid the following case. */
5800 else if (ident_len == attr_len + 4)
5801 {
5802 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5803 if (p[0] == '_' && p[1] == '_'
5804 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5805 && strncmp (attr_name, p + 2, attr_len) == 0)
5806 break;
5807 }
5808 list = TREE_CHAIN (list);
5809 }
5810
5811 return list;
5812 }
5813
5814 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5815    return the first element of LIST whose attribute name starts with
5816    ATTR_NAME.  ATTR_NAME must be in the form 'text' (not
5817    '__text__').  */
5818
5819 tree
5820 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5821 tree list)
5822 {
5823 while (list)
5824 {
5825 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5826
5827 if (attr_len > ident_len)
5828 {
5829 list = TREE_CHAIN (list);
5830 continue;
5831 }
5832
5833 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5834
5835 if (strncmp (attr_name, p, attr_len) == 0)
5836 break;
5837
5838 /* TODO: If we made sure that attributes were stored in the
5839          canonical form without '__...__' (i.e., as in 'text' as opposed
5840 to '__text__') then we could avoid the following case. */
5841       if (p[0] == '_' && p[1] == '_'
5842           && strncmp (attr_name, p + 2, attr_len) == 0)
5843 break;
5844
5845 list = TREE_CHAIN (list);
5846 }
5847
5848 return list;
5849 }
5850
5851
5852 /* A variant of lookup_attribute() that can be used with an identifier
5853 as the first argument, and where the identifier can be either
5854 'text' or '__text__'.
5855
5856 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5857 return a pointer to the attribute's list element if the attribute
5858 is part of the list, or NULL_TREE if not found. If the attribute
5859 appears more than once, this only returns the first occurrence; the
5860 TREE_CHAIN of the return value should be passed back in if further
5861 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5862 can be in the form 'text' or '__text__'. */
5863 static tree
5864 lookup_ident_attribute (tree attr_identifier, tree list)
5865 {
5866 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5867
5868 while (list)
5869 {
5870 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5871 == IDENTIFIER_NODE);
5872
5873 /* Identifiers can be compared directly for equality. */
5874 if (attr_identifier == get_attribute_name (list))
5875 break;
5876
5877 /* If they are not equal, they may still be one in the form
5878 'text' while the other one is in the form '__text__'. TODO:
5879 If we were storing attributes in normalized 'text' form, then
5880 this could all go away and we could take full advantage of
5881 the fact that we're comparing identifiers. :-) */
5882 {
5883 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5884 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5885
5886 if (ident_len == attr_len + 4)
5887 {
5888 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5889 const char *q = IDENTIFIER_POINTER (attr_identifier);
5890 if (p[0] == '_' && p[1] == '_'
5891 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5892 && strncmp (q, p + 2, attr_len) == 0)
5893 break;
5894 }
5895 else if (ident_len + 4 == attr_len)
5896 {
5897 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5898 const char *q = IDENTIFIER_POINTER (attr_identifier);
5899 if (q[0] == '_' && q[1] == '_'
5900 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5901 && strncmp (q + 2, p, ident_len) == 0)
5902 break;
5903 }
5904 }
5905 list = TREE_CHAIN (list);
5906 }
5907
5908 return list;
5909 }
5910
5911 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5912 modified list. */
5913
5914 tree
5915 remove_attribute (const char *attr_name, tree list)
5916 {
5917 tree *p;
5918 size_t attr_len = strlen (attr_name);
5919
5920 gcc_checking_assert (attr_name[0] != '_');
5921
5922 for (p = &list; *p; )
5923 {
5924 tree l = *p;
5925 /* TODO: If we were storing attributes in normalized form, here
5926 we could use a simple strcmp(). */
5927 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5928 *p = TREE_CHAIN (l);
5929 else
5930 p = &TREE_CHAIN (l);
5931 }
5932
5933 return list;
5934 }
5935
5936 /* Return an attribute list that is the union of A1 and A2. */
5937
5938 tree
5939 merge_attributes (tree a1, tree a2)
5940 {
5941 tree attributes;
5942
5943 /* Either one unset? Take the set one. */
5944
5945 if ((attributes = a1) == 0)
5946 attributes = a2;
5947
5948 /* One that completely contains the other? Take it. */
5949
5950 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5951 {
5952 if (attribute_list_contained (a2, a1))
5953 attributes = a2;
5954 else
5955 {
5956 /* Pick the longest list, and hang the other list onto it. */
5957
5958 if (list_length (a1) < list_length (a2))
5959 attributes = a2, a2 = a1;
5960
5961 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5962 {
5963 tree a;
5964 for (a = lookup_ident_attribute (get_attribute_name (a2),
5965 attributes);
5966 a != NULL_TREE && !attribute_value_equal (a, a2);
5967 a = lookup_ident_attribute (get_attribute_name (a2),
5968 TREE_CHAIN (a)))
5969 ;
5970 if (a == NULL_TREE)
5971 {
5972 a1 = copy_node (a2);
5973 TREE_CHAIN (a1) = attributes;
5974 attributes = a1;
5975 }
5976 }
5977 }
5978 }
5979 return attributes;
5980 }
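
/* For illustration: merging the attribute lists of

     __attribute__ ((noreturn))        and
     __attribute__ ((noreturn, noinline))

   yields a single list containing 'noreturn' and 'noinline'; the
   duplicate 'noreturn' is dropped because lookup_ident_attribute finds
   an entry that attribute_value_equal considers equal.  ('noreturn'
   and 'noinline' are merely familiar examples, not attributes that
   this function treats specially.)  */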
5981
5982 /* Given types T1 and T2, merge their attributes and return
5983 the result. */
5984
5985 tree
5986 merge_type_attributes (tree t1, tree t2)
5987 {
5988 return merge_attributes (TYPE_ATTRIBUTES (t1),
5989 TYPE_ATTRIBUTES (t2));
5990 }
5991
5992 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5993 the result. */
5994
5995 tree
5996 merge_decl_attributes (tree olddecl, tree newdecl)
5997 {
5998 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5999 DECL_ATTRIBUTES (newdecl));
6000 }
6001
6002 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6003
6004 /* Specialization of merge_decl_attributes for various Windows targets.
6005
6006 This handles the following situation:
6007
6008 __declspec (dllimport) int foo;
6009 int foo;
6010
6011 The second instance of `foo' nullifies the dllimport. */
6012
6013 tree
6014 merge_dllimport_decl_attributes (tree old, tree new_tree)
6015 {
6016 tree a;
6017 int delete_dllimport_p = 1;
6018
6019 /* What we need to do here is remove from `old' dllimport if it doesn't
6020 appear in `new'. dllimport behaves like extern: if a declaration is
6021 marked dllimport and a definition appears later, then the object
6022 is not dllimport'd. We also remove a `new' dllimport if the old list
6023 contains dllexport: dllexport always overrides dllimport, regardless
6024 of the order of declaration. */
6025 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6026 delete_dllimport_p = 0;
6027 else if (DECL_DLLIMPORT_P (new_tree)
6028 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6029 {
6030 DECL_DLLIMPORT_P (new_tree) = 0;
6031 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6032 "dllimport ignored", new_tree);
6033 }
6034 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6035 {
6036 /* Warn about overriding a symbol that has already been used, e.g.:
6037 extern int __attribute__ ((dllimport)) foo;
6038 int* bar () {return &foo;}
6039 int foo;
6040 */
6041 if (TREE_USED (old))
6042 {
6043 warning (0, "%q+D redeclared without dllimport attribute "
6044 "after being referenced with dll linkage", new_tree);
6045 /* If we have used a variable's address with dllimport linkage,
6046 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6047 decl may already have had TREE_CONSTANT computed.
6048 We still remove the attribute so that assembler code refers
6049 to '&foo' rather than '_imp__foo'. */
6050 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6051 DECL_DLLIMPORT_P (new_tree) = 1;
6052 }
6053
6054 /* Let an inline definition silently override the external reference,
6055 but otherwise warn about attribute inconsistency. */
6056 else if (TREE_CODE (new_tree) == VAR_DECL
6057 || !DECL_DECLARED_INLINE_P (new_tree))
6058 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6059 "previous dllimport ignored", new_tree);
6060 }
6061 else
6062 delete_dllimport_p = 0;
6063
6064 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6065
6066 if (delete_dllimport_p)
6067 a = remove_attribute ("dllimport", a);
6068
6069 return a;
6070 }
6071
6072 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6073 struct attribute_spec.handler. */
6074
6075 tree
6076 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6077 bool *no_add_attrs)
6078 {
6079 tree node = *pnode;
6080 bool is_dllimport;
6081
6082 /* These attributes may apply to structure and union types being created,
6083 but otherwise should pass to the declaration involved. */
6084 if (!DECL_P (node))
6085 {
6086 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6087 | (int) ATTR_FLAG_ARRAY_NEXT))
6088 {
6089 *no_add_attrs = true;
6090 return tree_cons (name, args, NULL_TREE);
6091 }
6092 if (TREE_CODE (node) == RECORD_TYPE
6093 || TREE_CODE (node) == UNION_TYPE)
6094 {
6095 node = TYPE_NAME (node);
6096 if (!node)
6097 return NULL_TREE;
6098 }
6099 else
6100 {
6101 warning (OPT_Wattributes, "%qE attribute ignored",
6102 name);
6103 *no_add_attrs = true;
6104 return NULL_TREE;
6105 }
6106 }
6107
6108 if (TREE_CODE (node) != FUNCTION_DECL
6109 && TREE_CODE (node) != VAR_DECL
6110 && TREE_CODE (node) != TYPE_DECL)
6111 {
6112 *no_add_attrs = true;
6113 warning (OPT_Wattributes, "%qE attribute ignored",
6114 name);
6115 return NULL_TREE;
6116 }
6117
6118 if (TREE_CODE (node) == TYPE_DECL
6119 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6120 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6121 {
6122 *no_add_attrs = true;
6123 warning (OPT_Wattributes, "%qE attribute ignored",
6124 name);
6125 return NULL_TREE;
6126 }
6127
6128 is_dllimport = is_attribute_p ("dllimport", name);
6129
6130 /* Report error on dllimport ambiguities seen now before they cause
6131 any damage. */
6132 if (is_dllimport)
6133 {
6134 /* Honor any target-specific overrides. */
6135 if (!targetm.valid_dllimport_attribute_p (node))
6136 *no_add_attrs = true;
6137
6138 else if (TREE_CODE (node) == FUNCTION_DECL
6139 && DECL_DECLARED_INLINE_P (node))
6140 {
6141 warning (OPT_Wattributes, "inline function %q+D declared as "
6142 " dllimport: attribute ignored", node);
6143 *no_add_attrs = true;
6144 }
6145 /* Like MS, treat a definition of a dllimport'd variable or
6146 non-inlined function as an error. */
6147 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6148 {
6149 error ("function %q+D definition is marked dllimport", node);
6150 *no_add_attrs = true;
6151 }
6152
6153 else if (TREE_CODE (node) == VAR_DECL)
6154 {
6155 if (DECL_INITIAL (node))
6156 {
6157 error ("variable %q+D definition is marked dllimport",
6158 node);
6159 *no_add_attrs = true;
6160 }
6161
6162 /* `extern' needn't be specified with dllimport.
6163 Specify `extern' now and hope for the best. Sigh. */
6164 DECL_EXTERNAL (node) = 1;
6165 /* Also, implicitly give global scope to dllimport'd variables
6166 declared within a function, unless they are declared static. */
6167 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6168 TREE_PUBLIC (node) = 1;
6169 }
6170
6171 if (*no_add_attrs == false)
6172 DECL_DLLIMPORT_P (node) = 1;
6173 }
6174 else if (TREE_CODE (node) == FUNCTION_DECL
6175 && DECL_DECLARED_INLINE_P (node)
6176 && flag_keep_inline_dllexport)
6177 /* An exported function, even if inline, must be emitted. */
6178 DECL_EXTERNAL (node) = 0;
6179
6180 /* Report error if symbol is not accessible at global scope. */
6181 if (!TREE_PUBLIC (node)
6182 && (TREE_CODE (node) == VAR_DECL
6183 || TREE_CODE (node) == FUNCTION_DECL))
6184 {
6185 error ("external linkage required for symbol %q+D because of "
6186 "%qE attribute", node, name);
6187 *no_add_attrs = true;
6188 }
6189
6190 /* A dllexport'd entity must have default visibility so that other
6191 program units (shared libraries or the main executable) can see
6192 it. A dllimport'd entity must have default visibility so that
6193 the linker knows that undefined references within this program
6194 unit can be resolved by the dynamic linker. */
6195 if (!*no_add_attrs)
6196 {
6197 if (DECL_VISIBILITY_SPECIFIED (node)
6198 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6199 error ("%qE implies default visibility, but %qD has already "
6200 "been declared with a different visibility",
6201 name, node);
6202 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6203 DECL_VISIBILITY_SPECIFIED (node) = 1;
6204 }
6205
6206 return NULL_TREE;
6207 }
6208
6209 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6210 \f
6211 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6212 of the various TYPE_QUAL values. */
6213
6214 static void
6215 set_type_quals (tree type, int type_quals)
6216 {
6217 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6218 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6219 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6220 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6221 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6222 }
6223
6224 /* Returns true iff unqualified CAND and BASE are equivalent. */
6225
6226 bool
6227 check_base_type (const_tree cand, const_tree base)
6228 {
6229 return (TYPE_NAME (cand) == TYPE_NAME (base)
6230 /* Apparently this is needed for Objective-C. */
6231 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6232 /* Check alignment. */
6233 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6234 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6235 TYPE_ATTRIBUTES (base)));
6236 }
6237
6238 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6239
6240 bool
6241 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6242 {
6243 return (TYPE_QUALS (cand) == type_quals
6244 && check_base_type (cand, base));
6245 }
6246
6247 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6248
6249 static bool
6250 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6251 {
6252 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6253 && TYPE_NAME (cand) == TYPE_NAME (base)
6254 /* Apparently this is needed for Objective-C. */
6255 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6256 /* Check alignment. */
6257 && TYPE_ALIGN (cand) == align
6258 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6259 TYPE_ATTRIBUTES (base)));
6260 }
6261
6262 /* Check whether TYPE matches the size of one of the built-in
6263 atomic types, and return that core atomic type if so. */
6264
6265 static tree
6266 find_atomic_core_type (tree type)
6267 {
6268 tree base_atomic_type;
6269
6270 /* Only handle complete types. */
6271 if (TYPE_SIZE (type) == NULL_TREE)
6272 return NULL_TREE;
6273
6274 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6275 switch (type_size)
6276 {
6277 case 8:
6278 base_atomic_type = atomicQI_type_node;
6279 break;
6280
6281 case 16:
6282 base_atomic_type = atomicHI_type_node;
6283 break;
6284
6285 case 32:
6286 base_atomic_type = atomicSI_type_node;
6287 break;
6288
6289 case 64:
6290 base_atomic_type = atomicDI_type_node;
6291 break;
6292
6293 case 128:
6294 base_atomic_type = atomicTI_type_node;
6295 break;
6296
6297 default:
6298 base_atomic_type = NULL_TREE;
6299 }
6300
6301 return base_atomic_type;
6302 }
6303
6304 /* Return a version of the TYPE, qualified as indicated by the
6305 TYPE_QUALS, if one exists. If no qualified version exists yet,
6306 return NULL_TREE. */
6307
6308 tree
6309 get_qualified_type (tree type, int type_quals)
6310 {
6311 tree t;
6312
6313 if (TYPE_QUALS (type) == type_quals)
6314 return type;
6315
6316 /* Search the chain of variants to see if there is already one there just
6317 like the one we need to have. If so, use that existing one. We must
6318 preserve the TYPE_NAME, since there is code that depends on this. */
6319 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6320 if (check_qualified_type (t, type, type_quals))
6321 return t;
6322
6323 return NULL_TREE;
6324 }
6325
6326 /* Like get_qualified_type, but creates the type if it does not
6327 exist. This function never returns NULL_TREE. */
6328
6329 tree
6330 build_qualified_type (tree type, int type_quals)
6331 {
6332 tree t;
6333
6334 /* See if we already have the appropriate qualified variant. */
6335 t = get_qualified_type (type, type_quals);
6336
6337 /* If not, build it. */
6338 if (!t)
6339 {
6340 t = build_variant_type_copy (type);
6341 set_type_quals (t, type_quals);
6342
6343 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6344 {
6345 /* See if this object can map to a basic atomic type. */
6346 tree atomic_type = find_atomic_core_type (type);
6347 if (atomic_type)
6348 {
6349 /* Ensure the alignment of this type is compatible with
6350 the required alignment of the atomic type. */
6351 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6352 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6353 }
6354 }
6355
6356 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6357 /* Propagate structural equality. */
6358 SET_TYPE_STRUCTURAL_EQUALITY (t);
6359 else if (TYPE_CANONICAL (type) != type)
6360 /* Build the underlying canonical type, since it is different
6361 from TYPE. */
6362 {
6363 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6364 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6365 }
6366 else
6367 /* T is its own canonical type. */
6368 TYPE_CANONICAL (t) = t;
6369
6370 }
6371
6372 return t;
6373 }
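
/* For illustration: a const-qualified variant of TYPE is obtained with

     tree ctype = build_qualified_type (type,
                                        TYPE_QUALS (type) | TYPE_QUAL_CONST);

   which reuses an existing variant from the TYPE_NEXT_VARIANT chain
   when one is already present.  */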
6374
6375 /* Create a variant of TYPE with alignment ALIGN. */
6376
6377 tree
6378 build_aligned_type (tree type, unsigned int align)
6379 {
6380 tree t;
6381
6382 if (TYPE_PACKED (type)
6383 || TYPE_ALIGN (type) == align)
6384 return type;
6385
6386 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6387 if (check_aligned_type (t, type, align))
6388 return t;
6389
6390 t = build_variant_type_copy (type);
6391 TYPE_ALIGN (t) = align;
6392
6393 return t;
6394 }
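
/* For illustration: ALIGN is expressed in bits, so

     tree t16 = build_aligned_type (type, 128);

   requests a 16-byte-aligned variant; if TYPE is packed or already has
   that alignment, TYPE itself is returned unchanged.  */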
6395
6396 /* Create a new distinct copy of TYPE. The new type is made its own
6397 MAIN_VARIANT. If TYPE requires structural equality checks, the
6398 resulting type requires structural equality checks; otherwise, its
6399 TYPE_CANONICAL points to itself. */
6400
6401 tree
6402 build_distinct_type_copy (tree type)
6403 {
6404 tree t = copy_node (type);
6405
6406 TYPE_POINTER_TO (t) = 0;
6407 TYPE_REFERENCE_TO (t) = 0;
6408
6409 /* Set the canonical type either to a new equivalence class, or
6410 propagate the need for structural equality checks. */
6411 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6412 SET_TYPE_STRUCTURAL_EQUALITY (t);
6413 else
6414 TYPE_CANONICAL (t) = t;
6415
6416 /* Make it its own variant. */
6417 TYPE_MAIN_VARIANT (t) = t;
6418 TYPE_NEXT_VARIANT (t) = 0;
6419
6420 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6421 whose TREE_TYPE is not t. This can also happen in the Ada
6422 frontend when using subtypes. */
6423
6424 return t;
6425 }
6426
6427 /* Create a new variant of TYPE, equivalent but distinct. This is so
6428 the caller can modify it. TYPE_CANONICAL for the return type will
6429 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6430 are considered equal by the language itself (or that both types
6431 require structural equality checks). */
6432
6433 tree
6434 build_variant_type_copy (tree type)
6435 {
6436 tree t, m = TYPE_MAIN_VARIANT (type);
6437
6438 t = build_distinct_type_copy (type);
6439
6440 /* Since we're building a variant, assume that it is a non-semantic
6441 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6442 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6443
6444 /* Add the new type to the chain of variants of TYPE. */
6445 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6446 TYPE_NEXT_VARIANT (m) = t;
6447 TYPE_MAIN_VARIANT (t) = m;
6448
6449 return t;
6450 }
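
/* For illustration, the difference between the two copy routines:

     tree d = build_distinct_type_copy (type);
     tree v = build_variant_type_copy (type);

   D becomes its own main variant with its own canonical type, so it is
   a new type for equivalence purposes; V stays on TYPE's variant chain
   and shares its TYPE_CANONICAL, so the language still treats it as
   the same type.  */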
6451 \f
6452 /* Return true if the FROM trees in both tree maps are equal. */
6453
6454 int
6455 tree_map_base_eq (const void *va, const void *vb)
6456 {
6457 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6458 *const b = (const struct tree_map_base *) vb;
6459 return (a->from == b->from);
6460 }
6461
6462 /* Hash the FROM tree in a tree_map_base. */
6463
6464 unsigned int
6465 tree_map_base_hash (const void *item)
6466 {
6467 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6468 }
6469
6470 /* Return true if this tree map structure is marked for garbage collection
6471 purposes. We simply return true if the from tree is marked, so that this
6472 structure goes away when the from tree goes away. */
6473
6474 int
6475 tree_map_base_marked_p (const void *p)
6476 {
6477 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6478 }
6479
6480 /* Hash a from tree in a tree_map. */
6481
6482 unsigned int
6483 tree_map_hash (const void *item)
6484 {
6485 return (((const struct tree_map *) item)->hash);
6486 }
6487
6488 /* Hash a from tree in a tree_decl_map. */
6489
6490 unsigned int
6491 tree_decl_map_hash (const void *item)
6492 {
6493 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6494 }
6495
6496 /* Return the initialization priority for DECL. */
6497
6498 priority_type
6499 decl_init_priority_lookup (tree decl)
6500 {
6501 symtab_node *snode = symtab_node::get (decl);
6502
6503 if (!snode)
6504 return DEFAULT_INIT_PRIORITY;
6505 return
6506 snode->get_init_priority ();
6507 }
6508
6509 /* Return the finalization priority for DECL. */
6510
6511 priority_type
6512 decl_fini_priority_lookup (tree decl)
6513 {
6514 cgraph_node *node = cgraph_node::get (decl);
6515
6516 if (!node)
6517 return DEFAULT_INIT_PRIORITY;
6518 return
6519 node->get_fini_priority ();
6520 }
6521
6522 /* Set the initialization priority for DECL to PRIORITY. */
6523
6524 void
6525 decl_init_priority_insert (tree decl, priority_type priority)
6526 {
6527 struct symtab_node *snode;
6528
6529 if (priority == DEFAULT_INIT_PRIORITY)
6530 {
6531 snode = symtab_node::get (decl);
6532 if (!snode)
6533 return;
6534 }
6535 else if (TREE_CODE (decl) == VAR_DECL)
6536 snode = varpool_node::get_create (decl);
6537 else
6538 snode = cgraph_node::get_create (decl);
6539 snode->set_init_priority (priority);
6540 }
6541
6542 /* Set the finalization priority for DECL to PRIORITY. */
6543
6544 void
6545 decl_fini_priority_insert (tree decl, priority_type priority)
6546 {
6547 struct cgraph_node *node;
6548
6549 if (priority == DEFAULT_INIT_PRIORITY)
6550 {
6551 node = cgraph_node::get (decl);
6552 if (!node)
6553 return;
6554 }
6555 else
6556 node = cgraph_node::get_create (decl);
6557 node->set_fini_priority (priority);
6558 }
6559
6560 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6561
6562 static void
6563 print_debug_expr_statistics (void)
6564 {
6565 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6566 (long) htab_size (debug_expr_for_decl),
6567 (long) htab_elements (debug_expr_for_decl),
6568 htab_collisions (debug_expr_for_decl));
6569 }
6570
6571 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6572
6573 static void
6574 print_value_expr_statistics (void)
6575 {
6576 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6577 (long) htab_size (value_expr_for_decl),
6578 (long) htab_elements (value_expr_for_decl),
6579 htab_collisions (value_expr_for_decl));
6580 }
6581
6582 /* Lookup a debug expression for FROM, and return it if we find one. */
6583
6584 tree
6585 decl_debug_expr_lookup (tree from)
6586 {
6587 struct tree_decl_map *h, in;
6588 in.base.from = from;
6589
6590 h = (struct tree_decl_map *)
6591 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6592 if (h)
6593 return h->to;
6594 return NULL_TREE;
6595 }
6596
6597 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6598
6599 void
6600 decl_debug_expr_insert (tree from, tree to)
6601 {
6602 struct tree_decl_map *h;
6603 void **loc;
6604
6605 h = ggc_alloc<tree_decl_map> ();
6606 h->base.from = from;
6607 h->to = to;
6608 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6609 INSERT);
6610 *(struct tree_decl_map **) loc = h;
6611 }
6612
6613 /* Lookup a value expression for FROM, and return it if we find one. */
6614
6615 tree
6616 decl_value_expr_lookup (tree from)
6617 {
6618 struct tree_decl_map *h, in;
6619 in.base.from = from;
6620
6621 h = (struct tree_decl_map *)
6622 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6623 if (h)
6624 return h->to;
6625 return NULL_TREE;
6626 }
6627
6628 /* Insert a mapping FROM->TO in the value expression hashtable. */
6629
6630 void
6631 decl_value_expr_insert (tree from, tree to)
6632 {
6633 struct tree_decl_map *h;
6634 void **loc;
6635
6636 h = ggc_alloc<tree_decl_map> ();
6637 h->base.from = from;
6638 h->to = to;
6639 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6640 INSERT);
6641 *(struct tree_decl_map **) loc = h;
6642 }
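
/* For illustration (DECL and EXPR are placeholder names): after

     decl_value_expr_insert (decl, expr);

   a later decl_value_expr_lookup (decl) returns EXPR.  Both this table
   and the debug-expression table are keyed on DECL_UID, and an entry
   lives exactly as long as its key decl survives garbage collection
   (see tree_map_base_marked_p above).  */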
6643
6644 /* Lookup a vector of debug arguments for FROM, and return it if we
6645 find one. */
6646
6647 vec<tree, va_gc> **
6648 decl_debug_args_lookup (tree from)
6649 {
6650 struct tree_vec_map *h, in;
6651
6652 if (!DECL_HAS_DEBUG_ARGS_P (from))
6653 return NULL;
6654 gcc_checking_assert (debug_args_for_decl != NULL);
6655 in.base.from = from;
6656 h = (struct tree_vec_map *)
6657 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6658 if (h)
6659 return &h->to;
6660 return NULL;
6661 }
6662
6663 /* Insert a mapping FROM->empty vector of debug arguments in the
6664 debug arguments hashtable. */
6665
6666 vec<tree, va_gc> **
6667 decl_debug_args_insert (tree from)
6668 {
6669 struct tree_vec_map *h;
6670 void **loc;
6671
6672 if (DECL_HAS_DEBUG_ARGS_P (from))
6673 return decl_debug_args_lookup (from);
6674 if (debug_args_for_decl == NULL)
6675 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6676 tree_vec_map_eq, 0);
6677 h = ggc_alloc<tree_vec_map> ();
6678 h->base.from = from;
6679 h->to = NULL;
6680 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6681 INSERT);
6682 *(struct tree_vec_map **) loc = h;
6683 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6684 return &h->to;
6685 }
6686
6687 /* Hashing of types so that we don't make duplicates.
6688 The entry point is `type_hash_canon'. */
6689
6690 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6691 with types in the TREE_VALUE slots), by adding the hash codes
6692 of the individual types. */
6693
6694 static void
6695 type_hash_list (const_tree list, inchash::hash &hstate)
6696 {
6697 const_tree tail;
6698
6699 for (tail = list; tail; tail = TREE_CHAIN (tail))
6700 if (TREE_VALUE (tail) != error_mark_node)
6701 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6702 }
6703
6704 /* These are the Hashtable callback functions. */
6705
6706 /* Returns true iff the types are equivalent. */
6707
6708 static int
6709 type_hash_eq (const void *va, const void *vb)
6710 {
6711 const struct type_hash *const a = (const struct type_hash *) va,
6712 *const b = (const struct type_hash *) vb;
6713
6714 /* First test the things that are the same for all types. */
6715 if (a->hash != b->hash
6716 || TREE_CODE (a->type) != TREE_CODE (b->type)
6717 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6718 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6719 TYPE_ATTRIBUTES (b->type))
6720 || (TREE_CODE (a->type) != COMPLEX_TYPE
6721 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6722 return 0;
6723
6724 /* Be careful about comparing arrays before and after the element type
6725 has been completed; don't compare TYPE_ALIGN unless both types are
6726 complete. */
6727 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6728 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6729 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6730 return 0;
6731
6732 switch (TREE_CODE (a->type))
6733 {
6734 case VOID_TYPE:
6735 case COMPLEX_TYPE:
6736 case POINTER_TYPE:
6737 case REFERENCE_TYPE:
6738 case NULLPTR_TYPE:
6739 return 1;
6740
6741 case VECTOR_TYPE:
6742 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6743
6744 case ENUMERAL_TYPE:
6745 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6746 && !(TYPE_VALUES (a->type)
6747 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6748 && TYPE_VALUES (b->type)
6749 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6750 && type_list_equal (TYPE_VALUES (a->type),
6751 TYPE_VALUES (b->type))))
6752 return 0;
6753
6754 /* ... fall through ... */
6755
6756 case INTEGER_TYPE:
6757 case REAL_TYPE:
6758 case BOOLEAN_TYPE:
6759 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6760 return false;
6761 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6762 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6763 TYPE_MAX_VALUE (b->type)))
6764 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6765 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6766 TYPE_MIN_VALUE (b->type))));
6767
6768 case FIXED_POINT_TYPE:
6769 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6770
6771 case OFFSET_TYPE:
6772 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6773
6774 case METHOD_TYPE:
6775 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6776 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6777 || (TYPE_ARG_TYPES (a->type)
6778 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6779 && TYPE_ARG_TYPES (b->type)
6780 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6781 && type_list_equal (TYPE_ARG_TYPES (a->type),
6782 TYPE_ARG_TYPES (b->type)))))
6783 break;
6784 return 0;
6785 case ARRAY_TYPE:
6786 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6787
6788 case RECORD_TYPE:
6789 case UNION_TYPE:
6790 case QUAL_UNION_TYPE:
6791 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6792 || (TYPE_FIELDS (a->type)
6793 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6794 && TYPE_FIELDS (b->type)
6795 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6796 && type_list_equal (TYPE_FIELDS (a->type),
6797 TYPE_FIELDS (b->type))));
6798
6799 case FUNCTION_TYPE:
6800 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6801 || (TYPE_ARG_TYPES (a->type)
6802 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6803 && TYPE_ARG_TYPES (b->type)
6804 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6805 && type_list_equal (TYPE_ARG_TYPES (a->type),
6806 TYPE_ARG_TYPES (b->type))))
6807 break;
6808 return 0;
6809
6810 default:
6811 return 0;
6812 }
6813
6814 if (lang_hooks.types.type_hash_eq != NULL)
6815 return lang_hooks.types.type_hash_eq (a->type, b->type);
6816
6817 return 1;
6818 }
6819
6820 /* Return the cached hash value. */
6821
6822 static hashval_t
6823 type_hash_hash (const void *item)
6824 {
6825 return ((const struct type_hash *) item)->hash;
6826 }
6827
6828 /* Given TYPE, and HASHCODE its hash code, return the canonical
6829 object for an identical type if one already exists.
6830 Otherwise, return TYPE, and record it as the canonical object.
6831
6832 To use this function, first create a type of the sort you want.
6833 Then compute its hash code from the fields of the type that
6834 make it different from other similar types.
6835 Then call this function and use the value. */
6836
6837 tree
6838 type_hash_canon (unsigned int hashcode, tree type)
6839 {
6840 type_hash in;
6841 void **loc;
6842
6843 /* The hash table only contains main variants, so ensure that's what we're
6844 being passed. */
6845 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6846
6847 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6848 must call that routine before comparing TYPE_ALIGNs. */
6849 layout_type (type);
6850
6851 in.hash = hashcode;
6852 in.type = type;
6853
6854 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6855 if (*loc)
6856 {
6857 tree t1 = ((type_hash *) *loc)->type;
6858 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6859 if (GATHER_STATISTICS)
6860 {
6861 tree_code_counts[(int) TREE_CODE (type)]--;
6862 tree_node_counts[(int) t_kind]--;
6863 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6864 }
6865 return t1;
6866 }
6867 else
6868 {
6869 struct type_hash *h;
6870
6871 h = ggc_alloc<type_hash> ();
6872 h->hash = hashcode;
6873 h->type = type;
6874 *loc = (void *)h;
6875
6876 return type;
6877 }
6878 }
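
/* Illustrative sketch of the calling pattern described above (compare
   build_range_type_1 later in this file); the field hashed here is
   only an example:

     tree t = make_node (INTEGER_TYPE);
     ... fill in the fields that distinguish the new type ...
     inchash::hash hstate;
     hstate.add_object (TYPE_PRECISION (t));
     t = type_hash_canon (hstate.end (), t);

   Afterwards T is either the node just built or an existing identical
   node that replaces it.  */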
6879
6880 /* See if the data pointed to by the type hash table entry is marked.
6881 We simply consider it marked if the type it records is itself
6882 marked for garbage collection. */
6883
6884 static int
6885 type_hash_marked_p (const void *p)
6886 {
6887 const_tree const type = ((const struct type_hash *) p)->type;
6888
6889 return ggc_marked_p (type);
6890 }
6891
6892 static void
6893 print_type_hash_statistics (void)
6894 {
6895 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6896 (long) htab_size (type_hash_table),
6897 (long) htab_elements (type_hash_table),
6898 htab_collisions (type_hash_table));
6899 }
6900
6901 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6902 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6903 by adding the hash codes of the individual attributes. */
6904
6905 static void
6906 attribute_hash_list (const_tree list, inchash::hash &hstate)
6907 {
6908 const_tree tail;
6909
6910 for (tail = list; tail; tail = TREE_CHAIN (tail))
6911 /* ??? Do we want to add in TREE_VALUE too? */
6912 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6913 }
6914
6915 /* Given two lists of attributes, return true if list L2 is
6916 equivalent to L1. */
6917
6918 int
6919 attribute_list_equal (const_tree l1, const_tree l2)
6920 {
6921 if (l1 == l2)
6922 return 1;
6923
6924 return attribute_list_contained (l1, l2)
6925 && attribute_list_contained (l2, l1);
6926 }
6927
6928 /* Given two lists of attributes, return true if list L2 is
6929 completely contained within L1. */
6930 /* ??? This would be faster if attribute names were stored in a canonicalized
6931 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6932 must be used to show these elements are equivalent (which they are). */
6933 /* ??? It's not clear that attributes with arguments will always be handled
6934 correctly. */
6935
6936 int
6937 attribute_list_contained (const_tree l1, const_tree l2)
6938 {
6939 const_tree t1, t2;
6940
6941 /* First check the obvious, maybe the lists are identical. */
6942 if (l1 == l2)
6943 return 1;
6944
6945 /* Maybe the lists are similar. */
6946 for (t1 = l1, t2 = l2;
6947 t1 != 0 && t2 != 0
6948 && get_attribute_name (t1) == get_attribute_name (t2)
6949 && TREE_VALUE (t1) == TREE_VALUE (t2);
6950 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6951 ;
6952
6953 /* Maybe the lists are equal. */
6954 if (t1 == 0 && t2 == 0)
6955 return 1;
6956
6957 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6958 {
6959 const_tree attr;
6960 /* This CONST_CAST is okay because lookup_attribute does not
6961 modify its argument and the return value is assigned to a
6962 const_tree. */
6963 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6964 CONST_CAST_TREE (l1));
6965 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6966 attr = lookup_ident_attribute (get_attribute_name (t2),
6967 TREE_CHAIN (attr)))
6968 ;
6969
6970 if (attr == NULL_TREE)
6971 return 0;
6972 }
6973
6974 return 1;
6975 }
6976
6977 /* Given two lists of types
6978 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6979 return 1 if the lists contain the same types in the same order.
6980 Also, the TREE_PURPOSEs must match. */
6981
6982 int
6983 type_list_equal (const_tree l1, const_tree l2)
6984 {
6985 const_tree t1, t2;
6986
6987 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6988 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6989 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6990 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6991 && (TREE_TYPE (TREE_PURPOSE (t1))
6992 == TREE_TYPE (TREE_PURPOSE (t2))))))
6993 return 0;
6994
6995 return t1 == t2;
6996 }
6997
6998 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6999 given by TYPE. If the argument list accepts variable arguments,
7000 then this function counts only the ordinary arguments. */
7001
7002 int
7003 type_num_arguments (const_tree type)
7004 {
7005 int i = 0;
7006 tree t;
7007
7008 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7009 /* If the function does not take a variable number of arguments,
7010 the last element in the list will have type `void'. */
7011 if (VOID_TYPE_P (TREE_VALUE (t)))
7012 break;
7013 else
7014 ++i;
7015
7016 return i;
7017 }
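
/* Worked example: for 'int f (int, char *, ...)' the TYPE_ARG_TYPES
   list contains just 'int' and 'char *' (no trailing 'void'), so the
   result is 2; for the non-variadic 'int g (int, char *)' the list is
   'int', 'char *', 'void' and the result is again 2 because the loop
   stops at the 'void' sentinel.  */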
7018
7019 /* Nonzero if integer constants T1 and T2
7020 represent the same constant value. */
7021
7022 int
7023 tree_int_cst_equal (const_tree t1, const_tree t2)
7024 {
7025 if (t1 == t2)
7026 return 1;
7027
7028 if (t1 == 0 || t2 == 0)
7029 return 0;
7030
7031 if (TREE_CODE (t1) == INTEGER_CST
7032 && TREE_CODE (t2) == INTEGER_CST
7033 && wi::to_widest (t1) == wi::to_widest (t2))
7034 return 1;
7035
7036 return 0;
7037 }
7038
7039 /* Return true if T is an INTEGER_CST whose numerical value (extended
7040 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7041
7042 bool
7043 tree_fits_shwi_p (const_tree t)
7044 {
7045 return (t != NULL_TREE
7046 && TREE_CODE (t) == INTEGER_CST
7047 && wi::fits_shwi_p (wi::to_widest (t)));
7048 }
7049
7050 /* Return true if T is an INTEGER_CST whose numerical value (extended
7051 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7052
7053 bool
7054 tree_fits_uhwi_p (const_tree t)
7055 {
7056 return (t != NULL_TREE
7057 && TREE_CODE (t) == INTEGER_CST
7058 && wi::fits_uhwi_p (wi::to_widest (t)));
7059 }
7060
7061 /* T is an INTEGER_CST whose numerical value (extended according to
7062 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7063 HOST_WIDE_INT. */
7064
7065 HOST_WIDE_INT
7066 tree_to_shwi (const_tree t)
7067 {
7068 gcc_assert (tree_fits_shwi_p (t));
7069 return TREE_INT_CST_LOW (t);
7070 }
7071
7072 /* T is an INTEGER_CST whose numerical value (extended according to
7073 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7074 HOST_WIDE_INT. */
7075
7076 unsigned HOST_WIDE_INT
7077 tree_to_uhwi (const_tree t)
7078 {
7079 gcc_assert (tree_fits_uhwi_p (t));
7080 return TREE_INT_CST_LOW (t);
7081 }
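
/* Illustrative usage (SIZE stands for any INTEGER_CST tree), mirroring
   the guarded pattern used elsewhere in this file, e.g. in
   build_nonstandard_integer_type below:

     if (tree_fits_uhwi_p (size))
       {
         unsigned HOST_WIDE_INT n = tree_to_uhwi (size);
         ...
       }

   Calling tree_to_uhwi without the guard trips the assertion above.  */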
7082
7083 /* Return the most significant (sign) bit of T. */
7084
7085 int
7086 tree_int_cst_sign_bit (const_tree t)
7087 {
7088 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7089
7090 return wi::extract_uhwi (t, bitno, 1);
7091 }
7092
7093 /* Return an indication of the sign of the integer constant T.
7094 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7095 Note that -1 will never be returned if T's type is unsigned. */
7096
7097 int
7098 tree_int_cst_sgn (const_tree t)
7099 {
7100 if (wi::eq_p (t, 0))
7101 return 0;
7102 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7103 return 1;
7104 else if (wi::neg_p (t))
7105 return -1;
7106 else
7107 return 1;
7108 }
7109
7110 /* Return the minimum number of bits needed to represent VALUE in a
7111 signed or unsigned type; SGN says which. */
7112
7113 unsigned int
7114 tree_int_cst_min_precision (tree value, signop sgn)
7115 {
7116 /* If the value is negative, compute its negative minus 1. The latter
7117 adjustment is because the absolute value of the largest negative value
7118 is one larger than the largest positive value. This is equivalent to
7119 a bit-wise negation, so use that operation instead. */
7120
7121 if (tree_int_cst_sgn (value) < 0)
7122 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7123
7124 /* Return the number of bits needed, taking into account the fact
7125 that a signed type needs one more bit than an unsigned one.
7126 If the value is 0 or -1, the minimum precision is 1 no matter
7127 whether SGN is SIGNED or UNSIGNED. */
7128
7129 if (integer_zerop (value))
7130 return 1;
7131 else
7132 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7133 }
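
/* Worked examples: for VALUE 5, tree_floor_log2 (5) is 2, so the
   result is 3 bits when SGN is UNSIGNED and 4 bits when SIGNED; for
   VALUE -3 the bit-wise negation is 2, giving 1 + 1 + 1 = 3 bits when
   SIGNED; for VALUE 0 or -1 the result is 1, as noted above.  */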
7134
7135 /* Return truthvalue of whether T1 is the same tree structure as T2.
7136 Return 1 if they are the same.
7137 Return 0 if they are understandably different.
7138 Return -1 if either contains tree structure not understood by
7139 this function. */
7140
7141 int
7142 simple_cst_equal (const_tree t1, const_tree t2)
7143 {
7144 enum tree_code code1, code2;
7145 int cmp;
7146 int i;
7147
7148 if (t1 == t2)
7149 return 1;
7150 if (t1 == 0 || t2 == 0)
7151 return 0;
7152
7153 code1 = TREE_CODE (t1);
7154 code2 = TREE_CODE (t2);
7155
7156 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7157 {
7158 if (CONVERT_EXPR_CODE_P (code2)
7159 || code2 == NON_LVALUE_EXPR)
7160 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7161 else
7162 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7163 }
7164
7165 else if (CONVERT_EXPR_CODE_P (code2)
7166 || code2 == NON_LVALUE_EXPR)
7167 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7168
7169 if (code1 != code2)
7170 return 0;
7171
7172 switch (code1)
7173 {
7174 case INTEGER_CST:
7175 return wi::to_widest (t1) == wi::to_widest (t2);
7176
7177 case REAL_CST:
7178 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7179
7180 case FIXED_CST:
7181 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7182
7183 case STRING_CST:
7184 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7185 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7186 TREE_STRING_LENGTH (t1)));
7187
7188 case CONSTRUCTOR:
7189 {
7190 unsigned HOST_WIDE_INT idx;
7191 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7192 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7193
7194 if (vec_safe_length (v1) != vec_safe_length (v2))
7195 return false;
7196
7197 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7198 /* ??? Should we also handle fields here? */
7199 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7200 return false;
7201 return true;
7202 }
7203
7204 case SAVE_EXPR:
7205 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7206
7207 case CALL_EXPR:
7208 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7209 if (cmp <= 0)
7210 return cmp;
7211 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7212 return 0;
7213 {
7214 const_tree arg1, arg2;
7215 const_call_expr_arg_iterator iter1, iter2;
7216 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7217 arg2 = first_const_call_expr_arg (t2, &iter2);
7218 arg1 && arg2;
7219 arg1 = next_const_call_expr_arg (&iter1),
7220 arg2 = next_const_call_expr_arg (&iter2))
7221 {
7222 cmp = simple_cst_equal (arg1, arg2);
7223 if (cmp <= 0)
7224 return cmp;
7225 }
7226 return arg1 == arg2;
7227 }
7228
7229 case TARGET_EXPR:
7230 /* Special case: if either target is an unallocated VAR_DECL,
7231 it means that it's going to be unified with whatever the
7232 TARGET_EXPR is really supposed to initialize, so treat it
7233 as being equivalent to anything. */
7234 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7235 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7236 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7237 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7238 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7239 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7240 cmp = 1;
7241 else
7242 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7243
7244 if (cmp <= 0)
7245 return cmp;
7246
7247 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7248
7249 case WITH_CLEANUP_EXPR:
7250 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7251 if (cmp <= 0)
7252 return cmp;
7253
7254 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7255
7256 case COMPONENT_REF:
7257 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7258 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7259
7260 return 0;
7261
7262 case VAR_DECL:
7263 case PARM_DECL:
7264 case CONST_DECL:
7265 case FUNCTION_DECL:
7266 return 0;
7267
7268 default:
7269 break;
7270 }
7271
7272 /* This general rule works for most tree codes. All exceptions should be
7273 handled above. If this is a language-specific tree code, we can't
7274 trust what might be in the operand, so say we don't know
7275 the situation. */
7276 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7277 return -1;
7278
7279 switch (TREE_CODE_CLASS (code1))
7280 {
7281 case tcc_unary:
7282 case tcc_binary:
7283 case tcc_comparison:
7284 case tcc_expression:
7285 case tcc_reference:
7286 case tcc_statement:
7287 cmp = 1;
7288 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7289 {
7290 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7291 if (cmp <= 0)
7292 return cmp;
7293 }
7294
7295 return cmp;
7296
7297 default:
7298 return -1;
7299 }
7300 }
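
/* Usage note: because the result is tri-state, callers that need a
   definite answer test for equality with 1, e.g.

     if (simple_cst_equal (a, b) == 1)
       ...

   whereas a result <= 0 (unequal, or not understood) is simply
   propagated in the recursive cases above.  */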
7301
7302 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7303 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7304 than U, respectively. */
7305
7306 int
7307 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7308 {
7309 if (tree_int_cst_sgn (t) < 0)
7310 return -1;
7311 else if (!tree_fits_uhwi_p (t))
7312 return 1;
7313 else if (TREE_INT_CST_LOW (t) == u)
7314 return 0;
7315 else if (TREE_INT_CST_LOW (t) < u)
7316 return -1;
7317 else
7318 return 1;
7319 }
7320
7321 /* Return true if SIZE represents a constant size that is in bounds of
7322 what the middle-end and the backend accepts (covering not more than
7323 half of the address-space). */
7324
7325 bool
7326 valid_constant_size_p (const_tree size)
7327 {
7328 if (! tree_fits_uhwi_p (size)
7329 || TREE_OVERFLOW (size)
7330 || tree_int_cst_sign_bit (size) != 0)
7331 return false;
7332 return true;
7333 }
7334
7335 /* Return the precision of the type, or for a complex or vector type the
7336 precision of the type of its elements. */
7337
7338 unsigned int
7339 element_precision (const_tree type)
7340 {
7341 enum tree_code code = TREE_CODE (type);
7342 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7343 type = TREE_TYPE (type);
7344
7345 return TYPE_PRECISION (type);
7346 }
7347
7348 /* Return true if CODE represents an associative tree code. Otherwise
7349 return false. */
7350 bool
7351 associative_tree_code (enum tree_code code)
7352 {
7353 switch (code)
7354 {
7355 case BIT_IOR_EXPR:
7356 case BIT_AND_EXPR:
7357 case BIT_XOR_EXPR:
7358 case PLUS_EXPR:
7359 case MULT_EXPR:
7360 case MIN_EXPR:
7361 case MAX_EXPR:
7362 return true;
7363
7364 default:
7365 break;
7366 }
7367 return false;
7368 }
7369
7370 /* Return true if CODE represents a commutative tree code. Otherwise
7371 return false. */
7372 bool
7373 commutative_tree_code (enum tree_code code)
7374 {
7375 switch (code)
7376 {
7377 case PLUS_EXPR:
7378 case MULT_EXPR:
7379 case MULT_HIGHPART_EXPR:
7380 case MIN_EXPR:
7381 case MAX_EXPR:
7382 case BIT_IOR_EXPR:
7383 case BIT_XOR_EXPR:
7384 case BIT_AND_EXPR:
7385 case NE_EXPR:
7386 case EQ_EXPR:
7387 case UNORDERED_EXPR:
7388 case ORDERED_EXPR:
7389 case UNEQ_EXPR:
7390 case LTGT_EXPR:
7391 case TRUTH_AND_EXPR:
7392 case TRUTH_XOR_EXPR:
7393 case TRUTH_OR_EXPR:
7394 case WIDEN_MULT_EXPR:
7395 case VEC_WIDEN_MULT_HI_EXPR:
7396 case VEC_WIDEN_MULT_LO_EXPR:
7397 case VEC_WIDEN_MULT_EVEN_EXPR:
7398 case VEC_WIDEN_MULT_ODD_EXPR:
7399 return true;
7400
7401 default:
7402 break;
7403 }
7404 return false;
7405 }
7406
7407 /* Return true if CODE represents a ternary tree code for which the
7408 first two operands are commutative. Otherwise return false. */
7409 bool
7410 commutative_ternary_tree_code (enum tree_code code)
7411 {
7412 switch (code)
7413 {
7414 case WIDEN_MULT_PLUS_EXPR:
7415 case WIDEN_MULT_MINUS_EXPR:
7416 case DOT_PROD_EXPR:
7417 case FMA_EXPR:
7418 return true;
7419
7420 default:
7421 break;
7422 }
7423 return false;
7424 }
7425
7426 namespace inchash
7427 {
7428
7429 /* Generate a hash value for an expression. This can be used iteratively
7430 by passing a previous result as the HSTATE argument.
7431
7432 This function is intended to produce the same hash for expressions which
7433 would compare equal using operand_equal_p. */
7434 void
7435 add_expr (const_tree t, inchash::hash &hstate)
7436 {
7437 int i;
7438 enum tree_code code;
7439 enum tree_code_class tclass;
7440
7441 if (t == NULL_TREE)
7442 {
7443 hstate.merge_hash (0);
7444 return;
7445 }
7446
7447 code = TREE_CODE (t);
7448
7449 switch (code)
7450 {
7451 /* Alas, constants aren't shared, so we can't rely on pointer
7452 identity. */
7453 case VOID_CST:
7454 hstate.merge_hash (0);
7455 return;
7456 case INTEGER_CST:
7457 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7458 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7459 return;
7460 case REAL_CST:
7461 {
7462 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7463 hstate.merge_hash (val2);
7464 return;
7465 }
7466 case FIXED_CST:
7467 {
7468 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7469 hstate.merge_hash (val2);
7470 return;
7471 }
7472 case STRING_CST:
7473 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7474 return;
7475 case COMPLEX_CST:
7476 inchash::add_expr (TREE_REALPART (t), hstate);
7477 inchash::add_expr (TREE_IMAGPART (t), hstate);
7478 return;
7479 case VECTOR_CST:
7480 {
7481 unsigned i;
7482 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7483 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7484 return;
7485 }
7486 case SSA_NAME:
7487 /* We can just compare by pointer. */
7488 hstate.add_wide_int (SSA_NAME_VERSION (t));
7489 return;
7490 case PLACEHOLDER_EXPR:
7491 /* The node itself doesn't matter. */
7492 return;
7493 case TREE_LIST:
7494 /* A list of expressions, for a CALL_EXPR or as the elements of a
7495 VECTOR_CST. */
7496 for (; t; t = TREE_CHAIN (t))
7497 inchash::add_expr (TREE_VALUE (t), hstate);
7498 return;
7499 case CONSTRUCTOR:
7500 {
7501 unsigned HOST_WIDE_INT idx;
7502 tree field, value;
7503 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7504 {
7505 inchash::add_expr (field, hstate);
7506 inchash::add_expr (value, hstate);
7507 }
7508 return;
7509 }
7510 case FUNCTION_DECL:
7511 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7512 Otherwise nodes that compare equal according to operand_equal_p might
7513 get different hash codes. However, don't do this for machine specific
7514 or front end builtins, since the function code is overloaded in those
7515 cases. */
7516 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7517 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7518 {
7519 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7520 code = TREE_CODE (t);
7521 }
7522 /* FALL THROUGH */
7523 default:
7524 tclass = TREE_CODE_CLASS (code);
7525
7526 if (tclass == tcc_declaration)
7527 {
7528 /* DECLs have a unique ID. */
7529 hstate.add_wide_int (DECL_UID (t));
7530 }
7531 else
7532 {
7533 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7534
7535 hstate.add_object (code);
7536
7537 /* Don't hash the type, that can lead to having nodes which
7538 compare equal according to operand_equal_p, but which
7539 have different hash codes. */
7540 if (CONVERT_EXPR_CODE_P (code)
7541 || code == NON_LVALUE_EXPR)
7542 {
7543 /* Make sure to include signedness in the hash computation. */
7544 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7545 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7546 }
7547
7548 else if (commutative_tree_code (code))
7549 {
7550 /* It's a commutative expression. We want to hash it the same
7551 however it appears. We do this by first hashing both operands
7552 and then rehashing based on the order of their independent
7553 hashes. */
7554 inchash::hash one, two;
7555 inchash::add_expr (TREE_OPERAND (t, 0), one);
7556 inchash::add_expr (TREE_OPERAND (t, 1), two);
7557 hstate.add_commutative (one, two);
7558 }
7559 else
7560 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7561 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7562 }
7563 return;
7564 }
7565 }
7566
7567 }
7568
7569 /* Constructors for pointer, array and function types.
7570 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7571 constructed by language-dependent code, not here.) */
7572
7573 /* Construct, lay out and return the type of pointers to TO_TYPE with
7574 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7575 reference all of memory. If such a type has already been
7576 constructed, reuse it. */
7577
7578 tree
7579 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7580 bool can_alias_all)
7581 {
7582 tree t;
7583
7584 if (to_type == error_mark_node)
7585 return error_mark_node;
7586
7587 /* If the pointed-to type has the may_alias attribute set, force
7588 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7589 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7590 can_alias_all = true;
7591
7592 /* In some cases, languages will have things that aren't a POINTER_TYPE
7593 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7594 In that case, return that type without regard to the rest of our
7595 operands.
7596
7597 ??? This is a kludge, but consistent with the way this function has
7598 always operated and there doesn't seem to be a good way to avoid this
7599 at the moment. */
7600 if (TYPE_POINTER_TO (to_type) != 0
7601 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7602 return TYPE_POINTER_TO (to_type);
7603
7604 /* First, if we already have a type for pointers to TO_TYPE and it's
7605 the proper mode, use it. */
7606 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7607 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7608 return t;
7609
7610 t = make_node (POINTER_TYPE);
7611
7612 TREE_TYPE (t) = to_type;
7613 SET_TYPE_MODE (t, mode);
7614 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7615 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7616 TYPE_POINTER_TO (to_type) = t;
7617
7618 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7619 SET_TYPE_STRUCTURAL_EQUALITY (t);
7620 else if (TYPE_CANONICAL (to_type) != to_type)
7621 TYPE_CANONICAL (t)
7622 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7623 mode, can_alias_all);
7624
7625 /* Lay out the type. This function has many callers that are concerned
7626 with expression-construction, and this simplifies them all. */
7627 layout_type (t);
7628
7629 return t;
7630 }
7631
7632 /* By default build pointers in ptr_mode. */
7633
7634 tree
7635 build_pointer_type (tree to_type)
7636 {
7637 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7638 : TYPE_ADDR_SPACE (to_type);
7639 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7640 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7641 }
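
/* For illustration: build_pointer_type (char_type_node) returns the
   type 'char *' in ptr_mode, and calling it again returns the very
   same node, found on the TYPE_NEXT_PTR_TO chain reached through
   TYPE_POINTER_TO (char_type_node).  */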
7642
7643 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7644
7645 tree
7646 build_reference_type_for_mode (tree to_type, machine_mode mode,
7647 bool can_alias_all)
7648 {
7649 tree t;
7650
7651 if (to_type == error_mark_node)
7652 return error_mark_node;
7653
7654 /* If the pointed-to type has the may_alias attribute set, force
7655 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7656 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7657 can_alias_all = true;
7658
7659 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7660 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7661 In that case, return that type without regard to the rest of our
7662 operands.
7663
7664 ??? This is a kludge, but consistent with the way this function has
7665 always operated and there doesn't seem to be a good way to avoid this
7666 at the moment. */
7667 if (TYPE_REFERENCE_TO (to_type) != 0
7668 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7669 return TYPE_REFERENCE_TO (to_type);
7670
7671 /* First, if we already have a type for pointers to TO_TYPE and it's
7672 the proper mode, use it. */
7673 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7674 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7675 return t;
7676
7677 t = make_node (REFERENCE_TYPE);
7678
7679 TREE_TYPE (t) = to_type;
7680 SET_TYPE_MODE (t, mode);
7681 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7682 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7683 TYPE_REFERENCE_TO (to_type) = t;
7684
7685 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7686 SET_TYPE_STRUCTURAL_EQUALITY (t);
7687 else if (TYPE_CANONICAL (to_type) != to_type)
7688 TYPE_CANONICAL (t)
7689 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7690 mode, can_alias_all);
7691
7692 layout_type (t);
7693
7694 return t;
7695 }
7696
7697
7698 /* Build the node for the type of references-to-TO_TYPE by default
7699 in ptr_mode. */
7700
7701 tree
7702 build_reference_type (tree to_type)
7703 {
7704 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7705 : TYPE_ADDR_SPACE (to_type);
7706 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7707 return build_reference_type_for_mode (to_type, pointer_mode, false);
7708 }
7709
7710 #define MAX_INT_CACHED_PREC \
7711 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7712 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7713
7714 /* Builds a signed or unsigned integer type of precision PRECISION.
7715 Used for C bitfields whose precision does not match that of
7716 built-in target types. */
7717 tree
7718 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7719 int unsignedp)
7720 {
7721 tree itype, ret;
7722
7723 if (unsignedp)
7724 unsignedp = MAX_INT_CACHED_PREC + 1;
7725
7726 if (precision <= MAX_INT_CACHED_PREC)
7727 {
7728 itype = nonstandard_integer_type_cache[precision + unsignedp];
7729 if (itype)
7730 return itype;
7731 }
7732
7733 itype = make_node (INTEGER_TYPE);
7734 TYPE_PRECISION (itype) = precision;
7735
7736 if (unsignedp)
7737 fixup_unsigned_type (itype);
7738 else
7739 fixup_signed_type (itype);
7740
7741 ret = itype;
7742 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7743 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7744 if (precision <= MAX_INT_CACHED_PREC)
7745 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7746
7747 return ret;
7748 }
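
/* For illustration: build_nonstandard_integer_type (24, 1) yields an
   unsigned 24-bit INTEGER_TYPE, the sort of type needed for a
   bit-field declared as 'unsigned int x : 24;'.  Results for
   precisions up to MAX_INT_CACHED_PREC are cached, so repeated calls
   with the same arguments return the same node.  */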
7749
7750 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7751 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7752 is true, reuse such a type that has already been constructed. */
7753
7754 static tree
7755 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7756 {
7757 tree itype = make_node (INTEGER_TYPE);
7758 inchash::hash hstate;
7759
7760 TREE_TYPE (itype) = type;
7761
7762 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7763 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7764
7765 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7766 SET_TYPE_MODE (itype, TYPE_MODE (type));
7767 TYPE_SIZE (itype) = TYPE_SIZE (type);
7768 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7769 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7770 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7771
7772 if (!shared)
7773 return itype;
7774
7775 if ((TYPE_MIN_VALUE (itype)
7776 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7777 || (TYPE_MAX_VALUE (itype)
7778 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7779 {
7780 /* Since we cannot reliably merge this type, we need to compare it using
7781 structural equality checks. */
7782 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7783 return itype;
7784 }
7785
7786 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7787 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7788 hstate.merge_hash (TYPE_HASH (type));
7789 itype = type_hash_canon (hstate.end (), itype);
7790
7791 return itype;
7792 }
7793
7794 /* Wrapper around build_range_type_1 with SHARED set to true. */
7795
7796 tree
7797 build_range_type (tree type, tree lowval, tree highval)
7798 {
7799 return build_range_type_1 (type, lowval, highval, true);
7800 }
7801
7802 /* Wrapper around build_range_type_1 with SHARED set to false. */
7803
7804 tree
7805 build_nonshared_range_type (tree type, tree lowval, tree highval)
7806 {
7807 return build_range_type_1 (type, lowval, highval, false);
7808 }
7809
7810 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7811 MAXVAL should be the maximum value in the domain
7812 (one less than the length of the array).
7813
7814 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7815 We don't enforce this limit; that is up to the caller (e.g. the front end).
7816 The limit exists because the result is a signed type and we don't handle
7817 sizes that use more than one HOST_WIDE_INT. */
7818
7819 tree
7820 build_index_type (tree maxval)
7821 {
7822 return build_range_type (sizetype, size_zero_node, maxval);
7823 }
7824
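/* Illustrative sketch: the TYPE_DOMAIN of a ten-element array is

     tree domain = build_index_type (size_int (9));

   i.e. the sizetype range [0, 9]; MAXVAL is the last valid index, not the
   number of elements.  */
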
7825 /* Return true if the debug information for TYPE, a subtype, should be emitted
7826 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7827 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7828 debug info and doesn't reflect the source code. */
7829
7830 bool
7831 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7832 {
7833 tree base_type = TREE_TYPE (type), low, high;
7834
7835 /* Subrange types have a base type which is an integral type. */
7836 if (!INTEGRAL_TYPE_P (base_type))
7837 return false;
7838
7839 /* Get the real bounds of the subtype. */
7840 if (lang_hooks.types.get_subrange_bounds)
7841 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7842 else
7843 {
7844 low = TYPE_MIN_VALUE (type);
7845 high = TYPE_MAX_VALUE (type);
7846 }
7847
7848 /* If the type and its base type have the same representation and the same
7849 name, then the type is not a subrange but a copy of the base type. */
7850 if ((TREE_CODE (base_type) == INTEGER_TYPE
7851 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7852 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7853 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7854 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7855 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7856 return false;
7857
7858 if (lowval)
7859 *lowval = low;
7860 if (highval)
7861 *highval = high;
7862 return true;
7863 }
7864
7865 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7866 and number of elements specified by the range of values of INDEX_TYPE.
7867 If SHARED is true, reuse such a type that has already been constructed. */
7868
7869 static tree
7870 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7871 {
7872 tree t;
7873
7874 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7875 {
7876 error ("arrays of functions are not meaningful");
7877 elt_type = integer_type_node;
7878 }
7879
7880 t = make_node (ARRAY_TYPE);
7881 TREE_TYPE (t) = elt_type;
7882 TYPE_DOMAIN (t) = index_type;
7883 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7884 layout_type (t);
7885
7886 /* If the element type is incomplete at this point we get marked for
7887 structural equality. Do not record these types in the canonical
7888 type hashtable. */
7889 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7890 return t;
7891
7892 if (shared)
7893 {
7894 inchash::hash hstate;
7895 hstate.add_object (TYPE_HASH (elt_type));
7896 if (index_type)
7897 hstate.add_object (TYPE_HASH (index_type));
7898 t = type_hash_canon (hstate.end (), t);
7899 }
7900
7901 if (TYPE_CANONICAL (t) == t)
7902 {
7903 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7904 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7905 SET_TYPE_STRUCTURAL_EQUALITY (t);
7906 else if (TYPE_CANONICAL (elt_type) != elt_type
7907 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7908 TYPE_CANONICAL (t)
7909 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7910 index_type
7911 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7912 shared);
7913 }
7914
7915 return t;
7916 }
7917
7918 /* Wrapper around build_array_type_1 with SHARED set to true. */
7919
7920 tree
7921 build_array_type (tree elt_type, tree index_type)
7922 {
7923 return build_array_type_1 (elt_type, index_type, true);
7924 }
7925
7926 /* Wrapper around build_array_type_1 with SHARED set to false. */
7927
7928 tree
7929 build_nonshared_array_type (tree elt_type, tree index_type)
7930 {
7931 return build_array_type_1 (elt_type, index_type, false);
7932 }
7933
7934 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7935 sizetype. */
7936
7937 tree
7938 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7939 {
7940 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7941 }
7942
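/* Illustrative sketch: the type of "char buf[16]" can be built as

     tree t = build_array_type_nelts (char_type_node, 16);

   which is equivalent to passing build_index_type (size_int (15)) as the
   INDEX_TYPE argument of build_array_type.  */
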
7943 /* Strip ARRAY_TYPE wrappers from TYPE until a non-array element type
7944 is found, and return that element type. */
7945
7946 tree
7947 strip_array_types (tree type)
7948 {
7949 while (TREE_CODE (type) == ARRAY_TYPE)
7950 type = TREE_TYPE (type);
7951
7952 return type;
7953 }
7954
7955 /* Computes the canonical argument types from the argument type list
7956 ARGTYPES.
7957
7958 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7959 on entry to this function, or if any of the ARGTYPES are
7960 structural.
7961
7962 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7963 true on entry to this function, or if any of the ARGTYPES are
7964 non-canonical.
7965
7966 Returns a canonical argument list, which may be ARGTYPES when the
7967 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7968 true) or would not differ from ARGTYPES. */
7969
7970 static tree
7971 maybe_canonicalize_argtypes (tree argtypes,
7972 bool *any_structural_p,
7973 bool *any_noncanonical_p)
7974 {
7975 tree arg;
7976 bool any_noncanonical_argtypes_p = false;
7977
7978 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7979 {
7980 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7981 /* Fail gracefully by stating that the type is structural. */
7982 *any_structural_p = true;
7983 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7984 *any_structural_p = true;
7985 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7986 || TREE_PURPOSE (arg))
7987 /* If the argument has a default argument, we consider it
7988 non-canonical even though the type itself is canonical.
7989 That way, different variants of function and method types
7990 with default arguments will all point to the variant with
7991 no defaults as their canonical type. */
7992 any_noncanonical_argtypes_p = true;
7993 }
7994
7995 if (*any_structural_p)
7996 return argtypes;
7997
7998 if (any_noncanonical_argtypes_p)
7999 {
8000 /* Build the canonical list of argument types. */
8001 tree canon_argtypes = NULL_TREE;
8002 bool is_void = false;
8003
8004 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8005 {
8006 if (arg == void_list_node)
8007 is_void = true;
8008 else
8009 canon_argtypes = tree_cons (NULL_TREE,
8010 TYPE_CANONICAL (TREE_VALUE (arg)),
8011 canon_argtypes);
8012 }
8013
8014 canon_argtypes = nreverse (canon_argtypes);
8015 if (is_void)
8016 canon_argtypes = chainon (canon_argtypes, void_list_node);
8017
8018 /* There is a non-canonical type. */
8019 *any_noncanonical_p = true;
8020 return canon_argtypes;
8021 }
8022
8023 /* The canonical argument types are the same as ARGTYPES. */
8024 return argtypes;
8025 }
8026
8027 /* Construct, lay out and return
8028 the type of functions returning type VALUE_TYPE
8029 given arguments of types ARG_TYPES.
8030 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8031 are data type nodes for the arguments of the function.
8032 If such a type has already been constructed, reuse it. */
8033
8034 tree
8035 build_function_type (tree value_type, tree arg_types)
8036 {
8037 tree t;
8038 inchash::hash hstate;
8039 bool any_structural_p, any_noncanonical_p;
8040 tree canon_argtypes;
8041
8042 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8043 {
8044 error ("function return type cannot be function");
8045 value_type = integer_type_node;
8046 }
8047
8048 /* Make a node of the sort we want. */
8049 t = make_node (FUNCTION_TYPE);
8050 TREE_TYPE (t) = value_type;
8051 TYPE_ARG_TYPES (t) = arg_types;
8052
8053 /* If we already have such a type, use the old one. */
8054 hstate.add_object (TYPE_HASH (value_type));
8055 type_hash_list (arg_types, hstate);
8056 t = type_hash_canon (hstate.end (), t);
8057
8058 /* Set up the canonical type. */
8059 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8060 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8061 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8062 &any_structural_p,
8063 &any_noncanonical_p);
8064 if (any_structural_p)
8065 SET_TYPE_STRUCTURAL_EQUALITY (t);
8066 else if (any_noncanonical_p)
8067 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8068 canon_argtypes);
8069
8070 if (!COMPLETE_TYPE_P (t))
8071 layout_type (t);
8072 return t;
8073 }
8074
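/* Illustrative sketch: the type "int (double)" could be assembled by hand
   as

     tree args = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree fntype = build_function_type (integer_type_node, args);

   although the build_function_type_list wrappers below are usually more
   convenient.  */
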
8075 /* Build a function type. The RETURN_TYPE is the type returned by the
8076 function. If VAARGS is set, no void_type_node is appended to the
8077 the list. ARGP must be always be terminated be a NULL_TREE. */
8078
8079 static tree
8080 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8081 {
8082 tree t, args, last;
8083
8084 t = va_arg (argp, tree);
8085 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8086 args = tree_cons (NULL_TREE, t, args);
8087
8088 if (vaargs)
8089 {
8090 last = args;
8091 if (args != NULL_TREE)
8092 args = nreverse (args);
8093 gcc_assert (last != void_list_node);
8094 }
8095 else if (args == NULL_TREE)
8096 args = void_list_node;
8097 else
8098 {
8099 last = args;
8100 args = nreverse (args);
8101 TREE_CHAIN (last) = void_list_node;
8102 }
8103 args = build_function_type (return_type, args);
8104
8105 return args;
8106 }
8107
8108 /* Build a function type. The RETURN_TYPE is the type returned by the
8109 function. If additional arguments are provided, they are
8110 additional argument types. The list of argument types must always
8111 be terminated by NULL_TREE. */
8112
8113 tree
8114 build_function_type_list (tree return_type, ...)
8115 {
8116 tree args;
8117 va_list p;
8118
8119 va_start (p, return_type);
8120 args = build_function_type_list_1 (false, return_type, p);
8121 va_end (p);
8122 return args;
8123 }
8124
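/* Illustrative sketch: the type "int (double, char *)" is built as

     tree fntype
       = build_function_type_list (integer_type_node, double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   Omitting the trailing NULL_TREE would make the va_arg loop above read
   past the intended argument list.  */
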
8125 /* Build a variable argument function type. The RETURN_TYPE is the
8126 type returned by the function. If additional arguments are provided,
8127 they are additional argument types. The list of argument types must
8128 always be terminated by NULL_TREE. */
8129
8130 tree
8131 build_varargs_function_type_list (tree return_type, ...)
8132 {
8133 tree args;
8134 va_list p;
8135
8136 va_start (p, return_type);
8137 args = build_function_type_list_1 (true, return_type, p);
8138 va_end (p);
8139
8140 return args;
8141 }
8142
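/* Illustrative sketch: a printf-like type "int (const char *, ...)" is

     tree cstr = build_pointer_type
       (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
     tree fntype
       = build_varargs_function_type_list (integer_type_node, cstr,
                                           NULL_TREE);

   No void_list_node terminator is appended, which is what marks the
   resulting FUNCTION_TYPE as taking variable arguments.  */
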
8143 /* Build a function type. RETURN_TYPE is the type returned by the
8144 function; VAARGS indicates whether the function takes varargs. The
8145 function takes N named arguments, the types of which are provided in
8146 ARG_TYPES. */
8147
8148 static tree
8149 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8150 tree *arg_types)
8151 {
8152 int i;
8153 tree t = vaargs ? NULL_TREE : void_list_node;
8154
8155 for (i = n - 1; i >= 0; i--)
8156 t = tree_cons (NULL_TREE, arg_types[i], t);
8157
8158 return build_function_type (return_type, t);
8159 }
8160
8161 /* Build a function type. RETURN_TYPE is the type returned by the
8162 function. The function takes N named arguments, the types of which
8163 are provided in ARG_TYPES. */
8164
8165 tree
8166 build_function_type_array (tree return_type, int n, tree *arg_types)
8167 {
8168 return build_function_type_array_1 (false, return_type, n, arg_types);
8169 }
8170
8171 /* Build a variable argument function type. RETURN_TYPE is the type
8172 returned by the function. The function takes N named arguments, the
8173 types of which are provided in ARG_TYPES. */
8174
8175 tree
8176 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8177 {
8178 return build_function_type_array_1 (true, return_type, n, arg_types);
8179 }
8180
8181 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8182 and ARGTYPES (a TREE_LIST) are the return type and argument types
8183 for the method. An implicit additional parameter (of type
8184 pointer-to-BASETYPE) is added to the ARGTYPES. */
8185
8186 tree
8187 build_method_type_directly (tree basetype,
8188 tree rettype,
8189 tree argtypes)
8190 {
8191 tree t;
8192 tree ptype;
8193 inchash::hash hstate;
8194 bool any_structural_p, any_noncanonical_p;
8195 tree canon_argtypes;
8196
8197 /* Make a node of the sort we want. */
8198 t = make_node (METHOD_TYPE);
8199
8200 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8201 TREE_TYPE (t) = rettype;
8202 ptype = build_pointer_type (basetype);
8203
8204 /* The actual arglist for this function includes a "hidden" argument
8205 which is "this". Put it into the list of argument types. */
8206 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8207 TYPE_ARG_TYPES (t) = argtypes;
8208
8209 /* If we already have such a type, use the old one. */
8210 hstate.add_object (TYPE_HASH (basetype));
8211 hstate.add_object (TYPE_HASH (rettype));
8212 type_hash_list (argtypes, hstate);
8213 t = type_hash_canon (hstate.end (), t);
8214
8215 /* Set up the canonical type. */
8216 any_structural_p
8217 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8218 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8219 any_noncanonical_p
8220 = (TYPE_CANONICAL (basetype) != basetype
8221 || TYPE_CANONICAL (rettype) != rettype);
8222 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8223 &any_structural_p,
8224 &any_noncanonical_p);
8225 if (any_structural_p)
8226 SET_TYPE_STRUCTURAL_EQUALITY (t);
8227 else if (any_noncanonical_p)
8228 TYPE_CANONICAL (t)
8229 = build_method_type_directly (TYPE_CANONICAL (basetype),
8230 TYPE_CANONICAL (rettype),
8231 canon_argtypes);
8232 if (!COMPLETE_TYPE_P (t))
8233 layout_type (t);
8234
8235 return t;
8236 }
8237
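/* Illustrative sketch (class_type is a hypothetical RECORD_TYPE name): for
   a member function such as "int C::f (double)" a front end could call

     tree argtypes = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree mtype = build_method_type_directly (class_type, integer_type_node,
                                              argtypes);

   The implicit "pointer to C" argument is prepended by this function, so
   it must not already appear in ARGTYPES.  */
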
8238 /* Construct, lay out and return the type of methods belonging to class
8239 BASETYPE and whose arguments and values are described by TYPE.
8240 If that type exists already, reuse it.
8241 TYPE must be a FUNCTION_TYPE node. */
8242
8243 tree
8244 build_method_type (tree basetype, tree type)
8245 {
8246 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8247
8248 return build_method_type_directly (basetype,
8249 TREE_TYPE (type),
8250 TYPE_ARG_TYPES (type));
8251 }
8252
8253 /* Construct, lay out and return the type of offsets to a value
8254 of type TYPE, within an object of type BASETYPE.
8255 If a suitable offset type exists already, reuse it. */
8256
8257 tree
8258 build_offset_type (tree basetype, tree type)
8259 {
8260 tree t;
8261 inchash::hash hstate;
8262
8263 /* Make a node of the sort we want. */
8264 t = make_node (OFFSET_TYPE);
8265
8266 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8267 TREE_TYPE (t) = type;
8268
8269 /* If we already have such a type, use the old one. */
8270 hstate.add_object (TYPE_HASH (basetype));
8271 hstate.add_object (TYPE_HASH (type));
8272 t = type_hash_canon (hstate.end (), t);
8273
8274 if (!COMPLETE_TYPE_P (t))
8275 layout_type (t);
8276
8277 if (TYPE_CANONICAL (t) == t)
8278 {
8279 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8280 || TYPE_STRUCTURAL_EQUALITY_P (type))
8281 SET_TYPE_STRUCTURAL_EQUALITY (t);
8282 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8283 || TYPE_CANONICAL (type) != type)
8284 TYPE_CANONICAL (t)
8285 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8286 TYPE_CANONICAL (type));
8287 }
8288
8289 return t;
8290 }
8291
8292 /* Create a complex type whose components are COMPONENT_TYPE. */
8293
8294 tree
8295 build_complex_type (tree component_type)
8296 {
8297 tree t;
8298 inchash::hash hstate;
8299
8300 gcc_assert (INTEGRAL_TYPE_P (component_type)
8301 || SCALAR_FLOAT_TYPE_P (component_type)
8302 || FIXED_POINT_TYPE_P (component_type));
8303
8304 /* Make a node of the sort we want. */
8305 t = make_node (COMPLEX_TYPE);
8306
8307 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8308
8309 /* If we already have such a type, use the old one. */
8310 hstate.add_object (TYPE_HASH (component_type));
8311 t = type_hash_canon (hstate.end (), t);
8312
8313 if (!COMPLETE_TYPE_P (t))
8314 layout_type (t);
8315
8316 if (TYPE_CANONICAL (t) == t)
8317 {
8318 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8319 SET_TYPE_STRUCTURAL_EQUALITY (t);
8320 else if (TYPE_CANONICAL (component_type) != component_type)
8321 TYPE_CANONICAL (t)
8322 = build_complex_type (TYPE_CANONICAL (component_type));
8323 }
8324
8325 /* We need to create a name, since complex is a fundamental type. */
8326 if (! TYPE_NAME (t))
8327 {
8328 const char *name;
8329 if (component_type == char_type_node)
8330 name = "complex char";
8331 else if (component_type == signed_char_type_node)
8332 name = "complex signed char";
8333 else if (component_type == unsigned_char_type_node)
8334 name = "complex unsigned char";
8335 else if (component_type == short_integer_type_node)
8336 name = "complex short int";
8337 else if (component_type == short_unsigned_type_node)
8338 name = "complex short unsigned int";
8339 else if (component_type == integer_type_node)
8340 name = "complex int";
8341 else if (component_type == unsigned_type_node)
8342 name = "complex unsigned int";
8343 else if (component_type == long_integer_type_node)
8344 name = "complex long int";
8345 else if (component_type == long_unsigned_type_node)
8346 name = "complex long unsigned int";
8347 else if (component_type == long_long_integer_type_node)
8348 name = "complex long long int";
8349 else if (component_type == long_long_unsigned_type_node)
8350 name = "complex long long unsigned int";
8351 else
8352 name = 0;
8353
8354 if (name != 0)
8355 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8356 get_identifier (name), t);
8357 }
8358
8359 return build_qualified_type (t, TYPE_QUALS (component_type));
8360 }
8361
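/* Illustrative sketch: build_complex_type (double_type_node) yields the
   node for "complex double", and a qualified component such as
   build_qualified_type (double_type_node, TYPE_QUAL_CONST) produces the
   correspondingly qualified complex type through the final
   build_qualified_type call above.  */
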
8362 /* If TYPE is a real or complex floating-point type and the target
8363 does not directly support arithmetic on TYPE then return the wider
8364 type to be used for arithmetic on TYPE. Otherwise, return
8365 NULL_TREE. */
8366
8367 tree
8368 excess_precision_type (tree type)
8369 {
8370 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8371 {
8372 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8373 switch (TREE_CODE (type))
8374 {
8375 case REAL_TYPE:
8376 switch (flt_eval_method)
8377 {
8378 case 1:
8379 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8380 return double_type_node;
8381 break;
8382 case 2:
8383 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8384 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8385 return long_double_type_node;
8386 break;
8387 default:
8388 gcc_unreachable ();
8389 }
8390 break;
8391 case COMPLEX_TYPE:
8392 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8393 return NULL_TREE;
8394 switch (flt_eval_method)
8395 {
8396 case 1:
8397 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8398 return complex_double_type_node;
8399 break;
8400 case 2:
8401 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8402 || (TYPE_MODE (TREE_TYPE (type))
8403 == TYPE_MODE (double_type_node)))
8404 return complex_long_double_type_node;
8405 break;
8406 default:
8407 gcc_unreachable ();
8408 }
8409 break;
8410 default:
8411 break;
8412 }
8413 }
8414 return NULL_TREE;
8415 }
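
/* Illustrative example: on a target whose FLT_EVAL_METHOD is 2 (classic
   x87 arithmetic, for instance) and with -fexcess-precision=standard,
   excess_precision_type (float_type_node) returns long_double_type_node,
   so float arithmetic is carried out in long double and only rounded back
   on assignment or conversion.  */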
8416 \f
8417 /* Return OP, stripped of any conversions to wider types as much as is safe.
8418 Converting the value back to OP's type makes a value equivalent to OP.
8419
8420 If FOR_TYPE is nonzero, we return a value which, if converted to
8421 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8422
8423 OP must have integer, real or enumeral type. Pointers are not allowed!
8424
8425 There are some cases where the obvious value we could return
8426 would regenerate to OP if converted to OP's type,
8427 but would not extend like OP to wider types.
8428 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8429 For example, if OP is (unsigned short)(signed char)-1,
8430 we avoid returning (signed char)-1 if FOR_TYPE is int,
8431 even though extending that to an unsigned short would regenerate OP,
8432 since the result of extending (signed char)-1 to (int)
8433 is different from (int) OP. */
8434
8435 tree
8436 get_unwidened (tree op, tree for_type)
8437 {
8438 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8439 tree type = TREE_TYPE (op);
8440 unsigned final_prec
8441 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8442 int uns
8443 = (for_type != 0 && for_type != type
8444 && final_prec > TYPE_PRECISION (type)
8445 && TYPE_UNSIGNED (type));
8446 tree win = op;
8447
8448 while (CONVERT_EXPR_P (op))
8449 {
8450 int bitschange;
8451
8452 /* TYPE_PRECISION on vector types has different meaning
8453 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8454 so avoid them here. */
8455 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8456 break;
8457
8458 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8459 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8460
8461 /* Truncations are many-one so cannot be removed, unless we are
8462 later going to truncate down even further. */
8463 if (bitschange < 0
8464 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8465 break;
8466
8467 /* See what's inside this conversion. If we decide to strip it,
8468 we will set WIN. */
8469 op = TREE_OPERAND (op, 0);
8470
8471 /* If we have not stripped any zero-extensions (uns is 0),
8472 we can strip any kind of extension.
8473 If we have previously stripped a zero-extension,
8474 only zero-extensions can safely be stripped.
8475 Any extension can be stripped if the bits it would produce
8476 are all going to be discarded later by truncating to FOR_TYPE. */
8477
8478 if (bitschange > 0)
8479 {
8480 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8481 win = op;
8482 /* TYPE_UNSIGNED says whether this is a zero-extension.
8483 Let's avoid computing it if it does not affect WIN
8484 and if UNS will not be needed again. */
8485 if ((uns
8486 || CONVERT_EXPR_P (op))
8487 && TYPE_UNSIGNED (TREE_TYPE (op)))
8488 {
8489 uns = 1;
8490 win = op;
8491 }
8492 }
8493 }
8494
8495 /* If we finally reach a constant, see if it fits in for_type and
8496 in that case convert it. */
8497 if (for_type
8498 && TREE_CODE (win) == INTEGER_CST
8499 && TREE_TYPE (win) != for_type
8500 && int_fits_type_p (win, for_type))
8501 win = fold_convert (for_type, win);
8502
8503 return win;
8504 }
8505 \f
8506 /* Return OP or a simpler expression for a narrower value
8507 which can be sign-extended or zero-extended to give back OP.
8508 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8509 or 0 if the value should be sign-extended. */
8510
8511 tree
8512 get_narrower (tree op, int *unsignedp_ptr)
8513 {
8514 int uns = 0;
8515 int first = 1;
8516 tree win = op;
8517 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8518
8519 while (TREE_CODE (op) == NOP_EXPR)
8520 {
8521 int bitschange
8522 = (TYPE_PRECISION (TREE_TYPE (op))
8523 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8524
8525 /* Truncations are many-one so cannot be removed. */
8526 if (bitschange < 0)
8527 break;
8528
8529 /* See what's inside this conversion. If we decide to strip it,
8530 we will set WIN. */
8531
8532 if (bitschange > 0)
8533 {
8534 op = TREE_OPERAND (op, 0);
8535 /* An extension: the outermost one can be stripped,
8536 but remember whether it is zero or sign extension. */
8537 if (first)
8538 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8539 /* Otherwise, if a sign extension has been stripped,
8540 only sign extensions can now be stripped;
8541 if a zero extension has been stripped, only zero-extensions. */
8542 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8543 break;
8544 first = 0;
8545 }
8546 else /* bitschange == 0 */
8547 {
8548 /* A change in nominal type can always be stripped, but we must
8549 preserve the unsignedness. */
8550 if (first)
8551 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8552 first = 0;
8553 op = TREE_OPERAND (op, 0);
8554 /* Keep trying to narrow, but don't assign op to win if it
8555 would turn an integral type into something else. */
8556 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8557 continue;
8558 }
8559
8560 win = op;
8561 }
8562
8563 if (TREE_CODE (op) == COMPONENT_REF
8564 /* Since type_for_size always gives an integer type. */
8565 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8566 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8567 /* Ensure field is laid out already. */
8568 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8569 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8570 {
8571 unsigned HOST_WIDE_INT innerprec
8572 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8573 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8574 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8575 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8576
8577 /* We can get this structure field in a narrower type that fits it,
8578 but the resulting extension to its nominal type (a fullword type)
8579 must satisfy the same conditions as for other extensions.
8580
8581 Do this only for fields that are aligned (not bit-fields),
8582 because when bit-field insns will be used there is no
8583 advantage in doing this. */
8584
8585 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8586 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8587 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8588 && type != 0)
8589 {
8590 if (first)
8591 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8592 win = fold_convert (type, op);
8593 }
8594 }
8595
8596 *unsignedp_ptr = uns;
8597 return win;
8598 }
8599 \f
8600 /* Returns true if integer constant C has a value that is permissible
8601 for type TYPE (an INTEGER_TYPE). */
8602
8603 bool
8604 int_fits_type_p (const_tree c, const_tree type)
8605 {
8606 tree type_low_bound, type_high_bound;
8607 bool ok_for_low_bound, ok_for_high_bound;
8608 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8609
8610 retry:
8611 type_low_bound = TYPE_MIN_VALUE (type);
8612 type_high_bound = TYPE_MAX_VALUE (type);
8613
8614 /* If at least one bound of the type is a constant integer, we can check
8615 ourselves and maybe make a decision. If no such decision is possible, but
8616 this type is a subtype, try checking against that. Otherwise, use
8617 fits_to_tree_p, which checks against the precision.
8618
8619 Compute the status for each possibly constant bound, and return if we see
8620 one does not match. Use ok_for_xxx_bound for this purpose: it is true
8621 when the constant is known to fit the corresponding bound, and false when
8622 that bound is not a constant, so no decision can be drawn from it. */
8623
8624 /* Check if c >= type_low_bound. */
8625 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8626 {
8627 if (tree_int_cst_lt (c, type_low_bound))
8628 return false;
8629 ok_for_low_bound = true;
8630 }
8631 else
8632 ok_for_low_bound = false;
8633
8634 /* Check if c <= type_high_bound. */
8635 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8636 {
8637 if (tree_int_cst_lt (type_high_bound, c))
8638 return false;
8639 ok_for_high_bound = true;
8640 }
8641 else
8642 ok_for_high_bound = false;
8643
8644 /* If the constant fits both bounds, the result is known. */
8645 if (ok_for_low_bound && ok_for_high_bound)
8646 return true;
8647
8648 /* Perform some generic filtering which may allow making a decision
8649 even if the bounds are not constant. First, negative integers
8650 never fit in unsigned types. */
8651 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8652 return false;
8653
8654 /* Second, narrower types always fit in wider ones. */
8655 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8656 return true;
8657
8658 /* Third, unsigned integers with top bit set never fit signed types. */
8659 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8660 {
8661 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8662 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8663 {
8664 /* When a tree_cst is converted to a wide-int, the precision
8665 is taken from the type. However, if the precision of the
8666 mode underneath the type is smaller than that, it is
8667 possible that the value will not fit. The test below
8668 fails if any bit is set between the sign bit of the
8669 underlying mode and the top bit of the type. */
8670 if (wi::ne_p (wi::zext (c, prec - 1), c))
8671 return false;
8672 }
8673 else if (wi::neg_p (c))
8674 return false;
8675 }
8676
8677 /* If we haven't been able to decide at this point, there is nothing more we
8678 can check ourselves here. Look at the base type if we have one and it
8679 has the same precision. */
8680 if (TREE_CODE (type) == INTEGER_TYPE
8681 && TREE_TYPE (type) != 0
8682 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8683 {
8684 type = TREE_TYPE (type);
8685 goto retry;
8686 }
8687
8688 /* Or to fits_to_tree_p, if nothing else. */
8689 return wi::fits_to_tree_p (c, type);
8690 }
8691
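/* Illustrative sketch:

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   yields false, since 300 exceeds the high bound 255 of unsigned char on
   typical targets, whereas the value 200 would fit.  */
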
8692 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8693 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8694 represented (assuming two's-complement arithmetic) within the bit
8695 precision of the type are returned instead. */
8696
8697 void
8698 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8699 {
8700 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8701 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8702 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8703 else
8704 {
8705 if (TYPE_UNSIGNED (type))
8706 mpz_set_ui (min, 0);
8707 else
8708 {
8709 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8710 wi::to_mpz (mn, min, SIGNED);
8711 }
8712 }
8713
8714 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8715 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8716 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8717 else
8718 {
8719 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8720 wi::to_mpz (mn, max, TYPE_SIGN (type));
8721 }
8722 }
8723
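/* Illustrative sketch: the caller owns the GMP storage, e.g.

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (type, lo, hi);
     ... use lo and hi ...
     mpz_clear (lo);
     mpz_clear (hi);

   where "type" stands for some integer or pointer type node.  */
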
8724 /* Return true if VAR is an automatic variable defined in function FN. */
8725
8726 bool
8727 auto_var_in_fn_p (const_tree var, const_tree fn)
8728 {
8729 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8730 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8731 || TREE_CODE (var) == PARM_DECL)
8732 && ! TREE_STATIC (var))
8733 || TREE_CODE (var) == LABEL_DECL
8734 || TREE_CODE (var) == RESULT_DECL));
8735 }
8736
8737 /* Subprogram of the following function. Called by walk_tree.
8738
8739 Return *TP if it is an automatic variable or parameter of the
8740 function passed in as DATA. */
8741
8742 static tree
8743 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8744 {
8745 tree fn = (tree) data;
8746
8747 if (TYPE_P (*tp))
8748 *walk_subtrees = 0;
8749
8750 else if (DECL_P (*tp)
8751 && auto_var_in_fn_p (*tp, fn))
8752 return *tp;
8753
8754 return NULL_TREE;
8755 }
8756
8757 /* Returns true if TYPE is, contains, or refers to a type with variable
8758 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8759 arguments, but not the return type. If FN is nonzero, only return
8760 true if a modifier of the type or position of FN is a variable or
8761 parameter inside FN.
8762
8763 This concept is more general than that of C99 'variably modified types':
8764 in C99, a struct type is never variably modified because a VLA may not
8765 appear as a structure member. However, in GNU C, code like:
8766
8767 struct S { int i[f()]; };
8768
8769 is valid, and other languages may define similar constructs. */
8770
8771 bool
8772 variably_modified_type_p (tree type, tree fn)
8773 {
8774 tree t;
8775
8776 /* Test if T is either variable (if FN is zero) or an expression containing
8777 a variable in FN. If TYPE isn't gimplified, return true also if
8778 gimplify_one_sizepos would gimplify the expression into a local
8779 variable. */
8780 #define RETURN_TRUE_IF_VAR(T) \
8781 do { tree _t = (T); \
8782 if (_t != NULL_TREE \
8783 && _t != error_mark_node \
8784 && TREE_CODE (_t) != INTEGER_CST \
8785 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8786 && (!fn \
8787 || (!TYPE_SIZES_GIMPLIFIED (type) \
8788 && !is_gimple_sizepos (_t)) \
8789 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8790 return true; } while (0)
8791
8792 if (type == error_mark_node)
8793 return false;
8794
8795 /* If TYPE itself has variable size, it is variably modified. */
8796 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8797 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8798
8799 switch (TREE_CODE (type))
8800 {
8801 case POINTER_TYPE:
8802 case REFERENCE_TYPE:
8803 case VECTOR_TYPE:
8804 if (variably_modified_type_p (TREE_TYPE (type), fn))
8805 return true;
8806 break;
8807
8808 case FUNCTION_TYPE:
8809 case METHOD_TYPE:
8810 /* If TYPE is a function type, it is variably modified if the
8811 return type is variably modified. */
8812 if (variably_modified_type_p (TREE_TYPE (type), fn))
8813 return true;
8814 break;
8815
8816 case INTEGER_TYPE:
8817 case REAL_TYPE:
8818 case FIXED_POINT_TYPE:
8819 case ENUMERAL_TYPE:
8820 case BOOLEAN_TYPE:
8821 /* Scalar types are variably modified if their end points
8822 aren't constant. */
8823 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8824 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8825 break;
8826
8827 case RECORD_TYPE:
8828 case UNION_TYPE:
8829 case QUAL_UNION_TYPE:
8830 /* We can't see if any of the fields are variably-modified by the
8831 definition we normally use, since that would produce infinite
8832 recursion via pointers. */
8833 /* This is variably modified if some field's type is. */
8834 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8835 if (TREE_CODE (t) == FIELD_DECL)
8836 {
8837 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8838 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8839 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8840
8841 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8842 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8843 }
8844 break;
8845
8846 case ARRAY_TYPE:
8847 /* Do not call ourselves to avoid infinite recursion. This is
8848 variably modified if the element type is. */
8849 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8850 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8851 break;
8852
8853 default:
8854 break;
8855 }
8856
8857 /* The current language may have other cases to check, but in general,
8858 all other types are not variably modified. */
8859 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8860
8861 #undef RETURN_TRUE_IF_VAR
8862 }
8863
8864 /* Given a DECL or TYPE, return the scope in which it was declared, or
8865 NULL_TREE if there is no containing scope. */
8866
8867 tree
8868 get_containing_scope (const_tree t)
8869 {
8870 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8871 }
8872
8873 /* Return the innermost context enclosing DECL that is
8874 a FUNCTION_DECL, or zero if none. */
8875
8876 tree
8877 decl_function_context (const_tree decl)
8878 {
8879 tree context;
8880
8881 if (TREE_CODE (decl) == ERROR_MARK)
8882 return 0;
8883
8884 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8885 where we look up the function at runtime. Such functions always take
8886 a first argument of type 'pointer to real context'.
8887
8888 C++ should really be fixed to use DECL_CONTEXT for the real context,
8889 and use something else for the "virtual context". */
8890 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8891 context
8892 = TYPE_MAIN_VARIANT
8893 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8894 else
8895 context = DECL_CONTEXT (decl);
8896
8897 while (context && TREE_CODE (context) != FUNCTION_DECL)
8898 {
8899 if (TREE_CODE (context) == BLOCK)
8900 context = BLOCK_SUPERCONTEXT (context);
8901 else
8902 context = get_containing_scope (context);
8903 }
8904
8905 return context;
8906 }
8907
8908 /* Return the innermost context enclosing DECL that is
8909 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8910 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8911
8912 tree
8913 decl_type_context (const_tree decl)
8914 {
8915 tree context = DECL_CONTEXT (decl);
8916
8917 while (context)
8918 switch (TREE_CODE (context))
8919 {
8920 case NAMESPACE_DECL:
8921 case TRANSLATION_UNIT_DECL:
8922 return NULL_TREE;
8923
8924 case RECORD_TYPE:
8925 case UNION_TYPE:
8926 case QUAL_UNION_TYPE:
8927 return context;
8928
8929 case TYPE_DECL:
8930 case FUNCTION_DECL:
8931 context = DECL_CONTEXT (context);
8932 break;
8933
8934 case BLOCK:
8935 context = BLOCK_SUPERCONTEXT (context);
8936 break;
8937
8938 default:
8939 gcc_unreachable ();
8940 }
8941
8942 return NULL_TREE;
8943 }
8944
8945 /* CALL is a CALL_EXPR. Return the declaration for the function
8946 called, or NULL_TREE if the called function cannot be
8947 determined. */
8948
8949 tree
8950 get_callee_fndecl (const_tree call)
8951 {
8952 tree addr;
8953
8954 if (call == error_mark_node)
8955 return error_mark_node;
8956
8957 /* It's invalid to call this function with anything but a
8958 CALL_EXPR. */
8959 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8960
8961 /* The first operand to the CALL is the address of the function
8962 called. */
8963 addr = CALL_EXPR_FN (call);
8964
8965 /* If there is no function, return early. */
8966 if (addr == NULL_TREE)
8967 return NULL_TREE;
8968
8969 STRIP_NOPS (addr);
8970
8971 /* If this is a readonly function pointer, extract its initial value. */
8972 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8973 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8974 && DECL_INITIAL (addr))
8975 addr = DECL_INITIAL (addr);
8976
8977 /* If the address is just `&f' for some function `f', then we know
8978 that `f' is being called. */
8979 if (TREE_CODE (addr) == ADDR_EXPR
8980 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8981 return TREE_OPERAND (addr, 0);
8982
8983 /* We couldn't figure out what was being called. */
8984 return NULL_TREE;
8985 }
8986
8987 /* Print debugging information about tree nodes generated during the compile,
8988 and any language-specific information. */
8989
8990 void
8991 dump_tree_statistics (void)
8992 {
8993 if (GATHER_STATISTICS)
8994 {
8995 int i;
8996 int total_nodes, total_bytes;
8997 fprintf (stderr, "Kind Nodes Bytes\n");
8998 fprintf (stderr, "---------------------------------------\n");
8999 total_nodes = total_bytes = 0;
9000 for (i = 0; i < (int) all_kinds; i++)
9001 {
9002 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9003 tree_node_counts[i], tree_node_sizes[i]);
9004 total_nodes += tree_node_counts[i];
9005 total_bytes += tree_node_sizes[i];
9006 }
9007 fprintf (stderr, "---------------------------------------\n");
9008 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9009 fprintf (stderr, "---------------------------------------\n");
9010 fprintf (stderr, "Code Nodes\n");
9011 fprintf (stderr, "----------------------------\n");
9012 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9013 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9014 tree_code_counts[i]);
9015 fprintf (stderr, "----------------------------\n");
9016 ssanames_print_statistics ();
9017 phinodes_print_statistics ();
9018 }
9019 else
9020 fprintf (stderr, "(No per-node statistics)\n");
9021
9022 print_type_hash_statistics ();
9023 print_debug_expr_statistics ();
9024 print_value_expr_statistics ();
9025 lang_hooks.print_statistics ();
9026 }
9027 \f
9028 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9029
9030 /* Fold the BITS most significant bits of VALUE into the crc32 CHKSUM. */
9031
9032 static unsigned
9033 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9034 {
9035 unsigned ix;
9036
9037 for (ix = bits; ix--; value <<= 1)
9038 {
9039 unsigned feedback;
9040
9041 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9042 chksum <<= 1;
9043 chksum ^= feedback;
9044 }
9045 return chksum;
9046 }
9047
9048 /* Generate a crc32 of a 32-bit unsigned. */
9049
9050 unsigned
9051 crc32_unsigned (unsigned chksum, unsigned value)
9052 {
9053 return crc32_unsigned_bits (chksum, value, 32);
9054 }
9055
9056 /* Generate a crc32 of a byte. */
9057
9058 unsigned
9059 crc32_byte (unsigned chksum, char byte)
9060 {
9061 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9062 }
9063
9064 /* Generate a crc32 of a string. */
9065
9066 unsigned
9067 crc32_string (unsigned chksum, const char *string)
9068 {
9069 do
9070 {
9071 chksum = crc32_byte (chksum, *string);
9072 }
9073 while (*string++);
9074 return chksum;
9075 }
9076
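/* Illustrative sketch: get_file_function_name below seeds the checksum
   with zero, as in

     unsigned chk = crc32_string (0, "some_global_name");

   Note that the terminating NUL byte is folded into the result as well.  */
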
9077 /* P is a string that will be used in a symbol. Mask out any characters
9078 that are not valid in that context. */
9079
9080 void
9081 clean_symbol_name (char *p)
9082 {
9083 for (; *p; p++)
9084 if (! (ISALNUM (*p)
9085 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9086 || *p == '$'
9087 #endif
9088 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9089 || *p == '.'
9090 #endif
9091 ))
9092 *p = '_';
9093 }
9094
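/* For example, on a target where neither '$' nor '.' may appear in labels,
   a buffer holding "foo-bar.c" is rewritten in place to "foo_bar_c".  */
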
9095 /* Generate a name for a special-purpose function.
9096 The generated name may need to be unique across the whole link.
9097 Changes to this function may also require corresponding changes to
9098 xstrdup_mask_random.
9099 TYPE is some string to identify the purpose of this function to the
9100 linker or collect2; it must start with an uppercase letter,
9101 one of:
9102 I - for constructors
9103 D - for destructors
9104 N - for C++ anonymous namespaces
9105 F - for DWARF unwind frame information. */
9106
9107 tree
9108 get_file_function_name (const char *type)
9109 {
9110 char *buf;
9111 const char *p;
9112 char *q;
9113
9114 /* If we already have a name we know to be unique, just use that. */
9115 if (first_global_object_name)
9116 p = q = ASTRDUP (first_global_object_name);
9117 /* If the target is handling the constructors/destructors, they
9118 will be local to this file and the name is only necessary for
9119 debugging purposes.
9120 We also assign sub_I and sub_D suffixes to constructors called from
9121 the global static constructors. These are always local. */
9122 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9123 || (strncmp (type, "sub_", 4) == 0
9124 && (type[4] == 'I' || type[4] == 'D')))
9125 {
9126 const char *file = main_input_filename;
9127 if (! file)
9128 file = LOCATION_FILE (input_location);
9129 /* Just use the file's basename, because the full pathname
9130 might be quite long. */
9131 p = q = ASTRDUP (lbasename (file));
9132 }
9133 else
9134 {
9135 /* Otherwise, the name must be unique across the entire link.
9136 We don't have anything that we know to be unique to this translation
9137 unit, so use what we do have and throw in some randomness. */
9138 unsigned len;
9139 const char *name = weak_global_object_name;
9140 const char *file = main_input_filename;
9141
9142 if (! name)
9143 name = "";
9144 if (! file)
9145 file = LOCATION_FILE (input_location);
9146
9147 len = strlen (file);
9148 q = (char *) alloca (9 + 17 + len + 1);
9149 memcpy (q, file, len + 1);
9150
9151 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9152 crc32_string (0, name), get_random_seed (false));
9153
9154 p = q;
9155 }
9156
9157 clean_symbol_name (q);
9158 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9159 + strlen (type));
9160
9161 /* Set up the name of the file-level functions we may need.
9162 Use a global object (which is already required to be unique over
9163 the program) rather than the file name (which imposes extra
9164 constraints). */
9165 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9166
9167 return get_identifier (buf);
9168 }
9169 \f
9170 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9171
9172 /* Complain that the tree code of NODE does not match the expected
9173 zero-terminated list of trailing codes. The trailing code list can be
9174 empty, for a more vague error message. FILE, LINE, and FUNCTION
9175 are of the caller. */
9176
9177 void
9178 tree_check_failed (const_tree node, const char *file,
9179 int line, const char *function, ...)
9180 {
9181 va_list args;
9182 const char *buffer;
9183 unsigned length = 0;
9184 enum tree_code code;
9185
9186 va_start (args, function);
9187 while ((code = (enum tree_code) va_arg (args, int)))
9188 length += 4 + strlen (get_tree_code_name (code));
9189 va_end (args);
9190 if (length)
9191 {
9192 char *tmp;
9193 va_start (args, function);
9194 length += strlen ("expected ");
9195 buffer = tmp = (char *) alloca (length);
9196 length = 0;
9197 while ((code = (enum tree_code) va_arg (args, int)))
9198 {
9199 const char *prefix = length ? " or " : "expected ";
9200
9201 strcpy (tmp + length, prefix);
9202 length += strlen (prefix);
9203 strcpy (tmp + length, get_tree_code_name (code));
9204 length += strlen (get_tree_code_name (code));
9205 }
9206 va_end (args);
9207 }
9208 else
9209 buffer = "unexpected node";
9210
9211 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9212 buffer, get_tree_code_name (TREE_CODE (node)),
9213 function, trim_filename (file), line);
9214 }
9215
9216 /* Complain that the tree code of NODE matches one of the codes in the
9217 zero-terminated list of disallowed trailing codes. FILE, LINE, and
9218 FUNCTION are of the caller. */
9219
9220 void
9221 tree_not_check_failed (const_tree node, const char *file,
9222 int line, const char *function, ...)
9223 {
9224 va_list args;
9225 char *buffer;
9226 unsigned length = 0;
9227 enum tree_code code;
9228
9229 va_start (args, function);
9230 while ((code = (enum tree_code) va_arg (args, int)))
9231 length += 4 + strlen (get_tree_code_name (code));
9232 va_end (args);
9233 va_start (args, function);
9234 buffer = (char *) alloca (length);
9235 length = 0;
9236 while ((code = (enum tree_code) va_arg (args, int)))
9237 {
9238 if (length)
9239 {
9240 strcpy (buffer + length, " or ");
9241 length += 4;
9242 }
9243 strcpy (buffer + length, get_tree_code_name (code));
9244 length += strlen (get_tree_code_name (code));
9245 }
9246 va_end (args);
9247
9248 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9249 buffer, get_tree_code_name (TREE_CODE (node)),
9250 function, trim_filename (file), line);
9251 }
9252
9253 /* Similar to tree_check_failed, except that we check for a class of tree
9254 code, given in CL. */
9255
9256 void
9257 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9258 const char *file, int line, const char *function)
9259 {
9260 internal_error
9261 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9262 TREE_CODE_CLASS_STRING (cl),
9263 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9264 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9265 }
9266
9267 /* Similar to tree_check_failed, except that instead of specifying a
9268 dozen codes, use the knowledge that they're all sequential. */
9269
9270 void
9271 tree_range_check_failed (const_tree node, const char *file, int line,
9272 const char *function, enum tree_code c1,
9273 enum tree_code c2)
9274 {
9275 char *buffer;
9276 unsigned length = 0;
9277 unsigned int c;
9278
9279 for (c = c1; c <= c2; ++c)
9280 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9281
9282 length += strlen ("expected ");
9283 buffer = (char *) alloca (length);
9284 length = 0;
9285
9286 for (c = c1; c <= c2; ++c)
9287 {
9288 const char *prefix = length ? " or " : "expected ";
9289
9290 strcpy (buffer + length, prefix);
9291 length += strlen (prefix);
9292 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9293 length += strlen (get_tree_code_name ((enum tree_code) c));
9294 }
9295
9296 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9297 buffer, get_tree_code_name (TREE_CODE (node)),
9298 function, trim_filename (file), line);
9299 }
9300
9301
9302 /* Similar to tree_check_failed, except that we check that a tree does
9303 not belong to the specified class of tree codes, given in CL. */
9304
9305 void
9306 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9307 const char *file, int line, const char *function)
9308 {
9309 internal_error
9310 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9311 TREE_CODE_CLASS_STRING (cl),
9312 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9313 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9314 }
9315
9316
9317 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9318
9319 void
9320 omp_clause_check_failed (const_tree node, const char *file, int line,
9321 const char *function, enum omp_clause_code code)
9322 {
9323 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9324 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9325 function, trim_filename (file), line);
9326 }
9327
9328
9329 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9330
9331 void
9332 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9333 const char *function, enum omp_clause_code c1,
9334 enum omp_clause_code c2)
9335 {
9336 char *buffer;
9337 unsigned length = 0;
9338 unsigned int c;
9339
9340 for (c = c1; c <= c2; ++c)
9341 length += 4 + strlen (omp_clause_code_name[c]);
9342
9343 length += strlen ("expected ");
9344 buffer = (char *) alloca (length);
9345 length = 0;
9346
9347 for (c = c1; c <= c2; ++c)
9348 {
9349 const char *prefix = length ? " or " : "expected ";
9350
9351 strcpy (buffer + length, prefix);
9352 length += strlen (prefix);
9353 strcpy (buffer + length, omp_clause_code_name[c]);
9354 length += strlen (omp_clause_code_name[c]);
9355 }
9356
9357 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9358 buffer, omp_clause_code_name[TREE_CODE (node)],
9359 function, trim_filename (file), line);
9360 }
9361
9362
9363 #undef DEFTREESTRUCT
9364 #define DEFTREESTRUCT(VAL, NAME) NAME,
9365
9366 static const char *ts_enum_names[] = {
9367 #include "treestruct.def"
9368 };
9369 #undef DEFTREESTRUCT
9370
9371 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9372
9373 /* Similar to tree_class_check_failed, except that we check for
9374 whether the code of NODE contains the tree structure identified by EN. */
9375
9376 void
9377 tree_contains_struct_check_failed (const_tree node,
9378 const enum tree_node_structure_enum en,
9379 const char *file, int line,
9380 const char *function)
9381 {
9382 internal_error
9383 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9384 TS_ENUM_NAME (en),
9385 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9386 }
9387
9388
9389 /* Similar to above, except that the check is for the bounds of a
9390 tree_int_cst's (dynamically sized) array of elements. */
9391
9392 void
9393 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9394 const char *function)
9395 {
9396 internal_error
9397 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9398 idx + 1, len, function, trim_filename (file), line);
9399 }
9400
9401 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9402 (dynamically sized) vector. */
9403
9404 void
9405 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9406 const char *function)
9407 {
9408 internal_error
9409 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9410 idx + 1, len, function, trim_filename (file), line);
9411 }
9412
9413 /* Similar to above, except that the check is for the bounds of the operand
9414 vector of an expression node EXP. */
9415
9416 void
9417 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9418 int line, const char *function)
9419 {
9420 enum tree_code code = TREE_CODE (exp);
9421 internal_error
9422 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9423 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9424 function, trim_filename (file), line);
9425 }
9426
9427 /* Similar to above, except that the check is for the number of
9428 operands of an OMP_CLAUSE node. */
9429
9430 void
9431 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9432 int line, const char *function)
9433 {
9434 internal_error
9435 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9436 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9437 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9438 trim_filename (file), line);
9439 }
9440 #endif /* ENABLE_TREE_CHECKING */
9441 \f
9442 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9443 and mapped to the machine mode MODE. Initialize its fields and build
9444 the information necessary for debugging output. */
9445
9446 static tree
9447 make_vector_type (tree innertype, int nunits, machine_mode mode)
9448 {
9449 tree t;
9450 inchash::hash hstate;
9451
9452 t = make_node (VECTOR_TYPE);
9453 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9454 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9455 SET_TYPE_MODE (t, mode);
9456
9457 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9458 SET_TYPE_STRUCTURAL_EQUALITY (t);
9459 else if (TYPE_CANONICAL (innertype) != innertype
9460 || mode != VOIDmode)
9461 TYPE_CANONICAL (t)
9462 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9463
9464 layout_type (t);
9465
9466 hstate.add_wide_int (VECTOR_TYPE);
9467 hstate.add_wide_int (nunits);
9468 hstate.add_wide_int (mode);
9469 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9470 t = type_hash_canon (hstate.end (), t);
9471
9472 /* We have built a main variant, based on the main variant of the
9473 inner type. Use it to build the variant we return. */
9474 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9475 && TREE_TYPE (t) != innertype)
9476 return build_type_attribute_qual_variant (t,
9477 TYPE_ATTRIBUTES (innertype),
9478 TYPE_QUALS (innertype));
9479
9480 return t;
9481 }
9482
9483 static tree
9484 make_or_reuse_type (unsigned size, int unsignedp)
9485 {
9486 int i;
9487
9488 if (size == INT_TYPE_SIZE)
9489 return unsignedp ? unsigned_type_node : integer_type_node;
9490 if (size == CHAR_TYPE_SIZE)
9491 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9492 if (size == SHORT_TYPE_SIZE)
9493 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9494 if (size == LONG_TYPE_SIZE)
9495 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9496 if (size == LONG_LONG_TYPE_SIZE)
9497 return (unsignedp ? long_long_unsigned_type_node
9498 : long_long_integer_type_node);
9499
9500 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9501 if (size == int_n_data[i].bitsize
9502 && int_n_enabled_p[i])
9503 return (unsignedp ? int_n_trees[i].unsigned_type
9504 : int_n_trees[i].signed_type);
9505
9506 if (unsignedp)
9507 return make_unsigned_type (size);
9508 else
9509 return make_signed_type (size);
9510 }
9511
9512 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9513
9514 static tree
9515 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9516 {
9517 if (satp)
9518 {
9519 if (size == SHORT_FRACT_TYPE_SIZE)
9520 return unsignedp ? sat_unsigned_short_fract_type_node
9521 : sat_short_fract_type_node;
9522 if (size == FRACT_TYPE_SIZE)
9523 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9524 if (size == LONG_FRACT_TYPE_SIZE)
9525 return unsignedp ? sat_unsigned_long_fract_type_node
9526 : sat_long_fract_type_node;
9527 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9528 return unsignedp ? sat_unsigned_long_long_fract_type_node
9529 : sat_long_long_fract_type_node;
9530 }
9531 else
9532 {
9533 if (size == SHORT_FRACT_TYPE_SIZE)
9534 return unsignedp ? unsigned_short_fract_type_node
9535 : short_fract_type_node;
9536 if (size == FRACT_TYPE_SIZE)
9537 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9538 if (size == LONG_FRACT_TYPE_SIZE)
9539 return unsignedp ? unsigned_long_fract_type_node
9540 : long_fract_type_node;
9541 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9542 return unsignedp ? unsigned_long_long_fract_type_node
9543 : long_long_fract_type_node;
9544 }
9545
9546 return make_fract_type (size, unsignedp, satp);
9547 }
9548
9549 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9550
9551 static tree
9552 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9553 {
9554 if (satp)
9555 {
9556 if (size == SHORT_ACCUM_TYPE_SIZE)
9557 return unsignedp ? sat_unsigned_short_accum_type_node
9558 : sat_short_accum_type_node;
9559 if (size == ACCUM_TYPE_SIZE)
9560 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9561 if (size == LONG_ACCUM_TYPE_SIZE)
9562 return unsignedp ? sat_unsigned_long_accum_type_node
9563 : sat_long_accum_type_node;
9564 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9565 return unsignedp ? sat_unsigned_long_long_accum_type_node
9566 : sat_long_long_accum_type_node;
9567 }
9568 else
9569 {
9570 if (size == SHORT_ACCUM_TYPE_SIZE)
9571 return unsignedp ? unsigned_short_accum_type_node
9572 : short_accum_type_node;
9573 if (size == ACCUM_TYPE_SIZE)
9574 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9575 if (size == LONG_ACCUM_TYPE_SIZE)
9576 return unsignedp ? unsigned_long_accum_type_node
9577 : long_accum_type_node;
9578 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9579 return unsignedp ? unsigned_long_long_accum_type_node
9580 : long_long_accum_type_node;
9581 }
9582
9583 return make_accum_type (size, unsignedp, satp);
9584 }
9585
9586
9587 /* Create an atomic variant node for TYPE. This routine is called
9588 during initialization of data types to create the 5 basic atomic
9589 types. The generic build_variant_type function requires these to
9590 already be set up in order to function properly, so cannot be
9591 called from there. If ALIGN is non-zero, the alignment is
9592 overridden with this value. */
9593
9594 static tree
9595 build_atomic_base (tree type, unsigned int align)
9596 {
9597 tree t;
9598
9599 /* Make sure it's not already registered. */
9600 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9601 return t;
9602
9603 t = build_variant_type_copy (type);
9604 set_type_quals (t, TYPE_QUAL_ATOMIC);
9605
9606 if (align)
9607 TYPE_ALIGN (t) = align;
9608
9609 return t;
9610 }
9611
9612 /* Create nodes for all integer types (and error_mark_node) using the sizes
9613 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9614 SHORT_DOUBLE specifies whether double should be of the same precision
9615 as float. */
9616
9617 void
9618 build_common_tree_nodes (bool signed_char, bool short_double)
9619 {
9620 int i;
9621
9622 error_mark_node = make_node (ERROR_MARK);
9623 TREE_TYPE (error_mark_node) = error_mark_node;
9624
9625 initialize_sizetypes ();
9626
9627 /* Define both `signed char' and `unsigned char'. */
9628 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9629 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9630 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9631 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9632
9633 /* Define `char', which is like either `signed char' or `unsigned char'
9634 but not the same as either. */
9635 char_type_node
9636 = (signed_char
9637 ? make_signed_type (CHAR_TYPE_SIZE)
9638 : make_unsigned_type (CHAR_TYPE_SIZE));
9639 TYPE_STRING_FLAG (char_type_node) = 1;
9640
9641 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9642 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9643 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9644 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9645 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9646 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9647 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9648 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9649
9650 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9651 {
9652 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9653 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9654 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9655 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9656
9657 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9658 && int_n_enabled_p[i])
9659 {
9660 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9661 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9662 }
9663 }
9664
9665 /* Define a boolean type. This type only represents boolean values but
9666 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9667 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9668 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9669 TYPE_PRECISION (boolean_type_node) = 1;
9670 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9671
9672 /* Define what type to use for size_t. */
9673 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9674 size_type_node = unsigned_type_node;
9675 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9676 size_type_node = long_unsigned_type_node;
9677 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9678 size_type_node = long_long_unsigned_type_node;
9679 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9680 size_type_node = short_unsigned_type_node;
9681 else
9682 {
9683 int i;
9684
9685 size_type_node = NULL_TREE;
9686 for (i = 0; i < NUM_INT_N_ENTS; i++)
9687 if (int_n_enabled_p[i])
9688 {
9689 char name[50];
9690 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9691
9692 if (strcmp (name, SIZE_TYPE) == 0)
9693 {
9694 size_type_node = int_n_trees[i].unsigned_type;
9695 }
9696 }
9697 if (size_type_node == NULL_TREE)
9698 gcc_unreachable ();
9699 }
9700
9701 /* Fill in the rest of the sized types. Reuse existing type nodes
9702 when possible. */
9703 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9704 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9705 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9706 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9707 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9708
9709 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9710 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9711 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9712 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9713 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9714
9715 /* Don't call build_qualified_type for atomics. That routine does
9716 special processing for atomics, and until they are initialized
9717 it's better not to make that call.
9718
9719 Check to see if there is a target override for atomic types. */
9720
9721 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9722 targetm.atomic_align_for_mode (QImode));
9723 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9724 targetm.atomic_align_for_mode (HImode));
9725 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9726 targetm.atomic_align_for_mode (SImode));
9727 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9728 targetm.atomic_align_for_mode (DImode));
9729 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9730 targetm.atomic_align_for_mode (TImode));
9731
9732 access_public_node = get_identifier ("public");
9733 access_protected_node = get_identifier ("protected");
9734 access_private_node = get_identifier ("private");
9735
9736 /* Define these next since types below may use them. */
9737 integer_zero_node = build_int_cst (integer_type_node, 0);
9738 integer_one_node = build_int_cst (integer_type_node, 1);
9739 integer_three_node = build_int_cst (integer_type_node, 3);
9740 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9741
9742 size_zero_node = size_int (0);
9743 size_one_node = size_int (1);
9744 bitsize_zero_node = bitsize_int (0);
9745 bitsize_one_node = bitsize_int (1);
9746 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9747
9748 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9749 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9750
9751 void_type_node = make_node (VOID_TYPE);
9752 layout_type (void_type_node);
9753
9754 pointer_bounds_type_node = targetm.chkp_bound_type ();
9755
9756 /* We are not going to have real types in C with less than byte alignment,
9757 so we might as well not have any types that claim to have it. */
9758 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9759 TYPE_USER_ALIGN (void_type_node) = 0;
9760
9761 void_node = make_node (VOID_CST);
9762 TREE_TYPE (void_node) = void_type_node;
9763
9764 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9765 layout_type (TREE_TYPE (null_pointer_node));
9766
9767 ptr_type_node = build_pointer_type (void_type_node);
9768 const_ptr_type_node
9769 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9770 fileptr_type_node = ptr_type_node;
9771
9772 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9773
9774 float_type_node = make_node (REAL_TYPE);
9775 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9776 layout_type (float_type_node);
9777
9778 double_type_node = make_node (REAL_TYPE);
9779 if (short_double)
9780 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9781 else
9782 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9783 layout_type (double_type_node);
9784
9785 long_double_type_node = make_node (REAL_TYPE);
9786 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9787 layout_type (long_double_type_node);
9788
9789 float_ptr_type_node = build_pointer_type (float_type_node);
9790 double_ptr_type_node = build_pointer_type (double_type_node);
9791 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9792 integer_ptr_type_node = build_pointer_type (integer_type_node);
9793
9794 /* Fixed size integer types. */
9795 uint16_type_node = make_or_reuse_type (16, 1);
9796 uint32_type_node = make_or_reuse_type (32, 1);
9797 uint64_type_node = make_or_reuse_type (64, 1);
9798
9799 /* Decimal float types. */
9800 dfloat32_type_node = make_node (REAL_TYPE);
9801 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9802 layout_type (dfloat32_type_node);
9803 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9804 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9805
9806 dfloat64_type_node = make_node (REAL_TYPE);
9807 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9808 layout_type (dfloat64_type_node);
9809 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9810 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9811
9812 dfloat128_type_node = make_node (REAL_TYPE);
9813 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9814 layout_type (dfloat128_type_node);
9815 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9816 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9817
9818 complex_integer_type_node = build_complex_type (integer_type_node);
9819 complex_float_type_node = build_complex_type (float_type_node);
9820 complex_double_type_node = build_complex_type (double_type_node);
9821 complex_long_double_type_node = build_complex_type (long_double_type_node);
9822
9823 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9824 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9825 sat_ ## KIND ## _type_node = \
9826 make_sat_signed_ ## KIND ## _type (SIZE); \
9827 sat_unsigned_ ## KIND ## _type_node = \
9828 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9829 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9830 unsigned_ ## KIND ## _type_node = \
9831 make_unsigned_ ## KIND ## _type (SIZE);
9832
9833 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9834 sat_ ## WIDTH ## KIND ## _type_node = \
9835 make_sat_signed_ ## KIND ## _type (SIZE); \
9836 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9837 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9838 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9839 unsigned_ ## WIDTH ## KIND ## _type_node = \
9840 make_unsigned_ ## KIND ## _type (SIZE);
9841
9842 /* Make fixed-point type nodes based on four different widths. */
9843 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9844 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9845 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9846 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9847 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9848
9849 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9850 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9851 NAME ## _type_node = \
9852 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9853 u ## NAME ## _type_node = \
9854 make_or_reuse_unsigned_ ## KIND ## _type \
9855 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9856 sat_ ## NAME ## _type_node = \
9857 make_or_reuse_sat_signed_ ## KIND ## _type \
9858 (GET_MODE_BITSIZE (MODE ## mode)); \
9859 sat_u ## NAME ## _type_node = \
9860 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9861 (GET_MODE_BITSIZE (U ## MODE ## mode));
9862
9863 /* Fixed-point type and mode nodes. */
9864 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9865 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9866 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9867 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9868 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9869 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9870 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9871 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9872 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9873 MAKE_FIXED_MODE_NODE (accum, da, DA)
9874 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9875
9876 {
9877 tree t = targetm.build_builtin_va_list ();
9878
9879 /* Many back-ends define record types without setting TYPE_NAME.
9880 If we copied the record type here, we'd keep the original
9881 record type without a name. This breaks name mangling. So,
9882 don't copy record types and let c_common_nodes_and_builtins()
9883 declare the type to be __builtin_va_list. */
9884 if (TREE_CODE (t) != RECORD_TYPE)
9885 t = build_variant_type_copy (t);
9886
9887 va_list_type_node = t;
9888 }
9889 }
9890
9891 /* Modify DECL for given flags.
9892 TM_PURE attribute is set only on types, so the function will modify
9893 DECL's type when ECF_TM_PURE is used. */
9894
9895 void
9896 set_call_expr_flags (tree decl, int flags)
9897 {
9898 if (flags & ECF_NOTHROW)
9899 TREE_NOTHROW (decl) = 1;
9900 if (flags & ECF_CONST)
9901 TREE_READONLY (decl) = 1;
9902 if (flags & ECF_PURE)
9903 DECL_PURE_P (decl) = 1;
9904 if (flags & ECF_LOOPING_CONST_OR_PURE)
9905 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9906 if (flags & ECF_NOVOPS)
9907 DECL_IS_NOVOPS (decl) = 1;
9908 if (flags & ECF_NORETURN)
9909 TREE_THIS_VOLATILE (decl) = 1;
9910 if (flags & ECF_MALLOC)
9911 DECL_IS_MALLOC (decl) = 1;
9912 if (flags & ECF_RETURNS_TWICE)
9913 DECL_IS_RETURNS_TWICE (decl) = 1;
9914 if (flags & ECF_LEAF)
9915 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9916 NULL, DECL_ATTRIBUTES (decl));
9917 if ((flags & ECF_TM_PURE) && flag_tm)
9918 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9919 /* Looping const or pure is implied by noreturn.
9920 There is currently no way to declare looping const or looping pure alone. */
9921 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9922 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9923 }
9924
9925
9926 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9927
9928 static void
9929 local_define_builtin (const char *name, tree type, enum built_in_function code,
9930 const char *library_name, int ecf_flags)
9931 {
9932 tree decl;
9933
9934 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9935 library_name, NULL_TREE);
9936 set_call_expr_flags (decl, ecf_flags);
9937
9938 set_builtin_decl (code, decl, true);
9939 }
9940
9941 /* Call this function after instantiating all builtins that the language
9942 front end cares about. This will build the rest of the builtins
9943 and internal functions that are relied upon by the tree optimizers and
9944 the middle-end. */
9945
9946 void
9947 build_common_builtin_nodes (void)
9948 {
9949 tree tmp, ftype;
9950 int ecf_flags;
9951
9952 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9953 {
9954 ftype = build_function_type (void_type_node, void_list_node);
9955 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9956 "__builtin_unreachable",
9957 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9958 | ECF_CONST);
9959 }
9960
9961 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9962 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9963 {
9964 ftype = build_function_type_list (ptr_type_node,
9965 ptr_type_node, const_ptr_type_node,
9966 size_type_node, NULL_TREE);
9967
9968 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9969 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9970 "memcpy", ECF_NOTHROW | ECF_LEAF);
9971 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9972 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9973 "memmove", ECF_NOTHROW | ECF_LEAF);
9974 }
9975
9976 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9977 {
9978 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9979 const_ptr_type_node, size_type_node,
9980 NULL_TREE);
9981 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9982 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9983 }
9984
9985 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9986 {
9987 ftype = build_function_type_list (ptr_type_node,
9988 ptr_type_node, integer_type_node,
9989 size_type_node, NULL_TREE);
9990 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9991 "memset", ECF_NOTHROW | ECF_LEAF);
9992 }
9993
9994 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9995 {
9996 ftype = build_function_type_list (ptr_type_node,
9997 size_type_node, NULL_TREE);
9998 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9999 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10000 }
10001
10002 ftype = build_function_type_list (ptr_type_node, size_type_node,
10003 size_type_node, NULL_TREE);
10004 local_define_builtin ("__builtin_alloca_with_align", ftype,
10005 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
10006 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10007
10008 /* If we're checking the stack, `alloca' can throw. */
10009 if (flag_stack_check)
10010 {
10011 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10012 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10013 }
10014
10015 ftype = build_function_type_list (void_type_node,
10016 ptr_type_node, ptr_type_node,
10017 ptr_type_node, NULL_TREE);
10018 local_define_builtin ("__builtin_init_trampoline", ftype,
10019 BUILT_IN_INIT_TRAMPOLINE,
10020 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10021 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10022 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10023 "__builtin_init_heap_trampoline",
10024 ECF_NOTHROW | ECF_LEAF);
10025
10026 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10027 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10028 BUILT_IN_ADJUST_TRAMPOLINE,
10029 "__builtin_adjust_trampoline",
10030 ECF_CONST | ECF_NOTHROW);
10031
10032 ftype = build_function_type_list (void_type_node,
10033 ptr_type_node, ptr_type_node, NULL_TREE);
10034 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10035 BUILT_IN_NONLOCAL_GOTO,
10036 "__builtin_nonlocal_goto",
10037 ECF_NORETURN | ECF_NOTHROW);
10038
10039 ftype = build_function_type_list (void_type_node,
10040 ptr_type_node, ptr_type_node, NULL_TREE);
10041 local_define_builtin ("__builtin_setjmp_setup", ftype,
10042 BUILT_IN_SETJMP_SETUP,
10043 "__builtin_setjmp_setup", ECF_NOTHROW);
10044
10045 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10046 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10047 BUILT_IN_SETJMP_RECEIVER,
10048 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10049
10050 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10051 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10052 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10053
10054 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10055 local_define_builtin ("__builtin_stack_restore", ftype,
10056 BUILT_IN_STACK_RESTORE,
10057 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10058
10059 /* If there's a possibility that we might use the ARM EABI, build the
10060 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10061 if (targetm.arm_eabi_unwinder)
10062 {
10063 ftype = build_function_type_list (void_type_node, NULL_TREE);
10064 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10065 BUILT_IN_CXA_END_CLEANUP,
10066 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10067 }
10068
10069 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10070 local_define_builtin ("__builtin_unwind_resume", ftype,
10071 BUILT_IN_UNWIND_RESUME,
10072 ((targetm_common.except_unwind_info (&global_options)
10073 == UI_SJLJ)
10074 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10075 ECF_NORETURN);
10076
10077 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10078 {
10079 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10080 NULL_TREE);
10081 local_define_builtin ("__builtin_return_address", ftype,
10082 BUILT_IN_RETURN_ADDRESS,
10083 "__builtin_return_address",
10084 ECF_NOTHROW);
10085 }
10086
10087 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10088 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10089 {
10090 ftype = build_function_type_list (void_type_node, ptr_type_node,
10091 ptr_type_node, NULL_TREE);
10092 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10093 local_define_builtin ("__cyg_profile_func_enter", ftype,
10094 BUILT_IN_PROFILE_FUNC_ENTER,
10095 "__cyg_profile_func_enter", 0);
10096 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10097 local_define_builtin ("__cyg_profile_func_exit", ftype,
10098 BUILT_IN_PROFILE_FUNC_EXIT,
10099 "__cyg_profile_func_exit", 0);
10100 }
10101
10102 /* The exception object and filter values from the runtime. The argument
10103 must be zero before exception lowering, i.e. from the front end. After
10104 exception lowering, it will be the region number for the exception
10105 landing pad. These functions are PURE instead of CONST to prevent
10106 them from being hoisted past the exception edge that will initialize
10107 their values in the landing pad. */
10108 ftype = build_function_type_list (ptr_type_node,
10109 integer_type_node, NULL_TREE);
10110 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10111 /* Only use TM_PURE if we have TM language support. */
10112 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10113 ecf_flags |= ECF_TM_PURE;
10114 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10115 "__builtin_eh_pointer", ecf_flags);
10116
10117 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10118 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10119 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10120 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10121
10122 ftype = build_function_type_list (void_type_node,
10123 integer_type_node, integer_type_node,
10124 NULL_TREE);
10125 local_define_builtin ("__builtin_eh_copy_values", ftype,
10126 BUILT_IN_EH_COPY_VALUES,
10127 "__builtin_eh_copy_values", ECF_NOTHROW);
10128
10129 /* Complex multiplication and division. These are handled as builtins
10130 rather than optabs because emit_library_call_value doesn't support
10131 complex. Further, we can do slightly better with folding these
10132 beasties if the real and imaginary parts of the arguments are separate. */
10133 {
10134 int mode;
10135
10136 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10137 {
10138 char mode_name_buf[4], *q;
10139 const char *p;
10140 enum built_in_function mcode, dcode;
10141 tree type, inner_type;
10142 const char *prefix = "__";
10143
10144 if (targetm.libfunc_gnu_prefix)
10145 prefix = "__gnu_";
10146
10147 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10148 if (type == NULL)
10149 continue;
10150 inner_type = TREE_TYPE (type);
10151
10152 ftype = build_function_type_list (type, inner_type, inner_type,
10153 inner_type, inner_type, NULL_TREE);
10154
10155 mcode = ((enum built_in_function)
10156 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10157 dcode = ((enum built_in_function)
10158 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10159
10160 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10161 *q = TOLOWER (*p);
10162 *q = '\0';
10163
10164 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10165 NULL);
10166 local_define_builtin (built_in_names[mcode], ftype, mcode,
10167 built_in_names[mcode],
10168 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10169
10170 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10171 NULL);
10172 local_define_builtin (built_in_names[dcode], ftype, dcode,
10173 built_in_names[dcode],
10174 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10175 }
10176 }
10177
10178 init_internal_fns ();
10179 }
10180
10181 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10182 better way.
10183
10184 If we requested a pointer to a vector, build up the pointers that
10185 we stripped off while looking for the inner type. Similarly for
10186 return values from functions.
10187
10188 The argument TYPE is the top of the chain, and BOTTOM is the
10189 new type which we will point to. */
10190
10191 tree
10192 reconstruct_complex_type (tree type, tree bottom)
10193 {
10194 tree inner, outer;
10195
10196 if (TREE_CODE (type) == POINTER_TYPE)
10197 {
10198 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10199 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10200 TYPE_REF_CAN_ALIAS_ALL (type));
10201 }
10202 else if (TREE_CODE (type) == REFERENCE_TYPE)
10203 {
10204 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10205 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10206 TYPE_REF_CAN_ALIAS_ALL (type));
10207 }
10208 else if (TREE_CODE (type) == ARRAY_TYPE)
10209 {
10210 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10211 outer = build_array_type (inner, TYPE_DOMAIN (type));
10212 }
10213 else if (TREE_CODE (type) == FUNCTION_TYPE)
10214 {
10215 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10216 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10217 }
10218 else if (TREE_CODE (type) == METHOD_TYPE)
10219 {
10220 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10221 /* The build_method_type_directly() routine prepends 'this' to the argument
10222 list, so we must compensate by getting rid of it here. */
10223 outer
10224 = build_method_type_directly
10225 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10226 inner,
10227 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10228 }
10229 else if (TREE_CODE (type) == OFFSET_TYPE)
10230 {
10231 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10232 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10233 }
10234 else
10235 return bottom;
10236
10237 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10238 TYPE_QUALS (type));
10239 }
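
/* Illustrative sketch (not part of the original source): given TYPE equal
   to "float *" and BOTTOM equal to a V4SF vector type, the recursion above
   strips the POINTER_TYPE, substitutes the vector type as the new inner
   type, and rebuilds a pointer to it with the original pointer mode,
   can-alias-all flag, attributes and qualifiers, i.e. it returns a
   "vector of 4 floats *" type.  */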
10240
10241 /* Returns a vector tree node given a mode (integer or vector) and
10242 the inner type. */
10243 tree
10244 build_vector_type_for_mode (tree innertype, machine_mode mode)
10245 {
10246 int nunits;
10247
10248 switch (GET_MODE_CLASS (mode))
10249 {
10250 case MODE_VECTOR_INT:
10251 case MODE_VECTOR_FLOAT:
10252 case MODE_VECTOR_FRACT:
10253 case MODE_VECTOR_UFRACT:
10254 case MODE_VECTOR_ACCUM:
10255 case MODE_VECTOR_UACCUM:
10256 nunits = GET_MODE_NUNITS (mode);
10257 break;
10258
10259 case MODE_INT:
10260 /* Check that there are no leftover bits. */
10261 gcc_assert (GET_MODE_BITSIZE (mode)
10262 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10263
10264 nunits = GET_MODE_BITSIZE (mode)
10265 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10266 break;
10267
10268 default:
10269 gcc_unreachable ();
10270 }
10271
10272 return make_vector_type (innertype, nunits, mode);
10273 }
10274
10275 /* Similarly, but takes the inner type and number of units, which must be
10276 a power of two. */
10277
10278 tree
10279 build_vector_type (tree innertype, int nunits)
10280 {
10281 return make_vector_type (innertype, nunits, VOIDmode);
10282 }
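
/* Illustrative sketch (not part of the original source): a front end or
   target hook can ask for a vector of four 32-bit integers with

     tree v4si = build_vector_type (intSI_type_node, 4);

   passing VOIDmode so that layout_type chooses a machine mode, or request
   a specific mode directly through build_vector_type_for_mode when the
   mode is already known.  */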
10283
10284 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10285
10286 tree
10287 build_opaque_vector_type (tree innertype, int nunits)
10288 {
10289 tree t = make_vector_type (innertype, nunits, VOIDmode);
10290 tree cand;
10291 /* We always build the non-opaque variant before the opaque one,
10292 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10293 cand = TYPE_NEXT_VARIANT (t);
10294 if (cand
10295 && TYPE_VECTOR_OPAQUE (cand)
10296 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10297 return cand;
10298 /* Otherwise build a variant type and make sure to queue it after
10299 the non-opaque type. */
10300 cand = build_distinct_type_copy (t);
10301 TYPE_VECTOR_OPAQUE (cand) = true;
10302 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10303 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10304 TYPE_NEXT_VARIANT (t) = cand;
10305 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10306 return cand;
10307 }
10308
10309
10310 /* Given an initializer INIT, return TRUE if INIT is zero or some
10311 aggregate of zeros. Otherwise return FALSE. */
10312 bool
10313 initializer_zerop (const_tree init)
10314 {
10315 tree elt;
10316
10317 STRIP_NOPS (init);
10318
10319 switch (TREE_CODE (init))
10320 {
10321 case INTEGER_CST:
10322 return integer_zerop (init);
10323
10324 case REAL_CST:
10325 /* ??? Note that this is not correct for C4X float formats. There,
10326 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10327 negative exponent. */
10328 return real_zerop (init)
10329 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10330
10331 case FIXED_CST:
10332 return fixed_zerop (init);
10333
10334 case COMPLEX_CST:
10335 return integer_zerop (init)
10336 || (real_zerop (init)
10337 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10338 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10339
10340 case VECTOR_CST:
10341 {
10342 unsigned i;
10343 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10344 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10345 return false;
10346 return true;
10347 }
10348
10349 case CONSTRUCTOR:
10350 {
10351 unsigned HOST_WIDE_INT idx;
10352
10353 if (TREE_CLOBBER_P (init))
10354 return false;
10355 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10356 if (!initializer_zerop (elt))
10357 return false;
10358 return true;
10359 }
10360
10361 case STRING_CST:
10362 {
10363 int i;
10364
10365 /* We need to loop through all elements to handle cases like
10366 "\0" and "\0foobar". */
10367 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10368 if (TREE_STRING_POINTER (init)[i] != '\0')
10369 return false;
10370
10371 return true;
10372 }
10373
10374 default:
10375 return false;
10376 }
10377 }
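
/* Illustrative examples (not part of the original source):

     integer constant 0, (char) 0          -> true
     CONSTRUCTOR { 0, 0, 0 }               -> true
     STRING_CST "\0\0"                     -> true
     REAL_CST -0.0                         -> false (sign bit set)
     CONSTRUCTOR { 0, 1 }                  -> false

   Callers typically use this to decide whether an initializer can be
   treated as zero-initialized storage.  */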
10378
10379 /* Check whether vector VEC consists entirely of equal elements and
10380 whether the number of elements corresponds to the type of VEC.
10381 The function returns the first element of the vector,
10382 or NULL_TREE if the vector is not uniform. */
10383 tree
10384 uniform_vector_p (const_tree vec)
10385 {
10386 tree first, t;
10387 unsigned i;
10388
10389 if (vec == NULL_TREE)
10390 return NULL_TREE;
10391
10392 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10393
10394 if (TREE_CODE (vec) == VECTOR_CST)
10395 {
10396 first = VECTOR_CST_ELT (vec, 0);
10397 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10398 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10399 return NULL_TREE;
10400
10401 return first;
10402 }
10403
10404 else if (TREE_CODE (vec) == CONSTRUCTOR)
10405 {
10406 first = error_mark_node;
10407
10408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10409 {
10410 if (i == 0)
10411 {
10412 first = t;
10413 continue;
10414 }
10415 if (!operand_equal_p (first, t, 0))
10416 return NULL_TREE;
10417 }
10418 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10419 return NULL_TREE;
10420
10421 return first;
10422 }
10423
10424 return NULL_TREE;
10425 }
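
/* Illustrative examples (not part of the original source):

     VECTOR_CST { 7, 7, 7, 7 }      -> returns the element 7
     CONSTRUCTOR { x, x, x, x }     -> returns x, provided all elements
                                       compare equal via operand_equal_p
     VECTOR_CST { 1, 2, 3, 4 }      -> returns NULL_TREE
     CONSTRUCTOR with fewer elements than TYPE_VECTOR_SUBPARTS
                                    -> returns NULL_TREE  */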
10426
10427 /* Build an empty statement at location LOC. */
10428
10429 tree
10430 build_empty_stmt (location_t loc)
10431 {
10432 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10433 SET_EXPR_LOCATION (t, loc);
10434 return t;
10435 }
10436
10437
10438 /* Build an OpenMP clause with code CODE. LOC is the location of the
10439 clause. */
10440
10441 tree
10442 build_omp_clause (location_t loc, enum omp_clause_code code)
10443 {
10444 tree t;
10445 int size, length;
10446
10447 length = omp_clause_num_ops[code];
10448 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10449
10450 record_node_allocation_statistics (OMP_CLAUSE, size);
10451
10452 t = (tree) ggc_internal_alloc (size);
10453 memset (t, 0, size);
10454 TREE_SET_CODE (t, OMP_CLAUSE);
10455 OMP_CLAUSE_SET_CODE (t, code);
10456 OMP_CLAUSE_LOCATION (t) = loc;
10457
10458 return t;
10459 }
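
/* Illustrative sketch (not part of the original source): a front end
   lowering "#pragma omp parallel private (x)" might build the clause as

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = x_decl;
     OMP_CLAUSE_CHAIN (c) = other_clauses;

   where x_decl and other_clauses stand for trees the caller already has;
   omp_clause_num_ops determines how many operand slots are allocated.  */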
10460
10461 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10462 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10463 Except for the CODE and operand count field, other storage for the
10464 object is initialized to zeros. */
10465
10466 tree
10467 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10468 {
10469 tree t;
10470 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10471
10472 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10473 gcc_assert (len >= 1);
10474
10475 record_node_allocation_statistics (code, length);
10476
10477 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10478
10479 TREE_SET_CODE (t, code);
10480
10481 /* Can't use TREE_OPERAND to store the length because if checking is
10482 enabled, it will try to check the length before we store it. :-P */
10483 t->exp.operands[0] = build_int_cst (sizetype, len);
10484
10485 return t;
10486 }
10487
10488 /* Helper function for build_call_* functions; build a CALL_EXPR with
10489 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10490 the argument slots. */
10491
10492 static tree
10493 build_call_1 (tree return_type, tree fn, int nargs)
10494 {
10495 tree t;
10496
10497 t = build_vl_exp (CALL_EXPR, nargs + 3);
10498 TREE_TYPE (t) = return_type;
10499 CALL_EXPR_FN (t) = fn;
10500 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10501
10502 return t;
10503 }
10504
10505 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10506 FN and a null static chain slot. NARGS is the number of call arguments
10507 which are specified as "..." arguments. */
10508
10509 tree
10510 build_call_nary (tree return_type, tree fn, int nargs, ...)
10511 {
10512 tree ret;
10513 va_list args;
10514 va_start (args, nargs);
10515 ret = build_call_valist (return_type, fn, nargs, args);
10516 va_end (args);
10517 return ret;
10518 }
10519
10520 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10521 FN and a null static chain slot. NARGS is the number of call arguments
10522 which are specified as a va_list ARGS. */
10523
10524 tree
10525 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10526 {
10527 tree t;
10528 int i;
10529
10530 t = build_call_1 (return_type, fn, nargs);
10531 for (i = 0; i < nargs; i++)
10532 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10533 process_call_operands (t);
10534 return t;
10535 }
10536
10537 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10538 FN and a null static chain slot. NARGS is the number of call arguments
10539 which are specified as a tree array ARGS. */
10540
10541 tree
10542 build_call_array_loc (location_t loc, tree return_type, tree fn,
10543 int nargs, const tree *args)
10544 {
10545 tree t;
10546 int i;
10547
10548 t = build_call_1 (return_type, fn, nargs);
10549 for (i = 0; i < nargs; i++)
10550 CALL_EXPR_ARG (t, i) = args[i];
10551 process_call_operands (t);
10552 SET_EXPR_LOCATION (t, loc);
10553 return t;
10554 }
10555
10556 /* Like build_call_array, but takes a vec. */
10557
10558 tree
10559 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10560 {
10561 tree ret, t;
10562 unsigned int ix;
10563
10564 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10565 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10566 CALL_EXPR_ARG (ret, ix) = t;
10567 process_call_operands (ret);
10568 return ret;
10569 }
10570
10571 /* Conveniently construct a function call expression. FNDECL names the
10572 function to be called and N arguments are passed in the array
10573 ARGARRAY. */
10574
10575 tree
10576 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10577 {
10578 tree fntype = TREE_TYPE (fndecl);
10579 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10580
10581 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10582 }
10583
10584 /* Conveniently construct a function call expression. FNDECL names the
10585 function to be called and the arguments are passed in the vector
10586 VEC. */
10587
10588 tree
10589 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10590 {
10591 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10592 vec_safe_address (vec));
10593 }
10594
10595
10596 /* Conveniently construct a function call expression. FNDECL names the
10597 function to be called, N is the number of arguments, and the "..."
10598 parameters are the argument expressions. */
10599
10600 tree
10601 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10602 {
10603 va_list ap;
10604 tree *argarray = XALLOCAVEC (tree, n);
10605 int i;
10606
10607 va_start (ap, n);
10608 for (i = 0; i < n; i++)
10609 argarray[i] = va_arg (ap, tree);
10610 va_end (ap);
10611 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10612 }
10613
10614 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10615 varargs macros aren't supported by all bootstrap compilers. */
10616
10617 tree
10618 build_call_expr (tree fndecl, int n, ...)
10619 {
10620 va_list ap;
10621 tree *argarray = XALLOCAVEC (tree, n);
10622 int i;
10623
10624 va_start (ap, n);
10625 for (i = 0; i < n; i++)
10626 argarray[i] = va_arg (ap, tree);
10627 va_end (ap);
10628 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10629 }
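
/* Illustrative sketch (not part of the original source): middle-end code
   that needs a call to a standard builtin typically writes something like

     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMCPY),
                                  3, dst, src, len);

   where dst, src and len stand for trees of the expected pointer and size
   types; the result is whatever fold_builtin_call_array produces via
   build_call_expr_loc_array above.  */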
10630
10631 /* Build an internal call expression. This is just like CALL_EXPR, except
10632 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10633 internal function call. */
10634
10635 tree
10636 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10637 tree type, int n, ...)
10638 {
10639 va_list ap;
10640 int i;
10641
10642 tree fn = build_call_1 (type, NULL_TREE, n);
10643 va_start (ap, n);
10644 for (i = 0; i < n; i++)
10645 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10646 va_end (ap);
10647 SET_EXPR_LOCATION (fn, loc);
10648 CALL_EXPR_IFN (fn) = ifn;
10649 return fn;
10650 }
10651
10652 /* Create a new constant string literal and return a char* pointer to it.
10653 The STRING_CST value is the LEN characters at STR. */
10654 tree
10655 build_string_literal (int len, const char *str)
10656 {
10657 tree t, elem, index, type;
10658
10659 t = build_string (len, str);
10660 elem = build_type_variant (char_type_node, 1, 0);
10661 index = build_index_type (size_int (len - 1));
10662 type = build_array_type (elem, index);
10663 TREE_TYPE (t) = type;
10664 TREE_CONSTANT (t) = 1;
10665 TREE_READONLY (t) = 1;
10666 TREE_STATIC (t) = 1;
10667
10668 type = build_pointer_type (elem);
10669 t = build1 (ADDR_EXPR, type,
10670 build4 (ARRAY_REF, elem,
10671 t, integer_zero_node, NULL_TREE, NULL_TREE));
10672 return t;
10673 }
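
/* Illustrative sketch (not part of the original source): to pass a literal
   format string to a generated call one might write

     tree fmt = build_string_literal (strlen ("%d\n") + 1, "%d\n");

   which yields an ADDR_EXPR pointing at the first element of a constant
   character array; callers conventionally include the terminating NUL in
   LEN.  */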
10674
10675
10676
10677 /* Return true if T (assumed to be a DECL) must be assigned a memory
10678 location. */
10679
10680 bool
10681 needs_to_live_in_memory (const_tree t)
10682 {
10683 return (TREE_ADDRESSABLE (t)
10684 || is_global_var (t)
10685 || (TREE_CODE (t) == RESULT_DECL
10686 && !DECL_BY_REFERENCE (t)
10687 && aggregate_value_p (t, current_function_decl)));
10688 }
10689
10690 /* Return the value of the constant X, sign-extended to a HOST_WIDE_INT. */
10691
10692 HOST_WIDE_INT
10693 int_cst_value (const_tree x)
10694 {
10695 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10696 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10697
10698 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10699 gcc_assert (cst_and_fits_in_hwi (x));
10700
10701 if (bits < HOST_BITS_PER_WIDE_INT)
10702 {
10703 bool negative = ((val >> (bits - 1)) & 1) != 0;
10704 if (negative)
10705 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10706 else
10707 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10708 }
10709
10710 return val;
10711 }
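
/* Illustrative example (not part of the original source): the result is
   interpreted according to the precision of the constant, not the
   signedness of its type.  For an 8-bit constant whose low byte is 0xff
   the top bit of the 8-bit value is set, so the value is sign-extended
   and -1 is returned; 0x7f comes back unchanged as 127.  */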
10712
10713 /* If TYPE is an integral or pointer type, return an integer type with
10714 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10715 if TYPE is already an integer type of signedness UNSIGNEDP. */
10716
10717 tree
10718 signed_or_unsigned_type_for (int unsignedp, tree type)
10719 {
10720 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10721 return type;
10722
10723 if (TREE_CODE (type) == VECTOR_TYPE)
10724 {
10725 tree inner = TREE_TYPE (type);
10726 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10727 if (!inner2)
10728 return NULL_TREE;
10729 if (inner == inner2)
10730 return type;
10731 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10732 }
10733
10734 if (!INTEGRAL_TYPE_P (type)
10735 && !POINTER_TYPE_P (type)
10736 && TREE_CODE (type) != OFFSET_TYPE)
10737 return NULL_TREE;
10738
10739 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10740 }
10741
10742 /* If TYPE is an integral or pointer type, return an integer type with
10743 the same precision which is unsigned, or itself if TYPE is already an
10744 unsigned integer type. */
10745
10746 tree
10747 unsigned_type_for (tree type)
10748 {
10749 return signed_or_unsigned_type_for (1, type);
10750 }
10751
10752 /* If TYPE is an integral or pointer type, return an integer type with
10753 the same precision which is signed, or itself if TYPE is already a
10754 signed integer type. */
10755
10756 tree
10757 signed_type_for (tree type)
10758 {
10759 return signed_or_unsigned_type_for (0, type);
10760 }
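
/* Illustrative examples (not part of the original source):

     unsigned_type_for (integer_type_node)
       -> an unsigned integer type with the precision of "int"
     signed_type_for (ptr_type_node)
       -> a signed integer type with pointer precision
     unsigned_type_for (a vector of signed 32-bit elements)
       -> a vector type with the same number of unsigned 32-bit elements

   A type that is neither integral, pointer, offset, nor a vector of
   integral elements yields NULL_TREE.  */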
10761
10762 /* If TYPE is a vector type, return a signed integer vector type with the
10763 same width and number of subparts. Otherwise return boolean_type_node. */
10764
10765 tree
10766 truth_type_for (tree type)
10767 {
10768 if (TREE_CODE (type) == VECTOR_TYPE)
10769 {
10770 tree elem = lang_hooks.types.type_for_size
10771 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10772 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10773 }
10774 else
10775 return boolean_type_node;
10776 }
10777
10778 /* Returns the largest value obtainable by casting something in INNER type to
10779 OUTER type. */
10780
10781 tree
10782 upper_bound_in_type (tree outer, tree inner)
10783 {
10784 unsigned int det = 0;
10785 unsigned oprec = TYPE_PRECISION (outer);
10786 unsigned iprec = TYPE_PRECISION (inner);
10787 unsigned prec;
10788
10789 /* Compute a unique number for every combination. */
10790 det |= (oprec > iprec) ? 4 : 0;
10791 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10792 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10793
10794 /* Determine the exponent to use. */
10795 switch (det)
10796 {
10797 case 0:
10798 case 1:
10799 /* oprec <= iprec, outer: signed, inner: don't care. */
10800 prec = oprec - 1;
10801 break;
10802 case 2:
10803 case 3:
10804 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10805 prec = oprec;
10806 break;
10807 case 4:
10808 /* oprec > iprec, outer: signed, inner: signed. */
10809 prec = iprec - 1;
10810 break;
10811 case 5:
10812 /* oprec > iprec, outer: signed, inner: unsigned. */
10813 prec = iprec;
10814 break;
10815 case 6:
10816 /* oprec > iprec, outer: unsigned, inner: signed. */
10817 prec = oprec;
10818 break;
10819 case 7:
10820 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10821 prec = iprec;
10822 break;
10823 default:
10824 gcc_unreachable ();
10825 }
10826
10827 return wide_int_to_tree (outer,
10828 wi::mask (prec, false, TYPE_PRECISION (outer)));
10829 }
10830
10831 /* Returns the smallest value obtainable by casting something in INNER type to
10832 OUTER type. */
10833
10834 tree
10835 lower_bound_in_type (tree outer, tree inner)
10836 {
10837 unsigned oprec = TYPE_PRECISION (outer);
10838 unsigned iprec = TYPE_PRECISION (inner);
10839
10840 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10841 and obtain 0. */
10842 if (TYPE_UNSIGNED (outer)
10843 /* If we are widening something of an unsigned type, OUTER type
10844 contains all values of INNER type. In particular, both INNER
10845 and OUTER types have zero in common. */
10846 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10847 return build_int_cst (outer, 0);
10848 else
10849 {
10850 /* If we are widening a signed type to another signed type, we
10851 want to obtain -2^(iprec-1). If we are keeping the
10852 precision or narrowing to a signed type, we want to obtain
10853 -2^(oprec-1). */
10854 unsigned prec = oprec > iprec ? iprec : oprec;
10855 return wide_int_to_tree (outer,
10856 wi::mask (prec - 1, true,
10857 TYPE_PRECISION (outer)));
10858 }
10859 }
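
/* Illustrative worked examples (not part of the original source):

     upper_bound_in_type (int32, uint8):  oprec > iprec, outer signed,
       inner unsigned -> prec = 8,  result = 255
     upper_bound_in_type (uint16, int32): oprec <= iprec, outer unsigned
       -> prec = 16, result = 65535
     lower_bound_in_type (int16, int8):   both signed, widening
       -> prec = 8,  result = -128
     lower_bound_in_type (uint32, int8):  outer unsigned -> result = 0

   The type names int32, uint8 etc. are shorthand for integer types of the
   given precision and signedness.  */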
10860
10861 /* Return nonzero if two operands that are suitable for PHI nodes are
10862 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10863 SSA_NAME or invariant. Note that this is strictly an optimization.
10864 That is, callers of this function can directly call operand_equal_p
10865 and get the same result, only slower. */
10866
10867 int
10868 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10869 {
10870 if (arg0 == arg1)
10871 return 1;
10872 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10873 return 0;
10874 return operand_equal_p (arg0, arg1, 0);
10875 }
10876
10877 /* Returns the number of trailing zeros in the binary representation of X. */
10878
10879 tree
10880 num_ending_zeros (const_tree x)
10881 {
10882 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10883 }
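
/* Illustrative example (not part of the original source):

     num_ending_zeros (build_int_cst (integer_type_node, 24))

   returns an INTEGER_CST of value 3, since 24 is 11000 in binary and has
   three trailing zero bits.  */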
10884
10885
10886 #define WALK_SUBTREE(NODE) \
10887 do \
10888 { \
10889 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10890 if (result) \
10891 return result; \
10892 } \
10893 while (0)
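
/* Illustrative sketch (not part of the original source): a typical
   walk_tree_fn callback has the shape

     static tree
     find_label_r (tree *tp, int *walk_subtrees, void *data)
     {
       if (TREE_CODE (*tp) == LABEL_DECL)
         return *tp;                  .. stop the walk, report this node
       if (TYPE_P (*tp))
         *walk_subtrees = 0;          .. do not descend into types
       return NULL_TREE;              .. keep walking
     }

   The name find_label_r is hypothetical; DATA is whatever pointer the
   caller handed to walk_tree_1 or walk_tree_without_duplicates.  */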
10894
10895 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10896 to be walked whenever a type is seen in the tree. The rest of the operands
10897 and the return value are as for walk_tree. */
10898
10899 static tree
10900 walk_type_fields (tree type, walk_tree_fn func, void *data,
10901 hash_set<tree> *pset, walk_tree_lh lh)
10902 {
10903 tree result = NULL_TREE;
10904
10905 switch (TREE_CODE (type))
10906 {
10907 case POINTER_TYPE:
10908 case REFERENCE_TYPE:
10909 case VECTOR_TYPE:
10910 /* We have to worry about mutually recursive pointers. These can't
10911 be written in C. They can in Ada. It's pathological, but
10912 there's an ACATS test (c38102a) that checks it. Deal with this
10913 by checking if we're pointing to another pointer, that one
10914 points to another pointer, that one does too, and we have no htab.
10915 If so, get a hash table. We check three levels deep to avoid
10916 the cost of the hash table if we don't need one. */
10917 if (POINTER_TYPE_P (TREE_TYPE (type))
10918 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10919 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10920 && !pset)
10921 {
10922 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10923 func, data);
10924 if (result)
10925 return result;
10926
10927 break;
10928 }
10929
10930 /* ... fall through ... */
10931
10932 case COMPLEX_TYPE:
10933 WALK_SUBTREE (TREE_TYPE (type));
10934 break;
10935
10936 case METHOD_TYPE:
10937 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10938
10939 /* Fall through. */
10940
10941 case FUNCTION_TYPE:
10942 WALK_SUBTREE (TREE_TYPE (type));
10943 {
10944 tree arg;
10945
10946 /* We never want to walk into default arguments. */
10947 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10948 WALK_SUBTREE (TREE_VALUE (arg));
10949 }
10950 break;
10951
10952 case ARRAY_TYPE:
10953 /* Don't follow this node's type if it is a pointer, for fear that
10954 we'll have infinite recursion. If we have a PSET, then we
10955 need not fear. */
10956 if (pset
10957 || (!POINTER_TYPE_P (TREE_TYPE (type))
10958 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10959 WALK_SUBTREE (TREE_TYPE (type));
10960 WALK_SUBTREE (TYPE_DOMAIN (type));
10961 break;
10962
10963 case OFFSET_TYPE:
10964 WALK_SUBTREE (TREE_TYPE (type));
10965 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10966 break;
10967
10968 default:
10969 break;
10970 }
10971
10972 return NULL_TREE;
10973 }
10974
10975 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10976 called with the DATA and the address of each sub-tree. If FUNC returns a
10977 non-NULL value, the traversal is stopped, and the value returned by FUNC
10978 is returned. If PSET is non-NULL it is used to record the nodes visited,
10979 and to avoid visiting a node more than once. */
10980
10981 tree
10982 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10983 hash_set<tree> *pset, walk_tree_lh lh)
10984 {
10985 enum tree_code code;
10986 int walk_subtrees;
10987 tree result;
10988
10989 #define WALK_SUBTREE_TAIL(NODE) \
10990 do \
10991 { \
10992 tp = & (NODE); \
10993 goto tail_recurse; \
10994 } \
10995 while (0)
10996
10997 tail_recurse:
10998 /* Skip empty subtrees. */
10999 if (!*tp)
11000 return NULL_TREE;
11001
11002 /* Don't walk the same tree twice, if the user has requested
11003 that we avoid doing so. */
11004 if (pset && pset->add (*tp))
11005 return NULL_TREE;
11006
11007 /* Call the function. */
11008 walk_subtrees = 1;
11009 result = (*func) (tp, &walk_subtrees, data);
11010
11011 /* If we found something, return it. */
11012 if (result)
11013 return result;
11014
11015 code = TREE_CODE (*tp);
11016
11017 /* Even if we didn't, FUNC may have decided that there was nothing
11018 interesting below this point in the tree. */
11019 if (!walk_subtrees)
11020 {
11021 /* But we still need to check our siblings. */
11022 if (code == TREE_LIST)
11023 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11024 else if (code == OMP_CLAUSE)
11025 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11026 else
11027 return NULL_TREE;
11028 }
11029
11030 if (lh)
11031 {
11032 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11033 if (result || !walk_subtrees)
11034 return result;
11035 }
11036
11037 switch (code)
11038 {
11039 case ERROR_MARK:
11040 case IDENTIFIER_NODE:
11041 case INTEGER_CST:
11042 case REAL_CST:
11043 case FIXED_CST:
11044 case VECTOR_CST:
11045 case STRING_CST:
11046 case BLOCK:
11047 case PLACEHOLDER_EXPR:
11048 case SSA_NAME:
11049 case FIELD_DECL:
11050 case RESULT_DECL:
11051 /* None of these have subtrees other than those already walked
11052 above. */
11053 break;
11054
11055 case TREE_LIST:
11056 WALK_SUBTREE (TREE_VALUE (*tp));
11057 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11058 break;
11059
11060 case TREE_VEC:
11061 {
11062 int len = TREE_VEC_LENGTH (*tp);
11063
11064 if (len == 0)
11065 break;
11066
11067 /* Walk all elements but the first. */
11068 while (--len)
11069 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11070
11071 /* Now walk the first one as a tail call. */
11072 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11073 }
11074
11075 case COMPLEX_CST:
11076 WALK_SUBTREE (TREE_REALPART (*tp));
11077 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11078
11079 case CONSTRUCTOR:
11080 {
11081 unsigned HOST_WIDE_INT idx;
11082 constructor_elt *ce;
11083
11084 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11085 idx++)
11086 WALK_SUBTREE (ce->value);
11087 }
11088 break;
11089
11090 case SAVE_EXPR:
11091 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11092
11093 case BIND_EXPR:
11094 {
11095 tree decl;
11096 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11097 {
11098 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11099 into declarations that are just mentioned, rather than
11100 declared; they don't really belong to this part of the tree.
11101 And, we can see cycles: the initializer for a declaration
11102 can refer to the declaration itself. */
11103 WALK_SUBTREE (DECL_INITIAL (decl));
11104 WALK_SUBTREE (DECL_SIZE (decl));
11105 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11106 }
11107 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11108 }
11109
11110 case STATEMENT_LIST:
11111 {
11112 tree_stmt_iterator i;
11113 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11114 WALK_SUBTREE (*tsi_stmt_ptr (i));
11115 }
11116 break;
11117
11118 case OMP_CLAUSE:
11119 switch (OMP_CLAUSE_CODE (*tp))
11120 {
11121 case OMP_CLAUSE_PRIVATE:
11122 case OMP_CLAUSE_SHARED:
11123 case OMP_CLAUSE_FIRSTPRIVATE:
11124 case OMP_CLAUSE_COPYIN:
11125 case OMP_CLAUSE_COPYPRIVATE:
11126 case OMP_CLAUSE_FINAL:
11127 case OMP_CLAUSE_IF:
11128 case OMP_CLAUSE_NUM_THREADS:
11129 case OMP_CLAUSE_SCHEDULE:
11130 case OMP_CLAUSE_UNIFORM:
11131 case OMP_CLAUSE_DEPEND:
11132 case OMP_CLAUSE_NUM_TEAMS:
11133 case OMP_CLAUSE_THREAD_LIMIT:
11134 case OMP_CLAUSE_DEVICE:
11135 case OMP_CLAUSE_DIST_SCHEDULE:
11136 case OMP_CLAUSE_SAFELEN:
11137 case OMP_CLAUSE_SIMDLEN:
11138 case OMP_CLAUSE__LOOPTEMP_:
11139 case OMP_CLAUSE__SIMDUID_:
11140 case OMP_CLAUSE__CILK_FOR_COUNT_:
11141 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11142 /* FALLTHRU */
11143
11144 case OMP_CLAUSE_NOWAIT:
11145 case OMP_CLAUSE_ORDERED:
11146 case OMP_CLAUSE_DEFAULT:
11147 case OMP_CLAUSE_UNTIED:
11148 case OMP_CLAUSE_MERGEABLE:
11149 case OMP_CLAUSE_PROC_BIND:
11150 case OMP_CLAUSE_INBRANCH:
11151 case OMP_CLAUSE_NOTINBRANCH:
11152 case OMP_CLAUSE_FOR:
11153 case OMP_CLAUSE_PARALLEL:
11154 case OMP_CLAUSE_SECTIONS:
11155 case OMP_CLAUSE_TASKGROUP:
11156 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11157
11158 case OMP_CLAUSE_LASTPRIVATE:
11159 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11160 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11161 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11162
11163 case OMP_CLAUSE_COLLAPSE:
11164 {
11165 int i;
11166 for (i = 0; i < 3; i++)
11167 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11168 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11169 }
11170
11171 case OMP_CLAUSE_LINEAR:
11172 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11173 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11174 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11175 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11176
11177 case OMP_CLAUSE_ALIGNED:
11178 case OMP_CLAUSE_FROM:
11179 case OMP_CLAUSE_TO:
11180 case OMP_CLAUSE_MAP:
11181 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11182 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11183 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11184
11185 case OMP_CLAUSE_REDUCTION:
11186 {
11187 int i;
11188 for (i = 0; i < 4; i++)
11189 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11190 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11191 }
11192
11193 default:
11194 gcc_unreachable ();
11195 }
11196 break;
11197
11198 case TARGET_EXPR:
11199 {
11200 int i, len;
11201
11202 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11203 But, we only want to walk them once. */
11204 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11205 for (i = 0; i < len; ++i)
11206 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11207 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11208 }
11209
11210 case DECL_EXPR:
11211 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11212 defining. We only want to walk into these fields of a type in this
11213 case and not in the general case of a mere reference to the type.
11214
11215 The criterion is as follows: if the field can be an expression, it
11216 must be walked only here. This should be in keeping with the fields
11217 that are directly gimplified in gimplify_type_sizes in order for the
11218 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11219 variable-sized types.
11220
11221 Note that DECLs get walked as part of processing the BIND_EXPR. */
11222 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11223 {
11224 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11225 if (TREE_CODE (*type_p) == ERROR_MARK)
11226 return NULL_TREE;
11227
11228 /* Call the function for the type. See if it returns anything or
11229 doesn't want us to continue. If we are to continue, walk both
11230 the normal fields and those for the declaration case. */
11231 result = (*func) (type_p, &walk_subtrees, data);
11232 if (result || !walk_subtrees)
11233 return result;
11234
11235 /* But do not walk a pointed-to type since it may itself need to
11236 be walked in the declaration case if it isn't anonymous. */
11237 if (!POINTER_TYPE_P (*type_p))
11238 {
11239 result = walk_type_fields (*type_p, func, data, pset, lh);
11240 if (result)
11241 return result;
11242 }
11243
11244 /* If this is a record type, also walk the fields. */
11245 if (RECORD_OR_UNION_TYPE_P (*type_p))
11246 {
11247 tree field;
11248
11249 for (field = TYPE_FIELDS (*type_p); field;
11250 field = DECL_CHAIN (field))
11251 {
11252 /* We'd like to look at the type of the field, but we can
11253 easily get infinite recursion. So assume it's pointed
11254 to elsewhere in the tree. Also, ignore things that
11255 aren't fields. */
11256 if (TREE_CODE (field) != FIELD_DECL)
11257 continue;
11258
11259 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11260 WALK_SUBTREE (DECL_SIZE (field));
11261 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11262 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11263 WALK_SUBTREE (DECL_QUALIFIER (field));
11264 }
11265 }
11266
11267 /* Same for scalar types. */
11268 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11269 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11270 || TREE_CODE (*type_p) == INTEGER_TYPE
11271 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11272 || TREE_CODE (*type_p) == REAL_TYPE)
11273 {
11274 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11275 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11276 }
11277
11278 WALK_SUBTREE (TYPE_SIZE (*type_p));
11279 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11280 }
11281 /* FALLTHRU */
11282
11283 default:
11284 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11285 {
11286 int i, len;
11287
11288 /* Walk over all the sub-trees of this operand. */
11289 len = TREE_OPERAND_LENGTH (*tp);
11290
11291 /* Go through the subtrees. We need to do this in forward order so
11292 that the scope of a FOR_EXPR is handled properly. */
11293 if (len)
11294 {
11295 for (i = 0; i < len - 1; ++i)
11296 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11297 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11298 }
11299 }
11300 /* If this is a type, walk the needed fields in the type. */
11301 else if (TYPE_P (*tp))
11302 return walk_type_fields (*tp, func, data, pset, lh);
11303 break;
11304 }
11305
11306 /* We didn't find what we were looking for. */
11307 return NULL_TREE;
11308
11309 #undef WALK_SUBTREE_TAIL
11310 }
11311 #undef WALK_SUBTREE
11312
11313 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11314
11315 tree
11316 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11317 walk_tree_lh lh)
11318 {
11319 tree result;
11320
11321 hash_set<tree> pset;
11322 result = walk_tree_1 (tp, func, data, &pset, lh);
11323 return result;
11324 }
11325
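/* A minimal usage sketch (hypothetical callback and variable names, not
   part of the original source), assuming the usual walk_tree_fn contract:
   return non-NULL to stop the walk early, clear *WALK_SUBTREES to skip the
   children of the current node.  The callback below counts CALL_EXPR
   nodes reachable from EXPR.

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                    void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree_without_duplicates (&expr, count_calls_r, &n);  */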
11326
11327 tree
11328 tree_block (tree t)
11329 {
11330 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11331
11332 if (IS_EXPR_CODE_CLASS (c))
11333 return LOCATION_BLOCK (t->exp.locus);
11334 gcc_unreachable ();
11335 return NULL;
11336 }
11337
11338 void
11339 tree_set_block (tree t, tree b)
11340 {
11341 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11342
11343 if (IS_EXPR_CODE_CLASS (c))
11344 {
11345 if (b)
11346 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11347 else
11348 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11349 }
11350 else
11351 gcc_unreachable ();
11352 }
11353
11354 /* Create a nameless artificial label and put it in the current
11355 function context. The label has a location of LOC. Returns the
11356 newly created label. */
11357
11358 tree
11359 create_artificial_label (location_t loc)
11360 {
11361 tree lab = build_decl (loc,
11362 LABEL_DECL, NULL_TREE, void_type_node);
11363
11364 DECL_ARTIFICIAL (lab) = 1;
11365 DECL_IGNORED_P (lab) = 1;
11366 DECL_CONTEXT (lab) = current_function_decl;
11367 return lab;
11368 }
11369
11370 /* Given a tree, try to return a useful variable name that we can use
11371 to prefix a temporary that is being assigned the value of the tree.
11372 I.e. given <temp> = &A, return A. */
11373
11374 const char *
11375 get_name (tree t)
11376 {
11377 tree stripped_decl;
11378
11379 stripped_decl = t;
11380 STRIP_NOPS (stripped_decl);
11381 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11382 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11383 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11384 {
11385 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11386 if (!name)
11387 return NULL;
11388 return IDENTIFIER_POINTER (name);
11389 }
11390 else
11391 {
11392 switch (TREE_CODE (stripped_decl))
11393 {
11394 case ADDR_EXPR:
11395 return get_name (TREE_OPERAND (stripped_decl, 0));
11396 default:
11397 return NULL;
11398 }
11399 }
11400 }
11401
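/* For illustration (hypothetical decls, an assumption): for a VAR_DECL
   whose DECL_NAME is the identifier "a", get_name returns "a"; for the
   ADDR_EXPR "&a" the ADDR_EXPR case recurses and also returns "a"; for
   an anonymous SSA_NAME it returns NULL.  */
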
11402 /* Return true if FNTYPE has a variable argument list. */
11403
11404 bool
11405 stdarg_p (const_tree fntype)
11406 {
11407 function_args_iterator args_iter;
11408 tree n = NULL_TREE, t;
11409
11410 if (!fntype)
11411 return false;
11412
11413 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11414 {
11415 n = t;
11416 }
11417
11418 return n != NULL_TREE && n != void_type_node;
11419 }
11420
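/* Worked examples (assumptions about typical front-end trees): for the
   type of "int printf (const char *, ...)" the argument list is not
   terminated by void_type_node, so stdarg_p returns true.  For
   "int f (void)" the last element is void_type_node, and for the
   unprototyped "int g ()" the list is empty; stdarg_p returns false for
   both.  */
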
11421 /* Return true if FNTYPE has a prototype. */
11422
11423 bool
11424 prototype_p (tree fntype)
11425 {
11426 tree t;
11427
11428 gcc_assert (fntype != NULL_TREE);
11429
11430 t = TYPE_ARG_TYPES (fntype);
11431 return (t != NULL_TREE);
11432 }
11433
11434 /* If BLOCK is inlined from an __attribute__((__artificial__))
11435 routine, return a pointer to the location from which it has been
11436 called. */
11437 location_t *
11438 block_nonartificial_location (tree block)
11439 {
11440 location_t *ret = NULL;
11441
11442 while (block && TREE_CODE (block) == BLOCK
11443 && BLOCK_ABSTRACT_ORIGIN (block))
11444 {
11445 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11446
11447 while (TREE_CODE (ao) == BLOCK
11448 && BLOCK_ABSTRACT_ORIGIN (ao)
11449 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11450 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11451
11452 if (TREE_CODE (ao) == FUNCTION_DECL)
11453 {
11454 /* If AO is an artificial inline, point RET to the
11455 call site locus at which it has been inlined and continue
11456 the loop, in case AO's caller is also an artificial
11457 inline. */
11458 if (DECL_DECLARED_INLINE_P (ao)
11459 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11460 ret = &BLOCK_SOURCE_LOCATION (block);
11461 else
11462 break;
11463 }
11464 else if (TREE_CODE (ao) != BLOCK)
11465 break;
11466
11467 block = BLOCK_SUPERCONTEXT (block);
11468 }
11469 return ret;
11470 }
11471
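/* For illustration (hypothetical source, an assumption): given

     static inline void __attribute__ ((__artificial__, __always_inline__))
     check (int x) { if (x < 0) __builtin_trap (); }

   a diagnostic issued inside the inlined body of check is more useful if
   it points at the call site of check rather than at its definition;
   walking up the BLOCK tree as above recovers that call-site location.  */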
11472
11473 /* If EXP is inlined from an __attribute__((__artificial__))
11474 function, return the location of the original call expression. */
11475
11476 location_t
11477 tree_nonartificial_location (tree exp)
11478 {
11479 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11480
11481 if (loc)
11482 return *loc;
11483 else
11484 return EXPR_LOCATION (exp);
11485 }
11486
11487
11488 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11489 nodes. */
11490
11491 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11492
11493 static hashval_t
11494 cl_option_hash_hash (const void *x)
11495 {
11496 const_tree const t = (const_tree) x;
11497 const char *p;
11498 size_t i;
11499 size_t len = 0;
11500 hashval_t hash = 0;
11501
11502 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11503 {
11504 p = (const char *)TREE_OPTIMIZATION (t);
11505 len = sizeof (struct cl_optimization);
11506 }
11507
11508 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11509 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11510
11511 else
11512 gcc_unreachable ();
11513
11514 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11515 something else. */
11516 for (i = 0; i < len; i++)
11517 if (p[i])
11518 hash = (hash << 4) ^ ((i << 2) | p[i]);
11519
11520 return hash;
11521 }
11522
11523 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11524 TARGET_OPTION tree node) is the same as that given by *Y, a node of
11525 the same kind. */
11526
11527 static int
11528 cl_option_hash_eq (const void *x, const void *y)
11529 {
11530 const_tree const xt = (const_tree) x;
11531 const_tree const yt = (const_tree) y;
11532 const char *xp;
11533 const char *yp;
11534 size_t len;
11535
11536 if (TREE_CODE (xt) != TREE_CODE (yt))
11537 return 0;
11538
11539 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11540 {
11541 xp = (const char *)TREE_OPTIMIZATION (xt);
11542 yp = (const char *)TREE_OPTIMIZATION (yt);
11543 len = sizeof (struct cl_optimization);
11544 }
11545
11546 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11547 {
11548 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11549 TREE_TARGET_OPTION (yt));
11550 }
11551
11552 else
11553 gcc_unreachable ();
11554
11555 return (memcmp (xp, yp, len) == 0);
11556 }
11557
11558 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11559
11560 tree
11561 build_optimization_node (struct gcc_options *opts)
11562 {
11563 tree t;
11564 void **slot;
11565
11566 /* Use the cache of optimization nodes. */
11567
11568 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11569 opts);
11570
11571 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11572 t = (tree) *slot;
11573 if (!t)
11574 {
11575 /* Insert this one into the hash table. */
11576 t = cl_optimization_node;
11577 *slot = t;
11578
11579 /* Make a new node for next time round. */
11580 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11581 }
11582
11583 return t;
11584 }
11585
11586 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11587
11588 tree
11589 build_target_option_node (struct gcc_options *opts)
11590 {
11591 tree t;
11592 void **slot;
11593
11594 /* Use the cache of target option nodes. */
11595
11596 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11597 opts);
11598
11599 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11600 t = (tree) *slot;
11601 if (!t)
11602 {
11603 /* Insert this one into the hash table. */
11604 t = cl_target_option_node;
11605 *slot = t;
11606
11607 /* Make a new node for next time round. */
11608 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11609 }
11610
11611 return t;
11612 }
11613
11614 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11615 Called through htab_traverse. */
11616
11617 static int
11618 prepare_target_option_node_for_pch (void **slot, void *)
11619 {
11620 tree node = (tree) *slot;
11621 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11622 TREE_TARGET_GLOBALS (node) = NULL;
11623 return 1;
11624 }
11625
11626 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11627 so that they aren't saved during PCH writing. */
11628
11629 void
11630 prepare_target_option_nodes_for_pch (void)
11631 {
11632 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11633 NULL);
11634 }
11635
11636 /* Determine the "ultimate origin" of a block. The block may be an inlined
11637 instance of an inlined instance of a block which is local to an inline
11638 function, so we have to trace all of the way back through the origin chain
11639 to find out what sort of node actually served as the original seed for the
11640 given block. */
11641
11642 tree
11643 block_ultimate_origin (const_tree block)
11644 {
11645 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11646
11647 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11648 we're trying to output the abstract instance of this function. */
11649 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11650 return NULL_TREE;
11651
11652 if (immediate_origin == NULL_TREE)
11653 return NULL_TREE;
11654 else
11655 {
11656 tree ret_val;
11657 tree lookahead = immediate_origin;
11658
11659 do
11660 {
11661 ret_val = lookahead;
11662 lookahead = (TREE_CODE (ret_val) == BLOCK
11663 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11664 }
11665 while (lookahead != NULL && lookahead != ret_val);
11666
11667 /* The block's abstract origin chain may not be the *ultimate* origin of
11668 the block. It could lead to a DECL that has an abstract origin set.
11669 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11670 will give us if it has one). Note that DECL's abstract origins are
11671 supposed to be the most distant ancestor (or so decl_ultimate_origin
11672 claims), so we don't need to loop following the DECL origins. */
11673 if (DECL_P (ret_val))
11674 return DECL_ORIGIN (ret_val);
11675
11676 return ret_val;
11677 }
11678 }
11679
11680 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11681 no instruction. */
11682
11683 bool
11684 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11685 {
11686 /* Use precision rather than machine mode when we can, which gives
11687 the correct answer even for submode (bit-field) types. */
11688 if ((INTEGRAL_TYPE_P (outer_type)
11689 || POINTER_TYPE_P (outer_type)
11690 || TREE_CODE (outer_type) == OFFSET_TYPE)
11691 && (INTEGRAL_TYPE_P (inner_type)
11692 || POINTER_TYPE_P (inner_type)
11693 || TREE_CODE (inner_type) == OFFSET_TYPE))
11694 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11695
11696 /* Otherwise fall back on comparing machine modes (e.g. for
11697 aggregate types, floats). */
11698 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11699 }
11700
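/* Worked examples (assuming a typical target where int and unsigned int
   are both 32 bits): a conversion between "int" and "unsigned int"
   preserves TYPE_PRECISION, so tree_nop_conversion_p returns true; a
   conversion from "int" to "short" changes the precision and returns
   false; for aggregate or floating-point types the result depends on
   whether the two TYPE_MODEs match.  */
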
11701 /* Return true iff conversion in EXP generates no instruction. Mark
11702 it inline so that we fully inline into the stripping functions even
11703 though we have two uses of this function. */
11704
11705 static inline bool
11706 tree_nop_conversion (const_tree exp)
11707 {
11708 tree outer_type, inner_type;
11709
11710 if (!CONVERT_EXPR_P (exp)
11711 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11712 return false;
11713 if (TREE_OPERAND (exp, 0) == error_mark_node)
11714 return false;
11715
11716 outer_type = TREE_TYPE (exp);
11717 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11718
11719 if (!inner_type)
11720 return false;
11721
11722 return tree_nop_conversion_p (outer_type, inner_type);
11723 }
11724
11725 /* Return true iff conversion in EXP generates no instruction. Don't
11726 consider conversions changing the signedness. */
11727
11728 static bool
11729 tree_sign_nop_conversion (const_tree exp)
11730 {
11731 tree outer_type, inner_type;
11732
11733 if (!tree_nop_conversion (exp))
11734 return false;
11735
11736 outer_type = TREE_TYPE (exp);
11737 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11738
11739 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11740 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11741 }
11742
11743 /* Strip conversions from EXP according to tree_nop_conversion and
11744 return the resulting expression. */
11745
11746 tree
11747 tree_strip_nop_conversions (tree exp)
11748 {
11749 while (tree_nop_conversion (exp))
11750 exp = TREE_OPERAND (exp, 0);
11751 return exp;
11752 }
11753
11754 /* Strip conversions from EXP according to tree_sign_nop_conversion
11755 and return the resulting expression. */
11756
11757 tree
11758 tree_strip_sign_nop_conversions (tree exp)
11759 {
11760 while (tree_sign_nop_conversion (exp))
11761 exp = TREE_OPERAND (exp, 0);
11762 return exp;
11763 }
11764
11765 /* Strip any floating point extensions from EXP and return the result. */
11766 tree
11767 strip_float_extensions (tree exp)
11768 {
11769 tree sub, expt, subt;
11770
11771 /* For a floating point constant, look up the narrowest type that can hold
11772 it properly and handle it like (type)(narrowest_type)constant.
11773 This way we can optimize for instance a=a*2.0 where "a" is float
11774 but 2.0 is a double constant. */
11775 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11776 {
11777 REAL_VALUE_TYPE orig;
11778 tree type = NULL;
11779
11780 orig = TREE_REAL_CST (exp);
11781 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11782 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11783 type = float_type_node;
11784 else if (TYPE_PRECISION (TREE_TYPE (exp))
11785 > TYPE_PRECISION (double_type_node)
11786 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11787 type = double_type_node;
11788 if (type)
11789 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11790 }
11791
11792 if (!CONVERT_EXPR_P (exp))
11793 return exp;
11794
11795 sub = TREE_OPERAND (exp, 0);
11796 subt = TREE_TYPE (sub);
11797 expt = TREE_TYPE (exp);
11798
11799 if (!FLOAT_TYPE_P (subt))
11800 return exp;
11801
11802 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11803 return exp;
11804
11805 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11806 return exp;
11807
11808 return strip_float_extensions (sub);
11809 }
11810
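/* For illustration (hypothetical source, an assumption): in
   "float a; ... a * 2.0" the operands are "(double) a" and the double
   REAL_CST 2.0.  strip_float_extensions ((double) a) returns "a", and on
   the constant it returns an equivalent float constant because 2.0 is
   exactly representable in float, so a caller can legitimately narrow
   the whole multiplication back to float.  */
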
11811 /* Strip out all handled components that produce invariant
11812 offsets. */
11813
11814 const_tree
11815 strip_invariant_refs (const_tree op)
11816 {
11817 while (handled_component_p (op))
11818 {
11819 switch (TREE_CODE (op))
11820 {
11821 case ARRAY_REF:
11822 case ARRAY_RANGE_REF:
11823 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11824 || TREE_OPERAND (op, 2) != NULL_TREE
11825 || TREE_OPERAND (op, 3) != NULL_TREE)
11826 return NULL;
11827 break;
11828
11829 case COMPONENT_REF:
11830 if (TREE_OPERAND (op, 2) != NULL_TREE)
11831 return NULL;
11832 break;
11833
11834 default:;
11835 }
11836 op = TREE_OPERAND (op, 0);
11837 }
11838
11839 return op;
11840 }
11841
11842 static GTY(()) tree gcc_eh_personality_decl;
11843
11844 /* Return the GCC personality function decl. */
11845
11846 tree
11847 lhd_gcc_personality (void)
11848 {
11849 if (!gcc_eh_personality_decl)
11850 gcc_eh_personality_decl = build_personality_function ("gcc");
11851 return gcc_eh_personality_decl;
11852 }
11853
11854 /* TARGET is a call target of a GIMPLE call statement
11855 (obtained by gimple_call_fn). Return true if it is an
11856 OBJ_TYPE_REF representing a virtual call of a C++ method.
11857 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11858 through a cast, where the middle-end devirtualization machinery
11859 can't apply.) */
11860
11861 bool
11862 virtual_method_call_p (tree target)
11863 {
11864 if (TREE_CODE (target) != OBJ_TYPE_REF)
11865 return false;
11866 target = TREE_TYPE (target);
11867 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11868 target = TREE_TYPE (target);
11869 if (TREE_CODE (target) == FUNCTION_TYPE)
11870 return false;
11871 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11872 return true;
11873 }
11874
11875 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11876
11877 tree
11878 obj_type_ref_class (tree ref)
11879 {
11880 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11881 ref = TREE_TYPE (ref);
11882 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11883 ref = TREE_TYPE (ref);
11884 /* We look for the type that THIS points to. Objective-C also builds
11885 OBJ_TYPE_REF for non-method calls; their first parameter
11886 ID however also corresponds to a class type. */
11887 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11888 || TREE_CODE (ref) == FUNCTION_TYPE);
11889 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11890 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11891 return TREE_TYPE (ref);
11892 }
11893
11894 /* Return true if T is in an anonymous namespace. */
11895
11896 bool
11897 type_in_anonymous_namespace_p (const_tree t)
11898 {
11899 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11900 builtin types; those have a NULL TYPE_CONTEXT. */
11901 if (!TYPE_CONTEXT (t))
11902 return false;
11903 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11904 }
11905
11906 /* Try to find a base info of BINFO that would have its field decl at offset
11907 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11908 found, return it, otherwise return NULL_TREE. */
11909
11910 tree
11911 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11912 {
11913 tree type = BINFO_TYPE (binfo);
11914
11915 while (true)
11916 {
11917 HOST_WIDE_INT pos, size;
11918 tree fld;
11919 int i;
11920
11921 if (types_same_for_odr (type, expected_type))
11922 return binfo;
11923 if (offset < 0)
11924 return NULL_TREE;
11925
11926 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11927 {
11928 if (TREE_CODE (fld) != FIELD_DECL)
11929 continue;
11930
11931 pos = int_bit_position (fld);
11932 size = tree_to_uhwi (DECL_SIZE (fld));
11933 if (pos <= offset && (pos + size) > offset)
11934 break;
11935 }
11936 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11937 return NULL_TREE;
11938
11939 if (!DECL_ARTIFICIAL (fld))
11940 {
11941 binfo = TYPE_BINFO (TREE_TYPE (fld));
11942 if (!binfo)
11943 return NULL_TREE;
11944 }
11945 /* Offset 0 indicates the primary base, whose vtable contents are
11946 represented in the binfo for the derived class. */
11947 else if (offset != 0)
11948 {
11949 tree base_binfo, binfo2 = binfo;
11950
11951 /* Find the BINFO corresponding to FLD. This is made a bit harder
11952 by the fact that with virtual inheritance we may need to walk down
11953 the non-virtual inheritance chain. */
11954 while (true)
11955 {
11956 tree containing_binfo = NULL, found_binfo = NULL;
11957 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11958 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11959 {
11960 found_binfo = base_binfo;
11961 break;
11962 }
11963 else
11964 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11965 - tree_to_shwi (BINFO_OFFSET (binfo)))
11966 * BITS_PER_UNIT < pos
11967 /* Rule out types with no virtual methods, or we can get confused
11968 here by zero-sized bases. */
11969 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11970 && (!containing_binfo
11971 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11972 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11973 containing_binfo = base_binfo;
11974 if (found_binfo)
11975 {
11976 binfo = found_binfo;
11977 break;
11978 }
11979 if (!containing_binfo)
11980 return NULL_TREE;
11981 binfo2 = containing_binfo;
11982 }
11983 }
11984
11985 type = TREE_TYPE (fld);
11986 offset -= pos;
11987 }
11988 }
11989
11990 /* Returns true if X is a typedef decl. */
11991
11992 bool
11993 is_typedef_decl (tree x)
11994 {
11995 return (x && TREE_CODE (x) == TYPE_DECL
11996 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11997 }
11998
11999 /* Returns true iff TYPE is a type variant created for a typedef. */
12000
12001 bool
12002 typedef_variant_p (tree type)
12003 {
12004 return is_typedef_decl (TYPE_NAME (type));
12005 }
12006
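/* For illustration (hypothetical declaration, an assumption): for
   "typedef int myint;" the front end creates a TYPE_DECL whose
   DECL_ORIGINAL_TYPE is "int", so is_typedef_decl is true for that
   TYPE_DECL and typedef_variant_p is true for the "myint" variant type,
   while both are false for plain "int".  */
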
12007 /* Warn about a use of an identifier which was marked deprecated. */
12008 void
12009 warn_deprecated_use (tree node, tree attr)
12010 {
12011 const char *msg;
12012
12013 if (node == 0 || !warn_deprecated_decl)
12014 return;
12015
12016 if (!attr)
12017 {
12018 if (DECL_P (node))
12019 attr = DECL_ATTRIBUTES (node);
12020 else if (TYPE_P (node))
12021 {
12022 tree decl = TYPE_STUB_DECL (node);
12023 if (decl)
12024 attr = lookup_attribute ("deprecated",
12025 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12026 }
12027 }
12028
12029 if (attr)
12030 attr = lookup_attribute ("deprecated", attr);
12031
12032 if (attr)
12033 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12034 else
12035 msg = NULL;
12036
12037 if (DECL_P (node))
12038 {
12039 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12040 if (msg)
12041 warning (OPT_Wdeprecated_declarations,
12042 "%qD is deprecated (declared at %r%s:%d%R): %s",
12043 node, "locus", xloc.file, xloc.line, msg);
12044 else
12045 warning (OPT_Wdeprecated_declarations,
12046 "%qD is deprecated (declared at %r%s:%d%R)",
12047 node, "locus", xloc.file, xloc.line);
12048 }
12049 else if (TYPE_P (node))
12050 {
12051 tree what = NULL_TREE;
12052 tree decl = TYPE_STUB_DECL (node);
12053
12054 if (TYPE_NAME (node))
12055 {
12056 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12057 what = TYPE_NAME (node);
12058 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12059 && DECL_NAME (TYPE_NAME (node)))
12060 what = DECL_NAME (TYPE_NAME (node));
12061 }
12062
12063 if (decl)
12064 {
12065 expanded_location xloc
12066 = expand_location (DECL_SOURCE_LOCATION (decl));
12067 if (what)
12068 {
12069 if (msg)
12070 warning (OPT_Wdeprecated_declarations,
12071 "%qE is deprecated (declared at %r%s:%d%R): %s",
12072 what, "locus", xloc.file, xloc.line, msg);
12073 else
12074 warning (OPT_Wdeprecated_declarations,
12075 "%qE is deprecated (declared at %r%s:%d%R)",
12076 what, "locus", xloc.file, xloc.line);
12077 }
12078 else
12079 {
12080 if (msg)
12081 warning (OPT_Wdeprecated_declarations,
12082 "type is deprecated (declared at %r%s:%d%R): %s",
12083 "locus", xloc.file, xloc.line, msg);
12084 else
12085 warning (OPT_Wdeprecated_declarations,
12086 "type is deprecated (declared at %r%s:%d%R)",
12087 "locus", xloc.file, xloc.line);
12088 }
12089 }
12090 else
12091 {
12092 if (what)
12093 {
12094 if (msg)
12095 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12096 what, msg);
12097 else
12098 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12099 }
12100 else
12101 {
12102 if (msg)
12103 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12104 msg);
12105 else
12106 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12107 }
12108 }
12109 }
12110 }
12111
12112 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12113 somewhere in it. */
12114
12115 bool
12116 contains_bitfld_component_ref_p (const_tree ref)
12117 {
12118 while (handled_component_p (ref))
12119 {
12120 if (TREE_CODE (ref) == COMPONENT_REF
12121 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12122 return true;
12123 ref = TREE_OPERAND (ref, 0);
12124 }
12125
12126 return false;
12127 }
12128
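/* For illustration (hypothetical type, an assumption): given
   "struct s { int i : 3; int j; } *p;", the reference "p->i" is a
   COMPONENT_REF whose FIELD_DECL has DECL_BIT_FIELD set, so
   contains_bitfld_component_ref_p returns true for it, whereas it
   returns false for "p->j".  */
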
12129 /* Try to determine whether a TRY_CATCH expression can fall through.
12130 This is a subroutine of block_may_fallthru. */
12131
12132 static bool
12133 try_catch_may_fallthru (const_tree stmt)
12134 {
12135 tree_stmt_iterator i;
12136
12137 /* If the TRY block can fall through, the whole TRY_CATCH can
12138 fall through. */
12139 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12140 return true;
12141
12142 i = tsi_start (TREE_OPERAND (stmt, 1));
12143 switch (TREE_CODE (tsi_stmt (i)))
12144 {
12145 case CATCH_EXPR:
12146 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12147 catch expression and a body. The whole TRY_CATCH may fall
12148 through iff any of the catch bodies falls through. */
12149 for (; !tsi_end_p (i); tsi_next (&i))
12150 {
12151 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12152 return true;
12153 }
12154 return false;
12155
12156 case EH_FILTER_EXPR:
12157 /* The exception filter expression only matters if there is an
12158 exception. If the exception does not match EH_FILTER_TYPES,
12159 we will execute EH_FILTER_FAILURE, and we will fall through
12160 if that falls through. If the exception does match
12161 EH_FILTER_TYPES, the stack unwinder will continue up the
12162 stack, so we will not fall through. We don't know whether we
12163 will throw an exception which matches EH_FILTER_TYPES or not,
12164 so we just ignore EH_FILTER_TYPES and assume that we might
12165 throw an exception which doesn't match. */
12166 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12167
12168 default:
12169 /* This case represents statements to be executed when an
12170 exception occurs. Those statements are implicitly followed
12171 by a RESX statement to resume execution after the exception.
12172 So in this case the TRY_CATCH never falls through. */
12173 return false;
12174 }
12175 }
12176
12177 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12178 need not be 100% accurate; simply be conservative and return true if we
12179 don't know. This is used only to avoid stupidly generating extra code.
12180 If we're wrong, we'll just delete the extra code later. */
12181
12182 bool
12183 block_may_fallthru (const_tree block)
12184 {
12185 /* This CONST_CAST is okay because expr_last returns its argument
12186 unmodified and we assign it to a const_tree. */
12187 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12188
12189 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12190 {
12191 case GOTO_EXPR:
12192 case RETURN_EXPR:
12193 /* Easy cases. If the last statement of the block implies
12194 control transfer, then we can't fall through. */
12195 return false;
12196
12197 case SWITCH_EXPR:
12198 /* If SWITCH_LABELS is set, this is lowered, and represents a
12199 branch to a selected label and hence cannot fall through.
12200 Otherwise SWITCH_BODY is set, and the switch can fall
12201 through. */
12202 return SWITCH_LABELS (stmt) == NULL_TREE;
12203
12204 case COND_EXPR:
12205 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12206 return true;
12207 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12208
12209 case BIND_EXPR:
12210 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12211
12212 case TRY_CATCH_EXPR:
12213 return try_catch_may_fallthru (stmt);
12214
12215 case TRY_FINALLY_EXPR:
12216 /* The finally clause is always executed after the try clause,
12217 so if it does not fall through, then the try-finally will not
12218 fall through. Otherwise, if the try clause does not fall
12219 through, then when the finally clause falls through it will
12220 resume execution wherever the try clause was going. So the
12221 whole try-finally will only fall through if both the try
12222 clause and the finally clause fall through. */
12223 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12224 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12225
12226 case MODIFY_EXPR:
12227 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12228 stmt = TREE_OPERAND (stmt, 1);
12229 else
12230 return true;
12231 /* FALLTHRU */
12232
12233 case CALL_EXPR:
12234 /* Functions that do not return do not fall through. */
12235 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12236
12237 case CLEANUP_POINT_EXPR:
12238 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12239
12240 case TARGET_EXPR:
12241 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12242
12243 case ERROR_MARK:
12244 return true;
12245
12246 default:
12247 return lang_hooks.block_may_fallthru (stmt);
12248 }
12249 }
12250
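/* For illustration (hypothetical GENERIC, an assumption): a block whose
   last statement is a RETURN_EXPR, or a CALL_EXPR to a noreturn function
   such as abort, cannot fall through, so block_may_fallthru returns
   false; for a COND_EXPR it returns true as soon as either arm may fall
   through.  */
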
12251 /* True if we are using EH to handle cleanups. */
12252 static bool using_eh_for_cleanups_flag = false;
12253
12254 /* This routine is called from front ends to indicate eh should be used for
12255 cleanups. */
12256 void
12257 using_eh_for_cleanups (void)
12258 {
12259 using_eh_for_cleanups_flag = true;
12260 }
12261
12262 /* Query whether EH is used for cleanups. */
12263 bool
12264 using_eh_for_cleanups_p (void)
12265 {
12266 return using_eh_for_cleanups_flag;
12267 }
12268
12269 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12270 const char *
12271 get_tree_code_name (enum tree_code code)
12272 {
12273 const char *invalid = "<invalid tree code>";
12274
12275 if (code >= MAX_TREE_CODES)
12276 return invalid;
12277
12278 return tree_code_name[code];
12279 }
12280
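/* For example, get_tree_code_name (PLUS_EXPR) returns "plus_expr" (the
   NAME string of the corresponding DEFTREECODE entry), while any value
   not below MAX_TREE_CODES returns "<invalid tree code>".  */
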
12281 /* Drops the TREE_OVERFLOW flag from T. */
12282
12283 tree
12284 drop_tree_overflow (tree t)
12285 {
12286 gcc_checking_assert (TREE_OVERFLOW (t));
12287
12288 /* For tree codes with a sharing machinery re-build the result. */
12289 if (TREE_CODE (t) == INTEGER_CST)
12290 return wide_int_to_tree (TREE_TYPE (t), t);
12291
12292 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12293 and drop the flag. */
12294 t = copy_node (t);
12295 TREE_OVERFLOW (t) = 0;
12296 return t;
12297 }
12298
12299 /* Given a memory reference expression T, return its base address.
12300 The base address of a memory reference expression is the main
12301 object being referenced. For instance, the base address for
12302 'array[i].fld[j]' is 'array'. You can think of this as stripping
12303 away the offset part from a memory address.
12304
12305 This function calls handled_component_p to strip away all the inner
12306 parts of the memory reference until it reaches the base object. */
12307
12308 tree
12309 get_base_address (tree t)
12310 {
12311 while (handled_component_p (t))
12312 t = TREE_OPERAND (t, 0);
12313
12314 if ((TREE_CODE (t) == MEM_REF
12315 || TREE_CODE (t) == TARGET_MEM_REF)
12316 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12317 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12318
12319 /* ??? Either the alias oracle or all callers need to properly deal
12320 with WITH_SIZE_EXPRs before we can look through those. */
12321 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12322 return NULL_TREE;
12323
12324 return t;
12325 }
12326
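/* Worked examples (hypothetical references, an assumption): for
   "array[i].fld[j]" the handled components are stripped down to "array";
   for a MEM_REF or TARGET_MEM_REF whose address operand is "&x" the
   result is the declaration "x"; for a WITH_SIZE_EXPR the result is
   NULL_TREE, as noted above.  */
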
12327 /* Return the machine mode of T. For vectors, returns the mode of the
12328 inner type. The main use case is to feed the result to HONOR_NANS,
12329 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12330
12331 machine_mode
12332 element_mode (const_tree t)
12333 {
12334 if (!TYPE_P (t))
12335 t = TREE_TYPE (t);
12336 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12337 t = TREE_TYPE (t);
12338 return TYPE_MODE (t);
12339 }
12340
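/* For illustration (typical modes on common targets, an assumption): for
   a "vector(4) float" type element_mode returns SFmode rather than the
   vector mode, for "_Complex double" it returns DFmode, and for a scalar
   "double" it is simply TYPE_MODE, i.e. DFmode.  */
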
12341 #include "gt-tree.h"